//! A bunch of math-related functions for use with
//! the physics system.
use crate::entity::{ChunkEntities, PositionComponent};
use crate::physics::block_bboxes::bbox_for_block;
use crate::physics::AABBExt;
use feather_blocks::Block;
use feather_core::world::{BlockPosition, ChunkMap, Position};
use feather_core::{BlockExt, ChunkPosition};
use glm::{vec3, DVec3, Vec3};
use heapless::consts::*;
use nalgebra::{Isometry3, Point3};
use ncollide3d::bounding_volume::AABB;
use ncollide3d::query;
use ncollide3d::query::{Ray, RayCast};
use ncollide3d::shape::{Compound, Cuboid, ShapeHandle};
use smallvec::SmallVec;
use specs::storage::GenericReadStorage;
use specs::Entity;
use std::cmp::Ordering;
use std::f64::INFINITY;
// TODO is a bitflag really the most
// idiomatic way to do this?
bitflags! {
/// A side.
///
/// * East is on the positive X side.
/// * West is on the negative X side.
/// * North is on the positive Z side.
    /// * South is on the negative Z side.
/// * Top is on the positive Y side.
/// * Bottom is on the negative Y side.
pub struct Side: u8 {
const EAST = 0x01;
const WEST = 0x02;
const NORTH = 0x04;
const SOUTH = 0x08;
const TOP = 0x10;
const BOTTOM = 0x20;
const NONE = 0x40;
}
}
impl Side {
/// Returns a vector with coordinates set to 1.0
/// where the face is toward the positive axis
/// and to -1.0 where the face is toward the negative
/// axis.
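    ///
    /// # Examples
    ///
    /// A minimal sketch of the expected output (not compiled as a doc test,
    /// since `Side` is internal to the server crate):
    ///
    /// ```ignore
    /// assert_eq!(Side::TOP.as_vector(), glm::vec3(0.0, 1.0, 0.0));
    /// assert_eq!(Side::WEST.as_vector(), glm::vec3(-1.0, 0.0, 0.0));
    /// ```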
pub fn as_vector(self) -> DVec3 {
let mut vector = glm::vec3(0.0, 0.0, 0.0);
if self.contains(Side::EAST) {
vector.x = 1.0;
} else if self.contains(Side::WEST) {
vector.x = -1.0;
}
if self.contains(Side::NORTH) {
vector.z = 1.0;
} else if self.contains(Side::SOUTH) {
vector.z = -1.0;
}
if self.contains(Side::TOP) {
vector.y = 1.0;
} else if self.contains(Side::BOTTOM) {
vector.y = -1.0;
}
vector
}
}
/// The position at which a ray impacts a block.
#[derive(Debug, Clone, PartialEq)]
pub struct RayImpact {
/// The position of the block which was impacted.
pub block: BlockPosition,
/// The exact position, in world coordinates, at
/// which the ray met the block.
pub pos: Position,
/// The face(s) of the block where the ray impacted.
pub face: Side,
}
/// Finds the first block impacted by the given ray.
///
/// Traces up to the distance whose square is `max_distance_squared` before
/// returning `None` if no block was found.
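///
/// # Examples
///
/// A rough sketch mirroring the unit tests below; it assumes a `ChunkMap`
/// whose blocks are solid up to Y = 64 (see `chunk_map()` in the tests):
///
/// ```ignore
/// // Cast a ray straight down from one block above the surface.
/// let impact = block_impacted_by_ray(&map, vec3(0.0, 65.0, 0.0), vec3(0.0, -1.0, 0.0), 5.0);
/// assert_eq!(
///     impact,
///     Some(RayImpact {
///         block: BlockPosition::new(0, 64, 0),
///         pos: position!(0.0, 65.0, 0.0),
///         face: Side::TOP,
///     })
/// );
/// ```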
pub fn block_impacted_by_ray(
chunk_map: &ChunkMap,
origin: DVec3,
ray: DVec3,
max_distance_squared: f64,
) -> Option<RayImpact> {
if ray == vec3(0.0, 0.0, 0.0) {
return None;
}
// Go along path of ray and find all points
// where one or more coordinates are integers.
// Any position with an integer component
// is a block boundary, which means a block
// could be found at the position.
//
// This algorithm is based on "A Fast Voxel Traversal Algorithm for Ray Tracing"
// by John Amanatides and Andrew Woo and has been adapted
// to our purposes.
let direction = ray.normalize();
let mut dist_traveled = glm::vec3(0.0f64, 0.0, 0.0);
let mut step = glm::vec3(0, 0, 0);
let mut delta = glm::vec3(INFINITY, INFINITY, INFINITY);
let mut next = glm::vec3(INFINITY, INFINITY, INFINITY);
// TODO this implementation does not properly
// handle when a ray hits multiple faces.
// In practice, this should not be an issue,
    // but it may cause subtle issues in the future.
let mut face = Side::NONE;
match direction.x.partial_cmp(&0.0).unwrap() {
Ordering::Greater => {
step.x = 1;
delta.x = 1.0 / direction.x;
next.x = ((origin.x + 1.0).floor() - origin.x) / direction.x; // Brings X position to next integer
}
Ordering::Less => {
step.x = -1;
delta.x = (1.0 / direction.x).abs();
next.x = ((origin.x - (origin.x - 1.0).ceil()) / direction.x).abs();
}
_ => (),
}
match direction.y.partial_cmp(&0.0).unwrap() {
Ordering::Greater => {
step.y = 1;
delta.y = 1.0 / direction.y;
next.y = ((origin.y + 1.0).floor() - origin.y) / direction.y;
}
Ordering::Less => {
step.y = -1;
delta.y = (1.0 / direction.y).abs();
next.y = ((origin.y - (origin.y - 1.0).ceil()) / direction.y).abs();
}
_ => (),
}
match direction.z.partial_cmp(&0.0).unwrap() {
Ordering::Greater => {
step.z = 1;
delta.z = 1.0 / direction.z;
next.z = ((origin.z + 1.0).floor() - origin.z) / direction.z;
}
Ordering::Less => {
step.z = -1;
delta.z = (1.0 / direction.z).abs();
next.z = ((origin.z - (origin.z - 1.0).ceil()) / direction.z).abs();
}
_ => (),
}
let mut current_pos = Position::from(origin).block_pos();
while dist_traveled.magnitude_squared() < max_distance_squared {
if let Some(block) = chunk_map.block_at(current_pos) {
if block.is_solid() {
// Calculate world-space position of
// impact using `ncollide`.
let ray = Ray::new(Point3::from(origin), direction);
let shape = block_shape(&block);
let isometry = block_isometry(current_pos);
let impact = match shape.toi_and_normal_with_ray(&isometry, &ray, true) {
Some(toi) => toi,
None => continue,
};
let pos = Position::from(origin + impact.toi * direction);
return Some(RayImpact {
block: current_pos,
pos,
face,
});
}
} else {
// Traveled outside loaded chunks - no blocks found
return None;
}
if next.x < next.y {
if next.x < next.z {
next.x += delta.x;
current_pos.x += step.x;
dist_traveled.x += 1.0;
face = if step.x == 1 { Side::WEST } else { Side::EAST }
} else {
next.z += delta.z;
current_pos.z += step.z;
dist_traveled.z += 1.0;
face = if step.z == 1 {
Side::SOUTH
} else {
Side::NORTH
}
}
} else if next.y < next.z {
next.y += delta.y;
current_pos.y += step.y;
dist_traveled.y += 1.0;
face = if step.y == 1 { Side::BOTTOM } else { Side::TOP }
} else {
next.z += delta.z;
current_pos.z += step.z;
dist_traveled.z += 1.0;
face = if step.z == 1 {
Side::SOUTH
} else {
Side::NORTH
}
}
}
None
}
/// Returns all entities within the given distance of the given
/// position.
///
/// # Panics
/// Panics if either coordinate of the radius is negative.
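///
/// # Examples
///
/// A hedged sketch mirroring the unit test below; `w` is assumed to be a
/// `specs::World` populated by the crate's test helpers:
///
/// ```ignore
/// let entities = nearby_entities(
///     &w.fetch(),
///     &w.read_component(),
///     position!(0.0, 0.0, 0.0),
///     vec3(100.0, 1.0, 50.0),
/// );
/// // Only entities within ±100 on X, ±1 on Y and ±50 on Z are returned.
/// ```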
pub fn nearby_entities<S>(
chunk_entities: &ChunkEntities,
positions: &S,
pos: Position,
radius: DVec3,
) -> SmallVec<[Entity; 4]>
where
S: GenericReadStorage<Component = PositionComponent>,
{
assert!(radius.x >= 0.0);
assert!(radius.y >= 0.0);
assert!(radius.z >= 0.0);
let mut result = smallvec![];
for chunk in chunks_within_distance(pos, radius) {
let entities = chunk_entities.entities_in_chunk(chunk);
entities
.iter()
.copied()
.filter(|e| {
let epos = positions.get(*e);
if let Some(epos) = epos {
let epos = epos.current;
(epos.x - pos.x).abs() <= radius.x
&& (epos.y - pos.y).abs() <= radius.y
&& (epos.z - pos.z).abs() <= radius.z
} else {
false
}
})
.for_each(|e| result.push(e));
}
result
}
/// The offsets which need to be applied to a position
/// to prevent it from intersecting with a block.
#[derive(Debug, Clone)]
pub struct BlockIntersect {
offset: DVec3,
x: bool,
y: bool,
z: bool,
}
impl BlockIntersect {
/// Applies this offset to the given position.
pub fn apply_to(&self, pos: &mut Position) {
pos.x += self.offset.x;
pos.y += self.offset.y;
pos.z += self.offset.z;
}
/// Returns whether the X axis is affected.
pub fn x_affected(&self) -> bool {
self.x
}
/// Returns whether the Y axis is affected.
pub fn y_affected(&self) -> bool {
self.y
}
/// Returns whether the Z axis is affected.
pub fn z_affected(&self) -> bool {
self.z
}
}
/// Returns a struct containing position offsets which
/// must be applied to prevent blocks from intersecting
/// the bounding box. Call `BlockIntersect::apply_to` to
/// apply the offsets to a position.
///
/// `prev` should be the entity's position on the previous
/// tick. This is used to calculate impact points.
///
/// # Restrictions
/// Currently, bounding boxes with side lengths greater
/// than 1 are not supported. If the bounding box's size
/// is more than 1, this function will panic.
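///
/// # Examples
///
/// A sketch of typical usage, following the unit test below; the bounding
/// box helper and `chunk_map` are assumed to come from the crate's physics
/// and test modules:
///
/// ```ignore
/// let bbox = crate::physics::component::bbox(0.25, 0.25, 0.25);
/// let intersect = blocks_intersecting_bbox(&chunk_map, from, dest, &bbox);
/// let mut pos = dest;
/// intersect.apply_to(&mut pos); // nudge the position out of any intersecting blocks
/// ```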
pub fn blocks_intersecting_bbox(
chunk_map: &ChunkMap,
mut from: Position,
mut dest: Position,
bbox: &AABB<f64>,
) -> BlockIntersect {
let bbox_size = bbox.size() / 2.0;
    // The bounding box's origin along the Y axis is at its bottom rather than
    // its center, so offset both positions by half the height to compensate.
from.y += bbox_size.y;
dest.y += bbox_size.y;
assert!(bbox_size.x <= 1.0);
assert!(bbox_size.y <= 1.0);
assert!(bbox_size.z <= 1.0);
let mut result = BlockIntersect {
offset: vec3(0.0, 0.0, 0.0),
x: false,
y: false,
z: false,
};
    // Axes and signs to pass to `adjacent_to_bbox()`.
let axis = [(1, 1), (1, -1), (0, 1), (0, -1), (2, 1), (2, -1)];
// Compute a vector of compound shapes and axis normals representing adjacent blocks.
let mut blocks: SmallVec<[Compound<f64>; 4]> = smallvec![];
// Don't check the same block twice.
let mut checked = heapless::FnvIndexSet::new();
for (axis, sign) in &axis {
let compound = adjacent_to_bbox(*axis, *sign, bbox, dest, &chunk_map, &mut checked);
blocks.push(compound);
}
// Go through blocks and check for time of impact from original
// position to the block. If the time of impact is <= 1, the entity
// has collided with the block; update the position accordingly.
let velocity = (dest - from).as_vec();
let bbox_shape = bbox_to_cuboid(&bbox);
for compound in blocks {
let toi = match query::time_of_impact(
&Isometry3::translation(0.0, 0.0, 0.0),
&vec3(0.0, 0.0, 0.0),
&compound,
&Isometry3::new(from.as_vec(), vec3(0.0, 0.0, 0.0)),
&velocity,
&bbox_shape,
1.0,
0.0,
) {
Some(toi) => toi,
None => continue, // No impact
};
let world_pos = from + velocity * toi.toi;
let absolute_offset = world_pos - dest;
let normal = {
let x_diff = absolute_offset.x.abs();
let y_diff = absolute_offset.y.abs();
let z_diff = absolute_offset.z.abs();
if x_diff > y_diff && x_diff > z_diff {
vec3(1.0, 0.0, 0.0)
} else if y_diff > x_diff && y_diff > z_diff {
vec3(0.0, 1.0, 0.0)
} else {
vec3(0.0, 0.0, 1.0)
}
};
result.offset += absolute_offset.as_vec().component_mul(&normal);
if normal.x != 0.0 {
result.x = true;
}
if normal.y != 0.0 {
result.y = true;
}
if normal.z != 0.0 {
result.z = true;
}
}
result
}
/// Returns a `Compound` representing up to four blocks
/// adjacent to a bounding box along the provided axis.
///
/// Any block positions in `checked` will not be added to the compound.
/// `checked` will also be updated to account for any added blocks.
///
/// `axis` must be one of the following:
/// * `0` for the X axis;
/// * `1` for the Y axis; or
/// * `2` for the Z axis.
///
/// `sign` must be either -1 or 1.
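///
/// # Examples
///
/// A minimal sketch, adapted from the unit test below; `bbox`, `pos` and
/// `chunk_map` are assumed to exist:
///
/// ```ignore
/// let mut checked = heapless::FnvIndexSet::new();
/// // Blocks directly below the bounding box: axis 1 (Y) with sign -1.
/// let compound = adjacent_to_bbox(1, -1, &bbox, pos, &chunk_map, &mut checked);
/// ```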
pub fn adjacent_to_bbox(
axis: usize,
sign: i32,
bbox: &AABB<f64>,
pos: Position,
chunk_map: &ChunkMap,
checked: &mut heapless::FnvIndexSet<BlockPosition, U32>,
) -> Compound<f64> {
assert!(axis <= 2);
assert!(sign == -1 || sign == 1);
let sign = f64::from(sign);
let size = bbox.size() / 2.0;
let mut blocks: SmallVec<[(BlockPosition, Block); 4]> = smallvec![];
let other_axis1 = match axis {
0 => 1,
1 => 2,
2 => 0,
_ => unreachable!(),
};
let other_axis2 = match axis {
0 => 2,
1 => 0,
2 => 1,
_ => unreachable!(),
};
let offsets = {
let mut offsets = [vec3(0.0, 0.0, 0.0); 4];
// Offset for upper right corner, upper left, bottom right, and bottom left.
// Upper right
offsets[0][axis] = size[axis] * sign;
offsets[0][other_axis1] = size[other_axis1];
offsets[0][other_axis2] = size[other_axis2];
// Upper left
offsets[1][axis] = size[axis] * sign;
offsets[1][other_axis1] = size[other_axis1] * -1.0;
offsets[1][other_axis2] = size[other_axis2];
// Bottom right
offsets[2][axis] = size[axis] * sign;
offsets[2][other_axis1] = size[other_axis1];
offsets[2][other_axis2] = size[other_axis2] * -1.0;
// Bottom left
offsets[3][axis] = size[axis] * sign;
offsets[3][other_axis1] = size[other_axis1] * -1.0;
offsets[3][other_axis2] = size[other_axis2] * -1.0;
offsets
};
// Go through offsets and append block position if the block is solid.
for offset in &offsets {
let block_pos = (pos + *offset).block_pos();
if checked.contains(&block_pos) {
continue;
}
match chunk_map.block_at(block_pos) {
Some(block) => {
if block.is_solid() {
checked.insert(block_pos).unwrap();
blocks.push((block_pos, block));
}
}
None => continue,
}
}
let mut shapes = Vec::with_capacity(4);
for (block_pos, block) in &blocks {
let isometry = block_isometry(*block_pos);
let shape = block_shape(&block);
shapes.push((isometry, ShapeHandle::new(shape)));
}
Compound::new(shapes)
}
/// Returns an `ncollide` `Cuboid` corresponding to the given block.
pub fn block_shape(block: &Block) -> Cuboid<f64> {
let bbox = bbox_for_block(block);
Cuboid::new(bbox.half_extents())
}
/// Returns an `Isometry` representing a block's translation.
pub fn block_isometry(pos: BlockPosition) -> Isometry3<f64> {
Isometry3::new(
vec3(
f64::from(pos.x) + 0.5,
f64::from(pos.y) + 0.5,
f64::from(pos.z) + 0.5,
),
vec3(0.0, 0.0, 0.0),
)
}
/// Finds all chunks within a given distance (in blocks)
/// of a position.
///
/// The Y coordinate of `distance` is ignored.
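///
/// # Examples
///
/// A sketch based on the unit tests below:
///
/// ```ignore
/// let chunks = chunks_within_distance(position!(0.0, 0.0, 0.0), vec3(16.0, 0.0, 16.0));
/// // The 3x3 grid of chunks centered on chunk (0, 0) is returned.
/// assert_eq!(chunks.len(), 9);
/// ```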
pub fn chunks_within_distance(
mut pos: Position,
mut distance: DVec3,
) -> SmallVec<[ChunkPosition; 9]> {
assert!(distance.x >= 0.0);
assert!(distance.z >= 0.0);
let mut result = smallvec![];
let mut x_len = 0;
let mut z_len = 0;
let center_chunk_pos = pos.chunk_pos();
loop {
let needed = ((pos.x + 16.0) / 16.0).floor() * 16.0 - pos.x;
if needed > distance.x {
break;
}
distance.x -= needed;
pos.x += needed;
x_len += 1;
}
loop {
let needed = ((pos.z + 16.0) / 16.0).floor() * 16.0 - pos.z;
if needed > distance.z {
break;
}
distance.z -= needed;
pos.z += needed;
z_len += 1;
}
for x in -x_len..=x_len {
for z in -z_len..=z_len {
result.push(ChunkPosition::new(
x + center_chunk_pos.x,
z + center_chunk_pos.z,
));
}
}
result
}
/// Returns a point at the "front" of the bounding
/// box when it is traveling in the given direction.
///
/// The direction vector is expected to be normalized.
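///
/// # Examples
///
/// A sketch based on the unit test below:
///
/// ```ignore
/// let bbox = AABB::new(Point3::from([0.0, 0.0, 0.0]), Point3::from([1.0, 2.0, 3.0]));
/// // Traveling along +X, the front face of the box sits 0.5 ahead of its center.
/// assert_eq!(bbox_front(&bbox, vec3(1.0, 0.0, 0.0)), position!(0.5, 0.0, 0.0));
/// ```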
pub fn bbox_front(bbox: &AABB<f64>, direction: Vec3) -> Position {
let direction = DVec3::new(
f64::from(direction.x),
f64::from(direction.y),
f64::from(direction.z),
);
let cuboid = bbox_to_cuboid(bbox);
let origin = Point3::from([0.0, 0.0, 0.0]);
let ray = Ray::new(origin, direction);
let toi = cuboid
.toi_with_ray(
&Isometry3::new(vec3(0.0, 0.0, 0.0), vec3(0.0, 0.0, 0.0)),
&ray,
false,
)
.unwrap();
Position::from(direction * toi)
}
/// Converts an axis-aligned bounding box to a cuboid shape.
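///
/// # Examples
///
/// A sketch based on the unit test below:
///
/// ```ignore
/// let bbox = AABB::new(Point3::from([0.0, 0.0, 0.0]), Point3::from([1.0, 2.0, 3.0]));
/// let cuboid = bbox_to_cuboid(&bbox);
/// // The half extents are half of each side length: (0.5, 1.0, 1.5).
/// ```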
pub fn bbox_to_cuboid(bbox: &AABB<f64>) -> Cuboid<f64> {
let lengths = bbox.maxs() - bbox.mins();
let half_lengths = vec3(lengths.x / 2.0, lengths.y / 2.0, lengths.z / 2.0);
Cuboid::new(half_lengths)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::entity::test;
use crate::testframework as t;
use feather_core::world::chunk::Chunk;
use feather_core::world::ChunkPosition;
use feather_core::Block;
use specs::{Builder, WorldExt};
use std::collections::HashSet;
#[test]
fn test_block_impacted_by_ray() {
let mut map = chunk_map();
assert_eq!(
block_impacted_by_ray(&map, vec3(0.0, 65.0, 0.0), vec3(0.0, -1.0, 0.0), 5.0),
Some(RayImpact {
block: BlockPosition::new(0, 64, 0),
pos: position!(0.0, 65.0, 0.0),
face: Side::TOP,
})
);
assert_eq!(
block_impacted_by_ray(&map, vec3(0.0, 65.0, 0.0), vec3(0.0, 1.0, 0.0), 256.0),
None
);
assert_eq!(
block_impacted_by_ray(&map, vec3(0.0, 70.0, 0.0), vec3(0.0, -1.0, 0.0), 5.0),
None
);
map.set_block_at(BlockPosition::new(1, 65, 1), Block::Stone)
.unwrap();
assert_eq!(
block_impacted_by_ray(&map, vec3(0.0, 66.0, 0.0), vec3(1.0, -1.0, 1.0), 5.0),
Some(RayImpact {
block: BlockPosition::new(1, 65, 1),
pos: position!(1.0, 65.0, 1.0),
face: Side::WEST, // This should be three faces—see the TODO above
})
);
}
fn chunk_map() -> ChunkMap {
let mut map = ChunkMap::new();
for x in -2..=2 {
for z in -2..=2 {
let pos = ChunkPosition::new(x, z);
let mut chunk = Chunk::new(pos);
for x in 0..16 {
for y in 0..=64 {
for z in 0..16 {
chunk.set_block_at(x, y, z, Block::Stone);
}
}
}
map.set_chunk_at(pos, chunk);
}
}
map
}
#[test]
fn test_nearby_entities() {
let (mut w, mut d) = t::init_world();
t::populate_with_air(&mut w); // Prevents entities from getting despawned for being outside loaded chunks
let e1 = test::create(&mut w, position!(0.0, 0.0, 0.0)).build();
let e2 = test::create(&mut w, position!(-100.0, 0.0, 50.0)).build();
let e3 = test::create(&mut w, position!(100.0, 50.0, 50.0)).build();
let e4 = test::create(&mut w, position!(100.0, 1.0, -50.0)).build();
d.dispatch(&w);
w.maintain();
let entities = nearby_entities(
&w.fetch(),
&w.read_component(),
position!(0.0, 0.0, 0.0),
vec3(100.0, 1.0, 50.0),
)
.into_iter()
.collect::<HashSet<_>>();
assert_eq!(entities.len(), 3);
assert!(entities.contains(&e1));
assert!(entities.contains(&e2));
assert!(!entities.contains(&e3));
assert!(entities.contains(&e4));
}
#[test]
fn test_chunks_within_distance_basic() {
let pos = position!(0.0, 0.0, 0.0);
let distance = vec3(16.0, 0.0, 16.0);
let chunks = chunks_within_distance(pos, distance);
dbg!(chunks.clone());
let set = chunks.into_iter().collect::<HashSet<_>>();
for x in -1..=1 {
for z in -1..=1 {
assert!(set.contains(&ChunkPosition::new(x, z)));
}
}
assert_eq!(set.len(), 9);
}
#[test]
fn test_chunks_within_distance_complex() {
let pos = position!(32.0, 0.0, -32.0);
let distance = vec3(32.0, 0.0, 31.0);
let chunks = chunks_within_distance(pos, distance);
dbg!(chunks.clone());
assert_eq!(chunks.len(), 15);
let set = chunks.into_iter().collect::<HashSet<_>>();
for x in 0..=4 {
for z in -3..=-1 {
assert!(set.contains(&ChunkPosition::new(x, z)));
}
}
}
#[test]
#[should_panic]
fn test_chunks_within_distance_negative_distance() {
let pos = position!(16.0, 0.0, 16.0);
let distance = vec3(-0.1, -50.0, 0.0);
chunks_within_distance(pos, distance);
}
#[test]
fn test_bbox_front() {
let bbox = AABB::new(Point3::from([0.0, 0.0, 0.0]), Point3::from([1.0, 2.0, 3.0]));
let direction = vec3(1.0, 0.0, 0.0);
assert_eq!(bbox_front(&bbox, direction), position!(0.5, 0.0, 0.0),);
}
#[test]
fn test_bbox_to_cuboid() {
let bbox = AABB::new(Point3::from([0.0, 0.0, 0.0]), Point3::from([1.0, 2.0, 3.0]));
let half_extents = *bbox_to_cuboid(&bbox).half_extents();
assert_float_eq!(half_extents.x, 0.5);
assert_float_eq!(half_extents.y, 1.0);
assert_float_eq!(half_extents.z, 1.5);
}
#[test]
fn test_blocks_intersecting_bbox() {
let chunk_map = chunk_map();
let froms = [
position!(0.0, 66.0, 0.0),
position!(100.0, 65.0, 0.0),
position!(0.0, 100.0, 0.0),
];
let dests = [
position!(0.0, 65.0, 0.0),
position!(100.0, 65.0, 0.0),
position!(0.0, 90.0, 0.0),
];
let results = [
position!(0.0, 65.0, 0.0),
position!(100.0, 65.0, 0.0),
position!(0.0, 90.0, 0.0),
];
let bbox = crate::physics::component::bbox(0.25, 0.25, 0.25);
for ((from, dest), result) in froms.iter().zip(&dests).zip(&results) {
let intersect = blocks_intersecting_bbox(&chunk_map, *from, *dest, &bbox);
let mut pos = *dest;
intersect.apply_to(&mut pos);
assert_pos_eq!(pos, result);
}
}
#[test]
fn test_adjacent_to_bbox() {
let chunk_map = chunk_map();
let bbox = crate::physics::component::bbox(0.25, 0.25, 0.25);
let pos = position!(0.0, 65.0, 0.0);
let axis = 1;
let sign = -1;
let mut checked = heapless::FnvIndexSet::new();
let _ = adjacent_to_bbox(axis, sign, &bbox, pos, &chunk_map, &mut checked);
assert!(checked.contains(&BlockPosition::new(0, 64, 0)));
}
}
use core::str::Chars;
use std::collections::VecDeque;
use crate::lexer::rules::MatchResult;
// Helper struct to match an exact string
#[derive(Clone, Copy)]
pub enum MatchCase {
Sensitive,
AsciiInsensitive,
// Insensitive, // too complicated
}
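/// Incrementally matches a fixed target string against characters that are
/// fed in one at a time.
///
/// A small usage sketch (not compiled as a doc test):
///
/// ```ignore
/// let mut matcher = StrMatcher::case_sensitive("and");
/// assert!(matcher.update_match('a').is_match()); // incomplete match so far
/// assert!(matcher.update_match('n').is_match());
/// // The final character completes the match.
/// assert!(matches!(matcher.update_match('d'), MatchResult::CompleteMatch));
/// ```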
#[derive(Clone)]
pub struct StrMatcher<'a> {
target: &'a str,
match_case: MatchCase,
chars: Chars<'a>,
peek: VecDeque<Option<char>>,
last_result: MatchResult,
count: usize, // track how far have we advanced through the target
}
impl<'a> StrMatcher<'a> {
pub fn new(target: &'a str, match_case: MatchCase) -> Self {
StrMatcher {
target,
match_case,
chars: target.chars(),
peek: VecDeque::new(),
last_result: MatchResult::IncompleteMatch,
count: 0,
}
}
pub fn case_sensitive(target: &'a str) -> Self { StrMatcher::new(target, MatchCase::Sensitive) }
pub fn ascii_case_insensitive(target: &'a str) -> Self { StrMatcher::new(target, MatchCase::AsciiInsensitive) }
pub fn target(&self) -> &'a str { self.target }
pub fn match_case(&self) -> MatchCase { self.match_case }
pub fn last_match_result(&self) -> MatchResult { self.last_result }
pub fn count(&self) -> usize { self.count }
pub fn reset(&mut self) {
self.last_result = MatchResult::IncompleteMatch;
self.chars = self.target.chars();
self.peek.clear();
self.count = 0;
}
pub fn reset_target(&mut self, target: &'a str) {
self.target = target;
self.reset();
}
fn peek_nth(&mut self, n: usize) -> Option<char> {
while self.peek.len() < n + 1 {
self.peek.push_back(self.chars.next());
}
self.peek[n]
}
fn advance(&mut self) -> Option<char> {
self.count += 1;
match self.peek.pop_front() {
Some(o) => o,
None => self.chars.next()
}
}
fn compare_chars(&self, a: &char, b: &char) -> bool {
match self.match_case {
MatchCase::Sensitive => a == b,
MatchCase::AsciiInsensitive => a.eq_ignore_ascii_case(b),
}
}
pub fn peek_match(&mut self, next: char) -> MatchResult {
// if the match already failed, don't bother looking at any further input
if !self.last_result.is_match() {
return MatchResult::NoMatch;
}
match self.peek_nth(0) {
Some(this_ch) if self.compare_chars(&this_ch, &next) => {
if self.peek_nth(1).is_none() {
MatchResult::CompleteMatch
} else {
MatchResult::IncompleteMatch
}
},
_ => MatchResult::NoMatch,
}
}
pub fn update_match(&mut self, next: char) -> MatchResult {
self.last_result = self.peek_match(next);
self.advance();
self.last_result
}
pub fn try_match(&mut self, next: char) -> MatchResult {
let match_result = self.peek_match(next);
if match_result.is_match() {
self.last_result = match_result;
self.advance();
}
match_result
}
}
// Copyright 2019 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Tests for phragmen.
#![cfg(test)]
use crate::mock::*;
use crate::{elect, PhragmenResult};
use sp_runtime::Perbill;
use substrate_test_utils::assert_eq_uvec;
#[test]
fn float_phragmen_poc_works() {
let candidates = vec![1, 2, 3];
let voters = vec![(10, vec![1, 2]), (20, vec![1, 3]), (30, vec![2, 3])];
let stake_of = create_stake_of(&[(10, 10), (20, 20), (30, 30), (1, 0), (2, 0), (3, 0)]);
let mut phragmen_result = elect_float(2, 2, candidates, voters, &stake_of).unwrap();
let winners = phragmen_result.clone().winners;
let assignments = phragmen_result.clone().assignments;
assert_eq_uvec!(winners, vec![(2, 40), (3, 50)]);
assert_eq_uvec!(
assignments,
vec![
(10, vec![(2, 1.0)]),
(20, vec![(3, 1.0)]),
(30, vec![(2, 0.5), (3, 0.5)]),
]
);
let mut support_map = build_support_map(&mut phragmen_result, &stake_of);
assert_eq!(
support_map.get(&2).unwrap(),
&_Support {
own: 0.0,
total: 25.0,
others: vec![(10u64, 10.0), (30u64, 15.0)]
}
);
assert_eq!(
support_map.get(&3).unwrap(),
&_Support {
own: 0.0,
total: 35.0,
others: vec![(20u64, 20.0), (30u64, 15.0)]
}
);
equalize_float(phragmen_result.assignments, &mut support_map, 0.0, 2, stake_of);
assert_eq!(
support_map.get(&2).unwrap(),
&_Support {
own: 0.0,
total: 30.0,
others: vec![(10u64, 10.0), (30u64, 20.0)]
}
);
assert_eq!(
support_map.get(&3).unwrap(),
&_Support {
own: 0.0,
total: 30.0,
others: vec![(20u64, 20.0), (30u64, 10.0)]
}
);
}
#[test]
fn phragmen_poc_works() {
let candidates = vec![1, 2, 3];
let voters = vec![(10, vec![1, 2]), (20, vec![1, 3]), (30, vec![2, 3])];
let PhragmenResult { winners, assignments } = elect::<_, _, _, TestCurrencyToVote>(
2,
2,
candidates,
voters,
create_stake_of(&[(10, 10), (20, 20), (30, 30)]),
)
.unwrap();
assert_eq_uvec!(winners, vec![(2, 40), (3, 50)]);
assert_eq_uvec!(
assignments,
vec![
(10, vec![(2, Perbill::from_percent(100))]),
(20, vec![(3, Perbill::from_percent(100))]),
(
30,
vec![(2, Perbill::from_percent(100 / 2)), (3, Perbill::from_percent(100 / 2))]
),
]
);
}
#[test]
fn phragmen_poc_2_works() {
let candidates = vec![10, 20, 30];
let voters = vec![(2, vec![10, 20, 30]), (4, vec![10, 20, 40])];
let stake_of = create_stake_of(&[(10, 1000), (20, 1000), (30, 1000), (40, 1000), (2, 500), (4, 500)]);
run_and_compare(candidates, voters, stake_of, 2, 2);
}
#[test]
fn phragmen_poc_3_works() {
let candidates = vec![10, 20, 30];
let voters = vec![(2, vec![10, 20, 30]), (4, vec![10, 20, 40])];
let stake_of = create_stake_of(&[(10, 1000), (20, 1000), (30, 1000), (2, 50), (4, 1000)]);
run_and_compare(candidates, voters, stake_of, 2, 2);
}
#[test]
fn phragmen_accuracy_on_large_scale_only_validators() {
	// Because of this particular situation we had per_u128 and now rational128. In practice, a
	// candidate can have the maximum amount of tokens, and also be supported by the maximum.
let candidates = vec![1, 2, 3, 4, 5];
let stake_of = create_stake_of(&[
(1, (u64::max_value() - 1).into()),
(2, (u64::max_value() - 4).into()),
(3, (u64::max_value() - 5).into()),
(4, (u64::max_value() - 3).into()),
(5, (u64::max_value() - 2).into()),
]);
let PhragmenResult { winners, assignments } = elect::<_, _, _, TestCurrencyToVote>(
2,
2,
candidates.clone(),
auto_generate_self_voters(&candidates),
stake_of,
)
.unwrap();
assert_eq_uvec!(
winners,
vec![(1, 18446744073709551614u128), (5, 18446744073709551613u128)]
);
assert_eq!(assignments.len(), 2);
check_assignments(assignments);
}
#[test]
fn phragmen_accuracy_on_large_scale_validators_and_nominators() {
let candidates = vec![1, 2, 3, 4, 5];
let mut voters = vec![(13, vec![1, 3, 5]), (14, vec![2, 4])];
voters.extend(auto_generate_self_voters(&candidates));
let stake_of = create_stake_of(&[
(1, (u64::max_value() - 1).into()),
(2, (u64::max_value() - 4).into()),
(3, (u64::max_value() - 5).into()),
(4, (u64::max_value() - 3).into()),
(5, (u64::max_value() - 2).into()),
(13, (u64::max_value() - 10).into()),
(14, u64::max_value().into()),
]);
let PhragmenResult { winners, assignments } =
elect::<_, _, _, TestCurrencyToVote>(2, 2, candidates, voters, stake_of).unwrap();
assert_eq_uvec!(
winners,
vec![(2, 36893488147419103226u128), (1, 36893488147419103219u128)]
);
assert_eq!(
assignments,
vec![
(13, vec![(1, Perbill::one())]),
(14, vec![(2, Perbill::one())]),
(1, vec![(1, Perbill::one())]),
(2, vec![(2, Perbill::one())]),
]
);
check_assignments(assignments);
}
#[test]
fn phragmen_accuracy_on_small_scale_self_vote() {
let candidates = vec![40, 10, 20, 30];
let voters = auto_generate_self_voters(&candidates);
let stake_of = create_stake_of(&[(40, 0), (10, 1), (20, 2), (30, 1)]);
let PhragmenResult {
winners,
assignments: _,
} = elect::<_, _, _, TestCurrencyToVote>(3, 3, candidates, voters, stake_of).unwrap();
assert_eq_uvec!(winners, vec![(20, 2), (10, 1), (30, 1)]);
}
#[test]
fn phragmen_accuracy_on_small_scale_no_self_vote() {
let candidates = vec![40, 10, 20, 30];
let voters = vec![(1, vec![10]), (2, vec![20]), (3, vec![30]), (4, vec![40])];
let stake_of = create_stake_of(&[
(40, 1000), // don't care
(10, 1000), // don't care
(20, 1000), // don't care
(30, 1000), // don't care
(4, 0),
(1, 1),
(2, 2),
(3, 1),
]);
let PhragmenResult {
winners,
assignments: _,
} = elect::<_, _, _, TestCurrencyToVote>(3, 3, candidates, voters, stake_of).unwrap();
assert_eq_uvec!(winners, vec![(20, 2), (10, 1), (30, 1)]);
}
#[test]
fn phragmen_large_scale_test() {
let candidates = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24];
let mut voters = vec![(50, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24])];
voters.extend(auto_generate_self_voters(&candidates));
let stake_of = create_stake_of(&[
(2, 1),
(4, 100),
(6, 1000000),
(8, 100000000001000),
(10, 100000000002000),
(12, 100000000003000),
(14, 400000000000000),
(16, 400000000001000),
(18, 18000000000000000),
(20, 20000000000000000),
(22, 500000000000100000),
(24, 500000000000200000),
(50, 990000000000000000),
]);
let PhragmenResult { winners, assignments } =
elect::<_, _, _, TestCurrencyToVote>(2, 2, candidates, voters, stake_of).unwrap();
assert_eq_uvec!(
winners,
vec![(24, 1490000000000200000u128), (22, 1490000000000100000u128)]
);
check_assignments(assignments);
}
#[test]
fn phragmen_large_scale_test_2() {
let nom_budget: u64 = 1_000_000_000_000_000_000;
let c_budget: u64 = 4_000_000;
let candidates = vec![2, 4];
let mut voters = vec![(50, vec![2, 4])];
voters.extend(auto_generate_self_voters(&candidates));
let stake_of = create_stake_of(&[(2, c_budget.into()), (4, c_budget.into()), (50, nom_budget.into())]);
let PhragmenResult { winners, assignments } =
elect::<_, _, _, TestCurrencyToVote>(2, 2, candidates, voters, stake_of).unwrap();
assert_eq_uvec!(
winners,
vec![(2, 1000000000004000000u128), (4, 1000000000004000000u128)]
);
assert_eq!(
assignments,
vec![
(
50,
vec![(2, Perbill::from_parts(500000001)), (4, Perbill::from_parts(499999999))]
),
(2, vec![(2, Perbill::one())]),
(4, vec![(4, Perbill::one())]),
],
);
check_assignments(assignments);
}
#[test]
fn phragmen_linear_equalize() {
let candidates = vec![11, 21, 31, 41, 51, 61, 71];
let voters = vec![
(2, vec![11]),
(4, vec![11, 21]),
(6, vec![21, 31]),
(8, vec![31, 41]),
(110, vec![41, 51]),
(120, vec![51, 61]),
(130, vec![61, 71]),
];
let stake_of = create_stake_of(&[
(11, 1000),
(21, 1000),
(31, 1000),
(41, 1000),
(51, 1000),
(61, 1000),
(71, 1000),
(2, 2000),
(4, 1000),
(6, 1000),
(8, 1000),
(110, 1000),
(120, 1000),
(130, 1000),
]);
run_and_compare(candidates, voters, stake_of, 2, 2);
}
#[test]
fn elect_has_no_entry_barrier() {
let candidates = vec![10, 20, 30];
let voters = vec![(1, vec![10]), (2, vec![20])];
let stake_of = create_stake_of(&[(1, 10), (2, 10)]);
let PhragmenResult {
winners,
assignments: _,
} = elect::<_, _, _, TestCurrencyToVote>(3, 3, candidates, voters, stake_of).unwrap();
// 30 is elected with stake 0. The caller is responsible for stripping this.
assert_eq_uvec!(winners, vec![(10, 10), (20, 10), (30, 0),]);
}
#[test]
fn minimum_to_elect_is_respected() {
let candidates = vec![10, 20, 30];
let voters = vec![(1, vec![10]), (2, vec![20])];
let stake_of = create_stake_of(&[(1, 10), (2, 10)]);
let maybe_result = elect::<_, _, _, TestCurrencyToVote>(10, 10, candidates, voters, stake_of);
assert!(maybe_result.is_none());
}
// Lumol, an extensible molecular simulation engine
// Copyright (C) Lumol's contributors — BSD license
use crate::{AnglePotential, BondPotential, DihedralPotential, PairPotential};
use crate::Potential;
use crate::math::erfc;
use std::f64::consts::PI;
/// No-op potential.
///
/// The `NullPotential` always returns 0 as energy and force. It should be used
/// to indicate that there is no potential interaction for a given set of
/// particles.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::NullPotential;
/// let potential = NullPotential;
/// assert_eq!(potential.energy(0.1), 0.0);
/// assert_eq!(potential.energy(100000.0), 0.0);
///
/// assert_eq!(potential.force(0.1), 0.0);
/// assert_eq!(potential.force(100000.0), 0.0);
/// ```
#[derive(Clone, Copy)]
pub struct NullPotential;
impl Potential for NullPotential {
fn energy(&self, _: f64) -> f64 {
0.0
}
fn force(&self, _: f64) -> f64 {
0.0
}
}
impl PairPotential for NullPotential {
fn tail_energy(&self, _: f64) -> f64 {
0.0
}
fn tail_virial(&self, _: f64) -> f64 {
0.0
}
}
impl BondPotential for NullPotential {}
impl AnglePotential for NullPotential {}
impl DihedralPotential for NullPotential {}
/// Lennard-Jones potential.
///
/// $$ V(r) = 4 * \epsilon * \left[ \left(\frac \sigma r \right)^{12} -
/// \left(\frac \sigma r \right)^6 \right] $$
///
/// where $\sigma$ is the Lennard-Jones distance constant, and $\epsilon$ the
/// energetic constant.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::LennardJones;
/// let potential = LennardJones { sigma: 2.0, epsilon: 10.0 };
/// assert_eq!(potential.energy(2.0), 0.0);
/// assert_eq!(potential.energy(3.0), -3.203365942785746);
///
/// assert_eq!(potential.force(2.0), 120.0);
/// ```
#[derive(Clone, Copy)]
pub struct LennardJones {
/// Distance constant of the Lennard-Jones potential
pub sigma: f64,
/// Energy constant of the Lennard-Jones potential
pub epsilon: f64,
}
impl Potential for LennardJones {
fn energy(&self, r: f64) -> f64 {
let s6 = f64::powi(self.sigma / r, 6);
4.0 * self.epsilon * (f64::powi(s6, 2) - s6)
}
fn force(&self, r: f64) -> f64 {
let s6 = f64::powi(self.sigma / r, 6);
-24.0 * self.epsilon * (s6 - 2.0 * f64::powi(s6, 2)) / r
}
}
impl PairPotential for LennardJones {
fn tail_energy(&self, cutoff: f64) -> f64 {
let s3 = self.sigma * self.sigma * self.sigma;
let rc3 = cutoff * cutoff * cutoff;
let s9 = s3 * s3 * s3;
let rc9 = rc3 * rc3 * rc3;
4.0 / 3.0 * self.epsilon * s3 * (1.0 / 3.0 * s9 / rc9 - s3 / rc3)
}
fn tail_virial(&self, cutoff: f64) -> f64 {
let s3 = self.sigma * self.sigma * self.sigma;
let rc3 = cutoff * cutoff * cutoff;
let s9 = s3 * s3 * s3;
let rc9 = rc3 * rc3 * rc3;
8.0 * self.epsilon * s3 * (2.0 / 3.0 * s9 / rc9 - s3 / rc3)
}
}
/// Harmonic potential.
///
/// $$ V(x) = \frac{1}{2} k (x - x_0)^2 $$
///
/// where $x_0$ is the distance equilibrium, and $k$ the elastic constant.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::Harmonic;
/// let potential = Harmonic { x0: 2.0, k: 100.0 };
/// assert_eq!(potential.energy(2.0), 0.0);
/// assert_eq!(potential.energy(3.0), 50.0);
///
/// assert_eq!(potential.force(2.0), 0.0);
/// assert_eq!(potential.force(1.5), 50.0);
/// ```
#[derive(Clone, Copy)]
pub struct Harmonic {
/// Spring constant
pub k: f64,
/// Equilibrium value
pub x0: f64,
}
impl Potential for Harmonic {
fn energy(&self, x: f64) -> f64 {
let dx = x - self.x0;
0.5 * self.k * dx * dx
}
fn force(&self, x: f64) -> f64 {
self.k * (self.x0 - x)
}
}
impl PairPotential for Harmonic {
// These two functions should return infinity, as the Harmonic potential
// does not goes to zero at infinity. We use 0 instead to ignore the tail
// contribution to the energy/virial.
fn tail_energy(&self, _: f64) -> f64 {
0.0
}
fn tail_virial(&self, _: f64) -> f64 {
0.0
}
}
impl BondPotential for Harmonic {}
impl AnglePotential for Harmonic {}
impl DihedralPotential for Harmonic {}
/// Cosine harmonic potential.
///
/// $$ V(x) = \frac{1}{2} k \left[\cos(x) - \cos(x_0) \right]^2 $$
///
/// where $x_0$ is the equilibrium value, and $k$ the elastic constant.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::CosineHarmonic;
/// let potential = CosineHarmonic::new(/*k*/ 100.0, /*x0*/ 2.0);
/// assert_eq!(potential.energy(2.0), 0.0);
/// assert_eq!(potential.energy(3.0), 16.464942078100552);
///
/// assert_eq!(potential.force(2.0), 0.0);
/// ```
#[derive(Clone, Copy)]
pub struct CosineHarmonic {
/// Spring constant
k: f64,
/// Cosine of the equilibrium value
cos_x0: f64,
}
impl CosineHarmonic {
/// Create a new `CosineHarmonic` potentials, with elastic constant of `k`
/// and equilibrium value of `x0`
pub fn new(k: f64, x0: f64) -> CosineHarmonic {
CosineHarmonic {
k: k,
cos_x0: f64::cos(x0),
}
}
}
impl Potential for CosineHarmonic {
fn energy(&self, x: f64) -> f64 {
let dr = f64::cos(x) - self.cos_x0;
0.5 * self.k * dr * dr
}
fn force(&self, x: f64) -> f64 {
self.k * (f64::cos(x) - self.cos_x0) * f64::sin(x)
}
}
impl AnglePotential for CosineHarmonic {}
impl DihedralPotential for CosineHarmonic {}
/// Torsion potential.
///
/// This potential is intended for use with dihedral angles, using a custom
/// periodicity and multiple minima.
///
/// $$ V(x) = k (1 + \cos(n x - \delta))$$
///
/// where $k$ is the force constant, $n$ the periodicity of the potential, and
/// $\delta$ the equilibrium angle.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::Torsion;
/// # use std::f64::consts::PI;
/// let potential = Torsion { delta: PI / 2.0, k: 10.0, n: 3 };
/// assert_eq!(potential.energy(PI / 2.0), 0.0);
/// assert_eq!(potential.energy(PI / 3.0), 10.0);
///
/// assert!(potential.force(PI / 2.0).abs() < 1e-12);
/// ```
#[derive(Clone, Copy)]
pub struct Torsion {
/// Force constant
pub k: f64,
/// Equilibrium value
pub delta: f64,
/// Multiplicity of the potential
pub n: usize,
}
impl Potential for Torsion {
fn energy(&self, phi: f64) -> f64 {
let n = self.n as f64;
let cos = f64::cos(n * phi - self.delta);
self.k * (1.0 + cos)
}
fn force(&self, phi: f64) -> f64 {
let n = self.n as f64;
let sin = f64::sin(n * phi - self.delta);
self.k * n * sin
}
}
impl DihedralPotential for Torsion {}
/// Buckingham potential.
///
/// $$ V(x) = A \exp \left(-\frac{r}{\rho} \right) - \frac{C}{r^6} $$
///
/// where $A$ and $C$ are energetic constants, and $\rho$ is a length
/// parameter.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::Buckingham;
/// let potential = Buckingham { a: 2.0, c: 1.0, rho: 5.3 };
/// assert_eq!(potential.energy(2.2), 1.3117360696239022);
/// assert_eq!(potential.force(2.2), 0.2251072178835946);
/// ```
#[derive(Clone, Copy)]
pub struct Buckingham {
/// Exponential term energetic constant
pub a: f64,
/// `1/r^6` term energetic constant
pub c: f64,
/// Width of the exponential term length constant
pub rho: f64,
}
impl Potential for Buckingham {
fn energy(&self, r: f64) -> f64 {
let r3 = r * r * r;
let r6 = r3 * r3;
let exp = f64::exp(-r / self.rho);
self.a * exp - self.c / r6
}
fn force(&self, r: f64) -> f64 {
let r3 = r * r * r;
let r7 = r3 * r3 * r;
let exp = f64::exp(-r / self.rho);
self.a / self.rho * exp - 6.0 * self.c / r7
}
}
impl PairPotential for Buckingham {
fn tail_energy(&self, rc: f64) -> f64 {
let rc2 = rc * rc;
let rc3 = rc2 * rc;
let exp = f64::exp(-rc / self.rho);
let factor = rc2 - 2.0 * rc * self.rho + 2.0 * self.rho * self.rho;
self.a * self.rho * exp * factor - self.c / (3.0 * rc3)
}
fn tail_virial(&self, rc: f64) -> f64 {
let rc2 = rc * rc;
let rc3 = rc2 * rc;
let exp = f64::exp(-rc / self.rho);
let factor = rc3 + 3.0 * rc2 * self.rho + 6.0 * rc * self.rho * self.rho +
6.0 * self.rho * self.rho * self.rho;
self.a * exp * factor - 20.0 * self.c / rc3 + 8.0
}
}
/// Born-Mayer-Huggins potential.
///
/// $$ V(x) = A \exp \left(\frac{\sigma - r}{\rho} \right) - \frac{C}{r^6} +
/// \frac{D}{r^8} $$
///
/// where $A$, $C$ and $D$ are energetic constants; $\sigma$ and $\rho$ are
/// length parameters.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::BornMayerHuggins;
/// let potential = BornMayerHuggins { a: 2.0, c: 1.0, d: 0.5, sigma: 1.5, rho: 5.3 };
/// assert_eq!(potential.energy(2.2), 1.7446409593340713);
/// assert_eq!(potential.force(2.2), 0.30992873382584607);
/// ```
#[derive(Clone, Copy)]
pub struct BornMayerHuggins {
/// Exponential term energetic constant
pub a: f64,
/// `1/r^6` term energetic constant
pub c: f64,
/// `1/r^8` term energetic constant
pub d: f64,
/// Sphere diameter length constant
pub sigma: f64,
/// Width of the exponential term length constant
pub rho: f64,
}
impl Potential for BornMayerHuggins {
fn energy(&self, r: f64) -> f64 {
let r2 = r * r;
let r6 = r2 * r2 * r2;
let exp = f64::exp((self.sigma - r) / self.rho);
self.a * exp - self.c / r6 + self.d / (r6 * r2)
}
fn force(&self, r: f64) -> f64 {
let r2 = r * r;
let r7 = r2 * r2 * r2 * r;
let exp = f64::exp((self.sigma - r) / self.rho);
self.a / self.rho * exp - 6.0 * self.c / r7 + 8.0 * self.d / (r7 * r2)
}
}
impl PairPotential for BornMayerHuggins {
fn tail_energy(&self, rc: f64) -> f64 {
let rc2 = rc * rc;
let rc3 = rc2 * rc;
let exp = f64::exp((self.sigma - rc) / self.rho);
let factor = rc2 - 2.0 * rc * self.rho + 2.0 * self.rho * self.rho;
self.a * self.rho * exp * factor - self.c / (3.0 * rc3) + self.d / (5.0 * rc2 * rc3)
}
fn tail_virial(&self, rc: f64) -> f64 {
let rc2 = rc * rc;
let rc3 = rc2 * rc;
let exp = f64::exp((self.sigma - rc) / self.rho);
let factor = rc3 + 3.0 * rc2 * self.rho + 6.0 * rc * self.rho * self.rho +
6.0 * self.rho * self.rho * self.rho;
self.a * exp * factor - 20.0 * self.c / rc3 + 8.0 * self.d / (5.0 * rc2 * rc3)
}
}
/// Morse potential
///
/// $$ V(x) = \text{depth} * \left( 1 - \exp(a (x_0 - x)) \right)^2 $$
///
/// where the parameters are $x_0$ for the equilibrium value, `depth` for the
/// well depth, and $a$ for the well width.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::Morse;
/// let potential = Morse { a: 2.0, x0: 1.3, depth: 4.0 };
/// assert_eq!(potential.energy(1.0), 2.703517287822119);
/// assert_eq!(potential.force(1.0), -37.12187076378477);
/// ```
#[derive(Clone, Copy)]
pub struct Morse {
/// Exponential term width value
pub a: f64,
/// Equilibrium value
pub x0: f64,
/// Well depth value
pub depth: f64,
}
impl Potential for Morse {
fn energy(&self, r: f64) -> f64 {
let rc = 1.0 - f64::exp((self.x0 - r) * self.a);
self.depth * rc * rc
}
fn force(&self, r: f64) -> f64 {
let exp = f64::exp((self.x0 - r) * self.a);
2.0 * self.depth * (1.0 - exp * exp) * self.a
}
}
impl PairPotential for Morse {
fn tail_energy(&self, _: f64) -> f64 {
0.0
}
fn tail_virial(&self, _: f64) -> f64 {
0.0
}
}
impl BondPotential for Morse {}
impl AnglePotential for Morse {}
impl DihedralPotential for Morse {}
/// Gaussian potential.
///
/// $$ V(x) = -a \exp(-b x^2) $$
///
/// where $a$ is the potential depth and $b$ is the potential width.
///
/// # Restrictions
///
/// $b$ has to be positive
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::Gaussian;
/// let potential = Gaussian::new(8.0, 0.5);
/// assert_eq!(potential.energy(0.0), -8.0);
/// assert_eq!(potential.force(0.0), 0.0);
/// ```
#[derive(Clone, Copy)]
pub struct Gaussian {
/// Depth of the Gaussian potential
a: f64,
/// Width of the Gaussian potential
b: f64,
}
impl Gaussian {
/// Create a new `Gaussian` potential with a depth of `a` and a width of `b`
pub fn new(a: f64, b: f64) -> Gaussian {
if b <= 0.0 {
panic!("\"b\" has to be positive in Gaussian potential")
}
Gaussian { a: a, b: b }
}
}
impl Potential for Gaussian {
fn energy(&self, r: f64) -> f64 {
-self.a * f64::exp(-self.b * r * r)
}
fn force(&self, r: f64) -> f64 {
2.0 * self.b * r * self.energy(r)
}
}
impl PairPotential for Gaussian {
fn tail_energy(&self, rc: f64) -> f64 {
self.energy(rc) * rc / (2.0 * self.b) -
self.a * f64::sqrt(PI) * erfc(f64::sqrt(self.b) * rc) / (4.0 * self.b.powf(3.0 / 2.0))
}
fn tail_virial(&self, rc: f64) -> f64 {
3.0 * f64::sqrt(PI) * self.a * erfc(f64::sqrt(self.b) * rc) / (4.0 * self.b.powf(3.0 / 2.0)) -
self.energy(rc) * rc * (2.0 * self.b * rc * rc + 3.0) / (2.0 * self.b)
}
}
/// Mie potential.
///
/// This is a generalization of the Lennard-Jones potential with arbitrary
/// (floating point) exponents.
///
/// $$ V(r) = \epsilon \frac{n}{n - m} \left(\frac{n}{m}\right)^\frac{m}{n - m}
/// \left[\left(\frac \sigma r \right)^n - \left(\frac \sigma r \right)^m
/// \right] $$
///
/// where $\epsilon$ is an energetic constant, $\sigma$ is a distance constant,
/// and $n$, $m$ are the repulsive and attractive exponents, respectively.
///
/// # Restrictions
///
/// $n$ has to be larger than $m$
///
/// For $m$ equal to or smaller than 3.0, there is no analytic tail correction
/// and the tail energy and virial contributions will be set to zero.
///
/// # Examples
///
/// ```
/// # use lumol_core::energy::Potential;
/// # use lumol_core::energy::Mie;
/// let potential = Mie::new(/*sigma*/ 2.0, /*epsilon*/ 10.0, /*n*/ 12.0, /*m*/ 6.0);
/// assert_eq!(potential.energy(2.0), 0.0);
/// assert!(f64::abs(potential.energy(3.0) + 3.203365942785746) < 1e-8);
///
/// assert_eq!(potential.force(2.0), 120.0);
/// ```
#[derive(Clone, Copy)]
pub struct Mie {
/// Distance constant
sigma: f64,
/// Exponent of repulsive contribution
n: f64,
/// Exponent of attractive contribution
m: f64,
/// Energetic prefactor computed from the exponents and epsilon
prefac: f64,
}
impl Mie {
/// Return Mie potential.
pub fn new(sigma: f64, epsilon: f64, n: f64, m: f64) -> Mie {
if m >= n {
panic!("The repulsive exponent n has to be larger than the attractive exponent m")
};
let prefac = n / (n - m) * (n / m).powf(m / (n - m)) * epsilon;
Mie {
sigma: sigma,
n: n,
m: m,
prefac: prefac,
}
}
}
impl Potential for Mie {
fn energy(&self, r: f64) -> f64 {
let sigma_r = self.sigma / r;
let repulsive = f64::powf(sigma_r, self.n);
let attractive = f64::powf(sigma_r, self.m);
self.prefac * (repulsive - attractive)
}
fn force(&self, r: f64) -> f64 {
let sigma_r = self.sigma / r;
let repulsive = f64::powf(sigma_r, self.n);
let attractive = f64::powf(sigma_r, self.m);
self.prefac * (self.n * repulsive - self.m * attractive) / r
}
}
impl PairPotential for Mie {
fn tail_energy(&self, cutoff: f64) -> f64 {
if self.m <= 3.0 {
return 0.0
};
let sigma_rc = self.sigma / cutoff;
let n_3 = self.n - 3.0;
let m_3 = self.m - 3.0;
let repulsive = f64::powf(sigma_rc, n_3);
let attractive = f64::powf(sigma_rc, m_3);
self.prefac * self.sigma.powi(3) * (repulsive / n_3 - attractive / m_3)
}
fn tail_virial(&self, cutoff: f64) -> f64 {
if self.m <= 3.0 {
return 0.0
};
let sigma_rc = self.sigma / cutoff;
let n_3 = self.n - 3.0;
let m_3 = self.m - 3.0;
let repulsive = f64::powf(sigma_rc, n_3);
let attractive = f64::powf(sigma_rc, m_3);
self.prefac * self.sigma.powi(3) * (repulsive * self.n / n_3 - attractive * self.m / m_3)
}
}
#[cfg(test)]
#[allow(clippy::unreadable_literal)]
mod tests {
use super::*;
use crate::{PairPotential, Potential};
use approx::{assert_ulps_eq, assert_relative_eq};
const EPS: f64 = 1e-9;
#[test]
fn null() {
let null = NullPotential;
assert_eq!(null.energy(2.0), 0.0);
assert_eq!(null.energy(2.5), 0.0);
assert_eq!(null.force(2.0), 0.0);
assert_eq!(null.force(2.5), 0.0);
assert_eq!(null.tail_energy(1.0), 0.0);
assert_eq!(null.tail_virial(1.0), 0.0);
let e0 = null.energy(2.0);
let e1 = null.energy(2.0 + EPS);
assert_ulps_eq!((e0 - e1) / EPS, null.force(2.0));
}
#[test]
fn lj() {
let lj = LennardJones {
epsilon: 0.8,
sigma: 2.0,
};
assert_eq!(lj.energy(2.0), 0.0);
assert_eq!(lj.energy(2.5), -0.6189584744448002);
assert_eq!(lj.tail_energy(1.0), 1388.0888888888887);
assert_eq!(lj.tail_energy(2.0), -5.688888888888889);
assert_eq!(lj.tail_energy(14.42), -0.022767318648783084);
assert_eq!(lj.tail_virial(1.0), 17066.666666666668);
assert_eq!(lj.tail_virial(2.0), -17.06666666666667);
assert_eq!(lj.tail_virial(14.42), -0.1366035877536718);
assert!(lj.force(f64::powf(2.0, 1.0 / 6.0) * 2.0).abs() < 1e-15);
assert_ulps_eq!(lj.force(2.5), -0.95773475733504);
let e0 = lj.energy(4.0);
let e1 = lj.energy(4.0 + EPS);
assert_relative_eq!((e0 - e1) / EPS, lj.force(4.0), epsilon = 1e-6);
}
#[test]
fn harmonic() {
let harmonic = Harmonic { k: 50.0, x0: 2.0 };
assert_eq!(harmonic.energy(2.0), 0.0);
assert_eq!(harmonic.energy(2.5), 6.25);
assert_eq!(harmonic.force(2.0), 0.0);
assert_eq!(harmonic.force(2.5), -25.0);
assert_eq!(harmonic.tail_energy(1.0), 0.0);
assert_eq!(harmonic.tail_virial(1.0), 0.0);
let e0 = harmonic.energy(2.1);
let e1 = harmonic.energy(2.1 + EPS);
assert_relative_eq!((e0 - e1) / EPS, harmonic.force(2.1), epsilon = 1e-6);
}
#[test]
fn cosine_harmonic() {
let harmonic = CosineHarmonic::new(50.0, 2.0);
assert_eq!(harmonic.energy(2.0), 0.0);
let dcos = f64::cos(2.5) - f64::cos(2.0);
assert_eq!(harmonic.energy(2.5), 0.5 * 50.0 * dcos * dcos);
assert_eq!(harmonic.force(2.0), 0.0);
let dcos = f64::cos(2.5) - f64::cos(2.0);
assert_eq!(harmonic.force(2.5), 50.0 * dcos * f64::sin(2.5));
let e0 = harmonic.energy(2.3);
let e1 = harmonic.energy(2.3 + EPS);
assert_relative_eq!((e0 - e1) / EPS, harmonic.force(2.3), epsilon = 1e-6);
}
#[test]
fn torsion() {
let torsion = Torsion {
k: 5.0,
n: 3,
delta: 3.0,
};
assert_eq!(torsion.energy(1.0), 10.0);
let energy = 5.0 * (1.0 + f64::cos(3.0 * 1.1 - 3.0));
assert_eq!(torsion.energy(1.1), energy);
assert_eq!(torsion.force(1.0), 0.0);
let e0 = torsion.energy(4.0);
let e1 = torsion.energy(4.0 + EPS);
assert_relative_eq!((e0 - e1) / EPS, torsion.force(4.0), epsilon = 1e-6);
}
#[test]
fn buckingham() {
let buckingham = Buckingham {
a: 2.0,
c: 1.0,
rho: 2.0,
};
// Comparing to externally computed values
assert_eq!(buckingham.energy(2.0), 0.7201338823428847);
assert_eq!(buckingham.force(2.0), 0.32100444117144233);
assert_eq!(buckingham.tail_energy(10.0), 1.8323882504179136);
assert_eq!(buckingham.tail_virial(10.0), 33.422487868546725);
let e0 = buckingham.energy(4.0);
let e1 = buckingham.energy(4.0 + EPS);
assert_relative_eq!((e0 - e1) / EPS, buckingham.force(4.0), epsilon = 1e-6);
}
#[test]
fn born() {
let born = BornMayerHuggins {
a: 2.0,
c: 1.0,
d: 0.5,
sigma: 2.0,
rho: 2.0,
};
// Comparing to externally computed values
assert_eq!(born.energy(2.0), 1.986328125);
assert_eq!(born.force(2.0), 0.9609375);
assert_eq!(born.tail_energy(10.0), 4.981521444402363);
assert_eq!(born.tail_virial(10.0), 69.13986044386026);
let e0 = born.energy(4.0);
let e1 = born.energy(4.0 + EPS);
assert_relative_eq!((e0 - e1) / EPS, born.force(4.0), epsilon = 1e-6);
}
#[test]
fn morse() {
let morse = Morse {
a: 2.0,
x0: 1.3,
depth: 4.0,
};
// Comparing to externally computed values
assert_eq!(morse.energy(1.0), 2.703517287822119);
assert_eq!(morse.force(1.0), -37.12187076378477);
assert_eq!(morse.tail_energy(1.0), 0.0);
assert_eq!(morse.tail_virial(1.0), 0.0);
let e0 = morse.energy(1.3);
let e1 = morse.energy(1.3 + EPS);
assert_relative_eq!((e0 - e1) / EPS, morse.force(1.3), epsilon = 1e-6);
}
#[test]
fn gaussian() {
let gaussian = Gaussian::new(8.0, 2.0);
assert_eq!(gaussian.energy(0.0), -8.0);
assert_eq!(gaussian.force(0.0), 0.0);
assert_relative_eq!(gaussian.tail_energy(2.5), -1.93518e-5, epsilon = 1e-10);
assert_relative_eq!(gaussian.tail_virial(2.5), 5.23887e-4, epsilon = 1e-10);
let e0 = gaussian.energy(0.5);
let e1 = gaussian.energy(0.5 + EPS);
assert_relative_eq!((e0 - e1) / EPS, gaussian.force(0.5), epsilon = 1e-6);
}
#[test]
#[should_panic(expected = "\"b\" has to be positive")]
fn test_gaussian_wrong_input() {
let gaussian = Gaussian::new(8.0, -2.0);
assert_eq!(gaussian.energy(0.0), -8.0);
}
#[test]
fn test_mie() {
let mie = Mie::new(2.0, 0.8, 12.0, 6.0);
assert_eq!(mie.energy(2.0), 0.0);
assert_eq!(mie.energy(2.5), -0.6189584744448002);
assert_relative_eq!(mie.tail_energy(1.0), 1388.0888889, epsilon = 1e-6);
assert_relative_eq!(mie.tail_energy(2.0), -5.688888888888889, epsilon = 1e-6);
assert_relative_eq!(mie.tail_energy(14.42), -0.022767318648783084, epsilon = 1e-6);
assert_relative_eq!(mie.tail_virial(1.0), 17066.666666666668, epsilon = 1e-6);
assert_relative_eq!(mie.tail_virial(2.0), -17.06666666666667, epsilon = 1e-6);
assert_relative_eq!(mie.tail_virial(14.42), -0.1366035877536718, epsilon = 1e-6);
assert!(mie.force(f64::powf(2.0, 1.0 / 6.0) * 2.0).abs() < 1e-15);
assert_ulps_eq!(mie.force(2.5), -0.95773475733504);
let e0 = mie.energy(4.0);
let e1 = mie.energy(4.0 + EPS);
assert_relative_eq!((e0 - e1) / EPS, mie.force(4.0), epsilon = 1e-6);
}
#[test]
#[should_panic(expected = "The repulsive exponent n has to be larger than the attractive exponent m")]
fn test_mie_n_lower_m() {
let mie = Mie::new(2.0, 0.8, 6.0, 12.0);
assert_eq!(mie.energy(2.0), 0.0);
}
#[test]
fn test_mie_tail_divergence() {
let mie = Mie::new(2.0, 0.8, 12.0, 2.0);
assert_eq!(mie.tail_energy(2.0), 0.0);
assert_eq!(mie.tail_virial(2.0), 0.0);
}
}
pub mod driver;
pub mod functor;
pub mod joiner;
pub mod sink;
pub mod source;
pub mod splitter;
pub mod util;
//! Defines the Comparison Generator for the Best Split Times. The Best Split
//! Times represent the best pace that the runner was ever on up to each split
//! in the run. The Best Split Times are calculated by taking the best split
//! time for each individual split from all of the runner's attempts.
use super::ComparisonGenerator;
use crate::{Attempt, Segment, TimeSpan, TimingMethod};
/// The Comparison Generator for the Best Split Times. The Best Split Times
/// represent the best pace that the runner was ever on up to each split in the
/// run. The Best Split Times are calculated by taking the best split time for
/// each individual split from all of the runner's attempts.
#[derive(Copy, Clone, Debug)]
pub struct BestSplitTimes;
/// The short name of this comparison. Suitable for situations where not a lot
/// of space for text is available.
pub const SHORT_NAME: &str = NAME;
/// The name of this comparison.
pub const NAME: &str = "Best Split Times";
fn generate(segments: &mut [Segment], attempts: &[Attempt], method: TimingMethod) {
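    // For each attempt, walk the segments in order and accumulate that attempt's
    // cumulative split time; for every split, keep the smallest cumulative time
    // seen across all attempts as the Best Split Time.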
for attempt in attempts {
let id = attempt.index();
let mut total_time = TimeSpan::zero();
for segment in segments.iter_mut() {
if let Some(time) = segment.segment_history().get(id) {
if let Some(time) = time[method] {
total_time += time;
let comp = &mut segment.comparison_mut(NAME)[method];
if comp.map_or(true, |c| total_time < c) {
*comp = Some(total_time);
}
}
} else {
break;
}
}
}
}
impl ComparisonGenerator for BestSplitTimes {
fn name(&self) -> &str {
NAME
}
fn generate(&mut self, segments: &mut [Segment], attempts: &[Attempt]) {
if !segments.is_empty() {
*segments[0].comparison_mut(NAME) = segments[0].best_segment_time();
for segment in &mut segments[1..] {
*segment.comparison_mut(NAME) = segment.personal_best_split_time();
}
generate(segments, attempts, TimingMethod::RealTime);
generate(segments, attempts, TimingMethod::GameTime);
}
}
}
use cgmath::*;
use std::time::Duration;
use crate::{
audio, collision,
entity::{Entity, GameStatePeek},
event_dispatch::*,
map,
sprite::{self, rendering},
state::constants::layers,
tileset,
};
// ---------------------------------------------------------------------------------------------------------------------
const FLIGHT_BAR_SCALE: f32 = 2.0;
// ---------------------------------------------------------------------------------------------------------------------
pub struct UiFlightBar {
entity_id: u32,
position: Point3<f32>,
width_scale_max: f32,
width_scale_current: f32,
}
impl Default for UiFlightBar {
fn default() -> Self {
Self {
entity_id: 0,
position: point3(0.0, 0.0, 0.0),
width_scale_max: 1.0,
width_scale_current: 1.0,
}
}
}
impl Entity for UiFlightBar {
fn init_from_map_sprite(
&mut self,
entity_id: u32,
sprite: &sprite::Sprite,
_tile: &tileset::Tile,
_map: &map::Map,
_collision_space: &mut collision::Space,
) {
self.entity_id = entity_id;
self.position = point3(sprite.origin.x, sprite.origin.y, layers::ui::FOREGROUND);
}
fn update(
&mut self,
_dt: Duration,
_map: &map::Map,
_collision_space: &mut collision::Space,
_audio: &mut audio::Audio,
_message_dispatcher: &mut Dispatcher,
game_state_peek: &GameStatePeek,
) {
self.width_scale_max = game_state_peek.player_flight.1 * FLIGHT_BAR_SCALE;
self.width_scale_current =
FLIGHT_BAR_SCALE * (game_state_peek.player_flight.0 / game_state_peek.player_flight.1);
}
fn update_uniforms(&self, uniforms: &mut rendering::Uniforms) {
uniforms
.data
.set_model_position(self.position)
.set_sprite_scale(vec2(self.width_scale_current, 1.0));
}
fn entity_id(&self) -> u32 {
self.entity_id
}
fn entity_class(&self) -> crate::entities::EntityClass {
crate::entities::EntityClass::UiFlightBar
}
fn is_alive(&self) -> bool {
true
}
fn position(&self) -> Point3<f32> {
self.position
}
fn sprite_name(&self) -> &str {
"flight_bar"
}
fn sprite_cycle(&self) -> &str {
"default"
}
}
// Copyright 2018 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! `encoding` is a serialization library supporting zero-copy (de)serialization
//! of primitive types, heterogeneous structures and arrays.
//!
//! See also [the documentation page on serialization][doc:serialization].
//!
//! # Structure serialization
//!
//! Structures are in the root of any serializable Exonum object.
//! Binary representation of structures is split into two main parts:
//!
//! - **Header:** a fixed-sized part
//! - **Body:** dynamically sized part, known only after parsing the header
//!
//! To create a structure type, you can use [`transactions!`] and [`encoding_struct!`] macros.
//!
//! [doc:serialization]: https://exonum.com/doc/architecture/serialization/
//! [`transactions!`]: ../macro.transactions.html
//! [`encoding_struct!`]: ../macro.encoding_struct.html
//!
//! # Examples
//!
//! Consider a structure with two fields: `String` and `u64`.
//! To implement Exonum (de)serialization for this structure
//! you need to use macros like this:
//!
//! ```
//! # #[macro_use] extern crate exonum;
//! # extern crate serde;
//! # extern crate serde_json;
//! encoding_struct! {
//! struct MyAwesomeStructure {
//! name: &str,
//! age: u64,
//! }
//! }
//!
//! # fn main() {
//! let student = MyAwesomeStructure::new("Andrew", 23);
//! # }
//! ```
//!
//! Then the internal buffer of `student` is as follows:
//!
//! | Position | Stored data | Hexadecimal form | Comment |
//! |--------|------|---------------------|------------------------------------------|
//! | `0 => 4` | 16 | `10 00 00 00` | LE-encoded segment pointer to the data |
//! | `4 => 8` | 6 | `06 00 00 00` | LE-encoded segment size |
//! | `8 => 16` | 23 | `17 00 00 00 00 00 00 00` | number in little endian |
//! | `16 => 22` | Andrew | `41 6e 64 72 65 77` | Text bytes in UTF-8 encoding |
//!
//! # Structure fields
//!
//! ## Primitive types
//!
//! Primitive types are all fixed-sized, and located fully in the header.
//!
//! | Type name | Size in Header | Info |
//! |--------|---------------------|--------------------------------------------------|
//! | `u8` | 1 | Regular byte |
//! | `i8` | 1 | Signed byte |
//! | `u16` | 2 | Short unsigned integer stored in little endian |
//! | `i16` | 2 | Short signed integer stored in little endian |
//! | `u32` | 4 | 32-bit unsigned integer stored in little endian |
//! | `i32` | 4 | 32-bit signed integer stored in little endian |
//! | `u64` | 8 | Long unsigned integer stored in little endian |
//! | `i64` | 8 | Long signed integer stored in little endian |
//! | `F32` | 4 | 32-bit floating point type stored in little endian \[1\]\[2\] |
//! | `F64` | 8 | 64-bit floating point type stored in little endian \[1\]\[2\] |
//! | `bool` | 1 | Stored as a byte, with `0x01` denoting true and `0x00` false \[3\] |
//!
//! \[1\]
//! Special floating point values that cannot be represented as a sequences of digits (such as
//! Infinity, NaN and signaling NaN) are not permitted.
//!
//! \[2\]
//! Floating point value serialization is hidden behind the `float_serialize` feature gate.
//!
//! \[3\]
//! Trying to represent other values as `bool` leads to undefined behavior.
//!
//! ## Segment fields
//!
//! All segment types take 8 bytes in the header: 4 for position in the buffer,
//! and 4 for the segment field size.
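//!
//! For `MyAwesomeStructure` above this means a 16-byte header: 8 bytes for the
//! `&str` segment (pointer plus size) and 8 bytes for the `u64`, which is why the
//! string bytes start at offset 16 in the buffer shown earlier.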
//!
//! ## Custom fields
//!
//! These types can be implemented as per developer's design,
//! but they should declare how many bytes they
//! write in the header using the [`field_size()`] function.
//!
//! [`field_size()`]: ./trait.Field.html#tymethod.field_size
#[cfg(feature = "float_serialize")]
pub use self::float::{F32, F64};
pub use self::{error::Error, fields::Field, segments::SegmentField};
#[macro_use]
pub mod serialize;
use std::{
convert::From, ops::{Add, Div, Mul, Sub},
};
mod error;
#[macro_use]
mod fields;
mod segments;
#[macro_use]
mod spec;
#[cfg(feature = "float_serialize")]
mod float;
#[cfg(test)]
mod tests;
/// Type alias used for references (offsets) into the buffer
pub type Offset = u32;
/// Type alias that should be returned by the `check` method of `Field`
pub type Result = ::std::result::Result<CheckedOffset, Error>;
// TODO: Replace by more generic type. (ECR-156)
/// `CheckedOffset` is a type that takes control over overflow,
/// so code cannot panic unless `unwrap` is called,
/// and you can work with this value without writing manual overflow checks.
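///
/// A short sketch of the checked arithmetic (the operator impls below return a
/// `Result` instead of panicking on overflow):
///
/// ```ignore
/// let a = CheckedOffset::new(10);
/// let b = CheckedOffset::new(20);
/// assert_eq!((a + b).unwrap().unchecked_offset(), 30);
/// assert!((CheckedOffset::new(u32::max_value()) + 1u32).is_err());
/// ```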
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd)]
pub struct CheckedOffset {
offset: Offset,
}
impl CheckedOffset {
/// create checked value
pub fn new(offset: Offset) -> Self {
Self { offset }
}
/// return unchecked offset
pub fn unchecked_offset(self) -> Offset {
self.offset
}
}
macro_rules! implement_default_ops_checked {
($trait_name:ident $function:ident $checked_function:ident) => {
impl $trait_name<CheckedOffset> for CheckedOffset {
type Output = ::std::result::Result<CheckedOffset, Error>;
fn $function(self, rhs: CheckedOffset) -> Self::Output {
self.offset
.$checked_function(rhs.offset)
.map(CheckedOffset::new)
.ok_or(Error::OffsetOverflow)
}
}
impl $trait_name<Offset> for CheckedOffset {
type Output = ::std::result::Result<CheckedOffset, Error>;
fn $function(self, rhs: Offset) -> Self::Output {
self.offset
.$checked_function(rhs)
.map(CheckedOffset::new)
.ok_or(Error::OffsetOverflow)
}
}
};
}
implement_default_ops_checked!{Add add checked_add }
implement_default_ops_checked!{Sub sub checked_sub }
implement_default_ops_checked!{Mul mul checked_mul }
implement_default_ops_checked!{Div div checked_div }
impl From<Offset> for CheckedOffset {
fn from(offset: Offset) -> Self {
Self::new(offset)
}
}
| 34.680628 | 94 | 0.617452 |
91bbbf5c0a0df567e7db0670f8a78baf5a5afdb6 | 634 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Foo {
    x: i32,
}
impl Foo {
    fn new() -> Foo {
        Foo { x: 3 }
    }
}
pub fn main() {
    let x = Foo::new();
    println!("{}", x.x);
}
| 25.36 | 68 | 0.662461 |
e61361966720a078e734722b28358bea154222ab | 146 | fn main() {
let mut buf: Vec<u8> = vec![b'a', b'b', b'c', b'd', b'e'];
let sl = buf.as_mut_slice();
let _ = memx::memset(sl, b'A');
}
| 24.333333 | 62 | 0.486301 |
9b6b5991748f96e60ff0ea20d126faaa315637fb | 5,974 | // This file is part of Substrate.
// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A collection of node-specific RPC methods.
//!
//! Since `substrate` core functionality makes no assumptions
//! about the modules used inside the runtime, neither do the
//! RPC methods defined in the `sc-rpc` crate.
//! It means that `client/rpc` can't have any methods that
//! need some strong assumptions about the particular runtime.
//!
//! The RPCs available in this crate however can make some assumptions
//! about how the runtime is constructed and what FRAME pallets
//! are part of it. Therefore all node-runtime-specific RPCs can
//! be placed here or imported from corresponding FRAME RPC definitions.
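//!
//! A minimal wiring sketch (the service pieces such as `client` and `pool` are
//! assumed to come from the node's service builder, and the RPC metadata type is
//! assumed to be `sc_rpc::Metadata`):
//!
//! ```ignore
//! let deps = FullDeps { client, pool, select_chain, deny_unsafe, babe, grandpa };
//! let io_handler = create_full::<_, _, sc_rpc::Metadata, _>(deps);
//! ```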
#![warn(missing_docs)]
use std::sync::Arc;
use node_primitives::{Block, BlockNumber, AccountId, Index, Balance, Hash};
use node_runtime::UncheckedExtrinsic;
use sp_api::ProvideRuntimeApi;
use sp_transaction_pool::TransactionPool;
use sp_blockchain::{Error as BlockChainError, HeaderMetadata, HeaderBackend};
use sp_consensus::SelectChain;
use sc_keystore::KeyStorePtr;
use sp_consensus_babe::BabeApi;
use sc_consensus_epochs::SharedEpochChanges;
use sc_consensus_babe::{Config, Epoch};
use sc_consensus_babe_rpc::BabeRpcHandler;
use sc_finality_grandpa::{SharedVoterState, SharedAuthoritySet};
use sc_finality_grandpa_rpc::GrandpaRpcHandler;
use sc_rpc_api::DenyUnsafe;
use sp_block_builder::BlockBuilder;
/// Light client extra dependencies.
pub struct LightDeps<C, F, P> {
/// The client instance to use.
pub client: Arc<C>,
/// Transaction pool instance.
pub pool: Arc<P>,
/// Remote access to the blockchain (async).
pub remote_blockchain: Arc<dyn sc_client_api::light::RemoteBlockchain<Block>>,
/// Fetcher instance.
pub fetcher: Arc<F>,
}
/// Extra dependencies for BABE.
pub struct BabeDeps {
/// BABE protocol config.
pub babe_config: Config,
/// BABE pending epoch changes.
pub shared_epoch_changes: SharedEpochChanges<Block, Epoch>,
/// The keystore that manages the keys of the node.
pub keystore: KeyStorePtr,
}
/// Extra dependencies for GRANDPA
pub struct GrandpaDeps {
/// Voting round info.
pub shared_voter_state: SharedVoterState,
/// Authority set info.
pub shared_authority_set: SharedAuthoritySet<Hash, BlockNumber>,
}
/// Full client dependencies.
pub struct FullDeps<C, P, SC> {
/// The client instance to use.
pub client: Arc<C>,
/// Transaction pool instance.
pub pool: Arc<P>,
/// The SelectChain Strategy
pub select_chain: SC,
/// Whether to deny unsafe calls
pub deny_unsafe: DenyUnsafe,
/// BABE specific dependencies.
pub babe: BabeDeps,
/// GRANDPA specific dependencies.
pub grandpa: GrandpaDeps,
}
/// Instantiate all Full RPC extensions.
pub fn create_full<C, P, M, SC>(
deps: FullDeps<C, P, SC>,
) -> jsonrpc_core::IoHandler<M> where
C: ProvideRuntimeApi<Block>,
C: HeaderBackend<Block> + HeaderMetadata<Block, Error=BlockChainError> + 'static,
C: Send + Sync + 'static,
C::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>,
C::Api: pallet_contracts_rpc::ContractsRuntimeApi<Block, AccountId, Balance, BlockNumber>,
C::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance, UncheckedExtrinsic>,
C::Api: BabeApi<Block>,
C::Api: BlockBuilder<Block>,
P: TransactionPool + 'static,
M: jsonrpc_core::Metadata + Default,
SC: SelectChain<Block> +'static,
{
use substrate_frame_rpc_system::{FullSystem, SystemApi};
use pallet_contracts_rpc::{Contracts, ContractsApi};
use pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApi};
let mut io = jsonrpc_core::IoHandler::default();
let FullDeps {
client,
pool,
select_chain,
deny_unsafe,
babe,
grandpa,
} = deps;
let BabeDeps {
keystore,
babe_config,
shared_epoch_changes,
} = babe;
let GrandpaDeps {
shared_voter_state,
shared_authority_set,
} = grandpa;
io.extend_with(
SystemApi::to_delegate(FullSystem::new(client.clone(), pool, deny_unsafe))
);
// Making synchronous calls in light client freezes the browser currently,
// more context: https://github.com/paritytech/substrate/pull/3480
// These RPCs should use an asynchronous caller instead.
io.extend_with(
ContractsApi::to_delegate(Contracts::new(client.clone()))
);
io.extend_with(
TransactionPaymentApi::to_delegate(TransactionPayment::new(client.clone()))
);
io.extend_with(
sc_consensus_babe_rpc::BabeApi::to_delegate(
BabeRpcHandler::new(
client,
shared_epoch_changes,
keystore,
babe_config,
select_chain,
deny_unsafe,
),
)
);
io.extend_with(
sc_finality_grandpa_rpc::GrandpaApi::to_delegate(
GrandpaRpcHandler::new(shared_authority_set, shared_voter_state)
)
);
io
}
/// Instantiate all Light RPC extensions.
pub fn create_light<C, P, M, F>(
deps: LightDeps<C, F, P>,
) -> jsonrpc_core::IoHandler<M> where
C: sp_blockchain::HeaderBackend<Block>,
C: Send + Sync + 'static,
F: sc_client_api::light::Fetcher<Block> + 'static,
P: TransactionPool + 'static,
M: jsonrpc_core::Metadata + Default,
{
use substrate_frame_rpc_system::{LightSystem, SystemApi};
let LightDeps {
client,
pool,
remote_blockchain,
fetcher
} = deps;
let mut io = jsonrpc_core::IoHandler::default();
io.extend_with(
SystemApi::<Hash, AccountId, Index>::to_delegate(LightSystem::new(client, remote_blockchain, fetcher, pool))
);
io
}
| 30.793814 | 110 | 0.747908 |
bb812b71e833f995f6bb1504952f02c31be20347 | 2,820 | // Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::Result;
use futures_channel::mpsc;
use serde::{Deserialize, Serialize};
use starcoin_crypto::hash::HashValue;
use starcoin_types::{
account_address::AccountAddress, block::Block, transaction, transaction::SignedUserTransaction,
};
use std::fmt::Debug;
use std::sync::Arc;
pub type TxnStatusFullEvent = Arc<[(HashValue, transaction::TxStatus)]>;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TxPoolStatus {
pub txn_count: usize,
pub txn_max_count: usize,
pub mem: usize,
pub mem_max: usize,
pub senders: usize,
pub is_full: bool,
}
pub trait TxPoolSyncService: Clone + Send + Sync + Unpin {
fn add_txns(
&self,
txns: Vec<SignedUserTransaction>,
) -> Vec<Result<(), transaction::TransactionError>>;
/// Removes transaction from the pool.
///
/// Attempts to "cancel" a transaction. If it was not propagated yet (or not accepted by other peers)
/// there is a good chance that the transaction will actually be removed.
fn remove_txn(&self, txn_hash: HashValue, is_invalid: bool) -> Option<SignedUserTransaction>;
    /// Get all pending txns which are ok to be packaged for mining.
    /// `now` is the current timestamp in secs; if it's None, it defaults to the real world's current timestamp.
    /// It's an Option to make mocking time easier.
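    ///
    /// A minimal usage sketch (`pool` is any implementor of this trait):
    ///
    /// ```ignore
    /// // Take up to 100 pending transactions, using the real-world clock.
    /// let txns = pool.get_pending_txns(Some(100), None);
    /// ```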
fn get_pending_txns(
&self,
max_len: Option<u64>,
now: Option<u64>,
) -> Vec<SignedUserTransaction>;
/// Returns next valid sequence number for given sender
/// or `None` if there are no pending transactions from that sender.
fn next_sequence_number(&self, address: AccountAddress) -> Option<u64>;
/// subscribe
fn subscribe_txns(&self) -> mpsc::UnboundedReceiver<TxnStatusFullEvent>;
fn subscribe_pending_txn(&self) -> mpsc::UnboundedReceiver<Arc<[HashValue]>>;
    /// Notify the txpool about new chain blocks.
    /// `enacted` is the blocks which enter the main chain.
    /// `retracted` is the blocks which belonged to the previous main chain.
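    ///
    /// A minimal reorg sketch (`pool` and the block values are illustrative):
    ///
    /// ```ignore
    /// // Two blocks joined the main chain, one block was retracted from it.
    /// pool.chain_new_block(vec![block_a, block_b], vec![old_block])?;
    /// ```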
fn chain_new_block(&self, enacted: Vec<Block>, retracted: Vec<Block>) -> Result<()>;
/// Tx Pool status
fn status(&self) -> TxPoolStatus;
fn find_txn(&self, hash: &HashValue) -> Option<SignedUserTransaction>;
fn txns_of_sender(
&self,
sender: &AccountAddress,
max_len: Option<usize>,
) -> Vec<SignedUserTransaction>;
}
#[derive(Clone, Debug)]
pub struct PropagateTransactions {
txns: Vec<SignedUserTransaction>,
}
impl PropagateTransactions {
pub fn new(txns: Vec<SignedUserTransaction>) -> Self {
Self { txns }
}
pub fn transaction_to_propagate(&self) -> Vec<SignedUserTransaction> {
self.txns.clone()
}
}
| 32.790698 | 107 | 0.683333 |
ef0b2c2078d83b09623afa9b59350ac51cdd7a13 | 386 |
// -*- rust -*-
use std::rc::Rc;
type Compare<T> = fn(T, T) -> bool;
fn test_generic<T: Clone>(expected: T, eq: Compare<T>) {
    let actual: T = match true {
        true => expected.clone(),
        _ => panic!("wat"),
    };
    assert!(eq(expected, actual));
}
fn test_vec() {
    fn compare_box(v1: Rc<i32>, v2: Rc<i32>) -> bool {
        v1 == v2
    }
    test_generic::<Rc<i32>>(Rc::new(1), compare_box);
}
fn main() { test_vec(); }
| 21.444444 | 74 | 0.546632 |
f455b175c01f14d0526daee8c53fb896af79cdb9 | 3,692 | use std::ops::Index;
use mirai_annotations::*;
pub trait Bytes32Ext: Index<usize> + Sized {
/// Returns the `index`-th nibble.
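    ///
    /// A small sketch of the indexing convention (assumes `Nibble` can be compared
    /// against a value built with `Nibble::from`):
    ///
    /// ```ignore
    /// let bytes = [0xabu8; 32];
    /// // Even indices address the high half of a byte, odd indices the low half.
    /// assert_eq!(bytes.get_nibble(0), Nibble::from(0x0au8));
    /// assert_eq!(bytes.get_nibble(1), Nibble::from(0x0bu8));
    /// ```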
fn get_nibble(&self, index: usize) -> crate::types::nibble::Nibble;
/// Returns the length of common prefix of `self` and `other` in bits.
fn common_prefix_bits_len(&self, other: &[u8; 32]) -> usize;
/// Returns a `HashValueBitIterator` over all the bits that represent this hash value.
fn iter_bits(&self) -> HashValueBitIterator<'_>;
/// Returns the `index`-th nibble in the bytes.
fn nibble(&self, index: usize) -> u8;
/// Returns the length of common prefix of `self` and `other` in nibbles.
fn common_prefix_nibbles_len(&self, other: &[u8; 32]) -> usize {
self.common_prefix_bits_len(other) / 4
}
/// Constructs a `HashValue` from an iterator of bits.
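    ///
    /// A round-trip sketch with `iter_bits` (both methods come from this trait):
    ///
    /// ```ignore
    /// let bytes = [0x12u8; 32];
    /// let rebuilt = <[u8; 32]>::from_bit_iter(bytes.iter_bits()).unwrap();
    /// assert_eq!(rebuilt, bytes);
    /// ```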
fn from_bit_iter(iter: impl ExactSizeIterator<Item = bool>) -> Option<Self>;
}
impl Bytes32Ext for [u8; 32] {
fn get_nibble(&self, index: usize) -> crate::types::nibble::Nibble {
crate::types::nibble::Nibble::from(if index % 2 == 0 {
self[index / 2] >> 4
} else {
self[index / 2] & 0x0F
})
}
fn common_prefix_bits_len(&self, other: &[u8; 32]) -> usize {
self.iter_bits()
.zip(other.iter_bits())
.take_while(|(x, y)| x == y)
.count()
}
fn iter_bits(&self) -> HashValueBitIterator<'_> {
HashValueBitIterator::new(self)
}
fn nibble(&self, index: usize) -> u8 {
assume!(index < 32 * 2); // assumed precondition
let pos = index / 2;
let shift = if index % 2 == 0 { 4 } else { 0 };
(self[pos] >> shift) & 0x0f
}
/// Constructs a `HashValue` from an iterator of bits.
fn from_bit_iter(iter: impl ExactSizeIterator<Item = bool>) -> Option<Self> {
if iter.len() != 256 {
return None;
}
let mut buf = [0; 32];
for (i, bit) in iter.enumerate() {
if bit {
buf[i / 8] |= 1 << (7 - i % 8);
}
}
Some(buf)
}
}
/// An iterator over a hash value that generates one bit for each iteration.
pub struct HashValueBitIterator<'a> {
/// The reference to the bytes that represent the `HashValue`.
hash_bytes: &'a [u8],
pos: std::ops::Range<usize>,
// invariant hash_bytes.len() == HashValue::LENGTH;
// invariant pos.end == hash_bytes.len() * 8;
}
impl<'a> HashValueBitIterator<'a> {
/// Constructs a new `HashValueBitIterator` using given `HashValue`.
fn new(hash_value: &'a [u8; 32]) -> Self {
HashValueBitIterator {
hash_bytes: hash_value.as_ref(),
pos: (0..32 * 8),
}
}
/// Returns the `index`-th bit in the bytes.
fn get_bit(&self, index: usize) -> bool {
assume!(index < self.pos.end); // assumed precondition
assume!(self.hash_bytes.len() == 32); // invariant
assume!(self.pos.end == self.hash_bytes.len() * 8); // invariant
let pos = index / 8;
let bit = 7 - index % 8;
(self.hash_bytes[pos] >> bit) & 1 != 0
}
}
impl<'a> std::iter::Iterator for HashValueBitIterator<'a> {
type Item = bool;
fn next(&mut self) -> Option<Self::Item> {
self.pos.next().map(|x| self.get_bit(x))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.pos.size_hint()
}
}
impl<'a> std::iter::DoubleEndedIterator for HashValueBitIterator<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.pos.next_back().map(|x| self.get_bit(x))
}
}
impl<'a> std::iter::ExactSizeIterator for HashValueBitIterator<'a> {}
| 32.672566 | 90 | 0.576652 |
628c989bfc341bd48bd43eace26f881e61220f89 | 533 | //! rust-analyzer is lazy and does not compute anything unless asked. This
//! is sometimes counterproductive when, for example, the first goto-definition
//! request takes longer to compute. This module implements prepopulating of
//! various caches; it's not really advanced at the moment.
use hir::Semantics;
use crate::{FileId, RootDatabase};
pub(crate) fn prime_caches(db: &RootDatabase, files: Vec<FileId>) {
let sema = Semantics::new(db);
for file in files {
let _ = sema.to_module_def(file);
}
}
| 33.3125 | 80 | 0.718574 |
6700362fee1eb1edf13e1b9a02286612815e2441 | 1,341 | use crate::{
AccountId, Accounts, Balance, Currencies, CurrencyId, GetNativeCurrencyId, NewAccountDeposit, Runtime, TokenSymbol,
DOLLARS,
};
use frame_support::traits::StoredMap;
use orml_traits::{MultiCurrency, MultiCurrencyExtended};
use sp_runtime::traits::{SaturatedConversion, StaticLookup};
pub fn lookup_of_account(who: AccountId) -> <<Runtime as frame_system::Trait>::Lookup as StaticLookup>::Source {
<Runtime as frame_system::Trait>::Lookup::unlookup(who)
}
pub fn set_balance(currency_id: CurrencyId, who: &AccountId, balance: Balance) {
if !Accounts::is_explicit(who) {
let _ = <Currencies as MultiCurrencyExtended<_>>::update_balance(
GetNativeCurrencyId::get(),
&who,
NewAccountDeposit::get().saturated_into(),
);
}
let _ = <Currencies as MultiCurrencyExtended<_>>::update_balance(currency_id, &who, balance.saturated_into());
assert_eq!(
<Currencies as MultiCurrency<_>>::free_balance(currency_id, who),
balance
);
}
pub fn set_ausd_balance(who: &AccountId, balance: Balance) {
set_balance(CurrencyId::Token(TokenSymbol::XUSD), who, balance)
}
pub fn set_dos_balance(who: &AccountId, balance: Balance) {
set_balance(CurrencyId::Token(TokenSymbol::DOS), who, balance)
}
pub fn dollars<T: Into<u128>>(d: T) -> Balance {
DOLLARS.saturating_mul(d.into())
}
| 33.525 | 117 | 0.726324 |
c1bcc109ddf2ad5a352c70c57405fcd874f29184 | 2,679 | use crate::constants::DATABASE_PATH;
use rusqlite::{params, Connection, Result};
#[allow(dead_code)]
pub struct CachedSheetValue {
pub sheet_id: i32,
pub value: i64
}
impl CachedSheetValue {
#[allow(dead_code)]
pub fn new(sheet_id: i32, value: i64) -> CachedSheetValue {
CachedSheetValue {
sheet_id,
value
}
}
#[allow(dead_code)]
pub fn insert(&self) -> Result<()> {
let conn = Connection::open(DATABASE_PATH)?;
conn.execute("
      insert into cached_sheet_values (
sheet_id,
value
)
values (
?1,
?2
)
", params![self.sheet_id, self.value])
.map(|_n| ())
}
#[allow(dead_code)]
pub fn update(&self) -> Result<()> {
let conn = Connection::open(DATABASE_PATH)?;
conn.execute("
update cached_sheet_values
set value = ?1
where sheet_id = ?2
",
params![self.value, self.sheet_id],
)?;
Ok(())
}
#[allow(dead_code)]
pub fn remove(&self) -> Result<()> {
let conn = Connection::open(DATABASE_PATH)?;
conn.execute("
delete from cached_sheet_values
where sheet_id = ?1
",
params![self.sheet_id],
)?;
Ok(())
}
#[allow(dead_code)]
pub fn get_all() -> Result<Vec<CachedSheetValue>> {
let conn = Connection::open(DATABASE_PATH)?;
let mut query = conn.prepare("
select sheet_id, value
from cached_sheet_values
")?;
    let cached_values = query.query_map(params![], |row| {
Ok(
CachedSheetValue {
sheet_id: row.get(0)?,
value: row.get(1)?
}
)
})?;
    cached_values.collect()
}
#[allow(dead_code)]
pub fn get_by_sheet_id(sheet_id: i32) -> Result<Option<CachedSheetValue>> {
let conn = Connection::open(DATABASE_PATH)?;
let mut query = conn.prepare("
select sheet_id, value
from cached_sheet_values
where sheet_id = ?1
")?;
    let mut cached_values = query.query_map(params![sheet_id], |row| {
Ok(
CachedSheetValue {
sheet_id: row.get(0)?,
value: row.get(1)?
}
)
})?;
    cached_values.next().transpose()
}
}
pub fn create_table() -> Result<()> {
let conn = Connection::open(DATABASE_PATH)?;
conn.execute("
create table if not exists cached_sheet_values (
sheet_id integer not null,
value integer not null
)
", params![])
.map(|_n| ())
}
pub fn remove_by_sheet_id(sheet_id: i32) -> Result<()> {
let conn = Connection::open(DATABASE_PATH)?;
conn.execute("
delete from cached_sheet_values
where sheet_id = ?1
",
params![sheet_id],
)?;
Ok(())
} | 20.295455 | 77 | 0.585293 |
8f19db17062913a43473159f6eca230055287ff2 | 3,359 | use mopa::{Any, mopafy};
use crate::builder::commands::alpine;
use crate::builder::context::Context;
use crate::builder::error::StepError;
use crate::builder::packages;
/// This returns the same as Distribution::name but is a separate trait because
/// static methods make a trait non-object-safe.
pub trait Named {
/// Human-readable name of distribution
fn static_name() -> &'static str;
}
pub trait Distribution: Any {
/// Only true if distribution is not known yet (i.e. can be set)
fn is_unknown(&self) -> bool { false }
/// Human-readable name of distribution
///
/// Object-safe variant of the method
fn name(&self) -> &'static str;
/// Downloads initial image of distribution
fn bootstrap(&mut self, ctx: &mut Context) -> Result<(), StepError>;
/// Does distro-specific cleanup at the end of the build
fn finish(&mut self, _ctx: &mut Context) -> Result<(), String> { Ok(()) }
/// Adds repository
fn add_repo(&mut self, ctx: &mut Context, repo: &str) -> Result<(), StepError>;
/// Install normal packages
fn install(&mut self, ctx: &mut Context, pkgs: &[String]) -> Result<(), StepError>;
/// Install special predefined packages for specific features
fn ensure_packages(&mut self, ctx: &mut Context,
features: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>;
}
// This is needed for cast to work
mopafy!(Distribution);
pub struct Unknown;
impl Distribution for Unknown {
fn is_unknown(&self) -> bool { true }
fn name(&self) -> &'static str { "unknown" }
fn bootstrap(&mut self, _: &mut Context) -> Result<(), StepError> {
unreachable!();
}
fn add_repo(&mut self, _: &mut Context, _repo: &str)
-> Result<(), StepError>
{
Err(StepError::NoDistro)
}
fn install(&mut self, _: &mut Context, _pkgs: &[String])
-> Result<(), StepError>
{
Err(StepError::NoDistro)
}
fn ensure_packages(&mut self, _: &mut Context, _: &[packages::Package])
-> Result<Vec<packages::Package>, StepError>
{
Err(StepError::NoDistro)
}
}
pub trait DistroBox {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError>;
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>;
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError>;
}
impl DistroBox for Box<dyn Distribution> {
fn set<D: Distribution+Sized>(&mut self, value: D) -> Result<(), StepError> {
if self.is::<Unknown>() {
*self = Box::new(value);
Ok(())
} else {
return Err(StepError::DistroOverlap(value.name(), self.name()));
}
}
fn specific<T, R, F>(&mut self, f: F) -> Result<R, StepError>
where T: Distribution+Named, R: Sized,
F: FnOnce(&mut T) -> Result<R, StepError>,
{
self.downcast_mut::<T>()
.map(f)
.ok_or(StepError::WrongDistro(T::static_name(), self.name()))
.and_then(|x| x)
}
fn npm_configure(&mut self, ctx: &mut Context) -> Result<(), StepError> {
if (**self).is::<Unknown>() {
alpine::configure(self, ctx, alpine::LATEST_VERSION)?;
}
Ok(())
}
}
| 31.990476 | 87 | 0.598392 |
017d90cbcd736a112a841aff8c99fc5d6a0f4cb0 | 293 | use std::sync::mpsc::{channel, Sender};
use std::thread;
fn producer(c: &Sender<Vec<u8>>) {
    c.send(vec![1u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]).unwrap();
}
pub fn main() {
    let (tx, rx) = channel::<Vec<u8>>();
    let producer_thread = thread::spawn(move || producer(&tx));
    let _data: Vec<u8> = rx.recv().unwrap();
    producer_thread.join().unwrap();
}
| 19.533333 | 73 | 0.474403 |
d7e1b01ee419fe7872e977e86b671058f73d0471 | 1,403 | use crate::{AccountField, AccountsStruct};
use quote::quote;
// Generates the `ToAccountMetas` trait implementation.
pub fn generate(accs: &AccountsStruct) -> proc_macro2::TokenStream {
let name = &accs.ident;
let to_acc_metas: Vec<proc_macro2::TokenStream> = accs
.fields
.iter()
.map(|f: &AccountField| {
let (name, is_signer) = match f {
AccountField::CompositeField(s) => (&s.ident, quote! {None}),
AccountField::Field(f) => {
let is_signer = match f.constraints.is_signer() {
false => quote! {None},
true => quote! {Some(true)},
};
(&f.ident, is_signer)
}
};
quote! {
account_metas.extend(self.#name.to_account_metas(#is_signer));
}
})
.collect();
let (impl_gen, ty_gen, where_clause) = accs.generics.split_for_impl();
quote! {
#[automatically_derived]
impl#impl_gen anchor_lang::ToAccountMetas for #name #ty_gen #where_clause{
fn to_account_metas(&self, is_signer: Option<bool>) -> Vec<anchor_lang::solana_program::instruction::AccountMeta> {
let mut account_metas = vec![];
#(#to_acc_metas)*
account_metas
}
}
}
}
| 32.627907 | 127 | 0.528867 |
ac67928649857ab602e47f6482192aa6c0b5dca9 | 2,465 | use crate::*;
/// A visual separator. A horizontal or vertical line (depending on [`Layout`]).
///
/// Usually you'd use the shorter version [`Ui::separator`].
///
/// ```
/// # let ui = &mut egui::Ui::__test();
/// // These are equivalent:
/// ui.separator();
/// ui.add(egui::Separator::default());
/// ```
#[must_use = "You should put this widget in an ui with `ui.add(widget);`"]
pub struct Separator {
spacing: f32,
is_horizontal_line: Option<bool>,
}
impl Default for Separator {
fn default() -> Self {
Self {
spacing: 6.0,
is_horizontal_line: None,
}
}
}
impl Separator {
/// How much space we take up. The line is painted in the middle of this.
pub fn spacing(mut self, spacing: f32) -> Self {
self.spacing = spacing;
self
}
/// Explicitly ask for a horizontal line.
/// By default you will get a horizontal line in vertical layouts,
/// and a vertical line in horizontal layouts.
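    ///
    /// A minimal example, mirroring the doc test at the top of this file:
    ///
    /// ```
    /// # let ui = &mut egui::Ui::__test();
    /// ui.add(egui::Separator::default().horizontal());
    /// ```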
pub fn horizontal(mut self) -> Self {
self.is_horizontal_line = Some(true);
self
}
/// Explicitly ask for a vertical line.
/// By default you will get a horizontal line in vertical layouts,
/// and a vertical line in horizontal layouts.
pub fn vertical(mut self) -> Self {
self.is_horizontal_line = Some(false);
self
}
}
impl Widget for Separator {
fn ui(self, ui: &mut Ui) -> Response {
let Separator {
spacing,
is_horizontal_line,
} = self;
let is_horizontal_line = is_horizontal_line
.unwrap_or_else(|| ui.is_grid() || !ui.layout().main_dir().is_horizontal());
let available_space = ui.available_size_before_wrap();
let size = if is_horizontal_line {
vec2(available_space.x, spacing)
} else {
vec2(spacing, available_space.y)
};
let (rect, response) = ui.allocate_at_least(size, Sense::hover());
let points = if is_horizontal_line {
[
pos2(rect.left(), rect.center().y),
pos2(rect.right(), rect.center().y),
]
} else {
[
pos2(rect.center().x, rect.top()),
pos2(rect.center().x, rect.bottom()),
]
};
let stroke = ui.visuals().widgets.noninteractive.bg_stroke;
ui.painter().line_segment(points, stroke);
response
}
}
| 28.333333 | 88 | 0.569574 |
4ab10335fc5e5699b1c8c0a9c95aa1f569e34939 | 9,324 | use std::env;
#[cfg(target_family = "unix")]
use std::fs::OpenOptions;
use std::io::Write;
#[cfg(target_family = "unix")]
use std::os::unix::fs::OpenOptionsExt;
use std::path::{Path, PathBuf};
use std::process::{self, Command};
use std::str;
use anyhow::{bail, Context, Result};
use fs_err as fs;
use target_lexicon::{OperatingSystem, Triple};
/// Zig linker wrapper
#[derive(Debug, clap::Subcommand)]
pub enum Zig {
/// `zig cc` wrapper
#[clap(name = "cc", trailing_var_arg = true)]
Cc {
/// `zig cc` arguments
#[clap(takes_value = true, multiple_values = true)]
args: Vec<String>,
},
/// `zig c++` wrapper
#[clap(name = "c++", trailing_var_arg = true)]
Cxx {
/// `zig c++` arguments
#[clap(takes_value = true, multiple_values = true)]
args: Vec<String>,
},
}
impl Zig {
/// Execute the underlying zig command
pub fn execute(&self) -> Result<()> {
let (cmd, cmd_args) = match self {
Zig::Cc { args } => ("cc", args),
Zig::Cxx { args } => ("c++", args),
};
let target = cmd_args
.iter()
.position(|x| x == "-target")
.and_then(|index| cmd_args.get(index + 1));
let is_musl = target.map(|x| x.contains("musl")).unwrap_or_default();
let is_windows_gnu = target
.map(|x| x.contains("windows-gnu"))
.unwrap_or_default();
let filter_link_arg = |arg: &str| {
if arg == "-lgcc_s" {
// Replace libgcc_s with libunwind
return Some("-lunwind".to_string());
}
if is_windows_gnu {
if arg == "-lgcc_eh" {
// zig doesn't provide gcc_eh alternative
// We use libc++ to replace it on windows gnu targets
return Some("-lc++".to_string());
} else if arg == "-lwindows" || arg == "-l:libpthread.a" || arg == "-lgcc" {
return None;
}
}
if is_musl {
// Avoids duplicated symbols with both zig musl libc and the libc crate
if arg.ends_with(".o") && arg.contains("self-contained") && arg.contains("crt") {
return None;
}
if arg.ends_with(".rlib") && arg.contains("liblibc-") {
return None;
}
}
Some(arg.to_string())
};
let mut new_cmd_args = Vec::with_capacity(cmd_args.len());
for arg in cmd_args {
let arg = if arg.starts_with('@') && arg.ends_with("linker-arguments") {
// rustc passes arguments to linker via an @-file when arguments are too long
// See https://github.com/rust-lang/rust/issues/41190
let content = fs::read(arg.trim_start_matches('@'))?;
let link_args: Vec<_> = str::from_utf8(&content)?
.split('\n')
.filter_map(filter_link_arg)
.collect();
fs::write(arg.trim_start_matches('@'), link_args.join("\n").as_bytes())?;
Some(arg.to_string())
} else {
filter_link_arg(arg)
};
if let Some(arg) = arg {
new_cmd_args.push(arg);
}
}
let (zig, zig_args) = Self::find_zig()?;
let mut child = Command::new(zig)
.args(zig_args)
.arg(cmd)
.args(new_cmd_args)
.spawn()
.with_context(|| format!("Failed to run `zig {}`", cmd))?;
let status = child.wait().expect("Failed to wait on zig child process");
if !status.success() {
process::exit(status.code().unwrap_or(1));
}
Ok(())
}
/// Search for `python -m ziglang` first and for `zig` second.
pub fn find_zig() -> Result<(String, Vec<String>)> {
Self::find_zig_python()
.or_else(|_| Self::find_zig_bin())
.context("Failed to find zig")
}
/// Detect the plain zig binary
fn find_zig_bin() -> Result<(String, Vec<String>)> {
let output = Command::new("zig").arg("version").output()?;
let version_str =
str::from_utf8(&output.stdout).context("`zig version` didn't return utf8 output")?;
Self::validate_zig_version(version_str)?;
Ok(("zig".to_string(), Vec::new()))
}
/// Detect the Python ziglang package
fn find_zig_python() -> Result<(String, Vec<String>)> {
let output = Command::new("python3")
.args(&["-m", "ziglang", "version"])
.output()?;
let version_str = str::from_utf8(&output.stdout)
.context("`python3 -m ziglang version` didn't return utf8 output")?;
Self::validate_zig_version(version_str)?;
Ok((
"python3".to_string(),
vec!["-m".to_string(), "ziglang".to_string()],
))
}
fn validate_zig_version(version: &str) -> Result<()> {
let min_ver = semver::Version::new(0, 9, 0);
let version = semver::Version::parse(version.trim())?;
if version >= min_ver {
Ok(())
} else {
bail!(
"zig version {} is too old, need at least {}",
version,
min_ver
)
}
}
}
/// Prepare wrapper scripts for `zig cc` and `zig c++` and return their paths
///
/// We want to use `zig cc` as linker and c compiler. We want to call `python -m ziglang cc`, but
/// cargo only accepts a path to an executable as linker, so we add a wrapper script. We then also
/// use the wrapper script to pass arguments and substitute an unsupported argument.
///
/// We create different files for different args because otherwise cargo might skip recompiling even
/// if the linker target changed
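///
/// A sketch of the intended call pattern (the target string is illustrative; the
/// env var follows cargo's `CARGO_TARGET_<TRIPLE>_LINKER` convention):
///
/// ```ignore
/// let (zig_cc, _zig_cxx) = prepare_zig_linker("x86_64-unknown-linux-gnu.2.17")?;
/// std::env::set_var("CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER", &zig_cc);
/// ```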
#[allow(clippy::blocks_in_if_conditions)]
pub fn prepare_zig_linker(target: &str) -> Result<(PathBuf, PathBuf)> {
let (rust_target, abi_suffix) = target.split_once('.').unwrap_or((target, ""));
let abi_suffix = if abi_suffix.is_empty() {
String::new()
} else {
if abi_suffix
.split_once('.')
.filter(|(x, y)| {
!x.is_empty()
&& x.chars().all(|c| c.is_ascii_digit())
&& !y.is_empty()
&& y.chars().all(|c| c.is_ascii_digit())
})
.is_none()
{
bail!("Malformed zig target abi suffix.")
}
format!(".{}", abi_suffix)
};
let triple: Triple = rust_target.parse().unwrap();
let arch = triple.architecture.to_string();
let file_ext = if cfg!(windows) { "bat" } else { "sh" };
let zig_cc = format!("zigcc-{}.{}", target, file_ext);
let zig_cxx = format!("zigcxx-{}.{}", target, file_ext);
let cc_args = "-g"; // prevent stripping
let cc_args = match triple.operating_system {
OperatingSystem::Linux => format!(
"-target {}-linux-{}{} {}",
arch, triple.environment, abi_suffix, cc_args,
),
OperatingSystem::MacOSX { .. } | OperatingSystem::Darwin => {
format!("-target {}-macos-gnu{} {}", arch, abi_suffix, cc_args)
}
OperatingSystem::Windows { .. } => format!(
"-target {}-windows-{}{} {}",
arch, triple.environment, abi_suffix, cc_args,
),
_ => bail!("unsupported target"),
};
let zig_linker_dir = dirs::cache_dir()
        // If there really is no cache dir, cwd will also do
.unwrap_or_else(|| env::current_dir().expect("Failed to get current dir"))
.join(env!("CARGO_PKG_NAME"))
.join(env!("CARGO_PKG_VERSION"));
fs::create_dir_all(&zig_linker_dir)?;
let zig_cc = zig_linker_dir.join(zig_cc);
let zig_cxx = zig_linker_dir.join(zig_cxx);
write_linker_wrapper(&zig_cc, "cc", &cc_args)?;
write_linker_wrapper(&zig_cxx, "c++", &cc_args)?;
Ok((zig_cc, zig_cxx))
}
/// Write a zig cc wrapper batch script for unix
#[cfg(target_family = "unix")]
fn write_linker_wrapper(path: &Path, command: &str, args: &str) -> Result<()> {
let mut custom_linker_file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.mode(0o700)
.open(path)?;
let current_exe = if let Ok(exe) = env::var("CARGO_BIN_EXE_cargo-zigbuild") {
PathBuf::from(exe)
} else {
env::current_exe()?
};
writeln!(&mut custom_linker_file, "#!/usr/bin/env bash")?;
writeln!(
&mut custom_linker_file,
"{} zig {} -- {} $@",
current_exe.display(),
command,
args
)?;
Ok(())
}
/// Write a zig cc wrapper batch script for windows
#[cfg(not(target_family = "unix"))]
fn write_linker_wrapper(path: &Path, command: &str, args: &str) -> Result<()> {
let mut custom_linker_file = fs::File::create(path)?;
let current_exe = if let Ok(exe) = env::var("CARGO_BIN_EXE_cargo-zigbuild") {
PathBuf::from(exe)
} else {
env::current_exe()?
};
writeln!(
&mut custom_linker_file,
"{} zig {} -- {} %*",
current_exe.display(),
command,
args
)?;
Ok(())
}
| 35.724138 | 100 | 0.535929 |
f982fd098e6a6a794295df77ba4df3b9d9cccfc3 | 2,054 | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
extern crate rand;
extern crate system_uri;
#[macro_use]
extern crate unwrap;
use rand::Rng;
use std::env;
use std::process::exit;
use std::{thread, time};
use system_uri::{install, open, App, SystemUriError};
fn install_and_open() -> Result<(), SystemUriError> {
let mut rng = rand::thread_rng();
let exec = String::from(unwrap!(unwrap!(std::env::current_exe()).to_str()));
let app = App::new(
"net.maidsafe.example".to_string(),
"MaidSafe Ltd.".to_string(),
"Example R/W".to_string(),
exec,
None,
);
let schema = format!("testschema{}", rng.gen::<u32>());
println!("Installing ourselves under {}", schema);
install(&app, &[schema.clone()]).and_then(|()| {
println!("Install succeeded 😄");
println!("Trying to open {}:test", schema);
open(format!("{}:test", schema)).and_then(|()| {
println!("Open succeeded 😄, everything is fine 🎉!");
Ok(())
})
})
}
fn main() {
if let Some(url) = env::args().nth(1) {
println!(
"Being started with {} as first parameter. Yay 🎉. Closing in 3",
url
);
thread::sleep(time::Duration::from_secs(1));
println!("2");
thread::sleep(time::Duration::from_secs(1));
println!("1");
thread::sleep(time::Duration::from_secs(1));
println!("Good bye!");
exit(0);
}
if let Err(ref e) = install_and_open() {
println!("error: {}", e);
::std::process::exit(1);
}
}
| 29.768116 | 95 | 0.60224 |
72098d97565b1cfd396f788cd4c2480087f670a2 | 65,984 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
#[derive(Clone)]
pub struct Client {
endpoint: String,
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
scopes: Vec<String>,
pipeline: azure_core::pipeline::Pipeline,
}
#[derive(Clone)]
pub struct ClientBuilder {
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
endpoint: Option<String>,
scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
pub(crate) fn endpoint(&self) -> &str {
self.endpoint.as_str()
}
pub(crate) fn token_credential(&self) -> &dyn azure_core::TokenCredential {
self.credential.as_ref()
}
pub(crate) fn scopes(&self) -> Vec<&str> {
self.scopes.iter().map(String::as_str).collect()
}
pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
let mut context = azure_core::Context::default();
let mut request = request.into();
self.pipeline.send(&mut context, &mut request).await
}
pub fn new(endpoint: impl Into<String>, credential: std::sync::Arc<dyn azure_core::TokenCredential>, scopes: Vec<String>) -> Self {
let endpoint = endpoint.into();
let pipeline = azure_core::pipeline::Pipeline::new(
option_env!("CARGO_PKG_NAME"),
option_env!("CARGO_PKG_VERSION"),
azure_core::ClientOptions::default(),
Vec::new(),
Vec::new(),
);
Self {
endpoint,
credential,
scopes,
pipeline,
}
}
pub fn accounts(&self) -> accounts::Client {
accounts::Client(self.clone())
}
pub fn classic_accounts(&self) -> classic_accounts::Client {
classic_accounts::Client(self.clone())
}
pub fn generate(&self) -> generate::Client {
generate::Client(self.clone())
}
pub fn operations(&self) -> operations::Client {
operations::Client(self.clone())
}
pub fn user_classic_accounts(&self) -> user_classic_accounts::Client {
user_classic_accounts::Client(self.clone())
}
}
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
Operations_List(#[from] operations::list::Error),
#[error(transparent)]
Accounts_CheckNameAvailability(#[from] accounts::check_name_availability::Error),
#[error(transparent)]
Accounts_List(#[from] accounts::list::Error),
#[error(transparent)]
Accounts_ListByResourceGroup(#[from] accounts::list_by_resource_group::Error),
#[error(transparent)]
Accounts_Get(#[from] accounts::get::Error),
#[error(transparent)]
Accounts_CreateOrUpdate(#[from] accounts::create_or_update::Error),
#[error(transparent)]
Accounts_Update(#[from] accounts::update::Error),
#[error(transparent)]
Accounts_Delete(#[from] accounts::delete::Error),
#[error(transparent)]
UserClassicAccounts_List(#[from] user_classic_accounts::list::Error),
#[error(transparent)]
ClassicAccounts_GetDetails(#[from] classic_accounts::get_details::Error),
#[error(transparent)]
Generate_AccessToken(#[from] generate::access_token::Error),
}
pub mod operations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> {
Box::pin(async move {
let url_str = &format!("{}/providers/Microsoft.VideoIndexer/operations", self.client.endpoint(),);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OperationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod accounts {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn check_name_availability(
&self,
subscription_id: impl Into<String>,
check_name_availability_parameters: impl Into<models::AccountCheckNameAvailabilityParameters>,
) -> check_name_availability::Builder {
check_name_availability::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
check_name_availability_parameters: check_name_availability_parameters.into(),
}
}
pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
pub fn list_by_resource_group(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_resource_group::Builder {
list_by_resource_group::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
}
}
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
parameters: None,
}
}
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
parameters: None,
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
}
}
}
pub mod check_name_availability {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) check_name_availability_parameters: models::AccountCheckNameAvailabilityParameters,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckNameAvailabilityResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.VideoIndexer/checkNameAvailability",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.check_name_availability_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CheckNameAvailabilityResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AccountList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.VideoIndexer/accounts",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AccountList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AccountList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.VideoIndexer/accounts",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AccountList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Account, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.VideoIndexer/accounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Account =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
            #[error("Bad request (HTTP 400)")]
            BadRequest400 { value: models::ErrorResponse },
            #[error("Not found (HTTP 404)")]
            NotFound404 { value: models::ErrorResponse },
            #[error("Conflict (HTTP 409)")]
            Conflict409 { value: models::ErrorResponse },
            #[error("Internal server error (HTTP 500)")]
            InternalServerError500 { value: models::ErrorResponse },
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) parameters: Option<models::Account>,
}
impl Builder {
pub fn parameters(mut self, parameters: impl Into<models::Account>) -> Self {
self.parameters = Some(parameters.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Account, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.VideoIndexer/accounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = if let Some(parameters) = &self.parameters {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(parameters).map_err(Error::Serialize)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Account =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::BAD_REQUEST => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::BadRequest400 { value: rsp_value })
}
http::StatusCode::NOT_FOUND => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::NotFound404 { value: rsp_value })
}
http::StatusCode::CONFLICT => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::Conflict409 { value: rsp_value })
}
http::StatusCode::INTERNAL_SERVER_ERROR => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::InternalServerError500 { value: rsp_value })
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
            #[error("Bad request (HTTP 400)")]
            BadRequest400 { value: models::ErrorResponse },
            #[error("Not found (HTTP 404)")]
            NotFound404 { value: models::ErrorResponse },
            #[error("Internal server error (HTTP 500)")]
            InternalServerError500 { value: models::ErrorResponse },
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) parameters: Option<models::AccountPatch>,
}
impl Builder {
pub fn parameters(mut self, parameters: impl Into<models::AccountPatch>) -> Self {
self.parameters = Some(parameters.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Account, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.VideoIndexer/accounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = if let Some(parameters) = &self.parameters {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(parameters).map_err(Error::Serialize)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Account =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::BAD_REQUEST => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::BadRequest400 { value: rsp_value })
}
http::StatusCode::NOT_FOUND => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::NotFound404 { value: rsp_value })
}
http::StatusCode::INTERNAL_SERVER_ERROR => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::InternalServerError500 { value: rsp_value })
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
            #[error("Internal server error (HTTP 500)")]
            InternalServerError500 { value: models::ErrorResponse },
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.VideoIndexer/accounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
http::StatusCode::INTERNAL_SERVER_ERROR => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::InternalServerError500 { value: rsp_value })
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod user_classic_accounts {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self, location: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
location: location.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
            #[error("Internal server error (HTTP 500)")]
            InternalServerError500 { value: models::ErrorResponse },
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) location: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::UserClassicAccountList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/providers/Microsoft.VideoIndexer/locations/{}/userClassicAccounts",
self.client.endpoint(),
&self.location
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::UserClassicAccountList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::INTERNAL_SERVER_ERROR => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::InternalServerError500 { value: rsp_value })
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod classic_accounts {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get_details(&self, location: impl Into<String>, account_name: impl Into<String>) -> get_details::Builder {
get_details::Builder {
client: self.0.clone(),
location: location.into(),
account_name: account_name.into(),
}
}
}
pub mod get_details {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
            #[error("Bad request (HTTP 400)")]
            BadRequest400 { value: models::ErrorResponse },
            #[error("Unauthorized (HTTP 401)")]
            Unauthorized401 { value: models::ErrorResponse },
            #[error("Not found (HTTP 404)")]
            NotFound404 { value: models::ErrorResponse },
            #[error("Internal server error (HTTP 500)")]
            InternalServerError500 { value: models::ErrorResponse },
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) location: String,
pub(crate) account_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ClassicAccount, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/providers/Microsoft.VideoIndexer/locations/{}/classicAccounts/{}",
self.client.endpoint(),
&self.location,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ClassicAccount =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::BAD_REQUEST => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::BadRequest400 { value: rsp_value })
}
http::StatusCode::UNAUTHORIZED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::Unauthorized401 { value: rsp_value })
}
http::StatusCode::NOT_FOUND => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::NotFound404 { value: rsp_value })
}
http::StatusCode::INTERNAL_SERVER_ERROR => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::InternalServerError500 { value: rsp_value })
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod generate {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn access_token(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
) -> access_token::Builder {
access_token::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
parameters: None,
}
}
}
pub mod access_token {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
            #[error("Bad request (HTTP 400)")]
            BadRequest400 { value: models::ErrorResponse },
            #[error("Not found (HTTP 404)")]
            NotFound404 { value: models::ErrorResponse },
            #[error("Conflict (HTTP 409)")]
            Conflict409 { value: models::ErrorResponse },
            #[error("Internal server error (HTTP 500)")]
            InternalServerError500 { value: models::ErrorResponse },
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) parameters: Option<models::GenerateAccessTokenParameters>,
}
impl Builder {
pub fn parameters(mut self, parameters: impl Into<models::GenerateAccessTokenParameters>) -> Self {
self.parameters = Some(parameters.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AccessToken, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.VideoIndexer/accounts/{}/generateAccessToken",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = if let Some(parameters) = &self.parameters {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(parameters).map_err(Error::Serialize)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AccessToken =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::BAD_REQUEST => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::BadRequest400 { value: rsp_value })
}
http::StatusCode::NOT_FOUND => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::NotFound404 { value: rsp_value })
}
http::StatusCode::CONFLICT => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::Conflict409 { value: rsp_value })
}
http::StatusCode::INTERNAL_SERVER_ERROR => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::InternalServerError500 { value: rsp_value })
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
| 52.660814 | 137 | 0.521126 |
de3d96036a82c9f902d2f175fbfc14a335f7b10d | 3,622 | #![feature(int_log)]
#![feature(map_first_last)]
extern crate ffmpeg_next as ffmpeg;
mod process;
mod utils;
use {
ffmpeg::{
format::{input, Pixel},
media,
util::frame::video::Video,
},
std::path::Path,
};
pub use {ffmpeg::Error, process::*, utils::*};
pub fn init() {
ffmpeg::init().unwrap();
ffmpeg::log::set_level(ffmpeg::log::Level::Quiet);
}
pub fn first_frame(filename: &Path) -> Result<(u32, u32, Vec<u8>), ffmpeg::Error> {
let mut ictx = input(&filename)?;
let input = ictx
.streams()
.best(media::Type::Video)
.ok_or(ffmpeg::Error::StreamNotFound)?;
let stream_idx = input.index();
let mut decoder = input.codec().decoder().video()?;
let mut converter = decoder.converter(Pixel::BGRA)?;
for (stream, packet) in ictx.packets() {
if stream.index() == stream_idx {
decoder.send_packet(&packet)?;
let mut decoded = Video::empty();
if decoder.receive_frame(&mut decoded).is_ok() {
let mut rgb_frame = Video::empty();
converter.run(&decoded, &mut rgb_frame)?;
return Ok((
decoder.width(),
decoder.height(),
rgb_frame.data(0).to_owned(),
));
}
}
}
Err(ffmpeg::Error::InvalidData)
}
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum CalculationUnit {
Average,
Pixel,
KMeans,
}
impl Default for CalculationUnit {
fn default() -> Self {
Self::Average
}
}
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum ColorSpace {
RGB,
HSV,
CIELAB,
}
impl Default for ColorSpace {
fn default() -> Self {
Self::RGB
}
}
impl From<ColorSpace> for String {
fn from(cs: ColorSpace) -> Self {
Self::from(match cs {
ColorSpace::RGB => "RGB",
ColorSpace::HSV => "HSV",
ColorSpace::CIELAB => "CIE L*a*b*",
})
}
}
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum DistanceAlgorithm {
Euclidean,
CIEDE2000,
}
impl Default for DistanceAlgorithm {
fn default() -> Self {
Self::Euclidean
}
}
impl From<DistanceAlgorithm> for String {
fn from(da: DistanceAlgorithm) -> Self {
Self::from(match da {
DistanceAlgorithm::Euclidean => "Euclidean",
DistanceAlgorithm::CIEDE2000 => "CIEDE2000",
})
}
}
pub fn str2cu(cu: &str) -> Result<CalculationUnit, String> {
match cu {
"average" => Ok(CalculationUnit::Average),
"pixel" => Ok(CalculationUnit::Pixel),
"k_means" => Ok(CalculationUnit::KMeans),
_ => Err("incorrect calculation unit".into()),
}
}
pub fn str2cs(cs: &str) -> Result<ColorSpace, String> {
match cs {
"rgb" => Ok(ColorSpace::RGB),
"hsv" => Ok(ColorSpace::HSV),
"cielab" => Ok(ColorSpace::CIELAB),
_ => Err("incorrect color space".into()),
}
}
pub fn str2da(da: &str) -> Result<DistanceAlgorithm, String> {
match da {
"euclidean" => Ok(DistanceAlgorithm::Euclidean),
"ciede2000" => Ok(DistanceAlgorithm::CIEDE2000),
_ => Err("incorrect distance algorithm".into()),
}
}
pub fn str2filter(filter: &str) -> Result<Filter, String> {
match filter {
"nearest" => Ok(Filter::Nearest),
"triangle" => Ok(Filter::Triangle),
"catmullRom" => Ok(Filter::CatmullRom),
"gaussian" => Ok(Filter::Gaussian),
"lanczos3" => Ok(Filter::Lanczos3),
_ => Err("incorrect filter".into()),
}
}
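// A small additional sketch (not part of the original crate's tests) exercising the
// string parsers and conversions above; the accepted spellings come straight from the
// match arms, and the expected display strings from the From<ColorSpace> impl.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parses_option_strings() {
        assert_eq!(str2cu("k_means").unwrap(), CalculationUnit::KMeans);
        assert_eq!(str2cs("cielab").unwrap(), ColorSpace::CIELAB);
        assert_eq!(str2da("euclidean").unwrap(), DistanceAlgorithm::Euclidean);
        assert_eq!(String::from(ColorSpace::CIELAB), "CIE L*a*b*");
        assert!(str2cu("unknown").is_err());
    }
}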
| 24.472973 | 83 | 0.561292 |
abd783cda1bc7bd02d9a2a1edb04657ffd76c7ed | 6,246 | use actix;
use actix_web::{error::ResponseError, HttpResponse, FutureResponse};
use diesel::result::{Error as DieselError};
use futures::future;
use std::io;
use actix_web::http::StatusCode;
use ostree::OstreeError;
#[derive(Fail, Debug, Clone)]
pub enum DeltaGenerationError {
#[fail(display = "{}", _0)]
Failed(String),
}
impl DeltaGenerationError {
pub fn new(s: &str) -> Self {
DeltaGenerationError::Failed(s.to_string())
}
}
impl From<io::Error> for DeltaGenerationError {
fn from(e: io::Error) -> Self {
DeltaGenerationError::new(&e.to_string())
}
}
impl From<OstreeError> for DeltaGenerationError {
fn from(e: OstreeError) -> Self {
DeltaGenerationError::new(&e.to_string())
}
}
#[derive(Fail, Debug, Clone)]
pub enum JobError {
#[fail(display = "InternalError: {}", _0)]
InternalError(String),
#[fail(display = "DbError: {}", _0)]
DBError(String),
}
impl JobError {
pub fn new(s: &str) -> Self {
JobError::InternalError(s.to_string())
}
}
pub type JobResult<T> = Result<T, JobError>;
impl From<DieselError> for JobError {
fn from(e: DieselError) -> Self {
        JobError::DBError(e.to_string())
}
}
impl From<OstreeError> for JobError {
fn from(e: OstreeError) -> Self {
        JobError::InternalError(e.to_string())
}
}
impl From<DeltaGenerationError> for JobError {
fn from(e: DeltaGenerationError) -> Self {
        JobError::InternalError(format!("Failed to generate delta: {}", e.to_string()))
}
}
impl From<io::Error> for JobError {
fn from(e: io::Error) -> Self {
        JobError::InternalError(e.to_string())
}
}
#[derive(Fail, Debug)]
pub enum ApiError {
#[fail(display = "Internal Server Error ({})", _0)]
InternalServerError(String),
#[fail(display = "NotFound")]
NotFound,
#[fail(display = "BadRequest: {}", _0)]
BadRequest(String),
#[fail(display = "WrongRepoState({}): {}", _2, _0)]
WrongRepoState(String,String,String),
    #[fail(display = "WrongPublishedState({}): {}", _2, _0)]
WrongPublishedState(String,String,String),
#[fail(display = "InvalidToken")]
InvalidToken,
#[fail(display = "NotEnoughPermissions")]
NotEnoughPermissions(String),
}
impl From<DieselError> for ApiError {
fn from(e: DieselError) -> Self {
match e {
DieselError::NotFound => ApiError::NotFound,
_ => {
ApiError::InternalServerError(e.to_string())
}
}
}
}
impl From<io::Error> for ApiError {
fn from(io_error: io::Error) -> Self {
        ApiError::InternalServerError(io_error.to_string())
}
}
impl From<actix::MailboxError> for ApiError {
fn from(e: actix::MailboxError) -> Self {
        ApiError::InternalServerError(e.to_string())
}
}
impl ApiError {
pub fn to_json(&self) -> String {
match *self {
ApiError::InternalServerError(ref _internal_message) => json!({
"status": 500,
"error-type": "internal-error",
"message": "Internal Server Error"
}),
ApiError::NotFound => json!({
"status": 404,
"error-type": "not-found",
"message": "Not found",
}),
ApiError::BadRequest(ref message) => json!({
"status": 400,
"error-type": "generic-error",
"message": message,
}),
ApiError::WrongRepoState(ref message, ref expected, ref state) => json!({
"status": 400,
"message": message,
"error-type": "wrong-repo-state",
"current-state": state,
"expected-state": expected,
}),
ApiError::WrongPublishedState(ref message, ref expected, ref state) => json!({
"status": 400,
"message": message,
"error-type": "wrong-published-state",
"current-state": state,
"expected-state": expected,
}),
ApiError::InvalidToken => json!({
"status": 401,
"error-type": "invalid-token",
"message": "Invalid token",
}),
            ApiError::NotEnoughPermissions(ref message) => json!({
                "status": 401,
                "error-type": "token-insufficient",
                "message": format!("Not enough permissions: {}", message),
            }),
}
.to_string()
}
pub fn status_code(&self) -> StatusCode {
match *self {
ApiError::InternalServerError(ref _internal_message) => StatusCode::INTERNAL_SERVER_ERROR,
ApiError::NotFound => StatusCode::NOT_FOUND,
ApiError::BadRequest(ref _message) => StatusCode::BAD_REQUEST,
ApiError::WrongRepoState(_,_,_) => StatusCode::BAD_REQUEST,
ApiError::WrongPublishedState(_,_,_) => StatusCode::BAD_REQUEST,
ApiError::InvalidToken => StatusCode::UNAUTHORIZED,
ApiError::NotEnoughPermissions(ref _message) => StatusCode::UNAUTHORIZED,
}
}
}
impl ResponseError for ApiError {
fn error_response(&self) -> HttpResponse {
if let ApiError::InternalServerError(internal_message) = self {
error!("Responding with internal error: {}", internal_message);
}
if let ApiError::NotEnoughPermissions(internal_message) = self {
error!("Responding with NotEnoughPermissions error: {}", internal_message);
}
HttpResponse::build(self.status_code()).json(self.to_json())
}
}
impl From<ApiError> for FutureResponse<HttpResponse> {
fn from(e: ApiError) -> Self {
Box::new(future::ok(e.error_response()))
}
}
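// A small sketch (not from the original crate) checking the status-code and
// JSON error-type mapping defined above; the variant names are the real ones,
// the message strings passed in are illustrative.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn api_error_mapping() {
        assert_eq!(ApiError::NotFound.status_code(), StatusCode::NOT_FOUND);
        assert_eq!(ApiError::InvalidToken.status_code(), StatusCode::UNAUTHORIZED);
        assert!(ApiError::NotFound.to_json().contains("not-found"));
        assert!(ApiError::BadRequest("bad".to_string()).to_json().contains("generic-error"));
    }
}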
| 28.262443 | 102 | 0.54611 |
1425760051d255e295150e5c74ed6eedbba62858 | 6,345 | // This file is part of Substrate.
// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Hash utilities.
use codec::{Codec, MaxEncodedLen};
use sp_std::prelude::Vec;
use sp_io::hashing::{blake2_128, blake2_256, twox_64, twox_128, twox_256};
// This trait must be kept coherent with frame-support-procedural HasherKind usage
pub trait Hashable: Sized {
fn blake2_128(&self) -> [u8; 16];
fn blake2_256(&self) -> [u8; 32];
fn blake2_128_concat(&self) -> Vec<u8>;
fn twox_128(&self) -> [u8; 16];
fn twox_256(&self) -> [u8; 32];
fn twox_64_concat(&self) -> Vec<u8>;
fn identity(&self) -> Vec<u8>;
}
impl<T: Codec> Hashable for T {
fn blake2_128(&self) -> [u8; 16] {
self.using_encoded(blake2_128)
}
fn blake2_256(&self) -> [u8; 32] {
self.using_encoded(blake2_256)
}
fn blake2_128_concat(&self) -> Vec<u8> {
self.using_encoded(Blake2_128Concat::hash)
}
fn twox_128(&self) -> [u8; 16] {
self.using_encoded(twox_128)
}
fn twox_256(&self) -> [u8; 32] {
self.using_encoded(twox_256)
}
fn twox_64_concat(&self) -> Vec<u8> {
self.using_encoded(Twox64Concat::hash)
}
fn identity(&self) -> Vec<u8> { self.encode() }
}
/// Hasher to use to hash keys to insert to storage.
pub trait StorageHasher: 'static {
const METADATA: frame_metadata::StorageHasher;
type Output: AsRef<[u8]>;
fn hash(x: &[u8]) -> Self::Output;
/// The max length of the final hash, for the given key type.
fn max_len<K: MaxEncodedLen>() -> usize;
}
/// Hasher to use to hash keys to insert to storage.
///
/// Reversible hashers store the encoded key after the hash part.
pub trait ReversibleStorageHasher: StorageHasher {
/// Split the hash part out of the input.
///
/// I.e. for input `&[hash ++ key ++ some]` returns `&[key ++ some]`
fn reverse(x: &[u8]) -> &[u8];
}
/// Store the key directly.
pub struct Identity;
impl StorageHasher for Identity {
const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Identity;
type Output = Vec<u8>;
fn hash(x: &[u8]) -> Vec<u8> {
x.to_vec()
}
fn max_len<K: MaxEncodedLen>() -> usize {
K::max_encoded_len()
}
}
impl ReversibleStorageHasher for Identity {
fn reverse(x: &[u8]) -> &[u8] {
x
}
}
/// Hash storage keys with `concat(twox64(key), key)`
pub struct Twox64Concat;
impl StorageHasher for Twox64Concat {
const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox64Concat;
type Output = Vec<u8>;
fn hash(x: &[u8]) -> Vec<u8> {
twox_64(x)
.iter()
.chain(x.into_iter())
.cloned()
.collect::<Vec<_>>()
}
fn max_len<K: MaxEncodedLen>() -> usize {
K::max_encoded_len().saturating_add(8)
}
}
impl ReversibleStorageHasher for Twox64Concat {
fn reverse(x: &[u8]) -> &[u8] {
if x.len() < 8 {
log::error!("Invalid reverse: hash length too short");
return &[]
}
&x[8..]
}
}
/// Hash storage keys with `concat(blake2_128(key), key)`
pub struct Blake2_128Concat;
impl StorageHasher for Blake2_128Concat {
const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_128Concat;
type Output = Vec<u8>;
fn hash(x: &[u8]) -> Vec<u8> {
blake2_128(x)
.iter()
.chain(x.into_iter())
.cloned()
.collect::<Vec<_>>()
}
fn max_len<K: MaxEncodedLen>() -> usize {
K::max_encoded_len().saturating_add(16)
}
}
impl ReversibleStorageHasher for Blake2_128Concat {
fn reverse(x: &[u8]) -> &[u8] {
if x.len() < 16 {
log::error!("Invalid reverse: hash length too short");
return &[]
}
&x[16..]
}
}
/// Hash storage keys with blake2 128
pub struct Blake2_128;
impl StorageHasher for Blake2_128 {
const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_128;
type Output = [u8; 16];
fn hash(x: &[u8]) -> [u8; 16] {
blake2_128(x)
}
fn max_len<K: MaxEncodedLen>() -> usize {
16
}
}
/// Hash storage keys with blake2 256
pub struct Blake2_256;
impl StorageHasher for Blake2_256 {
const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_256;
type Output = [u8; 32];
fn hash(x: &[u8]) -> [u8; 32] {
blake2_256(x)
}
fn max_len<K: MaxEncodedLen>() -> usize {
32
}
}
/// Hash storage keys with twox 128
pub struct Twox128;
impl StorageHasher for Twox128 {
const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox128;
type Output = [u8; 16];
fn hash(x: &[u8]) -> [u8; 16] {
twox_128(x)
}
fn max_len<K: MaxEncodedLen>() -> usize {
16
}
}
/// Hash storage keys with twox 256
pub struct Twox256;
impl StorageHasher for Twox256 {
const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox256;
type Output = [u8; 32];
fn hash(x: &[u8]) -> [u8; 32] {
twox_256(x)
}
fn max_len<K: MaxEncodedLen>() -> usize {
32
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_twox_64_concat() {
let r = Twox64Concat::hash(b"foo");
assert_eq!(r.split_at(8), (&twox_128(b"foo")[..8], &b"foo"[..]))
}
#[test]
fn test_blake2_128_concat() {
let r = Blake2_128Concat::hash(b"foo");
assert_eq!(r.split_at(16), (&blake2_128(b"foo")[..], &b"foo"[..]))
}
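	// Additional sketch (not from the upstream suite): the concat hashers are
	// reversible, i.e. `reverse` strips the hash prefix and yields the encoded
	// key that `hash` appended after it.
	#[test]
	fn test_reverse_concat_hashers() {
		let hashed = Twox64Concat::hash(b"foo");
		assert_eq!(Twox64Concat::reverse(&hashed), &b"foo"[..]);
		let hashed = Blake2_128Concat::hash(b"foo");
		assert_eq!(Blake2_128Concat::reverse(&hashed), &b"foo"[..]);
		assert_eq!(Identity::reverse(b"foo"), &b"foo"[..]);
	}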
#[test]
fn max_lengths() {
use codec::Encode;
let encoded_0u32 = &0u32.encode()[..];
assert_eq!(Twox64Concat::hash(encoded_0u32).len(), Twox64Concat::max_len::<u32>());
assert_eq!(Twox128::hash(encoded_0u32).len(), Twox128::max_len::<u32>());
assert_eq!(Twox256::hash(encoded_0u32).len(), Twox256::max_len::<u32>());
assert_eq!(Blake2_128::hash(encoded_0u32).len(), Blake2_128::max_len::<u32>());
assert_eq!(Blake2_128Concat::hash(encoded_0u32).len(), Blake2_128Concat::max_len::<u32>());
assert_eq!(Blake2_256::hash(encoded_0u32).len(), Blake2_256::max_len::<u32>());
assert_eq!(Identity::hash(encoded_0u32).len(), Identity::max_len::<u32>());
}
}
| 27.828947 | 97 | 0.675965 |
11a8437a79777ff39b6a23bd4affe42e8771222e | 662 | use crate::error::Error;
use crate::message::error::ErrorResponse;
use crate::message::VerdictResponse;
use std::convert::TryFrom;
pub enum MessageType {
Ping,
Pong,
Close,
VerdictResponse(VerdictResponse),
}
impl TryFrom<&String> for MessageType {
type Error = Error;
fn try_from(json: &String) -> Result<Self, Self::Error> {
if let Ok(resp) = VerdictResponse::try_from(json) {
return Ok(MessageType::VerdictResponse(resp));
}
if let Ok(err) = ErrorResponse::try_from(json) {
return Err(Error::ErrorResponse(err));
}
Err(Error::InvalidMessage(json.to_string()))
}
}
| 25.461538 | 61 | 0.637462 |
cc0b355061df7396fec841308845e7d93a7623e3 | 1,239 | use super::{Args, DirectiveValidator};
use crate::ast;
use crate::dml;
use crate::error::DatamodelError;
/// Moves a directive into a namespace scope.
///
/// This is mainly used with custom source blocks. It wraps a directive and
/// prepends the source name in front of the directive name.
pub struct DirectiveScope<T> {
inner: Box<dyn DirectiveValidator<T>>,
#[allow(dead_code)]
scope: String,
name: String,
}
impl<T> DirectiveScope<T> {
/// Creates a new instance, using the given directive and
    /// a namespace name.
pub fn new(inner: Box<dyn DirectiveValidator<T>>, scope: &str) -> DirectiveScope<T> {
DirectiveScope {
name: format!("{}.{}", scope, inner.directive_name()),
inner,
scope: String::from(scope),
}
}
}
impl<T> DirectiveValidator<T> for DirectiveScope<T> {
fn directive_name(&self) -> &str {
&self.name
}
fn validate_and_apply(&self, args: &mut Args, obj: &mut T) -> Result<(), DatamodelError> {
self.inner.validate_and_apply(args, obj)
}
fn serialize(&self, obj: &T, datamodel: &dml::Datamodel) -> Result<Vec<ast::Directive>, DatamodelError> {
self.inner.serialize(obj, datamodel)
}
}
| 30.975 | 109 | 0.640032 |
16f0028541f76c169252537d1de794f54203bab4 | 1,988 | use resolve::resolve;
use failure::Error;
use linked_hash_map::LinkedHashMap;
use serde_json::value::Value;
use serde_yaml;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::str::FromStr;
pub const CATEGORY_ENV: &str = "PERIPHERIO_CATEGORY_PATH";
pub const CATEGORY_FILE: &str = "category.yml";
#[derive(Deserialize, Clone)]
pub struct Signature {
pub args: Option<LinkedHashMap<String, Value>>,
pub returns: Option<LinkedHashMap<String, Value>>,
}
pub struct Category {
name: String,
path: PathBuf,
version: String,
author: Option<String>,
required_signatures: HashMap<String, Signature>,
}
#[derive(Deserialize)]
struct LibMetaData {
name: String,
version: String,
author: Option<String>,
signatures: HashMap<String, Signature>,
}
impl FromStr for Category {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let path = Category::resolve(s)?;
Category::new(path)
}
}
impl Category {
pub fn resolve(name: &str) -> Result<PathBuf, Error> {
resolve(name, CATEGORY_ENV, CATEGORY_FILE)
}
pub fn new<P: AsRef<Path>>(path: P) -> Result<Self, Error> {
let mut file = File::open(&path.as_ref().join(CATEGORY_FILE))?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let metadata: LibMetaData = serde_yaml::from_str(&contents)?;
Ok(Category {
path: path.as_ref().to_path_buf(),
name: metadata.name,
author: metadata.author,
version: metadata.version,
required_signatures: metadata.signatures,
})
}
pub fn name(&self) -> &String {
&self.name
}
pub fn required_symbols(&self) -> impl Iterator<Item = &String> {
self.required_signatures.keys()
}
pub fn signatures(&self) -> &HashMap<String, Signature> {
&self.required_signatures
}
}
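// A sketch of the metadata layout `Category::new` expects in `category.yml`,
// inferred from the `LibMetaData`/`Signature` structs above; the concrete
// category name, version, and signature entries are illustrative only.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parses_minimal_metadata() {
        let yaml = r#"
name: example-category
version: "0.1.0"
author: someone
signatures:
  read: {}
"#;
        let meta: LibMetaData = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(meta.name, "example-category");
        assert_eq!(meta.version, "0.1.0");
        assert!(meta.signatures.contains_key("read"));
    }
}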
| 25.164557 | 71 | 0.636821 |
4b2616b9d616b54761665bdb9333700a6920ff6e | 3,558 | use std::fs::File;
use std::io::{BufRead, BufReader};
fn main() {
let args: Vec<String> = std::env::args().collect();
let file = File::open(args.get(1).expect("No file provided")).expect("Could not open file");
//part_one(file);
part_two(file);
}
fn part_two(file: File) {
let claims: Vec<Claim> = BufReader::new(file)
.lines()
.map(|l| l.expect("Could not read line"))
.map(|l| parse_claim(l))
.collect();
    for c in &claims {
        let overlapped = claims.iter().any(|o| overlap(c, o));
        if !overlapped {
            println!("ID {} didn't overlap", c.id);
        }
    }
}
fn overlap(a: &Claim, b: &Claim) -> bool {
// If it's the same claim, don't count as overlap
if a.id == b.id {
return false;
}
// If either is to the right of the other
if a.x_margin + 1 > b.x_margin + b.x_size || b.x_margin + 1 > a.x_margin + a.x_size {
return false;
}
// If either is below the other
if a.y_margin + 1 > b.y_margin + b.y_size || b.y_margin + 1 > a.y_margin + a.y_size {
return false;
}
true
}
fn part_one(file: File) {
let claims: Vec<Claim> = BufReader::new(file)
.lines()
.map(|l| l.expect("Could not read line"))
.map(|l| parse_claim(l))
.collect();
let mut multi_claimed = 0;
for y in 1..1001 {
for x in 1..1001 {
if is_multi_claimed((x, y), &claims) {
multi_claimed = multi_claimed + 1;
}
}
}
println!("{}", multi_claimed);
}
fn is_multi_claimed((x, y): (usize, usize), claims: &Vec<Claim>) -> bool {
let mut claim_count = 0;
for c in claims {
if within((x, y), c) {
claim_count = claim_count + 1;
if claim_count > 1 {
return true;
}
}
}
false
}
fn within((x, y): (usize, usize), claim: &Claim) -> bool {
if x > claim.x_margin
&& x <= claim.x_margin + claim.x_size
&& y > claim.y_margin
&& y <= claim.y_margin + claim.y_size
{
return true;
}
false
}
fn parse_claim(s: String) -> Claim {
let parsed: Vec<&str> = s.split(&['#', '@', ',', ':', 'x'][..]).collect();
let id = parsed
.get(1)
.expect("Missing element")
.trim()
.parse::<usize>()
.expect("Could not parse usize from element");
let x_margin = parsed
.get(2)
.expect("Missing element")
.trim()
.parse::<usize>()
.expect("Could not parse usize from element");
let y_margin = parsed
.get(3)
.expect("Missing element")
.trim()
.parse::<usize>()
.expect("Could not parse usize from element");
let x_size = parsed
.get(4)
.expect("Missing element")
.trim()
.parse::<usize>()
.expect("Could not parse usize from element");
let y_size = parsed
.get(5)
.expect("Missing element")
.trim()
.parse::<usize>()
.expect("Could not parse usize from element");
Claim {
id,
x_margin,
y_margin,
x_size,
y_size,
}
}
#[derive(Debug)]
struct Claim {
id: usize,
x_margin: usize,
y_margin: usize,
x_size: usize,
y_size: usize,
}
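// A small sketch using the example claims from the puzzle statement
// ("#1 @ 1,3: 4x4", "#2 @ 3,1: 4x4", "#3 @ 5,5: 2x2") to check the
// parse/overlap/within logic above: claims 1 and 2 overlap, claim 3 is intact.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn overlap_and_within() {
        let a = parse_claim("#1 @ 1,3: 4x4".to_string());
        let b = parse_claim("#2 @ 3,1: 4x4".to_string());
        let c = parse_claim("#3 @ 5,5: 2x2".to_string());
        assert!(overlap(&a, &b));
        assert!(!overlap(&a, &c));
        assert!(!overlap(&b, &c));
        assert!(within((2, 4), &a));
        assert!(!within((1, 3), &a));
    }
}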
| 25.056338 | 96 | 0.498876 |
6191a0f2172042944552ce00b2fa8c3ab0af8f26 | 3,308 | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::Error,
fidl::endpoints::DiscoverableService,
fidl_fuchsia_bluetooth_a2dp::{AudioModeMarker, AudioModeRequestStream},
fidl_fuchsia_bluetooth_avdtp as fidl_avdtp, fidl_fuchsia_bluetooth_avrcp as fidl_avrcp,
fidl_fuchsia_bluetooth_bredr::ProfileMarker,
fidl_fuchsia_bluetooth_component::LifecycleMarker,
fidl_fuchsia_cobalt::LoggerFactoryMarker,
fidl_fuchsia_media::{AudioDeviceEnumeratorMarker, SessionAudioConsumerFactoryMarker},
fidl_fuchsia_media_sessions2::PublisherMarker,
fidl_fuchsia_mediacodec::CodecFactoryMarker,
fidl_fuchsia_settings::AudioMarker,
fidl_fuchsia_sysmem::AllocatorMarker,
fidl_fuchsia_tracing_provider::RegistryMarker,
fuchsia_async as fasync,
fuchsia_component::{client::connect_to_protocol, server::ServiceFs},
futures::{StreamExt, TryStream, TryStreamExt},
log::info,
};
async fn process_request_stream<S>(mut stream: S, tag: &str)
where
S: TryStream<Error = fidl::Error> + Unpin,
<S as TryStream>::Ok: std::fmt::Debug,
{
info!("Received {} service connection", tag);
while let Some(request) = stream.try_next().await.expect("serving request stream failed") {
info!("Received {} service request: {:?}", tag, request);
}
}
#[fasync::run_singlethreaded]
async fn main() -> Result<(), Error> {
fuchsia_syslog::init().unwrap();
info!("Starting bt-a2dp-topology-fake component...");
// Set up the outgoing `svc` directory with the services A2DP provides.
let mut fs = ServiceFs::new();
fs.dir("svc")
.add_fidl_service(|stream: AudioModeRequestStream| {
fasync::Task::local(process_request_stream(stream, AudioModeMarker::SERVICE_NAME))
.detach();
})
.add_fidl_service(|stream: fidl_avdtp::PeerManagerRequestStream| {
fasync::Task::local(process_request_stream(
stream,
fidl_avdtp::PeerManagerMarker::SERVICE_NAME,
))
.detach();
});
fs.take_and_serve_directory_handle().expect("Unable to serve ServiceFs requests");
let service_fs_task = fasync::Task::spawn(fs.collect::<()>());
// Connect to the services A2DP requires.
let _avrcp_svc = connect_to_protocol::<fidl_avrcp::PeerManagerMarker>()?;
let _profile_svc = connect_to_protocol::<ProfileMarker>()?;
let _cobalt_svc = connect_to_protocol::<LoggerFactoryMarker>()?;
let _audio_device_svc = connect_to_protocol::<AudioDeviceEnumeratorMarker>()?;
let _session_audio_svc = connect_to_protocol::<SessionAudioConsumerFactoryMarker>()?;
let _publisher_svc = connect_to_protocol::<PublisherMarker>()?;
let _codec_factory_svc = connect_to_protocol::<CodecFactoryMarker>()?;
let _audio_svc = connect_to_protocol::<AudioMarker>()?;
let _allocator_svc = connect_to_protocol::<AllocatorMarker>()?;
let _tracing_svc = connect_to_protocol::<RegistryMarker>()?;
// A2DP also relies on the Lifecycle service which is provided by its child `bt-avrcp-target`.
let _lifecycle_svc = connect_to_protocol::<LifecycleMarker>()?;
service_fs_task.await;
Ok(())
}
| 44.106667 | 98 | 0.716143 |
7187d46b7c29dd87298c207d3dd0db3add84d3a3 | 49,684 | //! This module implements message expansion consistent with the
//! hash-to-curve RFC drafts 7 through 10
use core::{
fmt::{self, Debug, Formatter},
marker::PhantomData,
};
use digest::{BlockInput, Digest, ExtendableOutputDirty, Update, XofReader};
use crate::generic_array::{
typenum::{Unsigned, U32},
ArrayLength, GenericArray,
};
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
const OVERSIZE_DST_SALT: &[u8] = b"H2C-OVERSIZE-DST-";
/// The domain separation tag for a message expansion.
///
/// Implements [section 5.4.3 of `draft-irtf-cfrg-hash-to-curve-12`][dst].
///
/// [dst]: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-12#section-5.4.3
#[derive(Debug)]
enum ExpandMsgDst<'x, L: ArrayLength<u8>> {
/// DST produced by hashing a very long (> 255 chars) input DST.
Hashed(GenericArray<u8, L>),
/// A raw input DST (<= 255 chars).
Raw(&'x [u8]),
}
impl<'x, L: ArrayLength<u8>> ExpandMsgDst<'x, L> {
/// Produces a DST for use with `expand_message_xof`.
pub fn process_xof<H>(dst: &'x [u8]) -> Self
where
H: Default + Update + ExtendableOutputDirty,
{
if dst.len() > 255 {
let mut data = GenericArray::<u8, L>::default();
H::default()
.chain(OVERSIZE_DST_SALT)
.chain(&dst)
.finalize_xof_dirty()
.read(&mut data);
Self::Hashed(data)
} else {
Self::Raw(dst)
}
}
/// Produces a DST for use with `expand_message_xmd`.
pub fn process_xmd<H>(dst: &'x [u8]) -> Self
where
H: Digest<OutputSize = L>,
{
if dst.len() > 255 {
Self::Hashed(H::new().chain(OVERSIZE_DST_SALT).chain(&dst).finalize())
} else {
Self::Raw(dst)
}
}
/// Returns the raw bytes of the DST.
pub fn data(&'x self) -> &'x [u8] {
match self {
Self::Hashed(arr) => &arr[..],
Self::Raw(buf) => buf,
}
}
/// Returns the length of the DST.
pub fn len(&'x self) -> usize {
match self {
Self::Hashed(_) => L::to_usize(),
Self::Raw(buf) => buf.len(),
}
}
}
/// A trait for message expansion methods supported by hash-to-curve.
pub trait ExpandMessage: for<'x> InitExpandMessage<'x> {
// This intermediate is likely only necessary until GATs allow
// associated types with lifetimes.
}
/// Trait for constructing a new message expander.
pub trait InitExpandMessage<'x> {
/// The state object used during message expansion.
type Expander: ExpandMessageState<'x>;
/// Initializes a message expander.
fn init_expand(message: &[u8], dst: &'x [u8], len_in_bytes: usize) -> Self::Expander;
}
// Automatically derive trait
impl<X: for<'x> InitExpandMessage<'x>> ExpandMessage for X {}
/// Trait for types implementing the `expand_message` interface for `hash_to_field`.
pub trait ExpandMessageState<'x> {
/// Reads bytes from the generated output.
fn read_into(&mut self, output: &mut [u8]) -> usize;
/// Retrieves the number of bytes remaining in the generator.
fn remain(&self) -> usize;
#[cfg(feature = "alloc")]
/// Constructs a `Vec` containing the remaining bytes of the output.
fn into_vec(mut self) -> Vec<u8>
where
Self: Sized,
{
let mut result = alloc::vec![0u8; self.remain()];
self.read_into(&mut result[..]);
result
}
}
/// A generator for the output of `expand_message_xof` for a given
/// extendable hash function, message, DST, and output length.
///
/// Implements [section 5.4.2 of `draft-irtf-cfrg-hash-to-curve-12`][expand_message_xof]
/// with `k = 128`.
///
/// [expand_message_xof]: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-12#section-5.4.2
pub struct ExpandMsgXof<H: ExtendableOutputDirty> {
hash: <H as ExtendableOutputDirty>::Reader,
remain: usize,
}
impl<H: ExtendableOutputDirty> Debug for ExpandMsgXof<H> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("ExpandMsgXof")
.field("remain", &self.remain)
.finish()
}
}
impl<'x, H> ExpandMessageState<'x> for ExpandMsgXof<H>
where
H: ExtendableOutputDirty,
{
fn read_into(&mut self, output: &mut [u8]) -> usize {
let len = self.remain.min(output.len());
self.hash.read(&mut output[..len]);
self.remain -= len;
len
}
fn remain(&self) -> usize {
self.remain
}
}
impl<'x, H> InitExpandMessage<'x> for ExpandMsgXof<H>
where
H: Default + Update + ExtendableOutputDirty,
{
type Expander = Self;
fn init_expand(message: &[u8], dst: &[u8], len_in_bytes: usize) -> Self {
// Use U32 here for k = 128.
let dst = ExpandMsgDst::<U32>::process_xof::<H>(dst);
let hash = H::default()
.chain(message)
.chain((len_in_bytes as u16).to_be_bytes())
.chain(dst.data())
.chain([dst.len() as u8])
.finalize_xof_dirty();
Self {
hash,
remain: len_in_bytes,
}
}
}
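// A minimal usage sketch (separate from the RFC test vectors at the bottom of this
// file): expand a message with SHAKE128 and drain the requested bytes, checking the
// remaining-byte bookkeeping. `Shake128` is the same dev-dependency the existing test
// module uses; the message and DST strings here are arbitrary placeholders.
#[test]
fn expand_msg_xof_usage_sketch() {
    use sha3::Shake128;
    let mut exp = ExpandMsgXof::<Shake128>::init_expand(b"test message", b"APP-V01-CS01-SKETCH", 32);
    assert_eq!(exp.remain(), 32);
    let mut out = [0u8; 32];
    assert_eq!(exp.read_into(&mut out), 32);
    assert_eq!(exp.remain(), 0);
}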
/// Constructor for `expand_message_xmd` for a given digest hash function, message, DST,
/// and output length.
///
/// Implements [section 5.4.1 of `draft-irtf-cfrg-hash-to-curve-12`][expand_message_xmd].
///
/// [expand_message_xmd]: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-12#section-5.4.1
#[derive(Debug)]
pub struct ExpandMsgXmd<H: Digest>(PhantomData<H>);
/// A generator for the output of `expand_message_xmd` for a given
/// digest hash function, message, DST, and output length.
///
/// Implements [section 5.4.1 of `draft-irtf-cfrg-hash-to-curve-12`][expand_message_xmd].
///
/// [expand_message_xmd]: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-12#section-5.4.1
pub struct ExpandMsgXmdState<'x, H: Digest> {
dst: ExpandMsgDst<'x, H::OutputSize>,
b_0: GenericArray<u8, H::OutputSize>,
b_i: GenericArray<u8, H::OutputSize>,
i: usize,
b_offs: usize,
remain: usize,
}
impl<H: Digest> Debug for ExpandMsgXmdState<'_, H> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("ExpandMsgXmdState")
.field("remain", &self.remain)
.finish()
}
}
impl<'x, H> InitExpandMessage<'x> for ExpandMsgXmd<H>
where
H: Digest + BlockInput,
{
type Expander = ExpandMsgXmdState<'x, H>;
fn init_expand(message: &[u8], dst: &'x [u8], len_in_bytes: usize) -> Self::Expander {
let hash_size = <H as Digest>::OutputSize::to_usize();
let ell = (len_in_bytes + hash_size - 1) / hash_size;
if ell > 255 {
panic!("Invalid ExpandMsgXmd usage: ell > 255");
}
let dst = ExpandMsgDst::process_xmd::<H>(dst);
let b_0 = H::new()
.chain(GenericArray::<u8, <H as BlockInput>::BlockSize>::default())
.chain(message)
.chain((len_in_bytes as u16).to_be_bytes())
.chain([0u8])
.chain(dst.data())
.chain([dst.len() as u8])
.finalize();
    // Initialize the running block with b_1 = H(b_0 || 0x01 || DST_prime).
let b_i = H::new()
.chain(&b_0)
.chain([1u8])
.chain(dst.data())
.chain([dst.len() as u8])
.finalize();
ExpandMsgXmdState {
dst,
b_0,
b_i,
i: 2,
b_offs: 0,
remain: len_in_bytes,
}
}
}
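// For reference, the recurrence implemented by `read_into` below (draft-irtf-cfrg-hash-to-curve-12,
// section 5.4.1):
//   b_0 = H(Z_pad || msg || I2OSP(len_in_bytes, 2) || 0x00 || DST_prime)
//   b_1 = H(b_0 || 0x01 || DST_prime)
//   b_i = H((b_0 XOR b_(i-1)) || I2OSP(i, 1) || DST_prime)   for 2 <= i <= ell
// where Z_pad is a zero block of the hash's input block size and DST_prime = DST || I2OSP(len(DST), 1)
// (an over-length DST is first reduced by hashing). The state keeps `b_0` and the current `b_i`,
// emitting bytes of `b_i` on demand.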
impl<'x, H> ExpandMessageState<'x> for ExpandMsgXmdState<'x, H>
where
H: Digest + BlockInput,
{
fn read_into(&mut self, output: &mut [u8]) -> usize {
let read_len = self.remain.min(output.len());
let mut offs = 0;
let hash_size = H::OutputSize::to_usize();
while offs < read_len {
let b_offs = self.b_offs;
let mut copy_len = hash_size - b_offs;
if copy_len > 0 {
copy_len = copy_len.min(read_len - offs);
output[offs..(offs + copy_len)]
.copy_from_slice(&self.b_i[b_offs..(b_offs + copy_len)]);
offs += copy_len;
self.b_offs = b_offs + copy_len;
} else {
let mut b_prev_xor = self.b_0.clone();
for j in 0..hash_size {
b_prev_xor[j] ^= self.b_i[j];
}
self.b_i = H::new()
.chain(b_prev_xor)
.chain([self.i as u8])
.chain(self.dst.data())
.chain([self.dst.len() as u8])
.finalize();
self.b_offs = 0;
self.i += 1;
}
}
self.remain -= read_len;
read_len
}
fn remain(&self) -> usize {
self.remain
}
}
#[cfg(feature = "alloc")]
#[cfg(test)]
mod tests {
use super::*;
use sha2::{Sha256, Sha512};
use sha3::{Shake128, Shake256};
/// From <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-12#appendix-K.1>
#[test]
fn expand_message_xmd_works_for_draft12_testvectors_sha256() {
let dst = b"QUUX-V01-CS02-with-expander-SHA256-128";
let msg = b"";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"68a985b87eb6b46952128911f2a4412bbc302a9d759667f8\
7f7a21d803f07235",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"d8ccab23b5985ccea865c6c97b6e5b8350e794e603b4b979\
02f53a8a0d605615",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"eff31487c770a893cfb36f912fbfcbff40d5661771ca4b2c\
b4eafe524333f5c1",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"b23a1d2b4d97b2ef7785562a7e8bac7eed54ed6e97e29aa5\
1bfe3f12ddad1ff9",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"4623227bcc01293b8c130bf771da8c298dede7383243dc09\
93d2d94823958c4c",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"af84c27ccfd45d41914fdff5df25293e221afc53d8ad2ac0\
6d5e3e29485dadbee0d121587713a3e0dd4d5e69e93eb7cd4f5df4\
cd103e188cf60cb02edc3edf18eda8576c412b18ffb658e3dd6ec8\
49469b979d444cf7b26911a08e63cf31f9dcc541708d3491184472\
c2c29bb749d4286b004ceb5ee6b9a7fa5b646c993f0ced",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"abba86a6129e366fc877aab32fc4ffc70120d8996c88aee2\
fe4b32d6c7b6437a647e6c3163d40b76a73cf6a5674ef1d890f95b\
664ee0afa5359a5c4e07985635bbecbac65d747d3d2da7ec2b8221\
b17b0ca9dc8a1ac1c07ea6a1e60583e2cb00058e77b7b72a298425\
cd1b941ad4ec65e8afc50303a22c0f99b0509b4c895f40",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"ef904a29bffc4cf9ee82832451c946ac3c8f8058ae97d8d6\
29831a74c6572bd9ebd0df635cd1f208e2038e760c4994984ce73f\
0d55ea9f22af83ba4734569d4bc95e18350f740c07eef653cbb9f8\
7910d833751825f0ebefa1abe5420bb52be14cf489b37fe1a72f7d\
e2d10be453b2c9d9eb20c7e3f6edc5a60629178d9478df",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"80be107d0884f0d881bb460322f0443d38bd222db8bd0b0a\
5312a6fedb49c1bbd88fd75d8b9a09486c60123dfa1d73c1cc3169\
761b17476d3c6b7cbbd727acd0e2c942f4dd96ae3da5de368d26b3\
2286e32de7e5a8cb2949f866a0b80c58116b29fa7fabb3ea7d520e\
e603e0c25bcaf0b9a5e92ec6a1fe4e0391d1cdbce8c68a",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"546aff5444b5b79aa6148bd81728704c32decb73a3ba76e9\
e75885cad9def1d06d6792f8a7d12794e90efed817d96920d72889\
6a4510864370c207f99bd4a608ea121700ef01ed879745ee3e4cee\
f777eda6d9e5e38b90c86ea6fb0b36504ba4a45d22e86f6db5dd43\
d98a294bebb9125d5b794e9d2a81181066eb954966a487",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
}
/// From <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-12#appendix-K.2>
#[test]
fn expand_message_xmd_works_for_draft12_testvectors_sha256_long_dst() {
let dst = b"QUUX-V01-CS02-with-expander-SHA256-128-long-DST-111111\
111111111111111111111111111111111111111111111111111111\
111111111111111111111111111111111111111111111111111111\
111111111111111111111111111111111111111111111111111111\
1111111111111111111111111111111111111111";
let msg = b"";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"e8dc0c8b686b7ef2074086fbdd2f30e3f8bfbd3bdf177f73\
f04b97ce618a3ed3",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"52dbf4f36cf560fca57dedec2ad924ee9c266341d8f3d6af\
e5171733b16bbb12",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"35387dcf22618f3728e6c686490f8b431f76550b0b2c61cb\
c1ce7001536f4521",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"01b637612bb18e840028be900a833a74414140dde0c4754c\
198532c3a0ba42bc",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"20cce7033cabc5460743180be6fa8aac5a103f56d481cf36\
9a8accc0c374431b",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"14604d85432c68b757e485c8894db3117992fc57e0e136f7\
1ad987f789a0abc287c47876978e2388a02af86b1e8d1342e5ce4f\
7aaa07a87321e691f6fba7e0072eecc1218aebb89fb14a0662322d\
5edbd873f0eb35260145cd4e64f748c5dfe60567e126604bcab1a3\
ee2dc0778102ae8a5cfd1429ebc0fa6bf1a53c36f55dfc",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"1a30a5e36fbdb87077552b9d18b9f0aee16e80181d5b951d\
0471d55b66684914aef87dbb3626eaabf5ded8cd0686567e503853\
e5c84c259ba0efc37f71c839da2129fe81afdaec7fbdc0ccd4c794\
727a17c0d20ff0ea55e1389d6982d1241cb8d165762dbc39fb0cee\
4474d2cbbd468a835ae5b2f20e4f959f56ab24cd6fe267",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"d2ecef3635d2397f34a9f86438d772db19ffe9924e28a1ca\
f6f1c8f15603d4028f40891044e5c7e39ebb9b31339979ff33a424\
9206f67d4a1e7c765410bcd249ad78d407e303675918f20f26ce6d\
7027ed3774512ef5b00d816e51bfcc96c3539601fa48ef1c07e494\
bdc37054ba96ecb9dbd666417e3de289d4f424f502a982",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"ed6e8c036df90111410431431a232d41a32c86e296c05d42\
6e5f44e75b9a50d335b2412bc6c91e0a6dc131de09c43110d9180d\
0a70f0d6289cb4e43b05f7ee5e9b3f42a1fad0f31bac6a625b3b5c\
50e3a83316783b649e5ecc9d3b1d9471cb5024b7ccf40d41d1751a\
04ca0356548bc6e703fca02ab521b505e8e45600508d32",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"78b53f2413f3c688f07732c10e5ced29a17c6a16f717179f\
fbe38d92d6c9ec296502eb9889af83a1928cd162e845b0d3c5424e\
83280fed3d10cffb2f8431f14e7a23f4c68819d40617589e4c4116\
9d0b56e0e3535be1fd71fbb08bb70c5b5ffed953d6c14bf7618b35\
fc1f4c4b30538236b4b08c9fbf90462447a8ada60be495",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
}
/// From <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-12#appendix-K.3>
#[test]
fn expand_message_xmd_works_for_draft12_testvectors_sha512() {
let dst = b"QUUX-V01-CS02-with-expander-SHA512-256";
let msg = b"";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"6b9a7312411d92f921c6f68ca0b6380730a1a4d982c50721\
1a90964c394179ba",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"0da749f12fbe5483eb066a5f595055679b976e93abe9be6f\
0f6318bce7aca8dc",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"087e45a86e2939ee8b91100af1583c4938e0f5fc6c9db4b1\
07b83346bc967f58",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"7336234ee9983902440f6bc35b348352013becd88938d2af\
ec44311caf8356b3",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"57b5f7e766d5be68a6bfe1768e3c2b7f1228b3e4b3134956\
dd73a59b954c66f4",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"41b037d1734a5f8df225dd8c7de38f851efdb45c372887be\
655212d07251b921b052b62eaed99b46f72f2ef4cc96bfaf254ebb\
bec091e1a3b9e4fb5e5b619d2e0c5414800a1d882b62bb5cd1778f\
098b8eb6cb399d5d9d18f5d5842cf5d13d7eb00a7cff859b605da6\
78b318bd0e65ebff70bec88c753b159a805d2c89c55961",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"7f1dddd13c08b543f2e2037b14cefb255b44c83cc397c178\
6d975653e36a6b11bdd7732d8b38adb4a0edc26a0cef4bb4521713\
5456e58fbca1703cd6032cb1347ee720b87972d63fbf232587043e\
d2901bce7f22610c0419751c065922b488431851041310ad659e4b\
23520e1772ab29dcdeb2002222a363f0c2b1c972b3efe1",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"3f721f208e6199fe903545abc26c837ce59ac6fa45733f1b\
aaf0222f8b7acb0424814fcb5eecf6c1d38f06e9d0a6ccfbf85ae6\
12ab8735dfdf9ce84c372a77c8f9e1c1e952c3a61b7567dd069301\
6af51d2745822663d0c2367e3f4f0bed827feecc2aaf98c949b5ed\
0d35c3f1023d64ad1407924288d366ea159f46287e61ac",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"b799b045a58c8d2b4334cf54b78260b45eec544f9f2fb5bd\
12fb603eaee70db7317bf807c406e26373922b7b8920fa29142703\
dd52bdf280084fb7ef69da78afdf80b3586395b433dc66cde048a2\
58e476a561e9deba7060af40adf30c64249ca7ddea79806ee5beb9\
a1422949471d267b21bc88e688e4014087a0b592b695ed",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"05b0bfef265dcee87654372777b7c44177e2ae4c13a27f10\
3340d9cd11c86cb2426ffcad5bd964080c2aee97f03be1ca18e30a\
1f14e27bc11ebbd650f305269cc9fb1db08bf90bfc79b42a952b46\
daf810359e7bc36452684784a64952c343c52e5124cd1f71d474d5\
197fefc571a92929c9084ffe1112cf5eea5192ebff330b",
)
.unwrap();
assert_eq!(
ExpandMsgXmd::<Sha512>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
}
/// From <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-12#appendix-K.4>
#[test]
fn expand_message_xof_works_for_draft12_testvectors_shake128() {
let dst = b"QUUX-V01-CS02-with-expander-SHAKE128";
let msg = b"";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"86518c9cd86581486e9485aa74ab35ba150d1c75c88e26b7\
043e44e2acd735a2",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"8696af52a4d862417c0763556073f47bc9b9ba43c99b5053\
05cb1ec04a9ab468",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"912c58deac4821c3509dbefa094df54b34b8f5d01a191d1d\
3108a2c89077acca",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"1adbcc448aef2a0cebc71dac9f756b22e51839d348e031e6\
3b33ebb50faeaf3f",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"df3447cc5f3e9a77da10f819218ddf31342c310778e0e4ef\
72bbaecee786a4fe",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"7314ff1a155a2fb99a0171dc71b89ab6e3b2b7d59e38e644\
19b8b6294d03ffee42491f11370261f436220ef787f8f76f5b26bd\
cd850071920ce023f3ac46847744f4612b8714db8f5db83205b2e6\
25d95afd7d7b4d3094d3bdde815f52850bb41ead9822e08f22cf41\
d615a303b0d9dde73263c049a7b9898208003a739a2e57",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"c952f0c8e529ca8824acc6a4cab0e782fc3648c563ddb00d\
a7399f2ae35654f4860ec671db2356ba7baa55a34a9d7f79197b60\
ddae6e64768a37d699a78323496db3878c8d64d909d0f8a7de4927\
dcab0d3dbbc26cb20a49eceb0530b431cdf47bc8c0fa3e0d88f53b\
318b6739fbed7d7634974f1b5c386d6230c76260d5337a",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"19b65ee7afec6ac06a144f2d6134f08eeec185f1a890fe34\
e68f0e377b7d0312883c048d9b8a1d6ecc3b541cb4987c26f45e0c\
82691ea299b5e6889bbfe589153016d8131717ba26f07c3c14ffbe\
f1f3eff9752e5b6183f43871a78219a75e7000fbac6a7072e2b83c\
790a3a5aecd9d14be79f9fd4fb180960a3772e08680495",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"ca1b56861482b16eae0f4a26212112362fcc2d76dcc80c93\
c4182ed66c5113fe41733ed68be2942a3487394317f3379856f482\
2a611735e50528a60e7ade8ec8c71670fec6661e2c59a09ed36386\
513221688b35dc47e3c3111ee8c67ff49579089d661caa29db1ef1\
0eb6eace575bf3dc9806e7c4016bd50f3c0e2a6481ee6d",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"9d763a5ce58f65c91531b4100c7266d479a5d9777ba76169\
3d052acd37d149e7ac91c796a10b919cd74a591a1e38719fb91b72\
03e2af31eac3bff7ead2c195af7d88b8bc0a8adf3d1e90ab9bed6d\
dc2b7f655dd86c730bdeaea884e73741097142c92f0e3fc1811b69\
9ba593c7fbd81da288a29d423df831652e3a01a9374999",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
}
/// From <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-12#appendix-K.5>
#[test]
fn expand_message_xof_works_for_draft12_testvectors_shake128_long_dst() {
let dst = b"QUUX-V01-CS02-with-expander-SHAKE128-long-DST-11111111\
111111111111111111111111111111111111111111111111111111\
111111111111111111111111111111111111111111111111111111\
111111111111111111111111111111111111111111111111111111\
1111111111111111111111111111111111111111";
let msg = b"";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"827c6216330a122352312bccc0c8d6e7a146c5257a776dbd\
9ad9d75cd880fc53",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"690c8d82c7213b4282c6cb41c00e31ea1d3e2005f93ad19b\
bf6da40f15790c5c",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"979e3a15064afbbcf99f62cc09fa9c85028afcf3f825eb07\
11894dcfc2f57057",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"c5a9220962d9edc212c063f4f65b609755a1ed96e62f9db5\
d1fd6adb5a8dc52b",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"f7b96a5901af5d78ce1d071d9c383cac66a1dfadb508300e\
c6aeaea0d62d5d62",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"3890dbab00a2830be398524b71c2713bbef5f4884ac2e6f0\
70b092effdb19208c7df943dc5dcbaee3094a78c267ef276632ee2\
c8ea0c05363c94b6348500fae4208345dd3475fe0c834c2beac7fa\
7bc181692fb728c0a53d809fc8111495222ce0f38468b11becb15b\
32060218e285c57a60162c2c8bb5b6bded13973cd41819",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"41b7ffa7a301b5c1441495ebb9774e2a53dbbf4e54b9a1af\
6a20fd41eafd69ef7b9418599c5545b1ee422f363642b01d4a5344\
9313f68da3e49dddb9cd25b97465170537d45dcbdf92391b5bdff3\
44db4bd06311a05bca7dcd360b6caec849c299133e5c9194f4e15e\
3e23cfaab4003fab776f6ac0bfae9144c6e2e1c62e7d57",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"55317e4a21318472cd2290c3082957e1242241d9e0d04f47\
026f03401643131401071f01aa03038b2783e795bdfa8a3541c194\
ad5de7cb9c225133e24af6c86e748deb52e560569bd54ef4dac034\
65111a3a44b0ea490fb36777ff8ea9f1a8a3e8e0de3cf0880b4b2f\
8dd37d3a85a8b82375aee4fa0e909f9763319b55778e71",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"19fdd2639f082e31c77717ac9bb032a22ff0958382b2dbb3\
9020cdc78f0da43305414806abf9a561cb2d0067eb2f7bc544482f\
75623438ed4b4e39dd9e6e2909dd858bd8f1d57cd0fce2d3150d90\
aa67b4498bdf2df98c0100dd1a173436ba5d0df6be1defb0b2ce55\
ccd2f4fc05eb7cb2c019c35d5398b85adc676da4238bc7",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"945373f0b3431a103333ba6a0a34f1efab2702efde41754c\
4cb1d5216d5b0a92a67458d968562bde7fa6310a83f53dda138368\
0a276a283438d58ceebfa7ab7ba72499d4a3eddc860595f63c93b1\
c5e823ea41fc490d938398a26db28f61857698553e93f0574eb8c5\
017bfed6249491f9976aaa8d23d9485339cc85ca329308",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake128>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
}
/// From <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-12#appendix-K.6>
#[test]
fn expand_message_xof_works_for_draft12_testvectors_shake256() {
let dst = b"QUUX-V01-CS02-with-expander-SHAKE256";
let msg = b"";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"2ffc05c48ed32b95d72e807f6eab9f7530dd1c2f013914c8\
fed38c5ccc15ad76",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"b39e493867e2767216792abce1f2676c197c0692aed06156\
0ead251821808e07",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"245389cf44a13f0e70af8665fe5337ec2dcd138890bb7901\
c4ad9cfceb054b65",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"719b3911821e6428a5ed9b8e600f2866bcf23c8f0515e52d\
6c6c019a03f16f0e",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x20;
let uniform_bytes = hex::decode(
"9181ead5220b1963f1b5951f35547a5ea86a820562287d6c\
a4723633d17ccbbc",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"7a1361d2d7d82d79e035b8880c5a3c86c5afa719478c007d\
96e6c88737a3f631dd74a2c88df79a4cb5e5d9f7504957c70d669e\
c6bfedc31e01e2bacc4ff3fdf9b6a00b17cc18d9d72ace7d6b81c2\
e481b4f73f34f9a7505dccbe8f5485f3d20c5409b0310093d5d649\
2dea4e18aa6979c23c8ea5de01582e9689612afbb353df",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abc";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"a54303e6b172909783353ab05ef08dd435a558c3197db0c1\
32134649708e0b9b4e34fb99b92a9e9e28fc1f1d8860d85897a8e0\
21e6382f3eea10577f968ff6df6c45fe624ce65ca25932f679a42a\
404bc3681efe03fcd45ef73bb3a8f79ba784f80f55ea8a3c367408\
f30381299617f50c8cf8fbb21d0f1e1d70b0131a7b6fbe",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"abcdef0123456789";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"e42e4d9538a189316e3154b821c1bafb390f78b2f010ea40\
4e6ac063deb8c0852fcd412e098e231e43427bd2be1330bb47b403\
9ad57b30ae1fc94e34993b162ff4d695e42d59d9777ea18d3848d9\
d336c25d2acb93adcad009bcfb9cde12286df267ada283063de0bb\
1505565b2eb6c90e31c48798ecdc71a71756a9110ff373",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\
qqqqqqqqqqqqqqqqqqqqqqqqq";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"4ac054dda0a38a65d0ecf7afd3c2812300027c8789655e47\
aecf1ecc1a2426b17444c7482c99e5907afd9c25b991990490bb9c\
686f43e79b4471a23a703d4b02f23c669737a886a7ec28bddb92c3\
a98de63ebf878aa363a501a60055c048bea11840c4717beae7eee2\
8c3cfa42857b3d130188571943a7bd747de831bd6444e0",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
let msg = b"a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let len_in_bytes = 0x80;
let uniform_bytes = hex::decode(
"09afc76d51c2cccbc129c2315df66c2be7295a231203b8ab\
2dd7f95c2772c68e500bc72e20c602abc9964663b7a03a389be128\
c56971ce81001a0b875e7fd17822db9d69792ddf6a23a151bf4700\
79c518279aef3e75611f8f828994a9988f4a8a256ddb8bae161e65\
8d5a2a09bcfe839c6396dc06ee5c8ff3c22d3b1f9deb7e",
)
.unwrap();
assert_eq!(
ExpandMsgXof::<Shake256>::init_expand(msg, dst, len_in_bytes).into_vec(),
uniform_bytes
);
}
}
| 38.724864 | 110 | 0.649847 |
39db262ac7f113a6770376ad7830a303b27d078c | 5,253 | extern crate hex;
extern crate log;
extern crate reqwest;
extern crate rustyline;
extern crate select;
extern crate serde;
extern crate serde_json;
extern crate simple_logger;
mod cache;
mod config;
mod library;
mod trove;
mod trove_feed;
mod util;
//use std::str;
//use std::fs::{self};//, DirEntry};
use config::Config;
use trove::Trove;
use std::process::Command;
use rustyline::error::ReadlineError;
use rustyline::Editor;
use crate::cache::{Cache};
/*
* Sketch of API
* use icli::{ICLI, Cmd};
*
* struct ICLI {
* history: Vec<String>,
* prompt: String,
* }
*
* enum CResult {
* Subshell(icli),
* Ok(String),
* Err(error),
* }
*
* trait Cmd {
* fn name() -> String;
* fn execute(Vec<&str> args) -> CResult;
* }
*
* let cli = ICLI::new()
* .add(trove.cmds)
* .add(steam.cmds)
* .add(monthly.cmds)
* .add(downloader.cmds);
* cli.run();
*/
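// A minimal, compilable rendition of the sketch above, kept deliberately simple; the names and
// shapes (`Cmd`, `CResult`) are illustrative only and nothing here is wired into `main` yet.
#[allow(dead_code)]
enum CResult {
  Ok(String),
  Err(String),
}
#[allow(dead_code)]
trait Cmd {
  fn name(&self) -> String;
  fn execute(&self, args: Vec<&str>) -> CResult;
}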
fn main() {
simple_logger::init_with_level(log::Level::Error).unwrap();
let config = Config::new("./config.toml");
let cache = Cache::new(&config.system.cache);
let mut trove = match Trove::new(&config, &cache) {
Ok(unwrapped) => unwrapped,
Err(error) => panic!("Error constructing trove: {}", error),
};
let stray = trove.stray_downloads();
println!("In downloads: {}", stray.len());
trove.move_downloads();
trove.update_download_status();
  // `()` can be used when no completer is required
  let mut rl = Editor::<()>::new();
if rl.load_history(".tarnish-history").is_err() {
println!("No previous history.");
}
loop {
let readline = rl.readline(">> ");
match readline {
Ok(line) => {
let mut words = line.split_ascii_whitespace();
match words.next() {
Some("cache_all_metadata") => {
if let Err(err) = trove.cache_all_metadata(&cache) {
println!("cache_all_metadata: {}", err);
}
}
Some("cache_thumbnails") => {
trove.cache_thumbnails(&cache);
}
Some("cache_screenshots") => {
trove.cache_screenshots(&cache);
}
Some("update") => {
trove.update_download_status();
}
Some("download") => {
let number: usize = words.next().unwrap().parse::<usize>().unwrap();
let game = trove.not_downloaded()[number];
println!("Downloading: {}", trove.format(game));
let chrome = Command::new(r"C:\Program Files (x86)\Google\Chrome\Application\chrome.exe")
.arg(trove.root.join(&game.downloads["windows"]))
.status()
.expect("Failed to launch Chrome.");
}
Some("downloaded") => {
trove
.downloaded()
.iter()
.zip(0..)
.for_each(|(p, i)| println!("{} {}", i, trove.format(p)));
}
Some("not_downloaded") => {
trove
.not_downloaded()
.iter()
.zip(0..)
.for_each(|(p, i)| println!("{} {}", i, trove.format(p)));
}
Some("exit") => break,
Some(_) => {}
None => break,
}
rl.add_history_entry(line.as_str());
println!("Line: {}", line);
}
Err(ReadlineError::Interrupted) => {
println!("CTRL-C");
break;
}
Err(ReadlineError::Eof) => {
println!("CTRL-D");
break;
}
Err(err) => {
println!("Error: {:?}", err);
break;
}
}
}
rl.save_history(".tarnish-history").unwrap();
//let data: Map<String, Value> = serde_json::from_str(data.as_str()).unwrap();
//data.keys().for_each(|k| println!("{}", k));
//println!("{}", data.has_key("displayItemData"));
/*let data: &Map<String, Value> = match &data.get("standardProducts").unwrap()[0] {
Object(o) => o,
_ => panic!("Fail!")
};*/
//let data: Product = serde_json::from_value(data.get("standardProducts").unwrap()[0].clone()).unwrap();
//data.keys().for_each(|k| println!("{}", k));
//println!("{:?}", data);
//println!("{}", data.is_array());
//data.members().for_each(|k| println!("{}", k));
// nodes have name, attrs, children
// attrs are tuples
/*for node in doc.find(Name("script")) {
println!("{:?}", node.name());
for attr in node.attrs() {
println!(" {:?} {:?}", attr.0, attr.1);
}
}*/
//doc.find(Element).for_each(|node| println!("{:?}", node.name()));
}
| 32.425926 | 113 | 0.464116 |
f99624f654a1c1cbeca1c56372428d5af755a43a | 2,621 | #![deny(rust_2018_idioms)]
use conch_parser::ast::PipeableCommand;
use conch_parser::ast::PipeableCommand::*;
use std::collections::HashMap;
use std::sync::Arc;
mod support;
pub use self::support::*;
type CmdArc = PipeableCommand<&'static str, MockCmd, MockCmd, Arc<MockCmd>>;
#[derive(Clone)]
struct MockEnvArc {
inner:
HashMap<&'static str, Arc<dyn Spawn<MockEnvArc, Error = MockErr> + 'static + Send + Sync>>,
}
impl MockEnvArc {
fn new() -> Self {
Self {
inner: HashMap::new(),
}
}
}
impl FunctionEnvironment for MockEnvArc {
type FnName = &'static str;
type Fn = Arc<dyn Spawn<MockEnvArc, Error = MockErr> + 'static + Send + Sync>;
fn function(&self, name: &Self::FnName) -> Option<&Self::Fn> {
self.inner.get(name)
}
fn set_function(&mut self, name: Self::FnName, func: Self::Fn) {
self.inner.insert(name, func);
}
}
async fn run(cmd: CmdArc) -> Result<ExitStatus, MockErr> {
let mut env = MockEnvArc::new();
let future = cmd.spawn(&mut env).await?;
drop(env);
Ok(future.await)
}
#[tokio::test]
async fn smoke() {
let exit = ExitStatus::Code(42);
assert_eq!(run(Simple(mock_status(exit))).await, Ok(exit));
assert_eq!(run(Compound(mock_status(exit))).await, Ok(exit));
let mut env = MockEnvArc::new();
let fn_name = "fn_name";
assert!(env.function(&fn_name).is_none());
let first: CmdArc = FunctionDef(fn_name, mock_status(exit).into());
assert_eq!(first.spawn(&mut env).await.unwrap().await, EXIT_SUCCESS);
let first_registered = env.function(&fn_name).expect("no fn registered").clone();
// Test overwriting the function with a different one
let second: CmdArc = FunctionDef(fn_name, mock_status(exit).into());
assert_eq!(second.spawn(&mut env).await.unwrap().await, EXIT_SUCCESS);
let second_registered = env.function(&fn_name).expect("no fn registered").clone();
assert_eq!(exit, first_registered.spawn(&mut env).await.unwrap().await);
assert_eq!(exit, second_registered.spawn(&mut env).await.unwrap().await);
}
#[tokio::test]
async fn should_propagate_errors() {
assert_eq!(
run(Simple(mock_error(true))).await,
Err(MockErr::Fatal(true))
);
assert_eq!(
run(Simple(mock_error(false))).await,
Err(MockErr::Fatal(false))
);
assert_eq!(
run(Compound(mock_error(true))).await,
Err(MockErr::Fatal(true))
);
assert_eq!(
run(Compound(mock_error(false))).await,
Err(MockErr::Fatal(false))
);
// NB: FunctionDefinitions can't have errors
}
| 28.802198 | 99 | 0.644792 |
01218f39164f61c769c33fe505e869d362ddd810 | 4,996 | use super::{prelude::*, *};
use crate::{errors::*, html, http, jsrt, models::*};
use lazy_static::lazy_static;
use regex::Regex;
use serde_json::Value;
pub struct Mht;
impl Extractor for Mht {
fn index(&self, more: u32) -> Result<Vec<Detail>> {
let url = format!("https://www.manhuatai.com/all_p{}.html", more + 1);
let mut fll: LinkListConverter<Detail> =
LinkListConverter::new(&url, "a.sdiv[title]", vec![]);
fll.set_href_prefix("http://www.manhuatai.com")
.set_text_in_dom("li.title");
fll.try_get_list()?.result()
}
fn fetch_sections(&self, detail: &mut Detail) -> Result<()> {
let url = &detail.url;
let mut fll: LinkListConverter<Section> =
LinkListConverter::new(&url, "ul[name=\"topiccount\"] > li > a", vec![]);
fll.set_href_prefix("http://www.manhuatai.com")
.text_prefix_finder(&|doc| {
let name =
html::find_attr(doc, "meta[property=\"og:title\"]", "content")?.to_string();
Ok(name)
});
let section_list = fll.try_get_list()?.result()?;
detail.section_list = section_list;
detail.reverse_section_list();
Ok(())
}
fn fetch_pages(&self, section: &mut Section) -> Result<()> {
let mut helper = http::SendHelper::new();
helper.send_get(§ion.url)?;
match helper.result() {
http::Result::Ok(html_s) => {
let caps = RE_CODE
.captures(&html_s)
.ok_or(err_msg(format!("no script code found, {}", §ion.url)))?;
let code = caps
.get(1)
.ok_or(err_msg(format!(
"no encryption code block found, {}",
§ion.url
)))?
.as_str();
let wrapper_code = format!("{}\n{}", code, &DECRYPT_BLOCK);
                // Hand the decoding work off to the external JS runtime (JSRT).
let output = jsrt::read_output(&wrapper_code)?;
let v: Value = serde_json::from_str(&output)?;
let count = v["count"].as_u64().ok_or(err_msg("no count found"))?;
let path = v["path"].as_str().ok_or(err_msg("no path found"))?;
let _start = v["start"].as_u64().ok_or(err_msg("no start found"))?;
for i in 1..(count + 1) {
section.add_page(Page::new(
(i - 1) as u32,
&format!(
"http://mhpic.mh51.com/comic/{}/{}.jpg-mht.middle.webp",
&path, i
),
));
}
if !section.has_name() {
let doc = html::parse_document(&html_s);
section.name =
html::find_text(&doc, ".mh_readtitle > h1 > strong")?.to_string();
}
Ok(())
}
http::Result::Err(e) => Err(e),
}
}
}
lazy_static! {
static ref RE_CODE: Regex = Regex::new(r#"var\s*(mh_info\s*=\{[^\}]+\})"#).unwrap();
}
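// The constant below is JavaScript source that `fetch_pages` appends to the page's `mh_info`
// block and hands to an external JS runtime via `jsrt::read_output`. It stubs the `window` and
// `__cr` objects the site's packed script expects, lets that script run its own decoding of
// `mh_info.imgpath`, and finally prints the fields this extractor needs (`totalimg`, `imgpath`,
// `startimg`) as a single JSON object on stdout.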
const DECRYPT_BLOCK: &'static str = r#"
window = {
"\x70\x72\x6f\x6d\x70\x74": function (e) { new Function(e.replace(/./g, function (e) { return String.fromCharCode(e.charCodeAt(0) - 1) }))() }
}
__cr = {
"\x64\x65\x63\x6f\x64\x65": "ni`jogp/jnhqbui>ni`jogp/jnhqbui/sfqmbdf)0/0h-gvodujpo)b*|sfuvso!Tusjoh/gspnDibsDpef)b/dibsDpefBu)1*.ni`jogp/qbhfje&21*~*"
}
eval(function (e, o, i, r, n, t) {
if (n = function (e) {
return (e < 10 ? "" : n(parseInt(e / 10))) + ((e %= 10) > 35 ? String.fromCharCode(e + 29) : e.toString(36))
}, !"".replace(/^/, String)) {
for (; i--;) t[n(i)] = r[i] || n(i);
r = [function (e) {
return t[e]
}], n = function () {
return "\\w+"
}, i = 1
}
for (; i--;) r[i] && (e = e.replace(new RegExp("\\b" + n(i) + "\\b", "g"), r[i]));
return e
}('4["\\1\\6\\0\\5\\1\\9"](8["\\3\\2\\7\\0\\3\\2"])', 0, 10, "x6f|x70|x65|x64|window|x6d|x72|x63|__cr|x74".split("|"), 0, {})
)
console.log(JSON.stringify({count: mh_info.totalimg, path: mh_info.imgpath, start: mh_info.startimg}))
"#;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_mht_index() {
let list = Mht {}.index(0).unwrap();
assert_eq!(36, list.len());
}
#[test]
fn test_mht_fetch_sections() {
let mut detail = Detail::new("斗破苍穹", "https://www.manhuatai.com/doupocangqiong/");
Mht {}.fetch_sections(&mut detail).unwrap();
assert_eq!(743, detail.section_list.len());
}
#[test]
fn test_mht_fetch_pages() {
let mut section = Section::new(
"斗破苍穹第735话 唐火儿(下)",
"https://www.manhuatai.com/doupocangqiong/735.html",
);
Mht {}.fetch_pages(&mut section).unwrap();
assert_eq!(8, section.page_list.len());
}
}
| 36.735294 | 154 | 0.494396 |
f59fdb142eb10ca9c32c42f527a7305b23f8bb39 | 22,499 | // Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::Result;
use tera::{self, Context, Tera};
use crate::{
context::{CrateContext, WorkspaceContext},
error::RazeError,
planning::PlannedBuild,
rendering::{BuildRenderer, FileOutputs, RenderDetails},
};
use std::error::Error;
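// Flattens a `tera::Error` and its `source()` chain into one message, one cause per line,
// so template rendering failures surface their root cause in the returned `RazeError`.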
macro_rules! unwind_tera_error {
($err:ident) => {{
let mut messages = vec![$err.to_string()];
let mut cause = $err.source();
while let Some(e) = cause {
messages.push(e.to_string());
cause = e.source();
}
messages.join("\n|__")
}};
}
#[derive(Default)]
pub struct BazelRenderer {
internal_renderer: Tera,
}
impl BazelRenderer {
pub fn new() -> Self {
// Configure tera with a bogus template dir: We don't want any runtime template support
let mut internal_renderer = Tera::new("src/not/a/dir/*").unwrap();
internal_renderer
.add_raw_templates(vec![
(
"templates/crate.BUILD.template",
include_str!("templates/crate.BUILD.template"),
),
(
"templates/partials/build_script.template",
include_str!("templates/partials/build_script.template"),
),
(
"templates/partials/common_attrs.template",
include_str!("templates/partials/common_attrs.template"),
),
(
"templates/partials/header.template",
include_str!("templates/partials/header.template"),
),
(
"templates/partials/remote_crates_patch.template",
include_str!("templates/partials/remote_crates_patch.template"),
),
(
"templates/partials/rust_binary.template",
include_str!("templates/partials/rust_binary.template"),
),
(
"templates/partials/rust_library.template",
include_str!("templates/partials/rust_library.template"),
),
(
"templates/partials/targeted_dependencies.template",
include_str!("templates/partials/targeted_dependencies.template"),
),
(
"templates/remote_crates.bzl.template",
include_str!("templates/remote_crates.bzl.template"),
),
(
"templates/workspace.BUILD.template",
include_str!("templates/workspace.BUILD.template"),
),
])
.unwrap();
Self {
internal_renderer,
}
}
pub fn render_crate(
&self,
workspace_context: &WorkspaceContext,
package: &CrateContext,
) -> Result<String, tera::Error> {
let mut context = Context::new();
context.insert("workspace", &workspace_context);
context.insert("crate", &package);
self
.internal_renderer
.render("templates/crate.BUILD.template", &context)
}
pub fn render_vendored_aliases(
&self,
workspace_context: &WorkspaceContext,
all_packages: &[CrateContext],
) -> Result<String, tera::Error> {
let mut context = Context::new();
context.insert("workspace", &workspace_context);
context.insert("crates", &all_packages);
self
.internal_renderer
.render("templates/workspace.BUILD.template", &context)
}
pub fn render_remote_crate(
&self,
workspace_context: &WorkspaceContext,
package: &CrateContext,
) -> Result<String, tera::Error> {
let mut context = Context::new();
context.insert("workspace", &workspace_context);
context.insert("crate", &package);
self
.internal_renderer
.render("templates/crate.BUILD.template", &context)
}
pub fn render_remote_aliases(
&self,
workspace_context: &WorkspaceContext,
all_packages: &[CrateContext],
) -> Result<String, tera::Error> {
let mut context = Context::new();
context.insert("workspace", &workspace_context);
context.insert("crates", &all_packages);
self
.internal_renderer
.render("templates/workspace.BUILD.template", &context)
}
pub fn render_bzl_fetch(
&self,
workspace_context: &WorkspaceContext,
all_packages: &[CrateContext],
) -> Result<String, tera::Error> {
let mut context = Context::new();
context.insert("workspace", &workspace_context);
context.insert("crates", &all_packages);
self
.internal_renderer
.render("templates/remote_crates.bzl.template", &context)
}
pub fn render_aliases(
&self,
planned_build: &PlannedBuild,
render_details: &RenderDetails,
is_remote_mode: bool,
) -> Result<Vec<FileOutputs>> {
let mut file_outputs = Vec::new();
for member_path in planned_build.workspace_context.workspace_members.iter() {
let all_packages: Vec<CrateContext> = planned_build
.crate_contexts
.iter()
.filter(|ctx| {
ctx.is_binary_dependency || ctx.workspace_member_dependents.contains(member_path)
})
.cloned()
.collect();
let rendered_alias_build_file = if is_remote_mode {
self
.render_remote_aliases(&planned_build.workspace_context, &all_packages)
.map_err(|e| RazeError::Rendering {
crate_name_opt: None,
message: unwind_tera_error!(e),
})?
} else {
self
.render_vendored_aliases(&planned_build.workspace_context, &all_packages)
.map_err(|e| RazeError::Rendering {
crate_name_opt: None,
message: unwind_tera_error!(e),
})?
};
file_outputs.push(FileOutputs {
path: render_details
.cargo_root
.join(member_path)
.join(&render_details.workspace_member_output_dir)
.join("BUILD.bazel"),
contents: rendered_alias_build_file,
});
}
Ok(file_outputs)
}
}
fn include_additional_build_file(
package: &CrateContext,
existing_contents: String,
) -> Result<String> {
match &package.raze_settings.additional_build_file {
Some(file_path) => {
let additional_content =
std::fs::read_to_string(file_path).map_err(|e| RazeError::Rendering {
crate_name_opt: Some(package.pkg_name.to_owned()),
message: format!("failed to read additional_build_file: {}", e),
})?;
Ok(format!(
"{}\n# Additional content from {}\n{}",
existing_contents, file_path, additional_content
))
},
None => Ok(existing_contents),
}
}
impl BuildRenderer for BazelRenderer {
fn render_planned_build(
&mut self,
render_details: &RenderDetails,
planned_build: &PlannedBuild,
) -> Result<Vec<FileOutputs>> {
let &PlannedBuild {
ref workspace_context,
ref crate_contexts,
..
} = planned_build;
let mut file_outputs = Vec::new();
let path_prefix = render_details
.bazel_root
.as_path()
.join(&render_details.path_prefix);
for package in crate_contexts {
let rendered_crate_build_file =
self
.render_crate(&workspace_context, &package)
.map_err(|e| RazeError::Rendering {
crate_name_opt: None,
message: unwind_tera_error!(e),
})?;
let final_crate_build_file =
include_additional_build_file(package, rendered_crate_build_file)?;
file_outputs.push(FileOutputs {
path: path_prefix.as_path().join(&package.expected_build_path),
contents: final_crate_build_file,
})
}
file_outputs.extend(self.render_aliases(planned_build, render_details, false)?);
Ok(file_outputs)
}
fn render_remote_planned_build(
&mut self,
render_details: &RenderDetails,
planned_build: &PlannedBuild,
) -> Result<Vec<FileOutputs>> {
let &PlannedBuild {
ref workspace_context,
ref crate_contexts,
..
} = planned_build;
let mut file_outputs: Vec<FileOutputs> = Vec::new();
let path_prefix = render_details
.bazel_root
.as_path()
.join(&render_details.path_prefix);
let buildfile_suffix = &render_details.vendored_buildfile_name;
// N.B. File needs to exist so that contained xyz-1.2.3.BUILD can be referenced
file_outputs.push(FileOutputs {
path: path_prefix.as_path().join("remote").join(buildfile_suffix),
contents: String::new(),
});
for package in crate_contexts {
let rendered_crate_build_file = self
.render_remote_crate(&workspace_context, &package)
.map_err(|e| RazeError::Rendering {
crate_name_opt: Some(package.pkg_name.to_owned()),
message: unwind_tera_error!(e),
})?;
let final_crate_build_file =
include_additional_build_file(package, rendered_crate_build_file)?;
file_outputs.push(FileOutputs {
path: path_prefix.as_path().join(&package.expected_build_path),
contents: final_crate_build_file,
})
}
file_outputs.extend(self.render_aliases(planned_build, render_details, true)?);
let bzl_fetch_file_path = path_prefix.as_path().join("crates.bzl");
let rendered_bzl_fetch_file = self
.render_bzl_fetch(&workspace_context, &crate_contexts)
.map_err(|e| RazeError::Rendering {
crate_name_opt: None,
message: unwind_tera_error!(e),
})?;
file_outputs.push(FileOutputs {
path: bzl_fetch_file_path,
contents: rendered_bzl_fetch_file,
});
// Optionally write out a unique lockfile for Cargo Raze. This happens in the case
// where a project has specified binary dependencies.
if let Some(lockfile) = &planned_build.lockfile {
file_outputs.push(FileOutputs {
path: path_prefix.as_path().join("Cargo.raze.lock"),
contents: lockfile.to_string(),
});
}
// Ensure there is always a `BUILD.bazel` file to accompany `crates.bzl`
let crates_bzl_pkg_file = path_prefix.as_path().join("BUILD.bazel");
let outputs_contain_crates_bzl_build_file = file_outputs
.iter()
.any(|output| output.path == crates_bzl_pkg_file);
if !outputs_contain_crates_bzl_build_file {
file_outputs.push(FileOutputs {
path: crates_bzl_pkg_file,
contents: self
.internal_renderer
.render("templates/partials/header.template", &Context::new())?,
});
}
Ok(file_outputs)
}
}
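// Illustrative only: how a caller is typically expected to drive `BazelRenderer`. The
// `render_details` and `planned_build` values come from the planning phase, and persisting
// the returned `FileOutputs` to disk is left to the caller (shown here as a commented sketch).
//
//   let mut renderer = BazelRenderer::new();
//   let outputs = renderer.render_planned_build(&render_details, &planned_build)?;
//   for FileOutputs { path, contents } in outputs {
//     if let Some(parent) = path.parent() {
//       std::fs::create_dir_all(parent)?;
//     }
//     std::fs::write(&path, contents)?;
//   }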
#[cfg(test)]
mod tests {
use hamcrest2::{core::expect, prelude::*};
use semver::Version;
use crate::{
context::*,
planning::PlannedBuild,
rendering::{FileOutputs, RenderDetails},
settings::CrateSettings,
testing::basic_lock_contents,
};
use super::*;
use std::{path::PathBuf, str::FromStr};
fn dummy_render_details(buildfile_suffix: &str) -> RenderDetails {
RenderDetails {
cargo_root: PathBuf::from("/some/cargo/root"),
path_prefix: PathBuf::from("./some_render_prefix"),
workspace_member_output_dir: "cargo".to_string(),
vendored_buildfile_name: buildfile_suffix.to_owned(),
bazel_root: PathBuf::from("/some/bazel/root"),
}
}
fn dummy_planned_build(crate_contexts: Vec<CrateContext>) -> PlannedBuild {
PlannedBuild {
workspace_context: WorkspaceContext {
workspace_path: "//workspace/prefix".to_owned(),
gen_workspace_prefix: "".to_owned(),
output_buildfile_suffix: "BUILD".to_owned(),
// This will typically resolve to:
// `/some/cargo/root/some/crate`
workspace_members: vec![PathBuf::from("some/crate")],
},
crate_contexts,
lockfile: None,
}
}
fn dummy_binary_crate_with_name(buildfile_suffix: &str) -> CrateContext {
CrateContext {
pkg_name: "test-binary".to_owned(),
pkg_version: Version::parse("1.1.1").unwrap(),
edition: "2015".to_owned(),
features: vec!["feature1".to_owned(), "feature2".to_owned()].to_owned(),
expected_build_path: format!("vendor/test-binary-1.1.1/{}", buildfile_suffix),
license: LicenseData::default(),
raze_settings: CrateSettings::default(),
default_deps: CrateDependencyContext {
dependencies: Vec::new(),
proc_macro_dependencies: Vec::new(),
data_dependencies: Vec::new(),
build_dependencies: Vec::new(),
build_proc_macro_dependencies: Vec::new(),
build_data_dependencies: Vec::new(),
dev_dependencies: Vec::new(),
aliased_dependencies: Vec::new(),
},
targeted_deps: Vec::new(),
workspace_member_dependents: Vec::new(),
is_workspace_member_dependency: false,
is_binary_dependency: false,
workspace_path_to_crate: "@raze__test_binary__1_1_1//".to_owned(),
targets: vec![BuildableTarget {
name: "some_binary".to_owned(),
kind: "bin".to_owned(),
path: "bin/main.rs".to_owned(),
edition: "2015".to_owned(),
}],
build_script_target: None,
links: None,
source_details: SourceDetails {
git_data: None,
},
sha256: None,
registry_url: "https://crates.io/api/v1/crates/test-binary/1.1.1/download".to_string(),
lib_target_name: None,
}
}
  fn dummy_binary_crate() -> CrateContext {
    dummy_binary_crate_with_name("BUILD")
  }
fn dummy_library_crate_with_name(buildfile_suffix: &str) -> CrateContext {
CrateContext {
pkg_name: "test-library".to_owned(),
pkg_version: Version::parse("1.1.1").unwrap(),
edition: "2015".to_owned(),
license: LicenseData::default(),
raze_settings: CrateSettings::default(),
features: vec!["feature1".to_owned(), "feature2".to_owned()].to_owned(),
expected_build_path: format!("vendor/test-library-1.1.1/{}", buildfile_suffix),
default_deps: CrateDependencyContext {
dependencies: Vec::new(),
proc_macro_dependencies: Vec::new(),
data_dependencies: Vec::new(),
build_dependencies: Vec::new(),
build_proc_macro_dependencies: Vec::new(),
build_data_dependencies: Vec::new(),
dev_dependencies: Vec::new(),
aliased_dependencies: Vec::new(),
},
targeted_deps: Vec::new(),
workspace_member_dependents: Vec::new(),
is_workspace_member_dependency: false,
is_binary_dependency: false,
workspace_path_to_crate: "@raze__test_library__1_1_1//".to_owned(),
targets: vec![BuildableTarget {
name: "some_library".to_owned(),
kind: "lib".to_owned(),
path: "path/lib.rs".to_owned(),
edition: "2015".to_owned(),
}],
build_script_target: None,
links: Some("ssh2".to_owned()),
source_details: SourceDetails {
git_data: None,
},
sha256: None,
registry_url: "https://crates.io/api/v1/crates/test-binary/1.1.1/download".to_string(),
lib_target_name: Some("test_library".to_owned()),
}
}
  fn dummy_library_crate() -> CrateContext {
    dummy_library_crate_with_name("BUILD")
  }
fn extract_contents_matching_path(file_outputs: &Vec<FileOutputs>, file_name: &str) -> String {
println!("Known files :{:?}", file_outputs);
let mut matching_files_contents = file_outputs
.iter()
.filter(|output| output.path.starts_with(file_name))
.map(|output| output.contents.to_owned())
.collect::<Vec<String>>();
assert_that!(matching_files_contents.len(), equal_to(1));
matching_files_contents.pop().unwrap()
}
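  // Test helper: renders the given crate contexts using the dummy render details and
  // dummy planned build defined above.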
fn render_crates_for_test_with_name(
buildfile_suffix: &str,
crate_contexts: Vec<CrateContext>,
) -> Vec<FileOutputs> {
BazelRenderer::new()
.render_planned_build(
&dummy_render_details(buildfile_suffix),
&dummy_planned_build(crate_contexts),
)
.unwrap()
}
fn render_crates_for_test(crate_contexts: Vec<CrateContext>) -> Vec<FileOutputs> {
    render_crates_for_test_with_name("BUILD", crate_contexts)
}
#[test]
fn all_plans_contain_root_build_file() {
let file_outputs = render_crates_for_test(Vec::new());
let file_names = file_outputs
.iter()
.map(|output| output.path.display().to_string())
.collect::<Vec<String>>();
assert_that!(
&file_names,
contains(vec![
"/some/cargo/root/some/crate/cargo/BUILD.bazel".to_string()
])
.exactly()
);
}
#[test]
fn crates_generate_build_files() {
let file_outputs = render_crates_for_test(vec![dummy_library_crate()]);
let file_names = file_outputs
.iter()
.map(|output| output.path.display().to_string())
.collect::<Vec<String>>();
assert_that!(
&file_names,
contains(vec![
"/some/bazel/root/./some_render_prefix/vendor/test-library-1.1.1/BUILD".to_string(),
"/some/cargo/root/some/crate/cargo/BUILD.bazel".to_string(),
])
.exactly()
);
}
#[test]
fn crates_generate_build_files_bazel() {
let file_outputs = render_crates_for_test_with_name(
"BUILD.bazel",
vec![dummy_library_crate_with_name("BUILD.bazel")],
);
let file_names = file_outputs
.iter()
.map(|output| output.path.display().to_string())
.collect::<Vec<String>>();
assert_that!(
&file_names,
contains(vec![
"/some/bazel/root/./some_render_prefix/vendor/test-library-1.1.1/BUILD.bazel".to_string(),
"/some/cargo/root/some/crate/cargo/BUILD.bazel".to_string(),
])
.exactly()
);
}
#[test]
fn workspace_member_dependencies_get_build_aliases() {
let mut context = dummy_library_crate();
context.is_workspace_member_dependency = true;
context
.workspace_member_dependents
.push(PathBuf::from("some/crate"));
let file_outputs = render_crates_for_test(vec![context]);
let workspace_crate_build_contents = extract_contents_matching_path(
&file_outputs,
"/some/cargo/root/some/crate/cargo/BUILD.bazel",
);
expect(
workspace_crate_build_contents.contains("alias"),
format!(
"expected root build contents to contain an alias for test-library crate, but it just \
contained [{}]",
workspace_crate_build_contents
),
)
.unwrap();
}
#[test]
fn non_workspace_crates_dont_get_build_aliases() {
let mut non_workspace_crate = dummy_library_crate();
non_workspace_crate.workspace_member_dependents = Vec::new();
non_workspace_crate.is_workspace_member_dependency = false;
let file_outputs = render_crates_for_test(vec![non_workspace_crate]);
let root_build_contents = extract_contents_matching_path(
&file_outputs,
"/some/cargo/root/some/crate/cargo/BUILD.bazel",
);
expect(
!root_build_contents.contains("alias"),
format!(
"expected root build contents not to contain an alias for test-library crate, but it just \
contained [{}]",
root_build_contents
),
)
.unwrap();
}
#[test]
fn binaries_get_rust_binary_rules() {
let file_outputs = render_crates_for_test(vec![dummy_binary_crate()]);
let crate_build_contents = extract_contents_matching_path(
&file_outputs,
"/some/bazel/root/./some_render_prefix/vendor/test-binary-1.1.1/BUILD",
);
expect(
crate_build_contents.contains("rust_binary("),
format!(
"expected crate build contents to contain rust_binary, but it just contained [{}]",
crate_build_contents
),
)
.unwrap();
}
#[test]
fn libraries_get_rust_library_rules() {
let file_outputs = render_crates_for_test(vec![dummy_library_crate()]);
let crate_build_contents = extract_contents_matching_path(
&file_outputs,
"/some/bazel/root/./some_render_prefix/vendor/test-library-1.1.1/BUILD",
);
expect(
crate_build_contents.contains("rust_library("),
format!(
"expected crate build contents to contain rust_library, but it just contained [{}]",
crate_build_contents
),
)
.unwrap();
}
#[test]
fn additional_build_file_missing_file_failure() {
let render_result = BazelRenderer::new().render_planned_build(
&dummy_render_details("BUILD"),
&dummy_planned_build(vec![CrateContext {
raze_settings: CrateSettings {
additional_build_file: Some("non-existent-file".into()),
..Default::default()
},
..dummy_library_crate()
}]),
);
assert_that!(render_result, err());
}
#[test]
fn additional_build_file_included() {
let file_outputs = render_crates_for_test(vec![CrateContext {
raze_settings: CrateSettings {
additional_build_file: Some("README.md".into()),
..Default::default()
},
..dummy_library_crate()
}]);
let crate_build_contents = extract_contents_matching_path(
&file_outputs,
"/some/bazel/root/./some_render_prefix/vendor/test-library-1.1.1/BUILD",
);
expect(
crate_build_contents.contains("# Additional content from README.md"),
format!(
"expected crate build contents to include additional_build_file, but it just contained \
[{}]",
crate_build_contents
),
)
.unwrap();
}
#[test]
fn test_generate_lockfile() {
let render_details = dummy_render_details("BUILD.bazel");
let mut planned_build = dummy_planned_build(Vec::new());
planned_build.lockfile = Some(cargo_lock::Lockfile::from_str(basic_lock_contents()).unwrap());
let render_result = BazelRenderer::new()
.render_remote_planned_build(&render_details, &planned_build)
.unwrap();
    // Ensure that the lockfiles for binary dependencies get written out properly
assert!(render_result.iter().any(|file_output| {
file_output.path == PathBuf::from("/some/bazel/root/./some_render_prefix/Cargo.raze.lock")
&& file_output.contents
== indoc::formatdoc! { r#"
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "test"
version = "0.0.1"
"# }
}))
}
}
| 31.20527 | 99 | 0.650607 |
62f5541273ef7717b069da0c41b7c1e4fb4007ff | 6,795 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use arrow::datatypes::{DataType, Schema};
use arrow::record_batch::RecordBatch;
use datafusion_common::Result;
use datafusion_expr::ColumnarValue;
use std::fmt::{Debug, Display};
use arrow::array::{make_array, Array, ArrayRef, BooleanArray, MutableArrayData};
use arrow::compute::{and_kleene, filter_record_batch, is_not_null, SlicesIterator};
use std::any::Any;
/// Expression that can be evaluated against a RecordBatch.
/// A physical expression knows its type, nullability, and how to evaluate itself.
pub trait PhysicalExpr: Send + Sync + Display + Debug {
/// Returns the physical expression as [`Any`](std::any::Any) so that it can be
/// downcast to a specific implementation.
fn as_any(&self) -> &dyn Any;
/// Get the data type of this expression, given the schema of the input
fn data_type(&self, input_schema: &Schema) -> Result<DataType>;
/// Determine whether this expression is nullable, given the schema of the input
fn nullable(&self, input_schema: &Schema) -> Result<bool>;
/// Evaluate an expression against a RecordBatch
fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue>;
/// Evaluate an expression against a RecordBatch after first applying a
/// validity array
fn evaluate_selection(
&self,
batch: &RecordBatch,
selection: &BooleanArray,
) -> Result<ColumnarValue> {
let tmp_batch = filter_record_batch(batch, selection)?;
let tmp_result = self.evaluate(&tmp_batch)?;
// All values from the `selection` filter are true.
if batch.num_rows() == tmp_batch.num_rows() {
return Ok(tmp_result);
}
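        // Otherwise, scatter the filtered results back to their original row positions,
        // filling the unselected rows with nulls.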
if let ColumnarValue::Array(a) = tmp_result {
let result = scatter(selection, a.as_ref())?;
Ok(ColumnarValue::Array(result))
} else {
Ok(tmp_result)
}
}
}
/// Scatter the `truthy` array by a boolean mask. Where the mask is `true`, the next value of
/// `truthy` is taken; where the mask is `false`, a null value is filled in.
///
/// # Arguments
/// * `mask` - Boolean values used to determine where to put the `truthy` values
/// * `truthy` - All values of this array are scattered into the final result according to `mask`.
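///
/// For example (illustrative): with `mask = [true, false, true]` and `truthy = [1, 10]`,
/// the result is `[Some(1), None, Some(10)]`; see the unit tests below for more cases.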
fn scatter(mask: &BooleanArray, truthy: &dyn Array) -> Result<ArrayRef> {
let truthy = truthy.data();
// update the mask so that any null values become false
// (SlicesIterator doesn't respect nulls)
let mask = and_kleene(mask, &is_not_null(mask)?)?;
let mut mutable = MutableArrayData::new(vec![truthy], true, mask.len());
    // SlicesIterator yields only the runs of true values, so the gaps it leaves behind
    // need to be filled with nulls
// keep track of how much is filled
let mut filled = 0;
// keep track of current position we have in truthy array
let mut true_pos = 0;
SlicesIterator::new(&mask).for_each(|(start, end)| {
// the gap needs to be filled with nulls
if start > filled {
mutable.extend_nulls(start - filled);
}
// fill with truthy values
let len = end - start;
mutable.extend(0, true_pos, true_pos + len);
true_pos += len;
filled = end;
});
// the remaining part is falsy
if filled < mask.len() {
mutable.extend_nulls(mask.len() - filled);
}
let data = mutable.freeze();
Ok(make_array(data))
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use super::*;
use arrow::array::Int32Array;
use datafusion_common::Result;
#[test]
fn scatter_int() -> Result<()> {
let truthy = Arc::new(Int32Array::from(vec![1, 10, 11, 100]));
let mask = BooleanArray::from(vec![true, true, false, false, true]);
// the output array is expected to be the same length as the mask array
let expected =
Int32Array::from_iter(vec![Some(1), Some(10), None, None, Some(11)]);
let result = scatter(&mask, truthy.as_ref())?;
let result = result.as_any().downcast_ref::<Int32Array>().unwrap();
assert_eq!(&expected, result);
Ok(())
}
#[test]
fn scatter_int_end_with_false() -> Result<()> {
let truthy = Arc::new(Int32Array::from(vec![1, 10, 11, 100]));
let mask = BooleanArray::from(vec![true, false, true, false, false, false]);
// output should be same length as mask
let expected =
Int32Array::from_iter(vec![Some(1), None, Some(10), None, None, None]);
let result = scatter(&mask, truthy.as_ref())?;
let result = result.as_any().downcast_ref::<Int32Array>().unwrap();
assert_eq!(&expected, result);
Ok(())
}
#[test]
fn scatter_with_null_mask() -> Result<()> {
let truthy = Arc::new(Int32Array::from(vec![1, 10, 11]));
let mask: BooleanArray = vec![Some(false), None, Some(true), Some(true), None]
.into_iter()
.collect();
// output should treat nulls as though they are false
let expected = Int32Array::from_iter(vec![None, None, Some(1), Some(10), None]);
let result = scatter(&mask, truthy.as_ref())?;
let result = result.as_any().downcast_ref::<Int32Array>().unwrap();
assert_eq!(&expected, result);
Ok(())
}
#[test]
fn scatter_boolean() -> Result<()> {
let truthy = Arc::new(BooleanArray::from(vec![false, false, false, true]));
let mask = BooleanArray::from(vec![true, true, false, false, true]);
// the output array is expected to be the same length as the mask array
let expected = BooleanArray::from_iter(vec![
Some(false),
Some(false),
None,
None,
Some(false),
]);
let result = scatter(&mask, truthy.as_ref())?;
let result = result.as_any().downcast_ref::<BooleanArray>().unwrap();
assert_eq!(&expected, result);
Ok(())
}
}
| 37.131148 | 99 | 0.635173 |
d512a60ba84221195cb9c0efc2d68ff8ae79cdd2 | 11 | PhoneBook
| 5.5 | 10 | 0.818182 |
1ccec34f5de15f3fcb7f3cc0829581a59c3e9579 | 51,341 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![warn(missing_docs)]
use {
crate::{
modern_backend::input_reports_reader::InputReportsReader, synthesizer,
usages::hid_usage_to_input3_key,
},
anyhow::{format_err, Context as _, Error},
async_trait::async_trait,
fidl::endpoints::ServerEnd,
fidl::Error as FidlError,
fidl_fuchsia_input::Key,
fidl_fuchsia_input_report::{
ContactInputReport, DeviceDescriptor, InputDeviceRequest, InputDeviceRequestStream,
InputReport, InputReportsReaderMarker, KeyboardInputReport, TouchInputReport,
TOUCH_MAX_CONTACTS,
},
fidl_fuchsia_ui_input::{KeyboardReport, Touch},
futures::{future, pin_mut, StreamExt, TryFutureExt},
std::convert::TryFrom as _,
};
/// Implements the `synthesizer::InputDevice` trait, and the server side of the
/// `fuchsia.input.report.InputDevice` FIDL protocol. Used by
/// `modern_backend::InputDeviceRegistry`.
///
/// # Notes
/// * Some of the methods of `fuchsia.input.report.InputDevice` are not relevant to
/// input injection, so this implementation does not support them:
/// * `GetFeatureReport` and `SetFeatureReport` are for sensors.
/// * `SendOutputReport` provides a way to change keyboard LED state.
/// If these FIDL methods are invoked, `InputDevice::serve_reports()` will resolve
/// to Err.
/// * This implementation does not support multiple calls to `GetInputReportsReader`,
/// since:
/// * The ideal semantics for multiple calls are not obvious, and
/// * Each `InputDevice` has a single FIDL client (an input pipeline implementation),
/// and the current input pipeline implementation is happy to use a single
/// `InputReportsReader` for the lifetime of the `InputDevice`.
pub(super) struct InputDevice {
request_stream: InputDeviceRequestStream,
/// For responding to `fuchsia.input.report.InputDevice.GetDescriptor()` requests.
descriptor: DeviceDescriptor,
/// FIFO queue of reports to be consumed by calls to
/// `fuchsia.input.report.InputReportsReader.ReadInputReports()`.
/// Populated by calls to `synthesizer::InputDevice` trait methods.
reports: Vec<InputReport>,
}
#[async_trait(?Send)]
impl synthesizer::InputDevice for self::InputDevice {
fn media_buttons(
&mut self,
_volume_up: bool,
_volume_down: bool,
_mic_mute: bool,
_reset: bool,
_pause: bool,
_camera_disable: bool,
_time: u64,
) -> Result<(), Error> {
Err(format_err!("TODO: implement media_buttons()"))
}
// TODO(fxbug.dev/63973): remove dependency on HID usage codes.
fn key_press(&mut self, report: KeyboardReport, time: u64) -> Result<(), Error> {
self.key_press_internal(report, time, Self::convert_keyboard_report_to_keys)
}
fn key_press_raw(&mut self, report: KeyboardReport, time: u64) -> Result<(), Error> {
self.key_press_internal(report, time, Self::convert_keyboard_report_to_keys_no_transform)
}
// TODO(fxbug.dev/63973): remove reference to HID usage codes.
fn key_press_usage(&mut self, usage: Option<u32>, time: u64) -> Result<(), Error> {
self.key_press(KeyboardReport { pressed_keys: usage.into_iter().collect() }, time)
}
fn tap(&mut self, pos: Option<(u32, u32)>, time: u64) -> Result<(), Error> {
        let fingers = pos.map(|(x, y)| {
            // Note: we use finger_id `1` for consistency with the legacy_backend.
            vec![Touch { finger_id: 1, x: x as i32, y: y as i32, width: 0, height: 0 }]
        });
self.multi_finger_tap(fingers, time)
}
fn multi_finger_tap(&mut self, fingers: Option<Vec<Touch>>, time: u64) -> Result<(), Error> {
let num_fingers = match &fingers {
Some(fingers_vec) => fingers_vec.len(),
None => 0,
};
if num_fingers > usize::try_from(TOUCH_MAX_CONTACTS).context("usize is at least 32 bits")? {
return Err(format_err!(
"Got {} fingers, but max is {}",
num_fingers,
TOUCH_MAX_CONTACTS
));
}
self.multi_finger_tap_internal(
TouchInputReport {
contacts: Some(fingers.map_or_else(Vec::new, |fingers_vec| {
fingers_vec
.into_iter()
.map(|finger| ContactInputReport {
contact_id: Some(finger.finger_id),
position_x: Some(i64::from(finger.x)),
position_y: Some(i64::from(finger.y)),
contact_width: Some(i64::from(finger.width)),
contact_height: Some(i64::from(finger.height)),
..ContactInputReport::EMPTY
})
.collect()
})),
pressed_buttons: Some(vec![]),
..TouchInputReport::EMPTY
},
time,
)
}
/// Returns a `Future` which resolves when all `InputReport`s for this device
/// have been sent to a `fuchsia.input.InputReportsReader` client, or when
/// an error occurs.
///
/// # Resolves to
/// * `Ok(())` if all reports were written successfully
/// * `Err` otherwise. For example:
/// * The `fuchsia.input.InputDevice` client sent an invalid request.
/// * A FIDL error occurred while trying to read a FIDL request.
/// * A FIDL error occurred while trying to write a FIDL response.
///
/// # Corner cases
/// Resolves to `Err` if the `fuchsia.input.InputDevice` client did not call
/// `GetInputReportsReader()`, even if no `InputReport`s were queued.
///
/// # Note
/// When the `Future` resolves, `InputReports` may still be sitting unread in the
/// channel to the `fuchsia.input.InputReportsReader` client. (The client will
/// typically be an input pipeline implementation.)
async fn serve_reports(mut self: Box<Self>) -> Result<(), Error> {
// Destructure fields into independent variables, to avoid "partial-move" issues.
let Self { request_stream, descriptor, reports } = *self;
// Process `fuchsia.input.report.InputDevice` requests, waiting for the `InputDevice`
// client to provide a `ServerEnd<InputReportsReader>` by calling `GetInputReportsReader()`.
let mut input_reports_reader_server_end_stream = request_stream
.filter_map(|r| future::ready(Self::handle_device_request(r, &descriptor)));
let input_reports_reader_fut = {
let reader_server_end = input_reports_reader_server_end_stream
.next()
.await
                .ok_or(format_err!("stream ended without a call to GetInputReportsReader"))?
.context("handling InputDeviceRequest")?;
InputReportsReader {
request_stream: reader_server_end
.into_stream()
.context("converting ServerEnd<InputReportsReader>")?,
reports,
}
.into_future()
};
pin_mut!(input_reports_reader_fut);
// Create a `Future` to keep serving the `fuchsia.input.report.InputDevice` protocol.
// This time, receiving a `ServerEnd<InputReportsReaderMarker>` will be an `Err`.
let input_device_server_fut = async {
match input_reports_reader_server_end_stream.next().await {
Some(Ok(_server_end)) => {
// There are no obvious "best" semantics for how to handle multiple
// `GetInputReportsReader` calls, and there is no current need to
// do so. Instead of taking a guess at what the client might want
// in such a case, just return `Err`.
Err(format_err!(
"InputDevice does not support multiple GetInputReportsReader calls"
))
}
Some(Err(e)) => Err(e.context("handling InputDeviceRequest")),
None => Ok(()),
}
};
pin_mut!(input_device_server_fut);
// Now, process both `fuchsia.input.report.InputDevice` requests, and
// `fuchsia.input.report.InputReportsReader` requests. And keep processing
// `InputReportsReader` requests even if the `InputDevice` connection
// is severed.
future::select(
input_device_server_fut.and_then(|_: ()| future::pending()),
input_reports_reader_fut,
)
.await
.factor_first()
.0
}
}
impl InputDevice {
/// Creates a new `InputDevice` that will:
/// a) process requests from `request_stream`, and
    /// b) respond to `GetDescriptor` calls with `descriptor`
///
/// The `InputDevice` initially has no reports queued.
pub(super) fn new(
request_stream: InputDeviceRequestStream,
descriptor: DeviceDescriptor,
) -> Self {
Self { request_stream, descriptor, reports: vec![] }
}
/// Converts a [KeyboardReport] into a sequence of key presses, using the supplied
/// key-to-HID usage transformation function.
fn key_press_internal(
&mut self,
report: KeyboardReport,
time: u64,
transform: fn(r: &KeyboardReport) -> Result<Vec<Key>, Error>,
) -> Result<(), Error> {
self.reports.push(InputReport {
event_time: Some(i64::try_from(time).context("converting time to i64")?),
keyboard: Some(KeyboardInputReport {
pressed_keys3: Some(transform(&report)?),
..KeyboardInputReport::EMPTY
}),
..InputReport::EMPTY
});
Ok(())
}
fn multi_finger_tap_internal(
&mut self,
touch: TouchInputReport,
time: u64,
) -> Result<(), Error> {
self.reports.push(InputReport {
event_time: Some(i64::try_from(time).context("converting time to i64")?),
touch: Some(touch),
..InputReport::EMPTY
});
Ok(())
}
/// Processes a single request from an `InputDeviceRequestStream`
///
/// # Returns
/// * Some(Ok(ServerEnd<InputReportsReaderMarker>)) if the request yielded an
/// `InputReportsReader`. `InputDevice` should route its `InputReports` to the yielded
/// `InputReportsReader`.
/// * Some(Err) if the request yielded an `Error`
/// * None if the request was fully processed by `handle_device_request()`
fn handle_device_request(
request: Result<InputDeviceRequest, FidlError>,
descriptor: &DeviceDescriptor,
) -> Option<Result<ServerEnd<InputReportsReaderMarker>, Error>> {
match request {
Ok(InputDeviceRequest::GetInputReportsReader { reader: reader_server_end, .. }) => {
Some(Ok(reader_server_end))
}
Ok(InputDeviceRequest::GetDescriptor { responder }) => {
match responder.send(descriptor.clone()) {
Ok(()) => None,
Err(e) => {
Some(Err(anyhow::Error::from(e).context("sending GetDescriptor response")))
}
}
}
Ok(InputDeviceRequest::SendOutputReport { .. }) => {
Some(Err(format_err!("InputDevice does not support SendOutputReport")))
}
Ok(InputDeviceRequest::GetFeatureReport { .. }) => {
Some(Err(format_err!("InputDevice does not support GetFeatureReport")))
}
Ok(InputDeviceRequest::SetFeatureReport { .. }) => {
Some(Err(format_err!("InputDevice does not support SetFeatureReport")))
}
Ok(InputDeviceRequest::GetInputReport { .. }) => {
Some(Err(format_err!("InputDevice does not support GetInputReport")))
}
Err(e) => Some(Err(anyhow::Error::from(e).context("while reading InputDeviceRequest"))),
}
}
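    /// Converts the HID usages in `report` to `fidl_fuchsia_input::Key`s, returning an error
    /// for any usage that has no known mapping.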
fn convert_keyboard_report_to_keys(report: &KeyboardReport) -> Result<Vec<Key>, Error> {
report
.pressed_keys
.iter()
.map(|&usage| {
hid_usage_to_input3_key(usage as u16)
.ok_or_else(|| format_err!("no Key for usage {:?}", usage))
})
.collect()
}
/// Same as convert_keyboard_report_to_keys, but no additional calls to HID usage mapping.
///
/// The keyboard report in `convert_keyboard_report_to_keys` assumes USB HID usage page 7.
    /// That set of keys is narrower than what Fuchsia supports, so that function has to map
    /// the usages back into Fuchsia's key encoding (see [fidl_fuchsia_input::Key]).
///
/// This function, in turn, uses the full range of [fidl_fuchsia_input::Key], so does not
/// need this conversion.
fn convert_keyboard_report_to_keys_no_transform(
report: &KeyboardReport,
) -> Result<Vec<Key>, Error> {
report
.pressed_keys
.iter()
.map(|&usage| {
Key::from_primitive(usage)
.ok_or(anyhow::anyhow!("could not convert to input::Key: {}", &usage))
})
.collect()
}
}
#[cfg(test)]
mod tests {
use {
super::{synthesizer::InputDevice as _, *},
fidl::endpoints,
fidl_fuchsia_input_report::{
DeviceDescriptor, InputDeviceMarker, KeyboardDescriptor, KeyboardInputDescriptor,
},
fuchsia_async as fasync,
futures::future,
};
const DEFAULT_REPORT_TIMESTAMP: u64 = 0;
mod responds_to_get_descriptor_request {
use {
super::{
utils::{make_input_device_proxy_and_struct, make_keyboard_descriptor},
*,
},
fidl_fuchsia_input_report::InputReportsReaderMarker,
futures::{pin_mut, task::Poll},
matches::assert_matches,
};
#[fasync::run_until_stalled(test)]
        async fn single_request_before_call_to_get_input_reports_reader() -> Result<(), Error> {
let (proxy, request_stream) = endpoints::create_proxy_and_stream::<InputDeviceMarker>()
.context("creating InputDevice proxy and stream")?;
let input_device_server_fut =
Box::new(InputDevice::new(request_stream, make_keyboard_descriptor(vec![Key::A])))
.serve_reports();
let get_descriptor_fut = proxy.get_descriptor();
std::mem::drop(proxy); // Drop `proxy` to terminate `request_stream`.
let (_, get_descriptor_result) =
future::join(input_device_server_fut, get_descriptor_fut).await;
assert_eq!(
get_descriptor_result.context("fidl error")?,
make_keyboard_descriptor(vec![Key::A])
);
Ok(())
}
#[test]
        fn multiple_requests_before_call_to_get_input_reports_reader() -> Result<(), Error> {
let mut executor = fasync::TestExecutor::new().context("creating executor")?;
let (proxy, request_stream) = endpoints::create_proxy_and_stream::<InputDeviceMarker>()
.context("creating InputDevice proxy and stream")?;
let mut input_device_server_fut =
Box::new(InputDevice::new(request_stream, make_keyboard_descriptor(vec![Key::A])))
.serve_reports();
let mut get_descriptor_fut = proxy.get_descriptor();
assert_matches!(
executor.run_until_stalled(&mut input_device_server_fut),
Poll::Pending
);
std::mem::drop(executor.run_until_stalled(&mut get_descriptor_fut));
let mut get_descriptor_fut = proxy.get_descriptor();
let _ = executor.run_until_stalled(&mut input_device_server_fut);
assert_matches!(
executor.run_until_stalled(&mut get_descriptor_fut),
Poll::Ready(Ok(_))
);
Ok(())
}
#[test]
fn after_call_to_get_input_reports_reader_with_report_pending() -> Result<(), Error> {
let mut executor = fasync::TestExecutor::new().context("creating executor")?;
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device
.key_press(KeyboardReport { pressed_keys: vec![] }, DEFAULT_REPORT_TIMESTAMP)
.context("internal error queuing input event")?;
let input_device_server_fut = input_device.serve_reports();
pin_mut!(input_device_server_fut);
let (_input_reports_reader_proxy, input_reports_reader_server_end) =
endpoints::create_proxy::<InputReportsReaderMarker>()
.context("internal error creating InputReportsReader proxy and server end")?;
input_device_proxy
.get_input_reports_reader(input_reports_reader_server_end)
.context("sending get_input_reports_reader request")?;
assert_matches!(
executor.run_until_stalled(&mut input_device_server_fut),
Poll::Pending
);
let mut get_descriptor_fut = input_device_proxy.get_descriptor();
assert_matches!(
executor.run_until_stalled(&mut input_device_server_fut),
Poll::Pending
);
assert_matches!(executor.run_until_stalled(&mut get_descriptor_fut), Poll::Ready(_));
Ok(())
}
}
mod report_contents {
use {
super::{
utils::{get_input_reports, make_input_device_proxy_and_struct},
*,
},
crate::usages::Usages,
matches::assert_matches,
std::convert::TryInto as _,
};
#[fasync::run_until_stalled(test)]
async fn key_press_generates_expected_keyboard_input_report() -> Result<(), Error> {
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.key_press(
KeyboardReport {
pressed_keys: vec![Usages::HidUsageKeyA as u32, Usages::HidUsageKeyB as u32],
},
DEFAULT_REPORT_TIMESTAMP,
)?;
let input_reports = get_input_reports(input_device, input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
keyboard: Some(KeyboardInputReport {
pressed_keys3: Some(vec![Key::A, Key::B]),
..KeyboardInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn key_press_usage_generates_expected_keyboard_input_report_for_some(
) -> Result<(), Error> {
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device
.key_press_usage(Some(Usages::HidUsageKeyA as u32), DEFAULT_REPORT_TIMESTAMP)?;
let input_reports = get_input_reports(input_device, input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
keyboard: Some(KeyboardInputReport {
pressed_keys3: Some(vec![Key::A]),
..KeyboardInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn key_press_usage_generates_expected_keyboard_input_report_for_none(
) -> Result<(), Error> {
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.key_press_usage(None, DEFAULT_REPORT_TIMESTAMP)?;
let input_reports = get_input_reports(input_device, input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
keyboard: Some(KeyboardInputReport {
pressed_keys3: Some(vec![]),
..KeyboardInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn key_press_returns_error_if_usage_cannot_be_mapped_to_key() {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
assert_matches!(
input_device.key_press(
KeyboardReport { pressed_keys: vec![0xffff_ffff] },
DEFAULT_REPORT_TIMESTAMP
),
Err(_)
);
}
#[fasync::run_until_stalled(test)]
async fn key_press_usage_returns_error_if_usage_cannot_be_mapped_to_key() {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
assert_matches!(
input_device.key_press_usage(Some(0xffff_ffff), DEFAULT_REPORT_TIMESTAMP),
Err(_)
);
}
#[fasync::run_until_stalled(test)]
async fn key_events_generates_expected_keyboard_response() -> Result<(), Error> {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.key_press_raw(
KeyboardReport { pressed_keys: vec![Key::A as u32, Key::B as u32] },
DEFAULT_REPORT_TIMESTAMP,
)?;
let input_reports = get_input_reports(input_device, _input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
keyboard: Some(KeyboardInputReport {
pressed_keys3: Some(vec![Key::A, Key::B]),
..KeyboardInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn tap_generates_expected_report_for_some() -> Result<(), Error> {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.tap(Some((10, 20)), DEFAULT_REPORT_TIMESTAMP)?;
let input_reports = get_input_reports(input_device, _input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
touch: Some(TouchInputReport {
contacts: Some(vec![ContactInputReport {
contact_id: Some(1),
position_x: Some(10),
position_y: Some(20),
pressure: None,
contact_width: Some(0),
contact_height: Some(0),
..ContactInputReport::EMPTY
}]),
pressed_buttons: Some(vec![]),
..TouchInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn tap_generates_expected_report_for_none() -> Result<(), Error> {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.tap(None, DEFAULT_REPORT_TIMESTAMP)?;
let input_reports = get_input_reports(input_device, _input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
touch: Some(TouchInputReport {
contacts: Some(vec![]),
pressed_buttons: Some(vec![]),
..TouchInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn multi_finger_tap_generates_report_for_single_finger() -> Result<(), Error> {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.multi_finger_tap(
Some(vec![Touch { finger_id: 5, x: 10, y: 20, width: 100, height: 200 }]),
DEFAULT_REPORT_TIMESTAMP,
)?;
let input_reports = get_input_reports(input_device, _input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
touch: Some(TouchInputReport {
contacts: Some(vec![ContactInputReport {
contact_id: Some(5),
position_x: Some(10),
position_y: Some(20),
pressure: None,
contact_width: Some(100),
contact_height: Some(200),
..ContactInputReport::EMPTY
}]),
pressed_buttons: Some(vec![]),
..TouchInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn multi_finger_tap_generates_expected_report_for_two_fingers() -> Result<(), Error> {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.multi_finger_tap(
Some(vec![
Touch { finger_id: 5, x: 10, y: 20, width: 100, height: 200 },
Touch { finger_id: 0, x: 30, y: 40, width: 300, height: 400 },
]),
DEFAULT_REPORT_TIMESTAMP,
)?;
let input_reports = get_input_reports(input_device, _input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
touch: Some(TouchInputReport {
contacts: Some(vec![
ContactInputReport {
contact_id: Some(5),
position_x: Some(10),
position_y: Some(20),
pressure: None,
contact_width: Some(100),
contact_height: Some(200),
..ContactInputReport::EMPTY
},
ContactInputReport {
contact_id: Some(0),
position_x: Some(30),
position_y: Some(40),
pressure: None,
contact_width: Some(300),
contact_height: Some(400),
..ContactInputReport::EMPTY
}
]),
pressed_buttons: Some(vec![]),
..TouchInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn multi_finger_tap_generates_expected_report_for_zero_fingers() -> Result<(), Error>
{
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.multi_finger_tap(Some(vec![]), DEFAULT_REPORT_TIMESTAMP)?;
let input_reports = get_input_reports(input_device, _input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
touch: Some(TouchInputReport {
contacts: Some(vec![]),
pressed_buttons: Some(vec![]),
..TouchInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn multi_finger_tap_generates_expected_report_for_none() -> Result<(), Error> {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device.multi_finger_tap(None, DEFAULT_REPORT_TIMESTAMP)?;
let input_reports = get_input_reports(input_device, _input_device_proxy).await;
assert_eq!(
input_reports.as_slice(),
[InputReport {
event_time: Some(
DEFAULT_REPORT_TIMESTAMP.try_into().expect("converting to i64")
),
touch: Some(TouchInputReport {
contacts: Some(vec![]),
pressed_buttons: Some(vec![]),
..TouchInputReport::EMPTY
}),
..InputReport::EMPTY
}]
);
Ok(())
}
#[fasync::run_until_stalled(test)]
        async fn multi_finger_tap_returns_error_when_num_fingers_is_too_large() {
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
assert_matches!(
input_device.multi_finger_tap(
Some(
(0..=TOUCH_MAX_CONTACTS)
.map(|i| Touch {
finger_id: i,
x: i as i32,
y: i as i32,
width: i,
height: i
})
.collect(),
),
DEFAULT_REPORT_TIMESTAMP,
),
Err(_)
);
}
}
mod future_resolution {
use {
super::{
utils::{make_input_device_proxy_and_struct, make_input_reports_reader_proxy},
*,
},
futures::task::Poll,
};
mod yields_ok_after_all_reports_are_sent_to_input_reports_reader {
use {super::*, matches::assert_matches};
#[test]
fn if_device_request_channel_was_closed() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
let input_reports_reader_proxy =
make_input_reports_reader_proxy(&input_device_proxy);
input_device
.key_press(KeyboardReport { pressed_keys: vec![] }, DEFAULT_REPORT_TIMESTAMP)
.expect("queuing input report");
let _input_reports_fut = input_reports_reader_proxy.read_input_reports();
let mut input_device_fut = input_device.serve_reports();
std::mem::drop(input_device_proxy); // Close device request channel.
assert_matches!(
executor.run_until_stalled(&mut input_device_fut),
Poll::Ready(Ok(()))
);
}
#[test]
fn even_if_device_request_channel_is_open() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
let input_reports_reader_proxy =
make_input_reports_reader_proxy(&input_device_proxy);
input_device
.key_press(KeyboardReport { pressed_keys: vec![] }, DEFAULT_REPORT_TIMESTAMP)
.expect("queuing input report");
let _input_reports_fut = input_reports_reader_proxy.read_input_reports();
let mut input_device_fut = input_device.serve_reports();
assert_matches!(
executor.run_until_stalled(&mut input_device_fut),
Poll::Ready(Ok(()))
);
}
#[test]
fn even_if_reports_was_empty_and_device_request_channel_is_open() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, input_device) = make_input_device_proxy_and_struct();
let input_reports_reader_proxy =
make_input_reports_reader_proxy(&input_device_proxy);
let _input_reports_fut = input_reports_reader_proxy.read_input_reports();
let mut input_device_fut = input_device.serve_reports();
assert_matches!(
executor.run_until_stalled(&mut input_device_fut),
Poll::Ready(Ok(()))
);
}
}
mod yields_err_if_peer_closed_device_channel_without_calling_get_input_reports_reader {
use super::*;
use matches::assert_matches;
#[test]
fn if_reports_were_available() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device
.key_press(KeyboardReport { pressed_keys: vec![] }, DEFAULT_REPORT_TIMESTAMP)
.expect("queuing input report");
let mut input_device_fut = input_device.serve_reports();
std::mem::drop(input_device_proxy);
assert_matches!(
executor.run_until_stalled(&mut input_device_fut),
Poll::Ready(Err(_))
)
}
#[test]
fn even_if_no_reports_were_available() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, input_device) = make_input_device_proxy_and_struct();
let mut input_device_fut = input_device.serve_reports();
std::mem::drop(input_device_proxy);
assert_matches!(
executor.run_until_stalled(&mut input_device_fut),
Poll::Ready(Err(_))
)
}
}
mod is_pending_if_peer_has_device_channel_open_and_has_not_called_get_input_reports_reader {
use super::*;
use matches::assert_matches;
#[test]
fn if_reports_were_available() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (_input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
input_device
.key_press(KeyboardReport { pressed_keys: vec![] }, DEFAULT_REPORT_TIMESTAMP)
.expect("queuing input report");
let mut input_device_fut = input_device.serve_reports();
assert_matches!(executor.run_until_stalled(&mut input_device_fut), Poll::Pending)
}
#[test]
fn even_if_no_reports_were_available() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (_input_device_proxy, input_device) = make_input_device_proxy_and_struct();
let mut input_device_fut = input_device.serve_reports();
assert_matches!(executor.run_until_stalled(&mut input_device_fut), Poll::Pending)
}
#[test]
fn even_if_get_device_descriptor_has_been_called() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, input_device) = make_input_device_proxy_and_struct();
let mut input_device_fut = input_device.serve_reports();
let _get_descriptor_fut = input_device_proxy.get_descriptor();
assert_matches!(executor.run_until_stalled(&mut input_device_fut), Poll::Pending)
}
}
mod is_pending_if_peer_has_not_read_any_reports_when_a_report_is_available {
use super::*;
use matches::assert_matches;
#[test]
fn if_device_request_channel_is_open() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
let _input_reports_reader_proxy =
make_input_reports_reader_proxy(&input_device_proxy);
input_device
.key_press(KeyboardReport { pressed_keys: vec![] }, DEFAULT_REPORT_TIMESTAMP)
.expect("queuing input report");
let mut input_device_fut = input_device.serve_reports();
assert_matches!(executor.run_until_stalled(&mut input_device_fut), Poll::Pending)
}
#[test]
fn even_if_device_channel_is_closed() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
let _input_reports_reader_proxy =
make_input_reports_reader_proxy(&input_device_proxy);
input_device
.key_press(KeyboardReport { pressed_keys: vec![] }, DEFAULT_REPORT_TIMESTAMP)
.expect("queuing input report");
let mut input_device_fut = input_device.serve_reports();
std::mem::drop(input_device_proxy); // Terminate `InputDeviceRequestStream`.
assert_matches!(executor.run_until_stalled(&mut input_device_fut), Poll::Pending)
}
}
mod is_pending_if_peer_did_not_read_all_reports {
use {
super::*, fidl_fuchsia_input_report::MAX_DEVICE_REPORT_COUNT,
matches::assert_matches,
};
#[test]
fn if_device_request_channel_is_open() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
let input_reports_reader_proxy =
make_input_reports_reader_proxy(&input_device_proxy);
(0..=MAX_DEVICE_REPORT_COUNT).for_each(|_| {
input_device
.key_press(
KeyboardReport { pressed_keys: vec![] },
DEFAULT_REPORT_TIMESTAMP,
)
.expect("queuing input report");
});
// One query isn't enough to consume all of the reports queued above.
let _input_reports_fut = input_reports_reader_proxy.read_input_reports();
let mut input_device_fut = input_device.serve_reports();
assert_matches!(executor.run_until_stalled(&mut input_device_fut), Poll::Pending)
}
#[test]
fn even_if_device_request_channel_is_closed() {
let mut executor = fasync::TestExecutor::new().expect("creating executor");
let (input_device_proxy, mut input_device) = make_input_device_proxy_and_struct();
let input_reports_reader_proxy =
make_input_reports_reader_proxy(&input_device_proxy);
(0..=MAX_DEVICE_REPORT_COUNT).for_each(|_| {
input_device
.key_press(
KeyboardReport { pressed_keys: vec![] },
DEFAULT_REPORT_TIMESTAMP,
)
.expect("queuing input report");
});
// One query isn't enough to consume all of the reports queued above.
let _input_reports_fut = input_reports_reader_proxy.read_input_reports();
let mut input_device_fut = input_device.serve_reports();
std::mem::drop(input_device_proxy); // Terminate `InputDeviceRequestStream`.
assert_matches!(executor.run_until_stalled(&mut input_device_fut), Poll::Pending)
}
}
}
// Because `input_synthesis` is a library, unsupported features should yield `Err`s,
// rather than panic!()-ing.
mod unsupported_fidl_requests {
use {
super::{utils::make_input_device_proxy_and_struct, *},
fidl_fuchsia_input_report::{FeatureReport, OutputReport},
matches::assert_matches,
};
#[fasync::run_until_stalled(test)]
async fn send_output_report_request_yields_error() -> Result<(), Error> {
let (proxy, input_device) = make_input_device_proxy_and_struct();
let input_device_server_fut = input_device.serve_reports();
let send_output_report_fut = proxy.send_output_report(OutputReport::EMPTY);
std::mem::drop(proxy); // Drop `proxy` to terminate `request_stream`.
assert_matches!(
future::join(input_device_server_fut, send_output_report_fut).await,
(_, Err(_))
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn get_feature_report_request_yields_error() -> Result<(), Error> {
let (proxy, input_device) = make_input_device_proxy_and_struct();
let input_device_server_fut = input_device.serve_reports();
let get_feature_report_fut = proxy.get_feature_report();
std::mem::drop(proxy); // Drop `proxy` to terminate `request_stream`.
assert_matches!(
future::join(input_device_server_fut, get_feature_report_fut).await,
(_, Err(_))
);
Ok(())
}
#[fasync::run_until_stalled(test)]
async fn set_feature_report_request_yields_error() -> Result<(), Error> {
let (proxy, input_device) = make_input_device_proxy_and_struct();
let input_device_server_fut = input_device.serve_reports();
let set_feature_report_fut = proxy.set_feature_report(FeatureReport::EMPTY);
std::mem::drop(proxy); // Drop `proxy` to terminate `request_stream`.
assert_matches!(
future::join(input_device_server_fut, set_feature_report_fut).await,
(_, Err(_))
);
Ok(())
}
}
// Because `input_synthesis` is a library, unimplemented features should yield `Error`s,
// rather than panic!()-ing.
mod unimplemented_trait_methods {
use super::{utils::make_input_device_proxy_and_struct, *};
use matches::assert_matches;
#[test]
fn media_buttons_yields_error() -> Result<(), Error> {
let _executor = fuchsia_async::TestExecutor::new(); // Create TLS executor used by `endpoints`.
let (_proxy, mut input_device) = make_input_device_proxy_and_struct();
let media_buttons_result =
input_device.media_buttons(false, false, false, false, false, false, 0);
assert_matches!(media_buttons_result, Err(_));
Ok(())
}
}
// Because `input_synthesis` is a library, unsupported use cases should yield `Error`s,
// rather than panic!()-ing.
mod unsupported_use_cases {
use {
super::{utils::make_input_device_proxy_and_struct, *},
fidl_fuchsia_input_report::InputReportsReaderMarker,
matches::assert_matches,
};
#[fasync::run_until_stalled(test)]
async fn multiple_get_input_reports_reader_requests_yield_error() -> Result<(), Error> {
let (input_device_proxy, input_device) = make_input_device_proxy_and_struct();
let (_input_reports_reader_proxy, input_reports_reader_server_end) =
endpoints::create_proxy::<InputReportsReaderMarker>()
.context("creating InputReportsReader proxy and server end")?;
input_device_proxy
.get_input_reports_reader(input_reports_reader_server_end)
.expect("sending first get_input_reports_reader request");
let (_input_reports_reader_proxy, input_reports_reader_server_end) =
endpoints::create_proxy::<InputReportsReaderMarker>()
.context("internal error creating InputReportsReader proxy and server end")?;
input_device_proxy
.get_input_reports_reader(input_reports_reader_server_end)
.expect("sending second get_input_reports_reader request");
let input_device_fut = input_device.serve_reports();
assert_matches!(input_device_fut.await, Err(_));
Ok(())
}
}
mod utils {
use {
super::*,
fidl_fuchsia_input_report::{
InputDeviceMarker, InputDeviceProxy, InputReportsReaderMarker,
InputReportsReaderProxy,
},
fuchsia_zircon as zx,
};
/// Creates a `DeviceDescriptor` for a keyboard which has the keys enumerated
/// in `keys`.
pub(super) fn make_keyboard_descriptor(keys: Vec<Key>) -> DeviceDescriptor {
DeviceDescriptor {
keyboard: Some(KeyboardDescriptor {
input: Some(KeyboardInputDescriptor {
keys3: Some(keys),
..KeyboardInputDescriptor::EMPTY
}),
..KeyboardDescriptor::EMPTY
}),
..DeviceDescriptor::EMPTY
}
}
/// Creates an `InputDeviceProxy`, for sending `fuchsia.input.report.InputDevice`
/// requests, and an `InputDevice` struct that will receive the FIDL requests
/// from the `InputDeviceProxy`.
///
/// # Returns
/// A tuple of the proxy and struct. The struct is `Box`-ed so that the caller
/// can easily invoke `serve_reports()`.
pub(super) fn make_input_device_proxy_and_struct() -> (InputDeviceProxy, Box<InputDevice>) {
let (input_device_proxy, input_device_request_stream) =
endpoints::create_proxy_and_stream::<InputDeviceMarker>()
.expect("creating InputDevice proxy and stream");
let input_device =
Box::new(InputDevice::new(input_device_request_stream, DeviceDescriptor::EMPTY));
(input_device_proxy, input_device)
}
/// Creates an `InputReportsReaderProxy`, for sending
        /// `fuchsia.input.report.InputReportsReader` requests, and registers that
/// `InputReportsReader` with the `InputDevice` bound to `InputDeviceProxy`.
///
/// # Returns
/// The newly created `InputReportsReaderProxy`.
pub(super) fn make_input_reports_reader_proxy(
input_device_proxy: &InputDeviceProxy,
) -> InputReportsReaderProxy {
let (input_reports_reader_proxy, input_reports_reader_server_end) =
endpoints::create_proxy::<InputReportsReaderMarker>()
.expect("internal error creating InputReportsReader proxy and server end");
input_device_proxy
.get_input_reports_reader(input_reports_reader_server_end)
.expect("sending get_input_reports_reader request");
input_reports_reader_proxy
}
/// Serves `fuchsia.input.report.InputDevice` and `fuchsia.input.report.InputReportsReader`
/// protocols using `input_device`, and reads `InputReport`s with one call to
/// `input_device_proxy.read_input_reports()`. Then drops the connections to
/// `fuchsia.input.report.InputDevice` and `fuchsia.input.report.InputReportsReader`.
///
/// # Returns
/// The reports provided by the `InputDevice`.
pub(super) async fn get_input_reports(
input_device: Box<InputDevice>,
            input_device_proxy: InputDeviceProxy,
) -> Vec<InputReport> {
let input_reports_reader_proxy =
                make_input_reports_reader_proxy(&input_device_proxy);
let input_device_server_fut = input_device.serve_reports();
let input_reports_fut = input_reports_reader_proxy.read_input_reports();
std::mem::drop(input_reports_reader_proxy); // Close channel to `input_reports_reader_server_end`
std::mem::drop(input_device_proxy); // Terminate `input_device_request_stream`.
future::join(input_device_server_fut, input_reports_fut)
.await
.1
.expect("fidl error")
.map_err(zx::Status::from_raw)
.expect("service error")
}
}
}
| 44.335924 | 109 | 0.567383 |
33f3088bf7d059d6acb8d2baf8758c1034518b1c | 6,755 | //! Paths and Unix shells
//!
//! MacOS, Linux, FreeBSD, and many other OS model their design on Unix,
//! so handling them is relatively consistent. But only relatively.
//! POSIX postdates Unix by 20 years, and each "Unix-like" shell develops
//! unique quirks over time.
//!
//!
//! Windowing Managers, Desktop Environments, GUI Terminals, and PATHs
//!
//! Duplicating paths in PATH can cause performance issues when the OS searches
//! the same place multiple times. Traditionally, Unix configurations have
//! resolved this by setting up PATHs in the shell's login profile.
//!
//! This has its own issues. Login profiles are only intended to run once, but
//! changing the PATH is common enough that people may run it twice. Desktop
//! environments often choose to NOT start login shells in GUI terminals. Thus,
//! a trend has emerged to place PATH updates in other run-commands (rc) files,
//! leaving Rustup with few assumptions to build on for fulfilling its promise
//! to set up PATH appropriately.
//!
//! Rustup addresses this by:
//! 1) using a shell script that updates PATH if the path is not in PATH
//! 2) sourcing this script (`. /path/to/script`) in any appropriate rc file
use std::path::PathBuf;
use error_chain::bail;
use super::*;
use crate::process;
pub type Shell = Box<dyn UnixShell>;
#[derive(Debug, PartialEq)]
pub struct ShellScript {
content: &'static str,
name: &'static str,
}
impl ShellScript {
pub fn write(&self) -> Result<()> {
let home = utils::cargo_home()?;
let cargo_bin = format!("{}/bin", cargo_home_str()?);
let env_name = home.join(self.name);
let env_file = self.content.replace("{cargo_bin}", &cargo_bin);
utils::write_file(self.name, &env_name, &env_file)?;
Ok(())
}
}
// TODO: Update into a bytestring.
pub fn cargo_home_str() -> Result<Cow<'static, str>> {
let path = utils::cargo_home()?;
let default_cargo_home = utils::home_dir()
.unwrap_or_else(|| PathBuf::from("."))
.join(".cargo");
Ok(if default_cargo_home == path {
"$HOME/.cargo".into()
} else {
match path.to_str() {
Some(p) => p.to_owned().into(),
None => bail!("Non-Unicode path!"),
}
})
}
// TODO: Tcsh (BSD)
// TODO?: Make a decision on Ion Shell, Power Shell, Nushell
// Cross-platform non-POSIX shells have not been assessed for integration yet
fn enumerate_shells() -> Vec<Shell> {
vec![Box::new(Posix), Box::new(Bash), Box::new(Zsh)]
}
pub fn get_available_shells() -> impl Iterator<Item = Shell> {
enumerate_shells().into_iter().filter(|sh| sh.does_exist())
}
pub trait UnixShell {
// Detects if a shell "exists". Users have multiple shells, so an "eager"
// heuristic should be used, assuming shells exist if any traces do.
fn does_exist(&self) -> bool;
// Gives all rcfiles of a given shell that Rustup is concerned with.
// Used primarily in checking rcfiles for cleanup.
fn rcfiles(&self) -> Vec<PathBuf>;
// Gives rcs that should be written to.
fn update_rcs(&self) -> Vec<PathBuf>;
// Writes the relevant env file.
fn env_script(&self) -> ShellScript {
ShellScript {
name: "env",
content: include_str!("env.sh"),
}
}
fn source_string(&self) -> Result<String> {
Ok(format!(r#". "{}/env""#, cargo_home_str()?))
}
}
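// Fallback handling for plain POSIX shells: always considered present, and only
// `~/.profile` is touched.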
struct Posix;
impl UnixShell for Posix {
fn does_exist(&self) -> bool {
true
}
fn rcfiles(&self) -> Vec<PathBuf> {
match utils::home_dir() {
Some(dir) => vec![dir.join(".profile")],
_ => vec![],
}
}
fn update_rcs(&self) -> Vec<PathBuf> {
// Write to .profile even if it doesn't exist. It's the only rc in the
// POSIX spec so it should always be set up.
self.rcfiles()
}
}
struct Bash;
impl UnixShell for Bash {
fn does_exist(&self) -> bool {
!self.update_rcs().is_empty()
}
fn rcfiles(&self) -> Vec<PathBuf> {
// Bash also may read .profile, however Rustup already includes handling
// .profile as part of POSIX and always does setup for POSIX shells.
[".bash_profile", ".bash_login", ".bashrc"]
.iter()
.filter_map(|rc| utils::home_dir().map(|dir| dir.join(rc)))
.collect()
}
fn update_rcs(&self) -> Vec<PathBuf> {
self.rcfiles()
.into_iter()
.filter(|rc| rc.is_file())
.collect()
}
}
struct Zsh;
impl Zsh {
fn zdotdir() -> Result<PathBuf> {
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
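        // If the current shell is zsh, trust $ZDOTDIR from the environment; otherwise
        // ask a zsh subprocess to report it.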
if matches!(process().var("SHELL"), Ok(sh) if sh.contains("zsh")) {
match process().var("ZDOTDIR") {
Ok(dir) if !dir.is_empty() => Ok(PathBuf::from(dir)),
_ => bail!("Zsh setup failed."),
}
} else {
match std::process::Command::new("zsh")
.args(&["-c", "'echo $ZDOTDIR'"])
.output()
{
Ok(io) if !io.stdout.is_empty() => Ok(PathBuf::from(OsStr::from_bytes(&io.stdout))),
_ => bail!("Zsh setup failed."),
}
}
}
}
impl UnixShell for Zsh {
fn does_exist(&self) -> bool {
// zsh has to either be the shell or be callable for zsh setup.
matches!(process().var("SHELL"), Ok(sh) if sh.contains("zsh"))
|| matches!(utils::find_cmd(&["zsh"]), Some(_))
}
fn rcfiles(&self) -> Vec<PathBuf> {
[Zsh::zdotdir().ok(), utils::home_dir()]
.iter()
.filter_map(|dir| dir.as_ref().map(|p| p.join(".zshenv")))
.collect()
}
fn update_rcs(&self) -> Vec<PathBuf> {
// zsh can change $ZDOTDIR both _before_ AND _during_ reading .zshenv,
// so we: write to $ZDOTDIR/.zshenv if-exists ($ZDOTDIR changes before)
// OR write to $HOME/.zshenv if it exists (change-during)
// if neither exist, we create it ourselves, but using the same logic,
// because we must still respond to whether $ZDOTDIR is set or unset.
// In any case we only write once.
self.rcfiles()
.into_iter()
.filter(|env| env.is_file())
.chain(self.rcfiles().into_iter())
.take(1)
.collect()
}
}
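// rc files that earlier rustup versions are known to have edited directly; likely used
// to locate and clean up stale PATH entries.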
pub fn legacy_paths() -> impl Iterator<Item = PathBuf> {
let zprofiles = Zsh::zdotdir()
.into_iter()
.chain(utils::home_dir())
.map(|d| d.join(".zprofile"));
let profiles = [".bash_profile", ".profile"]
.iter()
.filter_map(|rc| utils::home_dir().map(|d| d.join(rc)));
profiles.chain(zprofiles)
}
| 31.565421 | 100 | 0.588601 |
e4001601aab746ea85a4fa0caa36090cd4ba3628 | 348 | extern crate corrosion;
use corrosion::{PlayerAction, GameStatus};
use corrosion::utility::*;
#[test]
fn test_concede() {
let mut game = new_two_player_game();
let player1_id = game.player_turn_order[0];
game.do_player_action(player1_id, &PlayerAction::Concede).unwrap();
assert_eq!(game.current_status, GameStatus::Ended);
}
| 20.470588 | 71 | 0.724138 |
6a3ac3176c0c4cb17f30b381319d438624c15794 | 20,622 | #![feature(test)]
use swc_common::{chain, Mark};
use swc_ecma_parser::Syntax;
use swc_ecma_transforms::{
compat::{es2015, es2015::regenerator, es2016, es2017, es2017::async_to_generator},
modules::common_js::common_js,
resolver,
};
use swc_ecma_visit::Fold;
#[macro_use]
mod common;
fn syntax() -> Syntax {
Syntax::default()
}
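// The transform under test: `resolver` followed by the regenerator pass.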
fn tr(_: ()) -> impl Fold {
chain!(resolver(), regenerator(Mark::fresh(Mark::root())))
}
// computed_properties_example
test!(
syntax(),
|_| tr(Default::default()),
computed_properties_example,
r#"
var o = {
*foo() {
return "foo";
}
};
"#,
r#"
var regeneratorRuntime = require('regenerator-runtime');
var o = {
foo() {
return regeneratorRuntime.mark(function _callee() {
return regeneratorRuntime.wrap(function _callee$(_ctx) {
while (1) switch (_ctx.prev = _ctx.next) {
case 0:
return _ctx.abrupt("return", "foo");
case 1:
case "end":
return _ctx.stop();
}
}, _callee);
})()
;
}
};
"#
);
// class_argument_scope_example
test_exec!(
syntax(),
|_| tr(Default::default()),
class_argument_scope_example_exec,
r#"
class Test {
*iter(arg = this) {
yield arg;
}
}
let test = new Test;
expect(test.iter().next().value).toBe(test);
"#
);
//// regression_T7041
//test!(
// syntax(),
// |_| tr(Default::default()),
// regression_t7041,
// r#"
//var _regeneratorRuntime = require("regenerator-runtime");
//
//Object.keys({});
//
//function * fn(){}
//
//"#,
// r#"
//var _regeneratorRuntime = require("regenerator-runtime");
//
//var _marked = _regeneratorRuntime.mark(fn);
//
//Object.keys({});
//
//function fn() {
// return _regeneratorRuntime.wrap(function fn$(_ctx) {
// while (1) {
// switch (_ctx.prev = _ctx.next) {
// case 0:
// case "end":
// return _ctx.stop();
// }
// }
// }, _marked);
//}
//
//"#
//);
// regression_6733
test!(
syntax(),
|_| chain!(
tr(Default::default()),
common_js(Mark::fresh(Mark::root()), Default::default())
),
regression_6733,
r#"
export default function * () {
var x = yield 5;
return 5;
}
"#,
r#"
'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
exports.default = void 0;
var regeneratorRuntime = require('regenerator-runtime');
var _default = function _callee() {
var x;
return regeneratorRuntime.wrap(function _callee$(_ctx) {
while(1)switch(_ctx.prev = _ctx.next){
case 0:
_ctx.next = 2;
return 5;
case 2:
x = _ctx.sent;
void 0;
return _ctx.abrupt('return', 5);
case 5:
case 'end': return _ctx.stop();
}
}, _callee);
};
exports.default = _default;
"#
);
test!(
syntax(),
|_| tr(Default::default()),
empty_fn_decl_1,
"function* foo(a,b,c){}
",
r#"
var regeneratorRuntime = require('regenerator-runtime');
var _marked = regeneratorRuntime.mark(foo);
function foo(a, b, c) {
return regeneratorRuntime.wrap(function foo$(_ctx) {
while (1)
switch (_ctx.prev = _ctx.next) {
case 0:
case "end":
return _ctx.stop();
}
}, _marked);
}
"#
);
test_exec!(
syntax(),
|_| tr(Default::default()),
conditional_return_1,
"
let v = (function* (){
yield 3;
if (true)
return 1
})();
expect(v.next()).toEqual({ value: 3, done: false });
expect(v.next()).toEqual({ value: 1, done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
conditional_return_2,
"
let v = (function* (){
if (false)
return a
yield 1
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
conditional_return_exec_1,
"
let v = (function* (){
yield 3;
if (true)
return 2;
yield 1
})();
expect(v.next()).toEqual({ done: false, value: 3 });
expect(v.next()).toEqual({ done: true, value: 2 });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
conditional_return_exec_2,
"
let v = (function* (){
yield 3;
if (false)
return 2;
yield 1
})();
expect(v.next()).toEqual({ done: false, value: 3 });
expect(v.next()).toEqual({ done: false, value: 1 });
expect(v.next()).toEqual({ done: true, value: undefined });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
conditional_yield_1,
"
let v = (function* () {
if (true)
yield 1
})();
expect(v.next()).toEqual({ done: false, value: 1 });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
conditional_yield_2,
"
let v = (function* () {
if (true)
yield 1
if (false)
yield 2
yield 3
})();
expect(v.next()).toEqual({ done: false, value: 1 });
expect(v.next()).toEqual({ done: false, value: 3 });
expect(v.next()).toEqual({ done: true, value: undefined });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
yield_in_seq,
"
let v = (function* () {
return (1, yield 2, yield 3, 4, yield 5);
})();
expect(v.next()).toEqual({ done: false, value: 2 });
expect(v.next()).toEqual({ done: false, value: 3 });
expect(v.next()).toEqual({ done: false, value: 5 });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
yield_in_cond_seq,
"
let v = (function* () {
if (true)
return (1, yield 2, yield 3, 4, yield 5);
})();
expect(v.next()).toEqual({ done: false, value: 2 });
expect(v.next()).toEqual({ done: false, value: 3 });
expect(v.next()).toEqual({ done: false, value: 5 });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
yield_in_return_and_call,
"
function id(v) { return v; }
let v = (function* () {
if (true)
return (1, id(yield id(2)));
})();
expect(v.next()).toEqual({ done: false, value: 2 });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
yield_in_call,
"
function id(v) { return v; }
let v = (function* () {
return (1, id(yield id(2)));
return (3, id(yield id(4)));
})();
expect(v.next()).toEqual({ done: false, value: 2 });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
ignore,
syntax(),
|_| tr(Default::default()),
yield_temp,
"
function id(v) { return v; }
let v = (function* () {
yield (1, id(yield id(2), 2));
return (3, id(yield id(4)));
})();
expect(v.next()).toEqual({ done: false, value: 2 });
expect(v.next()).toEqual({ done: false, value: 2 });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
ignore,
syntax(),
|_| tr(Default::default()),
yield_next_value,
"
let v = (function* () {
let bar = yield 'foo';
yield bar
})();
expect(v.next('bar')).toEqual({value: 'foo', done: false})
expect(v.next()).toEqual({value: 'bar', done: false})
expect(v.next()).toEqual({done: true})
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
only_yield,
"
let v = (function* () {
yield 1
})();
expect(v.next()).toEqual({ done: false, value: 1 });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
expr_cond,
"
let v = (function* (){
true ? yield 1 : yield 2;
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
expr_array,
"
let v = (function* (){
yield [yield 1, 2];
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: [undefined, 2], done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
expr_object,
"
let v = (function* (){
yield { a: 1 };
})();
expect(v.next()).toEqual({ value: { a: 1 }, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
expr_logical_and,
"
let v = (function* (){
(yield 1) && (yield 2);
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
expr_logical_or,
"
let v = (function* (){
(yield 1) || (yield 2);
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
expr_update_prefix,
"
let v = (function* (){
let i = 0;
yield ++i;
yield i;
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
expr_update_postfix,
"
let v = (function* (){
let i = 0;
yield i++;
yield i;
})();
expect(v.next()).toEqual({ value: 0, done: false });
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
bin_expr_1,
"
let v = (function* (){
yield ((yield 1) + (yield 2));
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ value: NaN, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
try_stmt_1,
"
let v = (function* (){
try {
yield 1;
} catch(e){
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
try_stmt_2,
"
let v = (function* (){
try {
yield 1;
throw new Error('');
} catch(e){
yield 2;
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
try_stmt_3,
"
let v = (function* (){
try {
yield 1;
throw new Error('');
} finally {
yield 2;
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(() => v.next()).toThrow();
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
try_stmt_4,
"
let v = (function* (){
try {
yield 1;
throw new Error('');
} catch (e) {
yield 2;
} finally {
yield 3;
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ value: 3, done: false });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
try_stmt_5,
"
let v = (function* (){
try {
yield 1;
} catch (e) {
}
try {
yield 2;
} catch (e) {
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
"
);
// TODO
test_exec!(
syntax(),
|_| tr(Default::default()),
labeled_stmt_1,
"
let v = (function* (){
})();
expect(v.next()).toEqual({ done: true });
"
);
// TODO
test_exec!(
syntax(),
|_| tr(Default::default()),
break_stmt_1,
"
let v = (function* (){
})();
expect(v.next()).toEqual({ done: true });
"
);
// TODO
test_exec!(
syntax(),
|_| tr(Default::default()),
continue_stmt_1,
"
let v = (function* (){
})();
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
switch_stmt_1,
"
let v = (function* (){
switch(1) {
case 1:
yield 1
yield 2
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
switch_stmt_2,
"
let v = (function* (){
switch(2) {
case 1:
yield 1
yield 2
}
})();
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
switch_stmt_3,
"
let v = (function* (){
switch(2) {
default:
yield 1
yield 2
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
switch_stmt_4,
"
let v = (function* (){
switch(1) {
case 1:
yield 1
case 2:
yield 2
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
switch_stmt_5,
"
let v = (function* (){
switch(1) {
case 1:
yield 1;
break;
case 2:
yield 2;
break;
case 3:
yield 3;
break;
case 4:
yield 4;
break;
}
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
// TODO
test_exec!(
syntax(),
|_| tr(Default::default()),
throw_stmt_1,
"
let v = (function* (){
})();
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
while_stmt_1,
"
let v = (function* (){
let i = 0;
while (true) {
yield i++;
}
})();
expect(v.next()).toEqual({ value: 0, done: false });
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ value: 3, done: false });
expect(v.next()).toEqual({ value: 4, done: false });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
do_while_stmt_1,
"
let v = (function* (){
let i = 0;
do {
yield i++;
} while(true);
})();
expect(v.next()).toEqual({ value: 0, done: false });
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ value: 3, done: false });
expect(v.next()).toEqual({ value: 4, done: false });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
do_while_stmt_2,
"
let v = (function* (){
do {
yield 1;
} while(false);
})();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ done: true });
"
);
// TODO
test_exec!(
syntax(),
|_| tr(Default::default()),
for_stmt_1,
"
let v = (function* (){
})();
expect(v.next()).toEqual({ done: true });
"
);
// TODO
test_exec!(
syntax(),
|_| tr(Default::default()),
for_of_stmt_1,
"
let v = (function* (){
})();
expect(v.next()).toEqual({ done: true });
"
);
// TODO
test_exec!(
syntax(),
|_| tr(Default::default()),
for_in_stmt_1,
"
let v = (function* (){
})();
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
bin_expr_2,
"
let v = (function* (){
let a = 1;
let b = 2;
yield a + b;
yield (yield a) + (yield b)
})();
expect(v.next()).toEqual({ value: 3, done: false });
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ value: NaN, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
arguments_1,
"
function* gen(){
yield Array.prototype.slice.call(arguments);
}
var v = gen(1, 2);
expect(v.next()).toEqual({ value: [1, 2], done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| tr(Default::default()),
arguments_2,
"
function* gen(){
yield arguments[0];
yield arguments[1];
}
var v = gen(1, 2);
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ done: true });
var v = gen(3, 4);
expect(v.next()).toEqual({ value: 3, done: false });
expect(v.next()).toEqual({ value: 4, done: false });
expect(v.next()).toEqual({ done: true });
"
);
test_exec!(
syntax(),
|_| chain!(
es2017(),
es2016(),
es2015(Mark::fresh(Mark::root()), Default::default()),
),
issue_600_full,
"async function foo(b) {
for (let a of b) {
await a
}
}"
);
test_exec!(
syntax(),
|_| chain!(
async_to_generator(),
es2015::for_of(Default::default()),
es2015::regenerator(Mark::fresh(Mark::root())),
),
issue_600_exact_passes,
"async function foo(b) {
for (let a of b) {
await a
}
}"
);
test_exec!(
syntax(),
|_| es2015::regenerator(Mark::fresh(Mark::root())),
issue_600_min,
"function* foo() {
try {
yield 1;
throw new Error('1')
} finally{
try {
yield 2;
} finally{
throw new Error('2');
}
}
}
var v = foo();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(() => v.next()).toThrow('2')
"
);
test_exec!(
syntax(),
|_| es2015::regenerator(Mark::fresh(Mark::root())),
issue_831_1,
"function* myGenerator() {
yield* [1,2,3];
}
const v = myGenerator();
expect(v.next()).toEqual({ value: 1, done: false });
expect(v.next()).toEqual({ value: 2, done: false });
expect(v.next()).toEqual({ value: 3, done: false });
expect(v.next()).toEqual({ done: true });
"
);
// test interop between cjs module and regenerator
test!(
syntax(),
|_| {
let mark = Mark::fresh(Mark::root());
chain!(
es2015::regenerator(mark),
common_js(mark, Default::default()),
)
},
issue_831_2,
"export function* myGenerator() {
yield* [1,2,3];
}",
"'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
exports.myGenerator = myGenerator;
var regeneratorRuntime = require('regenerator-runtime');
var _marked = regeneratorRuntime.mark(myGenerator);
function myGenerator() {
return regeneratorRuntime.wrap(function myGenerator$(_ctx) {
while(1)switch(_ctx.prev = _ctx.next){
case 0:
return _ctx.delegateYield([
1,
2,
3
], _ctx.t0, 1);
case 1:
_ctx.t0;
case 2:
case 'end':
return _ctx.stop();
}
}, _marked);
}"
);
// test interop between export and regenerator
test!(
syntax(),
|_| {
let mark = Mark::fresh(Mark::root());
es2015::regenerator(mark)
},
issue_831_3,
"export function* myGenerator() {
yield* [1,2,3];
}",
"var regeneratorRuntime = require('regenerator-runtime');
var _marked = regeneratorRuntime.mark(myGenerator);
export function myGenerator() {
return regeneratorRuntime.wrap(function myGenerator$(_ctx) {
while(1)switch(_ctx.prev = _ctx.next){
case 0:
return _ctx.delegateYield([
1,
2,
3
], _ctx.t0, 1);
case 1:
_ctx.t0;
case 2:
case 'end':
return _ctx.stop();
}
}, _marked);
}
"
);
test_exec!(
syntax(),
|_| es2015::regenerator(Mark::fresh(Mark::root())),
issue_849_1,
"function* gen() { yield 1 };
function genFactory() { return function*() { yield 1 }; }
const v = genFactory()();
expect(v.next()).toEqual({ value: 1, done: false })
expect(v.next()).toEqual({ done: true })"
);
test_exec!(
syntax(),
|_| es2015::regenerator(Mark::fresh(Mark::root())),
issue_853_1,
"function throwingFn() { throw 'Error' }
function* gen() {
try { yield throwingFn() } catch (e) { yield e }
};
const v = gen();
expect(v.next()).toEqual({ done: false, value: 'Error'});
"
);
| 18.105356 | 86 | 0.531568 |
75a7e65244f9b62cbf3c7dedb47ef15ad70360d6 | 4,139 | // This file is auto generated by [`cg`] from [`schema`].
//
// **DO NOT EDIT THIS FILE**,
//
// Edit `cg` or `schema` instead.
//
// [cg]: https://github.com/teloxide/cg
// [`schema`]: https://github.com/WaffleLapkin/tg-methods-schema
use serde::Serialize;
use crate::types::{ChatId, InputFile, Message, MessageEntity, ParseMode, ReplyMarkup};
impl_payload! {
@[multipart = video, thumb]
/// Use this method to send video files, Telegram clients support mp4 videos (other formats may be sent as [`Document`]). On success, the sent [`Message`] is returned. Bots can currently send video files of up to 50 MB in size, this limit may be changed in the future.
///
/// [`Document`]: crate::types::Document
/// [`Message`]: crate::types::Message
#[derive(Debug, Clone, Serialize)]
pub SendVideo (SendVideoSetters) => Message {
required {
/// Unique identifier for the target chat or username of the target channel (in the format `@channelusername`)
pub chat_id: ChatId [into],
/// Video to send. Pass a file_id as String to send a video that exists on the Telegram servers (recommended), pass an HTTP URL as a String for Telegram to get a video from the Internet, or upload a new video using multipart/form-data. [More info on Sending Files »]
///
/// [More info on Sending Files »]: crate::types::InputFile
pub video: InputFile,
}
optional {
/// Duration of the video in seconds
pub duration: u32,
/// Video width
pub width: u32,
/// Video height
pub height: u32,
/// Thumbnail of the file sent; can be ignored if thumbnail generation for the file is supported server-side. The thumbnail should be in JPEG format and less than 200 kB in size. A thumbnail's width and height should not exceed 320. Ignored if the file is not uploaded using multipart/form-data. Thumbnails can't be reused and can be only uploaded as a new file, so you can pass “attach://<file_attach_name>” if the thumbnail was uploaded using multipart/form-data under <file_attach_name>. [More info on Sending Files »]
///
/// [More info on Sending Files »]: crate::types::InputFile
pub thumb: InputFile,
/// Video caption (may also be used when resending videos by _file\_id_), 0-1024 characters after entities parsing
pub caption: String [into],
/// Mode for parsing entities in the video caption. See [formatting options] for more details.
///
/// [formatting options]: https://core.telegram.org/bots/api#formatting-options
pub parse_mode: ParseMode,
/// List of special entities that appear in the caption, which can be specified instead of _parse\_mode_
pub caption_entities: Vec<MessageEntity> [collect],
/// Pass _True_, if the uploaded video is suitable for streaming
pub supports_streaming: bool,
/// Sends the message [silently]. Users will receive a notification with no sound.
///
/// [silently]: https://telegram.org/blog/channels-2-0#silent-messages
pub disable_notification: bool,
/// Protects the contents of sent messages from forwarding and saving
pub protect_content: bool,
/// If the message is a reply, ID of the original message
pub reply_to_message_id: i32,
/// Pass _True_, if the message should be sent even if the specified replied-to message is not found
pub allow_sending_without_reply: bool,
/// Additional interface options. A JSON-serialized object for an [inline keyboard], [custom reply keyboard], instructions to remove reply keyboard or to force a reply from the user.
///
/// [inline keyboard]: https://core.telegram.org/bots#inline-keyboards-and-on-the-fly-updating
/// [custom reply keyboard]: https://core.telegram.org/bots#keyboards
pub reply_markup: ReplyMarkup [into],
}
}
}
| 60.867647 | 533 | 0.646533 |
f8cdfa97e649816671d494f51a982e2b16dc587e | 186 | use crate::ports::DbError;
use async_trait::async_trait;
use uuid::Uuid;
#[async_trait]
pub trait DeleteStudentPort {
async fn delete(&mut self, id: Uuid) -> Result<(), DbError>;
}
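// Illustrative sketch (not part of the original file): a minimal in-memory
// adapter for this port, the kind of test double a use-case test could inject.
// Only the happy path is modelled, so `DbError` never has to be constructed.
#[cfg(test)]
#[allow(dead_code)]
mod in_memory_adapter_example {
    use super::DeleteStudentPort;
    use crate::ports::DbError;
    use async_trait::async_trait;
    use std::collections::HashSet;
    use uuid::Uuid;

    struct InMemoryStudents {
        ids: HashSet<Uuid>,
    }

    #[async_trait]
    impl DeleteStudentPort for InMemoryStudents {
        async fn delete(&mut self, id: Uuid) -> Result<(), DbError> {
            // Removing an id that was never stored is treated as success here.
            self.ids.remove(&id);
            Ok(())
        }
    }
}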
| 20.666667 | 64 | 0.704301 |
876a820cf204a826b9f65ecfe466dbd90134c5bf | 10,536 | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
//! This documentation was generated from *Eventarc* crate version *2.0.8+20210325*, where *20210325* is the exact revision of the *eventarc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v2.0.8*.
//!
//! Everything else about the *Eventarc* *v1* API can be found at the
//! [official documentation site](https://cloud.google.com/eventarc).
//! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/main/gen/eventarc1).
//! # Features
//!
//! Handle the following *Resources* with ease from the central [hub](Eventarc) ...
//!
//! * projects
//! * [*locations get*](api::ProjectLocationGetCall), [*locations list*](api::ProjectLocationListCall), [*locations operations cancel*](api::ProjectLocationOperationCancelCall), [*locations operations delete*](api::ProjectLocationOperationDeleteCall), [*locations operations get*](api::ProjectLocationOperationGetCall), [*locations operations list*](api::ProjectLocationOperationListCall), [*locations triggers create*](api::ProjectLocationTriggerCreateCall), [*locations triggers delete*](api::ProjectLocationTriggerDeleteCall), [*locations triggers get*](api::ProjectLocationTriggerGetCall), [*locations triggers get iam policy*](api::ProjectLocationTriggerGetIamPolicyCall), [*locations triggers list*](api::ProjectLocationTriggerListCall), [*locations triggers patch*](api::ProjectLocationTriggerPatchCall), [*locations triggers set iam policy*](api::ProjectLocationTriggerSetIamPolicyCall) and [*locations triggers test iam permissions*](api::ProjectLocationTriggerTestIamPermissionCall)
//!
//!
//!
//!
//! Not what you are looking for ? Find all other Google APIs in their Rust [documentation index](http://byron.github.io/google-apis-rs).
//!
//! # Structure of this Library
//!
//! The API is structured into the following primary items:
//!
//! * **[Hub](Eventarc)**
//! * a central object to maintain state and allow accessing all *Activities*
//! * creates [*Method Builders*](client::MethodsBuilder) which in turn
//! allow access to individual [*Call Builders*](client::CallBuilder)
//! * **[Resources](client::Resource)**
//! * primary types that you can apply *Activities* to
//! * a collection of properties and *Parts*
//! * **[Parts](client::Part)**
//! * a collection of properties
//! * never directly used in *Activities*
//! * **[Activities](client::CallBuilder)**
//! * operations to apply to *Resources*
//!
//! All *structures* are marked with applicable traits to further categorize them and ease browsing.
//!
//! Generally speaking, you can invoke *Activities* like this:
//!
//! ```rust,ignore
//! let r = hub.resource().activity(...).doit().await
//! ```
//!
//! Or specifically ...
//!
//! ```ignore
//! let r = hub.projects().locations_operations_get(...).doit().await
//! let r = hub.projects().locations_triggers_create(...).doit().await
//! let r = hub.projects().locations_triggers_delete(...).doit().await
//! let r = hub.projects().locations_triggers_patch(...).doit().await
//! ```
//!
//! The `resource()` and `activity(...)` calls create [builders][builder-pattern]. The second one dealing with `Activities`
//! supports various methods to configure the impending operation (not shown here). It is made such that all required arguments have to be
//! specified right away (i.e. `(...)`), whereas all optional ones can be [build up][builder-pattern] as desired.
//! The `doit()` method performs the actual communication with the server and returns the respective result.
//!
//! # Usage
//!
//! ## Setting up your Project
//!
//! To use this library, you would put the following lines into your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! google-eventarc1 = "*"
//! hyper = "^0.14"
//! hyper-rustls = "^0.22"
//! serde = "^1.0"
//! serde_json = "^1.0"
//! yup-oauth2 = "^5.0"
//! ```
//!
//! ## A complete example
//!
//! ```test_harness,no_run
//! extern crate hyper;
//! extern crate hyper_rustls;
//! extern crate yup_oauth2 as oauth2;
//! extern crate google_eventarc1 as eventarc1;
//! use eventarc1::api::Trigger;
//! use eventarc1::{Result, Error};
//! # async fn dox() {
//! use std::default::Default;
//! use oauth2;
//! use eventarc1::Eventarc;
//!
//! // Get an ApplicationSecret instance by some means. It contains the `client_id` and
//! // `client_secret`, among other things.
//! let secret: oauth2::ApplicationSecret = Default::default();
//! // Instantiate the authenticator. It will choose a suitable authentication flow for you,
//! // unless you replace `None` with the desired Flow.
//! // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
//! // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
//! // retrieve them from storage.
//! let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
//! secret,
//! yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
//! ).build().await.unwrap();
//! let mut hub = Eventarc::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
//! // As the method needs a request, you would usually fill it with the desired information
//! // into the respective structure. Some of the parts shown here might not be applicable !
//! // Values shown here are possibly random and not representative !
//! let mut req = Trigger::default();
//!
//! // You can configure optional parameters by calling the respective setters at will, and
//! // execute the final call using `doit()`.
//! // Values shown here are possibly random and not representative !
//! let result = hub.projects().locations_triggers_patch(req, "name")
//! .validate_only(true)
//! .update_mask("amet.")
//! .allow_missing(true)
//! .doit().await;
//!
//! match result {
//! Err(e) => match e {
//! // The Error enum provides details about what exactly happened.
//! // You can also just use its `Debug`, `Display` or `Error` traits
//! Error::HttpError(_)
//! |Error::Io(_)
//! |Error::MissingAPIKey
//! |Error::MissingToken(_)
//! |Error::Cancelled
//! |Error::UploadSizeLimitExceeded(_, _)
//! |Error::Failure(_)
//! |Error::BadRequest(_)
//! |Error::FieldClash(_)
//! |Error::JsonDecodeError(_, _) => println!("{}", e),
//! },
//! Ok(res) => println!("Success: {:?}", res),
//! }
//! # }
//! ```
//! ## Handling Errors
//!
//! All errors produced by the system are provided either as [Result](client::Result) enumeration as return value of
//! the doit() methods, or handed as possibly intermediate results to either the
//! [Hub Delegate](client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html).
//!
//! When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This
//! makes the system potentially resilient to all kinds of errors.
//!
//! ## Uploads and Downloads
//! If a method supports downloads, the response body, which is part of the [Result](client::Result), should be
//! read by you to obtain the media.
//! If such a method also supports a [Response Result](client::ResponseResult), it will return that by default.
//! You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making
//! this call: `.param("alt", "media")`.
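//!
//! For illustration only (whether a given call actually supports media output
//! depends on the method; the call below is a sketch, not taken from this API):
//!
//! ```ignore
//! let media = hub.projects().locations_triggers_get("name")
//!     .param("alt", "media")
//!     .doit().await;
//! ```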
//!
//! Methods supporting uploads can do so using up to 2 different protocols:
//! *simple* and *resumable*. The distinctiveness of each is represented by customized
//! `doit(...)` methods, which are then named `upload(...)` and `upload_resumable(...)` respectively.
//!
//! ## Customization and Callbacks
//!
//! You may alter the way a `doit()` method is called by providing a [delegate](client::Delegate) to the
//! [Method Builder](client::CallBuilder) before making the final `doit()` call.
//! Respective methods will be called to provide progress information, as well as determine whether the system should
//! retry on failure.
//!
//! The [delegate trait](client::Delegate) is default-implemented, allowing you to customize it with minimal effort.
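//!
//! For illustration, a per-call delegate could be attached like this (the
//! `DefaultDelegate` no-op implementation is assumed to live in the `client`
//! module; any type implementing [Delegate](client::Delegate) works):
//!
//! ```ignore
//! let mut delegate = eventarc1::client::DefaultDelegate;
//! let result = hub.projects().locations_triggers_get("name")
//!     .delegate(&mut delegate)
//!     .doit().await;
//! ```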
//!
//! ## Optional Parts in Server-Requests
//!
//! All structures provided by this library are made to be [encodable](client::RequestValue) and
//! [decodable](client::ResponseResult) via *json*. Optionals are used to indicate that partial requests and responses
//! are valid.
//! Most optionals are considered [Parts](client::Part) which are identifiable by name, which will be sent to
//! the server to indicate either the set parts of the request or the desired parts in the response.
//!
//! ## Builder Arguments
//!
//! Using [method builders](client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods.
//! These will always take a single argument, for which the following statements are true.
//!
//! * [PODs][wiki-pod] are handed by copy
//! * strings are passed as `&str`
//! * [request values](client::RequestValue) are moved
//!
//! Arguments will always be copied or cloned into the builder, to make them independent of their original life times.
//!
//! [wiki-pod]: http://en.wikipedia.org/wiki/Plain_old_data_structure
//! [builder-pattern]: http://en.wikipedia.org/wiki/Builder_pattern
//! [google-go-api]: https://github.com/google/google-api-go-client
//!
//!
// Unused attributes happen thanks to defined, but unused structures
// We don't warn about this, as depending on the API, some data structures or facilities are never used.
// Instead of pre-determining this, we just disable the lint. It's manually tuned to not have any
// unused imports in fully featured APIs. Same with unused_mut ... .
#![allow(unused_imports, unused_mut, dead_code)]
// DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
#[macro_use]
extern crate serde_derive;
extern crate hyper;
extern crate serde;
extern crate serde_json;
extern crate yup_oauth2 as oauth2;
extern crate mime;
extern crate url;
pub mod api;
pub mod client;
// Re-export the hub type and some basic client structs
pub use api::Eventarc;
pub use client::{Result, Error, Delegate};
| 48.552995 | 993 | 0.691154 |
d6feccb8a2f978dace29ec13bc55118e7e94cb91 | 3,787 | use source::Spatial;
use std::f32;
use std::fmt::Debug;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use Device;
use Sample;
use Sink;
use Source;
pub struct SpatialSink {
sink: Sink,
positions: Arc<Mutex<SoundPositions>>,
}
struct SoundPositions {
emitter_position: [f32; 3],
left_ear: [f32; 3],
right_ear: [f32; 3],
}
impl SpatialSink {
/// Builds a new `SpatialSink`.
#[inline]
pub fn new(
device: &Device, emitter_position: [f32; 3], left_ear: [f32; 3], right_ear: [f32; 3],
) -> SpatialSink {
SpatialSink {
sink: Sink::new(device),
positions: Arc::new(Mutex::new(SoundPositions {
emitter_position,
left_ear,
right_ear,
})),
}
}
/// Sets the position of the sound emitter in 3 dimensional space.
pub fn set_emitter_position(&mut self, pos: [f32; 3]) {
self.positions.lock().unwrap().emitter_position = pos;
}
/// Sets the position of the left ear in 3 dimensional space.
pub fn set_left_ear_position(&mut self, pos: [f32; 3]) {
self.positions.lock().unwrap().left_ear = pos;
}
/// Sets the position of the right ear in 3 dimensional space.
pub fn set_right_ear_position(&mut self, pos: [f32; 3]) {
self.positions.lock().unwrap().right_ear = pos;
}
/// Appends a sound to the queue of sounds to play.
#[inline]
pub fn append<S>(&self, source: S)
where
S: Source + Send + 'static,
S::Item: Sample + Send + Debug,
{
let positions = self.positions.clone();
let pos_lock = self.positions.lock().unwrap();
let source = Spatial::new(
source,
pos_lock.emitter_position,
pos_lock.left_ear,
pos_lock.right_ear,
).periodic_access(Duration::from_millis(10), move |i| {
let pos = positions.lock().unwrap();
i.set_positions(pos.emitter_position, pos.left_ear, pos.right_ear);
});
self.sink.append(source);
}
    /// Gets the volume of the sound.
///
/// The value `1.0` is the "normal" volume (unfiltered input). Any value other than 1.0 will
/// multiply each sample by this value.
#[inline]
pub fn volume(&self) -> f32 {
self.sink.volume()
}
/// Changes the volume of the sound.
///
/// The value `1.0` is the "normal" volume (unfiltered input). Any value other than 1.0 will
/// multiply each sample by this value.
#[inline]
pub fn set_volume(&mut self, value: f32) {
self.sink.set_volume(value);
}
/// Resumes playback of a paused sound.
///
/// No effect if not paused.
#[inline]
pub fn play(&self) {
self.sink.play();
}
/// Pauses playback of this sink.
///
/// No effect if already paused.
///
/// A paused sound can be resumed with `play()`.
pub fn pause(&self) {
self.sink.pause();
}
/// Gets if a sound is paused
///
/// Sounds can be paused and resumed using pause() and play(). This gets if a sound is paused.
pub fn is_paused(&self) -> bool {
self.sink.is_paused()
}
/// Stops the sink by emptying the queue.
#[inline]
pub fn stop(&self) {
self.sink.stop()
}
/// Destroys the sink without stopping the sounds that are still playing.
#[inline]
pub fn detach(self) {
self.sink.detach();
}
/// Sleeps the current thread until the sound ends.
#[inline]
pub fn sleep_until_end(&self) {
self.sink.sleep_until_end();
}
/// Returns true if this sink has no more sounds to play.
#[inline]
pub fn empty(&self) -> bool {
self.sink.empty()
}
}
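// Illustrative sketch (not part of the original file): sweeping an emitter from
// left to right while a sound plays. The `device` and `source` values are
// supplied by the caller; only items already imported above are used.
#[allow(dead_code)]
fn example_pan_left_to_right<S>(device: &Device, source: S)
where
    S: Source + Send + 'static,
    S::Item: Sample + Send + Debug,
{
    let mut sink = SpatialSink::new(
        device,
        [-1.0, 0.0, 0.0], // emitter starts on the left
        [-0.1, 0.0, 0.0], // left ear
        [0.1, 0.0, 0.0],  // right ear
    );
    sink.append(source);
    // Step the emitter across the x axis in 100 ms increments.
    for i in 0..11 {
        sink.set_emitter_position([-1.0 + 0.2 * i as f32, 0.0, 0.0]);
        std::thread::sleep(Duration::from_millis(100));
    }
    sink.sleep_until_end();
}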
| 27.05 | 98 | 0.578558 |
019e80df148a412721694e31b4663b47a3af08ec | 2,870 | use crate::openapi::{EitherT, Encoding, Example, Reference, Schema};
use from_as::*;
use std::{
collections::HashMap,
convert::TryFrom,
io::{Read, Write},
};
/// [Media Type Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#media-type-object)
#[derive(Default, Debug, Clone, Serialize, Deserialize, FromFile, AsFile)]
pub struct MediaType {
    /// The schema defining the content of the request, response, or parameter.
#[serde(skip_serializing_if = "Option::is_none")]
pub schema: Option<EitherT<Schema, Reference>>,
/// Example of the media type. The example object SHOULD be in the correct
/// format as specified by the media type. The example field is mutually
/// exclusive of the examples field. Furthermore, if referencing a
/// schema which contains an example, the example value SHALL override
/// the example provided by the schema.
#[serde(skip_serializing_if = "Option::is_none")]
pub example: Option<serde_json::Value>,
/// Examples of the media type. Each example object SHOULD match the media
/// type and specified schema if present. The examples field is mutually
/// exclusive of the example field. Furthermore, if referencing a schema
/// which contains an example, the examples value SHALL override the
/// example provided by the schema.
#[serde(default)]
#[serde(skip_serializing_if = "HashMap::is_empty")]
pub examples: HashMap<String, EitherT<Example, Reference>>,
/// A map between a property name and its encoding information. The key,
/// being the property name, MUST exist in the schema as a property. The
/// encoding object SHALL only apply to requestBody objects when the
/// media type is multipart or application/x-www-form-urlencoded.
#[serde(default)]
#[serde(skip_serializing_if = "HashMap::is_empty")]
pub encoding: HashMap<String, Encoding>,
}
impl MediaType {
pub fn is_upload_session(&self) -> bool {
if let Some(either_t) = self.schema.as_ref() {
if let Some(schema) = either_t.clone().into_left() {
return schema.is_upload_session();
}
}
false
}
pub fn is_download(&self) -> bool {
if let Some(either_t) = self.schema.as_ref() {
if let Some(schema) = either_t.clone().into_left() {
return schema.is_download();
}
}
false
}
pub fn is_ref_type_download(&self) -> bool {
if let Some(either_t) = self.schema.as_ref() {
if let Some(schema) = either_t.clone().into_left() {
return schema.is_ref_type_download();
}
if let Some(reference) = either_t.clone().into_right() {
return reference.is_ref_type_download();
}
}
false
}
}
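// Illustrative sketch (not part of the original file): every field of
// `MediaType` is optional or defaulted, so sparse JSON objects deserialize
// cleanly. Uses only `serde_json`, which this module already depends on.
#[cfg(test)]
mod media_type_format_example {
    use super::MediaType;

    #[test]
    fn deserializes_sparse_media_type_objects() {
        let empty: MediaType = serde_json::from_str("{}").unwrap();
        assert!(empty.schema.is_none());
        assert!(empty.examples.is_empty());
        assert!(empty.encoding.is_empty());

        let with_example: MediaType =
            serde_json::from_str(r#"{ "example": { "id": "1234" } }"#).unwrap();
        assert!(with_example.example.is_some());
    }
}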
| 37.272727 | 115 | 0.647038 |
bb24e6ae9803836ef483c99229a0504089dd5916 | 2,157 | #[doc = "Register `EVENTS_BCMATCH` reader"]
pub struct R(crate::R<EVENTS_BCMATCH_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<EVENTS_BCMATCH_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<EVENTS_BCMATCH_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<EVENTS_BCMATCH_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `EVENTS_BCMATCH` writer"]
pub struct W(crate::W<EVENTS_BCMATCH_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<EVENTS_BCMATCH_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<EVENTS_BCMATCH_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<EVENTS_BCMATCH_SPEC>) -> Self {
W(writer)
}
}
impl W {
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Bit counter reached bit count value.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [events_bcmatch](index.html) module"]
pub struct EVENTS_BCMATCH_SPEC;
impl crate::RegisterSpec for EVENTS_BCMATCH_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [events_bcmatch::R](R) reader structure"]
impl crate::Readable for EVENTS_BCMATCH_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [events_bcmatch::W](W) writer structure"]
impl crate::Writable for EVENTS_BCMATCH_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets EVENTS_BCMATCH to value 0"]
impl crate::Resettable for EVENTS_BCMATCH_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
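// Illustrative sketch (not part of the generated file): the closure body a
// caller would pass to `events_bcmatch.write(|w| ...)` on the owning peripheral
// in order to clear the event after handling it. Only the writer type defined
// above is used, so the peripheral handle itself is not needed here.
#[allow(dead_code)]
fn example_clear_event(w: &mut W) -> &mut W {
    // Event registers are cleared by writing zero.
    unsafe { w.bits(0) }
}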
| 33.184615 | 431 | 0.64395 |
e8b73f4c0093b8436583a0aeb7c4911f95c65c49 | 2,321 | use std::{
pin::Pin,
task::{Context, Poll},
};
use bytes::Bytes;
use futures_util::Stream;
use hyper::body::HttpBody;
use tokio::io::AsyncRead;
use crate::{Error, Result};
/// A body object for requests and responses.
#[derive(Default)]
pub struct Body(pub(crate) hyper::Body);
impl From<&'static [u8]> for Body {
#[inline]
fn from(data: &'static [u8]) -> Self {
Self(data.into())
}
}
impl From<&'static str> for Body {
#[inline]
fn from(data: &'static str) -> Self {
Self(data.into())
}
}
impl From<Bytes> for Body {
#[inline]
fn from(data: Bytes) -> Self {
Self(data.into())
}
}
impl From<Vec<u8>> for Body {
#[inline]
fn from(data: Vec<u8>) -> Self {
Self(data.into())
}
}
impl From<String> for Body {
#[inline]
fn from(data: String) -> Self {
Self(data.into())
}
}
impl Body {
    /// Create a body object from [`Bytes`].
#[inline]
pub fn from_bytes(data: Bytes) -> Self {
data.into()
}
    /// Create a body object from [`String`].
#[inline]
pub fn from_string(data: String) -> Self {
data.into()
}
/// Create a body object from reader.
#[inline]
pub fn from_async_read(reader: impl AsyncRead + Send + 'static) -> Self {
Self(hyper::Body::wrap_stream(tokio_util::io::ReaderStream::new(
reader,
)))
}
/// Create an empty body.
#[inline]
pub fn empty() -> Self {
Self(hyper::Body::empty())
}
/// Consumes this body object to return a [`Bytes`] that contains all data.
pub async fn into_bytes(self) -> Result<Bytes> {
hyper::body::to_bytes(self.0)
.await
.map_err(Error::bad_request)
}
/// Consumes this body object to return a reader.
pub fn into_async_read(self) -> impl AsyncRead + Send + 'static {
tokio_util::io::StreamReader::new(BodyStream(self.0))
}
}
struct BodyStream(hyper::Body);
impl Stream for BodyStream {
type Item = Result<Bytes, std::io::Error>;
#[inline]
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.0)
.poll_data(cx)
.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
}
}
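// Illustrative sketch (not part of the original file): feeding bytes in through
// `from_async_read` and draining them back out with `into_bytes`. Assumes the
// crate's usual tokio dev setup for the `#[tokio::test]` attribute.
#[cfg(test)]
mod body_round_trip_example {
    use super::Body;

    #[tokio::test]
    async fn reader_backed_body_round_trips() {
        // Any `AsyncRead + Send + 'static` value works; a static byte slice is enough.
        let body = Body::from_async_read(&b"hello"[..]);
        let bytes = body.into_bytes().await.unwrap();
        assert_eq!(&bytes[..], b"hello");
    }
}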
| 22.317308 | 94 | 0.56872 |
3965a7a909887ff35d969b6261234b3ff7a9e28b | 1,475 | use crate::core_types::PoolArray;
/// A reference-counted vector of `u8` that uses Godot's pool allocator.
///
/// See [`PoolByteArray`](https://docs.godotengine.org/en/stable/classes/class_poolbytearray.html) in Godot.
pub type ByteArray = PoolArray<u8>;
godot_test!(
test_byte_array_access {
use crate::object::NewRef as _;
let arr = (0..8).collect::<ByteArray>();
let original_read = {
let read = arr.read();
assert_eq!(&[0, 1, 2, 3, 4, 5, 6, 7], read.as_slice());
read.clone()
};
let mut cow_arr = arr.new_ref();
{
let mut write = cow_arr.write();
assert_eq!(8, write.len());
for i in write.as_mut_slice() {
*i *= 2;
}
}
cow_arr.append_slice(&[0, 1, 2, 3, 4, 5, 6, 7]);
assert_eq!(16, cow_arr.len());
for i in 0..8 {
assert_eq!(i * 2, cow_arr.get(i as i32));
}
for i in 8..16 {
assert_eq!(i - 8, cow_arr.get(i as i32));
}
// the write shouldn't have affected the original array
assert_eq!(&[0, 1, 2, 3, 4, 5, 6, 7], original_read.as_slice());
// check to_vec()
assert_eq!(arr.to_vec(), vec![0, 1, 2, 3, 4, 5, 6, 7]);
}
);
godot_test!(
test_byte_array_debug {
let arr = (0..8).collect::<ByteArray>();
assert_eq!(format!("{:?}", arr), "[0, 1, 2, 3, 4, 5, 6, 7]");
}
);
| 26.818182 | 108 | 0.508475 |
03d29f3241db2ba7b0f34d5ca7c8ded348f0d92d | 23 | unsafe auto trait T {}
| 11.5 | 22 | 0.695652 |
d71c55a28f650f060ad1fa7d8e418ce6286bca32 | 2,495 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use crate::Pixbuf;
use crate::PixbufAnimation;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
pub struct PixbufSimpleAnim(Object<ffi::GdkPixbufSimpleAnim, ffi::GdkPixbufSimpleAnimClass>) @extends PixbufAnimation;
match fn {
type_ => || ffi::gdk_pixbuf_simple_anim_get_type(),
}
}
impl PixbufSimpleAnim {
#[doc(alias = "gdk_pixbuf_simple_anim_new")]
pub fn new(width: i32, height: i32, rate: f32) -> PixbufSimpleAnim {
unsafe { from_glib_full(ffi::gdk_pixbuf_simple_anim_new(width, height, rate)) }
}
#[doc(alias = "gdk_pixbuf_simple_anim_add_frame")]
pub fn add_frame(&self, pixbuf: &Pixbuf) {
unsafe {
ffi::gdk_pixbuf_simple_anim_add_frame(self.to_glib_none().0, pixbuf.to_glib_none().0);
}
}
#[doc(alias = "gdk_pixbuf_simple_anim_get_loop")]
pub fn is_loop(&self) -> bool {
unsafe { from_glib(ffi::gdk_pixbuf_simple_anim_get_loop(self.to_glib_none().0)) }
}
#[doc(alias = "gdk_pixbuf_simple_anim_set_loop")]
pub fn set_loop(&self, loop_: bool) {
unsafe {
ffi::gdk_pixbuf_simple_anim_set_loop(self.to_glib_none().0, loop_.to_glib());
}
}
pub fn connect_property_loop_notify<F: Fn(&PixbufSimpleAnim) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_loop_trampoline<F: Fn(&PixbufSimpleAnim) + 'static>(
this: *mut ffi::GdkPixbufSimpleAnim,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::loop\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_loop_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for PixbufSimpleAnim {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("PixbufSimpleAnim")
}
}
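// Illustrative sketch (not part of the generated bindings): assembling a looping
// animation from frames supplied by the caller. Only items already imported in
// this file are used; producing the `Pixbuf` frames is left to the caller.
#[allow(dead_code)]
fn example_build_animation(frames: &[Pixbuf]) -> PixbufSimpleAnim {
    // 16x16 pixels, played back at 4 frames per second.
    let anim = PixbufSimpleAnim::new(16, 16, 4.0);
    anim.set_loop(true);
    for frame in frames {
        anim.add_frame(frame);
    }
    anim
}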
| 31.582278 | 122 | 0.602004 |
5dc64ec4bccf08c9f0a67ee55fba46d934705e7f | 4,135 | #[doc = "Register `SERQ` writer"]
pub struct W(crate::W<SERQ_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SERQ_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<SERQ_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<SERQ_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `SERQ` writer - Set Enable Request"]
pub struct SERQ_W<'a> {
w: &'a mut W,
}
impl<'a> SERQ_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u8 & 0x0f);
self.w
}
}
#[doc = "Field `SAER` writer - Set All Enable Requests"]
pub struct SAER_W<'a> {
w: &'a mut W,
}
impl<'a> SAER_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u8 & 0x01) << 6);
self.w
}
}
#[doc = "No Op enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum NOP_AW {
#[doc = "0: Normal operation"]
_0 = 0,
#[doc = "1: No operation, ignore the other bits in this register"]
_1 = 1,
}
impl From<NOP_AW> for bool {
#[inline(always)]
fn from(variant: NOP_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `NOP` writer - No Op enable"]
pub struct NOP_W<'a> {
w: &'a mut W,
}
impl<'a> NOP_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: NOP_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Normal operation"]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(NOP_AW::_0)
}
#[doc = "No operation, ignore the other bits in this register"]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(NOP_AW::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u8 & 0x01) << 7);
self.w
}
}
impl W {
#[doc = "Bits 0:3 - Set Enable Request"]
#[inline(always)]
pub fn serq(&mut self) -> SERQ_W {
SERQ_W { w: self }
}
#[doc = "Bit 6 - Set All Enable Requests"]
#[inline(always)]
pub fn saer(&mut self) -> SAER_W {
SAER_W { w: self }
}
#[doc = "Bit 7 - No Op enable"]
#[inline(always)]
pub fn nop(&mut self) -> NOP_W {
NOP_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Set Enable Request Register\n\nThis register you can [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [serq](index.html) module"]
pub struct SERQ_SPEC;
impl crate::RegisterSpec for SERQ_SPEC {
type Ux = u8;
}
#[doc = "`write(|w| ..)` method takes [serq::W](W) writer structure"]
impl crate::Writable for SERQ_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets SERQ to value 0"]
impl crate::Resettable for SERQ_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
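// Illustrative sketch (not part of the generated file): the closure body a
// caller would hand to the owning DMA peripheral's `serq.write(|w| ...)` call.
// Only the writer type defined above is used, so the peripheral handle itself
// is not needed here.
#[allow(dead_code)]
fn example_enable_channel_two(w: &mut W) -> &mut W {
    // SERQ = 2 asks the controller to set the enable-request bit for channel 2;
    // SAER and NOP are left clear.
    unsafe { w.serq().bits(2) }
}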
| 28.321918 | 334 | 0.555985 |
6968d2dd6315f5094ce65cfa2a3e073ab0af4500 | 3,380 | use crate::app::Context;
use crate::handler::model::date_time::DateTime;
use crate::handler::model::file::FileId;
use crate::handler::model::file_sharing::FileSharing;
use crate::handler::model::project_query::ProjectQuery;
use crate::handler::{HandlerResponse, HandlerResult};
use serde::{Deserialize, Serialize};
use sos21_domain::context::Login;
use sos21_use_case::share_file;
use warp::http::StatusCode;
#[derive(Debug, Clone, Deserialize)]
pub struct Request {
pub file_id: FileId,
pub expires_at: Option<DateTime>,
pub scope: RequestFileSharingScope,
}
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum RequestFileSharingScope {
ProjectQuery { query: ProjectQuery },
Committee,
CommitteeOperator,
Public,
}
impl RequestFileSharingScope {
fn into_use_case(self) -> share_file::InputFileSharingScope {
match self {
RequestFileSharingScope::ProjectQuery { query } => {
share_file::InputFileSharingScope::ProjectQuery(query.into_use_case())
}
RequestFileSharingScope::Committee => share_file::InputFileSharingScope::Committee,
RequestFileSharingScope::CommitteeOperator => {
share_file::InputFileSharingScope::CommitteeOperator
}
RequestFileSharingScope::Public => share_file::InputFileSharingScope::Public,
}
}
}
#[derive(Debug, Clone, Serialize)]
pub struct Response {
pub sharing: FileSharing,
}
impl HandlerResponse for Response {
fn status_code(&self) -> StatusCode {
StatusCode::CREATED
}
}
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE", tag = "type")]
pub enum Error {
InvalidProjectQuery,
InsufficientPermissions,
FileNotFound,
NonSharableFile,
InvalidFileExpirationDate,
}
impl HandlerResponse for Error {
fn status_code(&self) -> StatusCode {
match self {
Error::InvalidProjectQuery => StatusCode::BAD_REQUEST,
Error::InsufficientPermissions => StatusCode::FORBIDDEN,
Error::FileNotFound => StatusCode::NOT_FOUND,
Error::NonSharableFile => StatusCode::FORBIDDEN,
Error::InvalidFileExpirationDate => StatusCode::CONFLICT,
}
}
}
impl From<share_file::Error> for Error {
fn from(err: share_file::Error) -> Error {
match err {
share_file::Error::InvalidQuery(_) => Error::InvalidProjectQuery,
share_file::Error::InsufficientPermissions => Error::InsufficientPermissions,
share_file::Error::FileNotFound => Error::FileNotFound,
share_file::Error::NonSharableFile => Error::NonSharableFile,
share_file::Error::InvalidExpirationDate => Error::InvalidFileExpirationDate,
}
}
}
#[macro_rules_attribute::macro_rules_attribute(handler!)]
pub async fn handler(ctx: Login<Context>, request: Request) -> HandlerResult<Response, Error> {
let input = share_file::Input {
file_id: request.file_id.into_use_case(),
expires_at: request
.expires_at
.map(|expires_at| expires_at.into_use_case()),
scope: request.scope.into_use_case(),
};
let sharing = share_file::run(&ctx, input).await?;
let sharing = FileSharing::from_use_case(sharing);
Ok(Response { sharing })
}
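// Illustrative sketch (not part of the original handler): the snake_case tag
// format the scope enum above accepts on the wire. Assumes `serde_json` is
// available to this crate, as it is for the surrounding handlers.
#[cfg(test)]
mod scope_format_example {
    use super::RequestFileSharingScope;

    #[test]
    fn scope_uses_snake_case_type_tags() {
        let scope: RequestFileSharingScope =
            serde_json::from_str(r#"{ "type": "committee_operator" }"#).unwrap();
        assert!(matches!(scope, RequestFileSharingScope::CommitteeOperator));
    }
}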
| 33.137255 | 95 | 0.679882 |
f816ba9ab8143b68e3b06a83ab21f99153b8c90d | 164,559 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Lowers the AST to the HIR.
//!
//! Since the AST and HIR are fairly similar, this is mostly a simple procedure,
//! much like a fold. Where lowering involves a bit more work things get more
//! interesting and there are some invariants you should know about. These mostly
//! concern spans and ids.
//!
//! Spans are assigned to AST nodes during parsing and then are modified during
//! expansion to indicate the origin of a node and the process it went through
//! being expanded. Ids are assigned to AST nodes just before lowering.
//!
//! For the simpler lowering steps, ids and spans should be preserved. Unlike
//! expansion we do not preserve the process of lowering in the spans, so spans
//! should not be modified here. When creating a new node (as opposed to
//! 'folding' an existing one), then you create a new id using `next_id()`.
//!
//! You must ensure that ids are unique. That means that you should only use the
//! id from an AST node in a single HIR node (you can assume that AST node ids
//! are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
//! If you do, you must then set the new node's id to a fresh one.
//!
//! Spans are used for error messages and for tools to map semantics back to
//! source code. It is therefore not as important with spans as ids to be strict
//! about use (you can't break the compiler by screwing up a span). Obviously, a
//! HIR node can only have a single span. But multiple nodes can have the same
//! span and spans don't need to be kept in order, etc. Where code is preserved
//! by lowering, it should have the same span as in the AST. Where HIR nodes are
//! new it is probably best to give a span for the whole AST node being lowered.
//! All nodes should have real spans, don't use dummy spans. Tools are likely to
//! get confused if the spans from leaf AST nodes occur in multiple places
//! in the HIR, especially for multiple identifiers.
use dep_graph::DepGraph;
use hir;
use hir::HirVec;
use hir::map::{DefKey, DefPathData, Definitions};
use hir::def_id::{DefId, DefIndex, DefIndexAddressSpace, CRATE_DEF_INDEX};
use hir::def::{Def, PathResolution};
use lint::builtin::{self, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES};
use middle::cstore::CrateStore;
use rustc_data_structures::indexed_vec::IndexVec;
use session::Session;
use util::common::FN_OUTPUT_NAME;
use util::nodemap::{DefIdMap, FxHashMap, NodeMap};
use std::collections::{BTreeMap, HashSet};
use std::fmt::Debug;
use std::iter;
use std::mem;
use syntax::attr;
use syntax::ast::*;
use syntax::errors;
use syntax::ext::hygiene::{Mark, SyntaxContext};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::codemap::{self, respan, CompilerDesugaringKind, Spanned};
use syntax::std_inject;
use syntax::symbol::{keywords, Symbol};
use syntax::tokenstream::{Delimited, TokenStream, TokenTree};
use syntax::parse::token::Token;
use syntax::util::small_vector::SmallVector;
use syntax::visit::{self, Visitor};
use syntax_pos::Span;
const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF;
pub struct LoweringContext<'a> {
crate_root: Option<&'static str>,
    // Used to assign ids to hir nodes that do not directly correspond to an ast node
sess: &'a Session,
cstore: &'a CrateStore,
resolver: &'a mut Resolver,
name_map: FxHashMap<Ident, Name>,
/// The items being lowered are collected here.
items: BTreeMap<NodeId, hir::Item>,
trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem>,
impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
bodies: BTreeMap<hir::BodyId, hir::Body>,
exported_macros: Vec<hir::MacroDef>,
trait_impls: BTreeMap<DefId, Vec<NodeId>>,
trait_auto_impl: BTreeMap<DefId, NodeId>,
is_generator: bool,
catch_scopes: Vec<NodeId>,
loop_scopes: Vec<NodeId>,
is_in_loop_condition: bool,
is_in_trait_impl: bool,
/// What to do when we encounter either an "anonymous lifetime
/// reference". The term "anonymous" is meant to encompass both
/// `'_` lifetimes as well as fully elided cases where nothing is
/// written at all (e.g., `&T` or `std::cell::Ref<T>`).
anonymous_lifetime_mode: AnonymousLifetimeMode,
// This is a list of in-band type definitions being generated by
// Argument-position `impl Trait`.
// When traversing a signature such as `fn foo(x: impl Trait)`,
// we record `impl Trait` as a new type parameter, then later
// add it on to `foo`s generics.
in_band_ty_params: Vec<hir::TyParam>,
// Used to create lifetime definitions from in-band lifetime usages.
// e.g. `fn foo(x: &'x u8) -> &'x u8` to `fn foo<'x>(x: &'x u8) -> &'x u8`
// When a named lifetime is encountered in a function or impl header and
// has not been defined
// (i.e. it doesn't appear in the in_scope_lifetimes list), it is added
// to this list. The results of this list are then added to the list of
// lifetime definitions in the corresponding impl or function generics.
lifetimes_to_define: Vec<(Span, hir::LifetimeName)>,
// Whether or not in-band lifetimes are being collected. This is used to
// indicate whether or not we're in a place where new lifetimes will result
// in in-band lifetime definitions, such a function or an impl header.
// This will always be false unless the `in_band_lifetimes` feature is
// enabled.
is_collecting_in_band_lifetimes: bool,
// Currently in-scope lifetimes defined in impl headers, fn headers, or HRTB.
    // When `is_collecting_in_band_lifetimes` is true, each lifetime is checked
// against this list to see if it is already in-scope, or if a definition
// needs to be created for it.
in_scope_lifetimes: Vec<Name>,
type_def_lifetime_params: DefIdMap<usize>,
current_hir_id_owner: Vec<(DefIndex, u32)>,
item_local_id_counters: NodeMap<u32>,
node_id_to_hir_id: IndexVec<NodeId, hir::HirId>,
}
pub trait Resolver {
/// Resolve a hir path generated by the lowerer when expanding `for`, `if let`, etc.
fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool);
/// Obtain the resolution for a node id
fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution>;
/// We must keep the set of definitions up to date as we add nodes that weren't in the AST.
fn definitions(&mut self) -> &mut Definitions;
/// Given suffix ["b","c","d"], creates a HIR path for `[::crate_root]::b::c::d` and resolves
/// it based on `is_value`.
fn resolve_str_path(
&mut self,
span: Span,
crate_root: Option<&str>,
components: &[&str],
is_value: bool,
) -> hir::Path;
}
#[derive(Clone, Copy, Debug)]
enum ImplTraitContext {
/// Treat `impl Trait` as shorthand for a new universal generic parameter.
/// Example: `fn foo(x: impl Debug)`, where `impl Debug` is conceptually
/// equivalent to a fresh universal parameter like `fn foo<T: Debug>(x: T)`.
///
/// We store a DefId here so we can look up necessary information later
Universal(DefId),
/// Treat `impl Trait` as shorthand for a new existential parameter.
/// Example: `fn foo() -> impl Debug`, where `impl Debug` is conceptually
/// equivalent to a fresh existential parameter like `abstract type T; fn foo() -> T`.
Existential,
/// `impl Trait` is not accepted in this position.
Disallowed,
}
pub fn lower_crate(
sess: &Session,
cstore: &CrateStore,
dep_graph: &DepGraph,
krate: &Crate,
resolver: &mut Resolver,
) -> hir::Crate {
// We're constructing the HIR here; we don't care what we will
// read, since we haven't even constructed the *input* to
// incr. comp. yet.
dep_graph.assert_ignored();
LoweringContext {
crate_root: std_inject::injected_crate_name(),
sess,
cstore,
resolver,
name_map: FxHashMap(),
items: BTreeMap::new(),
trait_items: BTreeMap::new(),
impl_items: BTreeMap::new(),
bodies: BTreeMap::new(),
trait_impls: BTreeMap::new(),
trait_auto_impl: BTreeMap::new(),
exported_macros: Vec::new(),
catch_scopes: Vec::new(),
loop_scopes: Vec::new(),
is_in_loop_condition: false,
anonymous_lifetime_mode: AnonymousLifetimeMode::PassThrough,
type_def_lifetime_params: DefIdMap(),
current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)],
item_local_id_counters: NodeMap(),
node_id_to_hir_id: IndexVec::new(),
is_generator: false,
is_in_trait_impl: false,
in_band_ty_params: Vec::new(),
lifetimes_to_define: Vec::new(),
is_collecting_in_band_lifetimes: false,
in_scope_lifetimes: Vec::new(),
}.lower_crate(krate)
}
#[derive(Copy, Clone, PartialEq, Eq)]
enum ParamMode {
/// Any path in a type context.
Explicit,
/// The `module::Type` in `module::Type::method` in an expression.
Optional,
}
struct LoweredNodeId {
node_id: NodeId,
hir_id: hir::HirId,
}
enum ParenthesizedGenericArgs {
Ok,
Warn,
Err,
}
/// What to do when we encounter an **anonymous** lifetime
/// reference. Anonymous lifetime references come in two flavors. You
/// have implicit, or fully elided, references to lifetimes, like the
/// one in `&T` or `Ref<T>`, and you have `'_` lifetimes, like `&'_ T`
/// or `Ref<'_, T>`. These often behave the same, but not always:
///
/// - certain usages of implicit references are deprecated, like
/// `Ref<T>`, and we sometimes just give hard errors in those cases
/// as well.
/// - for object bounds there is a difference: `Box<dyn Foo>` is not
/// the same as `Box<dyn Foo + '_>`.
///
/// We describe the effects of the various modes in terms of three cases:
///
/// - **Modern** -- includes all uses of `'_`, but also the lifetime arg
/// of a `&` (e.g., the missing lifetime in something like `&T`)
/// - **Dyn Bound** -- if you have something like `Box<dyn Foo>`,
/// there is an elided lifetime bound (`Box<dyn Foo + 'X>`). These
/// elided bounds follow special rules. Note that this only covers
/// cases where *nothing* is written; the `'_` in `Box<dyn Foo +
/// '_>` is a case of "modern" elision.
/// - **Deprecated** -- this covers cases like `Ref<T>`, where the lifetime
/// parameter to `Ref` is completely elided. `Ref<'_, T>` would be the modern,
/// non-deprecated equivalent.
///
/// Currently, the handling of lifetime elision is somewhat spread out
/// between HIR lowering and -- as described below -- the
/// `resolve_lifetime` module. Often we "fallthrough" to that code by generating
/// an "elided" or "underscore" lifetime name. In the future, we probably want to move
/// everything into HIR lowering.
#[derive(Copy, Clone)]
enum AnonymousLifetimeMode {
/// For **Modern** cases, create a new anonymous region parameter
/// and reference that.
///
/// For **Dyn Bound** cases, pass responsibility to
/// `resolve_lifetime` code.
///
/// For **Deprecated** cases, report an error.
CreateParameter,
/// Pass responsibility to `resolve_lifetime` code for all cases.
PassThrough,
}
impl<'a> LoweringContext<'a> {
fn lower_crate(mut self, c: &Crate) -> hir::Crate {
/// Full-crate AST visitor that inserts into a fresh
/// `LoweringContext` any information that may be
/// needed from arbitrary locations in the crate.
/// E.g. The number of lifetime generic parameters
/// declared for every type and trait definition.
struct MiscCollector<'lcx, 'interner: 'lcx> {
lctx: &'lcx mut LoweringContext<'interner>,
}
impl<'lcx, 'interner> Visitor<'lcx> for MiscCollector<'lcx, 'interner> {
fn visit_item(&mut self, item: &'lcx Item) {
self.lctx.allocate_hir_id_counter(item.id, item);
match item.node {
ItemKind::Struct(_, ref generics)
| ItemKind::Union(_, ref generics)
| ItemKind::Enum(_, ref generics)
| ItemKind::Ty(_, ref generics)
| ItemKind::Trait(_, _, ref generics, ..) => {
let def_id = self.lctx.resolver.definitions().local_def_id(item.id);
let count = generics
.params
.iter()
.filter(|param| param.is_lifetime_param())
.count();
self.lctx.type_def_lifetime_params.insert(def_id, count);
}
_ => {}
}
visit::walk_item(self, item);
}
fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
self.lctx.allocate_hir_id_counter(item.id, item);
visit::walk_trait_item(self, item);
}
fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
self.lctx.allocate_hir_id_counter(item.id, item);
visit::walk_impl_item(self, item);
}
}
struct ItemLowerer<'lcx, 'interner: 'lcx> {
lctx: &'lcx mut LoweringContext<'interner>,
}
impl<'lcx, 'interner> ItemLowerer<'lcx, 'interner> {
fn with_trait_impl_ref<F>(&mut self, trait_impl_ref: &Option<TraitRef>, f: F)
where
F: FnOnce(&mut Self),
{
let old = self.lctx.is_in_trait_impl;
self.lctx.is_in_trait_impl = trait_impl_ref.is_some();
f(self);
self.lctx.is_in_trait_impl = old;
}
}
impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
fn visit_item(&mut self, item: &'lcx Item) {
let mut item_lowered = true;
self.lctx.with_hir_id_owner(item.id, |lctx| {
if let Some(hir_item) = lctx.lower_item(item) {
lctx.items.insert(item.id, hir_item);
} else {
item_lowered = false;
}
});
if item_lowered {
let item_lifetimes = match self.lctx.items.get(&item.id).unwrap().node {
hir::Item_::ItemImpl(_, _, _, ref generics, ..)
| hir::Item_::ItemTrait(_, _, ref generics, ..) => {
generics.lifetimes().cloned().collect::<Vec<_>>()
}
_ => Vec::new(),
};
self.lctx
.with_parent_impl_lifetime_defs(&item_lifetimes, |this| {
let this = &mut ItemLowerer { lctx: this };
if let ItemKind::Impl(_, _, _, _, ref opt_trait_ref, _, _) = item.node {
this.with_trait_impl_ref(opt_trait_ref, |this| {
visit::walk_item(this, item)
});
} else {
visit::walk_item(this, item);
}
});
}
}
fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
self.lctx.with_hir_id_owner(item.id, |lctx| {
let id = hir::TraitItemId { node_id: item.id };
let hir_item = lctx.lower_trait_item(item);
lctx.trait_items.insert(id, hir_item);
});
visit::walk_trait_item(self, item);
}
fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
self.lctx.with_hir_id_owner(item.id, |lctx| {
let id = hir::ImplItemId { node_id: item.id };
let hir_item = lctx.lower_impl_item(item);
lctx.impl_items.insert(id, hir_item);
});
visit::walk_impl_item(self, item);
}
}
self.lower_node_id(CRATE_NODE_ID);
debug_assert!(self.node_id_to_hir_id[CRATE_NODE_ID] == hir::CRATE_HIR_ID);
visit::walk_crate(&mut MiscCollector { lctx: &mut self }, c);
visit::walk_crate(&mut ItemLowerer { lctx: &mut self }, c);
let module = self.lower_mod(&c.module);
let attrs = self.lower_attrs(&c.attrs);
let body_ids = body_ids(&self.bodies);
self.resolver
.definitions()
.init_node_id_to_hir_id_mapping(self.node_id_to_hir_id);
hir::Crate {
module,
attrs,
span: c.span,
exported_macros: hir::HirVec::from(self.exported_macros),
items: self.items,
trait_items: self.trait_items,
impl_items: self.impl_items,
bodies: self.bodies,
body_ids,
trait_impls: self.trait_impls,
trait_auto_impl: self.trait_auto_impl,
}
}
fn allocate_hir_id_counter<T: Debug>(&mut self, owner: NodeId, debug: &T) {
if self.item_local_id_counters.insert(owner, 0).is_some() {
bug!(
"Tried to allocate item_local_id_counter for {:?} twice",
debug
);
}
// Always allocate the first HirId for the owner itself
self.lower_node_id_with_owner(owner, owner);
}
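/// Maps an AST `NodeId` to a `HirId`, calling `alloc_hir_id` to create a
/// fresh `HirId` the first time a given `NodeId` is seen and reusing the
/// stored mapping on later calls. `DUMMY_NODE_ID` always maps to
/// `hir::DUMMY_HIR_ID`.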
fn lower_node_id_generic<F>(&mut self, ast_node_id: NodeId, alloc_hir_id: F) -> LoweredNodeId
where
F: FnOnce(&mut Self) -> hir::HirId,
{
if ast_node_id == DUMMY_NODE_ID {
return LoweredNodeId {
node_id: DUMMY_NODE_ID,
hir_id: hir::DUMMY_HIR_ID,
};
}
let min_size = ast_node_id.as_usize() + 1;
if min_size > self.node_id_to_hir_id.len() {
self.node_id_to_hir_id.resize(min_size, hir::DUMMY_HIR_ID);
}
let existing_hir_id = self.node_id_to_hir_id[ast_node_id];
if existing_hir_id == hir::DUMMY_HIR_ID {
// Generate a new HirId
let hir_id = alloc_hir_id(self);
self.node_id_to_hir_id[ast_node_id] = hir_id;
LoweredNodeId {
node_id: ast_node_id,
hir_id,
}
} else {
LoweredNodeId {
node_id: ast_node_id,
hir_id: existing_hir_id,
}
}
}
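/// Runs `f` with `owner` installed as the current HirId owner. The owner's
/// item-local id counter is moved onto the `current_hir_id_owner` stack for
/// the duration of `f` (and replaced by `HIR_ID_COUNTER_LOCKED` in the map,
/// to catch accidental use of `lower_node_id_with_owner` in the meantime),
/// then written back afterwards.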
fn with_hir_id_owner<F>(&mut self, owner: NodeId, f: F)
where
F: FnOnce(&mut Self),
{
let counter = self.item_local_id_counters
.insert(owner, HIR_ID_COUNTER_LOCKED)
.unwrap();
let def_index = self.resolver.definitions().opt_def_index(owner).unwrap();
self.current_hir_id_owner.push((def_index, counter));
f(self);
let (new_def_index, new_counter) = self.current_hir_id_owner.pop().unwrap();
debug_assert!(def_index == new_def_index);
debug_assert!(new_counter >= counter);
let prev = self.item_local_id_counters
.insert(owner, new_counter)
.unwrap();
debug_assert!(prev == HIR_ID_COUNTER_LOCKED);
}
/// This method allocates a new HirId for the given NodeId and stores it in
/// the LoweringContext's NodeId => HirId map.
/// Take care not to call this method if the resulting HirId is then not
/// actually used in the HIR, as that would trigger an assertion in the
/// HirIdValidator later on, which makes sure that all NodeIds got mapped
/// properly. Calling the method twice with the same NodeId is fine though.
fn lower_node_id(&mut self, ast_node_id: NodeId) -> LoweredNodeId {
self.lower_node_id_generic(ast_node_id, |this| {
let &mut (def_index, ref mut local_id_counter) =
this.current_hir_id_owner.last_mut().unwrap();
let local_id = *local_id_counter;
*local_id_counter += 1;
hir::HirId {
owner: def_index,
local_id: hir::ItemLocalId(local_id),
}
})
}
fn lower_node_id_with_owner(&mut self, ast_node_id: NodeId, owner: NodeId) -> LoweredNodeId {
self.lower_node_id_generic(ast_node_id, |this| {
let local_id_counter = this.item_local_id_counters.get_mut(&owner).unwrap();
let local_id = *local_id_counter;
// We want to be sure not to modify the counter in the map while it
// is also on the stack. Otherwise we'll get lost updates when writing
// back from the stack to the map.
debug_assert!(local_id != HIR_ID_COUNTER_LOCKED);
*local_id_counter += 1;
let def_index = this.resolver.definitions().opt_def_index(owner).unwrap();
hir::HirId {
owner: def_index,
local_id: hir::ItemLocalId(local_id),
}
})
}
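/// Wraps the lowered body expression (together with the lowered arguments
/// of `decl`, if provided) in a `hir::Body`, registers it in `self.bodies`,
/// and returns its `BodyId`.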
fn record_body(&mut self, value: hir::Expr, decl: Option<&FnDecl>) -> hir::BodyId {
let body = hir::Body {
arguments: decl.map_or(hir_vec![], |decl| {
decl.inputs.iter().map(|x| self.lower_arg(x)).collect()
}),
is_generator: self.is_generator,
value,
};
let id = body.id();
self.bodies.insert(id, body);
id
}
fn next_id(&mut self) -> LoweredNodeId {
self.lower_node_id(self.sess.next_node_id())
}
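/// Returns the fully resolved `Def` for `id`, or `Def::Err` if no
/// resolution was recorded. A path with unresolved segments left over is
/// a bug at this point.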
fn expect_full_def(&mut self, id: NodeId) -> Def {
self.resolver.get_resolution(id).map_or(Def::Err, |pr| {
if pr.unresolved_segments() != 0 {
bug!("path not fully resolved: {:?}", pr);
}
pr.base_def()
})
}
fn diagnostic(&self) -> &errors::Handler {
self.sess.diagnostic()
}
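/// Creates a fresh, gensym'd `Name` from a static string, for identifiers
/// introduced during lowering.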
fn str_to_ident(&self, s: &'static str) -> Name {
Symbol::gensym(s)
}
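/// Returns `span` marked as coming from the given compiler desugaring, by
/// applying a fresh expansion mark whose expansion info permits unstable
/// (but not unsafe) internal features in the generated code.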
fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span {
let mark = Mark::fresh(Mark::root());
mark.set_expn_info(codemap::ExpnInfo {
call_site: span,
callee: codemap::NameAndSpan {
format: codemap::CompilerDesugaring(reason),
span: Some(span),
allow_internal_unstable: true,
allow_internal_unsafe: false,
edition: codemap::hygiene::default_edition(),
},
});
span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
}
fn with_anonymous_lifetime_mode<R>(
&mut self,
anonymous_lifetime_mode: AnonymousLifetimeMode,
op: impl FnOnce(&mut Self) -> R,
) -> R {
let old_anonymous_lifetime_mode = self.anonymous_lifetime_mode;
self.anonymous_lifetime_mode = anonymous_lifetime_mode;
let result = op(self);
self.anonymous_lifetime_mode = old_anonymous_lifetime_mode;
result
}
/// Creates a new hir::GenericParam for every new lifetime and
/// type parameter encountered while evaluating `f`. Definitions
/// are created with the provided `parent_id` as their parent.
///
/// Presuming that in-band lifetimes are enabled, then
/// `self.anonymous_lifetime_mode` will be updated to match the
/// argument while `f` is running (and restored afterwards).
fn collect_in_band_defs<T, F>(
&mut self,
parent_id: DefId,
anonymous_lifetime_mode: AnonymousLifetimeMode,
f: F,
) -> (Vec<hir::GenericParam>, T)
where
F: FnOnce(&mut LoweringContext) -> T,
{
assert!(!self.is_collecting_in_band_lifetimes);
assert!(self.lifetimes_to_define.is_empty());
let old_anonymous_lifetime_mode = self.anonymous_lifetime_mode;
self.is_collecting_in_band_lifetimes = self.sess.features_untracked().in_band_lifetimes;
if self.is_collecting_in_band_lifetimes {
self.anonymous_lifetime_mode = anonymous_lifetime_mode;
}
assert!(self.in_band_ty_params.is_empty());
let res = f(self);
self.is_collecting_in_band_lifetimes = false;
self.anonymous_lifetime_mode = old_anonymous_lifetime_mode;
let in_band_ty_params = self.in_band_ty_params.split_off(0);
let lifetimes_to_define = self.lifetimes_to_define.split_off(0);
let params = lifetimes_to_define
.into_iter()
.map(|(span, hir_name)| {
let def_node_id = self.next_id().node_id;
// Get the name we'll use to make the def-path. Note
// that collisions are ok here and this shouldn't
// really show up for the end-user.
let str_name = match hir_name {
hir::LifetimeName::Name(n) => n.as_str(),
hir::LifetimeName::Fresh(_) => keywords::UnderscoreLifetime.name().as_str(),
hir::LifetimeName::Implicit
| hir::LifetimeName::Underscore
| hir::LifetimeName::Static => {
span_bug!(span, "unexpected in-band lifetime name: {:?}", hir_name)
}
};
// Add a definition for the in-band lifetime def
self.resolver.definitions().create_def_with_parent(
parent_id.index,
def_node_id,
DefPathData::LifetimeDef(str_name.as_interned_str()),
DefIndexAddressSpace::High,
Mark::root(),
span,
);
hir::GenericParam::Lifetime(hir::LifetimeDef {
lifetime: hir::Lifetime {
id: def_node_id,
span,
name: hir_name,
},
bounds: Vec::new().into(),
pure_wrt_drop: false,
in_band: true,
})
})
.chain(
in_band_ty_params
.into_iter()
.map(|tp| hir::GenericParam::Type(tp)),
)
.collect();
(params, res)
}
/// When there is a reference to some lifetime `'a`, and in-band
/// lifetimes are enabled, then we want to push that lifetime into
/// the vector of names to define later. In that case, it will get
/// added to the appropriate generics.
fn maybe_collect_in_band_lifetime(&mut self, span: Span, name: Name) {
if !self.is_collecting_in_band_lifetimes {
return;
}
if self.in_scope_lifetimes.contains(&name) {
return;
}
let hir_name = hir::LifetimeName::Name(name);
if self.lifetimes_to_define
.iter()
.any(|(_, lt_name)| *lt_name == hir_name)
{
return;
}
self.lifetimes_to_define.push((span, hir_name));
}
/// When we have either an elided or `'_` lifetime in an impl
/// header, we convert it to a fresh, numbered in-band lifetime.
fn collect_fresh_in_band_lifetime(&mut self, span: Span) -> hir::LifetimeName {
assert!(self.is_collecting_in_band_lifetimes);
let index = self.lifetimes_to_define.len();
let hir_name = hir::LifetimeName::Fresh(index);
self.lifetimes_to_define.push((span, hir_name));
hir_name
}
// Evaluates `f` with the lifetimes in `lt_defs` in-scope.
// This is used to track which lifetimes have already been defined, and
// which are new in-band lifetimes that need to have a definition created
// for them.
fn with_in_scope_lifetime_defs<'l, T, F>(
&mut self,
lt_defs: impl Iterator<Item = &'l LifetimeDef>,
f: F,
) -> T
where
F: FnOnce(&mut LoweringContext) -> T,
{
let old_len = self.in_scope_lifetimes.len();
let lt_def_names = lt_defs.map(|lt_def| lt_def.lifetime.ident.name);
self.in_scope_lifetimes.extend(lt_def_names);
let res = f(self);
self.in_scope_lifetimes.truncate(old_len);
res
}
// Same as the method above, but accepts `hir::LifetimeDef`s
// instead of `ast::LifetimeDef`s.
// This should only be used with generics that have already had their
// in-band lifetimes added. In practice, this means that this function is
// only used when lowering a child item of a trait or impl.
fn with_parent_impl_lifetime_defs<T, F>(&mut self, lt_defs: &[hir::LifetimeDef], f: F) -> T
where
F: FnOnce(&mut LoweringContext) -> T,
{
let old_len = self.in_scope_lifetimes.len();
let lt_def_names = lt_defs.iter().map(|lt_def| lt_def.lifetime.name.name());
self.in_scope_lifetimes.extend(lt_def_names);
let res = f(self);
self.in_scope_lifetimes.truncate(old_len);
res
}
/// Appends in-band lifetime defs and argument-position `impl
/// Trait` defs to the existing set of generics.
///
/// Presuming that in-band lifetimes are enabled, then
/// `self.anonymous_lifetime_mode` will be updated to match the
/// argument while `f` is running (and restored afterwards).
fn add_in_band_defs<F, T>(
&mut self,
generics: &Generics,
parent_id: DefId,
anonymous_lifetime_mode: AnonymousLifetimeMode,
f: F,
) -> (hir::Generics, T)
where
F: FnOnce(&mut LoweringContext) -> T,
{
let (in_band_defs, (mut lowered_generics, res)) = self.with_in_scope_lifetime_defs(
generics.params.iter().filter_map(|p| match p {
GenericParam::Lifetime(ld) => Some(ld),
_ => None,
}),
|this| {
let itctx = ImplTraitContext::Universal(parent_id);
this.collect_in_band_defs(parent_id, anonymous_lifetime_mode, |this| {
(this.lower_generics(generics, itctx), f(this))
})
},
);
lowered_generics.params = lowered_generics
.params
.iter()
.cloned()
.chain(in_band_defs)
.collect();
(lowered_generics, res)
}
fn with_catch_scope<T, F>(&mut self, catch_id: NodeId, f: F) -> T
where
F: FnOnce(&mut LoweringContext) -> T,
{
let len = self.catch_scopes.len();
self.catch_scopes.push(catch_id);
let result = f(self);
assert_eq!(
len + 1,
self.catch_scopes.len(),
"catch scopes should be added and removed in stack order"
);
self.catch_scopes.pop().unwrap();
result
}
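/// Lowers a body: runs `f` to produce the body expression with
/// `is_generator` reset to `false`, records the result via `record_body`,
/// and restores the previous generator flag.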
fn lower_body<F>(&mut self, decl: Option<&FnDecl>, f: F) -> hir::BodyId
where
F: FnOnce(&mut LoweringContext) -> hir::Expr,
{
let prev = mem::replace(&mut self.is_generator, false);
let result = f(self);
let r = self.record_body(result, decl);
self.is_generator = prev;
return r;
}
fn with_loop_scope<T, F>(&mut self, loop_id: NodeId, f: F) -> T
where
F: FnOnce(&mut LoweringContext) -> T,
{
// We're no longer in the base loop's condition; we're in another loop.
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = false;
let len = self.loop_scopes.len();
self.loop_scopes.push(loop_id);
let result = f(self);
assert_eq!(
len + 1,
self.loop_scopes.len(),
"Loop scopes should be added and removed in stack order"
);
self.loop_scopes.pop().unwrap();
self.is_in_loop_condition = was_in_loop_condition;
result
}
fn with_loop_condition_scope<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut LoweringContext) -> T,
{
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = true;
let result = f(self);
self.is_in_loop_condition = was_in_loop_condition;
result
}
fn with_new_scopes<T, F>(&mut self, f: F) -> T
where
F: FnOnce(&mut LoweringContext) -> T,
{
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = false;
let catch_scopes = mem::replace(&mut self.catch_scopes, Vec::new());
let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new());
let result = f(self);
self.catch_scopes = catch_scopes;
self.loop_scopes = loop_scopes;
self.is_in_loop_condition = was_in_loop_condition;
result
}
fn def_key(&mut self, id: DefId) -> DefKey {
if id.is_local() {
self.resolver.definitions().def_key(id.index)
} else {
self.cstore.def_key(id)
}
}
fn lower_ident(&mut self, ident: Ident) -> Name {
let ident = ident.modern();
if ident.span.ctxt() == SyntaxContext::empty() {
return ident.name;
}
*self.name_map
.entry(ident)
.or_insert_with(|| Symbol::from_ident(ident))
}
fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
label.map(|label| hir::Label {
name: label.ident.name,
span: label.ident.span,
})
}
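/// Lowers the destination of a `break`/`continue`. A labeled destination
/// is resolved through the label's `Def`; an unlabeled one targets the
/// innermost enclosing loop scope. Failures are recorded as
/// `hir::LoopIdError`s rather than reported here.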
fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
match destination {
Some((id, label)) => {
let target_id = if let Def::Label(loop_id) = self.expect_full_def(id) {
Ok(self.lower_node_id(loop_id).node_id)
} else {
Err(hir::LoopIdError::UnresolvedLabel)
};
hir::Destination {
label: self.lower_label(Some(label)),
target_id,
}
}
None => {
let target_id = self.loop_scopes
.last()
.map(|innermost_loop_id| *innermost_loop_id)
.map(|id| Ok(self.lower_node_id(id).node_id))
.unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
.into();
hir::Destination {
label: None,
target_id,
}
}
}
}
fn lower_attrs(&mut self, attrs: &[Attribute]) -> hir::HirVec<Attribute> {
attrs
.iter()
.map(|a| self.lower_attr(a))
.collect::<Vec<_>>()
.into()
}
fn lower_attr(&mut self, attr: &Attribute) -> Attribute {
Attribute {
id: attr.id,
style: attr.style,
path: attr.path.clone(),
tokens: self.lower_token_stream(attr.tokens.clone()),
is_sugared_doc: attr.is_sugared_doc,
span: attr.span,
}
}
fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
tokens
.into_trees()
.flat_map(|tree| self.lower_token_tree(tree).into_trees())
.collect()
}
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
TokenTree::Token(span, token) => self.lower_token(token, span),
TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
span,
Delimited {
delim: delimited.delim,
tts: self.lower_token_stream(delimited.tts.into()).into(),
},
).into(),
}
}
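/// Lowers a single token: interpolated (nonterminal) tokens are expanded
/// back into a token stream and lowered recursively; all other tokens are
/// passed through unchanged.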
fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
match token {
Token::Interpolated(_) => {}
other => return TokenTree::Token(span, other).into(),
}
let tts = token.interpolated_to_tokenstream(&self.sess.parse_sess, span);
self.lower_token_stream(tts)
}
fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
hir::Arm {
attrs: self.lower_attrs(&arm.attrs),
pats: arm.pats.iter().map(|x| self.lower_pat(x)).collect(),
guard: arm.guard.as_ref().map(|ref x| P(self.lower_expr(x))),
body: P(self.lower_expr(&arm.body)),
}
}
fn lower_ty_binding(&mut self, b: &TypeBinding, itctx: ImplTraitContext) -> hir::TypeBinding {
hir::TypeBinding {
id: self.lower_node_id(b.id).node_id,
name: self.lower_ident(b.ident),
ty: self.lower_ty(&b.ty, itctx),
span: b.span,
}
}
fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
let kind = match t.node {
TyKind::Infer => hir::TyInfer,
TyKind::Err => hir::TyErr,
TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty, itctx)),
TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt, itctx)),
TyKind::Rptr(ref region, ref mt) => {
let span = t.span.shrink_to_lo();
let lifetime = match *region {
Some(ref lt) => self.lower_lifetime(lt),
None => self.elided_ref_lifetime(span),
};
hir::TyRptr(lifetime, self.lower_mt(mt, itctx))
}
TyKind::BareFn(ref f) => self.with_in_scope_lifetime_defs(
f.generic_params.iter().filter_map(|p| match p {
GenericParam::Lifetime(ld) => Some(ld),
_ => None,
}),
|this| {
this.with_anonymous_lifetime_mode(
AnonymousLifetimeMode::PassThrough,
|this| {
hir::TyBareFn(P(hir::BareFnTy {
generic_params: this.lower_generic_params(
&f.generic_params,
&NodeMap(),
ImplTraitContext::Disallowed,
),
unsafety: this.lower_unsafety(f.unsafety),
abi: f.abi,
decl: this.lower_fn_decl(&f.decl, None, false),
arg_names: this.lower_fn_args_to_names(&f.decl),
}))
},
)
},
),
TyKind::Never => hir::TyNever,
TyKind::Tup(ref tys) => {
hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty, itctx)).collect())
}
TyKind::Paren(ref ty) => {
return self.lower_ty(ty, itctx);
}
TyKind::Path(ref qself, ref path) => {
let id = self.lower_node_id(t.id);
let qpath = self.lower_qpath(t.id, qself, path, ParamMode::Explicit, itctx);
let ty = self.ty_path(id, t.span, qpath);
if let hir::TyTraitObject(..) = ty.node {
self.maybe_lint_bare_trait(t.span, t.id, qself.is_none() && path.is_global());
}
return ty;
}
TyKind::ImplicitSelf => hir::TyPath(hir::QPath::Resolved(
None,
P(hir::Path {
def: self.expect_full_def(t.id),
segments: hir_vec![hir::PathSegment::from_name(keywords::SelfType.name())],
span: t.span,
}),
)),
TyKind::Array(ref ty, ref length) => {
hir::TyArray(self.lower_ty(ty, itctx), self.lower_anon_const(length))
}
TyKind::Typeof(ref expr) => {
hir::TyTypeof(self.lower_anon_const(expr))
}
TyKind::TraitObject(ref bounds, kind) => {
let mut lifetime_bound = None;
let bounds = bounds
.iter()
.filter_map(|bound| match *bound {
TraitTyParamBound(ref ty, TraitBoundModifier::None) => {
Some(self.lower_poly_trait_ref(ty, itctx))
}
TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
RegionTyParamBound(ref lifetime) => {
if lifetime_bound.is_none() {
lifetime_bound = Some(self.lower_lifetime(lifetime));
}
None
}
})
.collect();
let lifetime_bound =
lifetime_bound.unwrap_or_else(|| self.elided_dyn_bound(t.span));
if kind != TraitObjectSyntax::Dyn {
self.maybe_lint_bare_trait(t.span, t.id, false);
}
hir::TyTraitObject(bounds, lifetime_bound)
}
TyKind::ImplTrait(ref bounds) => {
let span = t.span;
match itctx {
ImplTraitContext::Existential => {
let def_index = self.resolver.definitions().opt_def_index(t.id).unwrap();
let hir_bounds = self.lower_bounds(bounds, itctx);
let (lifetimes, lifetime_defs) =
self.lifetimes_from_impl_trait_bounds(def_index, &hir_bounds);
hir::TyImplTraitExistential(
hir::ExistTy {
generics: hir::Generics {
params: lifetime_defs,
where_clause: hir::WhereClause {
id: self.next_id().node_id,
predicates: Vec::new().into(),
},
span,
},
bounds: hir_bounds,
},
lifetimes,
)
}
ImplTraitContext::Universal(def_id) => {
let def_node_id = self.next_id().node_id;
// Add a definition for the in-band TyParam
let def_index = self.resolver.definitions().create_def_with_parent(
def_id.index,
def_node_id,
DefPathData::ImplTrait,
DefIndexAddressSpace::High,
Mark::root(),
span,
);
let hir_bounds = self.lower_bounds(bounds, itctx);
// Set the name to `impl Bound1 + Bound2`
let name = Symbol::intern(&pprust::ty_to_string(t));
self.in_band_ty_params.push(hir::TyParam {
name,
id: def_node_id,
bounds: hir_bounds,
default: None,
span,
pure_wrt_drop: false,
synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
attrs: P::new(),
});
hir::TyPath(hir::QPath::Resolved(
None,
P(hir::Path {
span,
def: Def::TyParam(DefId::local(def_index)),
segments: hir_vec![hir::PathSegment::from_name(name)],
}),
))
}
ImplTraitContext::Disallowed => {
span_err!(
self.sess,
t.span,
E0562,
"`impl Trait` not allowed outside of function \
and inherent method return types"
);
hir::TyErr
}
}
}
TyKind::Mac(_) => panic!("TyMac should have been expanded by now."),
};
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(t.id);
P(hir::Ty {
id: node_id,
node: kind,
span: t.span,
hir_id,
})
}
fn lifetimes_from_impl_trait_bounds(
&mut self,
parent_index: DefIndex,
bounds: &hir::TyParamBounds,
) -> (HirVec<hir::Lifetime>, HirVec<hir::GenericParam>) {
// This visitor walks over impl trait bounds and creates defs for all lifetimes which
// appear in the bounds, excluding lifetimes that are created within the bounds.
// e.g. 'a, 'b, but not 'c in `impl for<'c> SomeTrait<'a, 'b, 'c>`
struct ImplTraitLifetimeCollector<'r, 'a: 'r> {
context: &'r mut LoweringContext<'a>,
parent: DefIndex,
collect_elided_lifetimes: bool,
currently_bound_lifetimes: Vec<hir::LifetimeName>,
already_defined_lifetimes: HashSet<hir::LifetimeName>,
output_lifetimes: Vec<hir::Lifetime>,
output_lifetime_params: Vec<hir::GenericParam>,
}
impl<'r, 'a: 'r, 'v> hir::intravisit::Visitor<'v> for ImplTraitLifetimeCollector<'r, 'a> {
fn nested_visit_map<'this>(
&'this mut self,
) -> hir::intravisit::NestedVisitorMap<'this, 'v> {
hir::intravisit::NestedVisitorMap::None
}
fn visit_path_parameters(&mut self, span: Span, parameters: &'v hir::PathParameters) {
// Don't collect elided lifetimes used inside of `Fn()` syntax.
if parameters.parenthesized {
let old_collect_elided_lifetimes = self.collect_elided_lifetimes;
self.collect_elided_lifetimes = false;
hir::intravisit::walk_path_parameters(self, span, parameters);
self.collect_elided_lifetimes = old_collect_elided_lifetimes;
} else {
hir::intravisit::walk_path_parameters(self, span, parameters);
}
}
fn visit_ty(&mut self, t: &'v hir::Ty) {
// Don't collect elided lifetimes used inside of `fn()` syntax
if let &hir::Ty_::TyBareFn(_) = &t.node {
let old_collect_elided_lifetimes = self.collect_elided_lifetimes;
self.collect_elided_lifetimes = false;
// Record the "stack height" of `for<'a>` lifetime bindings
// to be able to later fully undo their introduction.
let old_len = self.currently_bound_lifetimes.len();
hir::intravisit::walk_ty(self, t);
self.currently_bound_lifetimes.truncate(old_len);
self.collect_elided_lifetimes = old_collect_elided_lifetimes;
} else {
hir::intravisit::walk_ty(self, t);
}
}
fn visit_poly_trait_ref(
&mut self,
trait_ref: &'v hir::PolyTraitRef,
modifier: hir::TraitBoundModifier,
) {
// Record the "stack height" of `for<'a>` lifetime bindings
// to be able to later fully undo their introduction.
let old_len = self.currently_bound_lifetimes.len();
hir::intravisit::walk_poly_trait_ref(self, trait_ref, modifier);
self.currently_bound_lifetimes.truncate(old_len);
}
fn visit_generic_param(&mut self, param: &'v hir::GenericParam) {
// Record the introduction of 'a in `for<'a> ...`
if let hir::GenericParam::Lifetime(ref lt_def) = *param {
// Introduce lifetimes one at a time so that we can handle
// cases like `fn foo<'d>() -> impl for<'a, 'b: 'a, 'c: 'b + 'd>`
self.currently_bound_lifetimes.push(lt_def.lifetime.name);
}
hir::intravisit::walk_generic_param(self, param);
}
fn visit_lifetime(&mut self, lifetime: &'v hir::Lifetime) {
let name = match lifetime.name {
hir::LifetimeName::Implicit | hir::LifetimeName::Underscore => {
if self.collect_elided_lifetimes {
// Use `'_` for both implicit and underscore lifetimes in
// `abstract type Foo<'_>: SomeTrait<'_>;`
hir::LifetimeName::Underscore
} else {
return;
}
}
name @ hir::LifetimeName::Fresh(_) => name,
name @ hir::LifetimeName::Name(_) => name,
hir::LifetimeName::Static => return,
};
if !self.currently_bound_lifetimes.contains(&name)
&& !self.already_defined_lifetimes.contains(&name)
{
self.already_defined_lifetimes.insert(name);
self.output_lifetimes.push(hir::Lifetime {
id: self.context.next_id().node_id,
span: lifetime.span,
name,
});
let def_node_id = self.context.next_id().node_id;
self.context.resolver.definitions().create_def_with_parent(
self.parent,
def_node_id,
DefPathData::LifetimeDef(name.name().as_interned_str()),
DefIndexAddressSpace::High,
Mark::root(),
lifetime.span,
);
let def_lifetime = hir::Lifetime {
id: def_node_id,
span: lifetime.span,
name: name,
};
self.output_lifetime_params
.push(hir::GenericParam::Lifetime(hir::LifetimeDef {
lifetime: def_lifetime,
bounds: Vec::new().into(),
pure_wrt_drop: false,
in_band: false,
}));
}
}
}
let mut lifetime_collector = ImplTraitLifetimeCollector {
context: self,
parent: parent_index,
collect_elided_lifetimes: true,
currently_bound_lifetimes: Vec::new(),
already_defined_lifetimes: HashSet::new(),
output_lifetimes: Vec::new(),
output_lifetime_params: Vec::new(),
};
for bound in bounds {
hir::intravisit::walk_ty_param_bound(&mut lifetime_collector, &bound);
}
(
lifetime_collector.output_lifetimes.into(),
lifetime_collector.output_lifetime_params.into(),
)
}
fn lower_foreign_mod(&mut self, fm: &ForeignMod) -> hir::ForeignMod {
hir::ForeignMod {
abi: fm.abi,
items: fm.items
.iter()
.map(|x| self.lower_foreign_item(x))
.collect(),
}
}
fn lower_global_asm(&mut self, ga: &GlobalAsm) -> P<hir::GlobalAsm> {
P(hir::GlobalAsm {
asm: ga.asm,
ctxt: ga.ctxt,
})
}
fn lower_variant(&mut self, v: &Variant) -> hir::Variant {
Spanned {
node: hir::Variant_ {
name: v.node.ident.name,
attrs: self.lower_attrs(&v.node.attrs),
data: self.lower_variant_data(&v.node.data),
disr_expr: v.node.disr_expr.as_ref().map(|e| self.lower_anon_const(e)),
},
span: v.span,
}
}
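/// Lowers a (possibly qualified) path. If the resolver fully resolved the
/// path, the result is a `hir::QPath::Resolved`; any remaining unresolved
/// segments are lowered into a chain of `hir::QPath::TypeRelative`
/// projections off the resolved base (see the worked example on the
/// "extension" segments further down).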
fn lower_qpath(
&mut self,
id: NodeId,
qself: &Option<QSelf>,
p: &Path,
param_mode: ParamMode,
itctx: ImplTraitContext,
) -> hir::QPath {
let qself_position = qself.as_ref().map(|q| q.position);
let qself = qself.as_ref().map(|q| self.lower_ty(&q.ty, itctx));
let resolution = self.resolver
.get_resolution(id)
.unwrap_or(PathResolution::new(Def::Err));
let proj_start = p.segments.len() - resolution.unresolved_segments();
let path = P(hir::Path {
def: resolution.base_def(),
segments: p.segments[..proj_start]
.iter()
.enumerate()
.map(|(i, segment)| {
let param_mode = match (qself_position, param_mode) {
(Some(j), ParamMode::Optional) if i < j => {
// This segment is part of the trait path in a
// qualified path - one of `a`, `b` or `Trait`
// in `<X as a::b::Trait>::T::U::method`.
ParamMode::Explicit
}
_ => param_mode,
};
// Figure out if this is a type/trait segment,
// which may need lifetime elision performed.
let parent_def_id = |this: &mut Self, def_id: DefId| DefId {
krate: def_id.krate,
index: this.def_key(def_id).parent.expect("missing parent"),
};
let type_def_id = match resolution.base_def() {
Def::AssociatedTy(def_id) if i + 2 == proj_start => {
Some(parent_def_id(self, def_id))
}
Def::Variant(def_id) if i + 1 == proj_start => {
Some(parent_def_id(self, def_id))
}
Def::Struct(def_id)
| Def::Union(def_id)
| Def::Enum(def_id)
| Def::TyAlias(def_id)
| Def::Trait(def_id) if i + 1 == proj_start =>
{
Some(def_id)
}
_ => None,
};
let parenthesized_generic_args = match resolution.base_def() {
// `a::b::Trait(Args)`
Def::Trait(..) if i + 1 == proj_start => ParenthesizedGenericArgs::Ok,
// `a::b::Trait(Args)::TraitItem`
Def::Method(..) | Def::AssociatedConst(..) | Def::AssociatedTy(..)
if i + 2 == proj_start =>
{
ParenthesizedGenericArgs::Ok
}
// Avoid duplicated errors
Def::Err => ParenthesizedGenericArgs::Ok,
// An error
Def::Struct(..)
| Def::Enum(..)
| Def::Union(..)
| Def::TyAlias(..)
| Def::Variant(..) if i + 1 == proj_start =>
{
ParenthesizedGenericArgs::Err
}
// A warning for now, for compatibility reasons
_ => ParenthesizedGenericArgs::Warn,
};
let num_lifetimes = type_def_id.map_or(0, |def_id| {
if let Some(&n) = self.type_def_lifetime_params.get(&def_id) {
return n;
}
assert!(!def_id.is_local());
let item_generics =
self.cstore.item_generics_cloned_untracked(def_id, self.sess);
let n = item_generics.own_counts().lifetimes;
self.type_def_lifetime_params.insert(def_id, n);
n
});
self.lower_path_segment(
p.span,
segment,
param_mode,
num_lifetimes,
parenthesized_generic_args,
itctx,
)
})
.collect(),
span: p.span,
});
// Simple case, either no projections, or only fully-qualified.
// E.g. `std::mem::size_of` or `<I as Iterator>::Item`.
if resolution.unresolved_segments() == 0 {
return hir::QPath::Resolved(qself, path);
}
// Create the innermost type that we're projecting from.
let mut ty = if path.segments.is_empty() {
// If the base path is empty that means there exists a
// syntactical `Self`, e.g. `&i32` in `<&i32>::clone`.
qself.expect("missing QSelf for <T>::...")
} else {
// Otherwise, the base path is an implicit `Self` type path,
// e.g. `Vec` in `Vec::new` or `<I as Iterator>::Item` in
// `<I as Iterator>::Item::default`.
let new_id = self.next_id();
self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path))
};
// Anything after the base path are associated "extensions",
// out of which all but the last one are associated types,
// e.g. for `std::vec::Vec::<T>::IntoIter::Item::clone`:
// * base path is `std::vec::Vec<T>`
// * "extensions" are `IntoIter`, `Item` and `clone`
// * type nodes are:
// 1. `std::vec::Vec<T>` (created above)
// 2. `<std::vec::Vec<T>>::IntoIter`
// 3. `<<std::vec::Vec<T>>::IntoIter>::Item`
// * final path is `<<<std::vec::Vec<T>>::IntoIter>::Item>::clone`
for (i, segment) in p.segments.iter().enumerate().skip(proj_start) {
let segment = P(self.lower_path_segment(
p.span,
segment,
param_mode,
0,
ParenthesizedGenericArgs::Warn,
itctx,
));
let qpath = hir::QPath::TypeRelative(ty, segment);
// It's finished, return the extension of the right node type.
if i == p.segments.len() - 1 {
return qpath;
}
// Wrap the associated extension in another type node.
let new_id = self.next_id();
ty = self.ty_path(new_id, p.span, qpath);
}
// Should've returned in the for loop above.
span_bug!(
p.span,
"lower_qpath: no final extension segment in {}..{}",
proj_start,
p.segments.len()
)
}
fn lower_path_extra(
&mut self,
id: NodeId,
p: &Path,
name: Option<Name>,
param_mode: ParamMode,
) -> hir::Path {
hir::Path {
def: self.expect_full_def(id),
segments: p.segments
.iter()
.map(|segment| {
self.lower_path_segment(
p.span,
segment,
param_mode,
0,
ParenthesizedGenericArgs::Err,
ImplTraitContext::Disallowed,
)
})
.chain(name.map(|name| hir::PathSegment::from_name(name)))
.collect(),
span: p.span,
}
}
fn lower_path(&mut self, id: NodeId, p: &Path, param_mode: ParamMode) -> hir::Path {
self.lower_path_extra(id, p, None, param_mode)
}
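/// Lowers a single path segment together with its generic arguments.
/// Parenthesized arguments (the `Fn(A) -> B` sugar) are accepted, linted,
/// or rejected according to `parenthesized_generic_args`; when no lifetimes
/// were written, `expected_lifetimes` elided lifetimes are supplied.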
fn lower_path_segment(
&mut self,
path_span: Span,
segment: &PathSegment,
param_mode: ParamMode,
expected_lifetimes: usize,
parenthesized_generic_args: ParenthesizedGenericArgs,
itctx: ImplTraitContext,
) -> hir::PathSegment {
let (mut parameters, infer_types) = if let Some(ref parameters) = segment.parameters {
let msg = "parenthesized parameters may only be used with a trait";
match **parameters {
PathParameters::AngleBracketed(ref data) => {
self.lower_angle_bracketed_parameter_data(data, param_mode, itctx)
}
PathParameters::Parenthesized(ref data) => match parenthesized_generic_args {
ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data),
ParenthesizedGenericArgs::Warn => {
self.sess.buffer_lint(
PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
CRATE_NODE_ID,
data.span,
msg.into(),
);
(hir::PathParameters::none(), true)
}
ParenthesizedGenericArgs::Err => {
struct_span_err!(self.sess, data.span, E0214, "{}", msg)
.span_label(data.span, "only traits may use parentheses")
.emit();
(hir::PathParameters::none(), true)
}
},
}
} else {
self.lower_angle_bracketed_parameter_data(&Default::default(), param_mode, itctx)
};
if !parameters.parenthesized && parameters.lifetimes.is_empty() {
parameters.lifetimes = self.elided_path_lifetimes(path_span, expected_lifetimes);
}
hir::PathSegment::new(
self.lower_ident(segment.ident),
parameters,
infer_types,
)
}
fn lower_angle_bracketed_parameter_data(
&mut self,
data: &AngleBracketedParameterData,
param_mode: ParamMode,
itctx: ImplTraitContext,
) -> (hir::PathParameters, bool) {
let &AngleBracketedParameterData {
ref lifetimes,
ref types,
ref bindings,
..
} = data;
(
hir::PathParameters {
lifetimes: self.lower_lifetimes(lifetimes),
types: types.iter().map(|ty| self.lower_ty(ty, itctx)).collect(),
bindings: bindings
.iter()
.map(|b| self.lower_ty_binding(b, itctx))
.collect(),
parenthesized: false,
},
types.is_empty() && param_mode == ParamMode::Optional,
)
}
fn lower_parenthesized_parameter_data(
&mut self,
data: &ParenthesizedParameterData,
) -> (hir::PathParameters, bool) {
// Switch to `PassThrough` mode for anonymous lifetimes: this
// means that we permit things like `&Ref<T>`, where `Ref` has
// a hidden lifetime parameter. This is needed for backwards
// compatibility, even in contexts like an impl header where
// we generally don't permit such things (see #51008).
self.with_anonymous_lifetime_mode(
AnonymousLifetimeMode::PassThrough,
|this| {
const DISALLOWED: ImplTraitContext = ImplTraitContext::Disallowed;
let &ParenthesizedParameterData {
ref inputs,
ref output,
span,
} = data;
let inputs = inputs
.iter()
.map(|ty| this.lower_ty(ty, DISALLOWED))
.collect();
let mk_tup = |this: &mut Self, tys, span| {
let LoweredNodeId { node_id, hir_id } = this.next_id();
P(hir::Ty {
node: hir::TyTup(tys),
id: node_id,
hir_id,
span,
})
};
(
hir::PathParameters {
lifetimes: hir::HirVec::new(),
types: hir_vec![mk_tup(this, inputs, span)],
bindings: hir_vec![
hir::TypeBinding {
id: this.next_id().node_id,
name: Symbol::intern(FN_OUTPUT_NAME),
ty: output
.as_ref()
.map(|ty| this.lower_ty(&ty, DISALLOWED))
.unwrap_or_else(|| mk_tup(this, hir::HirVec::new(), span)),
span: output.as_ref().map_or(span, |ty| ty.span),
}
],
parenthesized: true,
},
false,
)
}
)
}
fn lower_local(&mut self, l: &Local) -> P<hir::Local> {
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(l.id);
P(hir::Local {
id: node_id,
hir_id,
ty: l.ty
.as_ref()
.map(|t| self.lower_ty(t, ImplTraitContext::Disallowed)),
pat: self.lower_pat(&l.pat),
init: l.init.as_ref().map(|e| P(self.lower_expr(e))),
span: l.span,
attrs: l.attrs.clone(),
source: hir::LocalSource::Normal,
})
}
fn lower_mutability(&mut self, m: Mutability) -> hir::Mutability {
match m {
Mutability::Mutable => hir::MutMutable,
Mutability::Immutable => hir::MutImmutable,
}
}
fn lower_arg(&mut self, arg: &Arg) -> hir::Arg {
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(arg.id);
hir::Arg {
id: node_id,
hir_id,
pat: self.lower_pat(&arg.pat),
}
}
fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Spanned<Name>> {
decl.inputs
.iter()
.map(|arg| match arg.pat.node {
PatKind::Ident(_, ident, None) => respan(ident.span, ident.name),
_ => respan(arg.pat.span, keywords::Invalid.name()),
})
.collect()
}
fn lower_fn_decl(
&mut self,
decl: &FnDecl,
fn_def_id: Option<DefId>,
impl_trait_return_allow: bool,
) -> P<hir::FnDecl> {
// NOTE: The two last parameters here have to do with impl Trait. If fn_def_id is Some,
// then impl Trait arguments are lowered into generic parameters on the given
// fn_def_id, otherwise impl Trait is disallowed. (for now)
//
// Furthermore, if impl_trait_return_allow is true, then impl Trait may be used in
// return positions as well. This guards against trait declarations and their impls
// where impl Trait is disallowed. (again for now)
P(hir::FnDecl {
inputs: decl.inputs
.iter()
.map(|arg| {
if let Some(def_id) = fn_def_id {
self.lower_ty(&arg.ty, ImplTraitContext::Universal(def_id))
} else {
self.lower_ty(&arg.ty, ImplTraitContext::Disallowed)
}
})
.collect(),
output: match decl.output {
FunctionRetTy::Ty(ref ty) => match fn_def_id {
Some(_) if impl_trait_return_allow => {
hir::Return(self.lower_ty(ty, ImplTraitContext::Existential))
}
_ => hir::Return(self.lower_ty(ty, ImplTraitContext::Disallowed)),
},
FunctionRetTy::Default(span) => hir::DefaultReturn(span),
},
variadic: decl.variadic,
has_implicit_self: decl.inputs.get(0).map_or(false, |arg| match arg.ty.node {
TyKind::ImplicitSelf => true,
TyKind::Rptr(_, ref mt) => mt.ty.node == TyKind::ImplicitSelf,
_ => false,
}),
})
}
fn lower_ty_param_bound(
&mut self,
tpb: &TyParamBound,
itctx: ImplTraitContext,
) -> hir::TyParamBound {
match *tpb {
TraitTyParamBound(ref ty, modifier) => hir::TraitTyParamBound(
self.lower_poly_trait_ref(ty, itctx),
self.lower_trait_bound_modifier(modifier),
),
RegionTyParamBound(ref lifetime) => {
hir::RegionTyParamBound(self.lower_lifetime(lifetime))
}
}
}
fn lower_ty_param(
&mut self,
tp: &TyParam,
add_bounds: &[TyParamBound],
itctx: ImplTraitContext,
) -> hir::TyParam {
let mut name = self.lower_ident(tp.ident);
// Don't expose `Self` (recovered "keyword used as ident" parse error).
// `rustc::ty` expects `Self` to be only used for a trait's `Self`.
// Instead, use gensym("Self") to create a distinct name that looks the same.
if name == keywords::SelfType.name() {
name = Symbol::gensym("Self");
}
let mut bounds = self.lower_bounds(&tp.bounds, itctx);
if !add_bounds.is_empty() {
bounds = bounds
.into_iter()
.chain(self.lower_bounds(add_bounds, itctx).into_iter())
.collect();
}
hir::TyParam {
id: self.lower_node_id(tp.id).node_id,
name,
bounds,
default: tp.default
.as_ref()
.map(|x| self.lower_ty(x, ImplTraitContext::Disallowed)),
span: tp.ident.span,
pure_wrt_drop: attr::contains_name(&tp.attrs, "may_dangle"),
synthetic: tp.attrs
.iter()
.filter(|attr| attr.check_name("rustc_synthetic"))
.map(|_| hir::SyntheticTyParamKind::ImplTrait)
.nth(0),
attrs: self.lower_attrs(&tp.attrs),
}
}
fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
let span = l.ident.span;
match self.lower_ident(l.ident) {
x if x == "'static" => self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
x if x == "'_" => match self.anonymous_lifetime_mode {
AnonymousLifetimeMode::CreateParameter => {
let fresh_name = self.collect_fresh_in_band_lifetime(span);
self.new_named_lifetime(l.id, span, fresh_name)
}
AnonymousLifetimeMode::PassThrough => {
self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
}
},
name => {
self.maybe_collect_in_band_lifetime(span, name);
self.new_named_lifetime(l.id, span, hir::LifetimeName::Name(name))
}
}
}
fn new_named_lifetime(
&mut self,
id: NodeId,
span: Span,
name: hir::LifetimeName,
) -> hir::Lifetime {
hir::Lifetime {
id: self.lower_node_id(id).node_id,
span,
name: name,
}
}
fn lower_lifetime_def(&mut self, l: &LifetimeDef) -> hir::LifetimeDef {
let was_collecting_in_band = self.is_collecting_in_band_lifetimes;
self.is_collecting_in_band_lifetimes = false;
let def = hir::LifetimeDef {
lifetime: self.lower_lifetime(&l.lifetime),
bounds: self.lower_lifetimes(&l.bounds),
pure_wrt_drop: attr::contains_name(&l.attrs, "may_dangle"),
in_band: false,
};
self.is_collecting_in_band_lifetimes = was_collecting_in_band;
def
}
fn lower_lifetimes(&mut self, lts: &Vec<Lifetime>) -> hir::HirVec<hir::Lifetime> {
lts.iter().map(|l| self.lower_lifetime(l)).collect()
}
fn lower_generic_params(
&mut self,
params: &Vec<GenericParam>,
add_bounds: &NodeMap<Vec<TyParamBound>>,
itctx: ImplTraitContext,
) -> hir::HirVec<hir::GenericParam> {
params
.iter()
.map(|param| match *param {
GenericParam::Lifetime(ref lifetime_def) => {
hir::GenericParam::Lifetime(self.lower_lifetime_def(lifetime_def))
}
GenericParam::Type(ref ty_param) => hir::GenericParam::Type(self.lower_ty_param(
ty_param,
add_bounds.get(&ty_param.id).map_or(&[][..], |x| &x),
itctx,
)),
})
.collect()
}
fn lower_generics(&mut self, g: &Generics, itctx: ImplTraitContext) -> hir::Generics {
// Collect `?Trait` bounds in where clause and move them to parameter definitions.
// FIXME: This could probably be done with less rightward drift. It also looks like the two
// control paths where report_error is called are the only paths that advance to after
// the match statement, so the error reporting could probably just be moved there.
let mut add_bounds = NodeMap();
for pred in &g.where_clause.predicates {
if let WherePredicate::BoundPredicate(ref bound_pred) = *pred {
'next_bound: for bound in &bound_pred.bounds {
if let TraitTyParamBound(_, TraitBoundModifier::Maybe) = *bound {
let report_error = |this: &mut Self| {
this.diagnostic().span_err(
bound_pred.bounded_ty.span,
"`?Trait` bounds are only permitted at the \
point where a type parameter is declared",
);
};
// Check if the where clause type is a plain type parameter.
match bound_pred.bounded_ty.node {
TyKind::Path(None, ref path)
if path.segments.len() == 1
&& bound_pred.bound_generic_params.is_empty() =>
{
if let Some(Def::TyParam(def_id)) = self.resolver
.get_resolution(bound_pred.bounded_ty.id)
.map(|d| d.base_def())
{
if let Some(node_id) =
self.resolver.definitions().as_local_node_id(def_id)
{
for param in &g.params {
if let GenericParam::Type(ref ty_param) = *param {
if node_id == ty_param.id {
add_bounds
.entry(ty_param.id)
.or_insert(Vec::new())
.push(bound.clone());
continue 'next_bound;
}
}
}
}
}
report_error(self)
}
_ => report_error(self),
}
}
}
}
}
hir::Generics {
params: self.lower_generic_params(&g.params, &add_bounds, itctx),
where_clause: self.lower_where_clause(&g.where_clause),
span: g.span,
}
}
fn lower_where_clause(&mut self, wc: &WhereClause) -> hir::WhereClause {
hir::WhereClause {
id: self.lower_node_id(wc.id).node_id,
predicates: wc.predicates
.iter()
.map(|predicate| self.lower_where_predicate(predicate))
.collect(),
}
}
fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate {
match *pred {
WherePredicate::BoundPredicate(WhereBoundPredicate {
ref bound_generic_params,
ref bounded_ty,
ref bounds,
span,
}) => {
self.with_in_scope_lifetime_defs(
bound_generic_params.iter().filter_map(|p| match p {
GenericParam::Lifetime(ld) => Some(ld),
_ => None,
}),
|this| {
hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
bound_generic_params: this.lower_generic_params(
bound_generic_params,
&NodeMap(),
ImplTraitContext::Disallowed,
),
bounded_ty: this.lower_ty(bounded_ty, ImplTraitContext::Disallowed),
bounds: bounds
.iter()
.filter_map(|bound| match *bound {
// Ignore `?Trait` bounds.
// They were copied into type parameters already.
TraitTyParamBound(_, TraitBoundModifier::Maybe) => None,
_ => Some(this.lower_ty_param_bound(
bound,
ImplTraitContext::Disallowed,
)),
})
.collect(),
span,
})
},
)
}
WherePredicate::RegionPredicate(WhereRegionPredicate {
ref lifetime,
ref bounds,
span,
}) => hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
span,
lifetime: self.lower_lifetime(lifetime),
bounds: bounds
.iter()
.map(|bound| self.lower_lifetime(bound))
.collect(),
}),
WherePredicate::EqPredicate(WhereEqPredicate {
id,
ref lhs_ty,
ref rhs_ty,
span,
}) => hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
id: self.lower_node_id(id).node_id,
lhs_ty: self.lower_ty(lhs_ty, ImplTraitContext::Disallowed),
rhs_ty: self.lower_ty(rhs_ty, ImplTraitContext::Disallowed),
span,
}),
}
}
fn lower_variant_data(&mut self, vdata: &VariantData) -> hir::VariantData {
match *vdata {
VariantData::Struct(ref fields, id) => hir::VariantData::Struct(
fields
.iter()
.enumerate()
.map(|f| self.lower_struct_field(f))
.collect(),
self.lower_node_id(id).node_id,
),
VariantData::Tuple(ref fields, id) => hir::VariantData::Tuple(
fields
.iter()
.enumerate()
.map(|f| self.lower_struct_field(f))
.collect(),
self.lower_node_id(id).node_id,
),
VariantData::Unit(id) => hir::VariantData::Unit(self.lower_node_id(id).node_id),
}
}
fn lower_trait_ref(&mut self, p: &TraitRef, itctx: ImplTraitContext) -> hir::TraitRef {
let path = match self.lower_qpath(p.ref_id, &None, &p.path, ParamMode::Explicit, itctx) {
hir::QPath::Resolved(None, path) => path.and_then(|path| path),
qpath => bug!("lower_trait_ref: unexpected QPath `{:?}`", qpath),
};
hir::TraitRef {
path,
ref_id: self.lower_node_id(p.ref_id).node_id,
}
}
fn lower_poly_trait_ref(
&mut self,
p: &PolyTraitRef,
itctx: ImplTraitContext,
) -> hir::PolyTraitRef {
let bound_generic_params =
self.lower_generic_params(&p.bound_generic_params, &NodeMap(), itctx);
let trait_ref = self.with_parent_impl_lifetime_defs(
&bound_generic_params
.iter()
.filter_map(|p| match *p {
hir::GenericParam::Lifetime(ref ld) => Some(ld.clone()),
_ => None,
})
.collect::<Vec<_>>(),
|this| this.lower_trait_ref(&p.trait_ref, itctx),
);
hir::PolyTraitRef {
bound_generic_params,
trait_ref,
span: p.span,
}
}
fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField {
hir::StructField {
span: f.span,
id: self.lower_node_id(f.id).node_id,
ident: match f.ident {
Some(ident) => ident,
// FIXME(jseyfried) positional field hygiene
None => Ident::new(Symbol::intern(&index.to_string()), f.span),
},
vis: self.lower_visibility(&f.vis, None),
ty: self.lower_ty(&f.ty, ImplTraitContext::Disallowed),
attrs: self.lower_attrs(&f.attrs),
}
}
fn lower_field(&mut self, f: &Field) -> hir::Field {
hir::Field {
id: self.next_id().node_id,
ident: f.ident,
expr: P(self.lower_expr(&f.expr)),
span: f.span,
is_shorthand: f.is_shorthand,
}
}
fn lower_mt(&mut self, mt: &MutTy, itctx: ImplTraitContext) -> hir::MutTy {
hir::MutTy {
ty: self.lower_ty(&mt.ty, itctx),
mutbl: self.lower_mutability(mt.mutbl),
}
}
fn lower_bounds(
&mut self,
bounds: &[TyParamBound],
itctx: ImplTraitContext,
) -> hir::TyParamBounds {
bounds
.iter()
.map(|bound| self.lower_ty_param_bound(bound, itctx))
.collect()
}
fn lower_block(&mut self, b: &Block, targeted_by_break: bool) -> P<hir::Block> {
let mut expr = None;
let mut stmts = vec![];
for (index, stmt) in b.stmts.iter().enumerate() {
if index == b.stmts.len() - 1 {
if let StmtKind::Expr(ref e) = stmt.node {
expr = Some(P(self.lower_expr(e)));
} else {
stmts.extend(self.lower_stmt(stmt));
}
} else {
stmts.extend(self.lower_stmt(stmt));
}
}
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(b.id);
P(hir::Block {
id: node_id,
hir_id,
stmts: stmts.into(),
expr,
rules: self.lower_block_check_mode(&b.rules),
span: b.span,
targeted_by_break,
recovered: b.recovered,
})
}
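/// Lowers the kind of an item. `name` and `vis` are passed by mutable
/// reference so that lowering a `use` tree can adjust them (see
/// `lower_use_tree`).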
fn lower_item_kind(
&mut self,
id: NodeId,
name: &mut Name,
attrs: &hir::HirVec<Attribute>,
vis: &mut hir::Visibility,
i: &ItemKind,
) -> hir::Item_ {
match *i {
ItemKind::ExternCrate(orig_name) => hir::ItemExternCrate(orig_name),
ItemKind::Use(ref use_tree) => {
// Start with an empty prefix
let prefix = Path {
segments: vec![],
span: use_tree.span,
};
self.lower_use_tree(use_tree, &prefix, id, vis, name, attrs)
}
ItemKind::Static(ref t, m, ref e) => {
let value = self.lower_body(None, |this| this.lower_expr(e));
hir::ItemStatic(
self.lower_ty(t, ImplTraitContext::Disallowed),
self.lower_mutability(m),
value,
)
}
ItemKind::Const(ref t, ref e) => {
let value = self.lower_body(None, |this| this.lower_expr(e));
hir::ItemConst(self.lower_ty(t, ImplTraitContext::Disallowed), value)
}
ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
let fn_def_id = self.resolver.definitions().local_def_id(id);
self.with_new_scopes(|this| {
let body_id = this.lower_body(Some(decl), |this| {
let body = this.lower_block(body, false);
this.expr_block(body, ThinVec::new())
});
let (generics, fn_decl) = this.add_in_band_defs(
generics,
fn_def_id,
AnonymousLifetimeMode::PassThrough,
|this| this.lower_fn_decl(decl, Some(fn_def_id), true),
);
hir::ItemFn(
fn_decl,
this.lower_unsafety(unsafety),
this.lower_constness(constness),
abi,
generics,
body_id,
)
})
}
ItemKind::Mod(ref m) => hir::ItemMod(self.lower_mod(m)),
ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(self.lower_foreign_mod(nm)),
ItemKind::GlobalAsm(ref ga) => hir::ItemGlobalAsm(self.lower_global_asm(ga)),
ItemKind::Ty(ref t, ref generics) => hir::ItemTy(
self.lower_ty(t, ImplTraitContext::Disallowed),
self.lower_generics(generics, ImplTraitContext::Disallowed),
),
ItemKind::Enum(ref enum_definition, ref generics) => hir::ItemEnum(
hir::EnumDef {
variants: enum_definition
.variants
.iter()
.map(|x| self.lower_variant(x))
.collect(),
},
self.lower_generics(generics, ImplTraitContext::Disallowed),
),
ItemKind::Struct(ref struct_def, ref generics) => {
let struct_def = self.lower_variant_data(struct_def);
hir::ItemStruct(
struct_def,
self.lower_generics(generics, ImplTraitContext::Disallowed),
)
}
ItemKind::Union(ref vdata, ref generics) => {
let vdata = self.lower_variant_data(vdata);
hir::ItemUnion(
vdata,
self.lower_generics(generics, ImplTraitContext::Disallowed),
)
}
ItemKind::Impl(
unsafety,
polarity,
defaultness,
ref ast_generics,
ref trait_ref,
ref ty,
ref impl_items,
) => {
let def_id = self.resolver.definitions().local_def_id(id);
// Lower the "impl header" first. This ordering is important
// for in-band lifetimes! Consider `'a` here:
//
// impl Foo<'a> for u32 {
// fn method(&'a self) { .. }
// }
//
// Because we start by lowering the `Foo<'a> for u32`
// part, we will add `'a` to the list of generics on
// the impl. When we then encounter it later in the
// method, it will not be considered an in-band
// lifetime to be added, but rather a reference to a
// parent lifetime.
let (generics, (trait_ref, lowered_ty)) = self.add_in_band_defs(
ast_generics,
def_id,
AnonymousLifetimeMode::CreateParameter,
|this| {
let trait_ref = trait_ref.as_ref().map(|trait_ref| {
this.lower_trait_ref(trait_ref, ImplTraitContext::Disallowed)
});
if let Some(ref trait_ref) = trait_ref {
if let Def::Trait(def_id) = trait_ref.path.def {
this.trait_impls.entry(def_id).or_insert(vec![]).push(id);
}
}
let lowered_ty = this.lower_ty(ty, ImplTraitContext::Disallowed);
(trait_ref, lowered_ty)
},
);
let new_impl_items = self.with_in_scope_lifetime_defs(
ast_generics.params.iter().filter_map(|p| match p {
GenericParam::Lifetime(ld) => Some(ld),
_ => None,
}),
|this| {
impl_items
.iter()
.map(|item| this.lower_impl_item_ref(item))
.collect()
},
);
hir::ItemImpl(
self.lower_unsafety(unsafety),
self.lower_impl_polarity(polarity),
self.lower_defaultness(defaultness, true /* [1] */),
generics,
trait_ref,
lowered_ty,
new_impl_items,
)
}
ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref items) => {
let bounds = self.lower_bounds(bounds, ImplTraitContext::Disallowed);
let items = items
.iter()
.map(|item| self.lower_trait_item_ref(item))
.collect();
hir::ItemTrait(
self.lower_is_auto(is_auto),
self.lower_unsafety(unsafety),
self.lower_generics(generics, ImplTraitContext::Disallowed),
bounds,
items,
)
}
ItemKind::TraitAlias(ref generics, ref bounds) => hir::ItemTraitAlias(
self.lower_generics(generics, ImplTraitContext::Disallowed),
self.lower_bounds(bounds, ImplTraitContext::Disallowed),
),
ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
}
// [1] `defaultness.has_value()` is never called for an `impl`, always `true` in order to
// not cause an assertion failure inside the `lower_defaultness` function
}
fn lower_use_tree(
&mut self,
tree: &UseTree,
prefix: &Path,
id: NodeId,
vis: &mut hir::Visibility,
name: &mut Name,
attrs: &hir::HirVec<Attribute>,
) -> hir::Item_ {
let path = &tree.prefix;
match tree.kind {
UseTreeKind::Simple(rename) => {
*name = tree.ident().name;
// First apply the prefix to the path
let mut path = Path {
segments: prefix
.segments
.iter()
.chain(path.segments.iter())
.cloned()
.collect(),
span: path.span,
};
// Correctly resolve `self` imports
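                // (e.g. the `self` in `use foo::bar::{self};` refers to `bar`, so the
                // trailing `self` segment is dropped and `bar` becomes the binding
                // name unless an explicit rename was given)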
if path.segments.len() > 1
&& path.segments.last().unwrap().ident.name == keywords::SelfValue.name()
{
let _ = path.segments.pop();
if rename.is_none() {
*name = path.segments.last().unwrap().ident.name;
}
}
let path = P(self.lower_path(id, &path, ParamMode::Explicit));
hir::ItemUse(path, hir::UseKind::Single)
}
UseTreeKind::Glob => {
let path = P(self.lower_path(
id,
&Path {
segments: prefix
.segments
.iter()
.chain(path.segments.iter())
.cloned()
.collect(),
span: path.span,
},
ParamMode::Explicit,
));
hir::ItemUse(path, hir::UseKind::Glob)
}
UseTreeKind::Nested(ref trees) => {
let prefix = Path {
segments: prefix
.segments
.iter()
.chain(path.segments.iter())
.cloned()
.collect(),
span: prefix.span.to(path.span),
};
// Add all the nested PathListItems in the HIR
for &(ref use_tree, id) in trees {
self.allocate_hir_id_counter(id, &use_tree);
let LoweredNodeId {
node_id: new_id,
hir_id: new_hir_id,
} = self.lower_node_id(id);
let mut vis = vis.clone();
let mut name = name.clone();
let item =
self.lower_use_tree(use_tree, &prefix, new_id, &mut vis, &mut name, &attrs);
self.with_hir_id_owner(new_id, |this| {
let vis = match vis {
hir::Visibility::Public => hir::Visibility::Public,
hir::Visibility::Crate => hir::Visibility::Crate,
hir::Visibility::Inherited => hir::Visibility::Inherited,
hir::Visibility::Restricted { ref path, id: _ } => {
hir::Visibility::Restricted {
path: path.clone(),
// We are allocating a new NodeId here
id: this.next_id().node_id,
}
}
};
this.items.insert(
new_id,
hir::Item {
id: new_id,
hir_id: new_hir_id,
                                name,
attrs: attrs.clone(),
node: item,
vis,
span: use_tree.span,
},
);
});
}
// Privatize the degenerate import base, used only to check
// the stability of `use a::{};`, to avoid it showing up as
// a re-export by accident when `pub`, e.g. in documentation.
let path = P(self.lower_path(id, &prefix, ParamMode::Explicit));
*vis = hir::Inherited;
hir::ItemUse(path, hir::UseKind::ListStem)
}
}
}
fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem {
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(i.id);
let trait_item_def_id = self.resolver.definitions().local_def_id(node_id);
let (generics, node) = match i.node {
TraitItemKind::Const(ref ty, ref default) => (
self.lower_generics(&i.generics, ImplTraitContext::Disallowed),
hir::TraitItemKind::Const(
self.lower_ty(ty, ImplTraitContext::Disallowed),
default
.as_ref()
.map(|x| self.lower_body(None, |this| this.lower_expr(x))),
),
),
TraitItemKind::Method(ref sig, None) => {
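                // A required method: there is no body, so only the argument names are
                // lowered alongside the signature.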
let names = self.lower_fn_args_to_names(&sig.decl);
self.add_in_band_defs(
&i.generics,
trait_item_def_id,
AnonymousLifetimeMode::PassThrough,
|this| {
hir::TraitItemKind::Method(
this.lower_method_sig(sig, trait_item_def_id, false),
hir::TraitMethod::Required(names),
)
},
)
}
TraitItemKind::Method(ref sig, Some(ref body)) => {
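                // A provided method: the default body is lowered as well.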
let body_id = self.lower_body(Some(&sig.decl), |this| {
let body = this.lower_block(body, false);
this.expr_block(body, ThinVec::new())
});
self.add_in_band_defs(
&i.generics,
trait_item_def_id,
AnonymousLifetimeMode::PassThrough,
|this| {
hir::TraitItemKind::Method(
this.lower_method_sig(sig, trait_item_def_id, false),
hir::TraitMethod::Provided(body_id),
)
},
)
}
TraitItemKind::Type(ref bounds, ref default) => (
self.lower_generics(&i.generics, ImplTraitContext::Disallowed),
hir::TraitItemKind::Type(
self.lower_bounds(bounds, ImplTraitContext::Disallowed),
default
.as_ref()
.map(|x| self.lower_ty(x, ImplTraitContext::Disallowed)),
),
),
TraitItemKind::Macro(..) => panic!("Shouldn't exist any more"),
};
hir::TraitItem {
id: node_id,
hir_id,
name: self.lower_ident(i.ident),
attrs: self.lower_attrs(&i.attrs),
generics,
node,
span: i.span,
}
}
fn lower_trait_item_ref(&mut self, i: &TraitItem) -> hir::TraitItemRef {
let (kind, has_default) = match i.node {
TraitItemKind::Const(_, ref default) => {
(hir::AssociatedItemKind::Const, default.is_some())
}
TraitItemKind::Type(_, ref default) => {
(hir::AssociatedItemKind::Type, default.is_some())
}
TraitItemKind::Method(ref sig, ref default) => (
hir::AssociatedItemKind::Method {
has_self: sig.decl.has_self(),
},
default.is_some(),
),
TraitItemKind::Macro(..) => unimplemented!(),
};
hir::TraitItemRef {
id: hir::TraitItemId { node_id: i.id },
name: self.lower_ident(i.ident),
span: i.span,
defaultness: self.lower_defaultness(Defaultness::Default, has_default),
kind,
}
}
fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem {
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(i.id);
let impl_item_def_id = self.resolver.definitions().local_def_id(node_id);
let (generics, node) = match i.node {
ImplItemKind::Const(ref ty, ref expr) => {
let body_id = self.lower_body(None, |this| this.lower_expr(expr));
(
self.lower_generics(&i.generics, ImplTraitContext::Disallowed),
hir::ImplItemKind::Const(
self.lower_ty(ty, ImplTraitContext::Disallowed),
body_id,
),
)
}
ImplItemKind::Method(ref sig, ref body) => {
let body_id = self.lower_body(Some(&sig.decl), |this| {
let body = this.lower_block(body, false);
this.expr_block(body, ThinVec::new())
});
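                // Return-position `impl Trait` is only permitted for methods of
                // inherent impls, not for methods inside a trait impl.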
let impl_trait_return_allow = !self.is_in_trait_impl;
self.add_in_band_defs(
&i.generics,
impl_item_def_id,
AnonymousLifetimeMode::PassThrough,
|this| {
hir::ImplItemKind::Method(
this.lower_method_sig(
sig,
impl_item_def_id,
impl_trait_return_allow,
),
body_id,
)
},
)
}
ImplItemKind::Type(ref ty) => (
self.lower_generics(&i.generics, ImplTraitContext::Disallowed),
hir::ImplItemKind::Type(self.lower_ty(ty, ImplTraitContext::Disallowed)),
),
ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"),
};
hir::ImplItem {
id: node_id,
hir_id,
name: self.lower_ident(i.ident),
attrs: self.lower_attrs(&i.attrs),
generics,
vis: self.lower_visibility(&i.vis, None),
defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
node,
span: i.span,
}
// [1] since `default impl` is not yet implemented, this is always true in impls
}
fn lower_impl_item_ref(&mut self, i: &ImplItem) -> hir::ImplItemRef {
hir::ImplItemRef {
id: hir::ImplItemId { node_id: i.id },
name: self.lower_ident(i.ident),
span: i.span,
vis: self.lower_visibility(&i.vis, Some(i.id)),
defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
kind: match i.node {
ImplItemKind::Const(..) => hir::AssociatedItemKind::Const,
ImplItemKind::Type(..) => hir::AssociatedItemKind::Type,
ImplItemKind::Method(ref sig, _) => hir::AssociatedItemKind::Method {
has_self: sig.decl.has_self(),
},
ImplItemKind::Macro(..) => unimplemented!(),
},
}
// [1] since `default impl` is not yet implemented, this is always true in impls
}
fn lower_mod(&mut self, m: &Mod) -> hir::Mod {
hir::Mod {
inner: m.inner,
item_ids: m.items.iter().flat_map(|x| self.lower_item_id(x)).collect(),
}
}
fn lower_item_id(&mut self, i: &Item) -> SmallVector<hir::ItemId> {
match i.node {
ItemKind::Use(ref use_tree) => {
let mut vec = SmallVector::one(hir::ItemId { id: i.id });
self.lower_item_id_use_tree(use_tree, &mut vec);
return vec;
}
ItemKind::MacroDef(..) => return SmallVector::new(),
_ => {}
}
SmallVector::one(hir::ItemId { id: i.id })
}
fn lower_item_id_use_tree(&self, tree: &UseTree, vec: &mut SmallVector<hir::ItemId>) {
match tree.kind {
UseTreeKind::Nested(ref nested_vec) => for &(ref nested, id) in nested_vec {
vec.push(hir::ItemId { id });
self.lower_item_id_use_tree(nested, vec);
},
UseTreeKind::Glob => {}
UseTreeKind::Simple(..) => {}
}
}
pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item> {
let mut name = i.ident.name;
let mut vis = self.lower_visibility(&i.vis, None);
let attrs = self.lower_attrs(&i.attrs);
if let ItemKind::MacroDef(ref def) = i.node {
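            // Macro definitions are never lowered as ordinary items; `macro` (2.0)
            // definitions and `#[macro_export] macro_rules!` macros are recorded as
            // exported macros, all other legacy macros are dropped here.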
if !def.legacy || attr::contains_name(&i.attrs, "macro_export") {
let body = self.lower_token_stream(def.stream());
self.exported_macros.push(hir::MacroDef {
name,
vis,
attrs,
id: i.id,
span: i.span,
body,
legacy: def.legacy,
});
}
return None;
}
let node = self.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node);
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(i.id);
Some(hir::Item {
id: node_id,
hir_id,
name,
attrs,
node,
vis,
span: i.span,
})
}
fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem {
let node_id = self.lower_node_id(i.id).node_id;
let def_id = self.resolver.definitions().local_def_id(node_id);
hir::ForeignItem {
id: node_id,
name: i.ident.name,
attrs: self.lower_attrs(&i.attrs),
node: match i.node {
ForeignItemKind::Fn(ref fdec, ref generics) => {
let (generics, (fn_dec, fn_args)) = self.add_in_band_defs(
generics,
def_id,
AnonymousLifetimeMode::PassThrough,
|this| {
(
// Disallow impl Trait in foreign items
this.lower_fn_decl(fdec, None, false),
this.lower_fn_args_to_names(fdec),
)
},
);
hir::ForeignItemFn(fn_dec, fn_args, generics)
}
ForeignItemKind::Static(ref t, m) => {
hir::ForeignItemStatic(self.lower_ty(t, ImplTraitContext::Disallowed), m)
}
ForeignItemKind::Ty => hir::ForeignItemType,
ForeignItemKind::Macro(_) => panic!("shouldn't exist here"),
},
vis: self.lower_visibility(&i.vis, None),
span: i.span,
}
}
fn lower_method_sig(
&mut self,
sig: &MethodSig,
fn_def_id: DefId,
impl_trait_return_allow: bool,
) -> hir::MethodSig {
hir::MethodSig {
abi: sig.abi,
unsafety: self.lower_unsafety(sig.unsafety),
constness: self.lower_constness(sig.constness),
decl: self.lower_fn_decl(&sig.decl, Some(fn_def_id), impl_trait_return_allow),
}
}
fn lower_is_auto(&mut self, a: IsAuto) -> hir::IsAuto {
match a {
IsAuto::Yes => hir::IsAuto::Yes,
IsAuto::No => hir::IsAuto::No,
}
}
fn lower_unsafety(&mut self, u: Unsafety) -> hir::Unsafety {
match u {
Unsafety::Unsafe => hir::Unsafety::Unsafe,
Unsafety::Normal => hir::Unsafety::Normal,
}
}
fn lower_constness(&mut self, c: Spanned<Constness>) -> hir::Constness {
match c.node {
Constness::Const => hir::Constness::Const,
Constness::NotConst => hir::Constness::NotConst,
}
}
fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
match u {
UnOp::Deref => hir::UnDeref,
UnOp::Not => hir::UnNot,
UnOp::Neg => hir::UnNeg,
}
}
fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
Spanned {
node: match b.node {
BinOpKind::Add => hir::BiAdd,
BinOpKind::Sub => hir::BiSub,
BinOpKind::Mul => hir::BiMul,
BinOpKind::Div => hir::BiDiv,
BinOpKind::Rem => hir::BiRem,
BinOpKind::And => hir::BiAnd,
BinOpKind::Or => hir::BiOr,
BinOpKind::BitXor => hir::BiBitXor,
BinOpKind::BitAnd => hir::BiBitAnd,
BinOpKind::BitOr => hir::BiBitOr,
BinOpKind::Shl => hir::BiShl,
BinOpKind::Shr => hir::BiShr,
BinOpKind::Eq => hir::BiEq,
BinOpKind::Lt => hir::BiLt,
BinOpKind::Le => hir::BiLe,
BinOpKind::Ne => hir::BiNe,
BinOpKind::Ge => hir::BiGe,
BinOpKind::Gt => hir::BiGt,
},
span: b.span,
}
}
fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
let node = match p.node {
PatKind::Wild => hir::PatKind::Wild,
PatKind::Ident(ref binding_mode, ident, ref sub) => {
match self.resolver.get_resolution(p.id).map(|d| d.base_def()) {
// `None` can occur in body-less function signatures
def @ None | def @ Some(Def::Local(_)) => {
let canonical_id = match def {
Some(Def::Local(id)) => id,
_ => p.id,
};
hir::PatKind::Binding(
self.lower_binding_mode(binding_mode),
canonical_id,
respan(ident.span, ident.name),
sub.as_ref().map(|x| self.lower_pat(x)),
)
}
Some(def) => hir::PatKind::Path(hir::QPath::Resolved(
None,
P(hir::Path {
span: ident.span,
def,
segments: hir_vec![hir::PathSegment::from_name(ident.name)],
}),
)),
}
}
PatKind::Lit(ref e) => hir::PatKind::Lit(P(self.lower_expr(e))),
PatKind::TupleStruct(ref path, ref pats, ddpos) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::Disallowed,
);
hir::PatKind::TupleStruct(
qpath,
pats.iter().map(|x| self.lower_pat(x)).collect(),
ddpos,
)
}
PatKind::Path(ref qself, ref path) => hir::PatKind::Path(self.lower_qpath(
p.id,
qself,
path,
ParamMode::Optional,
ImplTraitContext::Disallowed,
)),
PatKind::Struct(ref path, ref fields, etc) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::Disallowed,
);
let fs = fields
.iter()
.map(|f| Spanned {
span: f.span,
node: hir::FieldPat {
id: self.next_id().node_id,
ident: f.node.ident,
pat: self.lower_pat(&f.node.pat),
is_shorthand: f.node.is_shorthand,
},
})
.collect();
hir::PatKind::Struct(qpath, fs, etc)
}
PatKind::Tuple(ref elts, ddpos) => {
hir::PatKind::Tuple(elts.iter().map(|x| self.lower_pat(x)).collect(), ddpos)
}
PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
PatKind::Ref(ref inner, mutbl) => {
hir::PatKind::Ref(self.lower_pat(inner), self.lower_mutability(mutbl))
}
PatKind::Range(ref e1, ref e2, ref end) => hir::PatKind::Range(
P(self.lower_expr(e1)),
P(self.lower_expr(e2)),
self.lower_range_end(end),
),
PatKind::Slice(ref before, ref slice, ref after) => hir::PatKind::Slice(
before.iter().map(|x| self.lower_pat(x)).collect(),
slice.as_ref().map(|x| self.lower_pat(x)),
after.iter().map(|x| self.lower_pat(x)).collect(),
),
PatKind::Paren(ref inner) => return self.lower_pat(inner),
PatKind::Mac(_) => panic!("Shouldn't exist here"),
};
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(p.id);
P(hir::Pat {
id: node_id,
hir_id,
node,
span: p.span,
})
}
fn lower_range_end(&mut self, e: &RangeEnd) -> hir::RangeEnd {
match *e {
RangeEnd::Included(_) => hir::RangeEnd::Included,
RangeEnd::Excluded => hir::RangeEnd::Excluded,
}
}
fn lower_anon_const(&mut self, c: &AnonConst) -> hir::AnonConst {
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(c.id);
hir::AnonConst {
id: node_id,
hir_id,
body: self.lower_body(None, |this| this.lower_expr(&c.value)),
}
}
fn lower_expr(&mut self, e: &Expr) -> hir::Expr {
let kind = match e.node {
ExprKind::Box(ref inner) => hir::ExprBox(P(self.lower_expr(inner))),
ExprKind::ObsoleteInPlace(..) => {
self.sess.abort_if_errors();
span_bug!(e.span, "encountered ObsoleteInPlace expr during lowering");
}
ExprKind::Array(ref exprs) => {
hir::ExprArray(exprs.iter().map(|x| self.lower_expr(x)).collect())
}
ExprKind::Repeat(ref expr, ref count) => {
let expr = P(self.lower_expr(expr));
let count = self.lower_anon_const(count);
hir::ExprRepeat(expr, count)
}
ExprKind::Tup(ref elts) => {
hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect())
}
ExprKind::Call(ref f, ref args) => {
let f = P(self.lower_expr(f));
hir::ExprCall(f, args.iter().map(|x| self.lower_expr(x)).collect())
}
ExprKind::MethodCall(ref seg, ref args) => {
let hir_seg = self.lower_path_segment(
e.span,
seg,
ParamMode::Optional,
0,
ParenthesizedGenericArgs::Err,
ImplTraitContext::Disallowed,
);
let args = args.iter().map(|x| self.lower_expr(x)).collect();
hir::ExprMethodCall(hir_seg, seg.ident.span, args)
}
ExprKind::Binary(binop, ref lhs, ref rhs) => {
let binop = self.lower_binop(binop);
let lhs = P(self.lower_expr(lhs));
let rhs = P(self.lower_expr(rhs));
hir::ExprBinary(binop, lhs, rhs)
}
ExprKind::Unary(op, ref ohs) => {
let op = self.lower_unop(op);
let ohs = P(self.lower_expr(ohs));
hir::ExprUnary(op, ohs)
}
ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())),
ExprKind::Cast(ref expr, ref ty) => {
let expr = P(self.lower_expr(expr));
hir::ExprCast(expr, self.lower_ty(ty, ImplTraitContext::Disallowed))
}
ExprKind::Type(ref expr, ref ty) => {
let expr = P(self.lower_expr(expr));
hir::ExprType(expr, self.lower_ty(ty, ImplTraitContext::Disallowed))
}
ExprKind::AddrOf(m, ref ohs) => {
let m = self.lower_mutability(m);
let ohs = P(self.lower_expr(ohs));
hir::ExprAddrOf(m, ohs)
}
// More complicated than you might expect because the else branch
// might be `if let`.
ExprKind::If(ref cond, ref blk, ref else_opt) => {
let else_opt = else_opt.as_ref().map(|els| {
match els.node {
ExprKind::IfLet(..) => {
// wrap the if-let expr in a block
let span = els.span;
let els = P(self.lower_expr(els));
let LoweredNodeId { node_id, hir_id } = self.next_id();
let blk = P(hir::Block {
stmts: hir_vec![],
expr: Some(els),
id: node_id,
hir_id,
rules: hir::DefaultBlock,
span,
targeted_by_break: false,
recovered: blk.recovered,
});
P(self.expr_block(blk, ThinVec::new()))
}
_ => P(self.lower_expr(els)),
}
});
let then_blk = self.lower_block(blk, false);
let then_expr = self.expr_block(then_blk, ThinVec::new());
hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt)
}
ExprKind::While(ref cond, ref body, opt_label) => self.with_loop_scope(e.id, |this| {
hir::ExprWhile(
this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
this.lower_block(body, false),
this.lower_label(opt_label),
)
}),
ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
hir::ExprLoop(
this.lower_block(body, false),
this.lower_label(opt_label),
hir::LoopSource::Loop,
)
}),
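            // Desugar `catch { ... }` blocks: the block's tail expression (or `()` if
            // there is none) is wrapped in `Try::from_ok(...)`, mirroring the `?`
            // desugaring further below.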
ExprKind::Catch(ref body) => {
self.with_catch_scope(body.id, |this| {
let unstable_span =
this.allow_internal_unstable(CompilerDesugaringKind::Catch, body.span);
let mut block = this.lower_block(body, true).into_inner();
let tail = block.expr.take().map_or_else(
|| {
let LoweredNodeId { node_id, hir_id } = this.next_id();
let span = this.sess.codemap().end_point(unstable_span);
hir::Expr {
id: node_id,
span,
node: hir::ExprTup(hir_vec![]),
attrs: ThinVec::new(),
hir_id,
}
},
|x: P<hir::Expr>| x.into_inner(),
);
block.expr = Some(this.wrap_in_try_constructor(
"from_ok", tail, unstable_span));
hir::ExprBlock(P(block), None)
})
}
ExprKind::Match(ref expr, ref arms) => hir::ExprMatch(
P(self.lower_expr(expr)),
arms.iter().map(|x| self.lower_arm(x)).collect(),
hir::MatchSource::Normal,
),
ExprKind::Closure(capture_clause, movability, ref decl, ref body, fn_decl_span) => {
self.with_new_scopes(|this| {
let mut is_generator = false;
let body_id = this.lower_body(Some(decl), |this| {
let e = this.lower_expr(body);
is_generator = this.is_generator;
e
});
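                    // A body containing `yield` makes this a generator: generators may
                    // not take explicit arguments (E0628), and only generators may be
                    // declared `static` (E0906 otherwise).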
let generator_option = if is_generator {
if !decl.inputs.is_empty() {
span_err!(
this.sess,
fn_decl_span,
E0628,
"generators cannot have explicit arguments"
);
this.sess.abort_if_errors();
}
Some(match movability {
Movability::Movable => hir::GeneratorMovability::Movable,
Movability::Static => hir::GeneratorMovability::Static,
})
} else {
if movability == Movability::Static {
span_err!(
this.sess,
fn_decl_span,
E0906,
"closures cannot be static"
);
}
None
};
hir::ExprClosure(
this.lower_capture_clause(capture_clause),
this.lower_fn_decl(decl, None, false),
body_id,
fn_decl_span,
generator_option,
)
})
}
ExprKind::Block(ref blk, opt_label) => {
hir::ExprBlock(self.lower_block(blk,
opt_label.is_some()),
self.lower_label(opt_label))
}
ExprKind::Assign(ref el, ref er) => {
hir::ExprAssign(P(self.lower_expr(el)), P(self.lower_expr(er)))
}
ExprKind::AssignOp(op, ref el, ref er) => hir::ExprAssignOp(
self.lower_binop(op),
P(self.lower_expr(el)),
P(self.lower_expr(er)),
),
ExprKind::Field(ref el, ident) => hir::ExprField(P(self.lower_expr(el)), ident),
ExprKind::Index(ref el, ref er) => {
hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er)))
}
// Desugar `<start>..=<end>` to `std::ops::RangeInclusive::new(<start>, <end>)`
ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
// FIXME: Use e.span directly after RangeInclusive::new() is stabilized in stage0.
let span = self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
let id = self.next_id();
let e1 = self.lower_expr(e1);
let e2 = self.lower_expr(e2);
let ty_path = P(self.std_path(span, &["ops", "RangeInclusive"], false));
let ty = self.ty_path(id, span, hir::QPath::Resolved(None, ty_path));
let new_seg = P(hir::PathSegment::from_name(Symbol::intern("new")));
let new_path = hir::QPath::TypeRelative(ty, new_seg);
let new = P(self.expr(span, hir::ExprPath(new_path), ThinVec::new()));
hir::ExprCall(new, hir_vec![e1, e2])
}
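            // Desugar the remaining range forms into their `std::ops` struct literals,
            // e.g. `a..b` becomes `ops::Range { start: a, end: b }` and `..` becomes
            // `ops::RangeFull`.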
ExprKind::Range(ref e1, ref e2, lims) => {
use syntax::ast::RangeLimits::*;
let path = match (e1, e2, lims) {
(&None, &None, HalfOpen) => "RangeFull",
(&Some(..), &None, HalfOpen) => "RangeFrom",
(&None, &Some(..), HalfOpen) => "RangeTo",
(&Some(..), &Some(..), HalfOpen) => "Range",
(&None, &Some(..), Closed) => "RangeToInclusive",
(&Some(..), &Some(..), Closed) => unreachable!(),
(_, &None, Closed) => self.diagnostic()
.span_fatal(e.span, "inclusive range with no end")
.raise(),
};
let fields = e1.iter()
.map(|e| ("start", e))
.chain(e2.iter().map(|e| ("end", e)))
.map(|(s, e)| {
let expr = P(self.lower_expr(&e));
let unstable_span =
self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
let ident = Ident::new(Symbol::intern(s), unstable_span);
self.field(ident, expr, unstable_span)
})
.collect::<P<[hir::Field]>>();
let is_unit = fields.is_empty();
let unstable_span =
self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
let struct_path = iter::once("ops")
.chain(iter::once(path))
.collect::<Vec<_>>();
let struct_path = self.std_path(unstable_span, &struct_path, is_unit);
let struct_path = hir::QPath::Resolved(None, P(struct_path));
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
return hir::Expr {
id: node_id,
hir_id,
node: if is_unit {
hir::ExprPath(struct_path)
} else {
hir::ExprStruct(struct_path, fields, None)
},
span: unstable_span,
attrs: e.attrs.clone(),
};
}
ExprKind::Path(ref qself, ref path) => hir::ExprPath(self.lower_qpath(
e.id,
qself,
path,
ParamMode::Optional,
ImplTraitContext::Disallowed,
)),
ExprKind::Break(opt_label, ref opt_expr) => {
let destination = if self.is_in_loop_condition && opt_label.is_none() {
hir::Destination {
label: None,
target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into(),
}
} else {
self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
};
hir::ExprBreak(
destination,
opt_expr.as_ref().map(|x| P(self.lower_expr(x))),
)
}
ExprKind::Continue(opt_label) => {
hir::ExprAgain(if self.is_in_loop_condition && opt_label.is_none() {
hir::Destination {
label: None,
target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into(),
}
} else {
self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
})
}
ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
ExprKind::InlineAsm(ref asm) => {
let hir_asm = hir::InlineAsm {
inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
outputs: asm.outputs
.iter()
.map(|out| hir::InlineAsmOutput {
constraint: out.constraint.clone(),
is_rw: out.is_rw,
is_indirect: out.is_indirect,
})
.collect(),
asm: asm.asm.clone(),
asm_str_style: asm.asm_str_style,
clobbers: asm.clobbers.clone().into(),
volatile: asm.volatile,
alignstack: asm.alignstack,
dialect: asm.dialect,
ctxt: asm.ctxt,
};
let outputs = asm.outputs
.iter()
.map(|out| self.lower_expr(&out.expr))
.collect();
let inputs = asm.inputs
.iter()
.map(|&(_, ref input)| self.lower_expr(input))
.collect();
hir::ExprInlineAsm(P(hir_asm), outputs, inputs)
}
ExprKind::Struct(ref path, ref fields, ref maybe_expr) => hir::ExprStruct(
self.lower_qpath(
e.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::Disallowed,
),
fields.iter().map(|x| self.lower_field(x)).collect(),
maybe_expr.as_ref().map(|x| P(self.lower_expr(x))),
),
ExprKind::Paren(ref ex) => {
let mut ex = self.lower_expr(ex);
// include parens in span, but only if it is a super-span.
if e.span.contains(ex.span) {
ex.span = e.span;
}
// merge attributes into the inner expression.
let mut attrs = e.attrs.clone();
attrs.extend::<Vec<_>>(ex.attrs.into());
ex.attrs = attrs;
return ex;
}
ExprKind::Yield(ref opt_expr) => {
self.is_generator = true;
let expr = opt_expr
.as_ref()
.map(|x| self.lower_expr(x))
.unwrap_or_else(|| self.expr(e.span, hir::ExprTup(hir_vec![]), ThinVec::new()));
hir::ExprYield(P(expr))
}
// Desugar ExprIfLet
// From: `if let <pat> = <sub_expr> <body> [<else_opt>]`
ExprKind::IfLet(ref pats, ref sub_expr, ref body, ref else_opt) => {
// to:
//
// match <sub_expr> {
// <pat> => <body>,
// _ => [<else_opt> | ()]
// }
let mut arms = vec![];
// `<pat> => <body>`
{
let body = self.lower_block(body, false);
let body_expr = P(self.expr_block(body, ThinVec::new()));
let pats = pats.iter().map(|pat| self.lower_pat(pat)).collect();
arms.push(self.arm(pats, body_expr));
}
// _ => [<else_opt>|()]
{
let wildcard_arm: Option<&Expr> = else_opt.as_ref().map(|p| &**p);
let wildcard_pattern = self.pat_wild(e.span);
let body = if let Some(else_expr) = wildcard_arm {
P(self.lower_expr(else_expr))
} else {
self.expr_tuple(e.span, hir_vec![])
};
arms.push(self.arm(hir_vec![wildcard_pattern], body));
}
let contains_else_clause = else_opt.is_some();
let sub_expr = P(self.lower_expr(sub_expr));
hir::ExprMatch(
sub_expr,
arms.into(),
hir::MatchSource::IfLetDesugar {
contains_else_clause,
},
)
}
// Desugar ExprWhileLet
// From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
ExprKind::WhileLet(ref pats, ref sub_expr, ref body, opt_label) => {
// to:
//
// [opt_ident]: loop {
// match <sub_expr> {
// <pat> => <body>,
// _ => break
// }
// }
// Note that the block AND the condition are evaluated in the loop scope.
// This is done to allow `break` from inside the condition of the loop.
let (body, break_expr, sub_expr) = self.with_loop_scope(e.id, |this| {
(
this.lower_block(body, false),
this.expr_break(e.span, ThinVec::new()),
this.with_loop_condition_scope(|this| P(this.lower_expr(sub_expr))),
)
});
// `<pat> => <body>`
let pat_arm = {
let body_expr = P(self.expr_block(body, ThinVec::new()));
let pats = pats.iter().map(|pat| self.lower_pat(pat)).collect();
self.arm(pats, body_expr)
};
// `_ => break`
let break_arm = {
let pat_under = self.pat_wild(e.span);
self.arm(hir_vec![pat_under], break_expr)
};
// `match <sub_expr> { ... }`
let arms = hir_vec![pat_arm, break_arm];
let match_expr = self.expr(
sub_expr.span,
hir::ExprMatch(sub_expr, arms, hir::MatchSource::WhileLetDesugar),
ThinVec::new(),
);
// `[opt_ident]: loop { ... }`
let loop_block = P(self.block_expr(P(match_expr)));
let loop_expr = hir::ExprLoop(
loop_block,
self.lower_label(opt_label),
hir::LoopSource::WhileLet,
);
// add attributes to the outer returned expr node
loop_expr
}
// Desugar ExprForLoop
// From: `[opt_ident]: for <pat> in <head> <body>`
ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
// to:
//
// {
// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
// mut iter => {
// [opt_ident]: loop {
// let mut __next;
// match ::std::iter::Iterator::next(&mut iter) {
// ::std::option::Option::Some(val) => __next = val,
// ::std::option::Option::None => break
// };
// let <pat> = __next;
// StmtExpr(<body>);
// }
// }
// };
// result
// }
// expand <head>
let head = self.lower_expr(head);
let head_sp = head.span;
let iter = self.str_to_ident("iter");
let next_ident = self.str_to_ident("__next");
let next_pat = self.pat_ident_binding_mode(
pat.span,
next_ident,
hir::BindingAnnotation::Mutable,
);
// `::std::option::Option::Some(val) => next = val`
let pat_arm = {
let val_ident = self.str_to_ident("val");
let val_pat = self.pat_ident(pat.span, val_ident);
let val_expr = P(self.expr_ident(pat.span, val_ident, val_pat.id));
let next_expr = P(self.expr_ident(pat.span, next_ident, next_pat.id));
let assign = P(self.expr(
pat.span,
hir::ExprAssign(next_expr, val_expr),
ThinVec::new(),
));
let some_pat = self.pat_some(pat.span, val_pat);
self.arm(hir_vec![some_pat], assign)
};
// `::std::option::Option::None => break`
let break_arm = {
let break_expr =
self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
let pat = self.pat_none(e.span);
self.arm(hir_vec![pat], break_expr)
};
// `mut iter`
let iter_pat =
self.pat_ident_binding_mode(head_sp, iter, hir::BindingAnnotation::Mutable);
// `match ::std::iter::Iterator::next(&mut iter) { ... }`
let match_expr = {
let iter = P(self.expr_ident(head_sp, iter, iter_pat.id));
let ref_mut_iter = self.expr_mut_addr_of(head_sp, iter);
let next_path = &["iter", "Iterator", "next"];
let next_path = P(self.expr_std_path(head_sp, next_path, ThinVec::new()));
let next_expr = P(self.expr_call(head_sp, next_path, hir_vec![ref_mut_iter]));
let arms = hir_vec![pat_arm, break_arm];
P(self.expr(
head_sp,
hir::ExprMatch(next_expr, arms, hir::MatchSource::ForLoopDesugar),
ThinVec::new(),
))
};
let match_stmt = respan(head_sp, hir::StmtExpr(match_expr, self.next_id().node_id));
let next_expr = P(self.expr_ident(head_sp, next_ident, next_pat.id));
// `let mut __next`
let next_let =
self.stmt_let_pat(head_sp, None, next_pat, hir::LocalSource::ForLoopDesugar);
// `let <pat> = __next`
let pat = self.lower_pat(pat);
let pat_let = self.stmt_let_pat(
head_sp,
Some(next_expr),
pat,
hir::LocalSource::ForLoopDesugar,
);
let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
let body_expr = P(self.expr_block(body_block, ThinVec::new()));
let body_stmt = respan(body.span, hir::StmtExpr(body_expr, self.next_id().node_id));
let loop_block = P(self.block_all(
e.span,
hir_vec![next_let, match_stmt, pat_let, body_stmt],
None,
));
// `[opt_ident]: loop { ... }`
let loop_expr = hir::ExprLoop(
loop_block,
self.lower_label(opt_label),
hir::LoopSource::ForLoop,
);
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
let loop_expr = P(hir::Expr {
id: node_id,
hir_id,
node: loop_expr,
span: e.span,
attrs: ThinVec::new(),
});
// `mut iter => { ... }`
let iter_arm = self.arm(hir_vec![iter_pat], loop_expr);
// `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
let into_iter_expr = {
let into_iter_path = &["iter", "IntoIterator", "into_iter"];
let into_iter = P(self.expr_std_path(head_sp, into_iter_path, ThinVec::new()));
P(self.expr_call(head_sp, into_iter, hir_vec![head]))
};
let match_expr = P(self.expr_match(
head_sp,
into_iter_expr,
hir_vec![iter_arm],
hir::MatchSource::ForLoopDesugar,
));
// `{ let _result = ...; _result }`
// underscore prevents an unused_variables lint if the head diverges
let result_ident = self.str_to_ident("_result");
let (let_stmt, let_stmt_binding) =
self.stmt_let(e.span, false, result_ident, match_expr);
let result = P(self.expr_ident(e.span, result_ident, let_stmt_binding));
let block = P(self.block_all(e.span, hir_vec![let_stmt], Some(result)));
// add the attributes to the outer returned expr node
return self.expr_block(block, e.attrs.clone());
}
// Desugar ExprKind::Try
// From: `<expr>?`
ExprKind::Try(ref sub_expr) => {
// to:
//
// match Try::into_result(<expr>) {
// Ok(val) => #[allow(unreachable_code)] val,
// Err(err) => #[allow(unreachable_code)]
// // If there is an enclosing `catch {...}`
// break 'catch_target Try::from_error(From::from(err)),
// // Otherwise
// return Try::from_error(From::from(err)),
// }
let unstable_span =
self.allow_internal_unstable(CompilerDesugaringKind::QuestionMark, e.span);
// Try::into_result(<expr>)
let discr = {
// expand <expr>
let sub_expr = self.lower_expr(sub_expr);
let path = &["ops", "Try", "into_result"];
let path = P(self.expr_std_path(unstable_span, path, ThinVec::new()));
P(self.expr_call(e.span, path, hir_vec![sub_expr]))
};
// #[allow(unreachable_code)]
let attr = {
// allow(unreachable_code)
let allow = {
let allow_ident = Ident::from_str("allow").with_span_pos(e.span);
let uc_ident = Ident::from_str("unreachable_code").with_span_pos(e.span);
let uc_nested = attr::mk_nested_word_item(uc_ident);
attr::mk_list_item(e.span, allow_ident, vec![uc_nested])
};
attr::mk_spanned_attr_outer(e.span, attr::mk_attr_id(), allow)
};
let attrs = vec![attr];
// Ok(val) => #[allow(unreachable_code)] val,
let ok_arm = {
let val_ident = self.str_to_ident("val");
let val_pat = self.pat_ident(e.span, val_ident);
let val_expr = P(self.expr_ident_with_attrs(
e.span,
val_ident,
val_pat.id,
ThinVec::from(attrs.clone()),
));
let ok_pat = self.pat_ok(e.span, val_pat);
self.arm(hir_vec![ok_pat], val_expr)
};
// Err(err) => #[allow(unreachable_code)]
// return Try::from_error(From::from(err)),
let err_arm = {
let err_ident = self.str_to_ident("err");
let err_local = self.pat_ident(e.span, err_ident);
let from_expr = {
let path = &["convert", "From", "from"];
let from = P(self.expr_std_path(e.span, path, ThinVec::new()));
let err_expr = self.expr_ident(e.span, err_ident, err_local.id);
self.expr_call(e.span, from, hir_vec![err_expr])
};
let from_err_expr =
self.wrap_in_try_constructor("from_error", from_expr, unstable_span);
let thin_attrs = ThinVec::from(attrs);
let catch_scope = self.catch_scopes.last().map(|x| *x);
let ret_expr = if let Some(catch_node) = catch_scope {
P(self.expr(
e.span,
hir::ExprBreak(
hir::Destination {
label: None,
target_id: Ok(catch_node),
},
Some(from_err_expr),
),
thin_attrs,
))
} else {
P(self.expr(e.span, hir::Expr_::ExprRet(Some(from_err_expr)), thin_attrs))
};
let err_pat = self.pat_err(e.span, err_local);
self.arm(hir_vec![err_pat], ret_expr)
};
hir::ExprMatch(
discr,
hir_vec![err_arm, ok_arm],
hir::MatchSource::TryDesugar,
)
}
ExprKind::Mac(_) => panic!("Shouldn't exist here"),
};
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
hir::Expr {
id: node_id,
hir_id,
node: kind,
span: e.span,
attrs: e.attrs.clone(),
}
}
fn lower_stmt(&mut self, s: &Stmt) -> SmallVector<hir::Stmt> {
SmallVector::one(match s.node {
StmtKind::Local(ref l) => Spanned {
node: hir::StmtDecl(
P(Spanned {
node: hir::DeclLocal(self.lower_local(l)),
span: s.span,
}),
self.lower_node_id(s.id).node_id,
),
span: s.span,
},
StmtKind::Item(ref it) => {
// Can only use the ID once.
let mut id = Some(s.id);
return self.lower_item_id(it)
.into_iter()
.map(|item_id| Spanned {
node: hir::StmtDecl(
P(Spanned {
node: hir::DeclItem(item_id),
span: s.span,
}),
id.take()
.map(|id| self.lower_node_id(id).node_id)
.unwrap_or_else(|| self.next_id().node_id),
),
span: s.span,
})
.collect();
}
StmtKind::Expr(ref e) => Spanned {
node: hir::StmtExpr(P(self.lower_expr(e)), self.lower_node_id(s.id).node_id),
span: s.span,
},
StmtKind::Semi(ref e) => Spanned {
node: hir::StmtSemi(P(self.lower_expr(e)), self.lower_node_id(s.id).node_id),
span: s.span,
},
StmtKind::Mac(..) => panic!("Shouldn't exist here"),
})
}
fn lower_capture_clause(&mut self, c: CaptureBy) -> hir::CaptureClause {
match c {
CaptureBy::Value => hir::CaptureByValue,
CaptureBy::Ref => hir::CaptureByRef,
}
}
/// If an `explicit_owner` is given, this method allocates the `HirId` in
/// the address space of that item instead of the item currently being
/// lowered. This can happen during `lower_impl_item_ref()` where we need to
/// lower a `Visibility` value although we haven't lowered the owning
/// `ImplItem` in question yet.
fn lower_visibility(
&mut self,
v: &Visibility,
explicit_owner: Option<NodeId>,
) -> hir::Visibility {
match v.node {
VisibilityKind::Public => hir::Public,
VisibilityKind::Crate(..) => hir::Visibility::Crate,
VisibilityKind::Restricted { ref path, id, .. } => hir::Visibility::Restricted {
path: P(self.lower_path(id, path, ParamMode::Explicit)),
id: if let Some(owner) = explicit_owner {
self.lower_node_id_with_owner(id, owner).node_id
} else {
self.lower_node_id(id).node_id
},
},
VisibilityKind::Inherited => hir::Inherited,
}
}
fn lower_defaultness(&mut self, d: Defaultness, has_value: bool) -> hir::Defaultness {
match d {
Defaultness::Default => hir::Defaultness::Default {
                has_value,
},
Defaultness::Final => {
assert!(has_value);
hir::Defaultness::Final
}
}
}
fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode {
match *b {
BlockCheckMode::Default => hir::DefaultBlock,
BlockCheckMode::Unsafe(u) => hir::UnsafeBlock(self.lower_unsafe_source(u)),
}
}
fn lower_binding_mode(&mut self, b: &BindingMode) -> hir::BindingAnnotation {
match *b {
BindingMode::ByValue(Mutability::Immutable) => hir::BindingAnnotation::Unannotated,
BindingMode::ByRef(Mutability::Immutable) => hir::BindingAnnotation::Ref,
BindingMode::ByValue(Mutability::Mutable) => hir::BindingAnnotation::Mutable,
BindingMode::ByRef(Mutability::Mutable) => hir::BindingAnnotation::RefMut,
}
}
fn lower_unsafe_source(&mut self, u: UnsafeSource) -> hir::UnsafeSource {
match u {
CompilerGenerated => hir::CompilerGenerated,
UserProvided => hir::UserProvided,
}
}
fn lower_impl_polarity(&mut self, i: ImplPolarity) -> hir::ImplPolarity {
match i {
ImplPolarity::Positive => hir::ImplPolarity::Positive,
ImplPolarity::Negative => hir::ImplPolarity::Negative,
}
}
fn lower_trait_bound_modifier(&mut self, f: TraitBoundModifier) -> hir::TraitBoundModifier {
match f {
TraitBoundModifier::None => hir::TraitBoundModifier::None,
TraitBoundModifier::Maybe => hir::TraitBoundModifier::Maybe,
}
}
// Helper methods for building HIR.
fn arm(&mut self, pats: hir::HirVec<P<hir::Pat>>, expr: P<hir::Expr>) -> hir::Arm {
hir::Arm {
attrs: hir_vec![],
pats,
guard: None,
body: expr,
}
}
fn field(&mut self, ident: Ident, expr: P<hir::Expr>, span: Span) -> hir::Field {
hir::Field {
id: self.next_id().node_id,
ident,
span,
expr,
is_shorthand: false,
}
}
fn expr_break(&mut self, span: Span, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
let expr_break = hir::ExprBreak(self.lower_loop_destination(None), None);
P(self.expr(span, expr_break, attrs))
}
fn expr_call(
&mut self,
span: Span,
e: P<hir::Expr>,
args: hir::HirVec<hir::Expr>,
) -> hir::Expr {
self.expr(span, hir::ExprCall(e, args), ThinVec::new())
}
fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr {
self.expr_ident_with_attrs(span, id, binding, ThinVec::new())
}
fn expr_ident_with_attrs(
&mut self,
span: Span,
id: Name,
binding: NodeId,
attrs: ThinVec<Attribute>,
) -> hir::Expr {
let expr_path = hir::ExprPath(hir::QPath::Resolved(
None,
P(hir::Path {
span,
def: Def::Local(binding),
segments: hir_vec![hir::PathSegment::from_name(id)],
}),
));
self.expr(span, expr_path, attrs)
}
fn expr_mut_addr_of(&mut self, span: Span, e: P<hir::Expr>) -> hir::Expr {
self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), ThinVec::new())
}
fn expr_std_path(
&mut self,
span: Span,
components: &[&str],
attrs: ThinVec<Attribute>,
) -> hir::Expr {
let path = self.std_path(span, components, true);
self.expr(
span,
hir::ExprPath(hir::QPath::Resolved(None, P(path))),
attrs,
)
}
fn expr_match(
&mut self,
span: Span,
arg: P<hir::Expr>,
arms: hir::HirVec<hir::Arm>,
source: hir::MatchSource,
) -> hir::Expr {
self.expr(span, hir::ExprMatch(arg, arms, source), ThinVec::new())
}
fn expr_block(&mut self, b: P<hir::Block>, attrs: ThinVec<Attribute>) -> hir::Expr {
self.expr(b.span, hir::ExprBlock(b, None), attrs)
}
fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec<hir::Expr>) -> P<hir::Expr> {
P(self.expr(sp, hir::ExprTup(exprs), ThinVec::new()))
}
fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec<Attribute>) -> hir::Expr {
let LoweredNodeId { node_id, hir_id } = self.next_id();
hir::Expr {
id: node_id,
hir_id,
node,
span,
attrs,
}
}
fn stmt_let_pat(
&mut self,
sp: Span,
ex: Option<P<hir::Expr>>,
pat: P<hir::Pat>,
source: hir::LocalSource,
) -> hir::Stmt {
let LoweredNodeId { node_id, hir_id } = self.next_id();
let local = P(hir::Local {
pat,
ty: None,
init: ex,
id: node_id,
hir_id,
span: sp,
attrs: ThinVec::new(),
source,
});
let decl = respan(sp, hir::DeclLocal(local));
respan(sp, hir::StmtDecl(P(decl), self.next_id().node_id))
}
fn stmt_let(
&mut self,
sp: Span,
mutbl: bool,
ident: Name,
ex: P<hir::Expr>,
) -> (hir::Stmt, NodeId) {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, hir::BindingAnnotation::Mutable)
} else {
self.pat_ident(sp, ident)
};
let pat_id = pat.id;
(
self.stmt_let_pat(sp, Some(ex), pat, hir::LocalSource::Normal),
pat_id,
)
}
fn block_expr(&mut self, expr: P<hir::Expr>) -> hir::Block {
self.block_all(expr.span, hir::HirVec::new(), Some(expr))
}
fn block_all(
&mut self,
span: Span,
stmts: hir::HirVec<hir::Stmt>,
expr: Option<P<hir::Expr>>,
) -> hir::Block {
let LoweredNodeId { node_id, hir_id } = self.next_id();
hir::Block {
stmts,
expr,
id: node_id,
hir_id,
rules: hir::DefaultBlock,
span,
targeted_by_break: false,
recovered: false,
}
}
fn pat_ok(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
self.pat_std_enum(span, &["result", "Result", "Ok"], hir_vec![pat])
}
fn pat_err(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
self.pat_std_enum(span, &["result", "Result", "Err"], hir_vec![pat])
}
fn pat_some(&mut self, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
self.pat_std_enum(span, &["option", "Option", "Some"], hir_vec![pat])
}
fn pat_none(&mut self, span: Span) -> P<hir::Pat> {
self.pat_std_enum(span, &["option", "Option", "None"], hir_vec![])
}
fn pat_std_enum(
&mut self,
span: Span,
components: &[&str],
subpats: hir::HirVec<P<hir::Pat>>,
) -> P<hir::Pat> {
let path = self.std_path(span, components, true);
let qpath = hir::QPath::Resolved(None, P(path));
let pt = if subpats.is_empty() {
hir::PatKind::Path(qpath)
} else {
hir::PatKind::TupleStruct(qpath, subpats, None)
};
self.pat(span, pt)
}
fn pat_ident(&mut self, span: Span, name: Name) -> P<hir::Pat> {
self.pat_ident_binding_mode(span, name, hir::BindingAnnotation::Unannotated)
}
fn pat_ident_binding_mode(
&mut self,
span: Span,
name: Name,
bm: hir::BindingAnnotation,
) -> P<hir::Pat> {
let LoweredNodeId { node_id, hir_id } = self.next_id();
P(hir::Pat {
id: node_id,
hir_id,
node: hir::PatKind::Binding(bm, node_id, Spanned { span, node: name }, None),
span,
})
}
fn pat_wild(&mut self, span: Span) -> P<hir::Pat> {
self.pat(span, hir::PatKind::Wild)
}
fn pat(&mut self, span: Span, pat: hir::PatKind) -> P<hir::Pat> {
let LoweredNodeId { node_id, hir_id } = self.next_id();
P(hir::Pat {
id: node_id,
hir_id,
node: pat,
span,
})
}
    /// Given the suffix `["b", "c", "d"]`, returns the path `::std::b::c::d` when
    /// the crate root is `std`, and `::core::b::c::d` otherwise.
    /// The path is also resolved according to `is_value`.
fn std_path(&mut self, span: Span, components: &[&str], is_value: bool) -> hir::Path {
self.resolver
.resolve_str_path(span, self.crate_root, components, is_value)
}
fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> P<hir::Ty> {
let mut id = id;
let node = match qpath {
hir::QPath::Resolved(None, path) => {
// Turn trait object paths into `TyTraitObject` instead.
if let Def::Trait(_) = path.def {
let principal = hir::PolyTraitRef {
bound_generic_params: hir::HirVec::new(),
trait_ref: hir::TraitRef {
path: path.and_then(|path| path),
ref_id: id.node_id,
},
span,
};
// The original ID is taken by the `PolyTraitRef`,
// so the `Ty` itself needs a different one.
id = self.next_id();
hir::TyTraitObject(hir_vec![principal], self.elided_dyn_bound(span))
} else {
hir::TyPath(hir::QPath::Resolved(None, path))
}
}
_ => hir::TyPath(qpath),
};
P(hir::Ty {
id: id.node_id,
hir_id: id.hir_id,
node,
span,
})
}
/// Invoked to create the lifetime argument for a type `&T`
/// with no explicit lifetime.
fn elided_ref_lifetime(&mut self, span: Span) -> hir::Lifetime {
match self.anonymous_lifetime_mode {
// Intercept when we are in an impl header and introduce an in-band lifetime.
// Hence `impl Foo for &u32` becomes `impl<'f> Foo for &'f u32` for some fresh
// `'f`.
AnonymousLifetimeMode::CreateParameter => {
let fresh_name = self.collect_fresh_in_band_lifetime(span);
hir::Lifetime {
id: self.next_id().node_id,
span,
name: fresh_name,
}
}
AnonymousLifetimeMode::PassThrough => self.new_implicit_lifetime(span),
}
}
/// Invoked to create the lifetime argument(s) for a path like
/// `std::cell::Ref<T>`; note that implicit lifetimes in these
/// sorts of cases are deprecated. This may therefore report a warning or an
/// error, depending on the mode.
fn elided_path_lifetimes(&mut self, span: Span, count: usize) -> P<[hir::Lifetime]> {
match self.anonymous_lifetime_mode {
// NB. We intentionally ignore the create-parameter mode here
// and instead "pass through" to resolve-lifetimes, which will then
// report an error. This is because we don't want to support
// impl elision for deprecated forms like
//
// impl Foo for std::cell::Ref<u32> // note lack of '_
AnonymousLifetimeMode::CreateParameter => {}
// This is the normal case.
AnonymousLifetimeMode::PassThrough => {}
}
(0..count)
.map(|_| self.new_implicit_lifetime(span))
.collect()
}
/// Invoked to create the lifetime argument(s) for an elided trait object
/// bound, like the bound in `Box<dyn Debug>`. This method is not invoked
/// when the bound is written, even if it is written with `'_` like in
/// `Box<dyn Debug + '_>`. In those cases, `lower_lifetime` is invoked.
fn elided_dyn_bound(&mut self, span: Span) -> hir::Lifetime {
match self.anonymous_lifetime_mode {
// NB. We intentionally ignore the create-parameter mode here.
// and instead "pass through" to resolve-lifetimes, which will apply
// the object-lifetime-defaulting rules. Elided object lifetime defaults
// do not act like other elided lifetimes. In other words, given this:
//
// impl Foo for Box<dyn Debug>
//
// we do not introduce a fresh `'_` to serve as the bound, but instead
// ultimately translate to the equivalent of:
//
// impl Foo for Box<dyn Debug + 'static>
//
// `resolve_lifetime` has the code to make that happen.
AnonymousLifetimeMode::CreateParameter => {}
// This is the normal case.
AnonymousLifetimeMode::PassThrough => {}
}
self.new_implicit_lifetime(span)
}
fn new_implicit_lifetime(&mut self, span: Span) -> hir::Lifetime {
hir::Lifetime {
id: self.next_id().node_id,
span,
name: hir::LifetimeName::Implicit,
}
}
fn maybe_lint_bare_trait(&self, span: Span, id: NodeId, is_global: bool) {
self.sess.buffer_lint_with_diagnostic(
builtin::BARE_TRAIT_OBJECTS,
id,
span,
"trait objects without an explicit `dyn` are deprecated",
builtin::BuiltinLintDiagnostics::BareTraitObject(span, is_global),
)
}
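    /// Builds a call to `ops::Try::<method>(e)`, e.g. `Try::from_error(expr)`,
    /// resolving the path at `unstable_span` so the desugaring may use unstable items.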
fn wrap_in_try_constructor(
&mut self,
method: &'static str,
e: hir::Expr,
unstable_span: Span,
) -> P<hir::Expr> {
let path = &["ops", "Try", method];
let from_err = P(self.expr_std_path(unstable_span, path,
ThinVec::new()));
P(self.expr_call(e.span, from_err, hir_vec![e]))
}
}
fn body_ids(bodies: &BTreeMap<hir::BodyId, hir::Body>) -> Vec<hir::BodyId> {
// Sorting by span ensures that we get things in order within a
// file, and also puts the files in a sensible order.
let mut body_ids: Vec<_> = bodies.keys().cloned().collect();
body_ids.sort_by_key(|b| bodies[b].value.span);
body_ids
}
| 39.595525 | 100 | 0.485564 |
089a214667ea24f8ee05713f1d0b90c6dedd74c6 | 647 | #![allow(incomplete_features)]
#![feature(generic_associated_types)]
#![feature(associated_type_defaults)]
trait Foo {
type A<'a> where Self: 'a;
type B<'a, 'b> where 'a: 'b;
type C where Self: Clone;
}
#[derive(Copy, Clone)]
struct Fooy<T>(T);
impl<T> Foo for Fooy<T> {
type A<'a> where Self: 'static = (&'a ());
//~^ ERROR the parameter type `T` may not live long enough
type B<'a, 'b> where 'b: 'a = (&'a(), &'b ());
//~^ ERROR lifetime bound not satisfied
//~| ERROR lifetime bound not satisfied
type C where Self: Copy = String;
//~^ ERROR the trait bound `T: Copy` is not satisfied
}
fn main() {}
| 25.88 | 62 | 0.607419 |
ebed49b05db56b438ce676b3c0c52587b2366260 | 11,563 | //! @brief command line setup and parse
use {
clap::{
crate_description, crate_name, crate_version, App, AppSettings, Arg, ArgGroup, ArgMatches,
},
gadgets_common::load_yaml_file,
lazy_static::*,
solana_clap_utils::input_validators::{is_keypair, is_pubkey, is_url_or_moniker},
solana_sdk::{pubkey::Pubkey, signature::read_keypair_file, signer::Signer},
std::process::exit,
std::{collections::HashMap, str::FromStr},
yaml_rust::Yaml,
};
/// Construct the CLI input model (parsing happens when the caller invokes `get_matches`)
pub fn parse_command_line() -> App<'static, 'static> {
App::new(crate_name!())
.global_setting(AppSettings::ArgRequiredElseHelp)
.about(crate_description!())
.version(crate_version!())
.setting(AppSettings::ArgRequiredElseHelp)
.arg({
let arg = Arg::with_name("config_file")
.short("C")
.long("config")
.value_name("PATH")
.takes_value(true)
.global(true)
.help("Configuration file to use");
if let Some(ref config_file) = *solana_cli_config::CONFIG_FILE {
arg.default_value(config_file)
} else {
arg
}
})
.arg(
Arg::with_name("verbose")
.long("verbose")
.short("v")
.takes_value(false)
.global(true)
.help("Show additional information"),
)
.arg(
Arg::with_name("json_rpc_url")
.short("u")
.long("url")
.value_name("URL")
.takes_value(true)
.global(true)
.validator(is_url_or_moniker)
.help("JSON RPC URL for the cluster [default: value from configuration file]"),
)
.arg(
Arg::with_name("decl")
.display_order(2)
.long("declfile")
.short("d")
.takes_value(true)
.global(true)
.help("YAML data deserialization declaration file"),
)
.arg(
Arg::with_name("keypair")
.long("keypair")
.short("k")
.global(true)
.validator(is_keypair)
.conflicts_with_all(&["pkstr", "sampkey"])
.takes_value(true)
.help("Keypair to extract public key from"),
)
.arg(
Arg::with_name("pkstr")
.long("pubkey")
.short("p")
.global(true)
.conflicts_with("sampkey")
.validator(is_pubkey)
.takes_value(true)
.help("Publickey Base58 string"),
)
.arg(
Arg::with_name("sampkey")
.long("samplekey")
.short("s")
.global(true)
.possible_values(&["user1", "user2", "prog"])
.takes_value(true)
.help("Account or program sample name"),
)
.arg(
Arg::with_name("output")
.long("output")
.short("o")
.global(true)
.takes_value(true)
.possible_values(&["json", "stdout"])
.default_value("stdout")
.requires_ifs(&[("json", "filename")])
.help("Direct output to file"),
)
.arg(
Arg::with_name("filename")
.long("filename")
.short("f")
.global(true)
.takes_value(true)
.requires("output")
.help("Filename for '-o json' output"),
)
.subcommand(App::new("account").about("Deserialize single account"))
.subcommand(App::new("program").about("Deserialize all program owned accounts"))
.group(
ArgGroup::with_name("key_flags").args(&["keypair", "pkstr", "sampkey"]), // .required(true),
)
}
lazy_static! {
static ref SAMPLE_KEYS_MAP: HashMap<&'static str, &'static str> = {
let mut jump_table = HashMap::<&str, &str>::new();
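        // The relative sample paths depend on where the tool is invoked from:
        // inside the `sad` crate directory or elsewhere in the workspace.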
if std::env::current_dir().unwrap().ends_with("sad") {
jump_table.insert("user1", "../../samples/keys/user1_account.json");
jump_table.insert("user2", "../../samples/keys/user2_account.json");
jump_table.insert(
"prog",
"../../samples/keys/SampGgdt3wioaoMZhC6LTSbg4pnuvQnSfJpDYeuXQBv.json",
);
jump_table.insert(
"decl",
"../../samples/yamldecls/SampGgdt3wioaoMZhC6LTSbg4pnuvQnSfJpDYeuXQBv.yml",
);
jump_table
} else {
jump_table.insert("user1", "../samples/keys/user1_account.json");
jump_table.insert("user2", "../samples/keys/user2_account.json");
jump_table.insert(
"prog",
"../samples/keys/SampGgdt3wioaoMZhC6LTSbg4pnuvQnSfJpDYeuXQBv.json",
);
jump_table.insert(
"decl",
"../samples/yamldecls/SampGgdt3wioaoMZhC6LTSbg4pnuvQnSfJpDYeuXQBv.yml",
);
jump_table
}
};
}
/// Get the correct public key from the command line, along with the
/// deserialization descriptor: either the YAML declaration file passed on the
/// command line or the bundled sample declaration
pub fn get_account_and_descriptor(matches: &ArgMatches) -> (Pubkey, Vec<Yaml>) {
let (kp, ks, ss) = (
matches.is_present("keypair"),
matches.is_present("pkstr"),
matches.is_present("sampkey"),
);
match (kp, ks, ss) {
(true, _, _) => {
let descriptor_file_name = matches.value_of("decl").unwrap();
let kp = read_keypair_file(matches.value_of("keypair").unwrap())
.unwrap()
.pubkey();
let indecl = load_yaml_file(descriptor_file_name);
if indecl.is_err() {
eprintln!(
"File error: On {} {}",
descriptor_file_name,
indecl.err().unwrap()
);
exit(1)
} else {
(kp, indecl.unwrap())
}
}
(_, true, _) => {
let descriptor_file_name = matches.value_of("decl").unwrap();
let kp = Pubkey::from_str(matches.value_of("pkstr").unwrap()).unwrap();
let indecl = load_yaml_file(descriptor_file_name);
if indecl.is_err() {
eprintln!(
"File error: On {} {}",
descriptor_file_name,
indecl.err().unwrap()
);
exit(1)
} else {
(kp, indecl.unwrap())
}
}
(_, _, true) => {
let kp = read_keypair_file(
SAMPLE_KEYS_MAP
.get(matches.value_of("sampkey").unwrap())
.unwrap(),
)
.unwrap()
.pubkey();
let descriptor_file_name = *SAMPLE_KEYS_MAP.get("decl").unwrap();
let indecl = load_yaml_file(descriptor_file_name);
if indecl.is_err() {
eprintln!(
"File error: On {} {}",
descriptor_file_name,
indecl.err().unwrap()
);
exit(1)
} else {
(kp, indecl.unwrap())
}
}
_ => unreachable!(),
}
}
#[cfg(test)]
mod tests {
use super::*;
use clap::ErrorKind;
fn argsetup(faux_cmd_line: Vec<&str>) -> Result<ArgMatches, clap::Error> {
App::new("prog")
.arg(
Arg::with_name("output")
.long("output")
.short("o")
.takes_value(true)
.possible_values(&["json", "stdout"])
.requires_ifs(&[("json", "filename")])
// .default_value("stdout")
.help("Direct output to file"),
)
.arg(
Arg::with_name("filename")
.long("filename")
.short("f")
.takes_value(true)
.requires("output")
.help("Filename for '-o json' output"),
)
.get_matches_from_safe(faux_cmd_line)
}
#[test]
fn test_requiredifs_options_without_output_should_pass() {
let res = argsetup(vec!["prog", "-o", "json", "-f", "filename"]);
assert!(res.is_ok());
}
#[test]
fn test_requiredifs_options_without_output_should_fail() {
let res = argsetup(vec!["prog", "-f", "filename"]);
        assert!(res.is_err()); // `-f <filename>` requires `--output`, which was not given
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn test_requiresif_options_without_file_should_fail() {
let res = argsetup(vec!["prog", "-o", "json"]);
        assert!(res.is_err()); // We used `-o json`, so `-f <filename>` is required
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
    // Setup for choosing which key (account or program) is used
fn keysetup(faux_cmd_line: Vec<&str>) -> Result<ArgMatches, clap::Error> {
App::new("prog")
.arg(
Arg::with_name("keypair")
.long("keypair")
.short("k")
.validator(is_keypair)
.takes_value(true)
.help("Keypair to extract public key from. Mutually exclusive with '--pubkey'"),
)
.arg(
Arg::with_name("pkstr")
.long("pubkey")
.short("p")
.validator(is_pubkey)
.takes_value(true)
.help("Publickey string. Mutually exclusive with '--keyfile'"),
)
.arg(
Arg::with_name("sampkey")
.long("samplekey")
.short("s")
.possible_values(&["user1", "user2", "prog"])
.takes_value(true)
.help("Account or program sample name"),
)
.group(
ArgGroup::with_name("key_flags")
.required(true)
.args(&["keypair", "pkstr", "sampkey"]),
)
.get_matches_from_safe(faux_cmd_line)
}
#[test]
fn test_keyfile_pass() {
let matches = keysetup(vec!["prog", "-k", SAMPLE_KEYS_MAP.get("user2").unwrap()]).unwrap();
let (target_pubkey, indecl) = get_account_and_descriptor(&matches);
println!("{:?} = {:?}", target_pubkey, indecl);
}
#[test]
fn test_keystr_pass() {
let matches = keysetup(vec![
"prog",
"-p",
"SampGgdt3wioaoMZhC6LTSbg4pnuvQnSfJpDYeuXQBv",
])
.unwrap();
println!("{:?}", matches);
}
#[test]
fn test_sampkey_pass() {
let matches = keysetup(vec!["prog", "-s", "user2"]).unwrap();
let (target_pubkey, indecl) = get_account_and_descriptor(&matches);
println!("{:?} = {:?}", target_pubkey, indecl);
}
#[test]
fn test_sampkey_options_fail() {
let matches = keysetup(vec!["prog"]);
assert!(matches.is_err());
}
}
| 35.360856 | 104 | 0.48119 |
382b5e017d3369dd7a8c7b2b1bc2747ed2343a8f | 2,826 |
const DATA: &str = include_str!("../../data/day_03.txt");
struct Terrain {
width: usize,
height: usize,
// Matrix of ascii characters
data: Vec<&'static [u8]>,
}
impl From<&'static str> for Terrain {
fn from(s: &'static str) -> Self {
let mut width_check: Option<usize> = None;
let matrix: Vec<&[u8]> = s
.lines()
.inspect(|s| match width_check {
None => width_check = Some(s.len()),
Some(len) if len != s.len() => panic!("Inconsistent line length in input data"),
_ => ()
})
.map(|s| s.as_bytes())
.collect();
Terrain {
width: matrix[0].len(),
height: matrix.len(),
data: matrix,
}
}
}
impl Terrain {
/// Are we on a tree? Returns None if we're past the bottom
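    /// The terrain repeats horizontally, so the x coordinate wraps modulo the width.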
fn is_a_tree(&self, x: usize, y: usize) -> Option<bool> {
if y >= (self.height) {
None
} else {
Some(self.data[y][x % self.width] == b'#')
}
}
fn run(&self, dx: usize, dy: usize) -> Trajectory {
Trajectory {
terrain: self,
x: 0,
y: 0,
dx,
dy,
}
}
}
/// An iterator of "is it a tree?" booleans over a trajectory.
struct Trajectory<'a> {
terrain: &'a Terrain,
x: usize,
y: usize,
dx: usize,
dy: usize,
}
impl <'a> Iterator for Trajectory<'a> {
type Item = bool;
fn next(&mut self) -> Option<bool> {
let r = self.terrain.is_a_tree(self.x, self.y);
self.x += self.dx;
self.y += self.dy;
r
}
}
fn count_trees(terrain: &Terrain, dx: usize, dy: usize) -> usize {
terrain.run(dx, dy).filter(|tree| *tree).count()
}
fn mul_count(terrain: &Terrain, slopes: Vec<(usize, usize)>) -> usize {
slopes.into_iter()
.inspect(|(dx, dy)| print!("Trees hit with slope ({}, {}): ", dx, dy))
.map(|(dx, dy)| count_trees(&terrain, dx, dy))
.inspect(|x| println!("{}", x))
.product()
}
fn main() {
let terrain = Terrain::from(DATA);
println!("Trees hit with slope (3, 1): {}", count_trees(&terrain, 3, 1));
println!();
let r = mul_count(&terrain, vec![(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]);
println!("Product: {}", r);
}
#[cfg(test)]
mod tests {
use super::*;
const TEST_DATA: &str = include_str!("../../data/day_03_test.txt");
#[test]
fn test_terrain() {
let terrain = Terrain::from(TEST_DATA);
let c = count_trees(&terrain, 3, 1);
assert_eq!(c, 7);
}
#[test]
fn test_mul_count() {
let terrain = Terrain::from(TEST_DATA);
let r = mul_count(&terrain, vec![(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]);
assert_eq!(r, 336);
}
}
| 24.789474 | 96 | 0.497523 |
5b8de71702bc15d7d6bbe59a42380395657e6868 | 10,934 | // Copyright 2020 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//! A basic relay server and relay client implementation.
//!
//! The example below involves three nodes: (1) a relay server, (2) a listening
//! relay client listening via the relay server and (3) a dialing relay client
//! dialing the listening relay client via the relay server.
//!
//! 1. To start the relay server, run `cargo run --example=relay --package=libp2p-relay --mode relay --secret-key-seed 1 --address /ip4/<ip address>/tcp/<port>`.
//! The `--secret-key-seed` flag helps create a static peer id using the given number argument as a seed.
//! The mode specifies whether the node should run as a relay server, a listening client or a dialing client.
//! The address specifies a static address. Usually it will be some loop back address such as `/ip4/0.0.0.0/tcp/4444`.
//! Example:
//! `cargo run --example=relay --package=libp2p-relay -- --mode relay --secret-key-seed 1 --address /ip4/0.0.0.0/tcp/4444`
//! `cargo run --example=relay --package=libp2p-relay -- --mode relay --secret-key-seed 1 --address /ip6/::/tcp/4444`
//!
//! 2. To start the listening relay client run `cargo run --example=relay --package=libp2p-relay -- --mode client-listen --secret-key-seed 2 --address
//! <addr-relay-server>/p2p/<peer-id-relay-server>/p2p-circuit` in a second terminal where:
//!
//! - `<addr-relay-server>` is replaced by one of the listening addresses of the relay server.
//! - `<peer-id-relay-server>` is replaced by the peer id of the relay server.
//!
//! Example:
//! `cargo run --example=relay --package=libp2p-relay -- --mode client-listen --secret-key-seed 2 --address /ip4/127.0.0.1/tcp/4444/p2p/12D3KooWPjceQrSwdWXPyLLeABRXmuqt69Rg3sBYbU1Nft9HyQ6X/p2p-circuit`
//! `cargo run --example=relay --package=libp2p-relay -- --mode client-listen --secret-key-seed 2 --address /ip6/::1/tcp/4444/p2p/12D3KooWPjceQrSwdWXPyLLeABRXmuqt69Rg3sBYbU1Nft9HyQ6X/p2p-circuit`
//!
//! 3. To start the dialing relay client run `cargo run --example=relay --package=libp2p-relay -- --mode client-dial --secret-key-seed 3 --address
//! <addr-relay-server>/p2p/<peer-id-relay-server>/p2p-circuit/p2p/<peer-id-listening-relay-client>` in
//! a third terminal where:
//!
//! - `<addr-relay-server>` is replaced by one of the listening addresses of the relay server.
//! - `<peer-id-relay-server>` is replaced by the peer id of the relay server.
//! - `<peer-id-listening-relay-client>` is replaced by the peer id of the listening relay client.
//! Example:
//! `cargo run --example=relay --package=libp2p-relay -- --mode client-dial --secret-key-seed 3 --address /ip4/127.0.0.1/tcp/4444/p2p/12D3KooWPjceQrSwdWXPyLLeABRXmuqt69Rg3sBYbU1Nft9HyQ6X/p2p-circuit/p2p/12D3KooWH3uVF6wv47WnArKHk5p6cvgCJEb74UTmxztmQDc298L3`
//! `cargo run --example=relay --package=libp2p-relay -- --mode client-dial --secret-key-seed 3 --address /ip6/::1/tcp/4444/p2p/12D3KooWPjceQrSwdWXPyLLeABRXmuqt69Rg3sBYbU1Nft9HyQ6X/p2p-circuit/p2p/12D3KooWH3uVF6wv47WnArKHk5p6cvgCJEb74UTmxztmQDc298L3`
//!
//! In the third terminal you will see the dialing relay client receive pings
//! from both the relay server AND from the listening relay client relayed via
//! the relay server.
use futures::executor::block_on;
use futures::stream::StreamExt;
use libp2p::dns::DnsConfig;
use libp2p::ping::{Ping, PingConfig, PingEvent};
use libp2p::plaintext;
use libp2p::relay::{Relay, RelayConfig};
use libp2p::swarm::SwarmEvent;
use libp2p::tcp::TcpConfig;
use libp2p::Transport;
use libp2p::{core::upgrade, identity::ed25519};
use libp2p::{identity, NetworkBehaviour, PeerId, Swarm};
use std::error::Error;
use std::task::{Context, Poll};
use std::time::Duration;
use std::{fmt, str::FromStr};
use structopt::StructOpt;
// Listen on all interfaces and whatever port the OS assigns
const DEFAULT_RELAY_ADDRESS: &str = "/ip4/0.0.0.0/tcp/0";
fn main() -> Result<(), Box<dyn Error>> {
env_logger::init();
let opt = Opt::from_args();
println!("opt: {:?}", opt);
// Create a static known PeerId based on given secret
let local_key: identity::Keypair = generate_ed25519(opt.secret_key_seed);
let local_peer_id = PeerId::from(local_key.public());
println!("Local peer id: {:?}", local_peer_id);
let transport = block_on(DnsConfig::system(TcpConfig::new()))?;
let relay_config = RelayConfig {
connection_idle_timeout: Duration::from_secs(10 * 60),
..Default::default()
};
let (relay_wrapped_transport, relay_behaviour) =
libp2p_relay::new_transport_and_behaviour(relay_config, transport);
let behaviour = Behaviour {
relay: relay_behaviour,
ping: Ping::new(
PingConfig::new()
.with_keep_alive(true)
.with_interval(Duration::from_secs(1)),
),
};
let plaintext = plaintext::PlainText2Config {
local_public_key: local_key.public(),
};
let transport = relay_wrapped_transport
.upgrade(upgrade::Version::V1)
.authenticate(plaintext)
.multiplex(libp2p_yamux::YamuxConfig::default())
.boxed();
let mut swarm = Swarm::new(transport, behaviour, local_peer_id);
match opt.mode {
Mode::Relay => {
let address = get_relay_address(&opt);
swarm.listen_on(address.parse()?)?;
println!("starting listening as relay on {}", &address);
}
Mode::ClientListen => {
let relay_address = get_relay_peer_address(&opt);
swarm.listen_on(relay_address.parse()?)?;
println!("starting client listener via relay on {}", &relay_address);
}
Mode::ClientDial => {
let client_listen_address = get_client_listen_address(&opt);
swarm.dial_addr(client_listen_address.parse()?)?;
println!("starting as client dialer on {}", client_listen_address);
}
}
block_on(futures::future::poll_fn(move |cx: &mut Context<'_>| {
loop {
match swarm.poll_next_unpin(cx) {
Poll::Ready(Some(event)) => match event {
SwarmEvent::NewListenAddr { address, .. } => {
print_listener_peer(&address, &opt.mode, local_peer_id)
}
_ => println!("{:?}", event),
},
Poll::Ready(None) => return Poll::Ready(Ok(())),
Poll::Pending => break,
}
}
Poll::Pending
}))
}
fn print_listener_peer(addr: &libp2p::Multiaddr, mode: &Mode, local_peer_id: PeerId) {
match mode {
Mode::Relay => {
println!(
"Peer that act as Relay can access on: `{}/p2p/{}/p2p-circuit`",
addr, local_peer_id
);
}
Mode::ClientListen => {
println!(
"Peer that act as Client Listen can access on: `/p2p/{}/{}`",
addr, local_peer_id
);
}
Mode::ClientDial => {
println!("Peer that act as Client Dial Listening on {:?}", addr);
}
}
}
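/// Demo helper: derives a deterministic ed25519 keypair from a single seed byte.
/// Deterministic keys keep the example reproducible but are not secure for real use.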
fn generate_ed25519(secret_key_seed: u8) -> identity::Keypair {
let mut bytes = [0u8; 32];
bytes[0] = secret_key_seed;
let secret_key = ed25519::SecretKey::from_bytes(&mut bytes)
.expect("this returns `Err` only if the length is wrong; the length is correct; qed");
identity::Keypair::Ed25519(secret_key.into())
}
/// Get the address for relay mode
fn get_relay_address(opt: &Opt) -> String {
match &opt.address {
Some(address) => address.clone(),
None => {
println!("--address argument was not provided, will use the default listening relay address: {}",DEFAULT_RELAY_ADDRESS);
DEFAULT_RELAY_ADDRESS.to_string()
}
}
}
/// Get the address for client_listen mode
fn get_relay_peer_address(opt: &Opt) -> String {
match &opt.address {
Some(address) => address.clone(),
None => panic!("Please provide relayed listen address such as: <addr-relay-server>/p2p/<peer-id-relay-server>/p2p-circuit"),
}
}
/// Get the address for client-dial mode
fn get_client_listen_address(opt: &Opt) -> String {
match &opt.address {
Some(address) => address.clone(),
None => panic!("Please provide client listen address such as: <addr-relay-server>/p2p/<peer-id-relay-server>/p2p-circuit/p2p/<peer-id-listening-relay-client>")
}
}
#[derive(NetworkBehaviour)]
#[behaviour(out_event = "Event", event_process = false)]
struct Behaviour {
relay: Relay,
ping: Ping,
}
#[derive(Debug)]
enum Event {
Relay(()),
Ping(PingEvent),
}
impl From<PingEvent> for Event {
fn from(e: PingEvent) -> Self {
Event::Ping(e)
}
}
impl From<()> for Event {
fn from(_: ()) -> Self {
Event::Relay(())
}
}
#[derive(Debug, StructOpt)]
enum Mode {
Relay,
ClientListen,
ClientDial,
}
impl FromStr for Mode {
type Err = ModeError;
fn from_str(mode: &str) -> Result<Self, Self::Err> {
match mode {
"relay" => Ok(Mode::Relay),
"client-listen" => Ok(Mode::ClientListen),
"client-dial" => Ok(Mode::ClientDial),
_ => Err(ModeError {}),
}
}
}
#[derive(Debug)]
struct ModeError {}
impl Error for ModeError {}
impl fmt::Display for ModeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Could not parse a mode")
}
}
#[derive(Debug, StructOpt)]
#[structopt(name = "libp2p relay")]
struct Opt {
/// The mode (relay, client-listen, client-dial)
#[structopt(long)]
mode: Mode,
/// Fixed value to generate deterministic peer id
#[structopt(long)]
secret_key_seed: u8,
/// The listening address
#[structopt(long)]
address: Option<String>,
}
| 39.05 | 259 | 0.650905 |
1ca844bc012a1ddacddef0d455eacc00cba857f1 | 17,042 | #[doc = "Register `pu_rst_clkpll` reader"]
pub struct R(crate::R<PU_RST_CLKPLL_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<PU_RST_CLKPLL_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<PU_RST_CLKPLL_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<PU_RST_CLKPLL_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `pu_rst_clkpll` writer"]
pub struct W(crate::W<PU_RST_CLKPLL_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<PU_RST_CLKPLL_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<PU_RST_CLKPLL_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<PU_RST_CLKPLL_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `pu_clkpll` reader - "]
pub struct PU_CLKPLL_R(crate::FieldReader<bool, bool>);
impl PU_CLKPLL_R {
pub(crate) fn new(bits: bool) -> Self {
PU_CLKPLL_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for PU_CLKPLL_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `pu_clkpll` writer - "]
pub struct PU_CLKPLL_W<'a> {
w: &'a mut W,
}
impl<'a> PU_CLKPLL_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10);
self.w
}
}
#[doc = "Field `pu_clkpll_sfreg` reader - "]
pub struct PU_CLKPLL_SFREG_R(crate::FieldReader<bool, bool>);
impl PU_CLKPLL_SFREG_R {
pub(crate) fn new(bits: bool) -> Self {
PU_CLKPLL_SFREG_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for PU_CLKPLL_SFREG_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `pu_clkpll_sfreg` writer - "]
pub struct PU_CLKPLL_SFREG_W<'a> {
w: &'a mut W,
}
impl<'a> PU_CLKPLL_SFREG_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | ((value as u32 & 0x01) << 9);
self.w
}
}
#[doc = "Field `clkpll_pu_cp` reader - "]
pub struct CLKPLL_PU_CP_R(crate::FieldReader<bool, bool>);
impl CLKPLL_PU_CP_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_PU_CP_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_PU_CP_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_pu_cp` writer - "]
pub struct CLKPLL_PU_CP_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_PU_CP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | ((value as u32 & 0x01) << 8);
self.w
}
}
#[doc = "Field `clkpll_pu_pfd` reader - "]
pub struct CLKPLL_PU_PFD_R(crate::FieldReader<bool, bool>);
impl CLKPLL_PU_PFD_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_PU_PFD_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_PU_PFD_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_pu_pfd` writer - "]
pub struct CLKPLL_PU_PFD_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_PU_PFD_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7);
self.w
}
}
#[doc = "Field `clkpll_pu_clamp_op` reader - "]
pub struct CLKPLL_PU_CLAMP_OP_R(crate::FieldReader<bool, bool>);
impl CLKPLL_PU_CLAMP_OP_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_PU_CLAMP_OP_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_PU_CLAMP_OP_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_pu_clamp_op` writer - "]
pub struct CLKPLL_PU_CLAMP_OP_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_PU_CLAMP_OP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u32 & 0x01) << 6);
self.w
}
}
#[doc = "Field `clkpll_pu_fbdv` reader - "]
pub struct CLKPLL_PU_FBDV_R(crate::FieldReader<bool, bool>);
impl CLKPLL_PU_FBDV_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_PU_FBDV_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_PU_FBDV_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_pu_fbdv` writer - "]
pub struct CLKPLL_PU_FBDV_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_PU_FBDV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u32 & 0x01) << 5);
self.w
}
}
#[doc = "Field `clkpll_pu_postdiv` reader - "]
pub struct CLKPLL_PU_POSTDIV_R(crate::FieldReader<bool, bool>);
impl CLKPLL_PU_POSTDIV_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_PU_POSTDIV_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_PU_POSTDIV_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_pu_postdiv` writer - "]
pub struct CLKPLL_PU_POSTDIV_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_PU_POSTDIV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u32 & 0x01) << 4);
self.w
}
}
#[doc = "Field `clkpll_reset_refdiv` reader - "]
pub struct CLKPLL_RESET_REFDIV_R(crate::FieldReader<bool, bool>);
impl CLKPLL_RESET_REFDIV_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_RESET_REFDIV_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_RESET_REFDIV_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_reset_refdiv` writer - "]
pub struct CLKPLL_RESET_REFDIV_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_RESET_REFDIV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u32 & 0x01) << 3);
self.w
}
}
#[doc = "Field `clkpll_reset_fbdv` reader - "]
pub struct CLKPLL_RESET_FBDV_R(crate::FieldReader<bool, bool>);
impl CLKPLL_RESET_FBDV_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_RESET_FBDV_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_RESET_FBDV_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_reset_fbdv` writer - "]
pub struct CLKPLL_RESET_FBDV_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_RESET_FBDV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2);
self.w
}
}
#[doc = "Field `clkpll_reset_postdiv` reader - "]
pub struct CLKPLL_RESET_POSTDIV_R(crate::FieldReader<bool, bool>);
impl CLKPLL_RESET_POSTDIV_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_RESET_POSTDIV_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_RESET_POSTDIV_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_reset_postdiv` writer - "]
pub struct CLKPLL_RESET_POSTDIV_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_RESET_POSTDIV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
#[doc = "Field `clkpll_sdm_reset` reader - "]
pub struct CLKPLL_SDM_RESET_R(crate::FieldReader<bool, bool>);
impl CLKPLL_SDM_RESET_R {
pub(crate) fn new(bits: bool) -> Self {
CLKPLL_SDM_RESET_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CLKPLL_SDM_RESET_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `clkpll_sdm_reset` writer - "]
pub struct CLKPLL_SDM_RESET_W<'a> {
w: &'a mut W,
}
impl<'a> CLKPLL_SDM_RESET_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
impl R {
#[doc = "Bit 10"]
#[inline(always)]
pub fn pu_clkpll(&self) -> PU_CLKPLL_R {
PU_CLKPLL_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 9"]
#[inline(always)]
pub fn pu_clkpll_sfreg(&self) -> PU_CLKPLL_SFREG_R {
PU_CLKPLL_SFREG_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 8"]
#[inline(always)]
pub fn clkpll_pu_cp(&self) -> CLKPLL_PU_CP_R {
CLKPLL_PU_CP_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 7"]
#[inline(always)]
pub fn clkpll_pu_pfd(&self) -> CLKPLL_PU_PFD_R {
CLKPLL_PU_PFD_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 6"]
#[inline(always)]
pub fn clkpll_pu_clamp_op(&self) -> CLKPLL_PU_CLAMP_OP_R {
CLKPLL_PU_CLAMP_OP_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 5"]
#[inline(always)]
pub fn clkpll_pu_fbdv(&self) -> CLKPLL_PU_FBDV_R {
CLKPLL_PU_FBDV_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 4"]
#[inline(always)]
pub fn clkpll_pu_postdiv(&self) -> CLKPLL_PU_POSTDIV_R {
CLKPLL_PU_POSTDIV_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 3"]
#[inline(always)]
pub fn clkpll_reset_refdiv(&self) -> CLKPLL_RESET_REFDIV_R {
CLKPLL_RESET_REFDIV_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 2"]
#[inline(always)]
pub fn clkpll_reset_fbdv(&self) -> CLKPLL_RESET_FBDV_R {
CLKPLL_RESET_FBDV_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 1"]
#[inline(always)]
pub fn clkpll_reset_postdiv(&self) -> CLKPLL_RESET_POSTDIV_R {
CLKPLL_RESET_POSTDIV_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 0"]
#[inline(always)]
pub fn clkpll_sdm_reset(&self) -> CLKPLL_SDM_RESET_R {
CLKPLL_SDM_RESET_R::new((self.bits & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 10"]
#[inline(always)]
pub fn pu_clkpll(&mut self) -> PU_CLKPLL_W {
PU_CLKPLL_W { w: self }
}
#[doc = "Bit 9"]
#[inline(always)]
pub fn pu_clkpll_sfreg(&mut self) -> PU_CLKPLL_SFREG_W {
PU_CLKPLL_SFREG_W { w: self }
}
#[doc = "Bit 8"]
#[inline(always)]
pub fn clkpll_pu_cp(&mut self) -> CLKPLL_PU_CP_W {
CLKPLL_PU_CP_W { w: self }
}
#[doc = "Bit 7"]
#[inline(always)]
pub fn clkpll_pu_pfd(&mut self) -> CLKPLL_PU_PFD_W {
CLKPLL_PU_PFD_W { w: self }
}
#[doc = "Bit 6"]
#[inline(always)]
pub fn clkpll_pu_clamp_op(&mut self) -> CLKPLL_PU_CLAMP_OP_W {
CLKPLL_PU_CLAMP_OP_W { w: self }
}
#[doc = "Bit 5"]
#[inline(always)]
pub fn clkpll_pu_fbdv(&mut self) -> CLKPLL_PU_FBDV_W {
CLKPLL_PU_FBDV_W { w: self }
}
#[doc = "Bit 4"]
#[inline(always)]
pub fn clkpll_pu_postdiv(&mut self) -> CLKPLL_PU_POSTDIV_W {
CLKPLL_PU_POSTDIV_W { w: self }
}
#[doc = "Bit 3"]
#[inline(always)]
pub fn clkpll_reset_refdiv(&mut self) -> CLKPLL_RESET_REFDIV_W {
CLKPLL_RESET_REFDIV_W { w: self }
}
#[doc = "Bit 2"]
#[inline(always)]
pub fn clkpll_reset_fbdv(&mut self) -> CLKPLL_RESET_FBDV_W {
CLKPLL_RESET_FBDV_W { w: self }
}
#[doc = "Bit 1"]
#[inline(always)]
pub fn clkpll_reset_postdiv(&mut self) -> CLKPLL_RESET_POSTDIV_W {
CLKPLL_RESET_POSTDIV_W { w: self }
}
#[doc = "Bit 0"]
#[inline(always)]
pub fn clkpll_sdm_reset(&mut self) -> CLKPLL_SDM_RESET_W {
CLKPLL_SDM_RESET_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "pu_rst_clkpll.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pu_rst_clkpll](index.html) module"]
pub struct PU_RST_CLKPLL_SPEC;
impl crate::RegisterSpec for PU_RST_CLKPLL_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [pu_rst_clkpll::R](R) reader structure"]
impl crate::Readable for PU_RST_CLKPLL_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [pu_rst_clkpll::W](W) writer structure"]
impl crate::Writable for PU_RST_CLKPLL_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets pu_rst_clkpll to value 0"]
impl crate::Resettable for PU_RST_CLKPLL_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 29.74171 | 408 | 0.584791 |
11f02c1b0d7733681078d8ed5d2afc7b32966538 | 425 | use identity::iota::ExplorerUrl;
use identity::iota::IotaDID;
use rocket::{get, serde::json::Json};
use rocket_okapi::openapi;
#[openapi(tag = "resolver")]
#[get("/resolver/resolve/<did>")]
pub fn get_resolve(did: String) -> Json<String> {
let iota_did: IotaDID = IotaDID::try_from(did).unwrap();
let explorer: &ExplorerUrl = ExplorerUrl::mainnet();
Json(explorer.resolver_url(&iota_did).unwrap().to_string())
}
| 32.692308 | 63 | 0.701176 |
896a75df1d244b70df3734316bd4f9654012b503 | 987 | use std::io;
fn is_even(a: &i32) -> bool {
    *a % 2 == 0
}
fn find_digits_in_no(mut n: i32) -> i32 {
let mut count: i32 = 0;
while n != 0 {
n = n / 10;
count += 1;
}
count
}
fn main() {
println!("Print even numbers from 1 to 100");
for n in 1..=100 {
if is_even(&n) {
print!("{} ", n);
}
}
println!("");
let mut choice = String::new();
for n in 0..3 {
println!("What's 100 / 10?");
choice.clear();
io::stdin().read_line(&mut choice).expect("Failed");
let answer: i32 = choice.trim().parse().expect("Failed");
if answer == 10 {
println!("{} is correct!", answer);
break;
} else {
println!("Wrong answer. {} chance left.", 2 - n);
}
}
println!("Number of digits in 12345 is {}",
find_digits_in_no(12345));
}
| 18.622642 | 65 | 0.442756 |
11a5008dd7292c93b468c84fb41f9e65d25b69cc | 8,772 | //! Gallery album specification
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
model::{common::AccountID, gallery_image::GalleryImage},
serialization::unix_epoch,
};
/// Gallery album
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
// #[serde(deny_unknown_fields)]
pub struct GalleryAlbum {
/// The account ID of the account that uploaded it, or `null`.
pub account_id: Option<AccountID>,
/// The account username or `null` if it's anonymous.
pub account_url: Option<String>,
/// TODO: missing from API model
pub ad_config: Option<serde_json::Value>,
/// TODO: missing from API model
pub ad_type: Option<serde_json::Value>,
/// TODO: missing from API model
pub ad_url: Option<serde_json::Value>,
/// TODO: missing from API model
pub comment_count: u64,
/// The ID of the album cover image
pub cover: String,
/// The height, in pixels, of the album cover image
pub cover_height: Option<u64>,
/// The width, in pixels, of the album cover image
pub cover_width: Option<u64>,
/// Time inserted into the gallery, epoch time
#[serde(with = "unix_epoch")]
pub datetime: DateTime<Utc>,
/// The description of the album in the gallery
pub description: Option<String>,
/// Number of downvotes for the image
pub downs: u64,
/// Indicates if the current user favorited the album. Defaults to false if not signed in.
pub favorite: Option<bool>,
/// Indicates the number of users that have favorited the album.
pub favorite_count: Option<u64>,
/// The ID for the image
pub id: String,
/// An array of all the images in the album (only available when requesting the direct album)
pub images: Option<Vec<GalleryImage>>,
/// The total number of images in the album
pub images_count: u64,
/// TODO: missing from API model
pub in_gallery: Option<bool>,
/// Indicates if the album is in the most viral gallery or not.
pub in_most_viral: Option<bool>,
/// TODO: missing from API model
pub include_album_ads: Option<bool>,
/// TODO: missing from API model
pub is_ad: Option<bool>,
/// If it's an album or not
pub is_album: bool,
/// The view layout of the album.
pub layout: String,
/// The URL link to the album
pub link: Url,
/// Indicates if the album has been marked as nsfw or not. Defaults to `null` if information is not available.
pub nsfw: Option<bool>,
/// Upvotes minus downvotes
pub points: i64,
/// The privacy level of the album, you can only view public if not logged in as album owner
pub privacy: String,
/// Imgur popularity score
pub score: i64,
/// TODO: missing from API model
pub section: Option<serde_json::Value>,
/// TODO: missing from API model
pub tags: Option<Vec<String>>,
/// The title of the album in the gallery
pub title: String,
/// Topic of the gallery album.
pub topic: Option<String>,
/// Topic ID of the gallery album.
pub topic_id: Option<u64>,
/// Upvotes for the image
pub ups: u64,
/// The number of image views
pub views: u64,
/// The current user's vote on the album. `null` if not signed in or if the user hasn't voted on it.
pub vote: Option<String>,
}
#[cfg(test)]
mod test {
use std::error::Error;
use crate::model::{basic::Basic, gallery_album::GalleryAlbum};
#[test]
fn test_deserialize_gallery_album_local() -> Result<(), Box<dyn Error>> {
let res = r#"{
"data": {
"account_id": 67659037,
"account_url": "BeanMugged",
"ad_config": {
"highRiskFlags": [],
"safeFlags": [
"in_gallery",
"gallery",
"album"
],
"showsAds": false,
"unsafeFlags": [
"onsfw_mod_unsafe",
"sixth_mod_unsafe",
"mature"
],
"wallUnsafeFlags": []
},
"ad_type": 0,
"ad_url": "",
"comment_count": 108,
"cover": "MDCEW6Q",
"cover_height": 532,
"cover_width": 513,
"datetime": 1603095538,
"description": null,
"downs": 54,
"favorite": false,
"favorite_count": 315,
"id": "HvCcoNA",
"images": [
{
"account_id": null,
"account_url": null,
"ad_type": 0,
"ad_url": "",
"animated": false,
"bandwidth": 8667309096,
"comment_count": null,
"datetime": 1592569542,
"description": null,
"downs": null,
"edited": "0",
"favorite": false,
"favorite_count": null,
"has_sound": false,
"height": 532,
"id": "MDCEW6Q",
"in_gallery": false,
"in_most_viral": false,
"is_ad": false,
"link": "https://i.imgur.com/MDCEW6Q.png",
"nsfw": null,
"points": null,
"score": null,
"section": null,
"size": 452412,
"tags": [],
"title": null,
"type": "image/png",
"ups": null,
"views": 19158,
"vote": null,
"width": 513
},
{
"account_id": null,
"account_url": null,
"ad_type": 0,
"ad_url": "",
"animated": false,
"bandwidth": 596525155,
"comment_count": null,
"datetime": 1592569543,
"description": null,
"downs": null,
"edited": "0",
"favorite": false,
"favorite_count": null,
"has_sound": false,
"height": 540,
"id": "1REuHNL",
"in_gallery": false,
"in_most_viral": false,
"is_ad": false,
"link": "https://i.imgur.com/1REuHNL.jpg",
"nsfw": null,
"points": null,
"score": null,
"section": null,
"size": 32215,
"tags": [],
"title": null,
"type": "image/jpeg",
"ups": null,
"views": 18517,
"vote": null,
"width": 609
},
{
"account_id": null,
"account_url": null,
"ad_type": 0,
"ad_url": "",
"animated": false,
"bandwidth": 468810123,
"comment_count": null,
"datetime": 1592569544,
"description": null,
"downs": null,
"edited": "0",
"favorite": false,
"favorite_count": null,
"has_sound": false,
"height": 232,
"id": "hp2tIwe",
"in_gallery": false,
"in_most_viral": false,
"is_ad": false,
"link": "https://i.imgur.com/hp2tIwe.jpg",
"nsfw": null,
"points": null,
"score": null,
"section": null,
"size": 30201,
"tags": [],
"title": null,
"type": "image/jpeg",
"ups": null,
"views": 15523,
"vote": null,
"width": 500
}
],
"images_count": 50,
"in_gallery": true,
"in_most_viral": true,
"include_album_ads": true,
"is_ad": false,
"is_album": true,
"layout": "blog",
"link": "https://imgur.com/a/HvCcoNA",
"nsfw": true,
"points": 1251,
"privacy": "hidden",
"score": 1266,
"section": "",
"tags": [],
"title": "Dunn's Dumb Dump",
"topic": "No Topic",
"topic_id": 29,
"ups": 1305,
"views": 31958,
"vote": null
},
"status": 200,
"success": true
}"#;
let data = serde_json::from_str::<Basic<GalleryAlbum>>(res)?;
println!("{:#?}", data);
Ok(())
}
}
| 32.977444 | 114 | 0.476858 |
7a460b52bd9780978cd5d44d7b85155b1ba9397c | 1,776 | // Copyright 2018 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#![cfg(feature = "serde_support")]
extern crate serde_test;
extern crate serde_json;
extern crate edn;
use edn::symbols::Keyword;
use serde_test::{assert_tokens, Token};
#[cfg(feature = "serde_support")]
#[test]
fn test_serialize_keyword() {
let kw = Keyword::namespaced("foo", "bar");
assert_tokens(&kw, &[
Token::NewtypeStruct { name: "Keyword" },
Token::Struct { name: "NamespaceableName", len: 2 },
Token::Str("namespace"),
Token::Some,
Token::BorrowedStr("foo"),
Token::Str("name"),
Token::BorrowedStr("bar"),
Token::StructEnd,
]);
}
#[cfg(feature = "serde_support")]
#[test]
fn test_deserialize_keyword() {
let json = r#"{"name": "foo", "namespace": "bar"}"#;
let kw = serde_json::from_str::<Keyword>(json).unwrap();
assert_eq!(kw.name(), "foo");
assert_eq!(kw.namespace(), Some("bar"));
let bad_ns_json = r#"{"name": "foo", "namespace": ""}"#;
let not_kw = serde_json::from_str::<Keyword>(bad_ns_json);
assert!(not_kw.is_err());
let bad_ns_json = r#"{"name": "", "namespace": "bar"}"#;
let not_kw = serde_json::from_str::<Keyword>(bad_ns_json);
assert!(not_kw.is_err());
}
| 31.157895 | 82 | 0.657658 |
6720dea16c09676fc79b9c93cd07870c57bd0b3d | 4,730 | // Copyright 2015-2019 Capital One Services, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Amazon SQS Messaging Provider
//
#[macro_use]
extern crate log;
#[macro_use]
extern crate wascc_codec as codec;
use codec::capabilities::{CapabilityProvider, Dispatcher, NullDispatcher};
use codec::core::{CapabilityConfiguration, OP_CONFIGURE, OP_REMOVE_ACTOR};
use codec::deserialize;
use codec::messaging::{PublishMessage, OP_PUBLISH_MESSAGE};
use env_logger;
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::error::Error;
mod sqs;
const CAPABILITY_ID: &str = "wascc:messaging";
capability_provider!(AmazonSqsMessagingProvider, AmazonSqsMessagingProvider::new);
// Represents a waSCC Amazon SQS messaging provider.
pub struct AmazonSqsMessagingProvider {
dispatcher: Arc<RwLock<Box<dyn Dispatcher>>>,
clients: Arc<RwLock<HashMap<String, sqs::Client>>>,
}
impl Default for AmazonSqsMessagingProvider {
// Returns the default value for `AmazonSqsMessagingProvider`.
fn default() -> Self {
if env_logger::try_init().is_err() {
info!("Logger already intialized");
}
AmazonSqsMessagingProvider {
dispatcher: Arc::new(RwLock::new(Box::new(NullDispatcher::new()))),
clients: Arc::new(RwLock::new(HashMap::new())),
}
}
}
impl AmazonSqsMessagingProvider {
// Creates a new, empty `AmazonSqsMessagingProvider`.
pub fn new() -> Self {
Self::default()
}
// Starts the capability provider.
fn start(&self, config: CapabilityConfiguration) -> Result<Vec<u8>, Box<dyn Error>> {
let module_id = &config.module;
info!(
"AmazonSqsMessagingProvider(wascc:messaging) start: {}",
module_id
);
let client = sqs::Client::new();
self.clients
.write()
.unwrap()
.insert(module_id.clone(), client);
Ok(vec![])
}
// Stops the capability provider.
fn stop(&self, config: CapabilityConfiguration) -> Result<Vec<u8>, Box<dyn Error>> {
let module_id = &config.module;
info!(
"AmazonSqsMessagingProvider(wascc:messaging) stop: {}",
module_id
);
self.clients.write().unwrap().remove(module_id);
Ok(vec![])
}
// Publishes a message.
// Is this the right place for "tokio::main"?
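    // `#[tokio::main]` turns this async method into a blocking call that runs on its
    // own runtime, so the synchronous `handle_call` path can invoke it directly;
    // `basic_scheduler` selects tokio 0.2's single-threaded scheduler.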
#[tokio::main(basic_scheduler)]
async fn publish_message(
&self,
actor: &str,
msg: PublishMessage,
) -> Result<Vec<u8>, Box<dyn Error>> {
let lock = self.clients.read().unwrap();
let client = match lock.get(actor) {
Some(c) => c,
None => return Err(format!("Unknown actor: {}", actor).into()),
};
client.publish(msg).await
}
}
impl CapabilityProvider for AmazonSqsMessagingProvider {
// Returns the capability ID in the formated `namespace:id`.
fn capability_id(&self) -> &'static str {
CAPABILITY_ID
}
// Called when the host runtime is ready and has configured a dispatcher.
fn configure_dispatch(&self, dispatcher: Box<dyn Dispatcher>) -> Result<(), Box<dyn Error>> {
info!("AmazonSqsMessagingProvider(wascc:messaging) configure_dispatch");
let mut lock = self.dispatcher.write().unwrap();
*lock = dispatcher;
Ok(())
}
// Called by the host runtime when an actor is requesting a command be executed.
fn handle_call(&self, actor: &str, op: &str, msg: &[u8]) -> Result<Vec<u8>, Box<dyn Error>> {
info!(
"AmazonSqsMessagingProvider(wascc:messaging) handle_call `{}` from `{}`",
op, actor
);
match op {
OP_CONFIGURE if actor == "system" => self.start(deserialize(msg)?),
OP_REMOVE_ACTOR if actor == "system" => self.stop(deserialize(msg)?),
OP_PUBLISH_MESSAGE => self.publish_message(actor, deserialize(msg)?),
_ => Err(format!("Unsupported operation: {}", op).into()),
}
}
// Returns the human-readable, friendly name of this capability provider.
fn name(&self) -> &'static str {
"Amazon SQS messaging provider"
}
}
| 30.915033 | 97 | 0.636786 |
2344143928c0add4fea1e95080029ea05dff17bd | 12,478 | //! Type-safe IDs for each resource to avoid mixing the IDs of resources like
//! channels and guilds.
//!
//! # serde
//!
//! These IDs support deserializing from both integers and strings and serialize
//! into strings.
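//!
//! For example (an illustrative sketch assuming `serde_json` as the serializer; the
//! tests below exercise the same behaviour with `serde_test`):
//!
//! ```ignore
//! let a: UserId = serde_json::from_str("\"114941315417899012\"").unwrap();
//! let b: UserId = serde_json::from_str("114941315417899012").unwrap();
//! assert_eq!(a, b);
//! assert_eq!(serde_json::to_string(&a).unwrap(), "\"114941315417899012\"");
//! ```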
pub(crate) mod string {
use serde::{
de::{Deserializer, Error as DeError, Visitor},
ser::Serializer,
};
use std::{
fmt::{Display, Formatter, Result as FmtResult},
marker::PhantomData,
};
struct IdVisitor<T: From<u64>>(PhantomData<T>);
impl<'de, T: From<u64>> Visitor<'de> for IdVisitor<T> {
type Value = T;
fn expecting(&self, f: &mut Formatter<'_>) -> FmtResult {
f.write_str("string or integer snowflake")
}
fn visit_u64<E: DeError>(self, value: u64) -> Result<Self::Value, E> {
Ok(T::from(value))
}
fn visit_str<E: DeError>(self, value: &str) -> Result<Self::Value, E> {
value.parse().map(T::from).map_err(DeError::custom)
}
}
pub fn serialize<T: Display, S: Serializer>(
value: &T,
serializer: S,
) -> Result<S::Ok, S::Error> {
serializer.collect_str(value)
}
pub fn deserialize<'de, T: From<u64>, D: Deserializer<'de>>(
deserializer: D,
) -> Result<T, D::Error> {
deserializer.deserialize_any(IdVisitor(PhantomData))
}
}
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter, Result as FmtResult};
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct ApplicationId(#[serde(with = "string")] pub u64);
impl Display for ApplicationId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for ApplicationId {
fn from(id: u64) -> Self {
ApplicationId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct AttachmentId(#[serde(with = "string")] pub u64);
impl Display for AttachmentId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for AttachmentId {
fn from(id: u64) -> Self {
AttachmentId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct AuditLogEntryId(#[serde(with = "string")] pub u64);
impl Display for AuditLogEntryId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for AuditLogEntryId {
fn from(id: u64) -> Self {
AuditLogEntryId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct ChannelId(#[serde(with = "string")] pub u64);
impl Display for ChannelId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for ChannelId {
fn from(id: u64) -> Self {
ChannelId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct EmojiId(#[serde(with = "string")] pub u64);
impl Display for EmojiId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for EmojiId {
fn from(id: u64) -> Self {
EmojiId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct GenericId(#[serde(with = "string")] pub u64);
impl Display for GenericId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for GenericId {
fn from(id: u64) -> Self {
GenericId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct GuildId(#[serde(with = "string")] pub u64);
impl Display for GuildId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for GuildId {
fn from(id: u64) -> Self {
GuildId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct IntegrationId(#[serde(with = "string")] pub u64);
impl Display for IntegrationId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for IntegrationId {
fn from(id: u64) -> Self {
IntegrationId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct MessageId(#[serde(with = "string")] pub u64);
impl Display for MessageId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for MessageId {
fn from(id: u64) -> Self {
MessageId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct RoleId(#[serde(with = "string")] pub u64);
impl Display for RoleId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for RoleId {
fn from(id: u64) -> Self {
RoleId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct UserId(#[serde(with = "string")] pub u64);
impl Display for UserId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for UserId {
fn from(id: u64) -> Self {
UserId(id)
}
}
#[derive(
Clone, Copy, Debug, Default, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct WebhookId(#[serde(with = "string")] pub u64);
impl Display for WebhookId {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
Display::fmt(&self.0, f)
}
}
impl From<u64> for WebhookId {
fn from(id: u64) -> Self {
WebhookId(id)
}
}
#[cfg(test)]
mod tests {
use super::{
ApplicationId, AttachmentId, AuditLogEntryId, ChannelId, EmojiId, GenericId, GuildId,
IntegrationId, MessageId, RoleId, UserId, WebhookId,
};
use serde_test::Token;
#[allow(clippy::too_many_lines)]
#[test]
fn test_id_deser() {
serde_test::assert_tokens(
&ApplicationId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "ApplicationId",
},
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&ApplicationId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "ApplicationId",
},
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&AttachmentId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "AttachmentId",
},
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&AttachmentId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "AttachmentId",
},
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&AuditLogEntryId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "AuditLogEntryId",
},
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&AuditLogEntryId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "AuditLogEntryId",
},
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&ChannelId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "ChannelId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&ChannelId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "ChannelId" },
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&EmojiId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "EmojiId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&EmojiId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "EmojiId" },
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&GenericId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "GenericId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&GenericId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "GenericId" },
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&GuildId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "GuildId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&GuildId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "GuildId" },
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&IntegrationId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "IntegrationId",
},
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&IntegrationId(114_941_315_417_899_012),
&[
Token::NewtypeStruct {
name: "IntegrationId",
},
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&MessageId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "MessageId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&MessageId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "MessageId" },
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&RoleId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "RoleId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&RoleId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "RoleId" },
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&UserId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "UserId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&UserId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "UserId" },
Token::U64(114_941_315_417_899_012),
],
);
serde_test::assert_tokens(
&WebhookId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "WebhookId" },
Token::Str("114941315417899012"),
],
);
serde_test::assert_de_tokens(
&WebhookId(114_941_315_417_899_012),
&[
Token::NewtypeStruct { name: "WebhookId" },
Token::U64(114_941_315_417_899_012),
],
);
}
}
| 27.424176 | 94 | 0.537827 |
edc2f394765934892963899e8929a5cfb20c589e | 3,705 | // this module is transparently re-exported by its parent `stream`
use super::*;
use crate::quad::streaming_mode::StreamedQuad;
use std::collections::VecDeque;
/// The result of [`TripleSource::filter_map_triples`]
pub struct FilterMapSource<S, F> {
pub source: S,
pub filter_map: F,
}
impl<S, F, T> TripleSource for FilterMapSource<S, F>
where
S: TripleSource,
F: FnMut(StreamedTriple<S::Triple>) -> Option<T>,
T: Triple,
{
type Error = S::Error;
type Triple = ByValue<T>;
fn try_for_some_triple<G, E>(&mut self, f: &mut G) -> StreamResult<bool, Self::Error, E>
where
G: FnMut(StreamedTriple<Self::Triple>) -> Result<(), E>,
E: Error,
{
let filter_map = &mut self.filter_map;
self.source.try_for_some_triple(&mut |t| {
if let Some(q) = (filter_map)(t) {
f(StreamedTriple::by_value(q))
} else {
Ok(())
}
})
}
fn size_hint_triples(&self) -> (usize, Option<usize>) {
(0, self.source.size_hint_triples().1)
}
}
impl<S, F, T> crate::quad::stream::QuadSource for FilterMapSource<S, F>
where
S: TripleSource,
F: FnMut(StreamedTriple<S::Triple>) -> Option<T>,
T: crate::quad::Quad,
{
type Error = S::Error;
type Quad = crate::quad::streaming_mode::ByValue<T>;
fn try_for_some_quad<G, E>(&mut self, f: &mut G) -> StreamResult<bool, Self::Error, E>
where
G: FnMut(StreamedQuad<Self::Quad>) -> Result<(), E>,
E: Error,
{
let filter_map = &mut self.filter_map;
self.source.try_for_some_triple(&mut |t| {
if let Some(u) = (filter_map)(t) {
f(StreamedQuad::by_value(u))
} else {
Ok(())
}
})
}
fn size_hint_quads(&self) -> (usize, Option<usize>) {
(0, self.source.size_hint_triples().1)
}
}
impl<S, F, T> IntoIterator for FilterMapSource<S, F>
where
S: TripleSource,
F: FnMut(StreamedTriple<S::Triple>) -> Option<T>,
T: 'static,
{
type Item = Result<T, S::Error>;
type IntoIter = FilterMapSourceIterator<S, F, T, S::Error>;
fn into_iter(self) -> Self::IntoIter {
FilterMapSourceIterator {
source: self.source,
filter_map: self.filter_map,
buffer: VecDeque::new(),
}
}
}
/// An iterator over the result of [`TripleSource::filter_map_triples`]
pub struct FilterMapSourceIterator<S, F, T, E> {
pub source: S,
pub filter_map: F,
pub buffer: VecDeque<Result<T, E>>,
}
impl<S, F, T, E> Iterator for FilterMapSourceIterator<S, F, T, E>
where
S: TripleSource<Error = E>,
F: FnMut(StreamedTriple<S::Triple>) -> Option<T>,
T: 'static,
E: 'static + std::error::Error,
{
type Item = Result<T, S::Error>;
fn next(&mut self) -> Option<Result<T, S::Error>> {
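        // Results pulled from the underlying source are parked in `self.buffer` and
        // yielded one at a time. The buffer is swapped into a local queue so that the
        // closure below can push into it while `self.source` is borrowed mutably.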
let mut remaining = true;
let mut buffer = VecDeque::new();
std::mem::swap(&mut self.buffer, &mut buffer);
let filter_map = &mut self.filter_map;
while self.buffer.is_empty() && remaining {
match self.source.for_some_triple(&mut |t| {
if let Some(v) = (filter_map)(t) {
buffer.push_back(Ok(v));
}
}) {
Ok(b) => {
remaining = b;
}
Err(err) => {
buffer.push_back(Err(err));
}
};
}
std::mem::swap(&mut self.buffer, &mut buffer);
self.buffer.pop_front()
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, self.source.size_hint_triples().1)
}
}
| 28.5 | 92 | 0.551417 |
286422695698d2e03583a6ace2d3d79b629cd203 | 4,320 | // Copyright (c) Microsoft. All rights reserved.
use edgelet_core::{Module, ModuleRuntime};
use edgelet_http::route::{BoxFuture, Handler, Parameters};
use failure::ResultExt;
use futures::{future, Future};
use http::header::{CONTENT_LENGTH, CONTENT_TYPE};
use http::{Request, Response, StatusCode};
use hyper::{Body, Error as HyperError};
use management::models::*;
use serde::Serialize;
use serde_json;
use error::ErrorKind;
use IntoResponse;
pub struct GetSystemInfo<M>
where
M: 'static + ModuleRuntime,
M::Error: IntoResponse,
<M::Module as Module>::Config: Serialize,
{
runtime: M,
}
impl<M> GetSystemInfo<M>
where
M: 'static + ModuleRuntime,
M::Error: IntoResponse,
<M::Module as Module>::Config: Serialize,
{
pub fn new(runtime: M) -> Self {
GetSystemInfo { runtime }
}
}
impl<M> Handler<Parameters> for GetSystemInfo<M>
where
M: 'static + ModuleRuntime,
M::Error: IntoResponse,
<M::Module as Module>::Config: Serialize,
{
fn handle(
&self,
_req: Request<Body>,
_params: Parameters,
) -> BoxFuture<Response<Body>, HyperError> {
debug!("Get System Information");
let response = self.runtime
.system_info()
.and_then(|systeminfo| {
let body = SystemInfo::new(
systeminfo.os_type().to_string(),
systeminfo.architecture().to_string(),
);
let response = serde_json::to_string(&body)
.context(ErrorKind::Serde)
.map(|b| {
Response::builder()
.status(StatusCode::OK)
.header(CONTENT_TYPE, "application/json")
.header(CONTENT_LENGTH, b.len().to_string().as_str())
.body(b.into())
.unwrap_or_else(|e| e.into_response())
})
.unwrap_or_else(|e| e.into_response());
future::ok(response)
})
.or_else(|e| future::ok(e.into_response()));
Box::new(response)
}
}
#[cfg(test)]
mod tests {
use edgelet_core::ModuleRuntimeState;
use edgelet_http::route::Parameters;
use edgelet_test_utils::module::*;
use futures::Stream;
use management::models::SystemInfo;
use server::module::tests::Error;
use super::*;
#[test]
fn system_info_success() {
// arrange
let state = ModuleRuntimeState::default();
let config = TestConfig::new("microsoft/test-image".to_string());
let module: TestModule<Error> =
TestModule::new("test-module".to_string(), config, Ok(state));
let runtime = TestRuntime::new(Ok(module));
let handler = GetSystemInfo::new(runtime);
let request = Request::get("http://localhost/info")
.body(Body::default())
.unwrap();
// act
let response = handler.handle(request, Parameters::new()).wait().unwrap();
// assert
response
.into_body()
.concat2()
.and_then(|b| {
let system_info: SystemInfo = serde_json::from_slice(&b).unwrap();
let os_type = system_info.os_type();
let architecture = system_info.architecture();
assert_eq!("os_type_sample", os_type);
assert_eq!("architecture_sample", architecture);
Ok(())
})
.wait()
.unwrap();
}
#[test]
fn system_info_failed() {
// arrange
let runtime = TestRuntime::new(Err(Error::General));
let handler = GetSystemInfo::new(runtime);
let request = Request::get("http://localhost/modules")
.body(Body::default())
.unwrap();
// act
let response = handler.handle(request, Parameters::new()).wait().unwrap();
// assert
response
.into_body()
.concat2()
.and_then(|b| {
let error: ErrorResponse = serde_json::from_slice(&b).unwrap();
assert_eq!("General error", error.message());
Ok(())
})
.wait()
.unwrap();
}
}
| 29.793103 | 82 | 0.54213 |
d562e68da996b81f08d47d4047610a7494d4e808 | 13,825 | //! This module inplements a thin shim over the 3rd party GitLab API where needed.
//!
//! As a result we don't unit-test the shim code, and therefore try to keep as much logic out of
//! this as possible, in order to keep the shim as thin as possible.
//!
//! Where possible it will just re-export types from the 3rd party library when nothing special
//! needs to be abstracted.
use anyhow::{Context, Result, anyhow};
pub use gitlab::Gitlab as Client;
pub use gitlab::api as api;
pub use gitlab::api::Query;
pub use gitlab::api::projects::Project;
pub use gitlab::api::projects::ProjectBuilder;
pub use gitlab::api::projects::CreateProject;
pub use gitlab::api::projects::CreateProjectBuilder;
pub use gitlab::api::projects::issues::Issue;
pub use gitlab::api::projects::issues::IssueBuilder;
pub use gitlab::api::projects::issues::Issues;
pub use gitlab::api::projects::issues::IssuesBuilder;
pub use gitlab::api::projects::issues::EditIssue;
pub use gitlab::api::projects::issues::EditIssueBuilder;
pub use gitlab::api::projects::issues::CreateIssue;
pub use gitlab::api::projects::issues::CreateIssueBuilder;
pub use gitlab::api::projects::issues::IssueState;
pub use gitlab::api::projects::issues::IssueStateEvent;
pub use gitlab::api::projects::issues::IssueScope;
pub use gitlab::api::projects::issues::IssueWeight;
pub use gitlab::api::projects::issues::IssueOrderBy;
pub use gitlab::api::projects::merge_requests::MergeRequest;
pub use gitlab::api::projects::merge_requests::MergeRequestBuilder;
pub use gitlab::api::projects::merge_requests::MergeRequests;
pub use gitlab::api::projects::merge_requests::MergeRequestsBuilder;
pub use gitlab::api::projects::merge_requests::EditMergeRequest;
pub use gitlab::api::projects::merge_requests::EditMergeRequestBuilder;
pub use gitlab::api::projects::merge_requests::CreateMergeRequest;
pub use gitlab::api::projects::merge_requests::CreateMergeRequestBuilder;
pub use gitlab::api::projects::merge_requests::UnapproveMergeRequest;
pub use gitlab::api::projects::merge_requests::UnapproveMergeRequestBuilder;
pub use gitlab::api::projects::merge_requests::ApproveMergeRequest;
pub use gitlab::api::projects::merge_requests::ApproveMergeRequestBuilder;
pub use gitlab::api::projects::merge_requests::RebaseMergeRequest;
pub use gitlab::api::projects::merge_requests::RebaseMergeRequestBuilder;
pub use gitlab::api::projects::merge_requests::MergeMergeRequest;
pub use gitlab::api::projects::merge_requests::MergeMergeRequestBuilder;
pub use gitlab::api::projects::merge_requests::MergeRequestState;
pub use gitlab::api::projects::merge_requests::MergeRequestStateEvent;
pub use gitlab::api::projects::merge_requests::MergeRequestOrderBy;
pub use gitlab::api::projects::merge_requests::MergeRequestScope;
pub use gitlab::api::projects::labels::Labels;
pub use gitlab::api::projects::labels::LabelsBuilder;
pub use gitlab::api::projects::members::ProjectMembers;
pub use gitlab::api::projects::members::ProjectMembersBuilder;
pub use gitlab::api::projects::repository::branches::CreateBranch;
pub use gitlab::api::projects::repository::branches::CreateBranchBuilder;
pub use gitlab::api::projects::repository::branches::Branch;
pub use gitlab::api::projects::repository::branches::BranchBuilder;
pub use gitlab::api::common::EnableState;
pub use gitlab::api::common::VisibilityLevel;
pub use gitlab::api::common::SortOrder;
pub use gitlab::api::projects::AutoDevOpsDeployStrategy;
pub use gitlab::api::projects::FeatureAccessLevel;
pub use gitlab::api::projects::FeatureAccessLevelPublic;
pub use gitlab::api::projects::MergeMethod;
pub use gitlab::api::projects::BuildGitStrategy;
use crate::config::Config;
/// Misc converter functions used to convert string args to Gitlab types
pub mod converter {
use super::*;
pub fn mr_order_by_from_str(s: &str) -> Result<MergeRequestOrderBy> {
match s {
"created_on" => Ok(MergeRequestOrderBy::CreatedAt),
"updated_on" => Ok(MergeRequestOrderBy::UpdatedAt),
_ => Err(anyhow!("Incorrect merge request list ordering"))
}
}
pub fn mr_scope_from_str(s: &str) -> Result<MergeRequestScope> {
match s {
"created_by_me" => Ok(MergeRequestScope::CreatedByMe),
"assigned_to_me" => Ok(MergeRequestScope::AssignedToMe),
"all" => Ok(MergeRequestScope::All),
_ => Err(anyhow!("Incorrect merge request scope"))
}
}
pub fn mr_state_from_str(s: &str) -> Result<MergeRequestState> {
match s {
"opened" => Ok(MergeRequestState::Opened),
"closed" => Ok(MergeRequestState::Closed),
"locked" => Ok(MergeRequestState::Locked),
"merged" => Ok(MergeRequestState::Merged),
            _ => Err(anyhow!("Incorrect merge request state"))
}
}
pub fn issue_order_by_from_str(s: &str) -> Result<IssueOrderBy> {
match s {
"created_on" => Ok(IssueOrderBy::CreatedAt),
"updated_on" => Ok(IssueOrderBy::UpdatedAt),
"priority" => Ok(IssueOrderBy::Priority),
"due_date" => Ok(IssueOrderBy::DueDate),
"relative_position" => Ok(IssueOrderBy::RelativePosition),
"label_priority" => Ok(IssueOrderBy::LabelPriority),
"milestone_date" => Ok(IssueOrderBy::MilestoneDue),
"popularity" => Ok(IssueOrderBy::Popularity),
"weight" => Ok(IssueOrderBy::WeightFields),
_ => Err(anyhow!("Incorrect issue list ordering"))
}
}
pub fn issue_scope_from_str(s: &str) -> Result<IssueScope> {
match s {
"created_by_me" => Ok(IssueScope::CreatedByMe),
"assigned_to_me" => Ok(IssueScope::AssignedToMe),
"all" => Ok(IssueScope::All),
_ => Err(anyhow!("Incorrect issue scope"))
}
}
pub fn issue_state_from_str(s: &str) -> Result<IssueState> {
match s {
"opened" => Ok(IssueState::Opened),
"closed" => Ok(IssueState::Closed),
_ => Err(anyhow!("Incorrect issue state"))
}
}
pub fn auto_devops_deploy_strategy_from_str(s: &str) -> Result<AutoDevOpsDeployStrategy> {
match s {
"continuous" => Ok(AutoDevOpsDeployStrategy::Continuous),
"manual" => Ok(AutoDevOpsDeployStrategy::Manual),
"timed_incremental" => Ok(AutoDevOpsDeployStrategy::TimedIncremental),
_ => Err(anyhow!("Incorrect deployment strategy"))
}
}
pub fn enable_state_from_str(s: &str) -> Result<EnableState> {
match s {
"enabled" => Ok(EnableState::Enabled),
"disabled" => Ok(EnableState::Disabled),
_ => Err(anyhow!("Incorrect state"))
}
}
pub fn pipeline_git_strategy_from_str(s: &str) -> Result<BuildGitStrategy> {
match s {
"fetch" => Ok(BuildGitStrategy::Fetch),
"clone" => Ok(BuildGitStrategy::Clone),
_ => Err(anyhow!("Incorrect git strategy"))
}
}
pub fn merge_method_from_str(s: &str) -> Result<MergeMethod> {
match s {
"merge" => Ok(MergeMethod::Merge),
"rebase-merge" => Ok(MergeMethod::RebaseMerge),
"fast-forward" => Ok(MergeMethod::FastForward),
_ => Err(anyhow!("Incorrect merge method"))
}
}
pub fn visibility_level_from_str(s: &str) -> Result<VisibilityLevel> {
match s {
"public" => Ok(VisibilityLevel::Public),
"internal" => Ok(VisibilityLevel::Internal),
"private" => Ok(VisibilityLevel::Private),
_ => Err(anyhow!("Incorrect visibility level"))
}
}
pub fn feature_access_level_public_from_str(s: &str) -> Result<FeatureAccessLevelPublic> {
match s {
"disabled" => Ok(FeatureAccessLevelPublic::Disabled),
"private" => Ok(FeatureAccessLevelPublic::Private),
"enabled" => Ok(FeatureAccessLevelPublic::Enabled),
"public" => Ok(FeatureAccessLevelPublic::Public),
_ => Err(anyhow!("Incorrect public feature access level"))
}
}
pub fn feature_access_level_from_str(s: &str) -> Result<FeatureAccessLevel> {
match s {
"disabled" => Ok(FeatureAccessLevel::Disabled),
"private" => Ok(FeatureAccessLevel::Private),
"enabled" => Ok(FeatureAccessLevel::Enabled),
_ => Err(anyhow!("Incorrect feature access level"))
}
}
}
/// Shim over 3rd party new() method
pub fn new(config: &Config) -> Result<Box<Client>> {
let host = config
.host
.as_ref()
.context("GitLab host not set. Run `git lab init`.")?;
let token = config
.token
.as_ref()
.context("GitLab token not set. Run `git lab init`.")?;
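    // Only an explicit `tls = false` selects the insecure (http) client; `None` or `Some(true)`
    // falls through to the https connection below.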
let client = match config.tls {
Some(tls) if !tls => Client::new_insecure(host, token)
.with_context(|| {
format!("Failed to make insecure (http) connection to {}", host)
})? ,
_ => Client::new(host, token)
.with_context(|| format!("Failed to make secure (https) connection to {}", host))?,
};
Ok(Box::new(client))
}
#[cfg(test)]
mod gitlab_converter_unit_tests {
use anyhow::Result;
use rstest::*;
use super::*;
use super::converter::*;
#[rstest(
s, t, f,
case("created_on", MergeRequestOrderBy::CreatedAt, &mr_order_by_from_str),
case("updated_on", MergeRequestOrderBy::UpdatedAt, &mr_order_by_from_str),
case("created_by_me", MergeRequestScope::CreatedByMe, &mr_scope_from_str),
case("assigned_to_me", MergeRequestScope::AssignedToMe, &mr_scope_from_str),
case("all", MergeRequestScope::All, &mr_scope_from_str),
case("opened", MergeRequestState::Opened, &mr_state_from_str),
case("closed", MergeRequestState::Closed, &mr_state_from_str),
case("locked", MergeRequestState::Locked, &mr_state_from_str),
case("merged", MergeRequestState::Merged, &mr_state_from_str),
case("created_on", IssueOrderBy::CreatedAt, &issue_order_by_from_str),
case("updated_on", IssueOrderBy::UpdatedAt, &issue_order_by_from_str),
case("priority", IssueOrderBy::Priority, &issue_order_by_from_str),
case("due_date", IssueOrderBy::DueDate, &issue_order_by_from_str),
case("relative_position", IssueOrderBy::RelativePosition, &issue_order_by_from_str),
case("label_priority", IssueOrderBy::LabelPriority, &issue_order_by_from_str),
case("milestone_date", IssueOrderBy::MilestoneDue, &issue_order_by_from_str),
case("popularity", IssueOrderBy::Popularity, &issue_order_by_from_str),
case("weight", IssueOrderBy::WeightFields, &issue_order_by_from_str),
case("created_by_me", IssueScope::CreatedByMe, &issue_scope_from_str),
case("assigned_to_me", IssueScope::AssignedToMe, &issue_scope_from_str),
case("all", IssueScope::All, &issue_scope_from_str),
case("opened", IssueState::Opened, &issue_state_from_str),
case("closed", IssueState::Closed, &issue_state_from_str),
case("continuous", AutoDevOpsDeployStrategy::Continuous, &auto_devops_deploy_strategy_from_str),
case("manual", AutoDevOpsDeployStrategy::Manual, &auto_devops_deploy_strategy_from_str),
case("timed_incremental", AutoDevOpsDeployStrategy::TimedIncremental, &auto_devops_deploy_strategy_from_str),
case("enabled", EnableState::Enabled, &enable_state_from_str),
case("disabled", EnableState::Disabled, &enable_state_from_str),
case("fetch", BuildGitStrategy::Fetch, &pipeline_git_strategy_from_str),
case("clone", BuildGitStrategy::Clone, &pipeline_git_strategy_from_str),
case("merge", MergeMethod::Merge, &merge_method_from_str),
case("rebase-merge", MergeMethod::RebaseMerge, &merge_method_from_str),
case("fast-forward", MergeMethod::FastForward, &merge_method_from_str),
case("public", VisibilityLevel::Public, &visibility_level_from_str),
case("internal", VisibilityLevel::Internal, &visibility_level_from_str),
case("private", VisibilityLevel::Private, &visibility_level_from_str),
case("disabled", FeatureAccessLevelPublic::Disabled, &feature_access_level_public_from_str),
case("private", FeatureAccessLevelPublic::Private, &feature_access_level_public_from_str),
case("enabled", FeatureAccessLevelPublic::Enabled, &feature_access_level_public_from_str),
case("public", FeatureAccessLevelPublic::Public, &feature_access_level_public_from_str),
case("disabled", FeatureAccessLevel::Disabled, &feature_access_level_from_str),
case("private", FeatureAccessLevel::Private, &feature_access_level_from_str),
case("enabled", FeatureAccessLevel::Enabled, &feature_access_level_from_str),
)]
fn test_gitlab_converter_from_str_ok<T>(s: &str, t: T, f: &dyn Fn(&str) -> Result<T>)
where T: Eq + std::fmt::Debug
{
assert_eq!(f(s).unwrap(), t)
}
#[rstest(
s, f,
case("blah", &issue_order_by_from_str),
case("blah", &issue_scope_from_str),
case("blah", &issue_state_from_str),
case("blah", &auto_devops_deploy_strategy_from_str),
case("blah", &enable_state_from_str),
case("blah", &pipeline_git_strategy_from_str),
case("blah", &merge_method_from_str),
case("blah", &visibility_level_from_str),
case("blah", &feature_access_level_public_from_str),
case("blah", &feature_access_level_from_str),
)]
fn test_gitlab_converter_from_str_err<T>(s: &str, f: &dyn Fn(&str) -> Result<T>)
where T: Eq + std::fmt::Debug
{
assert!(f(s).is_err())
}
}
| 44.028662 | 117 | 0.674069 |
22c02c136585325dffa206ff063031e76cf63353 | 4,746 | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Library for Fuchsia device diagnostics utilities.
use {
anyhow::Result,
diagnostics_data::Timestamp,
ffx_daemon_target::logger::streamer::{DiagnosticsStreamer, SessionStream},
fidl::endpoints::ServerEnd,
fidl_fuchsia_developer_ffx::{
DaemonDiagnosticsStreamParameters, DiagnosticsStreamError, TimeBound,
},
fidl_fuchsia_developer_remotecontrol::{
ArchiveIteratorEntry, ArchiveIteratorError, ArchiveIteratorMarker, ArchiveIteratorRequest,
DiagnosticsData, InlineData,
},
fuchsia_async::futures::{stream::TryStreamExt, AsyncWriteExt},
std::sync::Arc,
};
pub async fn run_diagnostics_streaming(
mut log_iterator: SessionStream,
iterator: ServerEnd<ArchiveIteratorMarker>,
) -> Result<()> {
let mut iter_stream = iterator.into_stream()?;
while let Some(request) = iter_stream.try_next().await? {
match request {
ArchiveIteratorRequest::GetNext { responder } => {
let res = log_iterator.iter().await?;
match res {
Some(Ok(entry)) => {
// If the entry is small enough to fit into a FIDL message
// we send it using the Inline variant. Otherwise, we use
// the Socket variant by sending one end of the socket as a
// response and sending the data into the other end of the
// socket.
// TODO(fxbug.dev/81310): This should be unified across the
// daemon and bridge.
let data = serde_json::to_string(&entry)?;
if data.len() <= fidl_fuchsia_logger::MAX_DATAGRAM_LEN_BYTES as usize {
responder.send(&mut Ok(vec![ArchiveIteratorEntry {
diagnostics_data: Some(DiagnosticsData::Inline(InlineData {
data,
truncated_chars: 0,
})),
..ArchiveIteratorEntry::EMPTY
}]))?;
} else {
let (socket, tx_socket) =
fuchsia_async::emulated_handle::Socket::create(
fuchsia_async::emulated_handle::SocketOpts::STREAM,
)?;
let mut tx_socket = fuchsia_async::Socket::from_socket(tx_socket)?;
// We send one end of the socket back to the caller.
// The receiver will need to read the socket content to
// get the data.
let response = vec![ArchiveIteratorEntry {
diagnostics_data: Some(DiagnosticsData::Socket(socket)),
..ArchiveIteratorEntry::EMPTY
}];
responder.send(&mut Ok(response))?;
// We write all the data to the other end of the
// socket.
tx_socket.write_all(data.as_bytes()).await?;
}
}
Some(Err(e)) => {
log::warn!("got error streaming diagnostics: {}", e);
responder.send(&mut Err(ArchiveIteratorError::DataReadFailed))?;
}
None => {
responder.send(&mut Ok(vec![]))?;
break;
}
}
}
}
}
Ok::<(), anyhow::Error>(())
}
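/// Derives the minimum timestamp for a diagnostics stream request: an absolute bound is
/// rebased against the stream's session timestamp (when one exists), a monotonic bound is
/// used as-is, and with no bound the optional `min_target_timestamp_nanos` parameter is used.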
pub async fn get_streaming_min_timestamp(
parameters: &DaemonDiagnosticsStreamParameters,
stream: &Arc<DiagnosticsStreamer<'_>>,
) -> Result<Option<Timestamp>, DiagnosticsStreamError> {
Ok(match ¶meters.min_timestamp_nanos {
Some(TimeBound::Absolute(t)) => {
if let Some(session) = stream.session_timestamp_nanos().await {
Some(Timestamp::from(*t as i64 - session))
} else {
None
}
}
Some(TimeBound::Monotonic(t)) => Some(Timestamp::from(*t as i64)),
Some(bound) => {
log::error!("Got unexpected TimeBound field {:?}", bound);
return Err(DiagnosticsStreamError::GenericError);
}
None => parameters.min_target_timestamp_nanos.map(|t| Timestamp::from(t as i64)),
})
}
| 45.2 | 98 | 0.513064 |
e8a949929dd3c7dd7d74a6c50a1842a384abd3dc | 8,247 | mod page_dict;
pub use page_dict::*;
use std::sync::Arc;
pub use parquet_format_async_temp::{
DataPageHeader as DataPageHeaderV1, DataPageHeaderV2, PageHeader as ParquetPageHeader,
};
pub use crate::parquet_bridge::{DataPageHeaderExt, PageType};
use crate::compression::Compression;
use crate::encoding::{get_length, Encoding};
use crate::error::Result;
use crate::metadata::ColumnDescriptor;
use crate::statistics::{deserialize_statistics, Statistics};
/// A [`CompressedDataPage`] is a compressed, encoded representation of a Parquet data page.
/// It holds actual data and thus cloning it is expensive.
#[derive(Debug)]
pub struct CompressedDataPage {
pub(crate) header: DataPageHeader,
pub(crate) buffer: Vec<u8>,
compression: Compression,
uncompressed_page_size: usize,
pub(crate) dictionary_page: Option<Arc<dyn DictPage>>,
pub(crate) descriptor: ColumnDescriptor,
}
impl CompressedDataPage {
pub fn new(
header: DataPageHeader,
buffer: Vec<u8>,
compression: Compression,
uncompressed_page_size: usize,
dictionary_page: Option<Arc<dyn DictPage>>,
descriptor: ColumnDescriptor,
) -> Self {
Self {
header,
buffer,
compression,
uncompressed_page_size,
dictionary_page,
descriptor,
}
}
pub fn header(&self) -> &DataPageHeader {
&self.header
}
pub fn uncompressed_size(&self) -> usize {
self.uncompressed_page_size
}
pub fn compressed_size(&self) -> usize {
self.buffer.len()
}
pub fn compression(&self) -> Compression {
self.compression
}
pub fn num_values(&self) -> usize {
self.header.num_values()
}
    /// Decodes the raw statistics into [`Statistics`], if any are present.
pub fn statistics(&self) -> Option<Result<Arc<dyn Statistics>>> {
match &self.header {
DataPageHeader::V1(d) => d
.statistics
.as_ref()
.map(|x| deserialize_statistics(x, self.descriptor().clone())),
DataPageHeader::V2(d) => d
.statistics
.as_ref()
.map(|x| deserialize_statistics(x, self.descriptor().clone())),
}
}
pub fn descriptor(&self) -> &ColumnDescriptor {
&self.descriptor
}
}
#[derive(Debug, Clone)]
pub enum DataPageHeader {
V1(DataPageHeaderV1),
V2(DataPageHeaderV2),
}
impl DataPageHeader {
pub fn num_values(&self) -> usize {
match &self {
DataPageHeader::V1(d) => d.num_values as usize,
DataPageHeader::V2(d) => d.num_values as usize,
}
}
}
/// A [`DataPage`] is an uncompressed, encoded representation of a Parquet data page. It holds actual data
/// and thus cloning it is expensive.
#[derive(Debug, Clone)]
pub struct DataPage {
pub(super) header: DataPageHeader,
pub(super) buffer: Vec<u8>,
pub(super) dictionary_page: Option<Arc<dyn DictPage>>,
pub(super) descriptor: ColumnDescriptor,
}
impl DataPage {
pub fn new(
header: DataPageHeader,
buffer: Vec<u8>,
dictionary_page: Option<Arc<dyn DictPage>>,
descriptor: ColumnDescriptor,
) -> Self {
Self {
header,
buffer,
dictionary_page,
descriptor,
}
}
pub fn header(&self) -> &DataPageHeader {
&self.header
}
pub fn dictionary_page(&self) -> Option<&Arc<dyn DictPage>> {
self.dictionary_page.as_ref()
}
pub fn buffer(&self) -> &[u8] {
&self.buffer
}
/// Returns a mutable reference to the internal buffer.
/// Useful to recover the buffer after the page has been decoded.
pub fn buffer_mut(&mut self) -> &mut Vec<u8> {
&mut self.buffer
}
pub fn num_values(&self) -> usize {
self.header.num_values()
}
pub fn encoding(&self) -> Encoding {
match &self.header {
DataPageHeader::V1(d) => d.encoding(),
DataPageHeader::V2(d) => d.encoding(),
}
}
pub fn definition_level_encoding(&self) -> Encoding {
match &self.header {
DataPageHeader::V1(d) => d.definition_level_encoding(),
DataPageHeader::V2(_) => Encoding::Rle,
}
}
pub fn repetition_level_encoding(&self) -> Encoding {
match &self.header {
DataPageHeader::V1(d) => d.repetition_level_encoding(),
DataPageHeader::V2(_) => Encoding::Rle,
}
}
    /// Decodes the raw statistics into [`Statistics`], if any are present.
pub fn statistics(&self) -> Option<Result<Arc<dyn Statistics>>> {
match &self.header {
DataPageHeader::V1(d) => d
.statistics
.as_ref()
.map(|x| deserialize_statistics(x, self.descriptor().clone())),
DataPageHeader::V2(d) => d
.statistics
.as_ref()
.map(|x| deserialize_statistics(x, self.descriptor().clone())),
}
}
pub fn descriptor(&self) -> &ColumnDescriptor {
&self.descriptor
}
}
/// A [`Page`] is an uncompressed, encoded representation of a Parquet page. It may hold actual data
/// and thus cloning it may be expensive.
#[derive(Debug)]
pub enum Page {
Data(DataPage),
Dict(Arc<dyn DictPage>),
}
/// A [`EncodedPage`] is an uncompressed, encoded representation of a Parquet page. It may hold actual data
/// and thus cloning it may be expensive.
#[derive(Debug)]
pub enum EncodedPage {
Data(DataPage),
Dict(EncodedDictPage),
}
/// A [`CompressedPage`] is a compressed, encoded representation of a Parquet page. It holds actual data
/// and thus cloning it is expensive.
#[derive(Debug)]
pub enum CompressedPage {
Data(CompressedDataPage),
Dict(CompressedDictPage),
}
impl CompressedPage {
pub(crate) fn buffer(&mut self) -> &mut Vec<u8> {
match self {
CompressedPage::Data(page) => &mut page.buffer,
CompressedPage::Dict(page) => &mut page.buffer,
}
}
}
/// Splits the page buffer into 3 slices corresponding to (encoded rep levels, encoded def levels, encoded values) for v1 pages.
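///
/// In this v1 layout each level stream that is present is prefixed by a 4-byte length
/// (read via `get_length`), i.e. `[rep len][rep levels][def len][def levels][values]`,
/// with absent streams omitted entirely.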
#[inline]
pub fn split_buffer_v1(buffer: &[u8], has_rep: bool, has_def: bool) -> (&[u8], &[u8], &[u8]) {
let (rep, buffer) = if has_rep {
let level_buffer_length = get_length(buffer) as usize;
(
&buffer[4..4 + level_buffer_length],
&buffer[4 + level_buffer_length..],
)
} else {
(&[] as &[u8], buffer)
};
let (def, buffer) = if has_def {
let level_buffer_length = get_length(buffer) as usize;
(
&buffer[4..4 + level_buffer_length],
&buffer[4 + level_buffer_length..],
)
} else {
(&[] as &[u8], buffer)
};
(rep, def, buffer)
}
/// Splits the page buffer into 3 slices corresponding to (encoded rep levels, encoded def levels, encoded values) for v2 pages.
pub fn split_buffer_v2(
buffer: &[u8],
rep_level_buffer_length: usize,
def_level_buffer_length: usize,
) -> (&[u8], &[u8], &[u8]) {
(
&buffer[..rep_level_buffer_length],
&buffer[rep_level_buffer_length..rep_level_buffer_length + def_level_buffer_length],
&buffer[rep_level_buffer_length + def_level_buffer_length..],
)
}
/// Splits the page buffer into 3 slices corresponding to (encoded rep levels, encoded def levels, encoded values).
pub fn split_buffer<'a>(
page: &'a DataPage,
descriptor: &ColumnDescriptor,
) -> (&'a [u8], &'a [u8], &'a [u8]) {
match page.header() {
DataPageHeader::V1(_) => split_buffer_v1(
page.buffer(),
descriptor.max_rep_level() > 0,
descriptor.max_def_level() > 0,
),
DataPageHeader::V2(header) => {
let def_level_buffer_length = header.definition_levels_byte_length as usize;
let rep_level_buffer_length = header.repetition_levels_byte_length as usize;
split_buffer_v2(
page.buffer(),
rep_level_buffer_length,
def_level_buffer_length,
)
}
}
}
| 28.936842 | 128 | 0.603613 |
deed4bd9dc7061cd850e3c717da1ed72fc1d83d2 | 3,132 | //! The `check` crate is responsible for ensuring that an AST expression is actually a valid
//! program. This currently consists of three larger parts: typechecking, kindchecking and renaming.
//! If an AST passes the checks in `Typecheck::typecheck_expr` (which runs all of these checks),
//! the expression is expected to compile successfully (if it does not, it should be considered an
//! internal compiler error).
#![doc(html_root_url = "https://docs.rs/gluon_check/0.9.4")] // # GLUON
extern crate codespan;
extern crate codespan_reporting;
#[macro_use]
extern crate collect_mac;
#[cfg(test)]
extern crate env_logger;
extern crate itertools;
#[macro_use]
extern crate log;
extern crate pretty;
extern crate rpds;
extern crate smallvec;
extern crate stable_deref_trait;
extern crate strsim;
extern crate union_find;
#[macro_use]
extern crate gluon_base as base;
pub mod kindcheck;
pub mod metadata;
mod recursion_check;
pub mod rename;
pub mod substitution;
pub mod typecheck;
pub mod unify;
pub mod unify_type;
mod implicits;
use base::types::{ArcType, TypeCache, TypeEnv};
/// Checks if `actual` can be assigned to a binding with the type signature `signature`
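///
/// (Internally this creates a fresh substitution, instantiates the generics of `actual`, and
/// checks that `signature` subsumes it; any unification error is logged and the check simply
/// returns `false`.)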
pub fn check_signature(env: &TypeEnv, signature: &ArcType, actual: &ArcType) -> bool {
use base::fnv::FnvMap;
use base::kind::Kind;
use base::scoped_map::ScopedMap;
use substitution::Substitution;
let subs = Substitution::new(Kind::typ());
let type_cache = TypeCache::new();
let state = unify_type::State::new(env, &subs, &type_cache);
let actual = unify_type::new_skolem_scope(&subs, actual);
let actual = actual.instantiate_generics(&mut FnvMap::default());
let result = unify_type::subsumes(&subs, &mut ScopedMap::new(), 0, state, signature, &actual);
if let Err((_, ref err)) = result {
warn!("Check signature error: {}", err);
}
result.is_ok()
}
#[cfg(test)]
mod tests {
use std::cell::RefCell;
use std::rc::Rc;
use base::kind::{ArcKind, KindEnv};
use base::symbol::{Symbol, SymbolModule, SymbolRef, Symbols};
use base::types::{Alias, ArcType, TypeEnv};
pub struct MockEnv;
impl KindEnv for MockEnv {
fn find_kind(&self, _type_name: &SymbolRef) -> Option<ArcKind> {
None
}
}
impl TypeEnv for MockEnv {
fn find_type(&self, _id: &SymbolRef) -> Option<&ArcType> {
None
}
fn find_type_info(&self, _id: &SymbolRef) -> Option<&Alias<Symbol, ArcType>> {
None
}
}
    /// Returns a reference to the interner stored in thread-local storage
pub fn get_local_interner() -> Rc<RefCell<Symbols>> {
thread_local!(static INTERNER: Rc<RefCell<Symbols>>
= Rc::new(RefCell::new(Symbols::new())));
INTERNER.with(|interner| interner.clone())
}
pub fn intern(s: &str) -> Symbol {
let interner = get_local_interner();
let mut interner = interner.borrow_mut();
if s.starts_with(char::is_lowercase) {
interner.symbol(s)
} else {
SymbolModule::new("test".into(), &mut interner).scoped_symbol(s)
}
}
}
| 30.115385 | 99 | 0.666028 |
09099de7b7c6450859226076c4b6a49d3db149f8 | 3,955 | use std::{collections::HashMap, convert::TryFrom, ffi::CString};
use async_trait::async_trait;
use futures::channel::oneshot;
use snafu::ResultExt;
use url::Url;
use spdk_sys::{create_uring_bdev, delete_uring_bdev};
use crate::{
bdev::{util::uri, CreateDestroy, GetName},
core::Bdev,
ffihelper::{cb_arg, done_errno_cb, ErrnoResult},
nexus_uri::{self, NexusBdevError},
};
#[derive(Debug)]
pub(super) struct Uring {
name: String,
alias: String,
blk_size: u32,
uuid: Option<uuid::Uuid>,
}
/// Convert a URI to an Uring "object"
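///
/// A URI along the lines of `uring:///dev/sdx?blk_size=4096&uuid=...` is expected here (the
/// scheme name is an assumption, not enforced by this impl); `blk_size` defaults to 512 and
/// unrecognised parameters are only logged as a warning.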
impl TryFrom<&Url> for Uring {
type Error = NexusBdevError;
fn try_from(url: &Url) -> Result<Self, Self::Error> {
let segments = uri::segments(url);
if segments.is_empty() {
return Err(NexusBdevError::UriInvalid {
uri: url.to_string(),
message: String::from("no path segments"),
});
}
let mut parameters: HashMap<String, String> =
url.query_pairs().into_owned().collect();
let blk_size: u32 = match parameters.remove("blk_size") {
Some(value) => {
value.parse().context(nexus_uri::IntParamParseError {
uri: url.to_string(),
parameter: String::from("blk_size"),
})?
}
None => 512,
};
let uuid = uri::uuid(parameters.remove("uuid")).context(
nexus_uri::UuidParamParseError {
uri: url.to_string(),
},
)?;
if let Some(keys) = uri::keys(parameters) {
warn!("ignored parameters: {}", keys);
}
Ok(Uring {
name: url.path().into(),
alias: url.to_string(),
blk_size,
uuid,
})
}
}
impl GetName for Uring {
fn get_name(&self) -> String {
self.name.clone()
}
}
#[async_trait(?Send)]
impl CreateDestroy for Uring {
type Error = NexusBdevError;
/// Create a uring bdev
async fn create(&self) -> Result<String, Self::Error> {
if Bdev::lookup_by_name(&self.name).is_some() {
return Err(NexusBdevError::BdevExists {
name: self.get_name(),
});
}
let cname = CString::new(self.get_name()).unwrap();
let name = Bdev::from_ptr(unsafe {
create_uring_bdev(cname.as_ptr(), cname.as_ptr(), self.blk_size)
})
.map(|mut bdev| {
if let Some(u) = self.uuid {
bdev.set_uuid(Some(u.to_string()))
}
if !bdev.add_alias(&self.alias) {
error!(
"Failed to add alias {} to device {}",
self.alias,
self.get_name()
);
}
bdev.name()
});
async {
name.ok_or_else(|| NexusBdevError::BdevNotFound {
name: self.get_name(),
})
}
.await
}
/// Destroy the given uring bdev
async fn destroy(self: Box<Self>) -> Result<(), Self::Error> {
match Bdev::lookup_by_name(&self.name) {
Some(bdev) => {
let (sender, receiver) = oneshot::channel::<ErrnoResult<()>>();
unsafe {
delete_uring_bdev(
bdev.as_ptr(),
Some(done_errno_cb),
cb_arg(sender),
);
}
receiver
.await
.context(nexus_uri::CancelBdev {
name: self.get_name(),
})?
.context(nexus_uri::DestroyBdev {
name: self.get_name(),
})
}
None => Err(NexusBdevError::BdevNotFound {
name: self.get_name(),
}),
}
}
}
| 27.657343 | 79 | 0.483186 |
3a75b42f721b2ee54256214749a654a30ba9bda6 | 1,074 | #[allow(unused_attributes)]
#[cargo_snippet::snippet("template")]
pub mod template {
use std::io;
use std::io::Read;
#[allow(dead_code)]
fn main() -> io::Result<()> {
let r = io::stdin();
let mut sc = Scanner::new(r.lock());
let _n: usize = sc.read();
Ok(())
}
struct Scanner<R> {
r: R,
}
impl<R: io::Read> Scanner<R> {
fn new(r: R) -> Scanner<R> {
            Scanner { r }
}
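        // Reads the next whitespace-delimited token from the reader and parses it into `T`,
        // panicking with a descriptive message if parsing fails.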
#[allow(dead_code)]
fn read<T: std::str::FromStr>(&mut self) -> T {
let bytes = self
.r
.by_ref()
.bytes()
.map(|b| b.unwrap())
.skip_while(|&b| b == b' ' || b == b'\r' || b == b'\n')
.take_while(|&b| b != b' ' && b != b'\r' && b != b'\n')
.collect::<Vec<_>>();
std::str::from_utf8(&bytes)
.expect("convert bytes into &str failed")
.parse()
.ok()
.expect("parse failed")
}
}
}
| 27.538462 | 71 | 0.394786 |
e59725a21c5e2bbbf643e9cd3060eda3300b273b | 8,176 | use crate::*;
use frame_support::{assert_ok, assert_noop, impl_outer_event, impl_outer_origin, parameter_types, weights::Weight};
use codec::{alloc::sync::Arc, Decode};
use parking_lot::RwLock;
use sp_core::{
offchain::{
testing::{self, OffchainState, PoolState},
OffchainExt, TransactionPoolExt,
},
sr25519::{self, Signature},
testing::KeyStore,
traits::KeystoreExt,
H256,
};
use sp_core::{Pair, Public};
use sp_io::TestExternalities;
use sp_runtime::{
testing::{Header, TestXt},
traits::{BlakeTwo256, IdentityLookup, Verify, IdentifyAccount},
Perbill,
};
use crate as ocw_demo;
impl_outer_origin! {
pub enum Origin for Test {}
}
// Configure a mock runtime to test the pallet.
#[derive(Clone, Eq, PartialEq)]
pub struct Test;
parameter_types! {
pub const BlockHashCount: u64 = 250;
pub const MaximumBlockWeight: u32 = 1_000_000;
pub const MaximumBlockLength: u32 = 2 * 1024;
pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);
}
pub type Balances = pallet_balances::Module<Test>;
pub type System = frame_system::Module<Test>;
impl system::Trait for Test {
type BaseCallFilter = ();
type Origin = Origin;
type Index = u64;
type Call = ();
type BlockNumber = u64;
type Hash = H256;
type Hashing = BlakeTwo256;
type AccountId = sr25519::Public;
type Lookup = IdentityLookup<Self::AccountId>;
type Header = Header;
type Event = ();
type BlockHashCount = BlockHashCount;
type MaximumBlockWeight = MaximumBlockWeight;
type DbWeight = ();
type BlockExecutionWeight = ();
type ExtrinsicBaseWeight = ();
type MaximumExtrinsicWeight = MaximumBlockWeight;
type MaximumBlockLength = MaximumBlockLength;
type AvailableBlockRatio = AvailableBlockRatio;
type Version = ();
type PalletInfo = ();
type OnNewAccount = ();
type OnKilledAccount = ();
type SystemWeightInfo = ();
type AccountData = pallet_balances::AccountData<u64>;
}
impl pooler::Trait for Test {
type Event = ();
type Balance = u128;
type AssetId = u128;
type Currency = Balances;
}
parameter_types! {
pub const ExistentialDeposit: u64 = 1;
}
impl pallet_balances::Trait for Test {
type Balance = u64;
type Event = ();
type DustRemoval = ();
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = System;
type WeightInfo = ();
type MaxLocks = ();
}
parameter_types! {
pub const MaxSettlers: u32 = 10;
}
impl admin::Trait for Test {
type Event = ();
type MaxSettlers = MaxSettlers;
}
parameter_types! {
pub const SystemDecimals: u128 = 100000000000;
}
impl chance::Trait for Test {
type Event = ();
type Currency = Balances;
type SystemDecimals = SystemDecimals;
}
type TestExtrinsic = TestXt<Call<Test>, ()>;
parameter_types! {
pub const UnsignedPriority: u64 = 100;
}
impl Trait for Test {
type AuthorityId = crypto::TestAuthId;
type Call = Call<Test>;
type Event = ();
type UnsignedPriority = UnsignedPriority;
}
impl<LocalCall> system::offchain::CreateSignedTransaction<LocalCall> for Test
where
Call<Test>: From<LocalCall>,
{
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: Call<Test>,
_public: <Signature as Verify>::Signer,
_account: <Test as system::Trait>::AccountId,
index: <Test as system::Trait>::Index,
) -> Option<(
Call<Test>,
<TestExtrinsic as sp_runtime::traits::Extrinsic>::SignaturePayload,
)> {
Some((call, (index, ())))
}
}
impl frame_system::offchain::SigningTypes for Test {
type Public = <Signature as Verify>::Signer;
type Signature = Signature;
}
impl<C> frame_system::offchain::SendTransactionTypes<C> for Test
where
Call<Test>: From<C>,
{
type OverarchingCall = Call<Test>;
type Extrinsic = TestExtrinsic;
}
pub type OcwDemo = Module<Test>;
pub type Chance = chance::Module<Test>;
pub type Pooler = pooler::Module<Test>;
struct ExternalityBuilder;
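// Builds `TestExternalities` with offchain, transaction-pool and keystore extensions
// registered, plus genesis balances and the default account pre-registered as a settler.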
impl ExternalityBuilder {
pub fn build() -> (
TestExternalities,
Arc<RwLock<PoolState>>,
Arc<RwLock<OffchainState>>,
) {
const PHRASE: &str =
"expire stage crawl shell boss any story swamp skull yellow bamboo copy";
let (offchain, offchain_state) = testing::TestOffchainExt::new();
let (pool, pool_state) = testing::TestTransactionPoolExt::new();
let keystore = KeyStore::new();
keystore
.write()
.sr25519_generate_new(KEY_TYPE, Some(&format!("{}/hunter1", PHRASE)))
.unwrap();
let acct: <Test as system::Trait>::AccountId = Default::default();
let mut storage = system::GenesisConfig::default()
.build_storage::<Test>()
.unwrap();
pallet_balances::GenesisConfig::<Test> {
balances: vec![(acct, 100000000000000000)],
}.assimilate_storage(&mut storage).unwrap();
admin::GenesisConfig::<Test> {
settlers: vec![acct],
}.assimilate_storage(&mut storage).unwrap();
let mut t = TestExternalities::from(storage);
t.register_extension(OffchainExt::new(offchain));
t.register_extension(TransactionPoolExt::new(pool));
t.register_extension(KeystoreExt(keystore));
t.execute_with(|| System::set_block_number(1));
(t, pool_state, offchain_state)
}
}
#[test]
fn test_ocw_call_bet_won() {
let (mut t, _, _) = ExternalityBuilder::build();
t.execute_with(|| {
let acct: <Test as system::Trait>::AccountId = Default::default();
assert_ok!(Pooler::deposit(Origin::signed(acct), 100000000000000));
assert_eq!(Pooler::balance(acct), 100000000000000);
assert_ok!(Chance::bet(Origin::signed(acct), 1000000000000));
println!("check on bets before {:#?}", Chance::scheduled_bet());
let bet = [(acct,990000000000,),];
assert_eq!(Chance::scheduled_bet(), bet);
assert_ok!(OcwDemo::submit_signed(Origin::signed(acct), acct, 990000000000, true));
let bet_after = [];
assert_eq!(Chance::scheduled_bet(), bet_after);
println!("check on bets after {:#?}", Chance::scheduled_bet());
assert_eq!(Balances::free_balance(Chance::account_id()), 99010000000000);
})
}
#[test]
fn test_ocw_call_bet_lost() {
let (mut t, _, _) = ExternalityBuilder::build();
t.execute_with(|| {
let acct: <Test as system::Trait>::AccountId = Default::default();
assert_ok!(Pooler::deposit(Origin::signed(acct), 100000000000000));
assert_eq!(Pooler::balance(acct), 100000000000000);
assert_ok!(Chance::bet(Origin::signed(acct), 1000000000000));
println!("check on bets before {:#?}", Chance::scheduled_bet());
let bet = [(acct,990000000000,),];
assert_eq!(Chance::scheduled_bet(), bet);
assert_ok!(OcwDemo::submit_signed(Origin::signed(acct), acct, 990000000000, false));
let bet_after = [];
assert_eq!(Chance::scheduled_bet(), bet_after);
println!("check on bets after {:#?}", Chance::scheduled_bet());
println!("account {:#?}", Balances::free_balance(Chance::account_id()));
assert_eq!(Balances::free_balance(Chance::account_id()), 100990000000000);
})
}
#[test]
fn test_ocw_called_by_non_settler_should_fail() {
let (mut t, _, _) = ExternalityBuilder::build();
t.execute_with(|| {
let acct: <Test as system::Trait>::AccountId = Default::default();
let non_settler = get_account_id_from_seed::<sr25519::Public>("Alice");
assert_ok!(Pooler::deposit(Origin::signed(acct), 100000000000000));
assert_eq!(Pooler::balance(acct), 100000000000000);
assert_ok!(Chance::bet(Origin::signed(acct), 1000000000000));
println!("check on bets before {:#?}", Chance::scheduled_bet());
let bet = [(acct,990000000000,),];
assert_eq!(Chance::scheduled_bet(), bet);
assert_noop!(OcwDemo::submit_signed(Origin::signed(non_settler), acct, 990000000000, false), admin::Error::<Test>::NotSettler);
})
}
type AccountPublic = <Signature as Verify>::Signer;
/// Generate an account ID from seed.
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> <Test as system::Trait>::AccountId where
AccountPublic: From<<TPublic::Pair as Pair>::Public>
{
AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()
}
/// Generate a crypto pair from seed.
pub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {
TPublic::Pair::from_string(&format!("//{}", seed), None)
.expect("static values are valid; qed")
.public()
} | 29.516245 | 129 | 0.702666 |
e5e5165c69e6d7f0fdf994de5e39e7c65352ff8d | 355 | use std::fmt;
#[derive(Debug, Copy, Clone, PartialEq)]
#[repr(C)]
pub enum Dialect {
Other,
None,
EIR,
Standard,
LLVM,
}
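// `Display` renders the lowercased variant name, e.g. `Dialect::EIR` formats as "eir".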
impl fmt::Display for Dialect {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut name = format!("{:?}", self);
name.make_ascii_lowercase();
write!(f, "{}", &name)
}
}
| 18.684211 | 58 | 0.543662 |
56c71d617c9fc8ef64a36f6e378b175474f27542 | 1,695 | // enums3.rs
// Address all the TODOs to make the tests pass!
struct Point {
x: u8,
y: u8,
}
enum Message {
Echo(String),
    ChangeColor((u8, u8, u8)),
    Quit,
    Move(Point),
}
struct State {
color: (u8, u8, u8),
position: Point,
quit: bool,
}
impl State {
fn change_color(&mut self, color: (u8, u8, u8)) {
self.color = color;
}
fn quit(&mut self) {
self.quit = true;
}
fn echo(&self, s: String) {
println!("{}", s);
}
fn move_position(&mut self, p: Point) {
self.position = p;
}
fn process(&mut self, message: Message) {
        match message {
            Message::ChangeColor(color) => self.change_color(color),
            Message::Echo(s) => self.echo(s),
            Message::Move(p) => self.move_position(p),
            Message::Quit => self.quit(),
        }
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_match_message_call() {
let mut state = State {
quit: false,
position: Point { x: 0, y: 0 },
color: (0, 0, 0),
};
state.process(Message::ChangeColor((255, 0, 255)));
state.process(Message::Echo(String::from("hello world")));
state.process(Message::Move(Point { x: 10, y: 15 }));
state.process(Message::Quit);
assert_eq!(state.color, (255, 0, 255));
assert_eq!(state.position.x, 10);
assert_eq!(state.position.y, 15);
assert_eq!(state.quit, true);
}
}
| 22.905405 | 84 | 0.544543 |
dd5bb5ee564749fb928fe5c45683d75448870c63 | 3,675 | use cargo::ops;
use cargo::util::{CliResult, CliError, Human, Config};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
#[derive(RustcDecodable)]
struct Options {
flag_no_run: bool,
flag_package: Option<String>,
flag_jobs: Option<u32>,
flag_features: Vec<String>,
flag_no_default_features: bool,
flag_target: Option<String>,
flag_manifest_path: Option<String>,
flag_verbose: bool,
flag_lib: bool,
flag_bin: Vec<String>,
flag_example: Vec<String>,
flag_test: Vec<String>,
flag_bench: Vec<String>,
arg_args: Vec<String>,
}
pub const USAGE: &'static str = "
Execute all benchmarks of a local package
Usage:
cargo bench [options] [--] [<args>...]
Options:
-h, --help Print this message
--lib Benchmark only this package's library
--bin NAME Benchmark only the specified binary
--example NAME Benchmark only the specified example
--test NAME Benchmark only the specified test
--bench NAME Benchmark only the specified bench
--no-run Compile, but don't run benchmarks
-p SPEC, --package SPEC Package to run benchmarks for
-j N, --jobs N The number of jobs to run in parallel
--features FEATURES Space-separated list of features to also build
--no-default-features Do not build the `default` feature
--target TRIPLE Build for the target triple
--manifest-path PATH Path to the manifest to build benchmarks for
-v, --verbose Use verbose output
All of the trailing arguments are passed to the benchmark binaries generated
for filtering benchmarks and generally providing options configuring how they
run.
If the --package argument is given, then SPEC is a package id specification
which indicates which package should be benchmarked. If it is not given, then
the current package is benchmarked. For more information on SPEC and its format,
see the `cargo help pkgid` command.
Compilation can be customized with the `bench` profile in the manifest.
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
config.shell().set_verbose(options.flag_verbose);
let ops = ops::TestOptions {
no_run: options.flag_no_run,
compile_opts: ops::CompileOptions {
config: config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| &s[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| &s[..]),
exec_engine: None,
release: true,
mode: ops::CompileMode::Bench,
filter: ops::CompileFilter::new(options.flag_lib,
&options.flag_bin,
&options.flag_test,
&options.flag_example,
&options.flag_bench),
target_rustc_args: None,
},
};
let err = try!(ops::run_benches(&root, &ops,
&options.arg_args).map_err(|err| {
CliError::from_boxed(err, 101)
}));
match err {
None => Ok(None),
Some(err) => {
Err(match err.exit.as_ref().and_then(|c| c.code()) {
Some(i) => CliError::new("", i),
None => CliError::from_error(Human(err), 101)
})
}
}
}
| 38.28125 | 80 | 0.60517 |
ebff4a9126dafa2beb466067d1a0e6fb5797ac89 | 35,841 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An owned, growable string that enforces that its contents are valid UTF-8.
#![stable(feature = "rust1", since = "1.0.0")]
use core::prelude::*;
use core::fmt;
use core::hash;
use core::iter::FromIterator;
use core::mem;
use core::ops::{self, Deref, Add, Index};
use core::ptr;
use core::slice;
use core::str::pattern::Pattern;
use rustc_unicode::str as unicode_str;
use rustc_unicode::str::Utf16Item;
use borrow::{Cow, IntoCow};
use range::RangeArgument;
use str::{self, FromStr, Utf8Error, Chars};
use vec::Vec;
use boxed::Box;
/// A growable string stored as a UTF-8 encoded buffer.
#[derive(Clone, PartialOrd, Eq, Ord)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct String {
vec: Vec<u8>,
}
/// A possible error value from the `String::from_utf8` function.
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct FromUtf8Error {
bytes: Vec<u8>,
error: Utf8Error,
}
/// A possible error value from the `String::from_utf16` function.
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct FromUtf16Error(());
impl String {
/// Creates a new string buffer initialized with the empty string.
///
/// # Examples
///
/// ```
/// let mut s = String::new();
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> String {
String {
vec: Vec::new(),
}
}
/// Creates a new string buffer with the given capacity.
/// The string will be able to hold exactly `capacity` bytes without
/// reallocating. If `capacity` is 0, the string will not allocate.
///
/// # Examples
///
/// ```
/// let mut s = String::with_capacity(10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> String {
String {
vec: Vec::with_capacity(capacity),
}
}
/// Creates a new string buffer from the given string.
///
/// # Examples
///
/// ```
/// # #![feature(collections)]
/// let s = String::from("hello");
/// assert_eq!(&s[..], "hello");
/// ```
#[inline]
#[unstable(feature = "collections", reason = "use `String::from` instead")]
#[deprecated(since = "1.2.0", reason = "use `String::from` instead")]
#[cfg(not(test))]
pub fn from_str(string: &str) -> String {
String { vec: <[_]>::to_vec(string.as_bytes()) }
}
// HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
// required for this method definition, is not available. Since we don't
// require this method for testing purposes, I'll just stub it
// NB see the slice::hack module in slice.rs for more information
#[inline]
#[cfg(test)]
pub fn from_str(_: &str) -> String {
panic!("not available with cfg(test)");
}
/// Returns the vector as a string buffer, if possible, taking care not to
/// copy it.
///
/// # Failure
///
/// If the given vector is not valid UTF-8, then the original vector and the
    /// corresponding error are returned.
///
/// # Examples
///
/// ```
/// let hello_vec = vec![104, 101, 108, 108, 111];
/// let s = String::from_utf8(hello_vec).unwrap();
/// assert_eq!(s, "hello");
///
/// let invalid_vec = vec![240, 144, 128];
/// let s = String::from_utf8(invalid_vec).err().unwrap();
/// let err = s.utf8_error();
/// assert_eq!(s.into_bytes(), [240, 144, 128]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
match str::from_utf8(&vec) {
Ok(..) => Ok(String { vec: vec }),
Err(e) => Err(FromUtf8Error { bytes: vec, error: e })
}
}
/// Converts a vector of bytes to a new UTF-8 string.
/// Any invalid UTF-8 sequences are replaced with U+FFFD REPLACEMENT CHARACTER.
///
/// # Examples
///
/// ```
/// let input = b"Hello \xF0\x90\x80World";
/// let output = String::from_utf8_lossy(input);
/// assert_eq!(output, "Hello \u{FFFD}World");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf8_lossy<'a>(v: &'a [u8]) -> Cow<'a, str> {
let mut i;
match str::from_utf8(v) {
Ok(s) => return Cow::Borrowed(s),
Err(e) => i = e.valid_up_to(),
}
const TAG_CONT_U8: u8 = 128;
const REPLACEMENT: &'static [u8] = b"\xEF\xBF\xBD"; // U+FFFD in UTF-8
let total = v.len();
fn unsafe_get(xs: &[u8], i: usize) -> u8 {
unsafe { *xs.get_unchecked(i) }
}
fn safe_get(xs: &[u8], i: usize, total: usize) -> u8 {
if i >= total {
0
} else {
unsafe_get(xs, i)
}
}
let mut res = String::with_capacity(total);
if i > 0 {
unsafe {
res.as_mut_vec().push_all(&v[..i])
};
}
// subseqidx is the index of the first byte of the subsequence we're
// looking at. It's used to copy a bunch of contiguous good codepoints
// at once instead of copying them one by one.
let mut subseqidx = i;
while i < total {
let i_ = i;
let byte = unsafe_get(v, i);
i += 1;
macro_rules! error { () => ({
unsafe {
if subseqidx != i_ {
res.as_mut_vec().push_all(&v[subseqidx..i_]);
}
subseqidx = i;
res.as_mut_vec().push_all(REPLACEMENT);
}
})}
if byte < 128 {
// subseqidx handles this
} else {
let w = unicode_str::utf8_char_width(byte);
match w {
2 => {
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
}
3 => {
match (byte, safe_get(v, i, total)) {
(0xE0 , 0xA0 ... 0xBF) => (),
(0xE1 ... 0xEC, 0x80 ... 0xBF) => (),
(0xED , 0x80 ... 0x9F) => (),
(0xEE ... 0xEF, 0x80 ... 0xBF) => (),
_ => {
error!();
continue;
}
}
i += 1;
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
}
4 => {
match (byte, safe_get(v, i, total)) {
(0xF0 , 0x90 ... 0xBF) => (),
(0xF1 ... 0xF3, 0x80 ... 0xBF) => (),
(0xF4 , 0x80 ... 0x8F) => (),
_ => {
error!();
continue;
}
}
i += 1;
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
}
_ => {
error!();
continue;
}
}
}
}
if subseqidx < total {
unsafe {
res.as_mut_vec().push_all(&v[subseqidx..total])
};
}
Cow::Owned(res)
}
    /// Decode a UTF-16 encoded vector `v` into a `String`, returning an error
    /// if `v` contains any invalid data.
///
/// # Examples
///
/// ```
/// // 𝄞music
/// let mut v = &mut [0xD834, 0xDD1E, 0x006d, 0x0075,
/// 0x0073, 0x0069, 0x0063];
/// assert_eq!(String::from_utf16(v).unwrap(),
/// "𝄞music".to_string());
///
/// // 𝄞mu<invalid>ic
/// v[4] = 0xD800;
/// assert!(String::from_utf16(v).is_err());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf16(v: &[u16]) -> Result<String, FromUtf16Error> {
let mut s = String::with_capacity(v.len());
for c in unicode_str::utf16_items(v) {
match c {
Utf16Item::ScalarValue(c) => s.push(c),
Utf16Item::LoneSurrogate(_) => return Err(FromUtf16Error(())),
}
}
Ok(s)
}
/// Decode a UTF-16 encoded vector `v` into a string, replacing
/// invalid data with the replacement character (U+FFFD).
///
/// # Examples
///
/// ```
/// // 𝄞mus<invalid>ic<invalid>
/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
/// 0x0073, 0xDD1E, 0x0069, 0x0063,
/// 0xD834];
///
/// assert_eq!(String::from_utf16_lossy(v),
/// "𝄞mus\u{FFFD}ic\u{FFFD}".to_string());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf16_lossy(v: &[u16]) -> String {
unicode_str::utf16_items(v).map(|c| c.to_char_lossy()).collect()
}
/// Creates a new `String` from a length, capacity, and pointer.
///
/// # Unsafety
///
/// This is _very_ unsafe because:
///
/// * We call `Vec::from_raw_parts` to get a `Vec<u8>`. Therefore, this
/// function inherits all of its unsafety, see [its
/// documentation](../vec/struct.Vec.html#method.from_raw_parts)
/// for the invariants it expects, they also apply to this function.
/// * We assume that the `Vec` contains valid UTF-8.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
String {
vec: Vec::from_raw_parts(buf, length, capacity),
}
}
/// Converts a vector of bytes to a new `String` without checking if
/// it contains valid UTF-8. This is unsafe because it assumes that
/// the UTF-8-ness of the vector has already been validated.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_utf8_unchecked(bytes: Vec<u8>) -> String {
String { vec: bytes }
}
/// Returns the underlying byte buffer, encoded as UTF-8.
///
/// # Examples
///
/// ```
/// let s = String::from("hello");
/// let bytes = s.into_bytes();
/// assert_eq!(bytes, [104, 101, 108, 108, 111]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_bytes(self) -> Vec<u8> {
self.vec
}
/// Extracts a string slice containing the entire string.
#[inline]
#[unstable(feature = "convert",
reason = "waiting on RFC revision")]
pub fn as_str(&self) -> &str {
self
}
/// Pushes the given string onto this string buffer.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// s.push_str("bar");
/// assert_eq!(s, "foobar");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_str(&mut self, string: &str) {
self.vec.push_all(string.as_bytes())
}
/// Returns the number of bytes that this string buffer can hold without
/// reallocating.
///
/// # Examples
///
/// ```
/// let s = String::with_capacity(10);
/// assert!(s.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> usize {
self.vec.capacity()
}
/// Reserves capacity for at least `additional` more bytes to be inserted
/// in the given `String`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Panics
///
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
/// ```
/// let mut s = String::new();
/// s.reserve(10);
/// assert!(s.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: usize) {
self.vec.reserve(additional)
}
/// Reserves the minimum capacity for exactly `additional` more bytes to be
/// inserted in the given `String`. Does nothing if the capacity is already
/// sufficient.
///
/// Note that the allocator may give the collection more space than it
/// requests. Therefore capacity can not be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Panics
///
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
/// ```
/// let mut s = String::new();
/// s.reserve_exact(10);
/// assert!(s.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_exact(&mut self, additional: usize) {
self.vec.reserve_exact(additional)
}
/// Shrinks the capacity of this string buffer to match its length.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// s.reserve(100);
/// assert!(s.capacity() >= 100);
/// s.shrink_to_fit();
/// assert_eq!(s.capacity(), 3);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
self.vec.shrink_to_fit()
}
/// Adds the given character to the end of the string.
///
/// # Examples
///
/// ```
/// let mut s = String::from("abc");
/// s.push('1');
/// s.push('2');
/// s.push('3');
/// assert_eq!(s, "abc123");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push(&mut self, ch: char) {
match ch.len_utf8() {
1 => self.vec.push(ch as u8),
ch_len => {
let cur_len = self.len();
// This may use up to 4 bytes.
self.vec.reserve(ch_len);
unsafe {
// Attempt to not use an intermediate buffer by just pushing bytes
// directly onto this string.
let slice = slice::from_raw_parts_mut (
self.vec.as_mut_ptr().offset(cur_len as isize),
ch_len
);
let used = ch.encode_utf8(slice).unwrap_or(0);
self.vec.set_len(cur_len + used);
}
}
}
}
/// Works with the underlying buffer as a byte slice.
///
/// # Examples
///
/// ```
/// let s = String::from("hello");
/// assert_eq!(s.as_bytes(), [104, 101, 108, 108, 111]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_bytes(&self) -> &[u8] {
&self.vec
}
/// Shortens a string to the specified length.
///
/// # Panics
///
/// Panics if `new_len` > current length,
/// or if `new_len` is not a character boundary.
///
/// # Examples
///
/// ```
/// let mut s = String::from("hello");
/// s.truncate(2);
/// assert_eq!(s, "he");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn truncate(&mut self, new_len: usize) {
assert!(self.is_char_boundary(new_len));
self.vec.truncate(new_len)
}
/// Removes the last character from the string buffer and returns it.
/// Returns `None` if this string buffer is empty.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// assert_eq!(s.pop(), Some('o'));
/// assert_eq!(s.pop(), Some('o'));
/// assert_eq!(s.pop(), Some('f'));
/// assert_eq!(s.pop(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop(&mut self) -> Option<char> {
let len = self.len();
if len == 0 {
return None
}
let ch = self.char_at_reverse(len);
unsafe {
self.vec.set_len(len - ch.len_utf8());
}
Some(ch)
}
/// Removes the character from the string buffer at byte position `idx` and
/// returns it.
///
/// # Warning
///
/// This is an O(n) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
///
/// If `idx` does not lie on a character boundary, or if it is out of
/// bounds, then this function will panic.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// assert_eq!(s.remove(0), 'f');
/// assert_eq!(s.remove(1), 'o');
/// assert_eq!(s.remove(0), 'o');
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, idx: usize) -> char {
let len = self.len();
assert!(idx <= len);
let ch = self.char_at(idx);
let next = idx + ch.len_utf8();
unsafe {
ptr::copy(self.vec.as_ptr().offset(next as isize),
self.vec.as_mut_ptr().offset(idx as isize),
len - next);
self.vec.set_len(len - (next - idx));
}
ch
}
/// Inserts a character into the string buffer at byte position `idx`.
///
/// # Warning
///
/// This is an O(n) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
///
/// If `idx` does not lie on a character boundary or is out of bounds, then
/// this function will panic.
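    ///
    /// # Examples
    ///
    /// A minimal illustration:
    ///
    /// ```
    /// let mut s = String::from("foo");
    /// s.insert(3, 'd');
    /// assert_eq!(s, "food");
    /// ```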
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, idx: usize, ch: char) {
let len = self.len();
assert!(idx <= len);
assert!(self.is_char_boundary(idx));
self.vec.reserve(4);
let mut bits = [0; 4];
let amt = ch.encode_utf8(&mut bits).unwrap();
unsafe {
ptr::copy(self.vec.as_ptr().offset(idx as isize),
self.vec.as_mut_ptr().offset((idx + amt) as isize),
len - idx);
ptr::copy(bits.as_ptr(),
self.vec.as_mut_ptr().offset(idx as isize),
amt);
self.vec.set_len(len + amt);
}
}
/// Views the string buffer as a mutable sequence of bytes.
///
/// This is unsafe because it does not check
/// to ensure that the resulting string will be valid UTF-8.
///
/// # Examples
///
/// ```
/// let mut s = String::from("hello");
/// unsafe {
/// let vec = s.as_mut_vec();
/// assert!(vec == &[104, 101, 108, 108, 111]);
/// vec.reverse();
/// }
/// assert_eq!(s, "olleh");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8> {
&mut self.vec
}
/// Returns the number of bytes in this string.
///
/// # Examples
///
/// ```
/// let a = "foo".to_string();
/// assert_eq!(a.len(), 3);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize { self.vec.len() }
/// Returns true if the string contains no bytes
///
/// # Examples
///
/// ```
/// let mut v = String::new();
/// assert!(v.is_empty());
/// v.push('a');
/// assert!(!v.is_empty());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Truncates the string, returning it to 0 length.
///
/// # Examples
///
/// ```
/// let mut s = "foo".to_string();
/// s.clear();
/// assert!(s.is_empty());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
self.vec.clear()
}
    /// Creates a draining iterator that removes the specified range in the string
/// and yields the removed chars from start to end. The element range is
/// removed even if the iterator is not consumed until the end.
///
/// # Panics
///
/// Panics if the starting point or end point are not on character boundaries,
/// or if they are out of bounds.
///
/// # Examples
///
/// ```
/// # #![feature(drain)]
///
/// let mut s = String::from("α is alpha, β is beta");
/// let beta_offset = s.find('β').unwrap_or(s.len());
///
/// // Remove the range up until the β from the string
/// let t: String = s.drain(..beta_offset).collect();
/// assert_eq!(t, "α is alpha, ");
/// assert_eq!(s, "β is beta");
///
/// // A full range clears the string
/// s.drain(..);
/// assert_eq!(s, "");
/// ```
#[unstable(feature = "drain",
reason = "recently added, matches RFC")]
pub fn drain<R>(&mut self, range: R) -> Drain where R: RangeArgument<usize> {
// Memory safety
//
// The String version of Drain does not have the memory safety issues
// of the vector version. The data is just plain bytes.
// Because the range removal happens in Drop, if the Drain iterator is leaked,
// the removal will not happen.
let len = self.len();
let start = *range.start().unwrap_or(&0);
let end = *range.end().unwrap_or(&len);
// Take out two simultaneous borrows. The &mut String won't be accessed
// until iteration is over, in Drop.
let self_ptr = self as *mut _;
// slicing does the appropriate bounds checks
let chars_iter = self[start..end].chars();
Drain {
start: start,
end: end,
iter: chars_iter,
string: self_ptr,
}
}
/// Converts the string into `Box<str>`.
///
/// Note that this will drop any excess capacity.
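    ///
    /// # Examples
    ///
    /// A minimal sketch (assumes the unstable `box_str` feature is enabled, as
    /// the attribute line shows):
    ///
    /// ```
    /// # #![feature(box_str)]
    /// let s = String::from("hello");
    /// let b = s.into_boxed_slice();
    /// assert_eq!(&*b, "hello");
    /// ```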
#[unstable(feature = "box_str",
reason = "recently added, matches RFC")]
pub fn into_boxed_slice(self) -> Box<str> {
let slice = self.vec.into_boxed_slice();
unsafe { mem::transmute::<Box<[u8]>, Box<str>>(slice) }
}
}
impl FromUtf8Error {
/// Consumes this error, returning the bytes that were attempted to make a
/// `String` with.
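    ///
    /// # Examples
    ///
    /// A short illustration with input that is not valid UTF-8:
    ///
    /// ```
    /// // 159 on its own is not a valid UTF-8 byte sequence
    /// let bytes = vec![0, 159];
    /// let err = String::from_utf8(bytes).unwrap_err();
    /// assert_eq!(err.into_bytes(), vec![0, 159]);
    /// ```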
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_bytes(self) -> Vec<u8> { self.bytes }
/// Access the underlying UTF8-error that was the cause of this error.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn utf8_error(&self) -> Utf8Error { self.error }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf8Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.error, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf16Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt("invalid utf-16: lone surrogate found", f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl FromIterator<char> for String {
fn from_iter<I: IntoIterator<Item=char>>(iter: I) -> String {
let mut buf = String::new();
buf.extend(iter);
buf
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> FromIterator<&'a str> for String {
fn from_iter<I: IntoIterator<Item=&'a str>>(iter: I) -> String {
let mut buf = String::new();
buf.extend(iter);
buf
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Extend<char> for String {
fn extend<I: IntoIterator<Item=char>>(&mut self, iterable: I) {
let iterator = iterable.into_iter();
let (lower_bound, _) = iterator.size_hint();
self.reserve(lower_bound);
for ch in iterator {
self.push(ch)
}
}
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a> Extend<&'a char> for String {
fn extend<I: IntoIterator<Item=&'a char>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned());
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Extend<&'a str> for String {
fn extend<I: IntoIterator<Item=&'a str>>(&mut self, iterable: I) {
let iterator = iterable.into_iter();
// A guess that at least one byte per iterator element will be needed.
let (lower_bound, _) = iterator.size_hint();
self.reserve(lower_bound);
for s in iterator {
self.push_str(s)
}
}
}
/// A convenience impl that delegates to the impl for `&str`
impl<'a, 'b> Pattern<'a> for &'b String {
type Searcher = <&'b str as Pattern<'a>>::Searcher;
fn into_searcher(self, haystack: &'a str) -> <&'b str as Pattern<'a>>::Searcher {
self[..].into_searcher(haystack)
}
#[inline]
fn is_contained_in(self, haystack: &'a str) -> bool {
self[..].is_contained_in(haystack)
}
#[inline]
fn is_prefix_of(self, haystack: &'a str) -> bool {
self[..].is_prefix_of(haystack)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialEq for String {
#[inline]
fn eq(&self, other: &String) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &String) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
macro_rules! impl_eq {
($lhs:ty, $rhs: ty) => {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> PartialEq<$rhs> for $lhs {
#[inline]
fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> PartialEq<$lhs> for $rhs {
#[inline]
fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
}
}
impl_eq! { String, str }
impl_eq! { String, &'a str }
impl_eq! { Cow<'a, str>, str }
impl_eq! { Cow<'a, str>, String }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<&'b str> for Cow<'a, str> {
#[inline]
fn eq(&self, other: &&'b str) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &&'b str) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<Cow<'a, str>> for &'b str {
#[inline]
fn eq(&self, other: &Cow<'a, str>) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &Cow<'a, str>) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Default for String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> String {
String::new()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for String {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for String {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl hash::Hash for String {
#[inline]
fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
(**self).hash(hasher)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Add<&'a str> for String {
type Output = String;
#[inline]
fn add(mut self, other: &str) -> String {
self.push_str(other);
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::Range<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: ops::Range<usize>) -> &str {
&self[..][index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeTo<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: ops::RangeTo<usize>) -> &str {
&self[..][index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFrom<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: ops::RangeFrom<usize>) -> &str {
&self[..][index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFull> for String {
type Output = str;
#[inline]
fn index(&self, _index: ops::RangeFull) -> &str {
unsafe { mem::transmute(&*self.vec) }
}
}
#[cfg(not(stage0))]
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::Range<usize>> for String {
#[inline]
fn index_mut(&mut self, index: ops::Range<usize>) -> &mut str {
&mut self[..][index]
}
}
#[cfg(not(stage0))]
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::RangeTo<usize>> for String {
#[inline]
fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut str {
&mut self[..][index]
}
}
#[cfg(not(stage0))]
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::RangeFrom<usize>> for String {
#[inline]
fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut str {
&mut self[..][index]
}
}
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::RangeFull> for String {
#[inline]
fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str {
unsafe { mem::transmute(&mut *self.vec) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Deref for String {
type Target = str;
#[inline]
fn deref(&self) -> &str {
unsafe { mem::transmute(&self.vec[..]) }
}
}
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::DerefMut for String {
#[inline]
fn deref_mut(&mut self) -> &mut str {
unsafe { mem::transmute(&mut self.vec[..]) }
}
}
/// Error returned from `String::from_str`
#[unstable(feature = "str_parse_error", reason = "may want to be replaced with \
Void if it ever exists")]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct ParseError(());
#[stable(feature = "rust1", since = "1.0.0")]
impl FromStr for String {
type Err = ParseError;
#[inline]
fn from_str(s: &str) -> Result<String, ParseError> {
Ok(String::from(s))
}
}
/// A generic trait for converting a value to a string
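///
/// # Examples
///
/// A basic illustration (any `Display` type gets this via the blanket impl below):
///
/// ```
/// let i = 5;
/// let five = i.to_string();
/// assert_eq!(five, "5");
/// ```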
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToString {
/// Converts the value of `self` to an owned string
#[stable(feature = "rust1", since = "1.0.0")]
fn to_string(&self) -> String;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized> ToString for T {
#[inline]
fn to_string(&self) -> String {
use core::fmt::Write;
let mut buf = String::new();
let _ = buf.write_fmt(format_args!("{}", self));
buf.shrink_to_fit();
buf
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<str> for String {
#[inline]
fn as_ref(&self) -> &str {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<[u8]> for String {
#[inline]
fn as_ref(&self) -> &[u8] {
self.as_bytes()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&'a str> for String {
#[cfg(not(test))]
#[inline]
fn from(s: &'a str) -> String {
String { vec: <[_]>::to_vec(s.as_bytes()) }
}
// HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
// required for this method definition, is not available. Since we don't
// require this method for testing purposes, I'll just stub it
// NB see the slice::hack module in slice.rs for more information
#[inline]
#[cfg(test)]
fn from(_: &str) -> String {
panic!("not available with cfg(test)");
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&'a str> for Cow<'a, str> {
#[inline]
fn from(s: &'a str) -> Cow<'a, str> {
Cow::Borrowed(s)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<String> for Cow<'a, str> {
#[inline]
fn from(s: String) -> Cow<'a, str> {
Cow::Owned(s)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Into<Vec<u8>> for String {
fn into(self) -> Vec<u8> {
self.into_bytes()
}
}
#[unstable(feature = "into_cow", reason = "may be replaced by `convert::Into`")]
impl IntoCow<'static, str> for String {
#[inline]
fn into_cow(self) -> Cow<'static, str> {
Cow::Owned(self)
}
}
#[unstable(feature = "into_cow", reason = "may be replaced by `convert::Into`")]
impl<'a> IntoCow<'a, str> for &'a str {
#[inline]
fn into_cow(self) -> Cow<'a, str> {
Cow::Borrowed(self)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Write for String {
#[inline]
fn write_str(&mut self, s: &str) -> fmt::Result {
self.push_str(s);
Ok(())
}
#[inline]
fn write_char(&mut self, c: char) -> fmt::Result {
self.push(c);
Ok(())
}
}
/// A draining iterator for `String`.
#[unstable(feature = "drain", reason = "recently added")]
pub struct Drain<'a> {
/// Will be used as &'a mut String in the destructor
string: *mut String,
/// Start of part to remove
start: usize,
/// End of part to remove
end: usize,
/// Current remaining range to remove
iter: Chars<'a>,
}
unsafe impl<'a> Sync for Drain<'a> {}
unsafe impl<'a> Send for Drain<'a> {}
#[unstable(feature = "drain", reason = "recently added")]
impl<'a> Drop for Drain<'a> {
fn drop(&mut self) {
unsafe {
// Use Vec::drain. "Reaffirm" the bounds checks to avoid
// panic code being inserted again.
let self_vec = (*self.string).as_mut_vec();
if self.start <= self.end && self.end <= self_vec.len() {
self_vec.drain(self.start..self.end);
}
}
}
}
#[unstable(feature = "drain", reason = "recently added")]
impl<'a> Iterator for Drain<'a> {
type Item = char;
#[inline]
fn next(&mut self) -> Option<char> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
#[unstable(feature = "drain", reason = "recently added")]
impl<'a> DoubleEndedIterator for Drain<'a> {
#[inline]
fn next_back(&mut self) -> Option<char> {
self.iter.next_back()
}
}
| 29.694283 | 90 | 0.513044 |
183cedaa2b35ea36d77ec8223180facf1ae2af33 | 10,142 | use std::collections::HashMap;
use std::env::consts::{ARCH, OS};
use std::fs;
use std::fs::File;
use std::io;
use std::io::Write;
use std::ops::Add;
use std::path::{Path, PathBuf};
use std::str::from_utf8;
use anyhow::anyhow;
use futures::TryFutureExt;
use log::{debug, trace};
use md5::Digest;
use os_info::{Bitness, Info};
use pact_models::json_utils::json_to_string;
use prost::Message;
use prost_types::FileDescriptorSet;
use reqwest::Url;
use serde_json::Value;
use tempfile::NamedTempFile;
use tokio::process::Command;
use zip::ZipArchive;
pub(crate) struct Protoc {
protoc_path: String,
local_install: bool
}
impl Protoc {
fn new(path: String, local_install: bool) -> Self {
Protoc {
protoc_path: path,
local_install
}
}
// Try to invoke the protoc binary
async fn invoke(&self) -> anyhow::Result<String> {
trace!("Invoking protoc: '{} --version'", self.protoc_path);
match Command::new(&self.protoc_path).arg("--version").output().await {
Ok(out) => {
if out.status.success() {
let version = from_utf8(out.stdout.as_ref()).unwrap_or_default();
debug!("Protoc binary invoked OK: {}", version);
Ok(version.to_string())
} else {
debug!("Protoc output: {}", from_utf8(out.stdout.as_slice()).unwrap_or_default());
debug!("Protoc stderr: {}", from_utf8(out.stderr.as_slice()).unwrap_or_default());
Err(anyhow!("Failed to invoke protoc binary: exit code {}", out.status))
}
}
Err(err) => Err(anyhow!("Failed to invoke protoc binary: {}", err))
}
}
pub(crate) async fn parse_proto_file(&self, proto_file: &Path) -> anyhow::Result<(FileDescriptorSet, Digest, Vec<u8>)> {
let tmp_dir = Path::new("tmp");
fs::create_dir_all(tmp_dir)?;
let file = NamedTempFile::new_in(tmp_dir)?;
let output = format!("-o{}", file.path().to_string_lossy());
let mut parent_dir = proto_file.to_path_buf();
parent_dir.pop();
let include = format!("-I{}", parent_dir.to_string_lossy());
let mut cmd = Command::new(&self.protoc_path);
cmd.arg(output.as_str())
.arg(include.as_str())
.arg("--include_imports")
.arg(proto_file);
if self.local_install {
let include2 = "-Iprotoc/include/google/protobuf";
trace!("Invoking protoc: '{} {} {} {} --include_imports {}'", self.protoc_path, output.as_str(), include.as_str(), include2, proto_file.to_string_lossy());
cmd.arg(include2);
} else {
trace!("Invoking protoc: '{} {} {} --include_imports {}'", self.protoc_path, output.as_str(), include.as_str(), proto_file.to_string_lossy());
}
match cmd.output().await {
Ok(out) => {
if out.status.success() {
let data = fs::read(file.path())?;
FileDescriptorSet::decode(data.as_slice())
.map(|descriptor| (descriptor, md5::compute(data.as_slice()), data))
.map_err(|err| anyhow!("Failed to load file descriptor set - {}", err))
} else {
debug!("Protoc output: {}", from_utf8(out.stdout.as_slice()).unwrap_or_default());
debug!("Protoc stderr: {}", from_utf8(out.stderr.as_slice()).unwrap_or_default());
Err(anyhow!("Failed to invoke protoc binary: exit code {}", out.status))
}
}
Err(err) => Err(anyhow!("Failed to invoke protoc binary: {}", err))
}
}
}
// This function first checks for an unpacked protoc binary and tries to run that;
// otherwise it tries to unpack a previously downloaded archive for the current OS;
// otherwise it tries to download and unpack the version for the current OS;
// otherwise it falls back to any protoc version on the system path.
// It returns an error if none of these succeed.
pub(crate) async fn setup_protoc(config: &HashMap<String, Value>) -> anyhow::Result<Protoc> {
let os_info = os_info::get();
debug!("Detected OS: {}", os_info);
local_protoc()
.or_else(|err| {
trace!("local_protoc: {}", err);
unpack_protoc(config, &os_info)
})
.or_else(|err| {
trace!("unpack_protoc: {}", err);
download_protoc(config, &os_info)
})
.or_else(|err| {
trace!("download_protoc: {}", err);
system_protoc()
})
.await
}
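// A minimal sketch of how `setup_protoc` is driven by the manifest config: the
// "protocVersion" and "downloadUrl" keys are the ones read by `download_protoc`
// and `unpack_protoc` above. The concrete values below are assumed examples, not
// values taken from a real manifest.
#[allow(dead_code)]
async fn setup_protoc_example() -> anyhow::Result<Protoc> {
  let mut config: HashMap<String, Value> = HashMap::new();
  config.insert("protocVersion".to_string(), Value::String("3.19.1".to_string()));
  config.insert(
    "downloadUrl".to_string(),
    Value::String("https://github.com/protocolbuffers/protobuf/releases/download".to_string())
  );
  setup_protoc(&config).await
}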
async fn download_protoc(config: &HashMap<String, Value>, os_info: &Info) -> anyhow::Result<Protoc> {
trace!("download_protoc: config = {:?}", config);
let protoc_version = config.get("protocVersion")
.map(json_to_string)
.ok_or_else(|| anyhow!("Could not get the protoc version from the manifest"))?;
let download_url = config.get("downloadUrl")
.map(|v| {
let url = json_to_string(v);
if url.ends_with('/') {
url
} else {
url.add("/")
}
})
.ok_or_else(|| anyhow!("Could not get the protoc download URL from the manifest"))?;
let base_url = Url::parse(download_url.as_str())?;
let os_type = os_type(os_info.bitness(), ARCH, OS);
let url = base_url.join(format!("v{}/protoc-{}-{}.zip", protoc_version, protoc_version, os_type).as_str())?;
debug!("Downloading protoc from '{}'", url);
let mut response = reqwest::get(url).await?;
if response.status().is_success() {
let mut protoc_file = File::create(format!("./protoc-{}-{}.zip", protoc_version, os_type))?;
let mut count: usize = 0;
while let Some(chunk) = response.chunk().await? {
count += chunk.len();
protoc_file.write_all(chunk.as_ref())?;
}
debug!("Downloaded {} bytes", count);
unpack_protoc(config, os_info).await
} else {
Err(anyhow!("Failed to download protoc - {}", response.status()))
}
}
async fn system_protoc() -> anyhow::Result<Protoc> {
trace!("system_protoc: looking for protoc in system path");
let program = if OS == "windows" { "where" } else { "which" };
match Command::new(program).arg("protoc").output().await {
Ok(out) => {
if out.status.success() {
let path = from_utf8(out.stdout.as_ref())?;
debug!("Found protoc binary: {}", path);
let protoc = Protoc::new(path.trim().to_string(), false);
protoc.invoke().await?;
Ok(protoc)
} else {
debug!("{} output: {}", program, from_utf8(out.stdout.as_slice()).unwrap_or_default());
debug!("{} stderr: {}", program, from_utf8(out.stderr.as_slice()).unwrap_or_default());
Err(anyhow!("Failed to invoke {}: exit code {}", program, out.status))
}
}
Err(err) => Err(anyhow!("Failed to find system protoc binary: {}", err))
}
}
async fn local_protoc() -> anyhow::Result<Protoc> {
let local_path = "./protoc/bin/protoc";
trace!("Looking for local protoc at '{}'", local_path);
let protoc_path = Path::new(local_path);
if protoc_path.exists() {
debug!("Found unpacked protoc binary");
let protoc = Protoc::new(protoc_path.to_string_lossy().to_string(), true);
protoc.invoke().await?;
Ok(protoc)
} else {
trace!("No local unpacked protoc binary");
Err(anyhow!("No local unpacked protoc binary"))
}
}
async fn unpack_protoc(config: &HashMap<String, Value>, os_info: &Info) -> anyhow::Result<Protoc> {
let protoc_version = config.get("protocVersion")
.map(json_to_string)
.ok_or_else(|| anyhow!("Could not get the protoc version from the manifest"))?;
let protoc_file = format!("./protoc-{}-{}.zip", protoc_version, os_type(os_info.bitness(), ARCH, OS));
trace!("Looking for protoc zip archive '{}'", protoc_file);
let protoc_zip_path = Path::new(protoc_file.as_str());
if protoc_zip_path.exists() {
debug!("Found protoc zip archive: {}", protoc_zip_path.to_string_lossy());
unzip_proto_archive(protoc_zip_path)?;
local_protoc().await
} else {
trace!("protoc zip archive not found");
Err(anyhow!("No local protoc zip archive"))
}
}
fn unzip_proto_archive(archive_path: &Path) -> anyhow::Result<()> {
let file = File::open(archive_path)?;
let mut archive = ZipArchive::new(file)?;
let base_path = PathBuf::from("protoc");
for i in 0..archive.len() {
let mut file = archive.by_index(i)?;
let outpath = match file.enclosed_name() {
Some(path) => base_path.join(path).to_owned(),
None => {
trace!("Skipping file {} as it is not a valid file name", i);
continue
}
};
if file.name().ends_with('/') {
trace!("Directory {} extracted to \"{}\"", i, outpath.display());
fs::create_dir_all(&outpath)?;
} else {
trace!("File {} extracted to \"{}\" ({} bytes)", i, outpath.display(), file.size());
if let Some(p) = outpath.parent() {
if !p.exists() {
fs::create_dir_all(&p)?;
}
}
let mut outfile = fs::File::create(&outpath)?;
io::copy(&mut file, &mut outfile)?;
}
// Get and Set permissions
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
if let Some(mode) = file.unix_mode() {
fs::set_permissions(&outpath, fs::Permissions::from_mode(mode))?;
}
}
}
Ok(())
}
fn os_type(os_info: Bitness, arch: &str, os: &str) -> String {
match os {
"linux" => match arch {
"x86" => "linux-x86_32",
"x86_64" => "linux-x86_64",
"aarch64" => "linux-aarch_64",
"s390x" => "linux-s390_64",
_ => "unknown"
}.to_string(),
"macos" => format!("osx-{}", arch),
"windows" => format!("win{}", match os_info {
Bitness::X32 => "32",
Bitness::X64 => "64",
_ => "64"
}),
_ => "unknown".to_string()
}
}
#[cfg(test)]
mod tests {
use expectest::prelude::*;
use os_info::Bitness;
use super::os_type;
#[test]
fn os_type_test() {
expect!(os_type(Bitness::X32, "x86", "linux").as_str()).to(be_equal_to("linux-x86_32"));
expect!(os_type(Bitness::X64, "x86_64", "linux").as_str()).to(be_equal_to("linux-x86_64"));
expect!(os_type(Bitness::X64, "aarch64", "linux").as_str()).to(be_equal_to("linux-aarch_64"));
expect!(os_type(Bitness::X64, "x86_64", "macos").as_str()).to(be_equal_to("osx-x86_64"));
expect!(os_type(Bitness::X32, "", "windows").as_str()).to(be_equal_to("win32"));
expect!(os_type(Bitness::X64, "", "windows").as_str()).to(be_equal_to("win64"));
}
}
| 35.337979 | 161 | 0.619207 |
76914baaae35ae5322544a186a9104ea46d6e4ac | 6,805 | //! Sapling key components.
//!
//! Implements [section 4.2.2] of the Zcash Protocol Specification.
//!
//! [section 4.2.2]: https://zips.z.cash/protocol/protocol.pdf#saplingkeycomponents
use crate::{
jubjub::{edwards, FixedGenerators, JubjubEngine, JubjubParams, ToUniform, Unknown},
primitives::{ProofGenerationKey, ViewingKey},
};
use blake2b_simd::{Hash as Blake2bHash, Params as Blake2bParams};
use ff::{PrimeField, PrimeFieldRepr};
use std::io::{self, Read, Write};
pub const PRF_EXPAND_PERSONALIZATION: &[u8; 16] = b"Zcash_ExpandSeed";
/// PRF^expand(sk, t) := BLAKE2b-512("Zcash_ExpandSeed", sk || t)
pub fn prf_expand(sk: &[u8], t: &[u8]) -> Blake2bHash {
prf_expand_vec(sk, &[t])
}
pub fn prf_expand_vec(sk: &[u8], ts: &[&[u8]]) -> Blake2bHash {
let mut h = Blake2bParams::new()
.hash_length(64)
.personal(PRF_EXPAND_PERSONALIZATION)
.to_state();
h.update(sk);
for t in ts {
h.update(t);
}
h.finalize()
}
/// An outgoing viewing key
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct OutgoingViewingKey(pub [u8; 32]);
/// A Sapling expanded spending key
#[derive(Clone)]
pub struct ExpandedSpendingKey<E: JubjubEngine> {
pub ask: E::Fs,
pub nsk: E::Fs,
pub ovk: OutgoingViewingKey,
}
/// A Sapling full viewing key
#[derive(Debug)]
pub struct FullViewingKey<E: JubjubEngine> {
pub vk: ViewingKey<E>,
pub ovk: OutgoingViewingKey,
}
impl<E: JubjubEngine> ExpandedSpendingKey<E> {
pub fn from_spending_key(sk: &[u8]) -> Self {
let ask = E::Fs::to_uniform(prf_expand(sk, &[0x00]).as_bytes());
let nsk = E::Fs::to_uniform(prf_expand(sk, &[0x01]).as_bytes());
let mut ovk = OutgoingViewingKey([0u8; 32]);
ovk.0
.copy_from_slice(&prf_expand(sk, &[0x02]).as_bytes()[..32]);
ExpandedSpendingKey { ask, nsk, ovk }
}
pub fn proof_generation_key(&self, params: &E::Params) -> ProofGenerationKey<E> {
ProofGenerationKey {
ak: params
.generator(FixedGenerators::SpendingKeyGenerator)
.mul(self.ask, params),
nsk: self.nsk,
}
}
pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
let mut ask_repr = <E::Fs as PrimeField>::Repr::default();
ask_repr.read_le(&mut reader)?;
let ask = E::Fs::from_repr(ask_repr)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
let mut nsk_repr = <E::Fs as PrimeField>::Repr::default();
nsk_repr.read_le(&mut reader)?;
let nsk = E::Fs::from_repr(nsk_repr)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
let mut ovk = [0; 32];
reader.read_exact(&mut ovk)?;
Ok(ExpandedSpendingKey {
ask,
nsk,
ovk: OutgoingViewingKey(ovk),
})
}
pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
self.ask.into_repr().write_le(&mut writer)?;
self.nsk.into_repr().write_le(&mut writer)?;
writer.write_all(&self.ovk.0)?;
Ok(())
}
pub fn to_bytes(&self) -> [u8; 96] {
let mut result = [0u8; 96];
self.write(&mut result[..])
.expect("should be able to serialize an ExpandedSpendingKey");
result
}
}
impl<E: JubjubEngine> Clone for FullViewingKey<E> {
fn clone(&self) -> Self {
FullViewingKey {
vk: ViewingKey {
ak: self.vk.ak.clone(),
nk: self.vk.nk.clone(),
},
ovk: self.ovk,
}
}
}
impl<E: JubjubEngine> FullViewingKey<E> {
pub fn from_expanded_spending_key(expsk: &ExpandedSpendingKey<E>, params: &E::Params) -> Self {
FullViewingKey {
vk: ViewingKey {
ak: params
.generator(FixedGenerators::SpendingKeyGenerator)
.mul(expsk.ask, params),
nk: params
.generator(FixedGenerators::ProofGenerationKey)
.mul(expsk.nsk, params),
},
ovk: expsk.ovk,
}
}
pub fn read<R: Read>(mut reader: R, params: &E::Params) -> io::Result<Self> {
let ak = edwards::Point::<E, Unknown>::read(&mut reader, params)?;
let ak = match ak.as_prime_order(params) {
Some(p) => p,
None => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"ak not in prime-order subgroup",
));
}
};
if ak == edwards::Point::zero() {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"ak not of prime order",
));
}
let nk = edwards::Point::<E, Unknown>::read(&mut reader, params)?;
let nk = match nk.as_prime_order(params) {
Some(p) => p,
None => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"nk not in prime-order subgroup",
));
}
};
let mut ovk = [0; 32];
reader.read_exact(&mut ovk)?;
Ok(FullViewingKey {
vk: ViewingKey { ak, nk },
ovk: OutgoingViewingKey(ovk),
})
}
pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
self.vk.ak.write(&mut writer)?;
self.vk.nk.write(&mut writer)?;
writer.write_all(&self.ovk.0)?;
Ok(())
}
pub fn to_bytes(&self) -> [u8; 96] {
let mut result = [0u8; 96];
self.write(&mut result[..])
.expect("should be able to serialize a FullViewingKey");
result
}
}
#[cfg(test)]
mod tests {
use crate::jubjub::{edwards, FixedGenerators, JubjubParams, PrimeOrder};
use pairing::bls12_381::Bls12;
use std::error::Error;
use super::FullViewingKey;
use crate::JUBJUB;
#[test]
fn ak_must_be_prime_order() {
let mut buf = [0; 96];
let identity = edwards::Point::<Bls12, PrimeOrder>::zero();
// Set both ak and nk to the identity.
identity.write(&mut buf[0..32]).unwrap();
identity.write(&mut buf[32..64]).unwrap();
// ak is not allowed to be the identity.
assert_eq!(
FullViewingKey::<Bls12>::read(&buf[..], &JUBJUB)
.unwrap_err()
.description(),
"ak not of prime order"
);
// Set ak to a basepoint.
let basepoint = JUBJUB.generator(FixedGenerators::SpendingKeyGenerator);
basepoint.write(&mut buf[0..32]).unwrap();
// nk is allowed to be the identity.
assert!(FullViewingKey::<Bls12>::read(&buf[..], &JUBJUB).is_ok());
}
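    // A small sketch: as documented above, prf_expand is BLAKE2b-512 with the
    // "Zcash_ExpandSeed" personalization over sk || t, so computing the digest
    // manually must give the same result. The sk and t values are arbitrary.
    #[test]
    fn prf_expand_matches_manual_blake2b() {
        use super::{prf_expand, PRF_EXPAND_PERSONALIZATION};
        use blake2b_simd::Params as Blake2bParams;

        let sk = [42u8; 32];
        let t = [0x01u8];
        let manual = Blake2bParams::new()
            .hash_length(64)
            .personal(PRF_EXPAND_PERSONALIZATION)
            .to_state()
            .update(&sk)
            .update(&t)
            .finalize();
        assert_eq!(prf_expand(&sk, &t).as_bytes(), manual.as_bytes());
    }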
}
| 30.379464 | 99 | 0.54842 |
f46e08515b9f59648a3d74e4ba7eabdfbffa72fb | 1,755 | // //! # Semantic Analysis
// //! The `semant` module holds all of the semantic analysis
// //! that happens as part of the compilation process.
// //! The most important part of this analysis is
// //! type inference.
// //!
// //! ## Type System
// //!
// //! ### Motivations
// //!
// //! Rox is explicitly typed with type inference for variables.
// //! The reason for choosing explicit typing over total implicit typing (i.e. using type
// //! inference for functions and types) is that while implicit typing does reduce
// //! a lot of the visual noise, there are some instances that are inherently not translatable
// //! to from an explicit system to an implicit system, such as defining polymorphic
// //! functions that take arguments which are themselves polymorphic (such as a function
// //! that takes two polymorphic functions as arguments).
// //!
// //! The more important reason for using explicit types is readability.
// //! Function declarations are often the first thing read by a user, and while good argument
// //! names can be helpful, _good types are even better_. Having type information in the
// //! header of a function or in the field declarations of a type are, more often than not,
// //! far more helpful for readability than the names alone.
// //!
// //! ### Trade-offs
// //!
// //! As with everything, making `Rox` explicitly typed has its trade-offs.
// //! Of course, while explicit types provide more information up-front to the
// //! reader, they can also potentially act as clutter. They also pose more
// //! work to the programmer when refactoring and changing types.
mod tagged_syntax;
mod type_checker;
mod types;
pub(crate) use tagged_syntax::*;
pub(crate) use type_checker::*;
pub(crate) use types::*;
| 45 | 95 | 0.707123 |
21e4dba48e09729516090e49a950e25a62c272fb | 364,435 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[derive(Debug)]
pub(crate) struct Handle<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
pub(crate) client: aws_smithy_client::Client<C, M, R>,
pub(crate) conf: crate::Config,
}
/// Client for AWS Elemental MediaLive
///
/// Client for invoking operations on AWS Elemental MediaLive. Each operation on AWS Elemental MediaLive is a method on
/// this struct. `.send()` MUST be invoked on the generated operations to dispatch the request to the service.
///
/// # Examples
/// **Constructing a client and invoking an operation**
/// ```rust,no_run
/// # async fn docs() {
/// // create a shared configuration. This can be used & shared between multiple service clients.
/// let shared_config = aws_config::load_from_env().await;
/// let client = aws_sdk_medialive::Client::new(&shared_config);
/// // invoke an operation
/// /* let rsp = client
/// .<operation_name>().
/// .<param>("some value")
/// .send().await; */
/// # }
/// ```
/// **Constructing a client with custom configuration**
/// ```rust,no_run
/// use aws_config::RetryConfig;
/// # async fn docs() {
/// let shared_config = aws_config::load_from_env().await;
/// let config = aws_sdk_medialive::config::Builder::from(&shared_config)
/// .retry_config(RetryConfig::disabled())
/// .build();
/// let client = aws_sdk_medialive::Client::from_conf(config);
/// # }
/// ```
#[derive(std::fmt::Debug)]
pub struct Client<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<Handle<C, M, R>>,
}
impl<C, M, R> std::clone::Clone for Client<C, M, R> {
fn clone(&self) -> Self {
Self {
handle: self.handle.clone(),
}
}
}
#[doc(inline)]
pub use aws_smithy_client::Builder;
impl<C, M, R> From<aws_smithy_client::Client<C, M, R>> for Client<C, M, R> {
fn from(client: aws_smithy_client::Client<C, M, R>) -> Self {
Self::with_config(client, crate::Config::builder().build())
}
}
impl<C, M, R> Client<C, M, R> {
/// Creates a client with the given service configuration.
pub fn with_config(client: aws_smithy_client::Client<C, M, R>, conf: crate::Config) -> Self {
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
/// Returns the client's configuration.
pub fn conf(&self) -> &crate::Config {
&self.handle.conf
}
}
impl<C, M, R> Client<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Constructs a fluent builder for the [`AcceptInputDeviceTransfer`](crate::client::fluent_builders::AcceptInputDeviceTransfer) operation.
///
/// - The fluent builder is configurable:
/// - [`input_device_id(impl Into<String>)`](crate::client::fluent_builders::AcceptInputDeviceTransfer::input_device_id) / [`set_input_device_id(Option<String>)`](crate::client::fluent_builders::AcceptInputDeviceTransfer::set_input_device_id): The unique ID of the input device to accept. For example, hd-123456789abcdef.
/// - On success, responds with [`AcceptInputDeviceTransferOutput`](crate::output::AcceptInputDeviceTransferOutput)
/// - On failure, responds with [`SdkError<AcceptInputDeviceTransferError>`](crate::error::AcceptInputDeviceTransferError)
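    ///
    /// A minimal invocation sketch (the device id is the example value from the
    /// parameter documentation above):
    ///
    /// ```rust,no_run
    /// # async fn docs(client: &aws_sdk_medialive::Client) {
    /// let _resp = client
    ///     .accept_input_device_transfer()
    ///     .input_device_id("hd-123456789abcdef")
    ///     .send()
    ///     .await;
    /// # }
    /// ```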
pub fn accept_input_device_transfer(
&self,
) -> fluent_builders::AcceptInputDeviceTransfer<C, M, R> {
fluent_builders::AcceptInputDeviceTransfer::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`BatchDelete`](crate::client::fluent_builders::BatchDelete) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_ids(Vec<String>)`](crate::client::fluent_builders::BatchDelete::channel_ids) / [`set_channel_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchDelete::set_channel_ids): List of channel IDs
/// - [`input_ids(Vec<String>)`](crate::client::fluent_builders::BatchDelete::input_ids) / [`set_input_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchDelete::set_input_ids): List of input IDs
/// - [`input_security_group_ids(Vec<String>)`](crate::client::fluent_builders::BatchDelete::input_security_group_ids) / [`set_input_security_group_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchDelete::set_input_security_group_ids): List of input security group IDs
/// - [`multiplex_ids(Vec<String>)`](crate::client::fluent_builders::BatchDelete::multiplex_ids) / [`set_multiplex_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchDelete::set_multiplex_ids): List of multiplex IDs
/// - On success, responds with [`BatchDeleteOutput`](crate::output::BatchDeleteOutput) with field(s):
/// - [`failed(Option<Vec<BatchFailedResultModel>>)`](crate::output::BatchDeleteOutput::failed): List of failed operations
/// - [`successful(Option<Vec<BatchSuccessfulResultModel>>)`](crate::output::BatchDeleteOutput::successful): List of successful operations
/// - On failure, responds with [`SdkError<BatchDeleteError>`](crate::error::BatchDeleteError)
pub fn batch_delete(&self) -> fluent_builders::BatchDelete<C, M, R> {
fluent_builders::BatchDelete::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`BatchStart`](crate::client::fluent_builders::BatchStart) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_ids(Vec<String>)`](crate::client::fluent_builders::BatchStart::channel_ids) / [`set_channel_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchStart::set_channel_ids): List of channel IDs
/// - [`multiplex_ids(Vec<String>)`](crate::client::fluent_builders::BatchStart::multiplex_ids) / [`set_multiplex_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchStart::set_multiplex_ids): List of multiplex IDs
/// - On success, responds with [`BatchStartOutput`](crate::output::BatchStartOutput) with field(s):
/// - [`failed(Option<Vec<BatchFailedResultModel>>)`](crate::output::BatchStartOutput::failed): List of failed operations
/// - [`successful(Option<Vec<BatchSuccessfulResultModel>>)`](crate::output::BatchStartOutput::successful): List of successful operations
/// - On failure, responds with [`SdkError<BatchStartError>`](crate::error::BatchStartError)
pub fn batch_start(&self) -> fluent_builders::BatchStart<C, M, R> {
fluent_builders::BatchStart::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`BatchStop`](crate::client::fluent_builders::BatchStop) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_ids(Vec<String>)`](crate::client::fluent_builders::BatchStop::channel_ids) / [`set_channel_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchStop::set_channel_ids): List of channel IDs
/// - [`multiplex_ids(Vec<String>)`](crate::client::fluent_builders::BatchStop::multiplex_ids) / [`set_multiplex_ids(Option<Vec<String>>)`](crate::client::fluent_builders::BatchStop::set_multiplex_ids): List of multiplex IDs
/// - On success, responds with [`BatchStopOutput`](crate::output::BatchStopOutput) with field(s):
/// - [`failed(Option<Vec<BatchFailedResultModel>>)`](crate::output::BatchStopOutput::failed): List of failed operations
/// - [`successful(Option<Vec<BatchSuccessfulResultModel>>)`](crate::output::BatchStopOutput::successful): List of successful operations
/// - On failure, responds with [`SdkError<BatchStopError>`](crate::error::BatchStopError)
pub fn batch_stop(&self) -> fluent_builders::BatchStop<C, M, R> {
fluent_builders::BatchStop::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`BatchUpdateSchedule`](crate::client::fluent_builders::BatchUpdateSchedule) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::BatchUpdateSchedule::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::BatchUpdateSchedule::set_channel_id): Id of the channel whose schedule is being updated.
/// - [`creates(BatchScheduleActionCreateRequest)`](crate::client::fluent_builders::BatchUpdateSchedule::creates) / [`set_creates(Option<BatchScheduleActionCreateRequest>)`](crate::client::fluent_builders::BatchUpdateSchedule::set_creates): Schedule actions to create in the schedule.
/// - [`deletes(BatchScheduleActionDeleteRequest)`](crate::client::fluent_builders::BatchUpdateSchedule::deletes) / [`set_deletes(Option<BatchScheduleActionDeleteRequest>)`](crate::client::fluent_builders::BatchUpdateSchedule::set_deletes): Schedule actions to delete from the schedule.
/// - On success, responds with [`BatchUpdateScheduleOutput`](crate::output::BatchUpdateScheduleOutput) with field(s):
/// - [`creates(Option<BatchScheduleActionCreateResult>)`](crate::output::BatchUpdateScheduleOutput::creates): Schedule actions created in the schedule.
/// - [`deletes(Option<BatchScheduleActionDeleteResult>)`](crate::output::BatchUpdateScheduleOutput::deletes): Schedule actions deleted from the schedule.
/// - On failure, responds with [`SdkError<BatchUpdateScheduleError>`](crate::error::BatchUpdateScheduleError)
pub fn batch_update_schedule(&self) -> fluent_builders::BatchUpdateSchedule<C, M, R> {
fluent_builders::BatchUpdateSchedule::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CancelInputDeviceTransfer`](crate::client::fluent_builders::CancelInputDeviceTransfer) operation.
///
/// - The fluent builder is configurable:
/// - [`input_device_id(impl Into<String>)`](crate::client::fluent_builders::CancelInputDeviceTransfer::input_device_id) / [`set_input_device_id(Option<String>)`](crate::client::fluent_builders::CancelInputDeviceTransfer::set_input_device_id): The unique ID of the input device to cancel. For example, hd-123456789abcdef.
/// - On success, responds with [`CancelInputDeviceTransferOutput`](crate::output::CancelInputDeviceTransferOutput)
/// - On failure, responds with [`SdkError<CancelInputDeviceTransferError>`](crate::error::CancelInputDeviceTransferError)
pub fn cancel_input_device_transfer(
&self,
) -> fluent_builders::CancelInputDeviceTransfer<C, M, R> {
fluent_builders::CancelInputDeviceTransfer::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ClaimDevice`](crate::client::fluent_builders::ClaimDevice) operation.
///
/// - The fluent builder is configurable:
/// - [`id(impl Into<String>)`](crate::client::fluent_builders::ClaimDevice::id) / [`set_id(Option<String>)`](crate::client::fluent_builders::ClaimDevice::set_id): The id of the device you want to claim.
/// - On success, responds with [`ClaimDeviceOutput`](crate::output::ClaimDeviceOutput)
/// - On failure, responds with [`SdkError<ClaimDeviceError>`](crate::error::ClaimDeviceError)
pub fn claim_device(&self) -> fluent_builders::ClaimDevice<C, M, R> {
fluent_builders::ClaimDevice::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CreateChannel`](crate::client::fluent_builders::CreateChannel) operation.
///
/// - The fluent builder is configurable:
/// - [`cdi_input_specification(CdiInputSpecification)`](crate::client::fluent_builders::CreateChannel::cdi_input_specification) / [`set_cdi_input_specification(Option<CdiInputSpecification>)`](crate::client::fluent_builders::CreateChannel::set_cdi_input_specification): Specification of CDI inputs for this channel
/// - [`channel_class(ChannelClass)`](crate::client::fluent_builders::CreateChannel::channel_class) / [`set_channel_class(Option<ChannelClass>)`](crate::client::fluent_builders::CreateChannel::set_channel_class): The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one pipeline.
/// - [`destinations(Vec<OutputDestination>)`](crate::client::fluent_builders::CreateChannel::destinations) / [`set_destinations(Option<Vec<OutputDestination>>)`](crate::client::fluent_builders::CreateChannel::set_destinations): Placeholder documentation for __listOfOutputDestination
/// - [`encoder_settings(EncoderSettings)`](crate::client::fluent_builders::CreateChannel::encoder_settings) / [`set_encoder_settings(Option<EncoderSettings>)`](crate::client::fluent_builders::CreateChannel::set_encoder_settings): Encoder Settings
/// - [`input_attachments(Vec<InputAttachment>)`](crate::client::fluent_builders::CreateChannel::input_attachments) / [`set_input_attachments(Option<Vec<InputAttachment>>)`](crate::client::fluent_builders::CreateChannel::set_input_attachments): List of input attachments for channel.
/// - [`input_specification(InputSpecification)`](crate::client::fluent_builders::CreateChannel::input_specification) / [`set_input_specification(Option<InputSpecification>)`](crate::client::fluent_builders::CreateChannel::set_input_specification): Specification of network and file inputs for this channel
/// - [`log_level(LogLevel)`](crate::client::fluent_builders::CreateChannel::log_level) / [`set_log_level(Option<LogLevel>)`](crate::client::fluent_builders::CreateChannel::set_log_level): The log level to write to CloudWatch Logs.
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::CreateChannel::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::CreateChannel::set_name): Name of channel.
/// - [`request_id(impl Into<String>)`](crate::client::fluent_builders::CreateChannel::request_id) / [`set_request_id(Option<String>)`](crate::client::fluent_builders::CreateChannel::set_request_id): Unique request ID to be specified. This is needed to prevent retries from creating multiple resources.
/// - [`reserved(impl Into<String>)`](crate::client::fluent_builders::CreateChannel::reserved) / [`set_reserved(Option<String>)`](crate::client::fluent_builders::CreateChannel::set_reserved): Deprecated field that's only usable by whitelisted customers.
/// - [`role_arn(impl Into<String>)`](crate::client::fluent_builders::CreateChannel::role_arn) / [`set_role_arn(Option<String>)`](crate::client::fluent_builders::CreateChannel::set_role_arn): An optional Amazon Resource Name (ARN) of the role to assume when running the Channel.
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreateChannel::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreateChannel::set_tags): A collection of key-value pairs.
/// - [`vpc(VpcOutputSettings)`](crate::client::fluent_builders::CreateChannel::vpc) / [`set_vpc(Option<VpcOutputSettings>)`](crate::client::fluent_builders::CreateChannel::set_vpc): Settings for the VPC outputs
/// - On success, responds with [`CreateChannelOutput`](crate::output::CreateChannelOutput) with field(s):
/// - [`channel(Option<Channel>)`](crate::output::CreateChannelOutput::channel): Placeholder documentation for Channel
/// - On failure, responds with [`SdkError<CreateChannelError>`](crate::error::CreateChannelError)
pub fn create_channel(&self) -> fluent_builders::CreateChannel<C, M, R> {
fluent_builders::CreateChannel::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CreateInput`](crate::client::fluent_builders::CreateInput) operation.
///
/// - The fluent builder is configurable:
/// - [`destinations(Vec<InputDestinationRequest>)`](crate::client::fluent_builders::CreateInput::destinations) / [`set_destinations(Option<Vec<InputDestinationRequest>>)`](crate::client::fluent_builders::CreateInput::set_destinations): Destination settings for PUSH type inputs.
/// - [`input_devices(Vec<InputDeviceSettings>)`](crate::client::fluent_builders::CreateInput::input_devices) / [`set_input_devices(Option<Vec<InputDeviceSettings>>)`](crate::client::fluent_builders::CreateInput::set_input_devices): Settings for the devices.
/// - [`input_security_groups(Vec<String>)`](crate::client::fluent_builders::CreateInput::input_security_groups) / [`set_input_security_groups(Option<Vec<String>>)`](crate::client::fluent_builders::CreateInput::set_input_security_groups): A list of security groups referenced by IDs to attach to the input.
    ///   - [`media_connect_flows(Vec<MediaConnectFlowRequest>)`](crate::client::fluent_builders::CreateInput::media_connect_flows) / [`set_media_connect_flows(Option<Vec<MediaConnectFlowRequest>>)`](crate::client::fluent_builders::CreateInput::set_media_connect_flows): A list of the MediaConnect Flows that you want to use in this input. You can specify as few as one Flow and, presently, as many as two. The only requirement, when you have more than one, is that each Flow is in a separate Availability Zone, as this ensures your EML input is redundant to AZ issues.
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::CreateInput::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::CreateInput::set_name): Name of the input.
/// - [`request_id(impl Into<String>)`](crate::client::fluent_builders::CreateInput::request_id) / [`set_request_id(Option<String>)`](crate::client::fluent_builders::CreateInput::set_request_id): Unique identifier of the request to ensure the request is handled exactly once in case of retries.
/// - [`role_arn(impl Into<String>)`](crate::client::fluent_builders::CreateInput::role_arn) / [`set_role_arn(Option<String>)`](crate::client::fluent_builders::CreateInput::set_role_arn): The Amazon Resource Name (ARN) of the role this input assumes during and after creation.
/// - [`sources(Vec<InputSourceRequest>)`](crate::client::fluent_builders::CreateInput::sources) / [`set_sources(Option<Vec<InputSourceRequest>>)`](crate::client::fluent_builders::CreateInput::set_sources): The source URLs for a PULL-type input. Every PULL type input needs exactly two source URLs for redundancy. Only specify sources for PULL type Inputs. Leave Destinations empty.
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreateInput::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreateInput::set_tags): A collection of key-value pairs.
/// - [`r#type(InputType)`](crate::client::fluent_builders::CreateInput::r#type) / [`set_type(Option<InputType>)`](crate::client::fluent_builders::CreateInput::set_type): The different types of inputs that AWS Elemental MediaLive supports.
/// - [`vpc(InputVpcRequest)`](crate::client::fluent_builders::CreateInput::vpc) / [`set_vpc(Option<InputVpcRequest>)`](crate::client::fluent_builders::CreateInput::set_vpc): Settings for a private VPC Input. When this property is specified, the input destination addresses will be created in a VPC rather than with public Internet addresses. This property requires setting the roleArn property on Input creation. Not compatible with the inputSecurityGroups property.
/// - On success, responds with [`CreateInputOutput`](crate::output::CreateInputOutput) with field(s):
/// - [`input(Option<Input>)`](crate::output::CreateInputOutput::input): Placeholder documentation for Input
/// - On failure, responds with [`SdkError<CreateInputError>`](crate::error::CreateInputError)
pub fn create_input(&self) -> fluent_builders::CreateInput<C, M, R> {
fluent_builders::CreateInput::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CreateInputSecurityGroup`](crate::client::fluent_builders::CreateInputSecurityGroup) operation.
///
/// - The fluent builder is configurable:
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreateInputSecurityGroup::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreateInputSecurityGroup::set_tags): A collection of key-value pairs.
/// - [`whitelist_rules(Vec<InputWhitelistRuleCidr>)`](crate::client::fluent_builders::CreateInputSecurityGroup::whitelist_rules) / [`set_whitelist_rules(Option<Vec<InputWhitelistRuleCidr>>)`](crate::client::fluent_builders::CreateInputSecurityGroup::set_whitelist_rules): List of IPv4 CIDR addresses to whitelist
/// - On success, responds with [`CreateInputSecurityGroupOutput`](crate::output::CreateInputSecurityGroupOutput) with field(s):
/// - [`security_group(Option<InputSecurityGroup>)`](crate::output::CreateInputSecurityGroupOutput::security_group): An Input Security Group
/// - On failure, responds with [`SdkError<CreateInputSecurityGroupError>`](crate::error::CreateInputSecurityGroupError)
pub fn create_input_security_group(
&self,
) -> fluent_builders::CreateInputSecurityGroup<C, M, R> {
fluent_builders::CreateInputSecurityGroup::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CreateMultiplex`](crate::client::fluent_builders::CreateMultiplex) operation.
///
/// - The fluent builder is configurable:
/// - [`availability_zones(Vec<String>)`](crate::client::fluent_builders::CreateMultiplex::availability_zones) / [`set_availability_zones(Option<Vec<String>>)`](crate::client::fluent_builders::CreateMultiplex::set_availability_zones): A list of availability zones for the multiplex. You must specify exactly two.
/// - [`multiplex_settings(MultiplexSettings)`](crate::client::fluent_builders::CreateMultiplex::multiplex_settings) / [`set_multiplex_settings(Option<MultiplexSettings>)`](crate::client::fluent_builders::CreateMultiplex::set_multiplex_settings): Configuration for a multiplex event.
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::CreateMultiplex::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::CreateMultiplex::set_name): Name of multiplex.
/// - [`request_id(impl Into<String>)`](crate::client::fluent_builders::CreateMultiplex::request_id) / [`set_request_id(Option<String>)`](crate::client::fluent_builders::CreateMultiplex::set_request_id): Unique request ID. This prevents retries from creating multiple resources.
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreateMultiplex::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreateMultiplex::set_tags): A collection of key-value pairs.
/// - On success, responds with [`CreateMultiplexOutput`](crate::output::CreateMultiplexOutput) with field(s):
/// - [`multiplex(Option<Multiplex>)`](crate::output::CreateMultiplexOutput::multiplex): The newly created multiplex.
/// - On failure, responds with [`SdkError<CreateMultiplexError>`](crate::error::CreateMultiplexError)
pub fn create_multiplex(&self) -> fluent_builders::CreateMultiplex<C, M, R> {
fluent_builders::CreateMultiplex::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CreateMultiplexProgram`](crate::client::fluent_builders::CreateMultiplexProgram) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::CreateMultiplexProgram::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::CreateMultiplexProgram::set_multiplex_id): ID of the multiplex where the program is to be created.
/// - [`multiplex_program_settings(MultiplexProgramSettings)`](crate::client::fluent_builders::CreateMultiplexProgram::multiplex_program_settings) / [`set_multiplex_program_settings(Option<MultiplexProgramSettings>)`](crate::client::fluent_builders::CreateMultiplexProgram::set_multiplex_program_settings): The settings for this multiplex program.
/// - [`program_name(impl Into<String>)`](crate::client::fluent_builders::CreateMultiplexProgram::program_name) / [`set_program_name(Option<String>)`](crate::client::fluent_builders::CreateMultiplexProgram::set_program_name): Name of multiplex program.
/// - [`request_id(impl Into<String>)`](crate::client::fluent_builders::CreateMultiplexProgram::request_id) / [`set_request_id(Option<String>)`](crate::client::fluent_builders::CreateMultiplexProgram::set_request_id): Unique request ID. This prevents retries from creating multiple resources.
/// - On success, responds with [`CreateMultiplexProgramOutput`](crate::output::CreateMultiplexProgramOutput) with field(s):
/// - [`multiplex_program(Option<MultiplexProgram>)`](crate::output::CreateMultiplexProgramOutput::multiplex_program): The newly created multiplex program.
/// - On failure, responds with [`SdkError<CreateMultiplexProgramError>`](crate::error::CreateMultiplexProgramError)
pub fn create_multiplex_program(&self) -> fluent_builders::CreateMultiplexProgram<C, M, R> {
fluent_builders::CreateMultiplexProgram::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CreatePartnerInput`](crate::client::fluent_builders::CreatePartnerInput) operation.
///
/// - The fluent builder is configurable:
/// - [`input_id(impl Into<String>)`](crate::client::fluent_builders::CreatePartnerInput::input_id) / [`set_input_id(Option<String>)`](crate::client::fluent_builders::CreatePartnerInput::set_input_id): Unique ID of the input.
/// - [`request_id(impl Into<String>)`](crate::client::fluent_builders::CreatePartnerInput::request_id) / [`set_request_id(Option<String>)`](crate::client::fluent_builders::CreatePartnerInput::set_request_id): Unique identifier of the request to ensure the request is handled exactly once in case of retries.
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreatePartnerInput::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreatePartnerInput::set_tags): A collection of key-value pairs.
/// - On success, responds with [`CreatePartnerInputOutput`](crate::output::CreatePartnerInputOutput) with field(s):
/// - [`input(Option<Input>)`](crate::output::CreatePartnerInputOutput::input): Placeholder documentation for Input
/// - On failure, responds with [`SdkError<CreatePartnerInputError>`](crate::error::CreatePartnerInputError)
pub fn create_partner_input(&self) -> fluent_builders::CreatePartnerInput<C, M, R> {
fluent_builders::CreatePartnerInput::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`CreateTags`](crate::client::fluent_builders::CreateTags) operation.
///
/// - The fluent builder is configurable:
/// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::CreateTags::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::CreateTags::set_resource_arn): Placeholder documentation for __string
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreateTags::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreateTags::set_tags): Placeholder documentation for Tags
/// - On success, responds with [`CreateTagsOutput`](crate::output::CreateTagsOutput)
/// - On failure, responds with [`SdkError<CreateTagsError>`](crate::error::CreateTagsError)
pub fn create_tags(&self) -> fluent_builders::CreateTags<C, M, R> {
fluent_builders::CreateTags::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteChannel`](crate::client::fluent_builders::DeleteChannel) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::DeleteChannel::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::DeleteChannel::set_channel_id): Unique ID of the channel.
/// - On success, responds with [`DeleteChannelOutput`](crate::output::DeleteChannelOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DeleteChannelOutput::arn): The unique arn of the channel.
/// - [`cdi_input_specification(Option<CdiInputSpecification>)`](crate::output::DeleteChannelOutput::cdi_input_specification): Specification of CDI inputs for this channel
/// - [`channel_class(Option<ChannelClass>)`](crate::output::DeleteChannelOutput::channel_class): The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one pipeline.
/// - [`destinations(Option<Vec<OutputDestination>>)`](crate::output::DeleteChannelOutput::destinations): A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types (HLS, for example), there is one destination per packager.
/// - [`egress_endpoints(Option<Vec<ChannelEgressEndpoint>>)`](crate::output::DeleteChannelOutput::egress_endpoints): The endpoints where outgoing connections initiate from
/// - [`encoder_settings(Option<EncoderSettings>)`](crate::output::DeleteChannelOutput::encoder_settings): Encoder Settings
/// - [`id(Option<String>)`](crate::output::DeleteChannelOutput::id): The unique id of the channel.
/// - [`input_attachments(Option<Vec<InputAttachment>>)`](crate::output::DeleteChannelOutput::input_attachments): List of input attachments for channel.
/// - [`input_specification(Option<InputSpecification>)`](crate::output::DeleteChannelOutput::input_specification): Specification of network and file inputs for this channel
/// - [`log_level(Option<LogLevel>)`](crate::output::DeleteChannelOutput::log_level): The log level being written to CloudWatch Logs.
/// - [`name(Option<String>)`](crate::output::DeleteChannelOutput::name): The name of the channel. (user-mutable)
/// - [`pipeline_details(Option<Vec<PipelineDetail>>)`](crate::output::DeleteChannelOutput::pipeline_details): Runtime details for the pipelines of a running channel.
/// - [`pipelines_running_count(i32)`](crate::output::DeleteChannelOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`role_arn(Option<String>)`](crate::output::DeleteChannelOutput::role_arn): The Amazon Resource Name (ARN) of the role assumed when running the Channel.
/// - [`state(Option<ChannelState>)`](crate::output::DeleteChannelOutput::state): Placeholder documentation for ChannelState
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DeleteChannelOutput::tags): A collection of key-value pairs.
/// - [`vpc(Option<VpcOutputSettingsDescription>)`](crate::output::DeleteChannelOutput::vpc): Settings for VPC output
/// - On failure, responds with [`SdkError<DeleteChannelError>`](crate::error::DeleteChannelError)
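    ///
    /// # Example
    ///
    /// A minimal sketch of deleting a channel and inspecting a couple of fields from the
    /// response. The channel ID is a placeholder; `client` is assumed to be already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .delete_channel()
    ///     .channel_id("1234567") // hypothetical channel ID
    ///     .send()
    ///     .await?;
    /// println!("channel {:?} is now in state {:?}", resp.id(), resp.state());
    /// # Ok(())
    /// # }
    /// ```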
pub fn delete_channel(&self) -> fluent_builders::DeleteChannel<C, M, R> {
fluent_builders::DeleteChannel::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteInput`](crate::client::fluent_builders::DeleteInput) operation.
///
/// - The fluent builder is configurable:
/// - [`input_id(impl Into<String>)`](crate::client::fluent_builders::DeleteInput::input_id) / [`set_input_id(Option<String>)`](crate::client::fluent_builders::DeleteInput::set_input_id): Unique ID of the input
/// - On success, responds with [`DeleteInputOutput`](crate::output::DeleteInputOutput)
/// - On failure, responds with [`SdkError<DeleteInputError>`](crate::error::DeleteInputError)
pub fn delete_input(&self) -> fluent_builders::DeleteInput<C, M, R> {
fluent_builders::DeleteInput::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteInputSecurityGroup`](crate::client::fluent_builders::DeleteInputSecurityGroup) operation.
///
/// - The fluent builder is configurable:
/// - [`input_security_group_id(impl Into<String>)`](crate::client::fluent_builders::DeleteInputSecurityGroup::input_security_group_id) / [`set_input_security_group_id(Option<String>)`](crate::client::fluent_builders::DeleteInputSecurityGroup::set_input_security_group_id): The Input Security Group to delete
/// - On success, responds with [`DeleteInputSecurityGroupOutput`](crate::output::DeleteInputSecurityGroupOutput)
/// - On failure, responds with [`SdkError<DeleteInputSecurityGroupError>`](crate::error::DeleteInputSecurityGroupError)
pub fn delete_input_security_group(
&self,
) -> fluent_builders::DeleteInputSecurityGroup<C, M, R> {
fluent_builders::DeleteInputSecurityGroup::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteMultiplex`](crate::client::fluent_builders::DeleteMultiplex) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::DeleteMultiplex::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::DeleteMultiplex::set_multiplex_id): The ID of the multiplex.
/// - On success, responds with [`DeleteMultiplexOutput`](crate::output::DeleteMultiplexOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DeleteMultiplexOutput::arn): The unique arn of the multiplex.
/// - [`availability_zones(Option<Vec<String>>)`](crate::output::DeleteMultiplexOutput::availability_zones): A list of availability zones for the multiplex.
/// - [`destinations(Option<Vec<MultiplexOutputDestination>>)`](crate::output::DeleteMultiplexOutput::destinations): A list of the multiplex output destinations.
/// - [`id(Option<String>)`](crate::output::DeleteMultiplexOutput::id): The unique id of the multiplex.
/// - [`multiplex_settings(Option<MultiplexSettings>)`](crate::output::DeleteMultiplexOutput::multiplex_settings): Configuration for a multiplex event.
/// - [`name(Option<String>)`](crate::output::DeleteMultiplexOutput::name): The name of the multiplex.
/// - [`pipelines_running_count(i32)`](crate::output::DeleteMultiplexOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`program_count(i32)`](crate::output::DeleteMultiplexOutput::program_count): The number of programs in the multiplex.
/// - [`state(Option<MultiplexState>)`](crate::output::DeleteMultiplexOutput::state): The current state of the multiplex.
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DeleteMultiplexOutput::tags): A collection of key-value pairs.
/// - On failure, responds with [`SdkError<DeleteMultiplexError>`](crate::error::DeleteMultiplexError)
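    ///
    /// # Example
    ///
    /// A minimal sketch; the multiplex ID is a placeholder and `client` is assumed to be
    /// already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .delete_multiplex()
    ///     .multiplex_id("1234567") // hypothetical multiplex ID
    ///     .send()
    ///     .await?;
    /// println!("multiplex {:?} is now in state {:?}", resp.id(), resp.state());
    /// # Ok(())
    /// # }
    /// ```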
pub fn delete_multiplex(&self) -> fluent_builders::DeleteMultiplex<C, M, R> {
fluent_builders::DeleteMultiplex::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteMultiplexProgram`](crate::client::fluent_builders::DeleteMultiplexProgram) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::DeleteMultiplexProgram::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::DeleteMultiplexProgram::set_multiplex_id): The ID of the multiplex that the program belongs to.
/// - [`program_name(impl Into<String>)`](crate::client::fluent_builders::DeleteMultiplexProgram::program_name) / [`set_program_name(Option<String>)`](crate::client::fluent_builders::DeleteMultiplexProgram::set_program_name): The multiplex program name.
/// - On success, responds with [`DeleteMultiplexProgramOutput`](crate::output::DeleteMultiplexProgramOutput) with field(s):
/// - [`channel_id(Option<String>)`](crate::output::DeleteMultiplexProgramOutput::channel_id): The MediaLive channel associated with the program.
/// - [`multiplex_program_settings(Option<MultiplexProgramSettings>)`](crate::output::DeleteMultiplexProgramOutput::multiplex_program_settings): The settings for this multiplex program.
/// - [`packet_identifiers_map(Option<MultiplexProgramPacketIdentifiersMap>)`](crate::output::DeleteMultiplexProgramOutput::packet_identifiers_map): The packet identifier map for this multiplex program.
/// - [`pipeline_details(Option<Vec<MultiplexProgramPipelineDetail>>)`](crate::output::DeleteMultiplexProgramOutput::pipeline_details): Contains information about the current sources for the specified program in the specified multiplex. Keep in mind that each multiplex pipeline connects to both pipelines in a given source channel (the channel identified by the program). But only one of those channel pipelines is ever active at one time.
/// - [`program_name(Option<String>)`](crate::output::DeleteMultiplexProgramOutput::program_name): The name of the multiplex program.
/// - On failure, responds with [`SdkError<DeleteMultiplexProgramError>`](crate::error::DeleteMultiplexProgramError)
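    ///
    /// # Example
    ///
    /// A minimal sketch; the multiplex ID and program name are placeholders and `client`
    /// is assumed to be already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .delete_multiplex_program()
    ///     .multiplex_id("1234567")   // hypothetical multiplex ID
    ///     .program_name("program-1") // hypothetical program name
    ///     .send()
    ///     .await?;
    /// println!("deleted program: {:?}", resp.program_name());
    /// # Ok(())
    /// # }
    /// ```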
pub fn delete_multiplex_program(&self) -> fluent_builders::DeleteMultiplexProgram<C, M, R> {
fluent_builders::DeleteMultiplexProgram::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteReservation`](crate::client::fluent_builders::DeleteReservation) operation.
///
/// - The fluent builder is configurable:
/// - [`reservation_id(impl Into<String>)`](crate::client::fluent_builders::DeleteReservation::reservation_id) / [`set_reservation_id(Option<String>)`](crate::client::fluent_builders::DeleteReservation::set_reservation_id): Unique reservation ID, e.g. '1234567'
/// - On success, responds with [`DeleteReservationOutput`](crate::output::DeleteReservationOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DeleteReservationOutput::arn): Unique reservation ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:reservation:1234567'
/// - [`count(i32)`](crate::output::DeleteReservationOutput::count): Number of reserved resources
/// - [`currency_code(Option<String>)`](crate::output::DeleteReservationOutput::currency_code): Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'
/// - [`duration(i32)`](crate::output::DeleteReservationOutput::duration): Lease duration, e.g. '12'
/// - [`duration_units(Option<OfferingDurationUnits>)`](crate::output::DeleteReservationOutput::duration_units): Units for duration, e.g. 'MONTHS'
/// - [`end(Option<String>)`](crate::output::DeleteReservationOutput::end): Reservation UTC end date and time in ISO-8601 format, e.g. '2019-03-01T00:00:00'
/// - [`fixed_price(f64)`](crate::output::DeleteReservationOutput::fixed_price): One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering
/// - [`name(Option<String>)`](crate::output::DeleteReservationOutput::name): User specified reservation name
/// - [`offering_description(Option<String>)`](crate::output::DeleteReservationOutput::offering_description): Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'
/// - [`offering_id(Option<String>)`](crate::output::DeleteReservationOutput::offering_id): Unique offering ID, e.g. '87654321'
/// - [`offering_type(Option<OfferingType>)`](crate::output::DeleteReservationOutput::offering_type): Offering type, e.g. 'NO_UPFRONT'
/// - [`region(Option<String>)`](crate::output::DeleteReservationOutput::region): AWS region, e.g. 'us-west-2'
/// - [`reservation_id(Option<String>)`](crate::output::DeleteReservationOutput::reservation_id): Unique reservation ID, e.g. '1234567'
/// - [`resource_specification(Option<ReservationResourceSpecification>)`](crate::output::DeleteReservationOutput::resource_specification): Resource configuration details
/// - [`start(Option<String>)`](crate::output::DeleteReservationOutput::start): Reservation UTC start date and time in ISO-8601 format, e.g. '2018-03-01T00:00:00'
/// - [`state(Option<ReservationState>)`](crate::output::DeleteReservationOutput::state): Current state of reservation, e.g. 'ACTIVE'
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DeleteReservationOutput::tags): A collection of key-value pairs
/// - [`usage_price(f64)`](crate::output::DeleteReservationOutput::usage_price): Recurring usage charge for each reserved resource, e.g. '157.0'
/// - On failure, responds with [`SdkError<DeleteReservationError>`](crate::error::DeleteReservationError)
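    ///
    /// # Example
    ///
    /// A minimal sketch; the reservation ID is a placeholder and `client` is assumed to be
    /// already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .delete_reservation()
    ///     .reservation_id("1234567") // hypothetical reservation ID
    ///     .send()
    ///     .await?;
    /// println!("reservation {:?} is now {:?}", resp.reservation_id(), resp.state());
    /// # Ok(())
    /// # }
    /// ```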
pub fn delete_reservation(&self) -> fluent_builders::DeleteReservation<C, M, R> {
fluent_builders::DeleteReservation::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteSchedule`](crate::client::fluent_builders::DeleteSchedule) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::DeleteSchedule::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::DeleteSchedule::set_channel_id): Id of the channel whose schedule is being deleted.
/// - On success, responds with [`DeleteScheduleOutput`](crate::output::DeleteScheduleOutput)
/// - On failure, responds with [`SdkError<DeleteScheduleError>`](crate::error::DeleteScheduleError)
pub fn delete_schedule(&self) -> fluent_builders::DeleteSchedule<C, M, R> {
fluent_builders::DeleteSchedule::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteTags`](crate::client::fluent_builders::DeleteTags) operation.
///
/// - The fluent builder is configurable:
/// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::DeleteTags::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::DeleteTags::set_resource_arn): The ARN of the resource to remove tags from.
/// - [`tag_keys(Vec<String>)`](crate::client::fluent_builders::DeleteTags::tag_keys) / [`set_tag_keys(Option<Vec<String>>)`](crate::client::fluent_builders::DeleteTags::set_tag_keys): An array of tag keys to delete
/// - On success, responds with [`DeleteTagsOutput`](crate::output::DeleteTagsOutput)
/// - On failure, responds with [`SdkError<DeleteTagsError>`](crate::error::DeleteTagsError)
pub fn delete_tags(&self) -> fluent_builders::DeleteTags<C, M, R> {
fluent_builders::DeleteTags::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeChannel`](crate::client::fluent_builders::DescribeChannel) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::DescribeChannel::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::DescribeChannel::set_channel_id): channel ID
/// - On success, responds with [`DescribeChannelOutput`](crate::output::DescribeChannelOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DescribeChannelOutput::arn): The unique arn of the channel.
/// - [`cdi_input_specification(Option<CdiInputSpecification>)`](crate::output::DescribeChannelOutput::cdi_input_specification): Specification of CDI inputs for this channel
/// - [`channel_class(Option<ChannelClass>)`](crate::output::DescribeChannelOutput::channel_class): The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one pipeline.
/// - [`destinations(Option<Vec<OutputDestination>>)`](crate::output::DescribeChannelOutput::destinations): A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types (HLS, for example), there is one destination per packager.
/// - [`egress_endpoints(Option<Vec<ChannelEgressEndpoint>>)`](crate::output::DescribeChannelOutput::egress_endpoints): The endpoints where outgoing connections initiate from
/// - [`encoder_settings(Option<EncoderSettings>)`](crate::output::DescribeChannelOutput::encoder_settings): Encoder Settings
/// - [`id(Option<String>)`](crate::output::DescribeChannelOutput::id): The unique id of the channel.
/// - [`input_attachments(Option<Vec<InputAttachment>>)`](crate::output::DescribeChannelOutput::input_attachments): List of input attachments for channel.
/// - [`input_specification(Option<InputSpecification>)`](crate::output::DescribeChannelOutput::input_specification): Specification of network and file inputs for this channel
/// - [`log_level(Option<LogLevel>)`](crate::output::DescribeChannelOutput::log_level): The log level being written to CloudWatch Logs.
/// - [`name(Option<String>)`](crate::output::DescribeChannelOutput::name): The name of the channel. (user-mutable)
/// - [`pipeline_details(Option<Vec<PipelineDetail>>)`](crate::output::DescribeChannelOutput::pipeline_details): Runtime details for the pipelines of a running channel.
/// - [`pipelines_running_count(i32)`](crate::output::DescribeChannelOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`role_arn(Option<String>)`](crate::output::DescribeChannelOutput::role_arn): The Amazon Resource Name (ARN) of the role assumed when running the Channel.
/// - [`state(Option<ChannelState>)`](crate::output::DescribeChannelOutput::state): Placeholder documentation for ChannelState
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DescribeChannelOutput::tags): A collection of key-value pairs.
/// - [`vpc(Option<VpcOutputSettingsDescription>)`](crate::output::DescribeChannelOutput::vpc): Settings for VPC output
/// - On failure, responds with [`SdkError<DescribeChannelError>`](crate::error::DescribeChannelError)
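    ///
    /// # Example
    ///
    /// A minimal sketch of reading a few fields from the response. The channel ID is a
    /// placeholder; `client` is assumed to be already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .describe_channel()
    ///     .channel_id("1234567") // hypothetical channel ID
    ///     .send()
    ///     .await?;
    /// println!(
    ///     "channel {:?}: state {:?}, {} healthy pipeline(s)",
    ///     resp.name(),
    ///     resp.state(),
    ///     resp.pipelines_running_count(),
    /// );
    /// # Ok(())
    /// # }
    /// ```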
pub fn describe_channel(&self) -> fluent_builders::DescribeChannel<C, M, R> {
fluent_builders::DescribeChannel::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeInput`](crate::client::fluent_builders::DescribeInput) operation.
///
/// - The fluent builder is configurable:
/// - [`input_id(impl Into<String>)`](crate::client::fluent_builders::DescribeInput::input_id) / [`set_input_id(Option<String>)`](crate::client::fluent_builders::DescribeInput::set_input_id): Unique ID of the input
/// - On success, responds with [`DescribeInputOutput`](crate::output::DescribeInputOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DescribeInputOutput::arn): The Unique ARN of the input (generated, immutable).
/// - [`attached_channels(Option<Vec<String>>)`](crate::output::DescribeInputOutput::attached_channels): A list of channel IDs that the input is attached to (currently an input can only be attached to one channel).
/// - [`destinations(Option<Vec<InputDestination>>)`](crate::output::DescribeInputOutput::destinations): A list of the destinations of the input (PUSH-type).
/// - [`id(Option<String>)`](crate::output::DescribeInputOutput::id): The generated ID of the input (unique for user account, immutable).
/// - [`input_class(Option<InputClass>)`](crate::output::DescribeInputOutput::input_class): STANDARD - MediaLive expects two sources to be connected to this input. If the channel is also STANDARD, both sources will be ingested. If the channel is SINGLE_PIPELINE, only the first source will be ingested; the second source will always be ignored, even if the first source fails. SINGLE_PIPELINE - You can connect only one source to this input. If the ChannelClass is also SINGLE_PIPELINE, this value is valid. If the ChannelClass is STANDARD, this value is not valid because the channel requires two sources in the input.
/// - [`input_devices(Option<Vec<InputDeviceSettings>>)`](crate::output::DescribeInputOutput::input_devices): Settings for the input devices.
/// - [`input_partner_ids(Option<Vec<String>>)`](crate::output::DescribeInputOutput::input_partner_ids): A list of IDs for all Inputs which are partners of this one.
/// - [`input_source_type(Option<InputSourceType>)`](crate::output::DescribeInputOutput::input_source_type): Certain pull input sources can be dynamic, meaning that their URLs can be changed dynamically during input switch actions. Presently, this functionality only works with MP4_FILE and TS_FILE inputs.
/// - [`media_connect_flows(Option<Vec<MediaConnectFlow>>)`](crate::output::DescribeInputOutput::media_connect_flows): A list of MediaConnect Flows for this input.
/// - [`name(Option<String>)`](crate::output::DescribeInputOutput::name): The user-assigned name (This is a mutable value).
/// - [`role_arn(Option<String>)`](crate::output::DescribeInputOutput::role_arn): The Amazon Resource Name (ARN) of the role this input assumes during and after creation.
/// - [`security_groups(Option<Vec<String>>)`](crate::output::DescribeInputOutput::security_groups): A list of IDs for all the Input Security Groups attached to the input.
/// - [`sources(Option<Vec<InputSource>>)`](crate::output::DescribeInputOutput::sources): A list of the sources of the input (PULL-type).
/// - [`state(Option<InputState>)`](crate::output::DescribeInputOutput::state): Placeholder documentation for InputState
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DescribeInputOutput::tags): A collection of key-value pairs.
/// - [`r#type(Option<InputType>)`](crate::output::DescribeInputOutput::type): The type of this input.
/// - On failure, responds with [`SdkError<DescribeInputError>`](crate::error::DescribeInputError)
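    ///
    /// # Example
    ///
    /// A minimal sketch; the input ID is a placeholder and `client` is assumed to be
    /// already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .describe_input()
    ///     .input_id("1234567") // hypothetical input ID
    ///     .send()
    ///     .await?;
    /// println!("input {:?} ({:?}) is in state {:?}", resp.name(), resp.r#type(), resp.state());
    /// # Ok(())
    /// # }
    /// ```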
pub fn describe_input(&self) -> fluent_builders::DescribeInput<C, M, R> {
fluent_builders::DescribeInput::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeInputDevice`](crate::client::fluent_builders::DescribeInputDevice) operation.
///
/// - The fluent builder is configurable:
/// - [`input_device_id(impl Into<String>)`](crate::client::fluent_builders::DescribeInputDevice::input_device_id) / [`set_input_device_id(Option<String>)`](crate::client::fluent_builders::DescribeInputDevice::set_input_device_id): The unique ID of this input device. For example, hd-123456789abcdef.
/// - On success, responds with [`DescribeInputDeviceOutput`](crate::output::DescribeInputDeviceOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DescribeInputDeviceOutput::arn): The unique ARN of the input device.
/// - [`connection_state(Option<InputDeviceConnectionState>)`](crate::output::DescribeInputDeviceOutput::connection_state): The state of the connection between the input device and AWS.
/// - [`device_settings_sync_state(Option<DeviceSettingsSyncState>)`](crate::output::DescribeInputDeviceOutput::device_settings_sync_state): The status of the action to synchronize the device configuration. If you change the configuration of the input device (for example, the maximum bitrate), MediaLive sends the new data to the device. The device might not update itself immediately. SYNCED means the device has updated its configuration. SYNCING means that it has not updated its configuration.
/// - [`device_update_status(Option<DeviceUpdateStatus>)`](crate::output::DescribeInputDeviceOutput::device_update_status): The status of software on the input device.
/// - [`hd_device_settings(Option<InputDeviceHdSettings>)`](crate::output::DescribeInputDeviceOutput::hd_device_settings): Settings that describe an input device that is type HD.
/// - [`id(Option<String>)`](crate::output::DescribeInputDeviceOutput::id): The unique ID of the input device.
/// - [`mac_address(Option<String>)`](crate::output::DescribeInputDeviceOutput::mac_address): The network MAC address of the input device.
/// - [`name(Option<String>)`](crate::output::DescribeInputDeviceOutput::name): A name that you specify for the input device.
/// - [`network_settings(Option<InputDeviceNetworkSettings>)`](crate::output::DescribeInputDeviceOutput::network_settings): The network settings for the input device.
/// - [`serial_number(Option<String>)`](crate::output::DescribeInputDeviceOutput::serial_number): The unique serial number of the input device.
/// - [`r#type(Option<InputDeviceType>)`](crate::output::DescribeInputDeviceOutput::type): The type of the input device.
/// - [`uhd_device_settings(Option<InputDeviceUhdSettings>)`](crate::output::DescribeInputDeviceOutput::uhd_device_settings): Settings that describe an input device that is type UHD.
/// - On failure, responds with [`SdkError<DescribeInputDeviceError>`](crate::error::DescribeInputDeviceError)
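    ///
    /// # Example
    ///
    /// A minimal sketch; the device ID follows the `hd-123456789abcdef` format mentioned
    /// above, and `client` is assumed to be already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .describe_input_device()
    ///     .input_device_id("hd-123456789abcdef") // hypothetical device ID
    ///     .send()
    ///     .await?;
    /// println!("device {:?} connection state: {:?}", resp.name(), resp.connection_state());
    /// # Ok(())
    /// # }
    /// ```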
pub fn describe_input_device(&self) -> fluent_builders::DescribeInputDevice<C, M, R> {
fluent_builders::DescribeInputDevice::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeInputDeviceThumbnail`](crate::client::fluent_builders::DescribeInputDeviceThumbnail) operation.
///
/// - The fluent builder is configurable:
/// - [`input_device_id(impl Into<String>)`](crate::client::fluent_builders::DescribeInputDeviceThumbnail::input_device_id) / [`set_input_device_id(Option<String>)`](crate::client::fluent_builders::DescribeInputDeviceThumbnail::set_input_device_id): The unique ID of this input device. For example, hd-123456789abcdef.
/// - [`accept(AcceptHeader)`](crate::client::fluent_builders::DescribeInputDeviceThumbnail::accept) / [`set_accept(Option<AcceptHeader>)`](crate::client::fluent_builders::DescribeInputDeviceThumbnail::set_accept): The HTTP Accept header. Indicates the requested type for the thumbnail.
/// - On success, responds with [`DescribeInputDeviceThumbnailOutput`](crate::output::DescribeInputDeviceThumbnailOutput) with field(s):
/// - [`body(byte_stream::ByteStream)`](crate::output::DescribeInputDeviceThumbnailOutput::body): The binary data for the thumbnail that the Link device has most recently sent to MediaLive.
/// - [`content_type(Option<ContentType>)`](crate::output::DescribeInputDeviceThumbnailOutput::content_type): Specifies the media type of the thumbnail.
/// - [`content_length(i64)`](crate::output::DescribeInputDeviceThumbnailOutput::content_length): The length of the content.
/// - [`e_tag(Option<String>)`](crate::output::DescribeInputDeviceThumbnailOutput::e_tag): The unique, cacheable version of this thumbnail.
/// - [`last_modified(Option<DateTime>)`](crate::output::DescribeInputDeviceThumbnailOutput::last_modified): The date and time the thumbnail was last updated at the device.
/// - On failure, responds with [`SdkError<DescribeInputDeviceThumbnailError>`](crate::error::DescribeInputDeviceThumbnailError)
pub fn describe_input_device_thumbnail(
&self,
) -> fluent_builders::DescribeInputDeviceThumbnail<C, M, R> {
fluent_builders::DescribeInputDeviceThumbnail::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeInputSecurityGroup`](crate::client::fluent_builders::DescribeInputSecurityGroup) operation.
///
/// - The fluent builder is configurable:
/// - [`input_security_group_id(impl Into<String>)`](crate::client::fluent_builders::DescribeInputSecurityGroup::input_security_group_id) / [`set_input_security_group_id(Option<String>)`](crate::client::fluent_builders::DescribeInputSecurityGroup::set_input_security_group_id): The id of the Input Security Group to describe
/// - On success, responds with [`DescribeInputSecurityGroupOutput`](crate::output::DescribeInputSecurityGroupOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DescribeInputSecurityGroupOutput::arn): Unique ARN of Input Security Group
/// - [`id(Option<String>)`](crate::output::DescribeInputSecurityGroupOutput::id): The Id of the Input Security Group
/// - [`inputs(Option<Vec<String>>)`](crate::output::DescribeInputSecurityGroupOutput::inputs): The list of inputs currently using this Input Security Group.
/// - [`state(Option<InputSecurityGroupState>)`](crate::output::DescribeInputSecurityGroupOutput::state): The current state of the Input Security Group.
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DescribeInputSecurityGroupOutput::tags): A collection of key-value pairs.
/// - [`whitelist_rules(Option<Vec<InputWhitelistRule>>)`](crate::output::DescribeInputSecurityGroupOutput::whitelist_rules): Whitelist rules and their sync status
/// - On failure, responds with [`SdkError<DescribeInputSecurityGroupError>`](crate::error::DescribeInputSecurityGroupError)
pub fn describe_input_security_group(
&self,
) -> fluent_builders::DescribeInputSecurityGroup<C, M, R> {
fluent_builders::DescribeInputSecurityGroup::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeMultiplex`](crate::client::fluent_builders::DescribeMultiplex) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::DescribeMultiplex::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::DescribeMultiplex::set_multiplex_id): The ID of the multiplex.
/// - On success, responds with [`DescribeMultiplexOutput`](crate::output::DescribeMultiplexOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DescribeMultiplexOutput::arn): The unique arn of the multiplex.
/// - [`availability_zones(Option<Vec<String>>)`](crate::output::DescribeMultiplexOutput::availability_zones): A list of availability zones for the multiplex.
/// - [`destinations(Option<Vec<MultiplexOutputDestination>>)`](crate::output::DescribeMultiplexOutput::destinations): A list of the multiplex output destinations.
/// - [`id(Option<String>)`](crate::output::DescribeMultiplexOutput::id): The unique id of the multiplex.
/// - [`multiplex_settings(Option<MultiplexSettings>)`](crate::output::DescribeMultiplexOutput::multiplex_settings): Configuration for a multiplex event.
/// - [`name(Option<String>)`](crate::output::DescribeMultiplexOutput::name): The name of the multiplex.
/// - [`pipelines_running_count(i32)`](crate::output::DescribeMultiplexOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`program_count(i32)`](crate::output::DescribeMultiplexOutput::program_count): The number of programs in the multiplex.
/// - [`state(Option<MultiplexState>)`](crate::output::DescribeMultiplexOutput::state): The current state of the multiplex.
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DescribeMultiplexOutput::tags): A collection of key-value pairs.
/// - On failure, responds with [`SdkError<DescribeMultiplexError>`](crate::error::DescribeMultiplexError)
pub fn describe_multiplex(&self) -> fluent_builders::DescribeMultiplex<C, M, R> {
fluent_builders::DescribeMultiplex::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeMultiplexProgram`](crate::client::fluent_builders::DescribeMultiplexProgram) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::DescribeMultiplexProgram::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::DescribeMultiplexProgram::set_multiplex_id): The ID of the multiplex that the program belongs to.
/// - [`program_name(impl Into<String>)`](crate::client::fluent_builders::DescribeMultiplexProgram::program_name) / [`set_program_name(Option<String>)`](crate::client::fluent_builders::DescribeMultiplexProgram::set_program_name): The name of the program.
/// - On success, responds with [`DescribeMultiplexProgramOutput`](crate::output::DescribeMultiplexProgramOutput) with field(s):
/// - [`channel_id(Option<String>)`](crate::output::DescribeMultiplexProgramOutput::channel_id): The MediaLive channel associated with the program.
/// - [`multiplex_program_settings(Option<MultiplexProgramSettings>)`](crate::output::DescribeMultiplexProgramOutput::multiplex_program_settings): The settings for this multiplex program.
/// - [`packet_identifiers_map(Option<MultiplexProgramPacketIdentifiersMap>)`](crate::output::DescribeMultiplexProgramOutput::packet_identifiers_map): The packet identifier map for this multiplex program.
/// - [`pipeline_details(Option<Vec<MultiplexProgramPipelineDetail>>)`](crate::output::DescribeMultiplexProgramOutput::pipeline_details): Contains information about the current sources for the specified program in the specified multiplex. Keep in mind that each multiplex pipeline connects to both pipelines in a given source channel (the channel identified by the program). But only one of those channel pipelines is ever active at one time.
/// - [`program_name(Option<String>)`](crate::output::DescribeMultiplexProgramOutput::program_name): The name of the multiplex program.
/// - On failure, responds with [`SdkError<DescribeMultiplexProgramError>`](crate::error::DescribeMultiplexProgramError)
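    ///
    /// # Example
    ///
    /// A minimal sketch; the multiplex ID and program name are placeholders and `client`
    /// is assumed to be already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .describe_multiplex_program()
    ///     .multiplex_id("1234567")   // hypothetical multiplex ID
    ///     .program_name("program-1") // hypothetical program name
    ///     .send()
    ///     .await?;
    /// println!("program {:?} uses channel {:?}", resp.program_name(), resp.channel_id());
    /// # Ok(())
    /// # }
    /// ```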
pub fn describe_multiplex_program(&self) -> fluent_builders::DescribeMultiplexProgram<C, M, R> {
fluent_builders::DescribeMultiplexProgram::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeOffering`](crate::client::fluent_builders::DescribeOffering) operation.
///
/// - The fluent builder is configurable:
/// - [`offering_id(impl Into<String>)`](crate::client::fluent_builders::DescribeOffering::offering_id) / [`set_offering_id(Option<String>)`](crate::client::fluent_builders::DescribeOffering::set_offering_id): Unique offering ID, e.g. '87654321'
/// - On success, responds with [`DescribeOfferingOutput`](crate::output::DescribeOfferingOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DescribeOfferingOutput::arn): Unique offering ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:offering:87654321'
/// - [`currency_code(Option<String>)`](crate::output::DescribeOfferingOutput::currency_code): Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'
/// - [`duration(i32)`](crate::output::DescribeOfferingOutput::duration): Lease duration, e.g. '12'
/// - [`duration_units(Option<OfferingDurationUnits>)`](crate::output::DescribeOfferingOutput::duration_units): Units for duration, e.g. 'MONTHS'
/// - [`fixed_price(f64)`](crate::output::DescribeOfferingOutput::fixed_price): One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering
/// - [`offering_description(Option<String>)`](crate::output::DescribeOfferingOutput::offering_description): Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'
/// - [`offering_id(Option<String>)`](crate::output::DescribeOfferingOutput::offering_id): Unique offering ID, e.g. '87654321'
/// - [`offering_type(Option<OfferingType>)`](crate::output::DescribeOfferingOutput::offering_type): Offering type, e.g. 'NO_UPFRONT'
/// - [`region(Option<String>)`](crate::output::DescribeOfferingOutput::region): AWS region, e.g. 'us-west-2'
/// - [`resource_specification(Option<ReservationResourceSpecification>)`](crate::output::DescribeOfferingOutput::resource_specification): Resource configuration details
/// - [`usage_price(f64)`](crate::output::DescribeOfferingOutput::usage_price): Recurring usage charge for each reserved resource, e.g. '157.0'
/// - On failure, responds with [`SdkError<DescribeOfferingError>`](crate::error::DescribeOfferingError)
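    ///
    /// # Example
    ///
    /// A minimal sketch; the offering ID mirrors the example value in the docs above and
    /// `client` is assumed to be already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .describe_offering()
    ///     .offering_id("87654321") // example offering ID
    ///     .send()
    ///     .await?;
    /// println!(
    ///     "offering {:?}: fixed price {} {:?}",
    ///     resp.offering_id(),
    ///     resp.fixed_price(),
    ///     resp.currency_code(),
    /// );
    /// # Ok(())
    /// # }
    /// ```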
pub fn describe_offering(&self) -> fluent_builders::DescribeOffering<C, M, R> {
fluent_builders::DescribeOffering::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeReservation`](crate::client::fluent_builders::DescribeReservation) operation.
///
/// - The fluent builder is configurable:
/// - [`reservation_id(impl Into<String>)`](crate::client::fluent_builders::DescribeReservation::reservation_id) / [`set_reservation_id(Option<String>)`](crate::client::fluent_builders::DescribeReservation::set_reservation_id): Unique reservation ID, e.g. '1234567'
/// - On success, responds with [`DescribeReservationOutput`](crate::output::DescribeReservationOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::DescribeReservationOutput::arn): Unique reservation ARN, e.g. 'arn:aws:medialive:us-west-2:123456789012:reservation:1234567'
/// - [`count(i32)`](crate::output::DescribeReservationOutput::count): Number of reserved resources
/// - [`currency_code(Option<String>)`](crate::output::DescribeReservationOutput::currency_code): Currency code for usagePrice and fixedPrice in ISO-4217 format, e.g. 'USD'
/// - [`duration(i32)`](crate::output::DescribeReservationOutput::duration): Lease duration, e.g. '12'
/// - [`duration_units(Option<OfferingDurationUnits>)`](crate::output::DescribeReservationOutput::duration_units): Units for duration, e.g. 'MONTHS'
/// - [`end(Option<String>)`](crate::output::DescribeReservationOutput::end): Reservation UTC end date and time in ISO-8601 format, e.g. '2019-03-01T00:00:00'
/// - [`fixed_price(f64)`](crate::output::DescribeReservationOutput::fixed_price): One-time charge for each reserved resource, e.g. '0.0' for a NO_UPFRONT offering
/// - [`name(Option<String>)`](crate::output::DescribeReservationOutput::name): User specified reservation name
/// - [`offering_description(Option<String>)`](crate::output::DescribeReservationOutput::offering_description): Offering description, e.g. 'HD AVC output at 10-20 Mbps, 30 fps, and standard VQ in US West (Oregon)'
/// - [`offering_id(Option<String>)`](crate::output::DescribeReservationOutput::offering_id): Unique offering ID, e.g. '87654321'
/// - [`offering_type(Option<OfferingType>)`](crate::output::DescribeReservationOutput::offering_type): Offering type, e.g. 'NO_UPFRONT'
/// - [`region(Option<String>)`](crate::output::DescribeReservationOutput::region): AWS region, e.g. 'us-west-2'
/// - [`reservation_id(Option<String>)`](crate::output::DescribeReservationOutput::reservation_id): Unique reservation ID, e.g. '1234567'
/// - [`resource_specification(Option<ReservationResourceSpecification>)`](crate::output::DescribeReservationOutput::resource_specification): Resource configuration details
/// - [`start(Option<String>)`](crate::output::DescribeReservationOutput::start): Reservation UTC start date and time in ISO-8601 format, e.g. '2018-03-01T00:00:00'
/// - [`state(Option<ReservationState>)`](crate::output::DescribeReservationOutput::state): Current state of reservation, e.g. 'ACTIVE'
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::DescribeReservationOutput::tags): A collection of key-value pairs
/// - [`usage_price(f64)`](crate::output::DescribeReservationOutput::usage_price): Recurring usage charge for each reserved resource, e.g. '157.0'
/// - On failure, responds with [`SdkError<DescribeReservationError>`](crate::error::DescribeReservationError)
pub fn describe_reservation(&self) -> fluent_builders::DescribeReservation<C, M, R> {
fluent_builders::DescribeReservation::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DescribeSchedule`](crate::client::fluent_builders::DescribeSchedule) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::DescribeSchedule::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::DescribeSchedule::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::DescribeSchedule::set_channel_id): Id of the channel whose schedule is being described.
/// - [`max_results(i32)`](crate::client::fluent_builders::DescribeSchedule::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::DescribeSchedule::set_max_results): The maximum number of items to return.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::DescribeSchedule::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::DescribeSchedule::set_next_token): The token to retrieve the next page of results.
/// - On success, responds with [`DescribeScheduleOutput`](crate::output::DescribeScheduleOutput) with field(s):
/// - [`next_token(Option<String>)`](crate::output::DescribeScheduleOutput::next_token): The next token; for use in pagination.
/// - [`schedule_actions(Option<Vec<ScheduleAction>>)`](crate::output::DescribeScheduleOutput::schedule_actions): The list of actions in the schedule.
/// - On failure, responds with [`SdkError<DescribeScheduleError>`](crate::error::DescribeScheduleError)
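    ///
    /// # Example
    ///
    /// A sketch of paging through a channel's schedule by hand with `next_token`; the
    /// channel ID is a placeholder, `client` is assumed to be already configured, and
    /// `into_paginator()` (linked above) is the more convenient alternative.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let mut next_token: Option<String> = None;
    /// loop {
    ///     let mut req = client
    ///         .describe_schedule()
    ///         .channel_id("1234567") // hypothetical channel ID
    ///         .max_results(50);
    ///     if let Some(token) = next_token.take() {
    ///         req = req.next_token(token);
    ///     }
    ///     let page = req.send().await?;
    ///     for action in page.schedule_actions().unwrap_or_default() {
    ///         println!("scheduled action: {:?}", action);
    ///     }
    ///     next_token = page.next_token().map(str::to_string);
    ///     if next_token.is_none() {
    ///         break;
    ///     }
    /// }
    /// # Ok(())
    /// # }
    /// ```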
pub fn describe_schedule(&self) -> fluent_builders::DescribeSchedule<C, M, R> {
fluent_builders::DescribeSchedule::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListChannels`](crate::client::fluent_builders::ListChannels) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListChannels::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`max_results(i32)`](crate::client::fluent_builders::ListChannels::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListChannels::set_max_results): The maximum number of items to return.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListChannels::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListChannels::set_next_token): The token to retrieve the next page of results.
/// - On success, responds with [`ListChannelsOutput`](crate::output::ListChannelsOutput) with field(s):
/// - [`channels(Option<Vec<ChannelSummary>>)`](crate::output::ListChannelsOutput::channels): List of channel summaries.
/// - [`next_token(Option<String>)`](crate::output::ListChannelsOutput::next_token): Token to retrieve the next page of results.
/// - On failure, responds with [`SdkError<ListChannelsError>`](crate::error::ListChannelsError)
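    ///
    /// # Example
    ///
    /// A minimal sketch of fetching a single page of channel summaries; `client` is assumed
    /// to be already configured, and `into_paginator()` (linked above) can be used instead
    /// of handling `next_token` manually.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client.list_channels().max_results(25).send().await?;
    /// println!("{} channel(s) on this page", resp.channels().unwrap_or_default().len());
    /// if let Some(token) = resp.next_token() {
    ///     println!("more results available; next token: {}", token);
    /// }
    /// # Ok(())
    /// # }
    /// ```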
pub fn list_channels(&self) -> fluent_builders::ListChannels<C, M, R> {
fluent_builders::ListChannels::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListInputDevices`](crate::client::fluent_builders::ListInputDevices) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListInputDevices::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`max_results(i32)`](crate::client::fluent_builders::ListInputDevices::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListInputDevices::set_max_results): The maximum number of items to return.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListInputDevices::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListInputDevices::set_next_token): The token to retrieve the next page of results.
/// - On success, responds with [`ListInputDevicesOutput`](crate::output::ListInputDevicesOutput) with field(s):
/// - [`input_devices(Option<Vec<InputDeviceSummary>>)`](crate::output::ListInputDevicesOutput::input_devices): The list of input devices.
/// - [`next_token(Option<String>)`](crate::output::ListInputDevicesOutput::next_token): A token to get additional list results.
/// - On failure, responds with [`SdkError<ListInputDevicesError>`](crate::error::ListInputDevicesError)
pub fn list_input_devices(&self) -> fluent_builders::ListInputDevices<C, M, R> {
fluent_builders::ListInputDevices::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListInputDeviceTransfers`](crate::client::fluent_builders::ListInputDeviceTransfers) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListInputDeviceTransfers::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`max_results(i32)`](crate::client::fluent_builders::ListInputDeviceTransfers::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListInputDeviceTransfers::set_max_results): The maximum number of items to return.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListInputDeviceTransfers::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListInputDeviceTransfers::set_next_token): The token to retrieve the next page of results.
/// - [`transfer_type(impl Into<String>)`](crate::client::fluent_builders::ListInputDeviceTransfers::transfer_type) / [`set_transfer_type(Option<String>)`](crate::client::fluent_builders::ListInputDeviceTransfers::set_transfer_type): The type of transfer to list.
/// - On success, responds with [`ListInputDeviceTransfersOutput`](crate::output::ListInputDeviceTransfersOutput) with field(s):
/// - [`input_device_transfers(Option<Vec<TransferringInputDeviceSummary>>)`](crate::output::ListInputDeviceTransfersOutput::input_device_transfers): The list of devices that you are transferring or are being transferred to you.
/// - [`next_token(Option<String>)`](crate::output::ListInputDeviceTransfersOutput::next_token): A token to get additional list results.
/// - On failure, responds with [`SdkError<ListInputDeviceTransfersError>`](crate::error::ListInputDeviceTransfersError)
pub fn list_input_device_transfers(
&self,
) -> fluent_builders::ListInputDeviceTransfers<C, M, R> {
fluent_builders::ListInputDeviceTransfers::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListInputs`](crate::client::fluent_builders::ListInputs) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListInputs::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`max_results(i32)`](crate::client::fluent_builders::ListInputs::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListInputs::set_max_results): The maximum number of items to return.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListInputs::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListInputs::set_next_token): The token to retrieve the next page of results.
/// - On success, responds with [`ListInputsOutput`](crate::output::ListInputsOutput) with field(s):
/// - [`inputs(Option<Vec<Input>>)`](crate::output::ListInputsOutput::inputs): List of inputs.
/// - [`next_token(Option<String>)`](crate::output::ListInputsOutput::next_token): Token to retrieve the next page of results.
/// - On failure, responds with [`SdkError<ListInputsError>`](crate::error::ListInputsError)
pub fn list_inputs(&self) -> fluent_builders::ListInputs<C, M, R> {
fluent_builders::ListInputs::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListInputSecurityGroups`](crate::client::fluent_builders::ListInputSecurityGroups) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListInputSecurityGroups::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`max_results(i32)`](crate::client::fluent_builders::ListInputSecurityGroups::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListInputSecurityGroups::set_max_results): The maximum number of items to return.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListInputSecurityGroups::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListInputSecurityGroups::set_next_token): The token to retrieve the next page of results.
/// - On success, responds with [`ListInputSecurityGroupsOutput`](crate::output::ListInputSecurityGroupsOutput) with field(s):
/// - [`input_security_groups(Option<Vec<InputSecurityGroup>>)`](crate::output::ListInputSecurityGroupsOutput::input_security_groups): List of input security groups
/// - [`next_token(Option<String>)`](crate::output::ListInputSecurityGroupsOutput::next_token): Token to retrieve the next page of results.
/// - On failure, responds with [`SdkError<ListInputSecurityGroupsError>`](crate::error::ListInputSecurityGroupsError)
pub fn list_input_security_groups(&self) -> fluent_builders::ListInputSecurityGroups<C, M, R> {
fluent_builders::ListInputSecurityGroups::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListMultiplexes`](crate::client::fluent_builders::ListMultiplexes) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListMultiplexes::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`max_results(i32)`](crate::client::fluent_builders::ListMultiplexes::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListMultiplexes::set_max_results): The maximum number of items to return.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListMultiplexes::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListMultiplexes::set_next_token): The token to retrieve the next page of results.
/// - On success, responds with [`ListMultiplexesOutput`](crate::output::ListMultiplexesOutput) with field(s):
/// - [`multiplexes(Option<Vec<MultiplexSummary>>)`](crate::output::ListMultiplexesOutput::multiplexes): List of multiplexes.
/// - [`next_token(Option<String>)`](crate::output::ListMultiplexesOutput::next_token): Token for the next ListMultiplexes request.
/// - On failure, responds with [`SdkError<ListMultiplexesError>`](crate::error::ListMultiplexesError)
pub fn list_multiplexes(&self) -> fluent_builders::ListMultiplexes<C, M, R> {
fluent_builders::ListMultiplexes::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListMultiplexPrograms`](crate::client::fluent_builders::ListMultiplexPrograms) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListMultiplexPrograms::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`max_results(i32)`](crate::client::fluent_builders::ListMultiplexPrograms::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListMultiplexPrograms::set_max_results): The maximum number of items to return.
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::ListMultiplexPrograms::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::ListMultiplexPrograms::set_multiplex_id): The ID of the multiplex that the programs belong to.
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListMultiplexPrograms::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListMultiplexPrograms::set_next_token): The token to retrieve the next page of results.
/// - On success, responds with [`ListMultiplexProgramsOutput`](crate::output::ListMultiplexProgramsOutput) with field(s):
/// - [`multiplex_programs(Option<Vec<MultiplexProgramSummary>>)`](crate::output::ListMultiplexProgramsOutput::multiplex_programs): List of multiplex programs.
/// - [`next_token(Option<String>)`](crate::output::ListMultiplexProgramsOutput::next_token): Token for the next ListMultiplexPrograms request.
/// - On failure, responds with [`SdkError<ListMultiplexProgramsError>`](crate::error::ListMultiplexProgramsError)
pub fn list_multiplex_programs(&self) -> fluent_builders::ListMultiplexPrograms<C, M, R> {
fluent_builders::ListMultiplexPrograms::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListOfferings`](crate::client::fluent_builders::ListOfferings) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListOfferings::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`channel_class(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::channel_class) / [`set_channel_class(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_channel_class): Filter by channel class, 'STANDARD' or 'SINGLE_PIPELINE'
/// - [`channel_configuration(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::channel_configuration) / [`set_channel_configuration(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_channel_configuration): Filter to offerings that match the configuration of an existing channel, e.g. '2345678' (a channel ID)
/// - [`codec(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::codec) / [`set_codec(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_codec): Filter by codec, 'AVC', 'HEVC', 'MPEG2', 'AUDIO', or 'LINK'
/// - [`duration(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::duration) / [`set_duration(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_duration): Filter by offering duration, e.g. '12'
/// - [`max_results(i32)`](crate::client::fluent_builders::ListOfferings::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListOfferings::set_max_results): The maximum number of items to return.
/// - [`maximum_bitrate(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::maximum_bitrate) / [`set_maximum_bitrate(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_maximum_bitrate): Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'
/// - [`maximum_framerate(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::maximum_framerate) / [`set_maximum_framerate(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_maximum_framerate): Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_next_token): The token to retrieve the next page of results.
/// - [`resolution(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::resolution) / [`set_resolution(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_resolution): Filter by resolution, 'SD', 'HD', 'FHD', or 'UHD'
/// - [`resource_type(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::resource_type) / [`set_resource_type(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_resource_type): Filter by resource type, 'INPUT', 'OUTPUT', 'MULTIPLEX', or 'CHANNEL'
/// - [`special_feature(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::special_feature) / [`set_special_feature(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_special_feature): Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'
/// - [`video_quality(impl Into<String>)`](crate::client::fluent_builders::ListOfferings::video_quality) / [`set_video_quality(Option<String>)`](crate::client::fluent_builders::ListOfferings::set_video_quality): Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'
/// - On success, responds with [`ListOfferingsOutput`](crate::output::ListOfferingsOutput) with field(s):
/// - [`next_token(Option<String>)`](crate::output::ListOfferingsOutput::next_token): Token to retrieve the next page of results
/// - [`offerings(Option<Vec<Offering>>)`](crate::output::ListOfferingsOutput::offerings): List of offerings
/// - On failure, responds with [`SdkError<ListOfferingsError>`](crate::error::ListOfferingsError)
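    ///
    /// # Example
    ///
    /// A sketch of listing offerings with a few of the string filters described above; the
    /// filter values are examples taken from the parameter docs, and `client` is assumed to
    /// be already configured.
    /// ```no_run
    /// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), aws_sdk_medialive::Error> {
    /// let resp = client
    ///     .list_offerings()
    ///     .channel_class("STANDARD")
    ///     .codec("AVC")
    ///     .resolution("HD")
    ///     .maximum_bitrate("MAX_20_MBPS")
    ///     .send()
    ///     .await?;
    /// for offering in resp.offerings().unwrap_or_default() {
    ///     println!("{:?}", offering);
    /// }
    /// # Ok(())
    /// # }
    /// ```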
pub fn list_offerings(&self) -> fluent_builders::ListOfferings<C, M, R> {
fluent_builders::ListOfferings::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListReservations`](crate::client::fluent_builders::ListReservations) operation.
/// This operation supports pagination; see [`into_paginator()`](crate::client::fluent_builders::ListReservations::into_paginator).
///
/// - The fluent builder is configurable:
/// - [`channel_class(impl Into<String>)`](crate::client::fluent_builders::ListReservations::channel_class) / [`set_channel_class(Option<String>)`](crate::client::fluent_builders::ListReservations::set_channel_class): Filter by channel class, 'STANDARD' or 'SINGLE_PIPELINE'
/// - [`codec(impl Into<String>)`](crate::client::fluent_builders::ListReservations::codec) / [`set_codec(Option<String>)`](crate::client::fluent_builders::ListReservations::set_codec): Filter by codec, 'AVC', 'HEVC', 'MPEG2', 'AUDIO', or 'LINK'
/// - [`max_results(i32)`](crate::client::fluent_builders::ListReservations::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListReservations::set_max_results): The maximum number of items to return.
/// - [`maximum_bitrate(impl Into<String>)`](crate::client::fluent_builders::ListReservations::maximum_bitrate) / [`set_maximum_bitrate(Option<String>)`](crate::client::fluent_builders::ListReservations::set_maximum_bitrate): Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'
/// - [`maximum_framerate(impl Into<String>)`](crate::client::fluent_builders::ListReservations::maximum_framerate) / [`set_maximum_framerate(Option<String>)`](crate::client::fluent_builders::ListReservations::set_maximum_framerate): Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListReservations::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListReservations::set_next_token): The token to retrieve the next page of results.
/// - [`resolution(impl Into<String>)`](crate::client::fluent_builders::ListReservations::resolution) / [`set_resolution(Option<String>)`](crate::client::fluent_builders::ListReservations::set_resolution): Filter by resolution, 'SD', 'HD', 'FHD', or 'UHD'
/// - [`resource_type(impl Into<String>)`](crate::client::fluent_builders::ListReservations::resource_type) / [`set_resource_type(Option<String>)`](crate::client::fluent_builders::ListReservations::set_resource_type): Filter by resource type, 'INPUT', 'OUTPUT', 'MULTIPLEX', or 'CHANNEL'
/// - [`special_feature(impl Into<String>)`](crate::client::fluent_builders::ListReservations::special_feature) / [`set_special_feature(Option<String>)`](crate::client::fluent_builders::ListReservations::set_special_feature): Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'
/// - [`video_quality(impl Into<String>)`](crate::client::fluent_builders::ListReservations::video_quality) / [`set_video_quality(Option<String>)`](crate::client::fluent_builders::ListReservations::set_video_quality): Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'
/// - On success, responds with [`ListReservationsOutput`](crate::output::ListReservationsOutput) with field(s):
/// - [`next_token(Option<String>)`](crate::output::ListReservationsOutput::next_token): Token to retrieve the next page of results
/// - [`reservations(Option<Vec<Reservation>>)`](crate::output::ListReservationsOutput::reservations): List of reservations
/// - On failure, responds with [`SdkError<ListReservationsError>`](crate::error::ListReservationsError)
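    ///
    /// The sketch below is illustrative rather than part of the generated documentation. It
    /// assumes the crate is used as `aws_sdk_medialive` and that the paginator returned by
    /// `into_paginator()` exposes a `send()` method yielding a `tokio_stream::Stream` of pages,
    /// as in other clients of this SDK generation; adjust names to your setup.
    ///
    /// ```no_run
    /// # async fn sketch(client: aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    /// use tokio_stream::StreamExt;
    ///
    /// // Page through reservations, filtered to HD resolution.
    /// let mut pages = client
    ///     .list_reservations()
    ///     .resolution("HD")
    ///     .into_paginator()
    ///     .send();
    /// while let Some(page) = pages.next().await {
    ///     // Each stream item is a Result holding one ListReservationsOutput page.
    ///     let page = page?;
    ///     for reservation in page.reservations().unwrap_or_default() {
    ///         println!("{:?}", reservation);
    ///     }
    /// }
    /// # Ok(())
    /// # }
    /// ```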
pub fn list_reservations(&self) -> fluent_builders::ListReservations<C, M, R> {
fluent_builders::ListReservations::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListTagsForResource`](crate::client::fluent_builders::ListTagsForResource) operation.
///
/// - The fluent builder is configurable:
    ///   - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::ListTagsForResource::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::ListTagsForResource::set_resource_arn): The Amazon Resource Name (ARN) of the resource to list tags for
/// - On success, responds with [`ListTagsForResourceOutput`](crate::output::ListTagsForResourceOutput) with field(s):
    ///   - [`tags(Option<HashMap<String, String>>)`](crate::output::ListTagsForResourceOutput::tags): A collection of key-value pairs.
/// - On failure, responds with [`SdkError<ListTagsForResourceError>`](crate::error::ListTagsForResourceError)
pub fn list_tags_for_resource(&self) -> fluent_builders::ListTagsForResource<C, M, R> {
fluent_builders::ListTagsForResource::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`PurchaseOffering`](crate::client::fluent_builders::PurchaseOffering) operation.
///
/// - The fluent builder is configurable:
/// - [`count(i32)`](crate::client::fluent_builders::PurchaseOffering::count) / [`set_count(i32)`](crate::client::fluent_builders::PurchaseOffering::set_count): Number of resources
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::PurchaseOffering::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::PurchaseOffering::set_name): Name for the new reservation
/// - [`offering_id(impl Into<String>)`](crate::client::fluent_builders::PurchaseOffering::offering_id) / [`set_offering_id(Option<String>)`](crate::client::fluent_builders::PurchaseOffering::set_offering_id): Offering to purchase, e.g. '87654321'
    ///   - [`request_id(impl Into<String>)`](crate::client::fluent_builders::PurchaseOffering::request_id) / [`set_request_id(Option<String>)`](crate::client::fluent_builders::PurchaseOffering::set_request_id): Unique request ID to be specified. This is needed to prevent retries from creating multiple resources. (A usage sketch follows this list.)
/// - [`start(impl Into<String>)`](crate::client::fluent_builders::PurchaseOffering::start) / [`set_start(Option<String>)`](crate::client::fluent_builders::PurchaseOffering::set_start): Requested reservation start time (UTC) in ISO-8601 format. The specified time must be between the first day of the current month and one year from now. If no value is given, the default is now.
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::PurchaseOffering::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::PurchaseOffering::set_tags): A collection of key-value pairs
/// - On success, responds with [`PurchaseOfferingOutput`](crate::output::PurchaseOfferingOutput) with field(s):
/// - [`reservation(Option<Reservation>)`](crate::output::PurchaseOfferingOutput::reservation): Reserved resources available to use
/// - On failure, responds with [`SdkError<PurchaseOfferingError>`](crate::error::PurchaseOfferingError)
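    ///
    /// The sketch below is illustrative rather than part of the generated documentation; it
    /// assumes the crate is used as `aws_sdk_medialive`, and the offering ID and request ID are
    /// placeholders. Reusing the same request ID on a retry prevents a duplicate purchase.
    ///
    /// ```no_run
    /// # async fn sketch(client: aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    /// let output = client
    ///     .purchase_offering()
    ///     .offering_id("87654321")
    ///     .count(1)
    ///     // Keep this token stable across retries of the same logical purchase.
    ///     .request_id("my-purchase-request-001")
    ///     .send()
    ///     .await?;
    /// println!("{:?}", output);
    /// # Ok(())
    /// # }
    /// ```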
pub fn purchase_offering(&self) -> fluent_builders::PurchaseOffering<C, M, R> {
fluent_builders::PurchaseOffering::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`RejectInputDeviceTransfer`](crate::client::fluent_builders::RejectInputDeviceTransfer) operation.
///
/// - The fluent builder is configurable:
/// - [`input_device_id(impl Into<String>)`](crate::client::fluent_builders::RejectInputDeviceTransfer::input_device_id) / [`set_input_device_id(Option<String>)`](crate::client::fluent_builders::RejectInputDeviceTransfer::set_input_device_id): The unique ID of the input device to reject. For example, hd-123456789abcdef.
/// - On success, responds with [`RejectInputDeviceTransferOutput`](crate::output::RejectInputDeviceTransferOutput)
/// - On failure, responds with [`SdkError<RejectInputDeviceTransferError>`](crate::error::RejectInputDeviceTransferError)
pub fn reject_input_device_transfer(
&self,
) -> fluent_builders::RejectInputDeviceTransfer<C, M, R> {
fluent_builders::RejectInputDeviceTransfer::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`StartChannel`](crate::client::fluent_builders::StartChannel) operation.
///
/// - The fluent builder is configurable:
    ///   - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::StartChannel::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::StartChannel::set_channel_id): The ID of the channel to start.
/// - On success, responds with [`StartChannelOutput`](crate::output::StartChannelOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::StartChannelOutput::arn): The unique arn of the channel.
/// - [`cdi_input_specification(Option<CdiInputSpecification>)`](crate::output::StartChannelOutput::cdi_input_specification): Specification of CDI inputs for this channel
/// - [`channel_class(Option<ChannelClass>)`](crate::output::StartChannelOutput::channel_class): The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one pipeline.
/// - [`destinations(Option<Vec<OutputDestination>>)`](crate::output::StartChannelOutput::destinations): A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types (HLS, for example), there is one destination per packager.
/// - [`egress_endpoints(Option<Vec<ChannelEgressEndpoint>>)`](crate::output::StartChannelOutput::egress_endpoints): The endpoints where outgoing connections initiate from
/// - [`encoder_settings(Option<EncoderSettings>)`](crate::output::StartChannelOutput::encoder_settings): Encoder Settings
/// - [`id(Option<String>)`](crate::output::StartChannelOutput::id): The unique id of the channel.
/// - [`input_attachments(Option<Vec<InputAttachment>>)`](crate::output::StartChannelOutput::input_attachments): List of input attachments for channel.
/// - [`input_specification(Option<InputSpecification>)`](crate::output::StartChannelOutput::input_specification): Specification of network and file inputs for this channel
/// - [`log_level(Option<LogLevel>)`](crate::output::StartChannelOutput::log_level): The log level being written to CloudWatch Logs.
/// - [`name(Option<String>)`](crate::output::StartChannelOutput::name): The name of the channel. (user-mutable)
/// - [`pipeline_details(Option<Vec<PipelineDetail>>)`](crate::output::StartChannelOutput::pipeline_details): Runtime details for the pipelines of a running channel.
/// - [`pipelines_running_count(i32)`](crate::output::StartChannelOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`role_arn(Option<String>)`](crate::output::StartChannelOutput::role_arn): The Amazon Resource Name (ARN) of the role assumed when running the Channel.
    ///   - [`state(Option<ChannelState>)`](crate::output::StartChannelOutput::state): The state of the channel.
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::StartChannelOutput::tags): A collection of key-value pairs.
/// - [`vpc(Option<VpcOutputSettingsDescription>)`](crate::output::StartChannelOutput::vpc): Settings for VPC output
/// - On failure, responds with [`SdkError<StartChannelError>`](crate::error::StartChannelError)
pub fn start_channel(&self) -> fluent_builders::StartChannel<C, M, R> {
fluent_builders::StartChannel::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`StartMultiplex`](crate::client::fluent_builders::StartMultiplex) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::StartMultiplex::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::StartMultiplex::set_multiplex_id): The ID of the multiplex.
/// - On success, responds with [`StartMultiplexOutput`](crate::output::StartMultiplexOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::StartMultiplexOutput::arn): The unique arn of the multiplex.
/// - [`availability_zones(Option<Vec<String>>)`](crate::output::StartMultiplexOutput::availability_zones): A list of availability zones for the multiplex.
/// - [`destinations(Option<Vec<MultiplexOutputDestination>>)`](crate::output::StartMultiplexOutput::destinations): A list of the multiplex output destinations.
/// - [`id(Option<String>)`](crate::output::StartMultiplexOutput::id): The unique id of the multiplex.
/// - [`multiplex_settings(Option<MultiplexSettings>)`](crate::output::StartMultiplexOutput::multiplex_settings): Configuration for a multiplex event.
/// - [`name(Option<String>)`](crate::output::StartMultiplexOutput::name): The name of the multiplex.
/// - [`pipelines_running_count(i32)`](crate::output::StartMultiplexOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`program_count(i32)`](crate::output::StartMultiplexOutput::program_count): The number of programs in the multiplex.
/// - [`state(Option<MultiplexState>)`](crate::output::StartMultiplexOutput::state): The current state of the multiplex.
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::StartMultiplexOutput::tags): A collection of key-value pairs.
/// - On failure, responds with [`SdkError<StartMultiplexError>`](crate::error::StartMultiplexError)
pub fn start_multiplex(&self) -> fluent_builders::StartMultiplex<C, M, R> {
fluent_builders::StartMultiplex::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`StopChannel`](crate::client::fluent_builders::StopChannel) operation.
///
/// - The fluent builder is configurable:
    ///   - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::StopChannel::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::StopChannel::set_channel_id): The ID of the channel to stop.
/// - On success, responds with [`StopChannelOutput`](crate::output::StopChannelOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::StopChannelOutput::arn): The unique arn of the channel.
/// - [`cdi_input_specification(Option<CdiInputSpecification>)`](crate::output::StopChannelOutput::cdi_input_specification): Specification of CDI inputs for this channel
/// - [`channel_class(Option<ChannelClass>)`](crate::output::StopChannelOutput::channel_class): The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one pipeline.
/// - [`destinations(Option<Vec<OutputDestination>>)`](crate::output::StopChannelOutput::destinations): A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types (HLS, for example), there is one destination per packager.
/// - [`egress_endpoints(Option<Vec<ChannelEgressEndpoint>>)`](crate::output::StopChannelOutput::egress_endpoints): The endpoints where outgoing connections initiate from
/// - [`encoder_settings(Option<EncoderSettings>)`](crate::output::StopChannelOutput::encoder_settings): Encoder Settings
/// - [`id(Option<String>)`](crate::output::StopChannelOutput::id): The unique id of the channel.
/// - [`input_attachments(Option<Vec<InputAttachment>>)`](crate::output::StopChannelOutput::input_attachments): List of input attachments for channel.
/// - [`input_specification(Option<InputSpecification>)`](crate::output::StopChannelOutput::input_specification): Specification of network and file inputs for this channel
/// - [`log_level(Option<LogLevel>)`](crate::output::StopChannelOutput::log_level): The log level being written to CloudWatch Logs.
/// - [`name(Option<String>)`](crate::output::StopChannelOutput::name): The name of the channel. (user-mutable)
/// - [`pipeline_details(Option<Vec<PipelineDetail>>)`](crate::output::StopChannelOutput::pipeline_details): Runtime details for the pipelines of a running channel.
/// - [`pipelines_running_count(i32)`](crate::output::StopChannelOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`role_arn(Option<String>)`](crate::output::StopChannelOutput::role_arn): The Amazon Resource Name (ARN) of the role assumed when running the Channel.
    ///   - [`state(Option<ChannelState>)`](crate::output::StopChannelOutput::state): The state of the channel.
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::StopChannelOutput::tags): A collection of key-value pairs.
/// - [`vpc(Option<VpcOutputSettingsDescription>)`](crate::output::StopChannelOutput::vpc): Settings for VPC output
/// - On failure, responds with [`SdkError<StopChannelError>`](crate::error::StopChannelError)
pub fn stop_channel(&self) -> fluent_builders::StopChannel<C, M, R> {
fluent_builders::StopChannel::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`StopMultiplex`](crate::client::fluent_builders::StopMultiplex) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::StopMultiplex::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::StopMultiplex::set_multiplex_id): The ID of the multiplex.
/// - On success, responds with [`StopMultiplexOutput`](crate::output::StopMultiplexOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::StopMultiplexOutput::arn): The unique arn of the multiplex.
/// - [`availability_zones(Option<Vec<String>>)`](crate::output::StopMultiplexOutput::availability_zones): A list of availability zones for the multiplex.
/// - [`destinations(Option<Vec<MultiplexOutputDestination>>)`](crate::output::StopMultiplexOutput::destinations): A list of the multiplex output destinations.
/// - [`id(Option<String>)`](crate::output::StopMultiplexOutput::id): The unique id of the multiplex.
/// - [`multiplex_settings(Option<MultiplexSettings>)`](crate::output::StopMultiplexOutput::multiplex_settings): Configuration for a multiplex event.
/// - [`name(Option<String>)`](crate::output::StopMultiplexOutput::name): The name of the multiplex.
/// - [`pipelines_running_count(i32)`](crate::output::StopMultiplexOutput::pipelines_running_count): The number of currently healthy pipelines.
/// - [`program_count(i32)`](crate::output::StopMultiplexOutput::program_count): The number of programs in the multiplex.
/// - [`state(Option<MultiplexState>)`](crate::output::StopMultiplexOutput::state): The current state of the multiplex.
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::StopMultiplexOutput::tags): A collection of key-value pairs.
/// - On failure, responds with [`SdkError<StopMultiplexError>`](crate::error::StopMultiplexError)
pub fn stop_multiplex(&self) -> fluent_builders::StopMultiplex<C, M, R> {
fluent_builders::StopMultiplex::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`TransferInputDevice`](crate::client::fluent_builders::TransferInputDevice) operation.
///
/// - The fluent builder is configurable:
/// - [`input_device_id(impl Into<String>)`](crate::client::fluent_builders::TransferInputDevice::input_device_id) / [`set_input_device_id(Option<String>)`](crate::client::fluent_builders::TransferInputDevice::set_input_device_id): The unique ID of this input device. For example, hd-123456789abcdef.
/// - [`target_customer_id(impl Into<String>)`](crate::client::fluent_builders::TransferInputDevice::target_customer_id) / [`set_target_customer_id(Option<String>)`](crate::client::fluent_builders::TransferInputDevice::set_target_customer_id): The AWS account ID (12 digits) for the recipient of the device transfer.
/// - [`target_region(impl Into<String>)`](crate::client::fluent_builders::TransferInputDevice::target_region) / [`set_target_region(Option<String>)`](crate::client::fluent_builders::TransferInputDevice::set_target_region): The target AWS region to transfer the device.
/// - [`transfer_message(impl Into<String>)`](crate::client::fluent_builders::TransferInputDevice::transfer_message) / [`set_transfer_message(Option<String>)`](crate::client::fluent_builders::TransferInputDevice::set_transfer_message): An optional message for the recipient. Maximum 280 characters.
/// - On success, responds with [`TransferInputDeviceOutput`](crate::output::TransferInputDeviceOutput)
/// - On failure, responds with [`SdkError<TransferInputDeviceError>`](crate::error::TransferInputDeviceError)
pub fn transfer_input_device(&self) -> fluent_builders::TransferInputDevice<C, M, R> {
fluent_builders::TransferInputDevice::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateChannel`](crate::client::fluent_builders::UpdateChannel) operation.
///
/// - The fluent builder is configurable:
/// - [`cdi_input_specification(CdiInputSpecification)`](crate::client::fluent_builders::UpdateChannel::cdi_input_specification) / [`set_cdi_input_specification(Option<CdiInputSpecification>)`](crate::client::fluent_builders::UpdateChannel::set_cdi_input_specification): Specification of CDI inputs for this channel
/// - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::UpdateChannel::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::UpdateChannel::set_channel_id): channel ID
/// - [`destinations(Vec<OutputDestination>)`](crate::client::fluent_builders::UpdateChannel::destinations) / [`set_destinations(Option<Vec<OutputDestination>>)`](crate::client::fluent_builders::UpdateChannel::set_destinations): A list of output destinations for this channel.
/// - [`encoder_settings(EncoderSettings)`](crate::client::fluent_builders::UpdateChannel::encoder_settings) / [`set_encoder_settings(Option<EncoderSettings>)`](crate::client::fluent_builders::UpdateChannel::set_encoder_settings): The encoder settings for this channel.
    ///   - [`input_attachments(Vec<InputAttachment>)`](crate::client::fluent_builders::UpdateChannel::input_attachments) / [`set_input_attachments(Option<Vec<InputAttachment>>)`](crate::client::fluent_builders::UpdateChannel::set_input_attachments): List of input attachments for channel.
/// - [`input_specification(InputSpecification)`](crate::client::fluent_builders::UpdateChannel::input_specification) / [`set_input_specification(Option<InputSpecification>)`](crate::client::fluent_builders::UpdateChannel::set_input_specification): Specification of network and file inputs for this channel
/// - [`log_level(LogLevel)`](crate::client::fluent_builders::UpdateChannel::log_level) / [`set_log_level(Option<LogLevel>)`](crate::client::fluent_builders::UpdateChannel::set_log_level): The log level to write to CloudWatch Logs.
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::UpdateChannel::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::UpdateChannel::set_name): The name of the channel.
    ///   - [`role_arn(impl Into<String>)`](crate::client::fluent_builders::UpdateChannel::role_arn) / [`set_role_arn(Option<String>)`](crate::client::fluent_builders::UpdateChannel::set_role_arn): An optional Amazon Resource Name (ARN) of the role to assume when running the Channel. If you do not specify this on an update call but the role was previously set, that role will be removed.
/// - On success, responds with [`UpdateChannelOutput`](crate::output::UpdateChannelOutput) with field(s):
    ///   - [`channel(Option<Channel>)`](crate::output::UpdateChannelOutput::channel): The updated channel.
/// - On failure, responds with [`SdkError<UpdateChannelError>`](crate::error::UpdateChannelError)
pub fn update_channel(&self) -> fluent_builders::UpdateChannel<C, M, R> {
fluent_builders::UpdateChannel::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateChannelClass`](crate::client::fluent_builders::UpdateChannelClass) operation.
///
/// - The fluent builder is configurable:
/// - [`channel_class(ChannelClass)`](crate::client::fluent_builders::UpdateChannelClass::channel_class) / [`set_channel_class(Option<ChannelClass>)`](crate::client::fluent_builders::UpdateChannelClass::set_channel_class): The channel class that you wish to update this channel to use.
/// - [`channel_id(impl Into<String>)`](crate::client::fluent_builders::UpdateChannelClass::channel_id) / [`set_channel_id(Option<String>)`](crate::client::fluent_builders::UpdateChannelClass::set_channel_id): Channel Id of the channel whose class should be updated.
/// - [`destinations(Vec<OutputDestination>)`](crate::client::fluent_builders::UpdateChannelClass::destinations) / [`set_destinations(Option<Vec<OutputDestination>>)`](crate::client::fluent_builders::UpdateChannelClass::set_destinations): A list of output destinations for this channel.
/// - On success, responds with [`UpdateChannelClassOutput`](crate::output::UpdateChannelClassOutput) with field(s):
    ///   - [`channel(Option<Channel>)`](crate::output::UpdateChannelClassOutput::channel): The updated channel.
/// - On failure, responds with [`SdkError<UpdateChannelClassError>`](crate::error::UpdateChannelClassError)
pub fn update_channel_class(&self) -> fluent_builders::UpdateChannelClass<C, M, R> {
fluent_builders::UpdateChannelClass::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateInput`](crate::client::fluent_builders::UpdateInput) operation.
///
/// - The fluent builder is configurable:
/// - [`destinations(Vec<InputDestinationRequest>)`](crate::client::fluent_builders::UpdateInput::destinations) / [`set_destinations(Option<Vec<InputDestinationRequest>>)`](crate::client::fluent_builders::UpdateInput::set_destinations): Destination settings for PUSH type inputs.
/// - [`input_devices(Vec<InputDeviceRequest>)`](crate::client::fluent_builders::UpdateInput::input_devices) / [`set_input_devices(Option<Vec<InputDeviceRequest>>)`](crate::client::fluent_builders::UpdateInput::set_input_devices): Settings for the devices.
/// - [`input_id(impl Into<String>)`](crate::client::fluent_builders::UpdateInput::input_id) / [`set_input_id(Option<String>)`](crate::client::fluent_builders::UpdateInput::set_input_id): Unique ID of the input.
/// - [`input_security_groups(Vec<String>)`](crate::client::fluent_builders::UpdateInput::input_security_groups) / [`set_input_security_groups(Option<Vec<String>>)`](crate::client::fluent_builders::UpdateInput::set_input_security_groups): A list of security groups referenced by IDs to attach to the input.
    ///   - [`media_connect_flows(Vec<MediaConnectFlowRequest>)`](crate::client::fluent_builders::UpdateInput::media_connect_flows) / [`set_media_connect_flows(Option<Vec<MediaConnectFlowRequest>>)`](crate::client::fluent_builders::UpdateInput::set_media_connect_flows): A list of the MediaConnect Flow ARNs that you want to use as the source of the input. You can specify as few as one Flow and presently, as many as two. The only requirement, when you have more than one Flow, is that each Flow is in a separate Availability Zone, as this ensures your EML input is resilient to AZ issues.
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::UpdateInput::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::UpdateInput::set_name): Name of the input.
/// - [`role_arn(impl Into<String>)`](crate::client::fluent_builders::UpdateInput::role_arn) / [`set_role_arn(Option<String>)`](crate::client::fluent_builders::UpdateInput::set_role_arn): The Amazon Resource Name (ARN) of the role this input assumes during and after creation.
/// - [`sources(Vec<InputSourceRequest>)`](crate::client::fluent_builders::UpdateInput::sources) / [`set_sources(Option<Vec<InputSourceRequest>>)`](crate::client::fluent_builders::UpdateInput::set_sources): The source URLs for a PULL-type input. Every PULL type input needs exactly two source URLs for redundancy. Only specify sources for PULL type Inputs. Leave Destinations empty.
/// - On success, responds with [`UpdateInputOutput`](crate::output::UpdateInputOutput) with field(s):
    ///   - [`input(Option<Input>)`](crate::output::UpdateInputOutput::input): The updated input.
/// - On failure, responds with [`SdkError<UpdateInputError>`](crate::error::UpdateInputError)
pub fn update_input(&self) -> fluent_builders::UpdateInput<C, M, R> {
fluent_builders::UpdateInput::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateInputDevice`](crate::client::fluent_builders::UpdateInputDevice) operation.
///
/// - The fluent builder is configurable:
/// - [`hd_device_settings(InputDeviceConfigurableSettings)`](crate::client::fluent_builders::UpdateInputDevice::hd_device_settings) / [`set_hd_device_settings(Option<InputDeviceConfigurableSettings>)`](crate::client::fluent_builders::UpdateInputDevice::set_hd_device_settings): The settings that you want to apply to the HD input device.
/// - [`input_device_id(impl Into<String>)`](crate::client::fluent_builders::UpdateInputDevice::input_device_id) / [`set_input_device_id(Option<String>)`](crate::client::fluent_builders::UpdateInputDevice::set_input_device_id): The unique ID of the input device. For example, hd-123456789abcdef.
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::UpdateInputDevice::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::UpdateInputDevice::set_name): The name that you assigned to this input device (not the unique ID).
/// - [`uhd_device_settings(InputDeviceConfigurableSettings)`](crate::client::fluent_builders::UpdateInputDevice::uhd_device_settings) / [`set_uhd_device_settings(Option<InputDeviceConfigurableSettings>)`](crate::client::fluent_builders::UpdateInputDevice::set_uhd_device_settings): The settings that you want to apply to the UHD input device.
/// - On success, responds with [`UpdateInputDeviceOutput`](crate::output::UpdateInputDeviceOutput) with field(s):
/// - [`arn(Option<String>)`](crate::output::UpdateInputDeviceOutput::arn): The unique ARN of the input device.
/// - [`connection_state(Option<InputDeviceConnectionState>)`](crate::output::UpdateInputDeviceOutput::connection_state): The state of the connection between the input device and AWS.
/// - [`device_settings_sync_state(Option<DeviceSettingsSyncState>)`](crate::output::UpdateInputDeviceOutput::device_settings_sync_state): The status of the action to synchronize the device configuration. If you change the configuration of the input device (for example, the maximum bitrate), MediaLive sends the new data to the device. The device might not update itself immediately. SYNCED means the device has updated its configuration. SYNCING means that it has not updated its configuration.
/// - [`device_update_status(Option<DeviceUpdateStatus>)`](crate::output::UpdateInputDeviceOutput::device_update_status): The status of software on the input device.
/// - [`hd_device_settings(Option<InputDeviceHdSettings>)`](crate::output::UpdateInputDeviceOutput::hd_device_settings): Settings that describe an input device that is type HD.
/// - [`id(Option<String>)`](crate::output::UpdateInputDeviceOutput::id): The unique ID of the input device.
/// - [`mac_address(Option<String>)`](crate::output::UpdateInputDeviceOutput::mac_address): The network MAC address of the input device.
/// - [`name(Option<String>)`](crate::output::UpdateInputDeviceOutput::name): A name that you specify for the input device.
/// - [`network_settings(Option<InputDeviceNetworkSettings>)`](crate::output::UpdateInputDeviceOutput::network_settings): The network settings for the input device.
/// - [`serial_number(Option<String>)`](crate::output::UpdateInputDeviceOutput::serial_number): The unique serial number of the input device.
    ///   - [`r#type(Option<InputDeviceType>)`](crate::output::UpdateInputDeviceOutput::r#type): The type of the input device.
/// - [`uhd_device_settings(Option<InputDeviceUhdSettings>)`](crate::output::UpdateInputDeviceOutput::uhd_device_settings): Settings that describe an input device that is type UHD.
/// - On failure, responds with [`SdkError<UpdateInputDeviceError>`](crate::error::UpdateInputDeviceError)
pub fn update_input_device(&self) -> fluent_builders::UpdateInputDevice<C, M, R> {
fluent_builders::UpdateInputDevice::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateInputSecurityGroup`](crate::client::fluent_builders::UpdateInputSecurityGroup) operation.
///
/// - The fluent builder is configurable:
/// - [`input_security_group_id(impl Into<String>)`](crate::client::fluent_builders::UpdateInputSecurityGroup::input_security_group_id) / [`set_input_security_group_id(Option<String>)`](crate::client::fluent_builders::UpdateInputSecurityGroup::set_input_security_group_id): The id of the Input Security Group to update.
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::UpdateInputSecurityGroup::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::UpdateInputSecurityGroup::set_tags): A collection of key-value pairs.
/// - [`whitelist_rules(Vec<InputWhitelistRuleCidr>)`](crate::client::fluent_builders::UpdateInputSecurityGroup::whitelist_rules) / [`set_whitelist_rules(Option<Vec<InputWhitelistRuleCidr>>)`](crate::client::fluent_builders::UpdateInputSecurityGroup::set_whitelist_rules): List of IPv4 CIDR addresses to whitelist
/// - On success, responds with [`UpdateInputSecurityGroupOutput`](crate::output::UpdateInputSecurityGroupOutput) with field(s):
/// - [`security_group(Option<InputSecurityGroup>)`](crate::output::UpdateInputSecurityGroupOutput::security_group): An Input Security Group
/// - On failure, responds with [`SdkError<UpdateInputSecurityGroupError>`](crate::error::UpdateInputSecurityGroupError)
pub fn update_input_security_group(
&self,
) -> fluent_builders::UpdateInputSecurityGroup<C, M, R> {
fluent_builders::UpdateInputSecurityGroup::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateMultiplex`](crate::client::fluent_builders::UpdateMultiplex) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::UpdateMultiplex::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::UpdateMultiplex::set_multiplex_id): ID of the multiplex to update.
/// - [`multiplex_settings(MultiplexSettings)`](crate::client::fluent_builders::UpdateMultiplex::multiplex_settings) / [`set_multiplex_settings(Option<MultiplexSettings>)`](crate::client::fluent_builders::UpdateMultiplex::set_multiplex_settings): The new settings for a multiplex.
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::UpdateMultiplex::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::UpdateMultiplex::set_name): Name of the multiplex.
/// - On success, responds with [`UpdateMultiplexOutput`](crate::output::UpdateMultiplexOutput) with field(s):
/// - [`multiplex(Option<Multiplex>)`](crate::output::UpdateMultiplexOutput::multiplex): The updated multiplex.
/// - On failure, responds with [`SdkError<UpdateMultiplexError>`](crate::error::UpdateMultiplexError)
pub fn update_multiplex(&self) -> fluent_builders::UpdateMultiplex<C, M, R> {
fluent_builders::UpdateMultiplex::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateMultiplexProgram`](crate::client::fluent_builders::UpdateMultiplexProgram) operation.
///
/// - The fluent builder is configurable:
/// - [`multiplex_id(impl Into<String>)`](crate::client::fluent_builders::UpdateMultiplexProgram::multiplex_id) / [`set_multiplex_id(Option<String>)`](crate::client::fluent_builders::UpdateMultiplexProgram::set_multiplex_id): The ID of the multiplex of the program to update.
/// - [`multiplex_program_settings(MultiplexProgramSettings)`](crate::client::fluent_builders::UpdateMultiplexProgram::multiplex_program_settings) / [`set_multiplex_program_settings(Option<MultiplexProgramSettings>)`](crate::client::fluent_builders::UpdateMultiplexProgram::set_multiplex_program_settings): The new settings for a multiplex program.
/// - [`program_name(impl Into<String>)`](crate::client::fluent_builders::UpdateMultiplexProgram::program_name) / [`set_program_name(Option<String>)`](crate::client::fluent_builders::UpdateMultiplexProgram::set_program_name): The name of the program to update.
/// - On success, responds with [`UpdateMultiplexProgramOutput`](crate::output::UpdateMultiplexProgramOutput) with field(s):
/// - [`multiplex_program(Option<MultiplexProgram>)`](crate::output::UpdateMultiplexProgramOutput::multiplex_program): The updated multiplex program.
/// - On failure, responds with [`SdkError<UpdateMultiplexProgramError>`](crate::error::UpdateMultiplexProgramError)
pub fn update_multiplex_program(&self) -> fluent_builders::UpdateMultiplexProgram<C, M, R> {
fluent_builders::UpdateMultiplexProgram::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateReservation`](crate::client::fluent_builders::UpdateReservation) operation.
///
/// - The fluent builder is configurable:
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::UpdateReservation::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::UpdateReservation::set_name): Name of the reservation
/// - [`reservation_id(impl Into<String>)`](crate::client::fluent_builders::UpdateReservation::reservation_id) / [`set_reservation_id(Option<String>)`](crate::client::fluent_builders::UpdateReservation::set_reservation_id): Unique reservation ID, e.g. '1234567'
/// - On success, responds with [`UpdateReservationOutput`](crate::output::UpdateReservationOutput) with field(s):
/// - [`reservation(Option<Reservation>)`](crate::output::UpdateReservationOutput::reservation): Reserved resources available to use
/// - On failure, responds with [`SdkError<UpdateReservationError>`](crate::error::UpdateReservationError)
pub fn update_reservation(&self) -> fluent_builders::UpdateReservation<C, M, R> {
fluent_builders::UpdateReservation::new(self.handle.clone())
}
}
pub mod fluent_builders {
//!
//! Utilities to ergonomically construct a request to the service.
//!
//! Fluent builders are created through the [`Client`](crate::client::Client) by calling
    //! one of its operation methods. After parameters are set using the builder methods,
//! the `send` method can be called to initiate the request.
//!
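    //! The example below is an illustrative sketch rather than generated documentation. It
    //! assumes the crate is used as `aws_sdk_medialive` and that the shared AWS configuration is
    //! loaded with the `aws-config` crate, as is typical for SDKs of this generation.
    //!
    //! ```no_run
    //! # async fn sketch() -> Result<(), Box<dyn std::error::Error>> {
    //! // Load region and credentials from the environment, then build the client.
    //! let config = aws_config::load_from_env().await;
    //! let client = aws_sdk_medialive::Client::new(&config);
    //!
    //! // Set parameters on the fluent builder, then call `send` to issue the request.
    //! let output = client.list_offerings().send().await?;
    //! println!("{:?}", output);
    //! # Ok(())
    //! # }
    //! ```
    //!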
/// Fluent builder constructing a request to `AcceptInputDeviceTransfer`.
///
/// Accept an incoming input device transfer. The ownership of the device will transfer to your AWS account.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct AcceptInputDeviceTransfer<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::accept_input_device_transfer_input::Builder,
}
impl<C, M, R> AcceptInputDeviceTransfer<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `AcceptInputDeviceTransfer`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
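        ///
        /// The sketch below is illustrative rather than part of the generated documentation. It
        /// assumes the crate is used as `aws_sdk_medialive` and that this SDK generation's
        /// `SdkError` exposes a `ServiceError { err, .. }` variant.
        ///
        /// ```no_run
        /// # async fn sketch(client: aws_sdk_medialive::Client) {
        /// use aws_smithy_http::result::SdkError;
        ///
        /// match client
        ///     .accept_input_device_transfer()
        ///     .input_device_id("hd-123456789abcdef")
        ///     .send()
        ///     .await
        /// {
        ///     Ok(_) => println!("transfer accepted"),
        ///     // The service itself rejected the request (modeled error).
        ///     Err(SdkError::ServiceError { err, .. }) => eprintln!("service error: {}", err),
        ///     // Construction, dispatch, timeout, or response-parsing failures.
        ///     Err(other) => eprintln!("request failed: {}", other),
        /// }
        /// # }
        /// ```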
pub async fn send(
self,
) -> std::result::Result<
crate::output::AcceptInputDeviceTransferOutput,
aws_smithy_http::result::SdkError<crate::error::AcceptInputDeviceTransferError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::AcceptInputDeviceTransferInputOperationOutputAlias,
crate::output::AcceptInputDeviceTransferOutput,
crate::error::AcceptInputDeviceTransferError,
crate::input::AcceptInputDeviceTransferInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The unique ID of the input device to accept. For example, hd-123456789abcdef.
pub fn input_device_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_device_id(input.into());
self
}
/// The unique ID of the input device to accept. For example, hd-123456789abcdef.
pub fn set_input_device_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_device_id(input);
self
}
}
/// Fluent builder constructing a request to `BatchDelete`.
///
/// Starts delete of resources.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct BatchDelete<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::batch_delete_input::Builder,
}
impl<C, M, R> BatchDelete<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `BatchDelete`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::BatchDeleteOutput,
aws_smithy_http::result::SdkError<crate::error::BatchDeleteError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::BatchDeleteInputOperationOutputAlias,
crate::output::BatchDeleteOutput,
crate::error::BatchDeleteError,
crate::input::BatchDeleteInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Appends an item to `ChannelIds`.
///
/// To override the contents of this collection use [`set_channel_ids`](Self::set_channel_ids).
///
/// List of channel IDs
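        ///
        /// Illustrative sketch (not generated documentation): repeated calls to `channel_ids`
        /// append to the list, while `set_channel_ids` replaces it wholesale. The IDs and the
        /// `aws_sdk_medialive` crate name are placeholders/assumptions.
        ///
        /// ```no_run
        /// # fn sketch(client: aws_sdk_medialive::Client) {
        /// // Append one ID per call ...
        /// let _append = client
        ///     .batch_delete()
        ///     .channel_ids("1111111")
        ///     .channel_ids("2222222");
        /// // ... or supply the whole list at once.
        /// let _replace = client
        ///     .batch_delete()
        ///     .set_channel_ids(Some(vec!["1111111".to_string(), "2222222".to_string()]));
        /// # }
        /// ```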
pub fn channel_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_ids(input.into());
self
}
/// List of channel IDs
pub fn set_channel_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_channel_ids(input);
self
}
/// Appends an item to `InputIds`.
///
/// To override the contents of this collection use [`set_input_ids`](Self::set_input_ids).
///
/// List of input IDs
pub fn input_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_ids(input.into());
self
}
/// List of input IDs
pub fn set_input_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_input_ids(input);
self
}
/// Appends an item to `InputSecurityGroupIds`.
///
/// To override the contents of this collection use [`set_input_security_group_ids`](Self::set_input_security_group_ids).
///
/// List of input security group IDs
pub fn input_security_group_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_security_group_ids(input.into());
self
}
/// List of input security group IDs
pub fn set_input_security_group_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_input_security_group_ids(input);
self
}
/// Appends an item to `MultiplexIds`.
///
/// To override the contents of this collection use [`set_multiplex_ids`](Self::set_multiplex_ids).
///
/// List of multiplex IDs
pub fn multiplex_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_ids(input.into());
self
}
/// List of multiplex IDs
pub fn set_multiplex_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_multiplex_ids(input);
self
}
}
/// Fluent builder constructing a request to `BatchStart`.
///
/// Starts existing resources
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct BatchStart<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::batch_start_input::Builder,
}
impl<C, M, R> BatchStart<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `BatchStart`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::BatchStartOutput,
aws_smithy_http::result::SdkError<crate::error::BatchStartError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::BatchStartInputOperationOutputAlias,
crate::output::BatchStartOutput,
crate::error::BatchStartError,
crate::input::BatchStartInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Appends an item to `ChannelIds`.
///
/// To override the contents of this collection use [`set_channel_ids`](Self::set_channel_ids).
///
/// List of channel IDs
pub fn channel_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_ids(input.into());
self
}
/// List of channel IDs
pub fn set_channel_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_channel_ids(input);
self
}
/// Appends an item to `MultiplexIds`.
///
/// To override the contents of this collection use [`set_multiplex_ids`](Self::set_multiplex_ids).
///
/// List of multiplex IDs
pub fn multiplex_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_ids(input.into());
self
}
/// List of multiplex IDs
pub fn set_multiplex_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_multiplex_ids(input);
self
}
}
/// Fluent builder constructing a request to `BatchStop`.
///
/// Stops running resources
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct BatchStop<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::batch_stop_input::Builder,
}
impl<C, M, R> BatchStop<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `BatchStop`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::BatchStopOutput,
aws_smithy_http::result::SdkError<crate::error::BatchStopError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::BatchStopInputOperationOutputAlias,
crate::output::BatchStopOutput,
crate::error::BatchStopError,
crate::input::BatchStopInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Appends an item to `ChannelIds`.
///
/// To override the contents of this collection use [`set_channel_ids`](Self::set_channel_ids).
///
/// List of channel IDs
pub fn channel_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_ids(input.into());
self
}
/// List of channel IDs
pub fn set_channel_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_channel_ids(input);
self
}
/// Appends an item to `MultiplexIds`.
///
/// To override the contents of this collection use [`set_multiplex_ids`](Self::set_multiplex_ids).
///
/// List of multiplex IDs
pub fn multiplex_ids(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_ids(input.into());
self
}
/// List of multiplex IDs
pub fn set_multiplex_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_multiplex_ids(input);
self
}
}
/// Fluent builder constructing a request to `BatchUpdateSchedule`.
///
/// Update a channel schedule
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct BatchUpdateSchedule<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::batch_update_schedule_input::Builder,
}
impl<C, M, R> BatchUpdateSchedule<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `BatchUpdateSchedule`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::BatchUpdateScheduleOutput,
aws_smithy_http::result::SdkError<crate::error::BatchUpdateScheduleError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::BatchUpdateScheduleInputOperationOutputAlias,
crate::output::BatchUpdateScheduleOutput,
crate::error::BatchUpdateScheduleError,
crate::input::BatchUpdateScheduleInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Id of the channel whose schedule is being updated.
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
/// Id of the channel whose schedule is being updated.
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
/// Schedule actions to create in the schedule.
pub fn creates(mut self, input: crate::model::BatchScheduleActionCreateRequest) -> Self {
self.inner = self.inner.creates(input);
self
}
/// Schedule actions to create in the schedule.
pub fn set_creates(
mut self,
input: std::option::Option<crate::model::BatchScheduleActionCreateRequest>,
) -> Self {
self.inner = self.inner.set_creates(input);
self
}
/// Schedule actions to delete from the schedule.
pub fn deletes(mut self, input: crate::model::BatchScheduleActionDeleteRequest) -> Self {
self.inner = self.inner.deletes(input);
self
}
/// Schedule actions to delete from the schedule.
pub fn set_deletes(
mut self,
input: std::option::Option<crate::model::BatchScheduleActionDeleteRequest>,
) -> Self {
self.inner = self.inner.set_deletes(input);
self
}
}
/// Fluent builder constructing a request to `CancelInputDeviceTransfer`.
///
/// Cancel an input device transfer that you have requested.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CancelInputDeviceTransfer<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::cancel_input_device_transfer_input::Builder,
}
impl<C, M, R> CancelInputDeviceTransfer<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CancelInputDeviceTransfer`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CancelInputDeviceTransferOutput,
aws_smithy_http::result::SdkError<crate::error::CancelInputDeviceTransferError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CancelInputDeviceTransferInputOperationOutputAlias,
crate::output::CancelInputDeviceTransferOutput,
crate::error::CancelInputDeviceTransferError,
crate::input::CancelInputDeviceTransferInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The unique ID of the input device to cancel. For example, hd-123456789abcdef.
pub fn input_device_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_device_id(input.into());
self
}
/// The unique ID of the input device to cancel. For example, hd-123456789abcdef.
pub fn set_input_device_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_device_id(input);
self
}
}
/// Fluent builder constructing a request to `ClaimDevice`.
///
/// Send a request to claim an AWS Elemental device that you have purchased from a third-party vendor. After the request succeeds, you will own the device.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ClaimDevice<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::claim_device_input::Builder,
}
impl<C, M, R> ClaimDevice<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ClaimDevice`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ClaimDeviceOutput,
aws_smithy_http::result::SdkError<crate::error::ClaimDeviceError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ClaimDeviceInputOperationOutputAlias,
crate::output::ClaimDeviceOutput,
crate::error::ClaimDeviceError,
crate::input::ClaimDeviceInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The id of the device you want to claim.
pub fn id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.id(input.into());
self
}
/// The id of the device you want to claim.
pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_id(input);
self
}
}
/// Fluent builder constructing a request to `CreateChannel`.
///
/// Creates a new channel
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreateChannel<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::create_channel_input::Builder,
}
impl<C, M, R> CreateChannel<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CreateChannel`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreateChannelOutput,
aws_smithy_http::result::SdkError<crate::error::CreateChannelError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CreateChannelInputOperationOutputAlias,
crate::output::CreateChannelOutput,
crate::error::CreateChannelError,
crate::input::CreateChannelInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Specification of CDI inputs for this channel
pub fn cdi_input_specification(
mut self,
input: crate::model::CdiInputSpecification,
) -> Self {
self.inner = self.inner.cdi_input_specification(input);
self
}
/// Specification of CDI inputs for this channel
pub fn set_cdi_input_specification(
mut self,
input: std::option::Option<crate::model::CdiInputSpecification>,
) -> Self {
self.inner = self.inner.set_cdi_input_specification(input);
self
}
/// The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one pipeline.
pub fn channel_class(mut self, input: crate::model::ChannelClass) -> Self {
self.inner = self.inner.channel_class(input);
self
}
/// The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one pipeline.
pub fn set_channel_class(
mut self,
input: std::option::Option<crate::model::ChannelClass>,
) -> Self {
self.inner = self.inner.set_channel_class(input);
self
}
/// Appends an item to `Destinations`.
///
/// To override the contents of this collection use [`set_destinations`](Self::set_destinations).
///
    /// A list of output destinations for this channel.
pub fn destinations(mut self, input: crate::model::OutputDestination) -> Self {
self.inner = self.inner.destinations(input);
self
}
    /// A list of output destinations for this channel.
pub fn set_destinations(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::OutputDestination>>,
) -> Self {
self.inner = self.inner.set_destinations(input);
self
}
/// Encoder Settings
pub fn encoder_settings(mut self, input: crate::model::EncoderSettings) -> Self {
self.inner = self.inner.encoder_settings(input);
self
}
/// Encoder Settings
pub fn set_encoder_settings(
mut self,
input: std::option::Option<crate::model::EncoderSettings>,
) -> Self {
self.inner = self.inner.set_encoder_settings(input);
self
}
/// Appends an item to `InputAttachments`.
///
/// To override the contents of this collection use [`set_input_attachments`](Self::set_input_attachments).
///
/// List of input attachments for channel.
pub fn input_attachments(mut self, input: crate::model::InputAttachment) -> Self {
self.inner = self.inner.input_attachments(input);
self
}
/// List of input attachments for channel.
pub fn set_input_attachments(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputAttachment>>,
) -> Self {
self.inner = self.inner.set_input_attachments(input);
self
}
/// Specification of network and file inputs for this channel
pub fn input_specification(mut self, input: crate::model::InputSpecification) -> Self {
self.inner = self.inner.input_specification(input);
self
}
/// Specification of network and file inputs for this channel
pub fn set_input_specification(
mut self,
input: std::option::Option<crate::model::InputSpecification>,
) -> Self {
self.inner = self.inner.set_input_specification(input);
self
}
/// The log level to write to CloudWatch Logs.
pub fn log_level(mut self, input: crate::model::LogLevel) -> Self {
self.inner = self.inner.log_level(input);
self
}
/// The log level to write to CloudWatch Logs.
pub fn set_log_level(mut self, input: std::option::Option<crate::model::LogLevel>) -> Self {
self.inner = self.inner.set_log_level(input);
self
}
/// Name of channel.
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// Name of channel.
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// Unique request ID to be specified. This is needed to prevent retries from creating multiple resources.
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.request_id(input.into());
self
}
/// Unique request ID to be specified. This is needed to prevent retries from creating multiple resources.
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_request_id(input);
self
}
/// Deprecated field that's only usable by whitelisted customers.
pub fn reserved(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.reserved(input.into());
self
}
/// Deprecated field that's only usable by whitelisted customers.
pub fn set_reserved(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_reserved(input);
self
}
/// An optional Amazon Resource Name (ARN) of the role to assume when running the Channel.
pub fn role_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.role_arn(input.into());
self
}
/// An optional Amazon Resource Name (ARN) of the role to assume when running the Channel.
pub fn set_role_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_role_arn(input);
self
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// A collection of key-value pairs.
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// A collection of key-value pairs.
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
/// Settings for the VPC outputs
pub fn vpc(mut self, input: crate::model::VpcOutputSettings) -> Self {
self.inner = self.inner.vpc(input);
self
}
/// Settings for the VPC outputs
pub fn set_vpc(
mut self,
input: std::option::Option<crate::model::VpcOutputSettings>,
) -> Self {
self.inner = self.inner.set_vpc(input);
self
}
}
/// Fluent builder constructing a request to `CreateInput`.
///
/// Create an input
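    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the names and URLs are
    /// placeholders, and the `InputType::UrlPull` variant and the
    /// `InputSourceRequest::builder()` field names are assumed from the service model
    /// rather than taken from this file.
    ///
    /// ```ignore
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     use aws_sdk_medialive::model::{InputSourceRequest, InputType};
    ///     // A PULL-type input needs exactly two source URLs for redundancy.
    ///     let resp = client
    ///         .create_input()
    ///         .name("example-pull-input")
    ///         .r#type(InputType::UrlPull)
    ///         .sources(InputSourceRequest::builder().url("https://example.com/primary.m3u8").build())
    ///         .sources(InputSourceRequest::builder().url("https://example.com/backup.m3u8").build())
    ///         .request_id("example-request-id")
    ///         .send()
    ///         .await?;
    ///     println!("{:?}", resp);
    ///     Ok(())
    /// }
    /// ```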
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreateInput<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::create_input_input::Builder,
}
impl<C, M, R> CreateInput<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CreateInput`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreateInputOutput,
aws_smithy_http::result::SdkError<crate::error::CreateInputError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CreateInputInputOperationOutputAlias,
crate::output::CreateInputOutput,
crate::error::CreateInputError,
crate::input::CreateInputInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Appends an item to `Destinations`.
///
/// To override the contents of this collection use [`set_destinations`](Self::set_destinations).
///
/// Destination settings for PUSH type inputs.
pub fn destinations(mut self, input: crate::model::InputDestinationRequest) -> Self {
self.inner = self.inner.destinations(input);
self
}
/// Destination settings for PUSH type inputs.
pub fn set_destinations(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputDestinationRequest>>,
) -> Self {
self.inner = self.inner.set_destinations(input);
self
}
/// Appends an item to `InputDevices`.
///
/// To override the contents of this collection use [`set_input_devices`](Self::set_input_devices).
///
/// Settings for the devices.
pub fn input_devices(mut self, input: crate::model::InputDeviceSettings) -> Self {
self.inner = self.inner.input_devices(input);
self
}
/// Settings for the devices.
pub fn set_input_devices(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputDeviceSettings>>,
) -> Self {
self.inner = self.inner.set_input_devices(input);
self
}
/// Appends an item to `InputSecurityGroups`.
///
/// To override the contents of this collection use [`set_input_security_groups`](Self::set_input_security_groups).
///
/// A list of security groups referenced by IDs to attach to the input.
pub fn input_security_groups(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_security_groups(input.into());
self
}
/// A list of security groups referenced by IDs to attach to the input.
pub fn set_input_security_groups(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_input_security_groups(input);
self
}
/// Appends an item to `MediaConnectFlows`.
///
/// To override the contents of this collection use [`set_media_connect_flows`](Self::set_media_connect_flows).
///
    /// A list of the MediaConnect Flows that you want to use in this input. You can specify as few as one Flow and, presently, as many as two. The only requirement when you have more than one is that each Flow is in a separate Availability Zone, which ensures your EML input is redundant to AZ issues.
pub fn media_connect_flows(mut self, input: crate::model::MediaConnectFlowRequest) -> Self {
self.inner = self.inner.media_connect_flows(input);
self
}
    /// A list of the MediaConnect Flows that you want to use in this input. You can specify as few as one Flow and, presently, as many as two. The only requirement when you have more than one is that each Flow is in a separate Availability Zone, which ensures your EML input is redundant to AZ issues.
pub fn set_media_connect_flows(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::MediaConnectFlowRequest>>,
) -> Self {
self.inner = self.inner.set_media_connect_flows(input);
self
}
/// Name of the input.
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// Name of the input.
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// Unique identifier of the request to ensure the request is handled exactly once in case of retries.
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.request_id(input.into());
self
}
/// Unique identifier of the request to ensure the request is handled exactly once in case of retries.
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_request_id(input);
self
}
/// The Amazon Resource Name (ARN) of the role this input assumes during and after creation.
pub fn role_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.role_arn(input.into());
self
}
/// The Amazon Resource Name (ARN) of the role this input assumes during and after creation.
pub fn set_role_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_role_arn(input);
self
}
/// Appends an item to `Sources`.
///
/// To override the contents of this collection use [`set_sources`](Self::set_sources).
///
/// The source URLs for a PULL-type input. Every PULL type input needs exactly two source URLs for redundancy. Only specify sources for PULL type Inputs. Leave Destinations empty.
pub fn sources(mut self, input: crate::model::InputSourceRequest) -> Self {
self.inner = self.inner.sources(input);
self
}
/// The source URLs for a PULL-type input. Every PULL type input needs exactly two source URLs for redundancy. Only specify sources for PULL type Inputs. Leave Destinations empty.
pub fn set_sources(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputSourceRequest>>,
) -> Self {
self.inner = self.inner.set_sources(input);
self
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// A collection of key-value pairs.
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// A collection of key-value pairs.
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
/// The different types of inputs that AWS Elemental MediaLive supports.
pub fn r#type(mut self, input: crate::model::InputType) -> Self {
self.inner = self.inner.r#type(input);
self
}
/// The different types of inputs that AWS Elemental MediaLive supports.
pub fn set_type(mut self, input: std::option::Option<crate::model::InputType>) -> Self {
self.inner = self.inner.set_type(input);
self
}
/// Settings for a private VPC Input. When this property is specified, the input destination addresses will be created in a VPC rather than with public Internet addresses. This property requires setting the roleArn property on Input creation. Not compatible with the inputSecurityGroups property.
pub fn vpc(mut self, input: crate::model::InputVpcRequest) -> Self {
self.inner = self.inner.vpc(input);
self
}
/// Settings for a private VPC Input. When this property is specified, the input destination addresses will be created in a VPC rather than with public Internet addresses. This property requires setting the roleArn property on Input creation. Not compatible with the inputSecurityGroups property.
pub fn set_vpc(
mut self,
input: std::option::Option<crate::model::InputVpcRequest>,
) -> Self {
self.inner = self.inner.set_vpc(input);
self
}
}
/// Fluent builder constructing a request to `CreateInputSecurityGroup`.
///
    /// Creates an Input Security Group
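    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the CIDR range is a
    /// placeholder, and the `InputWhitelistRuleCidr::builder()` field name is assumed
    /// from the service model rather than taken from this file.
    ///
    /// ```ignore
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     use aws_sdk_medialive::model::InputWhitelistRuleCidr;
    ///     // Whitelist a single IPv4 CIDR range for the new Input Security Group.
    ///     let resp = client
    ///         .create_input_security_group()
    ///         .whitelist_rules(InputWhitelistRuleCidr::builder().cidr("10.0.0.0/16").build())
    ///         .send()
    ///         .await?;
    ///     println!("{:?}", resp);
    ///     Ok(())
    /// }
    /// ```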
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreateInputSecurityGroup<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::create_input_security_group_input::Builder,
}
impl<C, M, R> CreateInputSecurityGroup<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CreateInputSecurityGroup`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreateInputSecurityGroupOutput,
aws_smithy_http::result::SdkError<crate::error::CreateInputSecurityGroupError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CreateInputSecurityGroupInputOperationOutputAlias,
crate::output::CreateInputSecurityGroupOutput,
crate::error::CreateInputSecurityGroupError,
crate::input::CreateInputSecurityGroupInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// A collection of key-value pairs.
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// A collection of key-value pairs.
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
/// Appends an item to `WhitelistRules`.
///
/// To override the contents of this collection use [`set_whitelist_rules`](Self::set_whitelist_rules).
///
/// List of IPv4 CIDR addresses to whitelist
pub fn whitelist_rules(mut self, input: crate::model::InputWhitelistRuleCidr) -> Self {
self.inner = self.inner.whitelist_rules(input);
self
}
/// List of IPv4 CIDR addresses to whitelist
pub fn set_whitelist_rules(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputWhitelistRuleCidr>>,
) -> Self {
self.inner = self.inner.set_whitelist_rules(input);
self
}
}
/// Fluent builder constructing a request to `CreateMultiplex`.
///
/// Create a new multiplex.
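    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the zone names, bitrate,
    /// and IDs are placeholders, and the `MultiplexSettings::builder()` field names are
    /// assumed from the service model rather than taken from this file.
    ///
    /// ```ignore
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     use aws_sdk_medialive::model::MultiplexSettings;
    ///     // A multiplex requires exactly two availability zones.
    ///     let resp = client
    ///         .create_multiplex()
    ///         .availability_zones("us-west-2a")
    ///         .availability_zones("us-west-2b")
    ///         .multiplex_settings(
    ///             MultiplexSettings::builder()
    ///                 .transport_stream_bitrate(1_000_000)
    ///                 .transport_stream_id(1)
    ///                 .build(),
    ///         )
    ///         .name("example-multiplex")
    ///         .request_id("example-request-id")
    ///         .send()
    ///         .await?;
    ///     println!("{:?}", resp);
    ///     Ok(())
    /// }
    /// ```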
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreateMultiplex<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::create_multiplex_input::Builder,
}
impl<C, M, R> CreateMultiplex<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CreateMultiplex`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreateMultiplexOutput,
aws_smithy_http::result::SdkError<crate::error::CreateMultiplexError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CreateMultiplexInputOperationOutputAlias,
crate::output::CreateMultiplexOutput,
crate::error::CreateMultiplexError,
crate::input::CreateMultiplexInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Appends an item to `AvailabilityZones`.
///
/// To override the contents of this collection use [`set_availability_zones`](Self::set_availability_zones).
///
/// A list of availability zones for the multiplex. You must specify exactly two.
pub fn availability_zones(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.availability_zones(input.into());
self
}
/// A list of availability zones for the multiplex. You must specify exactly two.
pub fn set_availability_zones(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_availability_zones(input);
self
}
/// Configuration for a multiplex event.
pub fn multiplex_settings(mut self, input: crate::model::MultiplexSettings) -> Self {
self.inner = self.inner.multiplex_settings(input);
self
}
/// Configuration for a multiplex event.
pub fn set_multiplex_settings(
mut self,
input: std::option::Option<crate::model::MultiplexSettings>,
) -> Self {
self.inner = self.inner.set_multiplex_settings(input);
self
}
/// Name of multiplex.
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// Name of multiplex.
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// Unique request ID. This prevents retries from creating multiple resources.
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.request_id(input.into());
self
}
/// Unique request ID. This prevents retries from creating multiple resources.
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_request_id(input);
self
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// A collection of key-value pairs.
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// A collection of key-value pairs.
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
}
/// Fluent builder constructing a request to `CreateMultiplexProgram`.
///
/// Create a new program in the multiplex.
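    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the IDs are placeholders,
    /// and the `MultiplexProgramSettings::builder()` field name is assumed from the
    /// service model rather than taken from this file.
    ///
    /// ```ignore
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     use aws_sdk_medialive::model::MultiplexProgramSettings;
    ///     // Add a program to an existing multiplex.
    ///     let resp = client
    ///         .create_multiplex_program()
    ///         .multiplex_id("1234567")
    ///         .program_name("example-program")
    ///         .multiplex_program_settings(
    ///             MultiplexProgramSettings::builder().program_number(1).build(),
    ///         )
    ///         .request_id("example-request-id")
    ///         .send()
    ///         .await?;
    ///     println!("{:?}", resp);
    ///     Ok(())
    /// }
    /// ```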
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreateMultiplexProgram<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::create_multiplex_program_input::Builder,
}
impl<C, M, R> CreateMultiplexProgram<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CreateMultiplexProgram`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreateMultiplexProgramOutput,
aws_smithy_http::result::SdkError<crate::error::CreateMultiplexProgramError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CreateMultiplexProgramInputOperationOutputAlias,
crate::output::CreateMultiplexProgramOutput,
crate::error::CreateMultiplexProgramError,
crate::input::CreateMultiplexProgramInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// ID of the multiplex where the program is to be created.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// ID of the multiplex where the program is to be created.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
/// The settings for this multiplex program.
pub fn multiplex_program_settings(
mut self,
input: crate::model::MultiplexProgramSettings,
) -> Self {
self.inner = self.inner.multiplex_program_settings(input);
self
}
/// The settings for this multiplex program.
pub fn set_multiplex_program_settings(
mut self,
input: std::option::Option<crate::model::MultiplexProgramSettings>,
) -> Self {
self.inner = self.inner.set_multiplex_program_settings(input);
self
}
/// Name of multiplex program.
pub fn program_name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.program_name(input.into());
self
}
/// Name of multiplex program.
pub fn set_program_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_program_name(input);
self
}
/// Unique request ID. This prevents retries from creating multiple resources.
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.request_id(input.into());
self
}
/// Unique request ID. This prevents retries from creating multiple resources.
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_request_id(input);
self
}
}
/// Fluent builder constructing a request to `CreatePartnerInput`.
///
/// Create a partner input
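    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the input ID and request
    /// ID are placeholders.
    ///
    /// ```no_run
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     // Create a partner input for the existing input identified by input_id.
    ///     let resp = client
    ///         .create_partner_input()
    ///         .input_id("1234567")
    ///         .request_id("example-request-id")
    ///         .send()
    ///         .await?;
    ///     println!("{:?}", resp);
    ///     Ok(())
    /// }
    /// ```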
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreatePartnerInput<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::create_partner_input_input::Builder,
}
impl<C, M, R> CreatePartnerInput<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CreatePartnerInput`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreatePartnerInputOutput,
aws_smithy_http::result::SdkError<crate::error::CreatePartnerInputError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CreatePartnerInputInputOperationOutputAlias,
crate::output::CreatePartnerInputOutput,
crate::error::CreatePartnerInputError,
crate::input::CreatePartnerInputInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Unique ID of the input.
pub fn input_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_id(input.into());
self
}
/// Unique ID of the input.
pub fn set_input_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_input_id(input);
self
}
/// Unique identifier of the request to ensure the request is handled exactly once in case of retries.
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.request_id(input.into());
self
}
/// Unique identifier of the request to ensure the request is handled exactly once in case of retries.
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_request_id(input);
self
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// A collection of key-value pairs.
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// A collection of key-value pairs.
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
}
/// Fluent builder constructing a request to `CreateTags`.
///
/// Create tags for a resource
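    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the ARN and tag values are
    /// placeholders.
    ///
    /// ```no_run
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     // Each call to `tags` adds one key-value pair to the request.
    ///     client
    ///         .create_tags()
    ///         .resource_arn("arn:aws:medialive:us-west-2:123456789012:channel:1234567")
    ///         .tags("team", "video")
    ///         .tags("stage", "prod")
    ///         .send()
    ///         .await?;
    ///     Ok(())
    /// }
    /// ```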
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreateTags<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::create_tags_input::Builder,
}
impl<C, M, R> CreateTags<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `CreateTags`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreateTagsOutput,
aws_smithy_http::result::SdkError<crate::error::CreateTagsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::CreateTagsInputOperationOutputAlias,
crate::output::CreateTagsOutput,
crate::error::CreateTagsError,
crate::input::CreateTagsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
    /// The Amazon Resource Name (ARN) of the resource to tag.
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_arn(input.into());
self
}
    /// The Amazon Resource Name (ARN) of the resource to tag.
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resource_arn(input);
self
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
    /// A collection of key-value pairs to apply to the resource.
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
    /// A collection of key-value pairs to apply to the resource.
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
}
/// Fluent builder constructing a request to `DeleteChannel`.
///
    /// Starts deletion of the channel. The associated outputs are also deleted.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteChannel<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_channel_input::Builder,
}
impl<C, M, R> DeleteChannel<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteChannel`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteChannelOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteChannelError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteChannelInputOperationOutputAlias,
crate::output::DeleteChannelOutput,
crate::error::DeleteChannelError,
crate::input::DeleteChannelInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Unique ID of the channel.
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
/// Unique ID of the channel.
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
}
/// Fluent builder constructing a request to `DeleteInput`.
///
    /// Deletes the input endpoint
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteInput<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_input_input::Builder,
}
impl<C, M, R> DeleteInput<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteInput`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteInputOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteInputError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteInputInputOperationOutputAlias,
crate::output::DeleteInputOutput,
crate::error::DeleteInputError,
crate::input::DeleteInputInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Unique ID of the input
pub fn input_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_id(input.into());
self
}
/// Unique ID of the input
pub fn set_input_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_input_id(input);
self
}
}
/// Fluent builder constructing a request to `DeleteInputSecurityGroup`.
///
/// Deletes an Input Security Group
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteInputSecurityGroup<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_input_security_group_input::Builder,
}
impl<C, M, R> DeleteInputSecurityGroup<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteInputSecurityGroup`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteInputSecurityGroupOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteInputSecurityGroupError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteInputSecurityGroupInputOperationOutputAlias,
crate::output::DeleteInputSecurityGroupOutput,
crate::error::DeleteInputSecurityGroupError,
crate::input::DeleteInputSecurityGroupInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The Input Security Group to delete
pub fn input_security_group_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_security_group_id(input.into());
self
}
/// The Input Security Group to delete
pub fn set_input_security_group_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_security_group_id(input);
self
}
}
/// Fluent builder constructing a request to `DeleteMultiplex`.
///
/// Delete a multiplex. The multiplex must be idle.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteMultiplex<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_multiplex_input::Builder,
}
impl<C, M, R> DeleteMultiplex<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteMultiplex`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteMultiplexOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteMultiplexError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteMultiplexInputOperationOutputAlias,
crate::output::DeleteMultiplexOutput,
crate::error::DeleteMultiplexError,
crate::input::DeleteMultiplexInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The ID of the multiplex.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
}
/// Fluent builder constructing a request to `DeleteMultiplexProgram`.
///
/// Delete a program from a multiplex.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteMultiplexProgram<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_multiplex_program_input::Builder,
}
impl<C, M, R> DeleteMultiplexProgram<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteMultiplexProgram`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteMultiplexProgramOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteMultiplexProgramError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteMultiplexProgramInputOperationOutputAlias,
crate::output::DeleteMultiplexProgramOutput,
crate::error::DeleteMultiplexProgramError,
crate::input::DeleteMultiplexProgramInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The ID of the multiplex that the program belongs to.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex that the program belongs to.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
/// The multiplex program name.
pub fn program_name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.program_name(input.into());
self
}
/// The multiplex program name.
pub fn set_program_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_program_name(input);
self
}
}
/// Fluent builder constructing a request to `DeleteReservation`.
///
/// Delete an expired reservation.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteReservation<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_reservation_input::Builder,
}
impl<C, M, R> DeleteReservation<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteReservation`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteReservationOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteReservationError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteReservationInputOperationOutputAlias,
crate::output::DeleteReservationOutput,
crate::error::DeleteReservationError,
crate::input::DeleteReservationInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Unique reservation ID, e.g. '1234567'
pub fn reservation_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.reservation_id(input.into());
self
}
/// Unique reservation ID, e.g. '1234567'
pub fn set_reservation_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_reservation_id(input);
self
}
}
/// Fluent builder constructing a request to `DeleteSchedule`.
///
/// Delete all schedule actions on a channel.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteSchedule<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_schedule_input::Builder,
}
impl<C, M, R> DeleteSchedule<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteSchedule`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteScheduleOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteScheduleError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteScheduleInputOperationOutputAlias,
crate::output::DeleteScheduleOutput,
crate::error::DeleteScheduleError,
crate::input::DeleteScheduleInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Id of the channel whose schedule is being deleted.
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
/// Id of the channel whose schedule is being deleted.
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
}
/// Fluent builder constructing a request to `DeleteTags`.
///
/// Removes tags for a resource
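    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the ARN and tag keys are
    /// placeholders.
    ///
    /// ```no_run
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     // Each call to `tag_keys` appends one key to the list of tags to remove.
    ///     client
    ///         .delete_tags()
    ///         .resource_arn("arn:aws:medialive:us-west-2:123456789012:channel:1234567")
    ///         .tag_keys("team")
    ///         .tag_keys("stage")
    ///         .send()
    ///         .await?;
    ///     Ok(())
    /// }
    /// ```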
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteTags<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::delete_tags_input::Builder,
}
impl<C, M, R> DeleteTags<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DeleteTags`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteTagsOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteTagsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DeleteTagsInputOperationOutputAlias,
crate::output::DeleteTagsOutput,
crate::error::DeleteTagsError,
crate::input::DeleteTagsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
    /// The Amazon Resource Name (ARN) of the resource whose tags you want to remove.
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_arn(input.into());
self
}
    /// The Amazon Resource Name (ARN) of the resource whose tags you want to remove.
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resource_arn(input);
self
}
/// Appends an item to `TagKeys`.
///
/// To override the contents of this collection use [`set_tag_keys`](Self::set_tag_keys).
///
/// An array of tag keys to delete
pub fn tag_keys(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.tag_keys(input.into());
self
}
/// An array of tag keys to delete
pub fn set_tag_keys(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_tag_keys(input);
self
}
}
/// Fluent builder constructing a request to `DescribeChannel`.
///
/// Gets details about a channel
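    ///
    /// # Example
    ///
    /// A minimal sketch of matching on the returned `SdkError`, not part of the
    /// generated documentation. It assumes a configured `aws_sdk_medialive::Client`
    /// named `client`, a placeholder channel ID, and the `SdkError` variant shape of the
    /// `aws_smithy_http` version this crate was generated against.
    ///
    /// ```ignore
    /// async fn example(client: &aws_sdk_medialive::Client) {
    ///     use aws_smithy_http::result::SdkError;
    ///     match client.describe_channel().channel_id("1234567").send().await {
    ///         Ok(output) => println!("channel: {:?}", output),
    ///         // The service rejected the request; `err` is a DescribeChannelError.
    ///         Err(SdkError::ServiceError { err, .. }) => eprintln!("service error: {}", err),
    ///         // Construction, dispatch, timeout, or response-parsing failures.
    ///         Err(other) => eprintln!("request failed: {}", other),
    ///     }
    /// }
    /// ```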
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeChannel<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_channel_input::Builder,
}
impl<C, M, R> DescribeChannel<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeChannel`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeChannelOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeChannelError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeChannelInputOperationOutputAlias,
crate::output::DescribeChannelOutput,
crate::error::DescribeChannelError,
crate::input::DescribeChannelInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
    /// The channel ID
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
    /// The channel ID
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
}
/// Fluent builder constructing a request to `DescribeInput`.
///
/// Produces details about an input
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInput<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_input_input::Builder,
}
impl<C, M, R> DescribeInput<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeInput`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeInputOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeInputError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeInputInputOperationOutputAlias,
crate::output::DescribeInputOutput,
crate::error::DescribeInputError,
crate::input::DescribeInputInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Unique ID of the input
pub fn input_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_id(input.into());
self
}
/// Unique ID of the input
pub fn set_input_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_input_id(input);
self
}
}
/// Fluent builder constructing a request to `DescribeInputDevice`.
///
/// Gets the details for the input device
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInputDevice<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_input_device_input::Builder,
}
impl<C, M, R> DescribeInputDevice<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeInputDevice`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeInputDeviceOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeInputDeviceError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeInputDeviceInputOperationOutputAlias,
crate::output::DescribeInputDeviceOutput,
crate::error::DescribeInputDeviceError,
crate::input::DescribeInputDeviceInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The unique ID of this input device. For example, hd-123456789abcdef.
pub fn input_device_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_device_id(input.into());
self
}
/// The unique ID of this input device. For example, hd-123456789abcdef.
pub fn set_input_device_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_device_id(input);
self
}
}
/// Fluent builder constructing a request to `DescribeInputDeviceThumbnail`.
///
/// Get the latest thumbnail data for the input device.
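    ///
    /// # Example
    ///
    /// A minimal usage sketch, not part of the generated documentation. It assumes a
    /// configured `aws_sdk_medialive::Client` named `client`; the device ID is a
    /// placeholder, and the `AcceptHeader::ImageJpeg` variant name is assumed from the
    /// service model rather than taken from this file.
    ///
    /// ```ignore
    /// async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
    ///     use aws_sdk_medialive::model::AcceptHeader;
    ///     // Request the latest thumbnail for a device as a JPEG.
    ///     let resp = client
    ///         .describe_input_device_thumbnail()
    ///         .input_device_id("hd-123456789abcdef")
    ///         .accept(AcceptHeader::ImageJpeg)
    ///         .send()
    ///         .await?;
    ///     println!("{:?}", resp);
    ///     Ok(())
    /// }
    /// ```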
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInputDeviceThumbnail<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_input_device_thumbnail_input::Builder,
}
impl<C, M, R> DescribeInputDeviceThumbnail<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeInputDeviceThumbnail`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeInputDeviceThumbnailOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeInputDeviceThumbnailError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeInputDeviceThumbnailInputOperationOutputAlias,
crate::output::DescribeInputDeviceThumbnailOutput,
crate::error::DescribeInputDeviceThumbnailError,
crate::input::DescribeInputDeviceThumbnailInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The unique ID of this input device. For example, hd-123456789abcdef.
pub fn input_device_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_device_id(input.into());
self
}
/// The unique ID of this input device. For example, hd-123456789abcdef.
pub fn set_input_device_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_device_id(input);
self
}
/// The HTTP Accept header. Indicates the requested type for the thumbnail.
pub fn accept(mut self, input: crate::model::AcceptHeader) -> Self {
self.inner = self.inner.accept(input);
self
}
/// The HTTP Accept header. Indicates the requested type for the thumbnail.
pub fn set_accept(
mut self,
input: std::option::Option<crate::model::AcceptHeader>,
) -> Self {
self.inner = self.inner.set_accept(input);
self
}
}
/// Fluent builder constructing a request to `DescribeInputSecurityGroup`.
///
/// Produces a summary of an Input Security Group
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInputSecurityGroup<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_input_security_group_input::Builder,
}
impl<C, M, R> DescribeInputSecurityGroup<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeInputSecurityGroup`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeInputSecurityGroupOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeInputSecurityGroupError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeInputSecurityGroupInputOperationOutputAlias,
crate::output::DescribeInputSecurityGroupOutput,
crate::error::DescribeInputSecurityGroupError,
crate::input::DescribeInputSecurityGroupInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The id of the Input Security Group to describe
pub fn input_security_group_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_security_group_id(input.into());
self
}
/// The id of the Input Security Group to describe
pub fn set_input_security_group_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_security_group_id(input);
self
}
}
/// Fluent builder constructing a request to `DescribeMultiplex`.
///
/// Gets details about a multiplex.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMultiplex<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_multiplex_input::Builder,
}
impl<C, M, R> DescribeMultiplex<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeMultiplex`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
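    ///
    /// A rough sketch of matching against the returned error, assuming the
    /// `ServiceError { err, .. }` shape of [`SdkError`](aws_smithy_http::result::SdkError)
    /// and an illustrative multiplex ID:
    ///
    /// ```no_run
    /// # async fn example(client: &crate::Client) {
    /// match client.describe_multiplex().multiplex_id("1234567").send().await {
    ///     Ok(output) => println!("{:?}", output),
    ///     // The service returned an error response (for example, the multiplex was not found).
    ///     Err(aws_smithy_http::result::SdkError::ServiceError { err, .. }) => {
    ///         eprintln!("service error: {}", err)
    ///     }
    ///     // The request never produced a service response (construction, dispatch, timeout, ...).
    ///     Err(other) => eprintln!("request failed: {:?}", other),
    /// }
    /// # }
    /// ```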
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeMultiplexOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeMultiplexError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeMultiplexInputOperationOutputAlias,
crate::output::DescribeMultiplexOutput,
crate::error::DescribeMultiplexError,
crate::input::DescribeMultiplexInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The ID of the multiplex.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
}
/// Fluent builder constructing a request to `DescribeMultiplexProgram`.
///
/// Get the details for a program in a multiplex.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMultiplexProgram<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_multiplex_program_input::Builder,
}
impl<C, M, R> DescribeMultiplexProgram<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeMultiplexProgram`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeMultiplexProgramOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeMultiplexProgramError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeMultiplexProgramInputOperationOutputAlias,
crate::output::DescribeMultiplexProgramOutput,
crate::error::DescribeMultiplexProgramError,
crate::input::DescribeMultiplexProgramInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The ID of the multiplex that the program belongs to.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex that the program belongs to.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
/// The name of the program.
pub fn program_name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.program_name(input.into());
self
}
/// The name of the program.
pub fn set_program_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_program_name(input);
self
}
}
/// Fluent builder constructing a request to `DescribeOffering`.
///
/// Get details for an offering.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeOffering<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_offering_input::Builder,
}
impl<C, M, R> DescribeOffering<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeOffering`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeOfferingOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeOfferingError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeOfferingInputOperationOutputAlias,
crate::output::DescribeOfferingOutput,
crate::error::DescribeOfferingError,
crate::input::DescribeOfferingInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Unique offering ID, e.g. '87654321'
pub fn offering_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.offering_id(input.into());
self
}
/// Unique offering ID, e.g. '87654321'
pub fn set_offering_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_offering_id(input);
self
}
}
/// Fluent builder constructing a request to `DescribeReservation`.
///
/// Get details for a reservation.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeReservation<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_reservation_input::Builder,
}
impl<C, M, R> DescribeReservation<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeReservation`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeReservationOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeReservationError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeReservationInputOperationOutputAlias,
crate::output::DescribeReservationOutput,
crate::error::DescribeReservationError,
crate::input::DescribeReservationInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Unique reservation ID, e.g. '1234567'
pub fn reservation_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.reservation_id(input.into());
self
}
/// Unique reservation ID, e.g. '1234567'
pub fn set_reservation_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_reservation_id(input);
self
}
}
/// Fluent builder constructing a request to `DescribeSchedule`.
///
/// Get a channel schedule
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DescribeSchedule<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::describe_schedule_input::Builder,
}
impl<C, M, R> DescribeSchedule<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `DescribeSchedule`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DescribeScheduleOutput,
aws_smithy_http::result::SdkError<crate::error::DescribeScheduleError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::DescribeScheduleInputOperationOutputAlias,
crate::output::DescribeScheduleOutput,
crate::error::DescribeScheduleError,
crate::input::DescribeScheduleInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::DescribeSchedulePaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::DescribeSchedulePaginator<C, M, R> {
crate::paginator::DescribeSchedulePaginator::new(self.handle, self.inner)
}
    /// Id of the channel whose schedule is being described.
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
    /// Id of the channel whose schedule is being described.
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
}
/// Fluent builder constructing a request to `ListChannels`.
///
/// Produces a list of channels that have been created.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListChannels<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_channels_input::Builder,
}
impl<C, M, R> ListChannels<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListChannels`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListChannelsOutput,
aws_smithy_http::result::SdkError<crate::error::ListChannelsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListChannelsInputOperationOutputAlias,
crate::output::ListChannelsOutput,
crate::error::ListChannelsError,
crate::input::ListChannelsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListChannelsPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
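    ///
    /// A sketch of draining the paginated stream, assuming a configured
    /// [`Client`](crate::Client) and the `tokio_stream` crate for the
    /// [`StreamExt::next`](tokio_stream::StreamExt::next) adapter:
    ///
    /// ```no_run
    /// # async fn example(client: &crate::Client) -> Result<(), Box<dyn std::error::Error>> {
    /// use tokio_stream::StreamExt;
    /// // Each item yielded by the stream is one page of `ListChannelsOutput`.
    /// let mut pages = client.list_channels().max_results(50).into_paginator().send();
    /// while let Some(page) = pages.next().await {
    ///     println!("{:?}", page?);
    /// }
    /// # Ok(())
    /// # }
    /// ```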
pub fn into_paginator(self) -> crate::paginator::ListChannelsPaginator<C, M, R> {
crate::paginator::ListChannelsPaginator::new(self.handle, self.inner)
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
}
/// Fluent builder constructing a request to `ListInputDevices`.
///
/// List input devices
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListInputDevices<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_input_devices_input::Builder,
}
impl<C, M, R> ListInputDevices<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListInputDevices`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListInputDevicesOutput,
aws_smithy_http::result::SdkError<crate::error::ListInputDevicesError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListInputDevicesInputOperationOutputAlias,
crate::output::ListInputDevicesOutput,
crate::error::ListInputDevicesError,
crate::input::ListInputDevicesInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListInputDevicesPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::ListInputDevicesPaginator<C, M, R> {
crate::paginator::ListInputDevicesPaginator::new(self.handle, self.inner)
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
}
/// Fluent builder constructing a request to `ListInputDeviceTransfers`.
///
/// List input devices that are currently being transferred: input devices that you are transferring from your AWS account, or input devices that another AWS account is transferring to you.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListInputDeviceTransfers<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_input_device_transfers_input::Builder,
}
impl<C, M, R> ListInputDeviceTransfers<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListInputDeviceTransfers`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListInputDeviceTransfersOutput,
aws_smithy_http::result::SdkError<crate::error::ListInputDeviceTransfersError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListInputDeviceTransfersInputOperationOutputAlias,
crate::output::ListInputDeviceTransfersOutput,
crate::error::ListInputDeviceTransfersError,
crate::input::ListInputDeviceTransfersInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListInputDeviceTransfersPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(
self,
) -> crate::paginator::ListInputDeviceTransfersPaginator<C, M, R> {
crate::paginator::ListInputDeviceTransfersPaginator::new(self.handle, self.inner)
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
    /// The type of transfer to filter the results by.
pub fn transfer_type(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.transfer_type(input.into());
self
}
    /// The type of transfer to filter the results by.
pub fn set_transfer_type(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_transfer_type(input);
self
}
}
/// Fluent builder constructing a request to `ListInputs`.
///
/// Produces a list of inputs that have been created.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListInputs<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_inputs_input::Builder,
}
impl<C, M, R> ListInputs<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListInputs`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListInputsOutput,
aws_smithy_http::result::SdkError<crate::error::ListInputsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListInputsInputOperationOutputAlias,
crate::output::ListInputsOutput,
crate::error::ListInputsError,
crate::input::ListInputsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListInputsPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::ListInputsPaginator<C, M, R> {
crate::paginator::ListInputsPaginator::new(self.handle, self.inner)
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
}
/// Fluent builder constructing a request to `ListInputSecurityGroups`.
///
/// Produces a list of Input Security Groups for an account
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListInputSecurityGroups<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_input_security_groups_input::Builder,
}
impl<C, M, R> ListInputSecurityGroups<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListInputSecurityGroups`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListInputSecurityGroupsOutput,
aws_smithy_http::result::SdkError<crate::error::ListInputSecurityGroupsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListInputSecurityGroupsInputOperationOutputAlias,
crate::output::ListInputSecurityGroupsOutput,
crate::error::ListInputSecurityGroupsError,
crate::input::ListInputSecurityGroupsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListInputSecurityGroupsPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::ListInputSecurityGroupsPaginator<C, M, R> {
crate::paginator::ListInputSecurityGroupsPaginator::new(self.handle, self.inner)
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
}
/// Fluent builder constructing a request to `ListMultiplexes`.
///
/// Retrieve a list of the existing multiplexes.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListMultiplexes<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_multiplexes_input::Builder,
}
impl<C, M, R> ListMultiplexes<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListMultiplexes`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListMultiplexesOutput,
aws_smithy_http::result::SdkError<crate::error::ListMultiplexesError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListMultiplexesInputOperationOutputAlias,
crate::output::ListMultiplexesOutput,
crate::error::ListMultiplexesError,
crate::input::ListMultiplexesInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListMultiplexesPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::ListMultiplexesPaginator<C, M, R> {
crate::paginator::ListMultiplexesPaginator::new(self.handle, self.inner)
}
/// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
/// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
/// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
/// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
}
/// Fluent builder constructing a request to `ListMultiplexPrograms`.
///
/// List the programs that currently exist for a specific multiplex.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListMultiplexPrograms<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_multiplex_programs_input::Builder,
}
impl<C, M, R> ListMultiplexPrograms<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListMultiplexPrograms`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListMultiplexProgramsOutput,
aws_smithy_http::result::SdkError<crate::error::ListMultiplexProgramsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListMultiplexProgramsInputOperationOutputAlias,
crate::output::ListMultiplexProgramsOutput,
crate::error::ListMultiplexProgramsError,
crate::input::ListMultiplexProgramsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListMultiplexProgramsPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::ListMultiplexProgramsPaginator<C, M, R> {
crate::paginator::ListMultiplexProgramsPaginator::new(self.handle, self.inner)
}
/// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
/// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
/// The ID of the multiplex that the programs belong to.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex that the programs belong to.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
/// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
/// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
}
/// Fluent builder constructing a request to `ListOfferings`.
///
/// List offerings available for purchase.
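///
/// A sketch that chains several of the documented filters; the filter values are the
/// examples given in the individual setter docs and `client` is assumed to be configured:
///
/// ```no_run
/// # async fn example(client: &crate::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Narrow the offering list to standard-class, AVC, HD offerings up to 20 Mbps.
/// let offerings = client
///     .list_offerings()
///     .channel_class("STANDARD")
///     .codec("AVC")
///     .maximum_bitrate("MAX_20_MBPS")
///     .resolution("HD")
///     .send()
///     .await?;
/// println!("{:?}", offerings);
/// # Ok(())
/// # }
/// ```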
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListOfferings<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_offerings_input::Builder,
}
impl<C, M, R> ListOfferings<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListOfferings`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListOfferingsOutput,
aws_smithy_http::result::SdkError<crate::error::ListOfferingsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListOfferingsInputOperationOutputAlias,
crate::output::ListOfferingsOutput,
crate::error::ListOfferingsError,
crate::input::ListOfferingsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListOfferingsPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::ListOfferingsPaginator<C, M, R> {
crate::paginator::ListOfferingsPaginator::new(self.handle, self.inner)
}
/// Filter by channel class, 'STANDARD' or 'SINGLE_PIPELINE'
pub fn channel_class(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_class(input.into());
self
}
/// Filter by channel class, 'STANDARD' or 'SINGLE_PIPELINE'
pub fn set_channel_class(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_channel_class(input);
self
}
/// Filter to offerings that match the configuration of an existing channel, e.g. '2345678' (a channel ID)
pub fn channel_configuration(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_configuration(input.into());
self
}
/// Filter to offerings that match the configuration of an existing channel, e.g. '2345678' (a channel ID)
pub fn set_channel_configuration(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_channel_configuration(input);
self
}
/// Filter by codec, 'AVC', 'HEVC', 'MPEG2', 'AUDIO', or 'LINK'
pub fn codec(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.codec(input.into());
self
}
/// Filter by codec, 'AVC', 'HEVC', 'MPEG2', 'AUDIO', or 'LINK'
pub fn set_codec(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_codec(input);
self
}
/// Filter by offering duration, e.g. '12'
pub fn duration(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.duration(input.into());
self
}
/// Filter by offering duration, e.g. '12'
pub fn set_duration(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_duration(input);
self
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
/// Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'
pub fn maximum_bitrate(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.maximum_bitrate(input.into());
self
}
/// Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'
pub fn set_maximum_bitrate(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_maximum_bitrate(input);
self
}
/// Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'
pub fn maximum_framerate(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.maximum_framerate(input.into());
self
}
/// Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'
pub fn set_maximum_framerate(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_maximum_framerate(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
/// Filter by resolution, 'SD', 'HD', 'FHD', or 'UHD'
pub fn resolution(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resolution(input.into());
self
}
/// Filter by resolution, 'SD', 'HD', 'FHD', or 'UHD'
pub fn set_resolution(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resolution(input);
self
}
/// Filter by resource type, 'INPUT', 'OUTPUT', 'MULTIPLEX', or 'CHANNEL'
pub fn resource_type(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_type(input.into());
self
}
/// Filter by resource type, 'INPUT', 'OUTPUT', 'MULTIPLEX', or 'CHANNEL'
pub fn set_resource_type(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_resource_type(input);
self
}
/// Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'
pub fn special_feature(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.special_feature(input.into());
self
}
/// Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'
pub fn set_special_feature(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_special_feature(input);
self
}
/// Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'
pub fn video_quality(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.video_quality(input.into());
self
}
/// Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'
pub fn set_video_quality(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_video_quality(input);
self
}
}
/// Fluent builder constructing a request to `ListReservations`.
///
/// List purchased reservations.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListReservations<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_reservations_input::Builder,
}
impl<C, M, R> ListReservations<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListReservations`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListReservationsOutput,
aws_smithy_http::result::SdkError<crate::error::ListReservationsError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListReservationsInputOperationOutputAlias,
crate::output::ListReservationsOutput,
crate::error::ListReservationsError,
crate::input::ListReservationsInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Create a paginator for this request
///
/// Paginators are used by calling [`send().await`](crate::paginator::ListReservationsPaginator::send) which returns a [`Stream`](tokio_stream::Stream).
pub fn into_paginator(self) -> crate::paginator::ListReservationsPaginator<C, M, R> {
crate::paginator::ListReservationsPaginator::new(self.handle, self.inner)
}
/// Filter by channel class, 'STANDARD' or 'SINGLE_PIPELINE'
pub fn channel_class(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_class(input.into());
self
}
/// Filter by channel class, 'STANDARD' or 'SINGLE_PIPELINE'
pub fn set_channel_class(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_channel_class(input);
self
}
/// Filter by codec, 'AVC', 'HEVC', 'MPEG2', 'AUDIO', or 'LINK'
pub fn codec(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.codec(input.into());
self
}
/// Filter by codec, 'AVC', 'HEVC', 'MPEG2', 'AUDIO', or 'LINK'
pub fn set_codec(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_codec(input);
self
}
    /// The maximum number of items to return.
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
    /// The maximum number of items to return.
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
/// Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'
pub fn maximum_bitrate(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.maximum_bitrate(input.into());
self
}
/// Filter by bitrate, 'MAX_10_MBPS', 'MAX_20_MBPS', or 'MAX_50_MBPS'
pub fn set_maximum_bitrate(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_maximum_bitrate(input);
self
}
/// Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'
pub fn maximum_framerate(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.maximum_framerate(input.into());
self
}
/// Filter by framerate, 'MAX_30_FPS' or 'MAX_60_FPS'
pub fn set_maximum_framerate(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_maximum_framerate(input);
self
}
    /// The token to retrieve the next page of results.
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
    /// The token to retrieve the next page of results.
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
/// Filter by resolution, 'SD', 'HD', 'FHD', or 'UHD'
pub fn resolution(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resolution(input.into());
self
}
/// Filter by resolution, 'SD', 'HD', 'FHD', or 'UHD'
pub fn set_resolution(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resolution(input);
self
}
/// Filter by resource type, 'INPUT', 'OUTPUT', 'MULTIPLEX', or 'CHANNEL'
pub fn resource_type(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_type(input.into());
self
}
/// Filter by resource type, 'INPUT', 'OUTPUT', 'MULTIPLEX', or 'CHANNEL'
pub fn set_resource_type(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_resource_type(input);
self
}
/// Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'
pub fn special_feature(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.special_feature(input.into());
self
}
/// Filter by special feature, 'ADVANCED_AUDIO' or 'AUDIO_NORMALIZATION'
pub fn set_special_feature(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_special_feature(input);
self
}
/// Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'
pub fn video_quality(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.video_quality(input.into());
self
}
/// Filter by video quality, 'STANDARD', 'ENHANCED', or 'PREMIUM'
pub fn set_video_quality(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_video_quality(input);
self
}
}
/// Fluent builder constructing a request to `ListTagsForResource`.
///
/// Produces a list of tags that have been created for a resource.
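///
/// A minimal sketch, assuming a configured [`Client`](crate::Client); the ARN below is
/// an illustrative placeholder, not a real resource:
///
/// ```no_run
/// # async fn example(client: &crate::Client) -> Result<(), Box<dyn std::error::Error>> {
/// let tags = client
///     .list_tags_for_resource()
///     .resource_arn("arn:aws:medialive:us-west-2:123456789012:channel:1234567")
///     .send()
///     .await?;
/// println!("{:?}", tags);
/// # Ok(())
/// # }
/// ```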
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListTagsForResource<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::list_tags_for_resource_input::Builder,
}
impl<C, M, R> ListTagsForResource<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `ListTagsForResource`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListTagsForResourceOutput,
aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListTagsForResourceInputOperationOutputAlias,
crate::output::ListTagsForResourceOutput,
crate::error::ListTagsForResourceError,
crate::input::ListTagsForResourceInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
    /// The Amazon Resource Name (ARN) of the resource whose tags you want to list.
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_arn(input.into());
self
}
    /// The Amazon Resource Name (ARN) of the resource whose tags you want to list.
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resource_arn(input);
self
}
}
/// Fluent builder constructing a request to `PurchaseOffering`.
///
/// Purchase an offering and create a reservation.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct PurchaseOffering<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::purchase_offering_input::Builder,
}
impl<C, M, R> PurchaseOffering<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `PurchaseOffering`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::PurchaseOfferingOutput,
aws_smithy_http::result::SdkError<crate::error::PurchaseOfferingError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::PurchaseOfferingInputOperationOutputAlias,
crate::output::PurchaseOfferingOutput,
crate::error::PurchaseOfferingError,
crate::input::PurchaseOfferingInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Number of resources
pub fn count(mut self, input: i32) -> Self {
self.inner = self.inner.count(input);
self
}
/// Number of resources
pub fn set_count(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_count(input);
self
}
/// Name for the new reservation
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// Name for the new reservation
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// Offering to purchase, e.g. '87654321'
pub fn offering_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.offering_id(input.into());
self
}
/// Offering to purchase, e.g. '87654321'
pub fn set_offering_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_offering_id(input);
self
}
/// Unique request ID to be specified. This is needed to prevent retries from creating multiple resources.
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.request_id(input.into());
self
}
/// Unique request ID to be specified. This is needed to prevent retries from creating multiple resources.
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_request_id(input);
self
}
/// Requested reservation start time (UTC) in ISO-8601 format. The specified time must be between the first day of the current month and one year from now. If no value is given, the default is now.
pub fn start(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.start(input.into());
self
}
/// Requested reservation start time (UTC) in ISO-8601 format. The specified time must be between the first day of the current month and one year from now. If no value is given, the default is now.
pub fn set_start(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_start(input);
self
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// A collection of key-value pairs
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// A collection of key-value pairs
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
}
/// Fluent builder constructing a request to `RejectInputDeviceTransfer`.
///
/// Reject the transfer of the specified input device to your AWS account.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct RejectInputDeviceTransfer<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::reject_input_device_transfer_input::Builder,
}
impl<C, M, R> RejectInputDeviceTransfer<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `RejectInputDeviceTransfer`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::RejectInputDeviceTransferOutput,
aws_smithy_http::result::SdkError<crate::error::RejectInputDeviceTransferError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::RejectInputDeviceTransferInputOperationOutputAlias,
crate::output::RejectInputDeviceTransferOutput,
crate::error::RejectInputDeviceTransferError,
crate::input::RejectInputDeviceTransferInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The unique ID of the input device to reject. For example, hd-123456789abcdef.
pub fn input_device_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_device_id(input.into());
self
}
/// The unique ID of the input device to reject. For example, hd-123456789abcdef.
pub fn set_input_device_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_device_id(input);
self
}
}
/// Fluent builder constructing a request to `StartChannel`.
///
/// Starts an existing channel
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct StartChannel<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::start_channel_input::Builder,
}
impl<C, M, R> StartChannel<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `StartChannel`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::StartChannelOutput,
aws_smithy_http::result::SdkError<crate::error::StartChannelError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::StartChannelInputOperationOutputAlias,
crate::output::StartChannelOutput,
crate::error::StartChannelError,
crate::input::StartChannelInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// A request to start a channel
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
/// A request to start a channel
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
}
/// Fluent builder constructing a request to `StartMultiplex`.
///
/// Start (run) the multiplex. Starting the multiplex does not start the channels. You must explicitly start each channel.
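///
/// A hedged sketch of that two-step workflow; the `start_multiplex()`/`start_channel()`
/// accessors and the example IDs are assumptions, not generated documentation:
///
/// ```no_run
/// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Start the multiplex itself first...
/// client.start_multiplex().multiplex_id("1234567").send().await?;
/// // ...then start each attached channel explicitly; starting the multiplex does not do this.
/// client.start_channel().channel_id("2345678").send().await?;
/// # Ok(())
/// # }
/// ```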
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct StartMultiplex<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::start_multiplex_input::Builder,
}
impl<C, M, R> StartMultiplex<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `StartMultiplex`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::StartMultiplexOutput,
aws_smithy_http::result::SdkError<crate::error::StartMultiplexError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::StartMultiplexInputOperationOutputAlias,
crate::output::StartMultiplexOutput,
crate::error::StartMultiplexError,
crate::input::StartMultiplexInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The ID of the multiplex.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
}
/// Fluent builder constructing a request to `StopChannel`.
///
/// Stops a running channel
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct StopChannel<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::stop_channel_input::Builder,
}
impl<C, M, R> StopChannel<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `StopChannel`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::StopChannelOutput,
aws_smithy_http::result::SdkError<crate::error::StopChannelError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::StopChannelInputOperationOutputAlias,
crate::output::StopChannelOutput,
crate::error::StopChannelError,
crate::input::StopChannelInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// A request to stop a running channel
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
/// A request to stop a running channel
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
}
/// Fluent builder constructing a request to `StopMultiplex`.
///
/// Stops a running multiplex. If the multiplex isn't running, this action has no effect.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct StopMultiplex<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::stop_multiplex_input::Builder,
}
impl<C, M, R> StopMultiplex<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `StopMultiplex`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::StopMultiplexOutput,
aws_smithy_http::result::SdkError<crate::error::StopMultiplexError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::StopMultiplexInputOperationOutputAlias,
crate::output::StopMultiplexOutput,
crate::error::StopMultiplexError,
crate::input::StopMultiplexInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The ID of the multiplex.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
}
/// Fluent builder constructing a request to `TransferInputDevice`.
///
/// Start an input device transfer to another AWS account. After you make the request, the other account must accept or reject the transfer.
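///
/// A usage sketch; the `transfer_input_device()` accessor and the example IDs are
/// assumptions, not generated documentation:
///
/// ```no_run
/// # async fn example(client: &aws_sdk_medialive::Client) -> Result<(), Box<dyn std::error::Error>> {
/// client
///     .transfer_input_device()
///     .input_device_id("hd-123456789abcdef")
///     .target_customer_id("123456789012")                  // 12-digit AWS account ID of the recipient
///     .transfer_message("Transferring to the ops account")  // optional, max 280 characters
///     .send()
///     .await?;
/// // The recipient account must then accept or reject the transfer.
/// # Ok(())
/// # }
/// ```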
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct TransferInputDevice<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::transfer_input_device_input::Builder,
}
impl<C, M, R> TransferInputDevice<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `TransferInputDevice`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::TransferInputDeviceOutput,
aws_smithy_http::result::SdkError<crate::error::TransferInputDeviceError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::TransferInputDeviceInputOperationOutputAlias,
crate::output::TransferInputDeviceOutput,
crate::error::TransferInputDeviceError,
crate::input::TransferInputDeviceInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The unique ID of this input device. For example, hd-123456789abcdef.
pub fn input_device_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_device_id(input.into());
self
}
/// The unique ID of this input device. For example, hd-123456789abcdef.
pub fn set_input_device_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_device_id(input);
self
}
/// The AWS account ID (12 digits) for the recipient of the device transfer.
pub fn target_customer_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.target_customer_id(input.into());
self
}
/// The AWS account ID (12 digits) for the recipient of the device transfer.
pub fn set_target_customer_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_target_customer_id(input);
self
}
/// The target AWS region to transfer the device.
pub fn target_region(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.target_region(input.into());
self
}
/// The target AWS region to transfer the device.
pub fn set_target_region(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_target_region(input);
self
}
/// An optional message for the recipient. Maximum 280 characters.
pub fn transfer_message(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.transfer_message(input.into());
self
}
/// An optional message for the recipient. Maximum 280 characters.
pub fn set_transfer_message(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_transfer_message(input);
self
}
}
/// Fluent builder constructing a request to `UpdateChannel`.
///
/// Updates a channel.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateChannel<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_channel_input::Builder,
}
impl<C, M, R> UpdateChannel<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateChannel`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateChannelOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateChannelError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateChannelInputOperationOutputAlias,
crate::output::UpdateChannelOutput,
crate::error::UpdateChannelError,
crate::input::UpdateChannelInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Specification of CDI inputs for this channel
pub fn cdi_input_specification(
mut self,
input: crate::model::CdiInputSpecification,
) -> Self {
self.inner = self.inner.cdi_input_specification(input);
self
}
/// Specification of CDI inputs for this channel
pub fn set_cdi_input_specification(
mut self,
input: std::option::Option<crate::model::CdiInputSpecification>,
) -> Self {
self.inner = self.inner.set_cdi_input_specification(input);
self
}
/// channel ID
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
/// channel ID
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
/// Appends an item to `Destinations`.
///
/// To override the contents of this collection use [`set_destinations`](Self::set_destinations).
///
/// A list of output destinations for this channel.
pub fn destinations(mut self, input: crate::model::OutputDestination) -> Self {
self.inner = self.inner.destinations(input);
self
}
/// A list of output destinations for this channel.
pub fn set_destinations(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::OutputDestination>>,
) -> Self {
self.inner = self.inner.set_destinations(input);
self
}
/// The encoder settings for this channel.
pub fn encoder_settings(mut self, input: crate::model::EncoderSettings) -> Self {
self.inner = self.inner.encoder_settings(input);
self
}
/// The encoder settings for this channel.
pub fn set_encoder_settings(
mut self,
input: std::option::Option<crate::model::EncoderSettings>,
) -> Self {
self.inner = self.inner.set_encoder_settings(input);
self
}
/// Appends an item to `InputAttachments`.
///
/// To override the contents of this collection use [`set_input_attachments`](Self::set_input_attachments).
///
/// A list of input attachments for this channel.
pub fn input_attachments(mut self, input: crate::model::InputAttachment) -> Self {
self.inner = self.inner.input_attachments(input);
self
}
/// A list of input attachments for this channel.
pub fn set_input_attachments(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputAttachment>>,
) -> Self {
self.inner = self.inner.set_input_attachments(input);
self
}
/// Specification of network and file inputs for this channel
pub fn input_specification(mut self, input: crate::model::InputSpecification) -> Self {
self.inner = self.inner.input_specification(input);
self
}
/// Specification of network and file inputs for this channel
pub fn set_input_specification(
mut self,
input: std::option::Option<crate::model::InputSpecification>,
) -> Self {
self.inner = self.inner.set_input_specification(input);
self
}
/// The log level to write to CloudWatch Logs.
pub fn log_level(mut self, input: crate::model::LogLevel) -> Self {
self.inner = self.inner.log_level(input);
self
}
/// The log level to write to CloudWatch Logs.
pub fn set_log_level(mut self, input: std::option::Option<crate::model::LogLevel>) -> Self {
self.inner = self.inner.set_log_level(input);
self
}
/// The name of the channel.
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// The name of the channel.
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// An optional Amazon Resource Name (ARN) of the role to assume when running the Channel. If you do not specify this on an update call but the role was previously set that role will be removed.
pub fn role_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.role_arn(input.into());
self
}
/// An optional Amazon Resource Name (ARN) of the role to assume when running the Channel. If you do not specify this on an update call but the role was previously set that role will be removed.
pub fn set_role_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_role_arn(input);
self
}
}
/// Fluent builder constructing a request to `UpdateChannelClass`.
///
/// Changes the class of the channel.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateChannelClass<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_channel_class_input::Builder,
}
impl<C, M, R> UpdateChannelClass<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateChannelClass`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateChannelClassOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateChannelClassError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateChannelClassInputOperationOutputAlias,
crate::output::UpdateChannelClassOutput,
crate::error::UpdateChannelClassError,
crate::input::UpdateChannelClassInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The channel class that you wish to update this channel to use.
pub fn channel_class(mut self, input: crate::model::ChannelClass) -> Self {
self.inner = self.inner.channel_class(input);
self
}
/// The channel class that you wish to update this channel to use.
pub fn set_channel_class(
mut self,
input: std::option::Option<crate::model::ChannelClass>,
) -> Self {
self.inner = self.inner.set_channel_class(input);
self
}
/// Channel ID of the channel whose class should be updated.
pub fn channel_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.channel_id(input.into());
self
}
/// Channel ID of the channel whose class should be updated.
pub fn set_channel_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_channel_id(input);
self
}
/// Appends an item to `Destinations`.
///
/// To override the contents of this collection use [`set_destinations`](Self::set_destinations).
///
/// A list of output destinations for this channel.
pub fn destinations(mut self, input: crate::model::OutputDestination) -> Self {
self.inner = self.inner.destinations(input);
self
}
/// A list of output destinations for this channel.
pub fn set_destinations(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::OutputDestination>>,
) -> Self {
self.inner = self.inner.set_destinations(input);
self
}
}
/// Fluent builder constructing a request to `UpdateInput`.
///
/// Updates an input.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateInput<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_input_input::Builder,
}
impl<C, M, R> UpdateInput<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateInput`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateInputOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateInputError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateInputInputOperationOutputAlias,
crate::output::UpdateInputOutput,
crate::error::UpdateInputError,
crate::input::UpdateInputInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Appends an item to `Destinations`.
///
/// To override the contents of this collection use [`set_destinations`](Self::set_destinations).
///
/// Destination settings for PUSH type inputs.
pub fn destinations(mut self, input: crate::model::InputDestinationRequest) -> Self {
self.inner = self.inner.destinations(input);
self
}
/// Destination settings for PUSH type inputs.
pub fn set_destinations(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputDestinationRequest>>,
) -> Self {
self.inner = self.inner.set_destinations(input);
self
}
/// Appends an item to `InputDevices`.
///
/// To override the contents of this collection use [`set_input_devices`](Self::set_input_devices).
///
/// Settings for the devices.
pub fn input_devices(mut self, input: crate::model::InputDeviceRequest) -> Self {
self.inner = self.inner.input_devices(input);
self
}
/// Settings for the devices.
pub fn set_input_devices(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputDeviceRequest>>,
) -> Self {
self.inner = self.inner.set_input_devices(input);
self
}
/// Unique ID of the input.
pub fn input_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_id(input.into());
self
}
/// Unique ID of the input.
pub fn set_input_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_input_id(input);
self
}
/// Appends an item to `InputSecurityGroups`.
///
/// To override the contents of this collection use [`set_input_security_groups`](Self::set_input_security_groups).
///
/// A list of security groups referenced by IDs to attach to the input.
pub fn input_security_groups(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_security_groups(input.into());
self
}
/// A list of security groups referenced by IDs to attach to the input.
pub fn set_input_security_groups(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_input_security_groups(input);
self
}
/// Appends an item to `MediaConnectFlows`.
///
/// To override the contents of this collection use [`set_media_connect_flows`](Self::set_media_connect_flows).
///
/// A list of the MediaConnect Flow ARNs that you want to use as the source of the input. You can specify as few as one Flow and presently, as many as two. The only requirement when you specify more than one Flow is that each Flow is in a separate Availability Zone; this ensures your EML input is redundant to AZ issues.
pub fn media_connect_flows(mut self, input: crate::model::MediaConnectFlowRequest) -> Self {
self.inner = self.inner.media_connect_flows(input);
self
}
/// A list of the MediaConnect Flow ARNs that you want to use as the source of the input. You can specify as few as one Flow and presently, as many as two. The only requirement when you specify more than one Flow is that each Flow is in a separate Availability Zone; this ensures your EML input is redundant to AZ issues.
pub fn set_media_connect_flows(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::MediaConnectFlowRequest>>,
) -> Self {
self.inner = self.inner.set_media_connect_flows(input);
self
}
/// Name of the input.
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// Name of the input.
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// The Amazon Resource Name (ARN) of the role this input assumes during and after creation.
pub fn role_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.role_arn(input.into());
self
}
/// The Amazon Resource Name (ARN) of the role this input assumes during and after creation.
pub fn set_role_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_role_arn(input);
self
}
/// Appends an item to `Sources`.
///
/// To override the contents of this collection use [`set_sources`](Self::set_sources).
///
/// The source URLs for a PULL-type input. Every PULL type input needs exactly two source URLs for redundancy. Only specify sources for PULL type Inputs. Leave Destinations empty.
pub fn sources(mut self, input: crate::model::InputSourceRequest) -> Self {
self.inner = self.inner.sources(input);
self
}
/// The source URLs for a PULL-type input. Every PULL type input needs exactly two source URLs for redundancy. Only specify sources for PULL type Inputs. Leave Destinations empty.
pub fn set_sources(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputSourceRequest>>,
) -> Self {
self.inner = self.inner.set_sources(input);
self
}
}
/// Fluent builder constructing a request to `UpdateInputDevice`.
///
/// Updates the parameters for the input device.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateInputDevice<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_input_device_input::Builder,
}
impl<C, M, R> UpdateInputDevice<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateInputDevice`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateInputDeviceOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateInputDeviceError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateInputDeviceInputOperationOutputAlias,
crate::output::UpdateInputDeviceOutput,
crate::error::UpdateInputDeviceError,
crate::input::UpdateInputDeviceInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The settings that you want to apply to the HD input device.
pub fn hd_device_settings(
mut self,
input: crate::model::InputDeviceConfigurableSettings,
) -> Self {
self.inner = self.inner.hd_device_settings(input);
self
}
/// The settings that you want to apply to the HD input device.
pub fn set_hd_device_settings(
mut self,
input: std::option::Option<crate::model::InputDeviceConfigurableSettings>,
) -> Self {
self.inner = self.inner.set_hd_device_settings(input);
self
}
/// The unique ID of the input device. For example, hd-123456789abcdef.
pub fn input_device_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_device_id(input.into());
self
}
/// The unique ID of the input device. For example, hd-123456789abcdef.
pub fn set_input_device_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_device_id(input);
self
}
/// The name that you assigned to this input device (not the unique ID).
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// The name that you assigned to this input device (not the unique ID).
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// The settings that you want to apply to the UHD input device.
pub fn uhd_device_settings(
mut self,
input: crate::model::InputDeviceConfigurableSettings,
) -> Self {
self.inner = self.inner.uhd_device_settings(input);
self
}
/// The settings that you want to apply to the UHD input device.
pub fn set_uhd_device_settings(
mut self,
input: std::option::Option<crate::model::InputDeviceConfigurableSettings>,
) -> Self {
self.inner = self.inner.set_uhd_device_settings(input);
self
}
}
/// Fluent builder constructing a request to `UpdateInputSecurityGroup`.
///
/// Update an Input Security Group's Whitelists.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateInputSecurityGroup<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_input_security_group_input::Builder,
}
impl<C, M, R> UpdateInputSecurityGroup<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateInputSecurityGroup`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateInputSecurityGroupOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateInputSecurityGroupError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateInputSecurityGroupInputOperationOutputAlias,
crate::output::UpdateInputSecurityGroupOutput,
crate::error::UpdateInputSecurityGroupError,
crate::input::UpdateInputSecurityGroupInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The id of the Input Security Group to update.
pub fn input_security_group_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.input_security_group_id(input.into());
self
}
/// The id of the Input Security Group to update.
pub fn set_input_security_group_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_input_security_group_id(input);
self
}
/// Adds a key-value pair to `Tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// A collection of key-value pairs.
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// A collection of key-value pairs.
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
/// Appends an item to `WhitelistRules`.
///
/// To override the contents of this collection use [`set_whitelist_rules`](Self::set_whitelist_rules).
///
/// List of IPv4 CIDR addresses to whitelist
pub fn whitelist_rules(mut self, input: crate::model::InputWhitelistRuleCidr) -> Self {
self.inner = self.inner.whitelist_rules(input);
self
}
/// List of IPv4 CIDR addresses to whitelist
pub fn set_whitelist_rules(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::InputWhitelistRuleCidr>>,
) -> Self {
self.inner = self.inner.set_whitelist_rules(input);
self
}
}
/// Fluent builder constructing a request to `UpdateMultiplex`.
///
/// Updates a multiplex.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateMultiplex<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_multiplex_input::Builder,
}
impl<C, M, R> UpdateMultiplex<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateMultiplex`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateMultiplexOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateMultiplexError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateMultiplexInputOperationOutputAlias,
crate::output::UpdateMultiplexOutput,
crate::error::UpdateMultiplexError,
crate::input::UpdateMultiplexInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// ID of the multiplex to update.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// ID of the multiplex to update.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
/// The new settings for a multiplex.
pub fn multiplex_settings(mut self, input: crate::model::MultiplexSettings) -> Self {
self.inner = self.inner.multiplex_settings(input);
self
}
/// The new settings for a multiplex.
pub fn set_multiplex_settings(
mut self,
input: std::option::Option<crate::model::MultiplexSettings>,
) -> Self {
self.inner = self.inner.set_multiplex_settings(input);
self
}
/// Name of the multiplex.
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// Name of the multiplex.
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
}
/// Fluent builder constructing a request to `UpdateMultiplexProgram`.
///
/// Update a program in a multiplex.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateMultiplexProgram<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_multiplex_program_input::Builder,
}
impl<C, M, R> UpdateMultiplexProgram<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateMultiplexProgram`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateMultiplexProgramOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateMultiplexProgramError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateMultiplexProgramInputOperationOutputAlias,
crate::output::UpdateMultiplexProgramOutput,
crate::error::UpdateMultiplexProgramError,
crate::input::UpdateMultiplexProgramInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// The ID of the multiplex of the program to update.
pub fn multiplex_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.multiplex_id(input.into());
self
}
/// The ID of the multiplex of the program to update.
pub fn set_multiplex_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_multiplex_id(input);
self
}
/// The new settings for a multiplex program.
pub fn multiplex_program_settings(
mut self,
input: crate::model::MultiplexProgramSettings,
) -> Self {
self.inner = self.inner.multiplex_program_settings(input);
self
}
/// The new settings for a multiplex program.
pub fn set_multiplex_program_settings(
mut self,
input: std::option::Option<crate::model::MultiplexProgramSettings>,
) -> Self {
self.inner = self.inner.set_multiplex_program_settings(input);
self
}
/// The name of the program to update.
pub fn program_name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.program_name(input.into());
self
}
/// The name of the program to update.
pub fn set_program_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_program_name(input);
self
}
}
/// Fluent builder constructing a request to `UpdateReservation`.
///
/// Update reservation.
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateReservation<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<super::Handle<C, M, R>>,
inner: crate::input::update_reservation_input::Builder,
}
impl<C, M, R> UpdateReservation<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Creates a new `UpdateReservation`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateReservationOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateReservationError>,
>
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::UpdateReservationInputOperationOutputAlias,
crate::output::UpdateReservationOutput,
crate::error::UpdateReservationError,
crate::input::UpdateReservationInputOperationRetryAlias,
>,
{
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// Name of the reservation
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// Name of the reservation
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// Unique reservation ID, e.g. '1234567'
pub fn reservation_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.reservation_id(input.into());
self
}
/// Unique reservation ID, e.g. '1234567'
pub fn set_reservation_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_reservation_id(input);
self
}
}
}
impl<C> Client<C, crate::middleware::DefaultMiddleware, aws_smithy_client::retry::Standard> {
/// Creates a client with the given service config and connector override.
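    ///
    /// A sketch of the intent; the `aws_sdk_medialive` paths are assumed, and the generic
    /// connector keeps this hedged — in practice it might be a test double or a custom
    /// HTTPS stack, neither of which is shown in this file:
    ///
    /// ```no_run
    /// # fn example<C>(conf: aws_sdk_medialive::Config, conn: C) -> aws_sdk_medialive::Client<C> {
    /// // Swap in a custom connector while keeping the default middleware and retry policy.
    /// aws_sdk_medialive::Client::from_conf_conn(conf, conn)
    /// # }
    /// ```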
pub fn from_conf_conn(conf: crate::Config, conn: C) -> Self {
let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default();
let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default();
let sleep_impl = conf.sleep_impl.clone();
let mut builder = aws_smithy_client::Builder::new()
.connector(conn)
.middleware(crate::middleware::DefaultMiddleware::new());
builder.set_retry_config(retry_config.into());
builder.set_timeout_config(timeout_config);
if let Some(sleep_impl) = sleep_impl {
builder.set_sleep_impl(Some(sleep_impl));
}
let client = builder.build();
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
}
impl
Client<
aws_smithy_client::erase::DynConnector,
crate::middleware::DefaultMiddleware,
aws_smithy_client::retry::Standard,
>
{
/// Creates a new client from a shared config.
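    ///
    /// A typical construction sketch; the `aws_config` crate and its `load_from_env()`
    /// helper are assumptions here, not part of this module:
    ///
    /// ```no_run
    /// # async fn example() {
    /// let shared_config = aws_config::load_from_env().await;
    /// let client = aws_sdk_medialive::Client::new(&shared_config);
    /// # drop(client);
    /// # }
    /// ```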
#[cfg(any(feature = "rustls", feature = "native-tls"))]
pub fn new(config: &aws_types::config::Config) -> Self {
Self::from_conf(config.into())
}
/// Creates a new client from the service [`Config`](crate::Config).
#[cfg(any(feature = "rustls", feature = "native-tls"))]
pub fn from_conf(conf: crate::Config) -> Self {
let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default();
let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default();
let sleep_impl = conf.sleep_impl.clone();
let mut builder = aws_smithy_client::Builder::dyn_https()
.middleware(crate::middleware::DefaultMiddleware::new());
builder.set_retry_config(retry_config.into());
builder.set_timeout_config(timeout_config);
// the builder maintains a try-state. To avoid suppressing the warning when sleep is unset,
// only set it if we actually have a sleep impl.
if let Some(sleep_impl) = sleep_impl {
builder.set_sleep_impl(Some(sleep_impl));
}
let client = builder.build();
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
}
use crate::proto::services::audience::{
list_teams_response::TeamListItem, DashboardResponse, ListTeamsResponse,
};
use actix_protobuf::ProtoBufResponseBuilder;
use actix_web::{web, Error as AWError, HttpResponse};
use mysql::prelude::*;
pub async fn list_teams(db: web::Data<crate::Pool>) -> Result<HttpResponse, AWError> {
let items = web::block::<_, _, mysql::Error>(move || {
let mut conn = db.get().expect("Failed to checkout database connection");
let teams: Vec<crate::Team> = conn
.query("SELECT * FROM `teams` WHERE `withdrawn` = FALSE ORDER BY `created_at` DESC")?;
let mut items = Vec::new();
for team in teams {
let members: Vec<crate::Contestant> = conn.exec(
"SELECT * FROM `contestants` WHERE `team_id` = ? ORDER BY `created_at`",
(team.id,),
)?;
items.push(TeamListItem {
team_id: team.id,
name: team.name,
is_student: members.iter().all(|c| c.student),
member_names: members
.into_iter()
.map(|c| c.name.unwrap_or_else(|| "".to_owned()))
.collect(),
});
}
Ok(items)
})
.await?;
HttpResponse::Ok().protobuf(ListTeamsResponse { teams: items })
}
pub async fn dashboard(db: web::Data<crate::Pool>) -> Result<HttpResponse, AWError> {
let leaderboard = web::block(move || {
let mut conn = db.get().expect("Failed to checkout database connection");
crate::leaderboard::get_leaderboard(&mut conn, 0)
})
.await?;
HttpResponse::Ok().protobuf(DashboardResponse {
leaderboard: Some(leaderboard),
})
}
use bootloader::bootinfo::{MemoryMap, MemoryRegionType};
use x86_64::{
registers::control::Cr3,
structures::paging::{FrameAllocator, OffsetPageTable, PageTable, PhysFrame, Size4KiB},
PhysAddr, VirtAddr,
};
const PAGE_SIZE: usize = 4096;
/// Initialize a new OffsetPageTable mapper.
///
/// This function is unsafe because the caller must guarantee that the
/// complete physical memory is mapped to virtual memory at the passed
/// `physical_memory_offset`. Also, this function must be only called once
/// to avoid aliasing `&mut` references (which is undefined behavior).
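///
/// A hedged usage sketch: it assumes a `bootloader`-style `BootInfo` (not part of this
/// module) whose `physical_memory_offset` field satisfies the guarantee above.
///
/// ```ignore
/// use x86_64::VirtAddr;
///
/// // Early in kernel initialization, called exactly once.
/// let phys_mem_offset = VirtAddr::new(boot_info.physical_memory_offset);
/// let mut mapper = unsafe { initialize_mapper(phys_mem_offset) };
/// ```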
pub unsafe fn initialize_mapper(physical_memory_offset: VirtAddr) -> OffsetPageTable<'static> {
let (level_4_table_frame, _) = Cr3::read();
let phys_addr = level_4_table_frame.start_address();
let virt_addr = physical_memory_offset + phys_addr.as_u64();
let page_table_ptr: *mut PageTable = virt_addr.as_mut_ptr();
unsafe { OffsetPageTable::new(&mut *page_table_ptr, physical_memory_offset) }
}
/// A FrameAllocator that returns usable frames from the bootloader's memory map.
pub struct BootInfoFrameAllocator {
memory_map: &'static MemoryMap,
next: usize,
}
impl BootInfoFrameAllocator {
/// Create a FrameAllocator from the passed memory map.
///
/// # Safety
/// This constructor is unsafe because the caller must guarantee that the passed
/// memory map is valid. The main requirement is that all frames that are marked
/// as `USABLE` in it are really unused.
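    ///
    /// Hedged sketch, again assuming a `bootloader` `BootInfo` in scope that owns the
    /// `&'static` memory map:
    ///
    /// ```ignore
    /// let mut frame_allocator = unsafe { BootInfoFrameAllocator::new(&boot_info.memory_map) };
    /// ```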
pub unsafe fn new(memory_map: &'static MemoryMap) -> Self {
BootInfoFrameAllocator {
memory_map,
next: 0,
}
}
// Map usable memory regions to physical frames by breaking the address ranges
// into page-sized chunks.
fn usable_frames(&self) -> impl Iterator<Item = PhysFrame> {
self.memory_map
.iter()
.filter(|region| region.region_type == MemoryRegionType::Usable)
.map(|usable_region| usable_region.range.start_addr()..usable_region.range.end_addr())
// Divide regions into page-sized chunks (may leave extra bytes at end of some regions)
.flat_map(|range| range.step_by(PAGE_SIZE))
// Yield physical frames corresponding to start of each chunk
.map(|address| PhysFrame::containing_address(PhysAddr::new(address)))
}
}
unsafe impl FrameAllocator<Size4KiB> for BootInfoFrameAllocator {
fn allocate_frame(&mut self) -> Option<PhysFrame> {
let frame = self.usable_frames().nth(self.next);
self.next += 1;
frame
}
}
//! Unsafe code for accessing system-level counters for memory & CPU usage.
#![deny(
warnings,
rust_2018_idioms,
clippy::disallowed_methods,
unsafe_code
)]
#[cfg(target_os = "linux")]
mod linux;
#[cfg(target_os = "linux")]
pub use self::linux::{blocking_stat, max_fds, ms_per_tick, open_fds, page_size, Stat};
#[cfg(not(target_os = "linux"))]
compile_error!("The system crate requires Linux");
// This file contains code from external sources.
// Attributions: https://github.com/wasmerio/wasmer/blob/master/ATTRIBUTIONS.md
use crate::translator::{
type_to_irtype, FuncEnvironment as BaseFuncEnvironment, GlobalVariable, TargetEnvironment,
};
use cranelift_codegen::cursor::FuncCursor;
use cranelift_codegen::ir;
use cranelift_codegen::ir::condcodes::*;
use cranelift_codegen::ir::immediates::{Offset32, Uimm64};
use cranelift_codegen::ir::types::*;
use cranelift_codegen::ir::{AbiParam, ArgumentPurpose, Function, InstBuilder, Signature};
use cranelift_codegen::isa::TargetFrontendConfig;
use cranelift_frontend::{FunctionBuilder, Variable};
use std::convert::TryFrom;
use wasmer_compiler::wasmparser::Type;
use wasmer_compiler::{WasmError, WasmResult};
use wasmer_types::entity::EntityRef;
use wasmer_types::entity::PrimaryMap;
use wasmer_types::VMBuiltinFunctionIndex;
use wasmer_types::VMOffsets;
use wasmer_types::{
FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, ModuleInfo,
SignatureIndex, TableIndex, Type as WasmerType,
};
use wasmer_types::{MemoryStyle, TableStyle};
/// Compute an `ir::ExternalName` for a given wasm function index.
pub fn get_function_name(func_index: FunctionIndex) -> ir::ExternalName {
ir::ExternalName::user(0, func_index.as_u32())
}
/// The type of the `current_elements` field.
pub fn type_of_vmtable_definition_current_elements(vmoffsets: &VMOffsets) -> ir::Type {
ir::Type::int(u16::from(vmoffsets.size_of_vmtable_definition_current_elements()) * 8).unwrap()
}
/// The `FuncEnvironment` implementation for use by the `ModuleEnvironment`.
pub struct FuncEnvironment<'module_environment> {
/// Target-specified configuration.
target_config: TargetFrontendConfig,
/// The module-level environment which this function-level environment belongs to.
module: &'module_environment ModuleInfo,
/// A stack tracking the type of local variables.
type_stack: Vec<WasmerType>,
/// The module function signatures
signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,
/// The Cranelift global holding the vmctx address.
vmctx: Option<ir::GlobalValue>,
/// The external function signature for implementing wasm's `memory.size`
/// for locally-defined 32-bit memories.
memory32_size_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `table.size`
/// for locally-defined tables.
table_size_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `memory.grow`
/// for locally-defined memories.
memory_grow_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `table.grow`
/// for locally-defined tables.
table_grow_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `table.copy`
/// (it's the same for both local and imported tables).
table_copy_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `table.init`.
table_init_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `elem.drop`.
elem_drop_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `memory.copy`
/// (it's the same for both local and imported memories).
memory_copy_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `memory.fill`
/// (it's the same for both local and imported memories).
memory_fill_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `memory.init`.
memory_init_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `data.drop`.
data_drop_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `table.get`.
table_get_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `table.set`.
table_set_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `func.ref`.
func_ref_sig: Option<ir::SigRef>,
/// The external function signature for implementing wasm's `table.fill`.
table_fill_sig: Option<ir::SigRef>,
/// The external function signature for implementing reference increment for `extern.ref`.
externref_inc_sig: Option<ir::SigRef>,
/// The external function signature for implementing reference decrement for `extern.ref`.
externref_dec_sig: Option<ir::SigRef>,
/// Offsets to struct fields accessed by JIT code.
offsets: VMOffsets,
/// The memory styles
memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,
/// The table styles
table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
}
impl<'module_environment> FuncEnvironment<'module_environment> {
pub fn new(
target_config: TargetFrontendConfig,
module: &'module_environment ModuleInfo,
signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,
memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,
table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
) -> Self {
Self {
target_config,
module,
signatures,
type_stack: vec![],
vmctx: None,
memory32_size_sig: None,
table_size_sig: None,
memory_grow_sig: None,
table_grow_sig: None,
table_copy_sig: None,
table_init_sig: None,
elem_drop_sig: None,
memory_copy_sig: None,
memory_fill_sig: None,
memory_init_sig: None,
table_get_sig: None,
table_set_sig: None,
data_drop_sig: None,
func_ref_sig: None,
table_fill_sig: None,
externref_inc_sig: None,
externref_dec_sig: None,
offsets: VMOffsets::new(target_config.pointer_bytes(), module),
memory_styles,
table_styles,
}
}
fn pointer_type(&self) -> ir::Type {
self.target_config.pointer_type()
}
fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
self.vmctx.unwrap_or_else(|| {
let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
self.vmctx = Some(vmctx);
vmctx
})
}
fn get_table_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.table_fill_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// table index
AbiParam::new(I32),
// dst
AbiParam::new(I32),
// value
AbiParam::new(R64),
// len
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.table_fill_sig = Some(sig);
sig
}
fn get_table_fill_func(
&mut self,
func: &mut Function,
table_index: TableIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
(
self.get_table_fill_sig(func),
table_index.index(),
VMBuiltinFunctionIndex::get_table_fill_index(),
)
}
fn get_externref_inc_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.externref_inc_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![AbiParam::new(R64)],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.externref_inc_sig = Some(sig);
sig
}
fn get_externref_inc_func(
&mut self,
func: &mut Function,
) -> (ir::SigRef, VMBuiltinFunctionIndex) {
(
self.get_externref_inc_sig(func),
VMBuiltinFunctionIndex::get_externref_inc_index(),
)
}
fn get_externref_dec_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.externref_dec_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![AbiParam::new(R64)],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.externref_dec_sig = Some(sig);
sig
}
fn get_externref_dec_func(
&mut self,
func: &mut Function,
) -> (ir::SigRef, VMBuiltinFunctionIndex) {
(
self.get_externref_dec_sig(func),
VMBuiltinFunctionIndex::get_externref_dec_index(),
)
}
fn get_func_ref_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.func_ref_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
AbiParam::new(I32),
],
returns: vec![AbiParam::new(R64)],
call_conv: self.target_config.default_call_conv,
})
});
self.func_ref_sig = Some(sig);
sig
}
fn get_func_ref_func(
&mut self,
func: &mut Function,
function_index: FunctionIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
(
self.get_func_ref_sig(func),
function_index.index(),
VMBuiltinFunctionIndex::get_func_ref_index(),
)
}
fn get_table_get_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.table_get_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
AbiParam::new(I32),
AbiParam::new(I32),
],
returns: vec![AbiParam::new(R64)],
call_conv: self.target_config.default_call_conv,
})
});
self.table_get_sig = Some(sig);
sig
}
fn get_table_get_func(
&mut self,
func: &mut Function,
table_index: TableIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
if self.module.is_imported_table(table_index) {
(
self.get_table_get_sig(func),
table_index.index(),
VMBuiltinFunctionIndex::get_imported_table_get_index(),
)
} else {
(
self.get_table_get_sig(func),
self.module.local_table_index(table_index).unwrap().index(),
VMBuiltinFunctionIndex::get_table_get_index(),
)
}
}
fn get_table_set_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.table_set_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
AbiParam::new(I32),
AbiParam::new(I32),
AbiParam::new(R64),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.table_set_sig = Some(sig);
sig
}
fn get_table_set_func(
&mut self,
func: &mut Function,
table_index: TableIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
if self.module.is_imported_table(table_index) {
(
self.get_table_set_sig(func),
table_index.index(),
VMBuiltinFunctionIndex::get_imported_table_set_index(),
)
} else {
(
self.get_table_set_sig(func),
self.module.local_table_index(table_index).unwrap().index(),
VMBuiltinFunctionIndex::get_table_set_index(),
)
}
}
fn get_table_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.table_grow_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// TODO: figure out what the representation of a Wasm value is
AbiParam::new(R64),
AbiParam::new(I32),
AbiParam::new(I32),
],
returns: vec![AbiParam::new(I32)],
call_conv: self.target_config.default_call_conv,
})
});
self.table_grow_sig = Some(sig);
sig
}
/// Return the table.grow function signature to call for the given index, along with the
/// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
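    ///
    /// Imported tables keep their original `TableIndex`, while locally-defined
    /// tables are first translated to a `LocalTableIndex`; the body below is the
    /// dispatch between the imported and local builtin variants.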
fn get_table_grow_func(
&mut self,
func: &mut Function,
index: TableIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
if self.module.is_imported_table(index) {
(
self.get_table_grow_sig(func),
index.index(),
VMBuiltinFunctionIndex::get_imported_table_grow_index(),
)
} else {
(
self.get_table_grow_sig(func),
self.module.local_table_index(index).unwrap().index(),
VMBuiltinFunctionIndex::get_table_grow_index(),
)
}
}
fn get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.memory_grow_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
AbiParam::new(I32),
AbiParam::new(I32),
],
returns: vec![AbiParam::new(I32)],
call_conv: self.target_config.default_call_conv,
})
});
self.memory_grow_sig = Some(sig);
sig
}
/// Return the memory.grow function signature to call for the given index, along with the
/// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
fn get_memory_grow_func(
&mut self,
func: &mut Function,
index: MemoryIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
if self.module.is_imported_memory(index) {
(
self.get_memory_grow_sig(func),
index.index(),
VMBuiltinFunctionIndex::get_imported_memory32_grow_index(),
)
} else {
(
self.get_memory_grow_sig(func),
self.module.local_memory_index(index).unwrap().index(),
VMBuiltinFunctionIndex::get_memory32_grow_index(),
)
}
}
fn get_table_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.table_size_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
AbiParam::new(I32),
],
returns: vec![AbiParam::new(I32)],
call_conv: self.target_config.default_call_conv,
})
});
self.table_size_sig = Some(sig);
sig
}
    /// Return the table.size function signature to call for the given index, along with the
/// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
fn get_table_size_func(
&mut self,
func: &mut Function,
index: TableIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
if self.module.is_imported_table(index) {
(
self.get_table_size_sig(func),
index.index(),
VMBuiltinFunctionIndex::get_imported_table_size_index(),
)
} else {
(
self.get_table_size_sig(func),
self.module.local_table_index(index).unwrap().index(),
VMBuiltinFunctionIndex::get_table_size_index(),
)
}
}
fn get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.memory32_size_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
AbiParam::new(I32),
],
returns: vec![AbiParam::new(I32)],
call_conv: self.target_config.default_call_conv,
})
});
self.memory32_size_sig = Some(sig);
sig
}
/// Return the memory.size function signature to call for the given index, along with the
/// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
fn get_memory_size_func(
&mut self,
func: &mut Function,
index: MemoryIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
if self.module.is_imported_memory(index) {
(
self.get_memory32_size_sig(func),
index.index(),
VMBuiltinFunctionIndex::get_imported_memory32_size_index(),
)
} else {
(
self.get_memory32_size_sig(func),
self.module.local_memory_index(index).unwrap().index(),
VMBuiltinFunctionIndex::get_memory32_size_index(),
)
}
}
fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.table_copy_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// Destination table index.
AbiParam::new(I32),
// Source table index.
AbiParam::new(I32),
// Index within destination table.
AbiParam::new(I32),
// Index within source table.
AbiParam::new(I32),
// Number of elements to copy.
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.table_copy_sig = Some(sig);
sig
}
fn get_table_copy_func(
&mut self,
func: &mut Function,
dst_table_index: TableIndex,
src_table_index: TableIndex,
) -> (ir::SigRef, usize, usize, VMBuiltinFunctionIndex) {
let sig = self.get_table_copy_sig(func);
(
sig,
dst_table_index.as_u32() as usize,
src_table_index.as_u32() as usize,
VMBuiltinFunctionIndex::get_table_copy_index(),
)
}
fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.table_init_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// Table index.
AbiParam::new(I32),
// Segment index.
AbiParam::new(I32),
// Destination index within table.
AbiParam::new(I32),
// Source index within segment.
AbiParam::new(I32),
// Number of elements to initialize.
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.table_init_sig = Some(sig);
sig
}
fn get_table_init_func(
&mut self,
func: &mut Function,
table_index: TableIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
let sig = self.get_table_init_sig(func);
let table_index = table_index.as_u32() as usize;
(
sig,
table_index,
VMBuiltinFunctionIndex::get_table_init_index(),
)
}
fn get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.elem_drop_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// Element index.
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.elem_drop_sig = Some(sig);
sig
}
fn get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
let sig = self.get_elem_drop_sig(func);
(sig, VMBuiltinFunctionIndex::get_elem_drop_index())
}
fn get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.memory_copy_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// Memory index.
AbiParam::new(I32),
// Destination address.
AbiParam::new(I32),
// Source address.
AbiParam::new(I32),
// Length.
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.memory_copy_sig = Some(sig);
sig
}
fn get_memory_copy_func(
&mut self,
func: &mut Function,
memory_index: MemoryIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
let sig = self.get_memory_copy_sig(func);
if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
(
sig,
local_memory_index.index(),
VMBuiltinFunctionIndex::get_memory_copy_index(),
)
} else {
(
sig,
memory_index.index(),
VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
)
}
}
fn get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.memory_fill_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// Memory index.
AbiParam::new(I32),
// Destination address.
AbiParam::new(I32),
// Value.
AbiParam::new(I32),
// Length.
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.memory_fill_sig = Some(sig);
sig
}
fn get_memory_fill_func(
&mut self,
func: &mut Function,
memory_index: MemoryIndex,
) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
let sig = self.get_memory_fill_sig(func);
if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
(
sig,
local_memory_index.index(),
VMBuiltinFunctionIndex::get_memory_fill_index(),
)
} else {
(
sig,
memory_index.index(),
VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
)
}
}
fn get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.memory_init_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// Memory index.
AbiParam::new(I32),
// Data index.
AbiParam::new(I32),
// Destination address.
AbiParam::new(I32),
// Source index within the data segment.
AbiParam::new(I32),
// Length.
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.memory_init_sig = Some(sig);
sig
}
fn get_memory_init_func(
&mut self,
func: &mut Function,
) -> (ir::SigRef, VMBuiltinFunctionIndex) {
let sig = self.get_memory_init_sig(func);
(sig, VMBuiltinFunctionIndex::get_memory_init_index())
}
fn get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
let sig = self.data_drop_sig.unwrap_or_else(|| {
func.import_signature(Signature {
params: vec![
AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
// Data index.
AbiParam::new(I32),
],
returns: vec![],
call_conv: self.target_config.default_call_conv,
})
});
self.data_drop_sig = Some(sig);
sig
}
fn get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
let sig = self.get_data_drop_sig(func);
(sig, VMBuiltinFunctionIndex::get_data_drop_index())
}
/// Translates load of builtin function and returns a pair of values `vmctx`
/// and address of the loaded function.
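    ///
    /// Callers generally feed the returned pair straight into an indirect call,
    /// roughly as follows (an illustrative sketch with the builtin-specific
    /// arguments elided):
    ///
    /// ```ignore
    /// let (vmctx, func_addr) =
    ///     self.translate_load_builtin_function_address(&mut pos, func_idx);
    /// pos.ins().call_indirect(func_sig, func_addr, &[vmctx /* , ... */]);
    /// ```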
fn translate_load_builtin_function_address(
&mut self,
pos: &mut FuncCursor<'_>,
callee_func_idx: VMBuiltinFunctionIndex,
) -> (ir::Value, ir::Value) {
// We use an indirect call so that we don't have to patch the code at runtime.
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(pos.func);
let base = pos.ins().global_value(pointer_type, vmctx);
let mut mem_flags = ir::MemFlags::trusted();
mem_flags.set_readonly();
// Load the callee address.
let body_offset =
i32::try_from(self.offsets.vmctx_builtin_function(callee_func_idx)).unwrap();
let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);
(base, func_addr)
}
}
impl<'module_environment> TargetEnvironment for FuncEnvironment<'module_environment> {
fn target_config(&self) -> TargetFrontendConfig {
self.target_config
}
}
impl<'module_environment> BaseFuncEnvironment for FuncEnvironment<'module_environment> {
fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
// The first parameter is the vmctx. The rest are the wasm parameters.
index >= 1
}
fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> WasmResult<ir::Table> {
let pointer_type = self.pointer_type();
let (ptr, base_offset, current_elements_offset) = {
let vmctx = self.vmctx(func);
if let Some(def_index) = self.module.local_table_index(index) {
let base_offset =
i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
let current_elements_offset = i32::try_from(
self.offsets
.vmctx_vmtable_definition_current_elements(def_index),
)
.unwrap();
(vmctx, base_offset, current_elements_offset)
} else {
let from_offset = self.offsets.vmctx_vmtable_import_definition(index);
let table = func.create_global_value(ir::GlobalValueData::Load {
base: vmctx,
offset: Offset32::new(i32::try_from(from_offset).unwrap()),
global_type: pointer_type,
readonly: true,
});
let base_offset = i32::from(self.offsets.vmtable_definition_base());
let current_elements_offset =
i32::from(self.offsets.vmtable_definition_current_elements());
(table, base_offset, current_elements_offset)
}
};
let base_gv = func.create_global_value(ir::GlobalValueData::Load {
base: ptr,
offset: Offset32::new(base_offset),
global_type: pointer_type,
readonly: false,
});
let bound_gv = func.create_global_value(ir::GlobalValueData::Load {
base: ptr,
offset: Offset32::new(current_elements_offset),
global_type: type_of_vmtable_definition_current_elements(&self.offsets),
readonly: false,
});
let element_size = match self.table_styles[index] {
TableStyle::CallerChecksSignature => u64::from(self.offsets.size_of_vm_funcref()),
};
Ok(func.create_table(ir::TableData {
base_gv,
min_size: Uimm64::new(0),
bound_gv,
element_size: Uimm64::new(element_size),
index_type: I32,
}))
}
fn translate_table_grow(
&mut self,
mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
table_index: TableIndex,
_table: ir::Table,
delta: ir::Value,
init_value: ir::Value,
) -> WasmResult<ir::Value> {
let (func_sig, index_arg, func_idx) = self.get_table_grow_func(pos.func, table_index);
let table_index = pos.ins().iconst(I32, index_arg as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
let call_inst = pos.ins().call_indirect(
func_sig,
func_addr,
&[vmctx, init_value, delta, table_index],
);
Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
}
fn translate_table_get(
&mut self,
builder: &mut FunctionBuilder,
table_index: TableIndex,
_table: ir::Table,
index: ir::Value,
) -> WasmResult<ir::Value> {
let mut pos = builder.cursor();
let (func_sig, table_index_arg, func_idx) = self.get_table_get_func(pos.func, table_index);
let table_index = pos.ins().iconst(I32, table_index_arg as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
let call_inst = pos
.ins()
.call_indirect(func_sig, func_addr, &[vmctx, table_index, index]);
Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
}
fn translate_table_set(
&mut self,
builder: &mut FunctionBuilder,
table_index: TableIndex,
_table: ir::Table,
value: ir::Value,
index: ir::Value,
) -> WasmResult<()> {
let mut pos = builder.cursor();
let (func_sig, table_index_arg, func_idx) = self.get_table_set_func(pos.func, table_index);
let table_index = pos.ins().iconst(I32, table_index_arg as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins()
.call_indirect(func_sig, func_addr, &[vmctx, table_index, index, value]);
Ok(())
}
fn translate_table_fill(
&mut self,
mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
table_index: TableIndex,
dst: ir::Value,
val: ir::Value,
len: ir::Value,
) -> WasmResult<()> {
let (func_sig, table_index_arg, func_idx) = self.get_table_fill_func(pos.func, table_index);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
pos.ins().call_indirect(
func_sig,
func_addr,
&[vmctx, table_index_arg, dst, val, len],
);
Ok(())
}
fn translate_externref_inc(
&mut self,
mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
externref: ir::Value,
) -> WasmResult<()> {
let (func_sig, func_idx) = self.get_externref_inc_func(pos.func);
let (_vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins().call_indirect(func_sig, func_addr, &[externref]);
Ok(())
}
fn translate_externref_dec(
&mut self,
mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
externref: ir::Value,
) -> WasmResult<()> {
let (func_sig, func_idx) = self.get_externref_dec_func(pos.func);
let (_vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins().call_indirect(func_sig, func_addr, &[externref]);
Ok(())
}
fn translate_ref_null(
&mut self,
mut pos: cranelift_codegen::cursor::FuncCursor,
ty: Type,
) -> WasmResult<ir::Value> {
Ok(match ty {
Type::FuncRef => pos.ins().null(self.reference_type()),
Type::ExternRef => pos.ins().null(self.reference_type()),
_ => {
return Err(WasmError::Unsupported(
"`ref.null T` that is not a `funcref` or an `externref`".into(),
));
}
})
}
fn translate_ref_is_null(
&mut self,
mut pos: cranelift_codegen::cursor::FuncCursor,
value: ir::Value,
) -> WasmResult<ir::Value> {
let bool_is_null = match pos.func.dfg.value_type(value) {
// `externref`
ty if ty.is_ref() => pos.ins().is_null(value),
// `funcref`
ty if ty == self.pointer_type() => {
pos.ins()
.icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0)
}
_ => unreachable!(),
};
Ok(pos.ins().bint(ir::types::I32, bool_is_null))
}
fn translate_ref_func(
&mut self,
mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
func_index: FunctionIndex,
) -> WasmResult<ir::Value> {
// TODO: optimize this by storing a pointer to local func_index funcref metadata
// so that local funcref is just (*global + offset) instead of a function call
//
// Actually we can do the above for both local and imported functions because
// all of those are known statically.
//
// prototyping with a function call though
let (func_sig, func_index_arg, func_idx) = self.get_func_ref_func(pos.func, func_index);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
let func_index_arg = pos.ins().iconst(I32, func_index_arg as i64);
let call_inst = pos
.ins()
.call_indirect(func_sig, func_addr, &[vmctx, func_index_arg]);
Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
}
fn translate_custom_global_get(
&mut self,
mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
_index: GlobalIndex,
) -> WasmResult<ir::Value> {
unreachable!("we don't make any custom globals")
}
fn translate_custom_global_set(
&mut self,
mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
_index: GlobalIndex,
_value: ir::Value,
) -> WasmResult<()> {
unreachable!("we don't make any custom globals")
}
fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<ir::Heap> {
let pointer_type = self.pointer_type();
let (ptr, base_offset, current_length_offset) = {
let vmctx = self.vmctx(func);
if let Some(def_index) = self.module.local_memory_index(index) {
let base_offset =
i32::try_from(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
let current_length_offset = i32::try_from(
self.offsets
.vmctx_vmmemory_definition_current_length(def_index),
)
.unwrap();
(vmctx, base_offset, current_length_offset)
} else {
let from_offset = self.offsets.vmctx_vmmemory_import_definition(index);
let memory = func.create_global_value(ir::GlobalValueData::Load {
base: vmctx,
offset: Offset32::new(i32::try_from(from_offset).unwrap()),
global_type: pointer_type,
readonly: true,
});
let base_offset = i32::from(self.offsets.vmmemory_definition_base());
let current_length_offset =
i32::from(self.offsets.vmmemory_definition_current_length());
(memory, base_offset, current_length_offset)
}
};
// If we have a declared maximum, we can make this a "static" heap, which is
// allocated up front and never moved.
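        // A sketch of the mapping performed below (covering only the current
        // styles):
        //   MemoryStyle::Dynamic { .. } -> ir::HeapStyle::Dynamic, with the bound
        //     loaded from `current_length` and a base that must be re-read;
        //   MemoryStyle::Static { bound, .. } -> ir::HeapStyle::Static, with a
        //     compile-time bound and a readonly base global.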
let (offset_guard_size, heap_style, readonly_base) = match self.memory_styles[index] {
MemoryStyle::Dynamic { offset_guard_size } => {
let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
base: ptr,
offset: Offset32::new(current_length_offset),
global_type: pointer_type,
readonly: false,
});
(
Uimm64::new(offset_guard_size),
ir::HeapStyle::Dynamic {
bound_gv: heap_bound,
},
false,
)
}
MemoryStyle::Static {
bound,
offset_guard_size,
} => (
Uimm64::new(offset_guard_size),
ir::HeapStyle::Static {
bound: Uimm64::new(bound.bytes().0 as u64),
},
true,
),
};
let heap_base = func.create_global_value(ir::GlobalValueData::Load {
base: ptr,
offset: Offset32::new(base_offset),
global_type: pointer_type,
readonly: readonly_base,
});
Ok(func.create_heap(ir::HeapData {
base: heap_base,
min_size: 0.into(),
offset_guard_size,
style: heap_style,
index_type: I32,
}))
}
fn make_global(
&mut self,
func: &mut ir::Function,
index: GlobalIndex,
) -> WasmResult<GlobalVariable> {
let pointer_type = self.pointer_type();
let (ptr, offset) = {
let vmctx = self.vmctx(func);
let from_offset = if let Some(def_index) = self.module.local_global_index(index) {
self.offsets.vmctx_vmglobal_definition(def_index)
} else {
self.offsets.vmctx_vmglobal_import_definition(index)
};
let global = func.create_global_value(ir::GlobalValueData::Load {
base: vmctx,
offset: Offset32::new(i32::try_from(from_offset).unwrap()),
global_type: pointer_type,
readonly: true,
});
(global, 0)
};
Ok(GlobalVariable::Memory {
gv: ptr,
offset: offset.into(),
ty: type_to_irtype(self.module.globals[index].ty, self.target_config())?,
})
}
fn make_indirect_sig(
&mut self,
func: &mut ir::Function,
index: SignatureIndex,
) -> WasmResult<ir::SigRef> {
Ok(func.import_signature(self.signatures[index].clone()))
}
fn make_direct_func(
&mut self,
func: &mut ir::Function,
index: FunctionIndex,
) -> WasmResult<ir::FuncRef> {
let sigidx = self.module.functions[index];
let signature = func.import_signature(self.signatures[sigidx].clone());
let name = get_function_name(index);
Ok(func.import_function(ir::ExtFuncData {
name,
signature,
colocated: true,
}))
}
fn translate_call_indirect(
&mut self,
mut pos: FuncCursor<'_>,
table_index: TableIndex,
table: ir::Table,
sig_index: SignatureIndex,
sig_ref: ir::SigRef,
callee: ir::Value,
call_args: &[ir::Value],
) -> WasmResult<ir::Inst> {
let pointer_type = self.pointer_type();
let table_entry_addr = pos.ins().table_addr(pointer_type, table, callee, 0);
// Dereference table_entry_addr to get the function address.
let mem_flags = ir::MemFlags::trusted();
let table_entry_addr = pos.ins().load(
pointer_type,
mem_flags,
table_entry_addr,
i32::from(self.offsets.vm_funcref_anyfunc_ptr()),
);
// check if the funcref is null
pos.ins()
.trapz(table_entry_addr, ir::TrapCode::IndirectCallToNull);
let func_addr = pos.ins().load(
pointer_type,
mem_flags,
table_entry_addr,
i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()),
);
// If necessary, check the signature.
match self.table_styles[table_index] {
TableStyle::CallerChecksSignature => {
let sig_id_size = self.offsets.size_of_vmshared_signature_index();
let sig_id_type = ir::Type::int(u16::from(sig_id_size) * 8).unwrap();
let vmctx = self.vmctx(pos.func);
let base = pos.ins().global_value(pointer_type, vmctx);
let offset =
i32::try_from(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();
// Load the caller ID.
let mut mem_flags = ir::MemFlags::trusted();
mem_flags.set_readonly();
let caller_sig_id = pos.ins().load(sig_id_type, mem_flags, base, offset);
// Load the callee ID.
let mem_flags = ir::MemFlags::trusted();
let callee_sig_id = pos.ins().load(
sig_id_type,
mem_flags,
table_entry_addr,
i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()),
);
// Check that they match.
let cmp = pos.ins().icmp(IntCC::Equal, callee_sig_id, caller_sig_id);
pos.ins().trapz(cmp, ir::TrapCode::BadSignature);
}
}
let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
// First append the callee vmctx address.
let vmctx = pos.ins().load(
pointer_type,
mem_flags,
table_entry_addr,
i32::from(self.offsets.vmcaller_checked_anyfunc_vmctx()),
);
real_call_args.push(vmctx);
// Then append the regular call arguments.
real_call_args.extend_from_slice(call_args);
Ok(pos.ins().call_indirect(sig_ref, func_addr, &real_call_args))
}
fn translate_call(
&mut self,
mut pos: FuncCursor<'_>,
callee_index: FunctionIndex,
callee: ir::FuncRef,
call_args: &[ir::Value],
) -> WasmResult<ir::Inst> {
let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
// Handle direct calls to locally-defined functions.
if !self.module.is_imported_function(callee_index) {
// Let's get the caller vmctx
let caller_vmctx = pos.func.special_param(ArgumentPurpose::VMContext).unwrap();
// First append the callee vmctx address, which is the same as the caller vmctx in
// this case.
real_call_args.push(caller_vmctx);
// Then append the regular call arguments.
real_call_args.extend_from_slice(call_args);
return Ok(pos.ins().call(callee, &real_call_args));
}
// Handle direct calls to imported functions. We use an indirect call
// so that we don't have to patch the code at runtime.
let pointer_type = self.pointer_type();
let sig_ref = pos.func.dfg.ext_funcs[callee].signature;
let vmctx = self.vmctx(pos.func);
let base = pos.ins().global_value(pointer_type, vmctx);
let mem_flags = ir::MemFlags::trusted();
// Load the callee address.
let body_offset =
i32::try_from(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);
// First append the callee vmctx address.
let vmctx_offset =
i32::try_from(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
let vmctx = pos.ins().load(pointer_type, mem_flags, base, vmctx_offset);
real_call_args.push(vmctx);
// Then append the regular call arguments.
real_call_args.extend_from_slice(call_args);
Ok(pos.ins().call_indirect(sig_ref, func_addr, &real_call_args))
}
fn translate_memory_grow(
&mut self,
mut pos: FuncCursor<'_>,
index: MemoryIndex,
_heap: ir::Heap,
val: ir::Value,
) -> WasmResult<ir::Value> {
let (func_sig, index_arg, func_idx) = self.get_memory_grow_func(pos.func, index);
let memory_index = pos.ins().iconst(I32, index_arg as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
let call_inst = pos
.ins()
.call_indirect(func_sig, func_addr, &[vmctx, val, memory_index]);
Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
}
fn translate_memory_size(
&mut self,
mut pos: FuncCursor<'_>,
index: MemoryIndex,
_heap: ir::Heap,
) -> WasmResult<ir::Value> {
let (func_sig, index_arg, func_idx) = self.get_memory_size_func(pos.func, index);
let memory_index = pos.ins().iconst(I32, index_arg as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
let call_inst = pos
.ins()
.call_indirect(func_sig, func_addr, &[vmctx, memory_index]);
Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
}
fn translate_memory_copy(
&mut self,
mut pos: FuncCursor,
src_index: MemoryIndex,
_src_heap: ir::Heap,
_dst_index: MemoryIndex,
_dst_heap: ir::Heap,
dst: ir::Value,
src: ir::Value,
len: ir::Value,
) -> WasmResult<()> {
let (func_sig, src_index, func_idx) = self.get_memory_copy_func(pos.func, src_index);
let src_index_arg = pos.ins().iconst(I32, src_index as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins()
.call_indirect(func_sig, func_addr, &[vmctx, src_index_arg, dst, src, len]);
Ok(())
}
fn translate_memory_fill(
&mut self,
mut pos: FuncCursor,
memory_index: MemoryIndex,
_heap: ir::Heap,
dst: ir::Value,
val: ir::Value,
len: ir::Value,
) -> WasmResult<()> {
let (func_sig, memory_index, func_idx) = self.get_memory_fill_func(pos.func, memory_index);
let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins().call_indirect(
func_sig,
func_addr,
&[vmctx, memory_index_arg, dst, val, len],
);
Ok(())
}
fn translate_memory_init(
&mut self,
mut pos: FuncCursor,
memory_index: MemoryIndex,
_heap: ir::Heap,
seg_index: u32,
dst: ir::Value,
src: ir::Value,
len: ir::Value,
) -> WasmResult<()> {
let (func_sig, func_idx) = self.get_memory_init_func(pos.func);
let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins().call_indirect(
func_sig,
func_addr,
&[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
);
Ok(())
}
fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
let (func_sig, func_idx) = self.get_data_drop_func(pos.func);
let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins()
.call_indirect(func_sig, func_addr, &[vmctx, seg_index_arg]);
Ok(())
}
fn translate_table_size(
&mut self,
mut pos: FuncCursor,
table_index: TableIndex,
_table: ir::Table,
) -> WasmResult<ir::Value> {
let (func_sig, index_arg, func_idx) = self.get_table_size_func(pos.func, table_index);
let table_index = pos.ins().iconst(I32, index_arg as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
let call_inst = pos
.ins()
.call_indirect(func_sig, func_addr, &[vmctx, table_index]);
Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
}
fn translate_table_copy(
&mut self,
mut pos: FuncCursor,
dst_table_index: TableIndex,
_dst_table: ir::Table,
src_table_index: TableIndex,
_src_table: ir::Table,
dst: ir::Value,
src: ir::Value,
len: ir::Value,
) -> WasmResult<()> {
let (func_sig, dst_table_index_arg, src_table_index_arg, func_idx) =
self.get_table_copy_func(pos.func, dst_table_index, src_table_index);
let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins().call_indirect(
func_sig,
func_addr,
&[
vmctx,
dst_table_index_arg,
src_table_index_arg,
dst,
src,
len,
],
);
Ok(())
}
fn translate_table_init(
&mut self,
mut pos: FuncCursor,
seg_index: u32,
table_index: TableIndex,
_table: ir::Table,
dst: ir::Value,
src: ir::Value,
len: ir::Value,
) -> WasmResult<()> {
let (func_sig, table_index_arg, func_idx) = self.get_table_init_func(pos.func, table_index);
let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins().call_indirect(
func_sig,
func_addr,
&[vmctx, table_index_arg, seg_index_arg, dst, src, len],
);
Ok(())
}
fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
let (func_sig, func_idx) = self.get_elem_drop_func(pos.func);
let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);
let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
pos.ins()
.call_indirect(func_sig, func_addr, &[vmctx, elem_index_arg]);
Ok(())
}
fn translate_atomic_wait(
&mut self,
_pos: FuncCursor,
_index: MemoryIndex,
_heap: ir::Heap,
_addr: ir::Value,
_expected: ir::Value,
_timeout: ir::Value,
) -> WasmResult<ir::Value> {
Err(WasmError::Unsupported(
"wasm atomics (fn translate_atomic_wait)".to_string(),
))
}
fn translate_atomic_notify(
&mut self,
_pos: FuncCursor,
_index: MemoryIndex,
_heap: ir::Heap,
_addr: ir::Value,
_count: ir::Value,
) -> WasmResult<ir::Value> {
Err(WasmError::Unsupported(
"wasm atomics (fn translate_atomic_notify)".to_string(),
))
}
fn get_global_type(&self, global_index: GlobalIndex) -> Option<WasmerType> {
Some(self.module.globals.get(global_index)?.ty)
}
fn push_local_decl_on_stack(&mut self, ty: WasmerType) {
self.type_stack.push(ty);
}
fn push_params_on_stack(&mut self, function_index: LocalFunctionIndex) {
let func_index = self.module.func_index(function_index);
let sig_idx = self.module.functions[func_index];
let signature = &self.module.signatures[sig_idx];
for param in signature.params() {
self.type_stack.push(*param);
}
}
fn get_local_type(&self, local_index: u32) -> Option<WasmerType> {
self.type_stack.get(local_index as usize).cloned()
}
fn get_local_types(&self) -> &[WasmerType] {
&self.type_stack
}
fn get_function_type(&self, function_index: FunctionIndex) -> Option<&FunctionType> {
let sig_idx = self.module.functions.get(function_index)?;
Some(&self.module.signatures[*sig_idx])
}
fn get_function_sig(&self, sig_index: SignatureIndex) -> Option<&FunctionType> {
self.module.signatures.get(sig_index)
}
fn translate_drop_locals(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()> {
// TODO: this allocation can be removed without too much effort but it will require
// maneuvering around the borrow checker
for (local_index, local_type) in self.type_stack.to_vec().iter().enumerate() {
if *local_type == WasmerType::ExternRef {
let val = builder.use_var(Variable::with_u32(local_index as _));
self.translate_externref_dec(builder.cursor(), val)?;
}
}
Ok(())
}
}
| 36.112987 | 100 | 0.575035 |
16edee95d7af3a4616d57485d16273c6228de74a | 50 | async unsafe fn foo() {}
const unsafe fn bar() {}
| 16.666667 | 24 | 0.64 |
5663c1315210ed80f1d523697bfb41a18321fe55 | 379 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
impl super::FCFG_B6_SSIZE1 {
#[doc = r" Reads the contents of the register"]
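    #[doc = r" # Example"]
    #[doc = r""]
    #[doc = r" An illustrative sketch; `fcfg1` stands in for whatever value owns this"]
    #[doc = r" register block and is not defined in this file:"]
    #[doc = r""]
    #[doc = r" ```ignore"]
    #[doc = r" let raw: u32 = fcfg1.fcfg_b6_ssize1.read().bits();"]
    #[doc = r" ```"]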
#[inline]
pub fn read(&self) -> R {
R { bits: self.register.get() }
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
}
| 19.947368 | 51 | 0.546174 |
71a2a58a1962027bb45aee7796c9d4ddd7d66ba4 | 137,723 | use std::path::PathBuf;
use swc_common::{chain, pass::Optional, Mark};
use swc_ecma_parser::{Syntax, TsConfig};
use swc_ecma_transforms_base::resolver::resolver_with_mark;
use swc_ecma_transforms_compat::{
es2015::{block_scoping, destructuring, parameters},
es2017::async_to_generator,
es2020::{nullish_coalescing, optional_chaining},
};
use swc_ecma_transforms_proposal::decorators;
use swc_ecma_transforms_testing::{test, test_exec, test_fixture};
use swc_ecma_transforms_typescript::{strip, strip::strip_with_config};
use swc_ecma_visit::Fold;
fn tr() -> impl Fold {
tr_config(None, None)
}
fn tr_config(
config: Option<strip::Config>,
decorators_config: Option<decorators::Config>,
) -> impl Fold {
let mark = Mark::fresh(Mark::root());
let has_decorators = decorators_config.is_some();
let config = config.unwrap_or_else(|| strip::Config {
no_empty_export: true,
..Default::default()
});
chain!(
Optional::new(
decorators(decorators_config.unwrap_or_default()),
has_decorators,
),
resolver_with_mark(mark),
strip_with_config(config, mark),
)
}
macro_rules! to {
($name:ident, $from:expr, $to:expr) => {
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| tr(),
$name,
$from,
$to,
ok_if_code_eq
);
};
}
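// A note on `to!` (summarizing the macro above): it parses `$from` as TypeScript
// with decorators enabled, runs the stripping pass built by `tr()`, and compares
// the output against `$to`, where the trailing `ok_if_code_eq` token accepts
// code-equivalent output rather than requiring a byte-for-byte match. A
// hypothetical usage:
//
//     to!(drops_type_alias, "type A = string;", "");
//
// `test_with_config!` below is the same pattern with an explicit `strip::Config`.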
macro_rules! test_with_config {
($name:ident, $config:expr, $from:expr, $to:expr) => {
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| tr_config(Some($config), None),
$name,
$from,
$to,
ok_if_code_eq
);
};
}
test!(
Syntax::Typescript(Default::default()),
|_| chain!(
tr(),
parameters(parameters::Config {
ignore_function_length: true
}),
destructuring(destructuring::Config { loose: false }),
block_scoping(),
),
fn_len_default_assignment_with_types,
"export function transformFileSync(
filename: string,
opts?: Object = {},
): string {}",
"export function transformFileSync(filename, opts) {
if (opts === void 0) opts = {};
}"
);
to!(
constructor_01,
"class Foo {
constructor(public readonly foo) {}
}",
"class Foo {
constructor(foo) {
this.foo = foo;
}
}"
);
to!(
constructor_02,
"class Foo {
constructor(readonly foo) {
this.bar = 1;
}
}",
"class Foo {
constructor(foo) {
this.foo = foo;
this.bar = 1;
}
}"
);
to!(
private_method_overload_and_abstract,
"class test {
#test();
#test() {
}
abstract #test();
}",
"class test {
#test() {
}
}"
);
to!(export_import, "export import A = B", "export var A = B;");
to!(export_equals, "export = Foo", "module.exports = Foo;");
to!(
issue_196_01,
"export type Link = { key: string; text: string };",
""
);
to!(
issue_196_02,
"type Link = { key: string; text: string };
export { Link };",
""
);
to!(
issue_196_03,
"type Link = { key: string; text: string };
const Link = 'Boo';
export { Link };",
"const Link = 'Boo';
export { Link };"
);
// TODO: Test function / variable hoisting
to!(
issue_179_01,
"import {Types} from 'other';
const a: Types.foo = {};",
"const a = {};"
);
to!(
issue_179_02,
"import {Types} from 'other';
const a: Types = Types.foo;",
"import {Types} from 'other';
const a = Types.foo;"
);
to!(
issue_236,
"function foo(this: any, $scope: angular.IScope){}",
"function foo($scope){}"
);
to!(
issue_357,
"export function addProp<T, K extends string, V>(
obj: T,
prop: K,
value: V
): T & { [x in K]: V };
export function addProp<T, K extends string, V>(
prop: K,
value: V
): (obj: T) => T & { [x in K]: V };
export function addProp(arg1: any, arg2: any, arg3?: any): any {
if (arguments.length === 2) {
return (object: any) => _addProp(object, arg1, arg2);
}
return _addProp(arg1, arg2, arg3);
}
function _addProp(obj: any, prop: string, value: any) {
return {
...obj,
[prop]: value,
};
}",
"export function addProp(arg1, arg2, arg3) {
if (arguments.length === 2) {
return (object) => _addProp(object, arg1, arg2);
}
return _addProp(arg1, arg2, arg3);
}
function _addProp(obj, prop, value) {
return {
...obj,
[prop]: value,
};
}
"
);
to!(
issue_366_01,
"
class App {
public enter?(): void;
public leave?(): void;
public destroy?(): void;
}",
"class App {}"
);
to!(
issue_366_02,
"
function enter(): string;
function enter(foo: string): number;
",
""
);
to!(
issue_392_1,
"
import { PlainObject } from 'simplytyped';
const dict: PlainObject = {};
",
"
const dict = {};"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_392_2,
"
import { PlainObject } from 'simplytyped';
const dict: PlainObject = {};
",
"
const dict = {};"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_461,
"for (let x in ['']) {
(x => 0)(x);
}",
"for(let x in ['']){
((x)=>0
)(x);
}"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_468_1,
"tView.firstCreatePass ?
getOrCreateTNode(tView, lView[T_HOST], index, TNodeType.Element, null, null) :
tView.data[adjustedIndex] as TElementNode",
"tView.firstCreatePass ? getOrCreateTNode(tView, lView[T_HOST], index, TNodeType.Element, \
null, null) : tView.data[adjustedIndex];"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_468_2,
"tView.firstCreatePass ?
getOrCreateTNode(tView, lView[T_HOST], index, TNodeType.Element, null, null) :
tView.data[adjustedIndex] as TElementNode",
"tView.firstCreatePass ? getOrCreateTNode(tView, lView[T_HOST], index, TNodeType.Element, \
null, null) : tView.data[adjustedIndex];"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_468_3,
"tView.firstCreatePass ?
getOrCreateTNode() : tView.data[adjustedIndex] as TElementNode",
"tView.firstCreatePass ? getOrCreateTNode() : tView.data[adjustedIndex];"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_468_4,
"a ? b : c",
"a ? b : c"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_468_5,
"a ? b : c as T",
"a ? b : c"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_468_6,
"a.b ? c() : d.e[f] as T",
"a.b ? c() : d.e[f];"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_468_7,
"tView.firstCreatePass ? getOrCreateTNode() : tView.data[adjustedIndex]",
"tView.firstCreatePass ? getOrCreateTNode() : tView.data[adjustedIndex];"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
enum_simple,
"enum Foo{ a }",
"
var Foo;
(function (Foo) {
Foo[Foo['a'] = 0] = 'a';
})(Foo || (Foo = {}));",
ok_if_code_eq
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
enum_str,
"enum State {
closed = 'closed',
opened = 'opened',
mounted = 'mounted',
unmounted = 'unmounted',
}",
r#"
var State;
(function (State) {
State["closed"] = "closed";
State["opened"] = "opened";
State["mounted"] = "mounted";
State["unmounted"] = "unmounted";
})(State || (State = {}));
"#,
ok_if_code_eq
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
enum_key_value,
"enum StateNum {
closed = 'cl0',
opened = 'op1',
mounted = 'mo2',
}",
r#"
var StateNum;
(function (StateNum) {
StateNum["closed"] = "cl0";
StateNum["opened"] = "op1";
StateNum["mounted"] = "mo2";
})(StateNum || (StateNum = {}));
"#,
ok_if_code_eq
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
enum_export_str,
"export enum State {
closed = 'closed',
opened = 'opened',
mounted = 'mounted',
unmounted = 'unmounted',
}",
r#"export var State;
(function (State) {
State["closed"] = "closed";
State["opened"] = "opened";
State["mounted"] = "mounted";
State["unmounted"] = "unmounted";
})(State || (State = {}));
"#,
ok_if_code_eq
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_640,
"import { Handler } from 'aws-lambda';
export const handler: Handler = async (event, context) => {};",
"export const handler = async (event, context) => {};",
ok_if_code_eq
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_656,
"export const x = { text: 'hello' } as const;",
"export const x = { text: 'hello' };",
ok_if_code_eq
);
to!(import_type, "import type foo from 'foo'", "");
to!(export_type, "export type { foo }", "");
to!(
issue_685,
"
type MyType = string;
export default MyType;",
""
);
to!(
issue_685_2,
"
class MyType {}
type MyType = string;
export default MyType;",
"
class MyType {}
export default MyType;"
);
to!(
issue_685_3,
"
var MyType = function(){};
type MyType = string;
export default MyType;",
"
var MyType = function(){};
export default MyType;"
);
to!(
issue_685_4,
"
interface MyType {
other: number;
}
export default MyType;",
""
);
to!(
ts_enum_str_init,
"enum FlexSize {
md = 'md',
lg = 'lg',
}",
"var FlexSize;
(function (FlexSize) {
FlexSize['md'] = 'md';
FlexSize['lg'] = 'lg';
})(FlexSize || (FlexSize = {}));
"
);
to!(
ts_enum_no_init,
"enum FlexSize {
md,
lg,
}",
"var FlexSize;
(function (FlexSize) {
FlexSize[FlexSize['md'] = 0] = 'md';
FlexSize[FlexSize['lg'] = 1] = 'lg';
})(FlexSize || (FlexSize = {}));
"
);
to!(module_01, "module 'foo'{ }", "");
to!(declare_01, "declare var env: FOO", "");
to!(
issue_757,
"// test.ts
enum Foo {
A,
B,
}
export default Foo;
",
"var Foo;
(function(Foo) {
Foo[Foo['A'] = 0] = 'A';
Foo[Foo['B'] = 1] = 'B';
})(Foo || (Foo = {
}));
export default Foo;"
);
to!(
issue_786_1,
"import { IPerson } from '../types/types'
export function createPerson(person: IPerson) {
const a = {} as IPerson
}",
"export function createPerson(person) {
const a = {};
}"
);
to!(
issue_786_2,
"import { IPerson } from '../types/types'
function createPerson(person: IPerson) {
const a = {} as IPerson
}",
"function createPerson(person) {
const a = {};
}"
);
to!(
issue_791_1,
"import { IPerson } from '../types/types'
export interface IEmployee extends IPerson {
}
export function createPerson(person: IPerson) {
const a = {} as IPerson
}",
"export function createPerson(person) {
const a = {}
}"
);
to!(
issue_791_2,
"import { IPerson } from '../types/types'
export class Employee implements IPerson {
}
export function createPerson(person: IPerson) {
const a = {} as IPerson
}",
"export class Employee {
}
export function createPerson(person) {
const a = {}
}"
);
to!(
issue_791_3,
"import { IPerson } from '../types/types'
export type MyPerson = IPerson;
export function createPerson(person: MyPerson) {
const a = {} as MyPerson
}",
"export function createPerson(person) {
const a = {}
}"
);
to!(
issue_791_4,
"import { A, B } from '../types/types'
export class Child extends A implements B {
}",
"import { A } from '../types/types'
export class Child extends A {
}
"
);
to!(
issue_793_1,
"import { IPerson } from '../types/types'
export function createPerson(person) {
const a = {} as IPerson
}",
"export function createPerson(person) {
const a = {};
}"
);
to!(
issue_793_2,
"import { IPerson } from '../types/types'
export function createPerson(person) {
const a = <IPerson>{};
}",
"export function createPerson(person) {
const a = {};
}"
);
to!(
issue_900_1,
"export class FeatureSet<Name extends string> {
log(a: Name) {
console.log(a)
}
}",
"export class FeatureSet {
log(a) {
console.log(a)
}
}"
);
to!(
issue_900_2,
"class FeatureSet<Name extends string> {
log(a: Name) {
console.log(a)
}
}",
"class FeatureSet {
log(a) {
console.log(a)
}
}"
);
to!(
issue_900_3,
"export default class FeatureSet<Name extends string> {
log(a: Name) {
console.log(a)
}
}",
"class FeatureSet {
log(a) {
console.log(a)
}
}
export { FeatureSet as default };"
);
to!(
issue_820_1,
"enum Direction {
Up = 1,
Down = 2,
Left = Up + Down,
}",
"var Direction;
(function (Direction) {
Direction[Direction['Up'] = 1] = 'Up';
Direction[Direction['Down'] = 2] = 'Down';
Direction[Direction['Left'] = 3] = 'Left';
})(Direction || (Direction = {}));"
);
to!(
issue_915,
"export class Logger {
#level: LogLevels;
#handlers: BaseHandler[];
readonly #loggerName: string;
constructor(
loggerName: string,
levelName: LevelName,
options: LoggerOptions = {},
) {
this.#loggerName = loggerName;
this.#level = getLevelByName(levelName);
this.#handlers = options.handlers || [];
}
}",
"export class Logger {
#level;
#handlers;
#loggerName;
constructor(loggerName, levelName, options = {
}){
this.#loggerName = loggerName;
this.#level = getLevelByName(levelName);
this.#handlers = options.handlers || [];
}
}"
);
to!(
issue_915_2,
r#"Deno.test("[ws] WebSocket should act as asyncIterator", async () => {
enum Frames {
ping,
hello,
close,
end,
}
});"#,
r#"Deno.test("[ws] WebSocket should act as asyncIterator", async ()=>{
let Frames;
(function(Frames) {
Frames[Frames["ping"] = 0] = "ping";
Frames[Frames["hello"] = 1] = "hello";
Frames[Frames["close"] = 2] = "close";
Frames[Frames["end"] = 3] = "end";
})(Frames || (Frames = {
}));
});"#
);
to!(
issue_915_3,
r#"export class MultipartReader {
readonly newLine = encoder.encode("\r\n");
}"#,
r#"export class MultipartReader {
constructor(){
this.newLine = encoder.encode("\r\n");
}
}"#
);
to!(
issue_912,
r#"export class BadRequestError extends Error {
constructor(public readonly message: string) {
super(message)
}
}"#,
r#"export class BadRequestError extends Error {
constructor(message) {
super(message)
this.message = message
}
}"#
);
to!(
issue_921,
"export abstract class Kernel {
[key: string]: any
}",
"export abstract class Kernel {}"
);
to!(
issue_926,
"class A extends Object {
constructor(public a, private b) {
super();
}
}",
"class A extends Object {
constructor(a, b){
super();
this.a = a;
this.b = b;
}
}"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_930_instance,
"class A {
b = this.a;
constructor(readonly a){
}
}",
"class A {
constructor(a) {
this.a = a;
this.b = this.a;
}
}"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_930_static,
"class A {
static b = 'foo';
constructor(a){
}
}",
"class A {
constructor(a) {
}
}
A.b = 'foo';"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
typescript_001,
"class A {
foo = new Subject()
constructor() {
this.foo.subscribe()
}
}",
"class A {
constructor() {
this.foo = new Subject()
this.foo.subscribe()
}
}"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
typescript_002,
"class A extends B {
foo = 'foo'
b = this.a;
declare1
declare2!: string
constructor(private readonly a: string, readonly c, private d: number = 1) {
super()
this.foo.subscribe()
}
}",
"class A extends B {
constructor(a, c, d = 1) {
super();
this.a = a;
this.c = c;
this.d = d;
this.foo = 'foo';
this.b = this.a;
this.foo.subscribe();
}
}"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_958,
"export class Test {
constructor(readonly test?: string) {}
}",
"export class Test {
constructor(test){
this.test = test;
}
}"
);
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| chain!(
decorators(decorators::Config {
legacy: true,
..Default::default()
}),
tr()
),
issue_960_1,
"
function DefineAction() {
return (target, property) => {
console.log(target, property);
}
}
class Base {
constructor() {
this.action = new Subject()
}
}
class Child extends Base {
@DefineAction() action: Observable<void>
callApi() {
console.log(this.action) // undefined
}
}
",
r#"var _class, _descriptor, _dec;
function DefineAction() {
return (target, property)=>{
console.log(target, property);
};
}
class Base {
constructor(){
this.action = new Subject();
}
}
let Child = ((_class = class Child extends Base {
callApi() {
console.log(this.action);
}
constructor(...args){
super(...args);
_initializerDefineProperty(this, "action", _descriptor, this);
}
}) || _class, _dec = DefineAction(), _descriptor = _applyDecoratedDescriptor(_class.prototype, "action", [
_dec
], {
configurable: true,
enumerable: true,
writable: true,
initializer: void 0
}), _class);
"#,
ok_if_code_eq
);
test_exec!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| chain!(
decorators(decorators::Config {
legacy: true,
..Default::default()
}),
tr()
),
issue_960_2,
"function DefineAction() { return function(_a, _b, c) { return c } }
class Base {
constructor() {
this.action = 1
}
}
class Child extends Base {
@DefineAction() action: number
callApi() {
console.log(this.action) // undefined
return this.action
}
}
const c = new Child()
c.callApi()
expect(c.callApi()).not.toBe(undefined)
expect(c.action).toBe(1);
"
);
test!(
::swc_ecma_parser::Syntax::Typescript(Default::default()),
|_| tr(),
issue_1032,
r#"import {
indent as indentFormatter,
newline as newlineFormatter,
breakpoint as breakpointFormatter,
} from "./format.ts";
const proseTypes = new Map();
// deno-lint-ignore ban-types
const prose = (l: number, i: Function, nl: Function, bp: string): string => {
return i(l) + bp + "prose {" + nl +
i(l + 1) + "color: #374151;" + nl +
i(l + 1) + "max-width: 65ch;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose [class~="lead"] {' + nl +
i(l + 1) + "color: #4b5563;" + nl +
i(l + 1) + "font-size: 1.25em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose a {" + nl +
i(l + 1) + "color: #5850ec;" + nl +
i(l + 1) + "text-decoration: none;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose strong {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol {" + nl +
i(l + 1) + "counter-reset: list-counter;" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol > li {" + nl +
i(l + 1) + "position: relative;" + nl +
i(l + 1) + "counter-increment: list-counter;" + nl +
i(l + 1) + "padding-left: 1.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol > li::before {" + nl +
i(l + 1) + 'content: counter(list-counter) ".";' + nl +
i(l + 1) + "position: absolute;" + nl +
i(l + 1) + "font-weight: 400;" + nl +
i(l + 1) + "color: #6b7280;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ul > li {" + nl +
i(l + 1) + "position: relative;" + nl +
i(l + 1) + "padding-left: 1.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ul > li::before {" + nl +
i(l + 1) + 'content: "";' + nl +
i(l + 1) + "position: absolute;" + nl +
i(l + 1) + "background-color: #d2d6dc;" + nl +
i(l + 1) + "border-radius: 50%;" + nl +
i(l + 1) + "width: 0.375em;" + nl +
i(l + 1) + "height: 0.375em;" + nl +
i(l + 1) + "top: calc(0.875em - 0.1875em);" + nl +
i(l + 1) + "left: 0.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose hr {" + nl +
i(l + 1) + "border-color: #e5e7eb;" + nl +
i(l + 1) + "border-top-width: 1px;" + nl +
i(l + 1) + "margin-top: 3em;" + nl +
i(l + 1) + "margin-bottom: 3em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose blockquote {" + nl +
i(l + 1) + "font-weight: 500;" + nl +
i(l + 1) + "font-style: italic;" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "border-left-width: 0.25rem;" + nl +
i(l + 1) + "border-left-color: #e5e7eb;" + nl +
i(l + 1) + 'quotes: "\\201C""\\201D""\\2018""\\2019";' + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 1.6em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose blockquote p:first-of-type::before {" + nl +
i(l + 1) + "content: open-quote;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose blockquote p:last-of-type::after {" + nl +
i(l + 1) + "content: close-quote;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h1 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 800;" + nl +
i(l + 1) + "font-size: 2.25em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.1111111;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h2 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 700;" + nl +
i(l + 1) + "font-size: 1.5em;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 1em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h3 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "font-size: 1.25em;" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 0.6em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h4 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "margin-top: 1.5em;" + nl +
i(l + 1) + "margin-bottom: 0.5em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose figure figcaption {" + nl +
i(l + 1) + "color: #6b7280;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l + 1) + "line-height: 1.4285714;" + nl +
i(l + 1) + "margin-top: 0.8571429em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose code {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose code::before {" + nl +
i(l + 1) + 'content: "`";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose code::after {" + nl +
i(l + 1) + 'content: "`";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre {" + nl +
i(l + 1) + "color: #e5e7eb;" + nl +
i(l + 1) + "background-color: #252f3f;" + nl +
i(l + 1) + "overflow-x: auto;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l + 1) + "line-height: 1.7142857;" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l + 1) + "border-radius: 0.375rem;" + nl +
i(l + 1) + "padding-top: 0.8571429em;" + nl +
i(l + 1) + "padding-right: 1.1428571em;" + nl +
i(l + 1) + "padding-bottom: 0.8571429em;" + nl +
i(l + 1) + "padding-left: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre code {" + nl +
i(l + 1) + "background-color: transparent;" + nl +
i(l + 1) + "border-width: 0;" + nl +
i(l + 1) + "border-radius: 0;" + nl +
i(l + 1) + "padding: 0;" + nl +
i(l + 1) + "font-weight: 400;" + nl +
i(l + 1) + "color: inherit;" + nl +
i(l + 1) + "font-size: inherit;" + nl +
i(l + 1) + "font-family: inherit;" + nl +
i(l + 1) + "line-height: inherit;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre code::before {" + nl +
i(l + 1) + 'content: "";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre code::after {" + nl +
i(l + 1) + 'content: "";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose table {" + nl +
i(l + 1) + "width: 100%;" + nl +
i(l + 1) + "table-layout: auto;" + nl +
i(l + 1) + "text-align: left;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l + 1) + "line-height: 1.7142857;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "border-bottom-width: 1px;" + nl +
i(l + 1) + "border-bottom-color: #d2d6dc;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead th {" + nl +
i(l + 1) + "vertical-align: bottom;" + nl +
i(l + 1) + "padding-right: 0.5714286em;" + nl +
i(l + 1) + "padding-bottom: 0.5714286em;" + nl +
i(l + 1) + "padding-left: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody tr {" + nl +
i(l + 1) + "border-bottom-width: 1px;" + nl +
i(l + 1) + "border-bottom-color: #e5e7eb;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody tr:last-child {" + nl +
i(l + 1) + "border-bottom-width: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody td {" + nl +
i(l + 1) + "vertical-align: top;" + nl +
i(l + 1) + "padding-top: 0.5714286em;" + nl +
i(l + 1) + "padding-right: 0.5714286em;" + nl +
i(l + 1) + "padding-bottom: 0.5714286em;" + nl +
i(l + 1) + "padding-left: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose {" + nl +
i(l + 1) + "font-size: 1rem;" + nl +
i(l + 1) + "line-height: 1.75;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose p {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose img {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose video {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose figure {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h2 code {" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h3 code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ul {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose li {" + nl +
i(l + 1) + "margin-top: 0.5em;" + nl +
i(l + 1) + "margin-bottom: 0.5em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.75em;" + nl +
i(l + 1) + "margin-bottom: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol ol," + nl +
i(l) + bp + "prose ol ul," + nl +
i(l) + bp + "prose ul ol," + nl +
i(l) + bp + "prose ul ul {" + nl +
i(l + 1) + "margin-top: 0.75em;" + nl +
i(l + 1) + "margin-bottom: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h1," + nl +
i(l) + bp + "prose h2," + nl +
i(l) + bp + "prose h3," + nl +
i(l) + bp + "prose h4 {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose", prose);
// deno-lint-ignore ban-types
const proseSm = (l: number, i: Function, nl: Function, bp: string): string => {
return i(l) + bp + "prose-sm {" + nl +
i(l + 1) + "font-size: 0.875rem;" + nl +
i(l + 1) + "line-height: 1.7142857;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm p {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-sm [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.2857143em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l + 1) + "margin-top: 0.8888889em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm blockquote {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l + 1) + "padding-left: 1.1111111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h1 {" + nl +
i(l + 1) + "font-size: 2.1428571em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l + 1) + "line-height: 1.2;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h2 {" + nl +
i(l + 1) + "font-size: 1.4285714em;" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l + 1) + "line-height: 1.4;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h3 {" + nl +
i(l + 1) + "font-size: 1.2857143em;" + nl +
i(l + 1) + "margin-top: 1.5555556em;" + nl +
i(l + 1) + "margin-bottom: 0.4444444em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h4 {" + nl +
i(l + 1) + "margin-top: 1.4285714em;" + nl +
i(l + 1) + "margin-bottom: 0.5714286em;" + nl +
i(l + 1) + "line-height: 1.4285714;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm img {" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm video {" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm figure {" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm figure figcaption {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l + 1) + "margin-top: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm code {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h2 code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h3 code {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm pre {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1.6666667;" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 1.6666667em;" + nl +
i(l + 1) + "border-radius: 0.25rem;" + nl +
i(l + 1) + "padding-top: 0.6666667em;" + nl +
i(l + 1) + "padding-right: 1em;" + nl +
i(l + 1) + "padding-bottom: 0.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ul {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm li {" + nl +
i(l + 1) + "margin-top: 0.2857143em;" + nl +
i(l + 1) + "margin-bottom: 0.2857143em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol > li {" + nl +
i(l + 1) + "padding-left: 1.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ul > li {" + nl +
i(l + 1) + "padding-left: 1.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ul > li::before {" + nl +
i(l + 1) + "height: 0.3571429em;" + nl +
i(l + 1) + "width: 0.3571429em;" + nl +
i(l + 1) + "top: calc(0.8571429em - 0.1785714em);" + nl +
i(l + 1) + "left: 0.2142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.5714286em;" + nl +
i(l + 1) + "margin-bottom: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol ol," + nl +
i(l) + bp + "prose-sm ol ul," + nl +
i(l) + bp + "prose-sm ul ol," + nl +
i(l) + bp + "prose-sm ul ul {" + nl +
i(l + 1) + "margin-top: 0.5714286em;" + nl +
i(l + 1) + "margin-bottom: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm hr {" + nl +
i(l + 1) + "margin-top: 2.8571429em;" + nl +
i(l + 1) + "margin-bottom: 2.8571429em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm table {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm thead th {" + nl +
i(l + 1) + "padding-right: 1em;" + nl +
i(l + 1) + "padding-bottom: 0.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm tbody td {" + nl +
i(l + 1) + "padding-top: 0.6666667em;" + nl +
i(l + 1) + "padding-right: 1em;" + nl +
i(l + 1) + "padding-bottom: 0.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-sm", proseSm);
// deno-lint-ignore ban-types
const proseLg = (l: number, i: Function, nl: Function, bp: string): string => {
return i(l) + bp + "prose-lg {" + nl +
i(l + 1) + "font-size: 1.125rem;" + nl +
i(l + 1) + "line-height: 1.7777778;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg p {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-lg [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.2222222em;" + nl +
i(l + 1) + "line-height: 1.4545455;" + nl +
i(l + 1) + "margin-top: 1.0909091em;" + nl +
i(l + 1) + "margin-bottom: 1.0909091em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg blockquote {" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 1.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h1 {" + nl +
i(l + 1) + "font-size: 2.6666667em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h2 {" + nl +
i(l + 1) + "font-size: 1.6666667em;" + nl +
i(l + 1) + "margin-top: 1.8666667em;" + nl +
i(l + 1) + "margin-bottom: 1.0666667em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h3 {" + nl +
i(l + 1) + "font-size: 1.3333333em;" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h4 {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 0.4444444em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg img {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg video {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg figure {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg figure figcaption {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg code {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h2 code {" + nl +
i(l + 1) + "font-size: 0.8666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h3 code {" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg pre {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.75;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "border-radius: 0.375rem;" + nl +
i(l + 1) + "padding-top: 1em;" + nl +
i(l + 1) + "padding-right: 1.5em;" + nl +
i(l + 1) + "padding-bottom: 1em;" + nl +
i(l + 1) + "padding-left: 1.5em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ul {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg li {" + nl +
i(l + 1) + "margin-top: 0.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ul > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ul > li::before {" + nl +
i(l + 1) + "width: 0.3333333em;" + nl +
i(l + 1) + "height: 0.3333333em;" + nl +
i(l + 1) + "top: calc(0.8888889em - 0.1666667em);" + nl +
i(l + 1) + "left: 0.2222222em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.8888889em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol ol," + nl +
i(l) + bp + "prose-lg ol ul," + nl +
i(l) + bp + "prose-lg ul ol," + nl +
i(l) + bp + "prose-lg ul ul {" + nl +
i(l + 1) + "margin-top: 0.8888889em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg hr {" + nl +
i(l + 1) + "margin-top: 3.1111111em;" + nl +
i(l + 1) + "margin-bottom: 3.1111111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg table {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg thead th {" + nl +
i(l + 1) + "padding-right: 0.75em;" + nl +
i(l + 1) + "padding-bottom: 0.75em;" + nl +
i(l + 1) + "padding-left: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg tbody td {" + nl +
i(l + 1) + "padding-top: 0.75em;" + nl +
i(l + 1) + "padding-right: 0.75em;" + nl +
i(l + 1) + "padding-bottom: 0.75em;" + nl +
i(l + 1) + "padding-left: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-lg", proseLg);
// deno-lint-ignore ban-types
const proseXl = (l: number, i: Function, nl: Function, bp: string): string => {
return i(l) + bp + "prose-xl {" + nl +
i(l + 1) + "font-size: 1.25rem;" + nl +
i(l + 1) + "line-height: 1.8;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl p {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-xl [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.2em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l + 1) + "margin-bottom: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl blockquote {" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 1.6em;" + nl +
i(l + 1) + "padding-left: 1.0666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h1 {" + nl +
i(l + 1) + "font-size: 2.8em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h2 {" + nl +
i(l + 1) + "font-size: 1.8em;" + nl +
i(l + 1) + "margin-top: 1.5555556em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.1111111;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h3 {" + nl +
i(l + 1) + "font-size: 1.5em;" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h4 {" + nl +
i(l + 1) + "margin-top: 1.8em;" + nl +
i(l + 1) + "margin-bottom: 0.6em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl img {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl video {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl figure {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl figure figcaption {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h2 code {" + nl +
i(l + 1) + "font-size: 0.8611111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h3 code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl pre {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l + 1) + "line-height: 1.7777778;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "border-radius: 0.5rem;" + nl +
i(l + 1) + "padding-top: 1.1111111em;" + nl +
i(l + 1) + "padding-right: 1.3333333em;" + nl +
i(l + 1) + "padding-bottom: 1.1111111em;" + nl +
i(l + 1) + "padding-left: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ul {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl li {" + nl +
i(l + 1) + "margin-top: 0.6em;" + nl +
i(l + 1) + "margin-bottom: 0.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol > li {" + nl +
i(l + 1) + "padding-left: 1.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ul > li {" + nl +
i(l + 1) + "padding-left: 1.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ul > li::before {" + nl +
i(l + 1) + "width: 0.35em;" + nl +
i(l + 1) + "height: 0.35em;" + nl +
i(l + 1) + "top: calc(0.9em - 0.175em);" + nl +
i(l + 1) + "left: 0.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.8em;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol ol," + nl +
i(l) + bp + "prose-xl ol ul," + nl +
i(l) + bp + "prose-xl ul ol," + nl +
i(l) + bp + "prose-xl ul ul {" + nl +
i(l + 1) + "margin-top: 0.8em;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl hr {" + nl +
i(l + 1) + "margin-top: 2.8em;" + nl +
i(l + 1) + "margin-bottom: 2.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl table {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl thead th {" + nl +
i(l + 1) + "padding-right: 0.6666667em;" + nl +
i(l + 1) + "padding-bottom: 0.8888889em;" + nl +
i(l + 1) + "padding-left: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl tbody td {" + nl +
i(l + 1) + "padding-top: 0.8888889em;" + nl +
i(l + 1) + "padding-right: 0.6666667em;" + nl +
i(l + 1) + "padding-bottom: 0.8888889em;" + nl +
i(l + 1) + "padding-left: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-xl", proseXl);
// deno-lint-ignore ban-types
const prose2xl = (l: number, i: Function, nl: Function, bp: string): string => {
return i(l) + bp + "prose-2xl {" + nl +
i(l + 1) + "font-size: 1.5rem;" + nl +
i(l + 1) + "line-height: 1.6666667;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl p {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-2xl [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.25em;" + nl +
i(l + 1) + "line-height: 1.4666667;" + nl +
i(l + 1) + "margin-top: 1.0666667em;" + nl +
i(l + 1) + "margin-bottom: 1.0666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl blockquote {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l + 1) + "padding-left: 1.1111111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h1 {" + nl +
i(l + 1) + "font-size: 2.6666667em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.875em;" + nl +
i(l + 1) + "line-height: 1;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h2 {" + nl +
i(l + 1) + "font-size: 2em;" + nl +
i(l + 1) + "margin-top: 1.5em;" + nl +
i(l + 1) + "margin-bottom: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.0833333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h3 {" + nl +
i(l + 1) + "font-size: 1.5em;" + nl +
i(l + 1) + "margin-top: 1.5555556em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.2222222;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h4 {" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl img {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl video {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl figure {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl figure figcaption {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl code {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h2 code {" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h3 code {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl pre {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.8;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "border-radius: 0.5rem;" + nl +
i(l + 1) + "padding-top: 1.2em;" + nl +
i(l + 1) + "padding-right: 1.6em;" + nl +
i(l + 1) + "padding-bottom: 1.2em;" + nl +
i(l + 1) + "padding-left: 1.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ul {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl li {" + nl +
i(l + 1) + "margin-top: 0.5em;" + nl +
i(l + 1) + "margin-bottom: 0.5em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ul > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ul > li::before {" + nl +
i(l + 1) + "width: 0.3333333em;" + nl +
i(l + 1) + "height: 0.3333333em;" + nl +
i(l + 1) + "top: calc(0.8333333em - 0.1666667em);" + nl +
i(l + 1) + "left: 0.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.8333333em;" + nl +
i(l + 1) + "margin-bottom: 0.8333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol ol," + nl +
i(l) + bp + "prose-2xl ol ul," + nl +
i(l) + bp + "prose-2xl ul ol," + nl +
i(l) + bp + "prose-2xl ul ul {" + nl +
i(l + 1) + "margin-top: 0.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl hr {" + nl +
i(l + 1) + "margin-top: 3em;" + nl +
i(l + 1) + "margin-bottom: 3em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl table {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.4;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl thead th {" + nl +
i(l + 1) + "padding-right: 0.6em;" + nl +
i(l + 1) + "padding-bottom: 0.8em;" + nl +
i(l + 1) + "padding-left: 0.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl tbody td {" + nl +
i(l + 1) + "padding-top: 0.8em;" + nl +
i(l + 1) + "padding-right: 0.6em;" + nl +
i(l + 1) + "padding-bottom: 0.8em;" + nl +
i(l + 1) + "padding-left: 0.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-2xl", prose2xl);
export default (identifier: string, level = 0, b = "", m = false) => {
const i = indentFormatter(m);
const nl = newlineFormatter(m)();
const bp = breakpointFormatter(b);
if (proseTypes.has(identifier)) {
return proseTypes.get(identifier)(level, i, nl, bp);
}
return;
};"#,
r#"
import {
indent as indentFormatter,
newline as newlineFormatter,
breakpoint as breakpointFormatter,
} from "./format.ts";
const proseTypes = new Map();
// deno-lint-ignore ban-types
const prose = (l, i, nl, bp): string => {
return i(l) + bp + "prose {" + nl +
i(l + 1) + "color: #374151;" + nl +
i(l + 1) + "max-width: 65ch;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose [class~="lead"] {' + nl +
i(l + 1) + "color: #4b5563;" + nl +
i(l + 1) + "font-size: 1.25em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose a {" + nl +
i(l + 1) + "color: #5850ec;" + nl +
i(l + 1) + "text-decoration: none;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose strong {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol {" + nl +
i(l + 1) + "counter-reset: list-counter;" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol > li {" + nl +
i(l + 1) + "position: relative;" + nl +
i(l + 1) + "counter-increment: list-counter;" + nl +
i(l + 1) + "padding-left: 1.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol > li::before {" + nl +
i(l + 1) + 'content: counter(list-counter) ".";' + nl +
i(l + 1) + "position: absolute;" + nl +
i(l + 1) + "font-weight: 400;" + nl +
i(l + 1) + "color: #6b7280;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ul > li {" + nl +
i(l + 1) + "position: relative;" + nl +
i(l + 1) + "padding-left: 1.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ul > li::before {" + nl +
i(l + 1) + 'content: "";' + nl +
i(l + 1) + "position: absolute;" + nl +
i(l + 1) + "background-color: #d2d6dc;" + nl +
i(l + 1) + "border-radius: 50%;" + nl +
i(l + 1) + "width: 0.375em;" + nl +
i(l + 1) + "height: 0.375em;" + nl +
i(l + 1) + "top: calc(0.875em - 0.1875em);" + nl +
i(l + 1) + "left: 0.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose hr {" + nl +
i(l + 1) + "border-color: #e5e7eb;" + nl +
i(l + 1) + "border-top-width: 1px;" + nl +
i(l + 1) + "margin-top: 3em;" + nl +
i(l + 1) + "margin-bottom: 3em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose blockquote {" + nl +
i(l + 1) + "font-weight: 500;" + nl +
i(l + 1) + "font-style: italic;" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "border-left-width: 0.25rem;" + nl +
i(l + 1) + "border-left-color: #e5e7eb;" + nl +
i(l + 1) + 'quotes: "\\201C""\\201D""\\2018""\\2019";' + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 1.6em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose blockquote p:first-of-type::before {" + nl +
i(l + 1) + "content: open-quote;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose blockquote p:last-of-type::after {" + nl +
i(l + 1) + "content: close-quote;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h1 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 800;" + nl +
i(l + 1) + "font-size: 2.25em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.1111111;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h2 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 700;" + nl +
i(l + 1) + "font-size: 1.5em;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 1em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h3 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "font-size: 1.25em;" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 0.6em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h4 {" + nl +
i(l + 1) + "color: #1a202c;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "margin-top: 1.5em;" + nl +
i(l + 1) + "margin-bottom: 0.5em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose figure figcaption {" + nl +
i(l + 1) + "color: #6b7280;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l + 1) + "line-height: 1.4285714;" + nl +
i(l + 1) + "margin-top: 0.8571429em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose code {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose code::before {" + nl +
i(l + 1) + 'content: "`";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose code::after {" + nl +
i(l + 1) + 'content: "`";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre {" + nl +
i(l + 1) + "color: #e5e7eb;" + nl +
i(l + 1) + "background-color: #252f3f;" + nl +
i(l + 1) + "overflow-x: auto;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l + 1) + "line-height: 1.7142857;" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l + 1) + "border-radius: 0.375rem;" + nl +
i(l + 1) + "padding-top: 0.8571429em;" + nl +
i(l + 1) + "padding-right: 1.1428571em;" + nl +
i(l + 1) + "padding-bottom: 0.8571429em;" + nl +
i(l + 1) + "padding-left: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre code {" + nl +
i(l + 1) + "background-color: transparent;" + nl +
i(l + 1) + "border-width: 0;" + nl +
i(l + 1) + "border-radius: 0;" + nl +
i(l + 1) + "padding: 0;" + nl +
i(l + 1) + "font-weight: 400;" + nl +
i(l + 1) + "color: inherit;" + nl +
i(l + 1) + "font-size: inherit;" + nl +
i(l + 1) + "font-family: inherit;" + nl +
i(l + 1) + "line-height: inherit;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre code::before {" + nl +
i(l + 1) + 'content: "";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose pre code::after {" + nl +
i(l + 1) + 'content: "";' + nl +
i(l) + "}" + nl +
i(l) + bp + "prose table {" + nl +
i(l + 1) + "width: 100%;" + nl +
i(l + 1) + "table-layout: auto;" + nl +
i(l + 1) + "text-align: left;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l + 1) + "line-height: 1.7142857;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l + 1) + "font-weight: 600;" + nl +
i(l + 1) + "border-bottom-width: 1px;" + nl +
i(l + 1) + "border-bottom-color: #d2d6dc;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead th {" + nl +
i(l + 1) + "vertical-align: bottom;" + nl +
i(l + 1) + "padding-right: 0.5714286em;" + nl +
i(l + 1) + "padding-bottom: 0.5714286em;" + nl +
i(l + 1) + "padding-left: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody tr {" + nl +
i(l + 1) + "border-bottom-width: 1px;" + nl +
i(l + 1) + "border-bottom-color: #e5e7eb;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody tr:last-child {" + nl +
i(l + 1) + "border-bottom-width: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody td {" + nl +
i(l + 1) + "vertical-align: top;" + nl +
i(l + 1) + "padding-top: 0.5714286em;" + nl +
i(l + 1) + "padding-right: 0.5714286em;" + nl +
i(l + 1) + "padding-bottom: 0.5714286em;" + nl +
i(l + 1) + "padding-left: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose {" + nl +
i(l + 1) + "font-size: 1rem;" + nl +
i(l + 1) + "line-height: 1.75;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose p {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose img {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose video {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose figure {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h2 code {" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h3 code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ul {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose li {" + nl +
i(l + 1) + "margin-top: 0.5em;" + nl +
i(l + 1) + "margin-bottom: 0.5em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.75em;" + nl +
i(l + 1) + "margin-bottom: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose ol ol," + nl +
i(l) + bp + "prose ol ul," + nl +
i(l) + bp + "prose ul ol," + nl +
i(l) + bp + "prose ul ul {" + nl +
i(l + 1) + "margin-top: 0.75em;" + nl +
i(l + 1) + "margin-bottom: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose h1," + nl +
i(l) + bp + "prose h2," + nl +
i(l) + bp + "prose h3," + nl +
i(l) + bp + "prose h4 {" + nl +
i(l + 1) + "color: #161e2e;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose", prose);
// deno-lint-ignore ban-types
const proseSm = (l, i, nl, bp): string => {
return i(l) + bp + "prose-sm {" + nl +
i(l + 1) + "font-size: 0.875rem;" + nl +
i(l + 1) + "line-height: 1.7142857;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm p {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-sm [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.2857143em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l + 1) + "margin-top: 0.8888889em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm blockquote {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l + 1) + "padding-left: 1.1111111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h1 {" + nl +
i(l + 1) + "font-size: 2.1428571em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l + 1) + "line-height: 1.2;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h2 {" + nl +
i(l + 1) + "font-size: 1.4285714em;" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l + 1) + "line-height: 1.4;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h3 {" + nl +
i(l + 1) + "font-size: 1.2857143em;" + nl +
i(l + 1) + "margin-top: 1.5555556em;" + nl +
i(l + 1) + "margin-bottom: 0.4444444em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h4 {" + nl +
i(l + 1) + "margin-top: 1.4285714em;" + nl +
i(l + 1) + "margin-bottom: 0.5714286em;" + nl +
i(l + 1) + "line-height: 1.4285714;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm img {" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm video {" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm figure {" + nl +
i(l + 1) + "margin-top: 1.7142857em;" + nl +
i(l + 1) + "margin-bottom: 1.7142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm figure figcaption {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l + 1) + "margin-top: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm code {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h2 code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h3 code {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm pre {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1.6666667;" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 1.6666667em;" + nl +
i(l + 1) + "border-radius: 0.25rem;" + nl +
i(l + 1) + "padding-top: 0.6666667em;" + nl +
i(l + 1) + "padding-right: 1em;" + nl +
i(l + 1) + "padding-bottom: 0.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ul {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm li {" + nl +
i(l + 1) + "margin-top: 0.2857143em;" + nl +
i(l + 1) + "margin-bottom: 0.2857143em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol > li {" + nl +
i(l + 1) + "padding-left: 1.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ul > li {" + nl +
i(l + 1) + "padding-left: 1.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ul > li::before {" + nl +
i(l + 1) + "height: 0.3571429em;" + nl +
i(l + 1) + "width: 0.3571429em;" + nl +
i(l + 1) + "top: calc(0.8571429em - 0.1785714em);" + nl +
i(l + 1) + "left: 0.2142857em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.5714286em;" + nl +
i(l + 1) + "margin-bottom: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.1428571em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm ol ol," + nl +
i(l) + bp + "prose-sm ol ul," + nl +
i(l) + bp + "prose-sm ul ol," + nl +
i(l) + bp + "prose-sm ul ul {" + nl +
i(l + 1) + "margin-top: 0.5714286em;" + nl +
i(l + 1) + "margin-bottom: 0.5714286em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm hr {" + nl +
i(l + 1) + "margin-top: 2.8571429em;" + nl +
i(l + 1) + "margin-bottom: 2.8571429em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm table {" + nl +
i(l + 1) + "font-size: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm thead th {" + nl +
i(l + 1) + "padding-right: 1em;" + nl +
i(l + 1) + "padding-bottom: 0.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm tbody td {" + nl +
i(l + 1) + "padding-top: 0.6666667em;" + nl +
i(l + 1) + "padding-right: 1em;" + nl +
i(l + 1) + "padding-bottom: 0.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-sm > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-sm", proseSm);
// deno-lint-ignore ban-types
const proseLg = (l, i, nl, bp): string => {
return i(l) + bp + "prose-lg {" + nl +
i(l + 1) + "font-size: 1.125rem;" + nl +
i(l + 1) + "line-height: 1.7777778;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg p {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-lg [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.2222222em;" + nl +
i(l + 1) + "line-height: 1.4545455;" + nl +
i(l + 1) + "margin-top: 1.0909091em;" + nl +
i(l + 1) + "margin-bottom: 1.0909091em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg blockquote {" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 1.6666667em;" + nl +
i(l + 1) + "padding-left: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h1 {" + nl +
i(l + 1) + "font-size: 2.6666667em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h2 {" + nl +
i(l + 1) + "font-size: 1.6666667em;" + nl +
i(l + 1) + "margin-top: 1.8666667em;" + nl +
i(l + 1) + "margin-bottom: 1.0666667em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h3 {" + nl +
i(l + 1) + "font-size: 1.3333333em;" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h4 {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 0.4444444em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg img {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg video {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg figure {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg figure figcaption {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg code {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h2 code {" + nl +
i(l + 1) + "font-size: 0.8666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h3 code {" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg pre {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.75;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "border-radius: 0.375rem;" + nl +
i(l + 1) + "padding-top: 1em;" + nl +
i(l + 1) + "padding-right: 1.5em;" + nl +
i(l + 1) + "padding-bottom: 1em;" + nl +
i(l + 1) + "padding-left: 1.5em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ul {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg li {" + nl +
i(l + 1) + "margin-top: 0.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ul > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ul > li::before {" + nl +
i(l + 1) + "width: 0.3333333em;" + nl +
i(l + 1) + "height: 0.3333333em;" + nl +
i(l + 1) + "top: calc(0.8888889em - 0.1666667em);" + nl +
i(l + 1) + "left: 0.2222222em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.8888889em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg ol ol," + nl +
i(l) + bp + "prose-lg ol ul," + nl +
i(l) + bp + "prose-lg ul ol," + nl +
i(l) + bp + "prose-lg ul ul {" + nl +
i(l + 1) + "margin-top: 0.8888889em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg hr {" + nl +
i(l + 1) + "margin-top: 3.1111111em;" + nl +
i(l + 1) + "margin-bottom: 3.1111111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg table {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg thead th {" + nl +
i(l + 1) + "padding-right: 0.75em;" + nl +
i(l + 1) + "padding-bottom: 0.75em;" + nl +
i(l + 1) + "padding-left: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg tbody td {" + nl +
i(l + 1) + "padding-top: 0.75em;" + nl +
i(l + 1) + "padding-right: 0.75em;" + nl +
i(l + 1) + "padding-bottom: 0.75em;" + nl +
i(l + 1) + "padding-left: 0.75em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-lg > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-lg", proseLg);
// deno-lint-ignore ban-types
const proseXl = (l, i, nl, bp): string => {
return i(l) + bp + "prose-xl {" + nl +
i(l + 1) + "font-size: 1.25rem;" + nl +
i(l + 1) + "line-height: 1.8;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl p {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-xl [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.2em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l + 1) + "margin-bottom: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl blockquote {" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 1.6em;" + nl +
i(l + 1) + "padding-left: 1.0666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h1 {" + nl +
i(l + 1) + "font-size: 2.8em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.8571429em;" + nl +
i(l + 1) + "line-height: 1;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h2 {" + nl +
i(l + 1) + "font-size: 1.8em;" + nl +
i(l + 1) + "margin-top: 1.5555556em;" + nl +
i(l + 1) + "margin-bottom: 0.8888889em;" + nl +
i(l + 1) + "line-height: 1.1111111;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h3 {" + nl +
i(l + 1) + "font-size: 1.5em;" + nl +
i(l + 1) + "margin-top: 1.6em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.3333333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h4 {" + nl +
i(l + 1) + "margin-top: 1.8em;" + nl +
i(l + 1) + "margin-bottom: 0.6em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl img {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl video {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl figure {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl figure figcaption {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h2 code {" + nl +
i(l + 1) + "font-size: 0.8611111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h3 code {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl pre {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l + 1) + "line-height: 1.7777778;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "border-radius: 0.5rem;" + nl +
i(l + 1) + "padding-top: 1.1111111em;" + nl +
i(l + 1) + "padding-right: 1.3333333em;" + nl +
i(l + 1) + "padding-bottom: 1.1111111em;" + nl +
i(l + 1) + "padding-left: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ul {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl li {" + nl +
i(l + 1) + "margin-top: 0.6em;" + nl +
i(l + 1) + "margin-bottom: 0.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol > li {" + nl +
i(l + 1) + "padding-left: 1.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ul > li {" + nl +
i(l + 1) + "padding-left: 1.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ul > li::before {" + nl +
i(l + 1) + "width: 0.35em;" + nl +
i(l + 1) + "height: 0.35em;" + nl +
i(l + 1) + "top: calc(0.9em - 0.175em);" + nl +
i(l + 1) + "left: 0.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.8em;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl ol ol," + nl +
i(l) + bp + "prose-xl ol ul," + nl +
i(l) + bp + "prose-xl ul ol," + nl +
i(l) + bp + "prose-xl ul ul {" + nl +
i(l + 1) + "margin-top: 0.8em;" + nl +
i(l + 1) + "margin-bottom: 0.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl hr {" + nl +
i(l + 1) + "margin-top: 2.8em;" + nl +
i(l + 1) + "margin-bottom: 2.8em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl table {" + nl +
i(l + 1) + "font-size: 0.9em;" + nl +
i(l + 1) + "line-height: 1.5555556;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl thead th {" + nl +
i(l + 1) + "padding-right: 0.6666667em;" + nl +
i(l + 1) + "padding-bottom: 0.8888889em;" + nl +
i(l + 1) + "padding-left: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl tbody td {" + nl +
i(l + 1) + "padding-top: 0.8888889em;" + nl +
i(l + 1) + "padding-right: 0.6666667em;" + nl +
i(l + 1) + "padding-bottom: 0.8888889em;" + nl +
i(l + 1) + "padding-left: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-xl > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-xl", proseXl);
// deno-lint-ignore ban-types
const prose2xl = (l, i, nl, bp) => {
return i(l) + bp + "prose-2xl {" + nl +
i(l + 1) + "font-size: 1.5rem;" + nl +
i(l + 1) + "line-height: 1.6666667;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl p {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + 'prose-2xl [class~="lead"] {' + nl +
i(l + 1) + "font-size: 1.25em;" + nl +
i(l + 1) + "line-height: 1.4666667;" + nl +
i(l + 1) + "margin-top: 1.0666667em;" + nl +
i(l + 1) + "margin-bottom: 1.0666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl blockquote {" + nl +
i(l + 1) + "margin-top: 1.7777778em;" + nl +
i(l + 1) + "margin-bottom: 1.7777778em;" + nl +
i(l + 1) + "padding-left: 1.1111111em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h1 {" + nl +
i(l + 1) + "font-size: 2.6666667em;" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0.875em;" + nl +
i(l + 1) + "line-height: 1;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h2 {" + nl +
i(l + 1) + "font-size: 2em;" + nl +
i(l + 1) + "margin-top: 1.5em;" + nl +
i(l + 1) + "margin-bottom: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.0833333;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h3 {" + nl +
i(l + 1) + "font-size: 1.5em;" + nl +
i(l + 1) + "margin-top: 1.5555556em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.2222222;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h4 {" + nl +
i(l + 1) + "margin-top: 1.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l + 1) + "line-height: 1.5;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl img {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl video {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl figure {" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl figure > * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl figure figcaption {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.6;" + nl +
i(l + 1) + "margin-top: 1em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl code {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h2 code {" + nl +
i(l + 1) + "font-size: 0.875em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h3 code {" + nl +
i(l + 1) + "font-size: 0.8888889em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl pre {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.8;" + nl +
i(l + 1) + "margin-top: 2em;" + nl +
i(l + 1) + "margin-bottom: 2em;" + nl +
i(l + 1) + "border-radius: 0.5rem;" + nl +
i(l + 1) + "padding-top: 1.2em;" + nl +
i(l + 1) + "padding-right: 1.6em;" + nl +
i(l + 1) + "padding-bottom: 1.2em;" + nl +
i(l + 1) + "padding-left: 1.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ul {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl li {" + nl +
i(l + 1) + "margin-top: 0.5em;" + nl +
i(l + 1) + "margin-bottom: 0.5em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol > li:before {" + nl +
i(l + 1) + "left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ul > li {" + nl +
i(l + 1) + "padding-left: 1.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ul > li::before {" + nl +
i(l + 1) + "width: 0.3333333em;" + nl +
i(l + 1) + "height: 0.3333333em;" + nl +
i(l + 1) + "top: calc(0.8333333em - 0.1666667em);" + nl +
i(l + 1) + "left: 0.25em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ul > li p {" + nl +
i(l + 1) + "margin-top: 0.8333333em;" + nl +
i(l + 1) + "margin-bottom: 0.8333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ul > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ul > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ol > li > :first-child {" + nl +
i(l + 1) + "margin-top: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > ol > li > :last-child {" + nl +
i(l + 1) + "margin-bottom: 1.3333333em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl ol ol," + nl +
i(l) + bp + "prose-2xl ol ul," + nl +
i(l) + bp + "prose-2xl ul ol," + nl +
i(l) + bp + "prose-2xl ul ul {" + nl +
i(l + 1) + "margin-top: 0.6666667em;" + nl +
i(l + 1) + "margin-bottom: 0.6666667em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl hr {" + nl +
i(l + 1) + "margin-top: 3em;" + nl +
i(l + 1) + "margin-bottom: 3em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl hr + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h2 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h3 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl h4 + * {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl table {" + nl +
i(l + 1) + "font-size: 0.8333333em;" + nl +
i(l + 1) + "line-height: 1.4;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl thead th {" + nl +
i(l + 1) + "padding-right: 0.6em;" + nl +
i(l + 1) + "padding-bottom: 0.8em;" + nl +
i(l + 1) + "padding-left: 0.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl thead th:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl thead th:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl tbody td {" + nl +
i(l + 1) + "padding-top: 0.8em;" + nl +
i(l + 1) + "padding-right: 0.6em;" + nl +
i(l + 1) + "padding-bottom: 0.8em;" + nl +
i(l + 1) + "padding-left: 0.6em;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl tbody td:first-child {" + nl +
i(l + 1) + "padding-left: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl tbody td:last-child {" + nl +
i(l + 1) + "padding-right: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > :first-child {" + nl +
i(l + 1) + "margin-top: 0;" + nl +
i(l) + "}" + nl +
i(l) + bp + "prose-2xl > :last-child {" + nl +
i(l + 1) + "margin-bottom: 0;" + nl +
i(l) + "}" + nl;
};
proseTypes.set("prose-2xl", prose2xl);
export default ((identifier, level = 0, b = "", m = false) => {
const i = indentFormatter(m);
const nl = newlineFormatter(m)();
const bp = breakpointFormatter(b);
if (proseTypes.has(identifier)) {
return proseTypes.get(identifier)(level, i, nl, bp);
}
return;
});
"#,
ok_if_code_eq
);
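// Postfix non-null assertions (`!`) are stripped.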
to!(bin_01, "a!!!! + b!!!!!! + c!!!!!", "a + b + c");
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| tr(),
deno_7413_1,
"
import { a } from './foo';
import { Type } from './types';
",
"
"
);
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| tr(),
deno_7413_2,
"
import './foo';
",
"
import './foo';
"
);
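// With `ImportsNotUsedAsValues::Preserve`, unused named imports become bare side-effect imports.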
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| {
tr_config(
Some(strip::Config {
no_empty_export: true,
import_not_used_as_values: strip::ImportsNotUsedAsValues::Preserve,
..Default::default()
}),
None,
)
},
deno_7413_3,
"
import { a } from './foo';
import { Type } from './types';
",
"
import './foo';
import './types';
"
);
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| tr(),
issue_1124,
"
import { Type } from './types';
export type { Type };
",
"
"
);
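// Stripping chained with async-to-generator: async methods are rewritten into `_asyncToGenerator` wrappers.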
test!(
Syntax::Typescript(TsConfig {
..Default::default()
}),
|_| chain!(tr(), async_to_generator()),
issue_1235_1,
"
class Service {
async is(a: string): Promise<boolean> {
return a.toUpperCase() === a;
}
}
(async() => { await (new Service()).is('ABC'); })();
",
"
class Service {
is(a) {
return _asyncToGenerator(function* () {
return a.toUpperCase() === a;
})();
}
}
_asyncToGenerator(function* () {
yield new Service().is('ABC');
})();
"
);
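// Stripping chained with optional-chaining lowering; parameter type annotations are dropped.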
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| chain!(tr(), optional_chaining(Default::default())),
issue_1149_1,
"
const tmp = tt?.map((t: any) => t).join((v: any) => v);
",
"
const tmp = tt === null || tt === void 0 ? void 0 : tt.map((t) => t).join((v) => v);
"
);
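// Stripping chained with nullish-coalescing lowering; the interface declaration is erased.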
test!(
Syntax::Typescript(TsConfig {
..Default::default()
}),
|_| chain!(tr(), nullish_coalescing(Default::default())),
issue_1123_1,
r#"
interface SuperSubmission {
[key: string]: any;
}
const normalizedQuestionSet: any = {};
const submissions: SuperSubmission[] = (
normalizedQuestionSet.submissionIds ?? []
).map(
(id, index): SuperSubmission => {
const submission = normalizedQuestionSet.submissions?.[id];
const submissionAnswers = (submission.answers ?? []).map(
(answerId) => normalizedQuestionSet.answers?.[answerId]
);
console.log(id, index);
return {
type: "super-submission",
};
}
);
console.log(submissions);
"#,
r#"
const normalizedQuestionSet = {
};
var _submissionIds;
const submissions = ((_submissionIds = normalizedQuestionSet.submissionIds) !== null && _submissionIds !== void 0 ? _submissionIds : []).map((id, index)=>{
const submission = normalizedQuestionSet.submissions?.[id];
var _answers;
const submissionAnswers = ((_answers = submission.answers) !== null && _answers !== void 0 ? _answers : []).map((answerId)=>normalizedQuestionSet.answers?.[answerId]
);
console.log(id, index);
return {
type: "super-submission"
};
});
console.log(submissions);
"#
);
// compile_to_class_constructor_collision_ignores_types
test!(
Syntax::Typescript(TsConfig {
..Default::default()
}),
|_| tr_config(
Some(strip::Config {
use_define_for_class_fields: true,
no_empty_export: true,
..Default::default()
}),
None
),
compile_to_class_constructor_collision_ignores_types,
r#"
class C {
// Output should not use `_initialiseProps`
x: T;
y = 0;
constructor(T) {}
}
"#,
r#"
class C {
x;
y = 0;
// Output should not use `_initialiseProps`
constructor(T) {
}
}
"#
);
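// Decorated getters and methods are lowered through the `_decorate` helper.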
test!(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
|_| tr_config(None, Some(Default::default())),
issue_367,
"
// before
import { bind } from 'some';
class A {
@bind
public get foo() {
return 1;
}
@bind
public bar() {
return 1;
}
}",
"import { bind } from 'some';
let A = _decorate([], function(_initialize) {
class A1{
constructor(){
_initialize(this);
}
}
return {
F: A1,
d: [{
kind: 'get',
decorators: [bind],
key: 'foo',
value: function foo() {
return 1;
}
}, {
kind: 'method',
decorators: [bind],
key: 'bar',
value: function bar() {
return 1;
}
}]
};
});
"
);
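// Plain value exports are kept while `export type` clauses are dropped.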
to!(
deno_8978,
"
import { any } from './dep.ts';
export { any };
export type { any as t };
",
"
import { any } from './dep.ts';
export { any };
"
);
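// Only runtime namespace members survive: exported functions and consts are attached to the
// namespace object, while type aliases are erased.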
to!(
deno_9097,
"
export namespace util {
export type AssertEqual<T, Expected> = T extends Expected
? Expected extends T
? true
: false
: false;
export function assertNever(_x: never): never {
throw new Error();
}
export type Omit<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>>;
export type OmitKeys<T, K extends string> = Pick<T, Exclude<keyof T, K>>;
export type MakePartial<T, K extends keyof T> = Omit<T, K> &
Partial<Pick<T, K>>;
export const arrayToEnum = <T extends string, U extends [T, ...T[]]>(
items: U
): { [k in U[number]]: k } => {
};
export const getValidEnumValues = (obj: any) => {
};
export const getValues = (obj: any) => {
};
export const objectValues = (obj: any) => {
};
}
",
"
export var util;
(function (util1) {
function assertNever(_x) {
throw new Error();
}
util1.assertNever = assertNever;
util1.arrayToEnum = (items) => {
};
util1.getValidEnumValues = (obj) => {
};
util1.getValues = (obj) => {
};
util1.objectValues = (obj) => {
};
})(util || (util = {}));
"
);
to!(
namespace_001,
"
export namespace util {
const c = 3;
export const [a, b] = [1, 2, 3];
}
",
"
export var util;
(function (util1) {
const c = 3;
[util1.a, util1.b] = [1, 2, 3];
})(util || (util = {}));
"
);
to!(
namespace_002,
"
export namespace util {
const c = 3;
export function foo() {
}
function bar() {
}
}
",
"
export var util;
(function (util1) {
const c = 3;
function foo() {
}
util1.foo = foo;
function bar() {
}
})(util || (util = {}));
"
);
to!(
issue_1329,
"
namespace Test {
export enum DummyValues {
A = 'A',
B = 'B',
}
}
console(Test.DummyValues.A);
",
"
var Test;
(function(Test1) {
let DummyValues;
(function(DummyValues) {
DummyValues['A'] = 'A';
DummyValues['B'] = 'B';
})(DummyValues || (DummyValues = {
}));
Test1.DummyValues = DummyValues;
})(Test || (Test = {
}));
console(Test.DummyValues.A);
"
);
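// A `declare namespace` merged with a class is erased; only the runtime class body remains.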
to!(
deno_9289_1,
"
export class TestClass {
public testMethod (args: TestClass.TestArgs)
{
return args.param1;
}
}
declare namespace TestClass {
export interface TestArgs {
param1: boolean;
}
}
",
"
export class TestClass {
testMethod(args) {
return args.param1;
}
}
"
);
to!(
deno_9289_2,
"
declare namespace TestClass {
export interface TestArgs {
param1: boolean;
}
}
",
"
"
);
to!(
issue_1383,
"
declare global {
const process: Process;
}
export {}
",
"
"
);
test_with_config!(
issue_1472_1_define,
strip::Config {
use_define_for_class_fields: true,
no_empty_export: true,
..Default::default()
},
"
class A extends Object {
a = 1;
constructor(public b = 2) {
super();
}
}
",
"
class A extends Object {
b;
a = 1;
constructor(b = 2){
super();
this.b = b;
}
}
"
);
test_with_config!(
issue_1472_1_no_define,
strip::Config {
no_empty_export: true,
use_define_for_class_fields: false,
..Default::default()
},
"
class A extends Object {
a = 1;
constructor(public b = 2) {
super();
}
}
",
"
class A extends Object {
constructor(b = 2){
super();
this.b = b;
this.a = 1;
}
}
"
);
to!(
issue_1497_1,
"
class A {
[(console.log(1), 'a')] = 1;
static [(console.log(2), 'b')] = 2;
}
",
"
var _key, _key1;
class A {
constructor() {
this[_key] = 1;
}
}
_key = (console.log(1), 'a');
_key1 = (console.log(2), 'b');
A[_key1] = 2;
"
);
to!(
issue_1497_2,
"
class A {
[(console.log(1), 'a')] = 1;
static [(console.log(2), 'b')] = 2;
[(console.log(3), 'c')]() {}
}
",
"
var _key, _key1;
class A {
[(_key = (console.log(1), 'a'), _key1 = (console.log(2), 'b'), console.log(3), 'c')]() {}
constructor() {
this[_key] = 1;
}
}
A[_key1] = 2;
"
);
to!(
issue_1515_1,
"
export class A {}
export namespace A {
export class B extends A {}
}
",
"
export class A {
}
(function(A1) {
class B extends A {
}
A1.B = B;
})(A || (A = {
}));
"
);
to!(
issue_1515_2,
"
export namespace A {
export class B extends A {}
}
export enum A {}
",
"
export var A;
(function(A1) {
class B extends A {
}
A1.B = B;
})(A || (A = {
}));
(function(A) {
})(A || (A = {
}));
"
);
to!(
issue_1515_3,
"
export class A {}
export enum A {}
",
"
export class A {
}
(function(A) {
})(A || (A = {
}));
"
);
to!(
class_expression_sequence,
"
const A = class {
static a = 1;
}
",
"
var _class;
const A = (_class = class {},
_class.a = 1,
_class);
"
);
to!(
issue_1508_1,
"
declare namespace twttr {
export const txt: typeof import('twitter-text')
}
",
""
);
to!(
issue_1517_1,
"
interface X {
get foo(): string;
set foo(v: string | number);
}
",
""
);
to!(
issue_1517_2,
"
type Y = {
get bar(): string;
set bar(v: string | number);
}
",
""
);
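// The import_shadow_* cases: an import shadowed by a same-named local binding is dropped,
// while imports that are still read (e.g. as a destructuring default value) are kept.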
to!(
import_shadow_named,
"
import { Test } from 'test';
const Test = 2;
console.log(Test);
",
"
const Test = 2;
console.log(Test);
"
);
to!(
import_shadow_default,
"
import Test from 'test';
const Test = 2;
console.log(Test);
",
"
const Test = 2;
console.log(Test);
"
);
to!(
import_shadow_namespace,
"
import * as Test from 'test';
const Test = 2;
console.log(Test);
",
"
const Test = 2;
console.log(Test);
"
);
to!(
import_shadow_array_pat,
"
import { Test } from 'test';
const [Test] = [];
console.log(a);
",
"
const [Test] = [];
console.log(a);
"
);
to!(
import_shadow_array_pat_default,
"
import { Test } from 'test';
const [a = Test] = [];
console.log(a);
",
"
import { Test } from 'test';
const [a = Test] = [];
console.log(a);
"
);
to!(
import_shadow_object_pat,
"
import { Test } from 'test';
const {Test: a} = {};
console.log(a);
",
"
const {Test: a} = {};
console.log(a);
"
);
to!(
import_shadow_object_pat_default,
"
import { Test } from 'test';
const {a = Test} = {};
console.log(Test);
",
"
import { Test } from 'test';
const {a = Test} = {};
console.log(Test);
"
);
to!(
import_shadow_type,
"
import { Test } from 'test';
interface Test {}
",
""
);
to!(
import_concrete,
"
import { Test } from 'test';
console.log(Test);
",
"
import { Test } from 'test';
console.log(Test);
"
);
to!(
import_shadow_type_concrete,
"
import { Test } from 'test';
interface Test {}
console.log(Test);
",
"
import { Test } from 'test';
console.log(Test);
"
);
to!(
import_hoist,
"
console.log(Test);
import { Test } from 'test';
",
"
console.log(Test);
import { Test } from 'test';
"
);
to!(
import_shadow_hoist,
"
const Test = 2;
console.log(Test);
import { Test } from 'test';
",
"
const Test = 2;
console.log(Test);
"
);
to!(
import_shadow_hoist_type,
"
interface Test {}
import { Test } from 'test';
",
""
);
to!(
import_shadow_hoist_type_concrete,
"
interface Test {}
console.log(Test);
import { Test } from 'test';
",
"
console.log(Test);
import { Test } from 'test';
"
);
to!(
issue_1448_1,
"
import F = require('yaml')
console.log(F)
",
"
const F = require('yaml');
console.log(F)
"
);
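// Bodiless constructor overloads and field type declarations are erased.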
to!(
constructor_1,
"export class Query {
public text: string;
public args: EncodedArg[];
public fields?: string[];
constructor(config: QueryObjectConfig);
constructor(text: string, ...args: unknown[]);
}",
"
export class Query {
}"
);
to!(
constructor_2,
"export class Context {
app!: Application;
request!: ServerRequest;
url!: URL;
response: Response & { headers: Headers } = { headers: new Headers() };
params: Record<string, string> = {};
customContext: any;
#store?: Map<string | symbol, unknown>;
#body: Promise<unknown> | undefined;
constructor(opts: ContextOptions);
constructor(c: Context);
constructor(optionsOrContext: ContextOptions | Context) {
if (optionsOrContext instanceof Context) {
Object.assign(this, optionsOrContext);
this.customContext = this;
return;
}
}
}",
"
export class Context {
#store;
#body;
constructor(optionsOrContext){
this.response = {
headers: new Headers()
};
this.params = {
};
if (optionsOrContext instanceof Context) {
Object.assign(this, optionsOrContext);
this.customContext = this;
return;
}
}
}
"
);
to!(
issue_1593,
"
export = 'something';
",
"
module.exports = 'something';
"
);
to!(
deno_10462,
"
import { foo } from './temp2.ts';
const _: foo = null;
console.log({ foo: 1 });
",
"
const _ = null;
console.log({ foo: 1 });
"
);
to!(
pr_1835,
r#"
import { A } from "./a";
import { B } from "./b";
import { C } from "./c";
const { A: AB } = B;
const { CB = C } = B;
console.log(A, AB, CB);
"#,
r#"
import { A } from "./a";
import { B } from "./b";
import { C } from "./c";
const { A: AB } = B;
const { CB = C } = B;
console.log(A, AB, CB);"#
);
to!(
deno_10684,
"
import { Foo } from './temp2.ts';
const a: Foo = null;
console.log(a);
const b = { Foo: 1 };
console.log(b.Foo)
",
"
const a = null;
console.log(a);
const b = {
Foo: 1
};
console.log(b.Foo);
"
);
to!(
issue_1869_3,
"
var _class;
let TestClass = _class = someClassDecorator((_class = class TestClass {
static Something = 'hello';
static SomeProperties = {
firstProp: TestClass.Something
};
}) || _class) || _class;
function someClassDecorator(c) {
return c;
}
",
"
var _class;
var _class1;
let TestClass = _class1 = someClassDecorator((_class1 = (_class = class TestClass {
}, _class.Something = 'hello', _class.SomeProperties = {
firstProp: _class.Something
}, _class)) || _class1) || _class1;
function someClassDecorator(c) {
return c;
}
"
);
to!(
issue_2219,
"
import type { TestInfo } from './config'
export { TestInfo }
",
"
"
);
to!(
issue_1122_2,
"
const identifier = 'bar';
class Foo {
identifier = 5;
}
",
"
const identifier = \"bar\";
class Foo {
constructor(){
this.identifier = 5;
}
}
"
);
to!(
issue_1122_5,
"
const identifier = 'bar';
class Foo {
static identifier = 5;
}
",
"
const identifier = \"bar\";
class Foo {
}
Foo.identifier = 5;
"
);
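// `import X = ns.Member` becomes a `var` binding when used as a value and is erased when
// only used as a type.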
to!(
deno_12395_import_equals_1,
"
import * as mongo from 'https://deno.land/x/[email protected]/mod.ts';
import MongoClient = mongo.MongoClient;
const mongoClient = new MongoClient();
",
"
import * as mongo from 'https://deno.land/x/[email protected]/mod.ts';
var MongoClient = mongo.MongoClient;
const mongoClient = new MongoClient();
"
);
to!(
deno_12395_import_equals_2,
"
import * as mongo from 'https://deno.land/x/[email protected]/mod.ts';
import MongoClient = mongo.MongoClient;
const mongoClient: MongoClient = {};
",
"
const mongoClient = {};
"
);
test_with_config!(
deno_12532_declare_class_prop,
strip::Config {
use_define_for_class_fields: true,
no_empty_export: true,
..Default::default()
},
"
export class Foo {
x: number;
constructor(x: number) {
this.x = x;
}
}
export class Bar extends Foo {
declare x: 123;
constructor() {
super(123);
}
}
",
"
export class Foo {
x;
constructor(x){
this.x = x;
}
}
export class Bar extends Foo {
constructor() {
super(123);
}
}
"
);
to!(
issue_2613,
"
export = function (foo: string, bar: number): boolean {
return true
};
",
"
module.exports = function (foo, bar) {
return true
};
"
);
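// An enum member that references an earlier member has that member's value inlined.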
to!(
issue_2809,
"enum Color {
Aqua = '#00ffff',
Cyan = Aqua,
}",
"var Color;
(function (Color) {
Color['Aqua'] = '#00ffff';
Color['Cyan'] = '#00ffff';
})(Color || (Color = {}));"
);
to!(
issue_2886_enum_namespace_block_scoping,
"
export enum Enum {
test = 1
}
namespace Namespace {
export enum Enum {
test = 1
}
export enum Enum {
test2 = 1
}
}
{
enum Enum {
test = 1
}
namespace Namespace {
export enum Enum {
test = 1
}
}
}
{
enum Enum {
test = 1
}
namespace Namespace {
export enum Enum {
test = 1
}
}
}
",
r#"
export var Enum;
(function (Enum) {
Enum[Enum["test"] = 1] = "test";
})(Enum || (Enum = {}));
var Namespace;
(function(Namespace1) {
let Enum;
(function(Enum) {
Enum[Enum["test"] = 1] = "test";
})(Enum || (Enum = {
}));
(function(Enum) {
Enum[Enum["test2"] = 1] = "test2";
})(Enum || (Enum = {
}));
Namespace1.Enum = Enum;
})(Namespace || (Namespace = {
}));
{
let Enum;
(function (Enum) {
Enum[Enum["test"] = 1] = "test";
})(Enum || (Enum = {}));
let Namespace2;
(function(Namespace) {
let Enum;
(function(Enum) {
Enum[Enum["test"] = 1] = "test";
})(Enum || (Enum = {
}));
Namespace.Enum = Enum;
})(Namespace2 || (Namespace2 = {
}));
}
{
let Enum;
(function (Enum) {
Enum[Enum["test"] = 1] = "test";
})(Enum || (Enum = {}));
let Namespace3;
(function(Namespace) {
let Enum;
(function(Enum) {
Enum[Enum["test"] = 1] = "test";
})(Enum || (Enum = {
}));
Namespace.Enum = Enum;
})(Namespace3 || (Namespace3 = {
}));
}
"#
);
#[testing::fixture("tests/fixture/**/input.ts")]
#[testing::fixture("tests/fixture/**/input.tsx")]
fn exec(input: PathBuf) {
let output = input.with_file_name("output.js");
test_fixture(
Syntax::Typescript(TsConfig {
tsx: input.to_string_lossy().ends_with(".tsx"),
..Default::default()
}),
&|_| tr(),
&input,
&output,
);
}
| 31.595091 | 173 | 0.409249 |
1494d6f457ff2e5b9a561dd7e0975fa918676a17 | 2,878 | #[cfg(test)]
mod tests {
use crate::queue::PriorityQueue;
#[test]
fn priority_queue_test() {
let mut queue = PriorityQueue::new();
queue.push(150);
queue.push(80);
queue.push(40);
queue.push(30);
queue.push(10);
queue.push(70);
queue.push(110);
queue.push(100);
queue.push(20);
queue.push(90);
queue.push(60);
queue.push(50);
queue.push(120);
queue.push(140);
queue.push(130);
let min = queue.peek();
assert_eq!(min, Some(&10));
}
}
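/// A binary min-heap stored in a `Vec`; `peek` and `pop` always yield the smallest item.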
struct PriorityQueue<T: Ord + Copy> {
items: Vec<T>,
}
impl<T: Ord + Copy> PriorityQueue<T> {
pub fn new() -> PriorityQueue<T> {
PriorityQueue { items: Vec::new() }
}
pub fn push(&mut self, item: T) {
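        // Append the item, then sift it up until its parent is no longer larger.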
self.items.push(item);
let length = self.items.len();
if length <= 1 {
return;
}
let mut index = length - 1;
let mut parent = (index - 1) / 2;
while self.items[parent] > item {
            self.items.swap(parent, index);
index = parent;
if index < 1 {
break;
}
parent = (index - 1) / 2
}
}
pub fn peek(&self) -> Option<&T> {
if self.items.len() > 0 {
Some(&self.items[0])
} else {
None
}
}
pub fn pop(&mut self) -> Option<T> {
if self.is_empty() {
None
} else {
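            // Swap the root (the minimum) with the last element, pop it off,
            // then sift the new root down to restore the heap property.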
let length = self.items.len();
            self.items.swap(0, length - 1);
let result = self.items.pop().unwrap();
let mut index = 0;
let mut child_left = 2 * index + 1;
let mut child_right = 2 * index + 2;
while self.items.len() > child_left {
let mut child = child_left;
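                // Prefer the right child when it exists and is smaller.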
if self.items.len() > child_right
&& self.items[child_right] < self.items[child_left]
{
child = child_right
}
if self.items[child] < self.items[index] {
                    self.items.swap(child, index);
index = child;
child_left = 2 * index + 1;
child_right = 2 * index + 2;
} else {
break;
}
}
Some(result)
}
}
pub fn is_empty(&self) -> bool {
self.items.is_empty()
}
pub fn len(&self) -> usize {
self.items.len()
}
}
| 24.389831 | 71 | 0.442321 |
232aa18ffd3a349f357c6e5381c16857877a0e30 | 18,935 | //! Panic support in the standard library.
#![stable(feature = "std_panic", since = "1.9.0")]
use crate::any::Any;
use crate::cell::UnsafeCell;
use crate::collections;
use crate::fmt;
use crate::future::Future;
use crate::ops::{Deref, DerefMut};
use crate::panicking;
use crate::pin::Pin;
use crate::ptr::{NonNull, Unique};
use crate::rc::Rc;
use crate::stream::Stream;
use crate::sync::atomic;
use crate::sync::{Arc, Mutex, RwLock};
use crate::task::{Context, Poll};
use crate::thread::Result;
#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")]
#[allow_internal_unstable(libstd_sys_internals)]
#[cfg_attr(not(test), rustc_diagnostic_item = "std_panic_2015_macro")]
#[rustc_macro_transparency = "semitransparent"]
pub macro panic_2015 {
() => ({
$crate::rt::begin_panic("explicit panic")
}),
($msg:expr $(,)?) => ({
$crate::rt::begin_panic($msg)
}),
($fmt:expr, $($arg:tt)+) => ({
$crate::rt::begin_panic_fmt(&$crate::format_args!($fmt, $($arg)+))
}),
}
#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")]
pub use core::panic::panic_2021;
#[stable(feature = "panic_hooks", since = "1.10.0")]
pub use crate::panicking::{set_hook, take_hook};
#[stable(feature = "panic_hooks", since = "1.10.0")]
pub use core::panic::{Location, PanicInfo};
/// Panic the current thread with the given message as the panic payload.
///
/// The message can be of any (`Any + Send`) type, not just strings.
///
/// The message is wrapped in a `Box<'static + Any + Send>`, which can be
/// accessed later using [`PanicInfo::payload`].
///
/// See the [`panic!`] macro for more information about panicking.
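///
/// # Examples
///
/// A minimal sketch of catching such a panic and downcasting the payload:
///
/// ```
/// use std::panic;
///
/// let result = panic::catch_unwind(|| {
///     panic::panic_any(42_i32);
/// });
/// let payload = result.unwrap_err();
/// assert_eq!(payload.downcast_ref::<i32>(), Some(&42));
/// ```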
#[cfg(not(target_arch = "bpf"))]
#[stable(feature = "panic_any", since = "1.51.0")]
#[inline]
pub fn panic_any<M: 'static + Any + Send>(msg: M) -> ! {
crate::panicking::begin_panic(msg);
}
/// A marker trait which represents "panic safe" types in Rust.
///
/// This trait is implemented by default for many types and behaves similarly in
/// terms of inference of implementation to the [`Send`] and [`Sync`] traits. The
/// purpose of this trait is to encode what types are safe to cross a [`catch_unwind`]
/// boundary with no fear of unwind safety.
///
/// ## What is unwind safety?
///
/// In Rust a function can "return" early if it either panics or calls a
/// function which transitively panics. This sort of control flow is not always
/// anticipated, and has the possibility of causing subtle bugs through a
/// combination of two critical components:
///
/// 1. A data structure is in a temporarily invalid state when the thread
/// panics.
/// 2. This broken invariant is then later observed.
///
/// Typically in Rust, it is difficult to perform step (2) because catching a
/// panic involves either spawning a thread (which in turns makes it difficult
/// to later witness broken invariants) or using the `catch_unwind` function in this
/// module. Additionally, even if an invariant is witnessed, it typically isn't a
/// problem in Rust because there are no uninitialized values (like in C or C++).
///
/// It is possible, however, for **logical** invariants to be broken in Rust,
/// which can end up causing behavioral bugs. Another key aspect of unwind safety
/// in Rust is that, in the absence of `unsafe` code, a panic cannot lead to
/// memory unsafety.
///
/// That was a bit of a whirlwind tour of unwind safety, but for more information
/// about unwind safety and how it applies to Rust, see an [associated RFC][rfc].
///
/// [rfc]: https://github.com/rust-lang/rfcs/blob/master/text/1236-stabilize-catch-panic.md
///
/// ## What is `UnwindSafe`?
///
/// Now that we've got an idea of what unwind safety is in Rust, it's also
/// important to understand what this trait represents. As mentioned above, one
/// way to witness broken invariants is through the `catch_unwind` function in this
/// module as it allows catching a panic and then re-using the environment of
/// the closure.
///
/// Simply put, a type `T` implements `UnwindSafe` if it cannot easily allow
/// witnessing a broken invariant through the use of `catch_unwind` (catching a
/// panic). This trait is an auto trait, so it is automatically implemented for
/// many types, and it is also structurally composed (e.g., a struct is unwind
/// safe if all of its components are unwind safe).
///
/// Note, however, that this is not an unsafe trait, so there is not a succinct
/// contract that this trait is providing. Instead it is intended as more of a
/// "speed bump" to alert users of `catch_unwind` that broken invariants may be
/// witnessed and may need to be accounted for.
///
/// ## Who implements `UnwindSafe`?
///
/// Types such as `&mut T` and `&RefCell<T>` are examples which are **not**
/// unwind safe. The general idea is that any mutable state which can be shared
/// across `catch_unwind` is not unwind safe by default. This is because it is very
/// easy to witness a broken invariant outside of `catch_unwind` as the data is
/// simply accessed as usual.
///
/// Types like `&Mutex<T>`, however, are unwind safe because they implement
/// poisoning by default. They still allow witnessing a broken invariant, but
/// they already provide their own "speed bumps" to do so.
///
/// ## When should `UnwindSafe` be used?
///
/// It is not intended that most types or functions need to worry about this trait.
/// It is only used as a bound on the `catch_unwind` function and as mentioned
/// above, the lack of `unsafe` means it is mostly an advisory. The
/// [`AssertUnwindSafe`] wrapper struct can be used to force this trait to be
/// implemented for any closed over variables passed to `catch_unwind`.
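///
/// # Examples
///
/// A minimal sketch of checking the bound at compile time with a helper
/// function; all of the types below are unwind safe:
///
/// ```
/// use std::panic::UnwindSafe;
///
/// fn assert_unwind_safe<T: UnwindSafe>() {}
///
/// assert_unwind_safe::<String>();
/// assert_unwind_safe::<&str>();
/// assert_unwind_safe::<Vec<u8>>();
/// ```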
#[stable(feature = "catch_unwind", since = "1.9.0")]
#[rustc_on_unimplemented(
message = "the type `{Self}` may not be safely transferred across an unwind boundary",
label = "`{Self}` may not be safely transferred across an unwind boundary"
)]
pub auto trait UnwindSafe {}
/// A marker trait representing types where a shared reference is considered
/// unwind safe.
///
/// This trait is namely not implemented by [`UnsafeCell`], the root of all
/// interior mutability.
///
/// This is a "helper marker trait" used to provide impl blocks for the
/// [`UnwindSafe`] trait, for more information see that documentation.
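///
/// For example, `i32` is `RefUnwindSafe`, so `&i32` is unwind safe, while a
/// shared reference to an [`UnsafeCell`]-based type such as `Cell<i32>` is not.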
#[stable(feature = "catch_unwind", since = "1.9.0")]
#[rustc_on_unimplemented(
message = "the type `{Self}` may contain interior mutability and a reference may not be safely \
transferrable across a catch_unwind boundary",
label = "`{Self}` may contain interior mutability and a reference may not be safely \
transferrable across a catch_unwind boundary"
)]
pub auto trait RefUnwindSafe {}
/// A simple wrapper around a type to assert that it is unwind safe.
///
/// When using [`catch_unwind`] it may be the case that some of the closed over
/// variables are not unwind safe. For example if `&mut T` is captured the
/// compiler will generate a warning indicating that it is not unwind safe. It
/// may not be the case, however, that this is actually a problem due to the
/// specific usage of [`catch_unwind`] if unwind safety is specifically taken into
/// account. This wrapper struct is useful for a quick and lightweight
/// annotation that a variable is indeed unwind safe.
///
/// # Examples
///
/// One way to use `AssertUnwindSafe` is to assert that the entire closure
/// itself is unwind safe, bypassing all checks for all variables:
///
/// ```
/// use std::panic::{self, AssertUnwindSafe};
///
/// let mut variable = 4;
///
/// // This code will not compile because the closure captures `&mut variable`
/// // which is not considered unwind safe by default.
///
/// // panic::catch_unwind(|| {
/// // variable += 3;
/// // });
///
/// // This, however, will compile due to the `AssertUnwindSafe` wrapper
/// let result = panic::catch_unwind(AssertUnwindSafe(|| {
/// variable += 3;
/// }));
/// // ...
/// ```
///
/// Wrapping the entire closure amounts to a blanket assertion that all captured
/// variables are unwind safe. This has the downside that if new captures are
/// added in the future, they will also be considered unwind safe. Therefore,
/// you may prefer to just wrap individual captures, as shown below. This is
/// more annotation, but it ensures that if a new capture is added which is not
/// unwind safe, you will get a compilation error at that time, which will
/// allow you to consider whether that new capture in fact represent a bug or
/// not.
///
/// ```
/// use std::panic::{self, AssertUnwindSafe};
///
/// let mut variable = 4;
/// let other_capture = 3;
///
/// let result = {
/// let mut wrapper = AssertUnwindSafe(&mut variable);
/// panic::catch_unwind(move || {
/// **wrapper += other_capture;
/// })
/// };
/// // ...
/// ```
#[stable(feature = "catch_unwind", since = "1.9.0")]
pub struct AssertUnwindSafe<T>(#[stable(feature = "catch_unwind", since = "1.9.0")] pub T);
// Implementations of the `UnwindSafe` trait:
//
// * By default everything is unwind safe
// * pointers and references through which some form of mutability is reachable are not unwind safe
// * Unique, an owning pointer, lifts an implementation
// * Types like Mutex/RwLock which are explicitly poisoned are unwind safe
// * Our custom AssertUnwindSafe wrapper is indeed unwind safe
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: ?Sized> !UnwindSafe for &mut T {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for &T {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for *const T {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for *mut T {}
#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: UnwindSafe + ?Sized> UnwindSafe for Unique<T> {}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for NonNull<T> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: ?Sized> UnwindSafe for Mutex<T> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: ?Sized> UnwindSafe for RwLock<T> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T> UnwindSafe for AssertUnwindSafe<T> {}
// not covered via the Shared impl above b/c the inner contents use
// Cell/AtomicUsize, but the usage here is unwind safe so we can lift the
// impl up one level to Arc/Rc itself
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Rc<T> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Arc<T> {}
// Pretty simple implementations for the `RefUnwindSafe` marker trait,
// basically just saying that `UnsafeCell` is the
// only thing which doesn't implement it (which then transitively applies to
// everything else).
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: ?Sized> !RefUnwindSafe for UnsafeCell<T> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T> RefUnwindSafe for AssertUnwindSafe<T> {}
#[stable(feature = "unwind_safe_lock_refs", since = "1.12.0")]
impl<T: ?Sized> RefUnwindSafe for Mutex<T> {}
#[stable(feature = "unwind_safe_lock_refs", since = "1.12.0")]
impl<T: ?Sized> RefUnwindSafe for RwLock<T> {}
#[cfg(target_has_atomic_load_store = "ptr")]
#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
impl RefUnwindSafe for atomic::AtomicIsize {}
#[cfg(target_has_atomic_load_store = "8")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicI8 {}
#[cfg(target_has_atomic_load_store = "16")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicI16 {}
#[cfg(target_has_atomic_load_store = "32")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicI32 {}
#[cfg(target_has_atomic_load_store = "64")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicI64 {}
#[cfg(target_has_atomic_load_store = "128")]
#[unstable(feature = "integer_atomics", issue = "32976")]
impl RefUnwindSafe for atomic::AtomicI128 {}
#[cfg(target_has_atomic_load_store = "ptr")]
#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
impl RefUnwindSafe for atomic::AtomicUsize {}
#[cfg(target_has_atomic_load_store = "8")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicU8 {}
#[cfg(target_has_atomic_load_store = "16")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicU16 {}
#[cfg(target_has_atomic_load_store = "32")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicU32 {}
#[cfg(target_has_atomic_load_store = "64")]
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
impl RefUnwindSafe for atomic::AtomicU64 {}
#[cfg(target_has_atomic_load_store = "128")]
#[unstable(feature = "integer_atomics", issue = "32976")]
impl RefUnwindSafe for atomic::AtomicU128 {}
#[cfg(target_has_atomic_load_store = "8")]
#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
impl RefUnwindSafe for atomic::AtomicBool {}
#[cfg(target_has_atomic_load_store = "ptr")]
#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
impl<T> RefUnwindSafe for atomic::AtomicPtr<T> {}
// https://github.com/rust-lang/rust/issues/62301
#[stable(feature = "hashbrown", since = "1.36.0")]
impl<K, V, S> UnwindSafe for collections::HashMap<K, V, S>
where
K: UnwindSafe,
V: UnwindSafe,
S: UnwindSafe,
{
}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T> Deref for AssertUnwindSafe<T> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T> DerefMut for AssertUnwindSafe<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<R, F: FnOnce() -> R> FnOnce<()> for AssertUnwindSafe<F> {
type Output = R;
extern "rust-call" fn call_once(self, _args: ()) -> R {
(self.0)()
}
}
#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: fmt::Debug> fmt::Debug for AssertUnwindSafe<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("AssertUnwindSafe").field(&self.0).finish()
}
}
#[stable(feature = "futures_api", since = "1.36.0")]
impl<F: Future> Future for AssertUnwindSafe<F> {
type Output = F::Output;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let pinned_field = unsafe { Pin::map_unchecked_mut(self, |x| &mut x.0) };
F::poll(pinned_field, cx)
}
}
#[unstable(feature = "async_stream", issue = "79024")]
impl<S: Stream> Stream for AssertUnwindSafe<S> {
type Item = S::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<S::Item>> {
unsafe { self.map_unchecked_mut(|x| &mut x.0) }.poll_next(cx)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
/// Invokes a closure, capturing the cause of an unwinding panic if one occurs.
///
/// This function will return `Ok` with the closure's result if the closure
/// does not panic, and will return `Err(cause)` if the closure panics. The
/// `cause` returned is the object with which panic was originally invoked.
///
/// It is currently undefined behavior to unwind from Rust code into foreign
/// code, so this function is particularly useful when Rust is called from
/// another language (normally C). This can run arbitrary Rust code, capturing a
/// panic and allowing a graceful handling of the error.
///
/// It is **not** recommended to use this function for a general try/catch
/// mechanism. The [`Result`] type is more appropriate to use for functions that
/// can fail on a regular basis. Additionally, this function is not guaranteed
/// to catch all panics, see the "Notes" section below.
///
/// The closure provided is required to adhere to the [`UnwindSafe`] trait to ensure
/// that all captured variables are safe to cross this boundary. The purpose of
/// this bound is to encode the concept of [exception safety][rfc] in the type
/// system. Most usage of this function should not need to worry about this
/// bound as programs are naturally unwind safe without `unsafe` code. If it
/// becomes a problem the [`AssertUnwindSafe`] wrapper struct can be used to quickly
/// assert that the usage here is indeed unwind safe.
///
/// [rfc]: https://github.com/rust-lang/rfcs/blob/master/text/1236-stabilize-catch-panic.md
///
/// # Notes
///
/// Note that this function **may not catch all panics** in Rust. A panic in
/// Rust is not always implemented via unwinding, but can be implemented by
/// aborting the process as well. This function *only* catches unwinding panics,
/// not those that abort the process.
///
/// Also note that unwinding into Rust code with a foreign exception (e.g.
/// an exception thrown from C++ code) is undefined behavior.
///
/// # Examples
///
/// ```
/// use std::panic;
///
/// let result = panic::catch_unwind(|| {
/// println!("hello!");
/// });
/// assert!(result.is_ok());
///
/// let result = panic::catch_unwind(|| {
/// panic!("oh no!");
/// });
/// assert!(result.is_err());
/// ```
#[stable(feature = "catch_unwind", since = "1.9.0")]
pub fn catch_unwind<F: FnOnce() -> R + UnwindSafe, R>(f: F) -> Result<R> {
unsafe { panicking::r#try(f) }
}
/// Triggers a panic without invoking the panic hook.
///
/// This is designed to be used in conjunction with [`catch_unwind`] to, for
/// example, carry a panic across a layer of C code.
///
/// # Notes
///
/// Note that panics in Rust are not always implemented via unwinding, but they
/// may be implemented by aborting the process. If this function is called when
/// panics are implemented this way then this function will abort the process,
/// not trigger an unwind.
///
/// # Examples
///
/// ```should_panic
/// use std::panic;
///
/// let result = panic::catch_unwind(|| {
/// panic!("oh no!");
/// });
///
/// if let Err(err) = result {
/// panic::resume_unwind(err);
/// }
/// ```
#[stable(feature = "resume_unwind", since = "1.9.0")]
#[cfg(not(target_arch = "bpf"))]
pub fn resume_unwind(payload: Box<dyn Any + Send>) -> ! {
panicking::rust_panic_without_hook(payload)
}
/// BPF version of resume_unwind
#[stable(feature = "resume_unwind", since = "1.9.0")]
#[cfg(target_arch = "bpf")]
pub fn resume_unwind(_payload: Box<dyn Any + Send>) -> ! {
// Only used by thread, redirect to plain old panic
panicking::begin_panic_fmt(&format_args!("unwind"))
}
#[cfg(test)]
mod tests;
| 39.779412 | 100 | 0.68582 |
d9c9d49137068c5a3d7f2965a65d14250811052c | 17,732 | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::hostcalls;
use crate::traits::*;
use crate::types::*;
use hashbrown::HashMap;
use std::cell::{Cell, RefCell};
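// A single `Dispatcher` per thread owns every live context for this Wasm instance.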
thread_local! {
static DISPATCHER: Dispatcher = Dispatcher::new();
}
pub(crate) fn set_root_context(callback: NewRootContext) {
DISPATCHER.with(|dispatcher| dispatcher.set_root_context(callback));
}
pub(crate) fn set_stream_context(callback: NewStreamContext) {
DISPATCHER.with(|dispatcher| dispatcher.set_stream_context(callback));
}
pub(crate) fn set_http_context(callback: NewHttpContext) {
DISPATCHER.with(|dispatcher| dispatcher.set_http_context(callback));
}
pub(crate) fn register_callout(token_id: u32) {
DISPATCHER.with(|dispatcher| dispatcher.register_callout(token_id));
}
struct NoopRoot;
impl Context for NoopRoot {}
impl RootContext for NoopRoot {}
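// Routes host callbacks to the per-context trait objects, creating root, stream,
// and HTTP contexts on demand through the registered factory callbacks.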
struct Dispatcher {
new_root: Cell<Option<NewRootContext>>,
roots: RefCell<HashMap<u32, Box<dyn RootContext>>>,
new_stream: Cell<Option<NewStreamContext>>,
streams: RefCell<HashMap<u32, Box<dyn StreamContext>>>,
new_http_stream: Cell<Option<NewHttpContext>>,
http_streams: RefCell<HashMap<u32, Box<dyn HttpContext>>>,
active_id: Cell<u32>,
callouts: RefCell<HashMap<u32, u32>>,
}
impl Dispatcher {
fn new() -> Dispatcher {
Dispatcher {
new_root: Cell::new(None),
roots: RefCell::new(HashMap::new()),
new_stream: Cell::new(None),
streams: RefCell::new(HashMap::new()),
new_http_stream: Cell::new(None),
http_streams: RefCell::new(HashMap::new()),
active_id: Cell::new(0),
callouts: RefCell::new(HashMap::new()),
}
}
fn set_root_context(&self, callback: NewRootContext) {
self.new_root.set(Some(callback));
}
fn set_stream_context(&self, callback: NewStreamContext) {
self.new_stream.set(Some(callback));
}
fn set_http_context(&self, callback: NewHttpContext) {
self.new_http_stream.set(Some(callback));
}
fn register_callout(&self, token_id: u32) {
if self
.callouts
.borrow_mut()
.insert(token_id, self.active_id.get())
.is_some()
{
panic!("duplicate token_id")
}
}
fn create_root_context(&self, context_id: u32) {
let new_context = match self.new_root.get() {
Some(f) => f(context_id),
None => Box::new(NoopRoot),
};
if self
.roots
.borrow_mut()
.insert(context_id, new_context)
.is_some()
{
panic!("duplicate context_id")
}
}
fn create_stream_context(&self, context_id: u32, root_context_id: u32) {
let new_context = match self.roots.borrow().get(&root_context_id) {
Some(root_context) => match self.new_stream.get() {
Some(f) => f(context_id, root_context_id),
None => match root_context.create_stream_context(context_id) {
Some(stream_context) => stream_context,
None => panic!("create_stream_context returned None"),
},
},
None => panic!("invalid root_context_id"),
};
if self
.streams
.borrow_mut()
.insert(context_id, new_context)
.is_some()
{
panic!("duplicate context_id")
}
}
fn create_http_context(&self, context_id: u32, root_context_id: u32) {
let new_context = match self.roots.borrow().get(&root_context_id) {
Some(root_context) => match self.new_http_stream.get() {
Some(f) => f(context_id, root_context_id),
None => match root_context.create_http_context(context_id) {
Some(stream_context) => stream_context,
None => panic!("create_http_context returned None"),
},
},
None => panic!("invalid root_context_id"),
};
if self
.http_streams
.borrow_mut()
.insert(context_id, new_context)
.is_some()
{
panic!("duplicate context_id")
}
}
fn on_create_context(&self, context_id: u32, root_context_id: u32) {
if root_context_id == 0 {
self.create_root_context(context_id);
} else if self.new_http_stream.get().is_some() {
self.create_http_context(context_id, root_context_id);
} else if self.new_stream.get().is_some() {
self.create_stream_context(context_id, root_context_id);
} else if let Some(root_context) = self.roots.borrow().get(&root_context_id) {
match root_context.get_type() {
Some(ContextType::HttpContext) => {
self.create_http_context(context_id, root_context_id)
}
Some(ContextType::StreamContext) => {
self.create_stream_context(context_id, root_context_id)
}
None => panic!("missing ContextType on root_context"),
}
} else {
panic!("invalid root_context_id and missing constructors");
}
}
fn on_done(&self, context_id: u32) -> bool {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_done()
} else if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
stream.on_done()
} else if let Some(root) = self.roots.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
root.on_done()
} else {
panic!("invalid context_id")
}
}
fn on_log(&self, context_id: u32) {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_log()
} else if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
stream.on_log()
} else if let Some(root) = self.roots.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
root.on_log()
} else {
panic!("invalid context_id")
}
}
fn on_delete(&self, context_id: u32) {
if !(self.http_streams.borrow_mut().remove(&context_id).is_some()
|| self.streams.borrow_mut().remove(&context_id).is_some()
|| self.roots.borrow_mut().remove(&context_id).is_some())
{
panic!("invalid context_id")
}
}
fn on_vm_start(&self, context_id: u32, vm_configuration_size: usize) -> bool {
if let Some(root) = self.roots.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
root.on_vm_start(vm_configuration_size)
} else {
panic!("invalid context_id")
}
}
fn on_configure(&self, context_id: u32, plugin_configuration_size: usize) -> bool {
if let Some(root) = self.roots.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
root.on_configure(plugin_configuration_size)
} else {
panic!("invalid context_id")
}
}
fn on_tick(&self, context_id: u32) {
if let Some(root) = self.roots.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
root.on_tick()
} else {
panic!("invalid context_id")
}
}
fn on_queue_ready(&self, context_id: u32, queue_id: u32) {
if let Some(root) = self.roots.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
root.on_queue_ready(queue_id)
} else {
panic!("invalid context_id")
}
}
fn on_new_connection(&self, context_id: u32) -> Action {
if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
stream.on_new_connection()
} else {
panic!("invalid context_id")
}
}
fn on_downstream_data(&self, context_id: u32, data_size: usize, end_of_stream: bool) -> Action {
if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
stream.on_downstream_data(data_size, end_of_stream)
} else {
panic!("invalid context_id")
}
}
fn on_downstream_close(&self, context_id: u32, peer_type: PeerType) {
if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
stream.on_downstream_close(peer_type)
} else {
panic!("invalid context_id")
}
}
fn on_upstream_data(&self, context_id: u32, data_size: usize, end_of_stream: bool) -> Action {
if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
stream.on_upstream_data(data_size, end_of_stream)
} else {
panic!("invalid context_id")
}
}
fn on_upstream_close(&self, context_id: u32, peer_type: PeerType) {
if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
stream.on_upstream_close(peer_type)
} else {
panic!("invalid context_id")
}
}
fn on_http_request_headers(&self, context_id: u32, num_headers: usize) -> Action {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_http_request_headers(num_headers)
} else {
panic!("invalid context_id")
}
}
fn on_http_request_body(
&self,
context_id: u32,
body_size: usize,
end_of_stream: bool,
) -> Action {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_http_request_body(body_size, end_of_stream)
} else {
panic!("invalid context_id")
}
}
fn on_http_request_trailers(&self, context_id: u32, num_trailers: usize) -> Action {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_http_request_trailers(num_trailers)
} else {
panic!("invalid context_id")
}
}
fn on_http_response_headers(&self, context_id: u32, num_headers: usize) -> Action {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_http_response_headers(num_headers)
} else {
panic!("invalid context_id")
}
}
fn on_http_response_body(
&self,
context_id: u32,
body_size: usize,
end_of_stream: bool,
) -> Action {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_http_response_body(body_size, end_of_stream)
} else {
panic!("invalid context_id")
}
}
fn on_http_response_trailers(&self, context_id: u32, num_trailers: usize) -> Action {
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
http_stream.on_http_response_trailers(num_trailers)
} else {
panic!("invalid context_id")
}
}
fn on_http_call_response(
&self,
token_id: u32,
num_headers: usize,
body_size: usize,
num_trailers: usize,
) {
let context_id = self
.callouts
.borrow_mut()
.remove(&token_id)
.expect("invalid token_id");
if let Some(http_stream) = self.http_streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
hostcalls::set_effective_context(context_id).unwrap();
http_stream.on_http_call_response(token_id, num_headers, body_size, num_trailers)
} else if let Some(stream) = self.streams.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
hostcalls::set_effective_context(context_id).unwrap();
stream.on_http_call_response(token_id, num_headers, body_size, num_trailers)
} else if let Some(root) = self.roots.borrow_mut().get_mut(&context_id) {
self.active_id.set(context_id);
hostcalls::set_effective_context(context_id).unwrap();
root.on_http_call_response(token_id, num_headers, body_size, num_trailers)
}
}
}
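// The `proxy_on_*` functions below are the proxy-wasm ABI entry points: the host
// calls them by exported name, and each one simply forwards to the thread-local
// Dispatcher for the given context_id.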
#[no_mangle]
pub extern "C" fn proxy_on_context_create(context_id: u32, root_context_id: u32) {
DISPATCHER.with(|dispatcher| dispatcher.on_create_context(context_id, root_context_id))
}
#[no_mangle]
pub extern "C" fn proxy_on_done(context_id: u32) -> bool {
DISPATCHER.with(|dispatcher| dispatcher.on_done(context_id))
}
#[no_mangle]
pub extern "C" fn proxy_on_log(context_id: u32) {
DISPATCHER.with(|dispatcher| dispatcher.on_log(context_id))
}
#[no_mangle]
pub extern "C" fn proxy_on_delete(context_id: u32) {
DISPATCHER.with(|dispatcher| dispatcher.on_delete(context_id))
}
#[no_mangle]
pub extern "C" fn proxy_on_vm_start(context_id: u32, vm_configuration_size: usize) -> bool {
DISPATCHER.with(|dispatcher| dispatcher.on_vm_start(context_id, vm_configuration_size))
}
#[no_mangle]
pub extern "C" fn proxy_on_configure(context_id: u32, plugin_configuration_size: usize) -> bool {
DISPATCHER.with(|dispatcher| dispatcher.on_configure(context_id, plugin_configuration_size))
}
#[no_mangle]
pub extern "C" fn proxy_on_tick(context_id: u32) {
DISPATCHER.with(|dispatcher| dispatcher.on_tick(context_id))
}
#[no_mangle]
pub extern "C" fn proxy_on_queue_ready(context_id: u32, queue_id: u32) {
DISPATCHER.with(|dispatcher| dispatcher.on_queue_ready(context_id, queue_id))
}
#[no_mangle]
pub extern "C" fn proxy_on_new_connection(context_id: u32) -> Action {
DISPATCHER.with(|dispatcher| dispatcher.on_new_connection(context_id))
}
#[no_mangle]
pub extern "C" fn proxy_on_downstream_data(
context_id: u32,
data_size: usize,
end_of_stream: bool,
) -> Action {
DISPATCHER
.with(|dispatcher| dispatcher.on_downstream_data(context_id, data_size, end_of_stream))
}
#[no_mangle]
pub extern "C" fn proxy_on_downstream_connection_close(context_id: u32, peer_type: PeerType) {
DISPATCHER.with(|dispatcher| dispatcher.on_downstream_close(context_id, peer_type))
}
#[no_mangle]
pub extern "C" fn proxy_on_upstream_data(
context_id: u32,
data_size: usize,
end_of_stream: bool,
) -> Action {
DISPATCHER.with(|dispatcher| dispatcher.on_upstream_data(context_id, data_size, end_of_stream))
}
#[no_mangle]
pub extern "C" fn proxy_on_upstream_connection_close(context_id: u32, peer_type: PeerType) {
DISPATCHER.with(|dispatcher| dispatcher.on_upstream_close(context_id, peer_type))
}
#[no_mangle]
pub extern "C" fn proxy_on_request_headers(context_id: u32, num_headers: usize) -> Action {
DISPATCHER.with(|dispatcher| dispatcher.on_http_request_headers(context_id, num_headers))
}
#[no_mangle]
pub extern "C" fn proxy_on_request_body(
context_id: u32,
body_size: usize,
end_of_stream: bool,
) -> Action {
DISPATCHER
.with(|dispatcher| dispatcher.on_http_request_body(context_id, body_size, end_of_stream))
}
#[no_mangle]
pub extern "C" fn proxy_on_request_trailers(context_id: u32, num_trailers: usize) -> Action {
DISPATCHER.with(|dispatcher| dispatcher.on_http_request_trailers(context_id, num_trailers))
}
#[no_mangle]
pub extern "C" fn proxy_on_response_headers(context_id: u32, num_headers: usize) -> Action {
DISPATCHER.with(|dispatcher| dispatcher.on_http_response_headers(context_id, num_headers))
}
#[no_mangle]
pub extern "C" fn proxy_on_response_body(
context_id: u32,
body_size: usize,
end_of_stream: bool,
) -> Action {
DISPATCHER
.with(|dispatcher| dispatcher.on_http_response_body(context_id, body_size, end_of_stream))
}
#[no_mangle]
pub extern "C" fn proxy_on_response_trailers(context_id: u32, num_trailers: usize) -> Action {
DISPATCHER.with(|dispatcher| dispatcher.on_http_response_trailers(context_id, num_trailers))
}
#[no_mangle]
pub extern "C" fn proxy_on_http_call_response(
_context_id: u32,
token_id: u32,
num_headers: usize,
body_size: usize,
num_trailers: usize,
) {
DISPATCHER.with(|dispatcher| {
dispatcher.on_http_call_response(token_id, num_headers, body_size, num_trailers)
})
}
| 34.632813 | 100 | 0.636928 |
d6101b85d37db17df9dd028d44cd51ee54d0b5c0 | 4,790 | use super::Subnet;
use nom::branch::alt;
use nom::character::complete::{char, digit1, hex_digit1};
use nom::combinator::{map, map_res};
use nom::error::{ErrorKind, ParseError};
use nom::multi::many_m_n;
use nom::sequence::{preceded, terminated};
use nom::{Err, IResult};
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
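/// Parses an IPv4 subnet prefix: one to three dot-terminated decimal octets
/// (e.g. `10.` or `192.168.`), leaving the rest of the input untouched.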
pub fn ipv4_subnet<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&'a str, Subnet, E> {
let (input, prefix) = many_m_n(
1,
3,
map_res(terminated(digit1, char('.')), str::parse::<u8>),
)(input)?;
Ok((input, Subnet::V4(prefix)))
}
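/// Parses an IPv6 subnet prefix: one to seven colon-terminated hexadecimal
/// groups (e.g. `fd00:` or `2001:db8:`).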
pub fn ipv6_subnet<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&'a str, Subnet, E> {
let (input, prefix) = many_m_n(
1,
7,
map_res(terminated(hex_digit1, char(':')), |s| {
u16::from_str_radix(s, 16)
}),
)(input)?;
Ok((input, Subnet::V6(prefix)))
}
pub fn subnet<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&'a str, Subnet, E> {
alt((ipv4_subnet, ipv6_subnet))(input)
}
pub fn ipv4_addr<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&'a str, Ipv4Addr, E> {
let mut octets = [0u8; 4];
let (input, octet) = map_res(digit1, str::parse::<u8>)(input)?;
octets[0] = octet;
let (input, octet) = map_res(preceded(char('.'), digit1), str::parse::<u8>)(input)?;
octets[1] = octet;
let (input, octet) = map_res(preceded(char('.'), digit1), str::parse::<u8>)(input)?;
octets[2] = octet;
let (input, octet) = map_res(preceded(char('.'), digit1), str::parse::<u8>)(input)?;
octets[3] = octet;
Ok((input, Ipv4Addr::from(octets)))
}
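/// Parses an IPv6 address with an optional single `::` gap between groups:
/// groups seen before the gap fill the address from the front, groups after it
/// fill from the back, and everything in between is zero. At least two and at
/// most eight groups are accepted.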
pub fn ipv6_addr<'a, E: ParseError<&'a str>>(original: &'a str) -> IResult<&'a str, Ipv6Addr, E> {
let mut head_segments = Vec::<u16>::with_capacity(8);
let mut tail_segments = Vec::<u16>::with_capacity(8);
let mut append_head = true;
    let (mut input, word) = map_res(hex_digit1, |s| u16::from_str_radix(s, 16))(original)?;
head_segments.push(word);
while let Ok((remain, _)) = char::<&'a str, E>(':')(input) {
input = remain;
if append_head {
if let Ok((remain, _)) = char::<&'a str, E>(':')(input) {
input = remain;
append_head = false;
}
}
let (remain, word) = map_res(hex_digit1, |s| u16::from_str_radix(s, 16))(input)?;
input = remain;
if append_head {
head_segments.push(word);
} else {
tail_segments.push(word);
}
}
if head_segments.len() + tail_segments.len() < 2 {
        return Err(Err::Error(E::from_char(original, ':')));
}
if head_segments.len() + tail_segments.len() > 8 {
        return Err(Err::Error(E::from_error_kind(original, ErrorKind::TooLarge)));
}
let mut segments = [0u16; 8];
segments[0..head_segments.len()].copy_from_slice(&head_segments);
if !tail_segments.is_empty() {
segments[(8 - tail_segments.len())..].copy_from_slice(&tail_segments);
}
Ok((input, Ipv6Addr::from(segments)))
}
pub fn ip_addr<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&'a str, IpAddr, E> {
alt((map(ipv4_addr, IpAddr::V4), map(ipv6_addr, IpAddr::V6)))(input)
}
#[cfg(test)]
mod test {
use super::*;
use nom::error::VerboseError;
use spectral::prelude::*;
#[test]
fn test_ipv4_addr() {
let input = "1.12.123.234";
let (remain, addr) = ipv4_addr::<VerboseError<&str>>(input).unwrap();
assert_that(&remain).is_equal_to("");
assert_that(&addr.octets()).is_equal_to([1u8, 12u8, 123u8, 234u8])
}
#[test]
fn test_ipv6_addr() {
let input = "1234:5678::4321";
let (remain, addr) = ipv6_addr::<VerboseError<&str>>(input).unwrap();
assert_that(&remain).is_equal_to("");
assert_that(&addr.segments()).is_equal_to([
0x1234u16, 0x5678u16, 0u16, 0u16, 0u16, 0u16, 0u16, 0x4321u16,
]);
let input = "1234:2345:3456:4567:4321:5432:6543:7654";
let (remain, addr) = ipv6_addr::<VerboseError<&str>>(input).unwrap();
assert_that(&remain).is_equal_to("");
assert_that(&addr.segments()).is_equal_to([
0x1234u16, 0x2345u16, 0x3456u16, 0x4567u16, 0x4321u16, 0x5432u16, 0x6543u16, 0x7654u16,
]);
}
#[test]
fn test_ip_addr() {
let input = "123.34.4.56";
let (remain, addr) = ip_addr::<VerboseError<&str>>(input).unwrap();
assert_that(&remain).is_equal_to("");
assert_that(&addr.is_ipv4()).is_true();
let input = "1234:5678::4321";
let (remain, addr) = ip_addr::<VerboseError<&str>>(input).unwrap();
assert_that(&remain).is_equal_to("");
assert_that(&addr.is_ipv6()).is_true();
}
}
| 33.263889 | 99 | 0.58142 |
eb2272e00105cda123c3ad7f80592ea9f91d52cb | 4,794 | /*
* Copyright (c) 2017-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
extern crate gtk;
#[macro_use]
extern crate relm;
extern crate relm_attributes;
#[macro_use]
extern crate relm_derive;
#[macro_use]
extern crate relm_test;
use gtk::{
CellLayoutExt,
CellRendererText,
Inhibit,
ListStore,
GtkListStoreExt,
GtkListStoreExtManual,
ToValue,
TreeSelection,
TreeSelectionExt,
TreeViewColumn,
TreeViewExt,
Type,
WidgetExt,
};
use relm::Widget;
use relm_attributes::widget;
use self::Msg::*;
#[widget]
impl Widget for TreeView {
fn init_view(&mut self) {
let columns = vec![Type::String];
let model = ListStore::new(&columns);
let row = model.append();
model.set_value(&row, 0, &"String".to_value());
let row = model.append();
model.set_value(&row, 0, &"Text".to_value());
let view_column = TreeViewColumn::new();
let cell = CellRendererText::new();
view_column.pack_start(&cell, true);
view_column.add_attribute(&cell, "text", 0);
self.tree_view.append_column(&view_column);
self.tree_view.set_model(Some(&model));
}
fn model() -> () {
}
fn update(&mut self, _event: Msg) {
}
view! {
#[name="tree_view"]
gtk::TreeView {
selection.changed(selection) => SelectionChanged(selection.clone()),
}
}
}
pub struct Model {
visible: bool,
}
#[derive(Clone, Msg)]
pub enum Msg {
SelectionChanged(TreeSelection),
Quit,
}
#[widget]
impl Widget for Win {
fn init_view(&mut self) {
let columns = vec![Type::String];
let model = ListStore::new(&columns);
let row = model.append();
model.set_value(&row, 0, &"String".to_value());
let row = model.append();
model.set_value(&row, 0, &"Text".to_value());
let view_column = TreeViewColumn::new();
let cell = CellRendererText::new();
view_column.pack_start(&cell, true);
view_column.add_attribute(&cell, "text", 0);
self.tree_view.append_column(&view_column);
self.tree_view.set_model(Some(&model));
}
fn model() -> Model {
Model {
visible: true,
}
}
fn update(&mut self, event: Msg) {
match event {
SelectionChanged(_selection) => println!("selection changed"),
Quit => gtk::main_quit(),
}
}
view! {
gtk::Window {
gtk::Box {
#[name="tree_view"]
gtk::TreeView {
selection.changed(selection) => SelectionChanged(selection.clone()),
},
TreeView {
selection.changed(selection) => SelectionChanged(selection.clone()),
visible: self.model.visible,
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
}
}
fn main() {
Win::run(()).expect("Win::run failed");
}
#[cfg(test)]
mod tests {
use gtk::{TreeSelectionExt, TreeModelExt, TreeViewExt};
use relm;
use Msg::SelectionChanged;
use Win;
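    // Select a row in the named `tree_view` programmatically and assert that the
    // resulting SelectionChanged message is observed via the relm observer macros.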
#[test]
fn child_event() {
let (component, widgets) = relm::init_test::<Win>(()).expect("init_test failed");
let tree_view = &widgets.tree_view;
let selection_observer = relm_observer_new!(component, SelectionChanged(_));
let selection = tree_view.get_selection();
let model = tree_view.get_model().expect("model");
let iter = model.get_iter_first().expect("first row");
selection.select_iter(&iter);
relm_observer_wait!(let SelectionChanged(_selection) = selection_observer);
}
}
| 28.035088 | 89 | 0.62161 |
bff3fcdd280ba74618db5b0a02ab49899b772cac | 1,152 | use serde_json::Value as JsonValue;
use crate::switchboard::SessionId;
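/// Builder-style container for an incoming request; optional fields are attached
/// with the `set_*` methods, e.g. `Request::new(session_id, "txn").set_jsep_offer(offer)`.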
#[derive(Debug)]
pub struct Request {
session_id: SessionId,
transaction: String,
jsep_offer: Option<JsonValue>,
audio_level_ext_id: Option<u32>,
}
impl Request {
pub fn new(session_id: SessionId, transaction: &str) -> Self {
Self {
session_id,
transaction: transaction.to_owned(),
jsep_offer: None,
audio_level_ext_id: None,
}
}
pub fn set_jsep_offer(self, jsep_offer: JsonValue) -> Self {
Self {
jsep_offer: Some(jsep_offer),
..self
}
}
pub fn session_id(&self) -> SessionId {
self.session_id
}
pub fn transaction(&self) -> &str {
&self.transaction
}
pub fn jsep_offer(&self) -> Option<&JsonValue> {
self.jsep_offer.as_ref()
}
pub fn set_audio_level_ext_id(self, audio_level_ext_id: Option<u32>) -> Self {
Self {
audio_level_ext_id,
..self
}
}
pub fn audio_level_ext_id(&self) -> Option<u32> {
self.audio_level_ext_id
}
}
| 21.735849 | 82 | 0.578125 |
333680e093f2aadd7fa7f92f27f714732720dd89 | 8,279 | #![allow(warnings)]
use reqwest::{
blocking::{Client, Response},
header::CONTENT_TYPE,
Body, ClientBuilder,
};
use std::{
error::Error,
io::{self, Write},
};
use structopt::StructOpt;
trait ToIoError
where
Self: Sized + Send + Sync + 'static,
Self: Into<Box<(dyn std::error::Error + Send + Sync + 'static)>>,
{
fn to_error(self) -> io::Error;
}
impl<T> ToIoError for T
where
T: Sized + Send + Sync + 'static,
T: Into<Box<(dyn std::error::Error + Send + Sync + 'static)>>,
{
fn to_error(self) -> io::Error {
io::Error::new(io::ErrorKind::Other, self.into())
}
}
#[derive(Debug, StructOpt)]
#[structopt(name = "restaff-claim-points", about = "Claim point from Restaff page")]
pub struct Args {
#[structopt(short, long, help = "It's just your username, what can I say ;)")]
username: Option<String>,
#[structopt(
short,
long,
help = "Only specify user to open a prompt to input password"
)]
password: Option<String>,
#[structopt(short, long, default_value = "3", help = "Claim type, from 1 to 5")]
claim_type: u8,
#[structopt(
short,
long,
default_value = "https://api-staff.netjob.asia",
help = "Specify API server"
)]
api_server: String,
#[structopt(short = "f", long, help = "Use password file (Base64 encoded)")]
password_file: Option<String>,
#[structopt(short, long, help = "Use JWT token")]
token: Option<String>,
#[structopt(short = "k", long, help = "Use JWT token file")]
token_file: Option<String>,
#[structopt(short = "l", help = "Log into file")]
log_file: Option<String>,
}
type Token = String;
const RESTAFF_API_LOGIN: &str = "/api/user/login";
const RESTAFF_API_CLAIM: &str = "/api/user/claim-daily";
const RESTAFF_API_LOGOUT: &str = "/api/user/logout";
const RESTAFF_USER_AGENT: &str =
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:88.0) Gecko/20100101 Firefox/88.0";
fn api_post<B>(url: &str, payload: Option<B>, auth_token: Option<&str>) -> anyhow::Result<String>
where
reqwest::blocking::Body: From<B>,
{
let client = Client::builder().user_agent(RESTAFF_USER_AGENT).build()?;
let mut request = client
.post(url)
.header("appid", "c03714075869519a54ba70e31d6751c3")
.header(CONTENT_TYPE, "application/json");
if let Some(payload) = payload {
request = request.body(payload);
}
if let Some(token) = auth_token {
request = request.bearer_auth(token);
}
Ok(request.send()?.text()?)
}
fn get_response_id(response: &str) -> anyhow::Result<i64> {
let id_response = serde_json::from_str::<serde_json::Value>(response)?
.get("id")
.and_then(|id| id.as_i64())
.ok_or_else(|| "Invalid id value from response".to_error())?;
Ok(id_response)
}
fn restaff_login(api_server: &str, username: &str, password: &str) -> anyhow::Result<Token> {
let payload = format!(
r#"{{"UserName":"{}","PassWord":"","Password":"{}","pushToken":"","platform":"Web","deviceId":"Windows-Firefox-88.0"}}"#,
username, password
);
let url = format!("{}{}", api_server, RESTAFF_API_LOGIN);
let response = api_post(&url, Some(payload.clone()), None)?;
let json = serde_json::from_str::<serde_json::Value>(&response)?;
let code = json
.get("code")
.and_then(|code| code.as_i64())
.ok_or_else(|| "Bad response (contains no `id`)".to_error())?;
if code < 0 {
Err("Login failed".to_error())?;
}
let token = json
.get("data")
.and_then(|data| data.get("token"))
.and_then(|token| token.as_str())
.ok_or_else(|| "Response doesn't contain token".to_error())?;
Ok(token.to_string())
}
fn restaff_claim_points(api_server: &str, auth_token: &str, claim_type: u8) -> anyhow::Result<i64> {
let payload = claim_type.to_string();
let url = format!("{}{}", api_server, RESTAFF_API_CLAIM);
let response = api_post(&url, Some(payload), Some(auth_token))?;
let result = get_response_id(&response);
result
}
fn restaff_logout(api_server: &str, token: &str) -> anyhow::Result<i64> {
let url = format!("{}{}", api_server, RESTAFF_API_LOGOUT);
let response = api_post(&url, Some(""), Some(token))?;
let result = serde_json::from_str::<serde_json::Value>(&response)?
.get("code")
.and_then(|code| code.as_i64())
.ok_or_else(|| "Invalid code value from response".to_error())?;
Ok(result)
}
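// Local stand-in for `str::split_once`, splitting at the first occurrence of `pat`.
// Note: it skips a single byte past the match, which is fine here since it is only
// ever used with ":".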
fn split_once<'a>(src: &'a str, pat: &str) -> Option<(&'a str, &'a str)> {
src.find(pat).map(|pos| (&src[..pos], &src[pos + 1..]))
}
fn acquire_token(args: Args) -> Option<String> {
let mut username = args.username.unwrap_or(String::new());
let password = if let Some(pw) = args.password {
pw
} else if let Some(pf) = args.password_file {
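        // The password file may hold either a bare Base64-encoded password or
        // "username:<base64 password>"; a username given this way overrides --username.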
let content = match std::fs::read_to_string(&pf) {
Ok(s) => s,
Err(err) => {
println!("Error: read content from file `{}`: {}", pf, err);
return None;
}
};
let mut content = content.trim();
match split_once(content, ":") {
Some((un, pw)) => {
username = un.to_string();
content = pw;
}
None => {}
}
let decoded = match base64::decode(&content) {
Ok(pw) => pw,
Err(err) => {
println!("Error: decoding password: {}", err);
return None;
}
};
let pw = String::from_utf8_lossy(&decoded).to_string();
pw
} else {
if username.is_empty() {
eprintln!("Error: username is empty");
return None;
}
print!("Input password: ");
        io::stdout().flush().ok();
match rpassword::read_password() {
Ok(pw) => pw,
            Err(err) => {
                eprintln!(
                    "Error: can't read password from console ({}). Try passing the password via the `-p` option instead.",
                    err
                );
return None;
}
}
};
if username.is_empty() {
eprintln!("Error: username is empty");
return None;
}
let token = match restaff_login(&args.api_server, username.as_str(), password.as_str()) {
Ok(token) => token,
Err(err) => {
println!("Error: {}", err);
return None;
}
};
Some(token)
}
fn main() {
    let args = Args::from_args();
    // Clamp the claim type to the documented 1..=5 range; anything else falls back to 3.
    let claim_type = if matches!(args.claim_type, 1..=5) {
        args.claim_type
    } else {
        3
    };
let api_server = args.api_server.to_string();
    // Track whether the token was supplied directly (flag or file), so failures can
    // point at the provided token rather than at a fresh login.
    let mut is_using_input_token = false;
    let token = if let Some(token) = args.token {
        is_using_input_token = true;
        token
    } else if let Some(token_file) = args.token_file {
        is_using_input_token = true;
match std::fs::read_to_string(&token_file) {
Ok(content) => content.trim().to_string(),
Err(err) => {
eprintln!("Error: Failed to read token file content: {}", token_file);
return;
}
}
} else if let Some(token) = acquire_token(args) {
println!("Login successfully.");
token
} else {
println!("Login failed.");
return;
};
match restaff_claim_points(api_server.as_str(), token.as_str(), claim_type) {
Ok(id) if id >= 0 => println!("Points claimed successfully."),
Ok(id) => println!("Points claimed failed. Returned id: {}", id),
Err(err) => {
if is_using_input_token {
eprintln!("Error: Used user input token and failed.");
}
eprintln!("Error: {}", err);
}
}
match restaff_logout(api_server.as_str(), token.as_str()) {
Ok(id) if id >= 0 => println!("Logout successfully."),
Ok(id) => println!("Logout failed. Returned id: {}", id),
Err(err) => {
if is_using_input_token {
eprintln!("Error: Used user input token and failed.");
}
eprintln!("Error: {}", err);
}
}
}
| 28.746528 | 129 | 0.560575 |
89c239e38b9adbbcd726bb14bd3e25e72aa64c91 | 10,881 | use embedded_hal as hal;
use hal::blocking::delay::DelayMs;
use hal::blocking::i2c::{Read, Write, WriteRead};
use crate::commands::Command;
use crate::error::Error;
use crate::types::{RawSensorData, SensorData};
use sensirion_i2c::{crc8, i2c};
const SCD4X_I2C_ADDRESS: u8 = 0x62;
/// SCD4X sensor instance. Use related methods to take measurements.
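///
/// A minimal usage sketch (assuming `i2c` and `delay` are embedded-hal
/// implementations for the target platform):
///
/// ```ignore
/// let mut sensor = Scd4x::new(i2c, delay);
/// sensor.start_periodic_measurement()?;
/// if sensor.data_ready_status()? {
///     let data = sensor.measurement()?;
///     // data.co2 in ppm, data.temperature in °C, data.humidity in %RH
/// }
/// ```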
#[derive(Debug, Default)]
pub struct Scd4x<I2C, D> {
i2c: I2C,
delay: D,
is_running: bool,
}
impl<I2C, D, E> Scd4x<I2C, D>
where
I2C: Read<Error = E> + Write<Error = E> + WriteRead<Error = E>,
D: DelayMs<u32>,
{
pub fn new(i2c: I2C, delay: D) -> Self {
Scd4x {
i2c,
delay,
is_running: false,
}
}
/// Start periodic measurement, signal update interval is 5 seconds.
/// This command is only available in idle mode.
pub fn start_periodic_measurement(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::StartPeriodicMeasurement)?;
self.is_running = true;
Ok(())
}
    /// Stop periodic measurement and return to idle mode for sensor configuration or to save energy.
/// This command is only available in measurement mode.
pub fn stop_periodic_measurement(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::StopPeriodicMeasurement)?;
self.is_running = false;
Ok(())
}
/// Read raw sensor data
pub fn sensor_output(&mut self) -> Result<RawSensorData, Error<E>> {
let mut buf = [0; 9];
self.delayed_read_cmd(Command::ReadMeasurement, &mut buf)?;
let co2 = u16::from_be_bytes([buf[0], buf[1]]);
let temperature = u16::from_be_bytes([buf[3], buf[4]]);
let humidity = u16::from_be_bytes([buf[6], buf[7]]);
Ok(RawSensorData {
co2,
temperature,
humidity,
})
}
/// Read converted sensor data
pub fn measurement(&mut self) -> Result<SensorData, Error<E>> {
let mut buf = [0; 9];
self.delayed_read_cmd(Command::ReadMeasurement, &mut buf)?;
        // buf[2], buf[5], buf[8] are CRC bytes and are not used
let co2 = u16::from_be_bytes([buf[0], buf[1]]);
let temperature = u16::from_be_bytes([buf[3], buf[4]]);
let humidity = u16::from_be_bytes([buf[6], buf[7]]);
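        // Conversion per the SCD4x datasheet: T[°C] = -45 + 175 * raw / 2^16 and
        // RH[%RH] = 100 * raw / 2^16. The integer math below uses the equivalent
        // scaled factors (21875 / 2^13 == 175_000 / 2^16, 12500 / 2^13 == 100_000 / 2^16).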
Ok(SensorData {
co2,
temperature: ((((21875 * temperature) >> 13) - 45000) as f32) / 1000.0,
humidity: (((12500 * humidity) >> 13) as f32) / 1000.0,
})
}
/// Get sensor temperature offset
pub fn temperature_offset(&mut self) -> Result<f32, Error<E>> {
let mut buf = [0; 3];
self.delayed_read_cmd(Command::GetTemperatureOffset, &mut buf)?;
let raw_offset = u16::from_be_bytes([buf[0], buf[1]]);
let offset = raw_offset as f32 * 175.0 / 65536.0;
Ok(offset)
}
/// Set sensor temperature offset
pub fn set_temperature_offset(&mut self, offset: f32) -> Result<(), Error<E>> {
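        // The offset is sent as raw ticks: offset[°C] * 2^16 / 175, the inverse of the
        // temperature conversion used in `measurement`.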
let t_offset = (offset * 65536.0 / 175.0) as i16;
self.write_command_with_data(Command::SetTemperatureOffset, t_offset as u16)?;
Ok(())
}
/// Get sensor altitude in meters above sea level.
pub fn altitude(&mut self) -> Result<u16, Error<E>> {
let mut buf = [0; 3];
        self.delayed_read_cmd(Command::GetSensorAltitude, &mut buf)?;
let altitude = u16::from_be_bytes([buf[0], buf[1]]);
Ok(altitude)
}
/// Set sensor altitude in meters above sea level.
pub fn set_altitude(&mut self, altitude: u16) -> Result<(), Error<E>> {
self.write_command_with_data(Command::SetSensorAltitude, altitude)?;
Ok(())
}
/// Set ambient pressure to enable continious pressure compensation
pub fn set_ambient_pressure(&mut self, pressure_hpa: u16) -> Result<(), Error<E>> {
self.write_command_with_data(Command::SetAmbientPressure, pressure_hpa)?;
Ok(())
}
/// Perform forced recalibration
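    ///
    /// The sensor answers 0xFFFF if the calibration failed; otherwise the returned
    /// word minus 0x8000 is the applied FRC correction in ppm.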
pub fn forced_recalibration(&mut self, target_co2_concentration: u16) -> Result<u16, Error<E>> {
let frc_correction = self.delayed_read_cmd_with_data(
Command::PerformForcedRecalibration,
target_co2_concentration,
)?;
if frc_correction == u16::MAX {
return Err(Error::Internal);
}
match frc_correction.checked_sub(0x8000) {
Some(concentration) => Ok(concentration),
None => Err(Error::Internal),
}
}
/// Get the status of automatic self-calibration
pub fn automatic_self_calibration(&mut self) -> Result<bool, Error<E>> {
let mut buf = [0; 3];
self.delayed_read_cmd(Command::GetAutomaticSelfCalibrationEnabled, &mut buf)?;
let status = u16::from_be_bytes([buf[0], buf[1]]) != 0;
Ok(status)
}
/// Enable or disable automatic self-calibration
pub fn set_automatic_self_calibration(&mut self, enabled: bool) -> Result<(), Error<E>> {
self.write_command_with_data(Command::SetAutomaticSelfCalibrationEnabled, enabled as u16)?;
Ok(())
}
/// Start low power periodic measurements
pub fn start_low_power_periodic_measurements(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::StartLowPowerPeriodicMeasurement)?;
Ok(())
}
/// Check whether new measurement data is available for read-out.
pub fn data_ready_status(&mut self) -> Result<bool, Error<E>> {
let mut buf = [0; 3];
self.delayed_read_cmd(Command::GetDataReadyStatus, &mut buf)?;
let status = u16::from_be_bytes([buf[0], buf[1]]);
        // 0x7FF masks the least-significant 11 bits. If they are all zeroes, then data isn't ready.
let ready = (status & 0x7FF) != 0;
Ok(ready)
}
/// Save settings to non-volatile memory
pub fn persist_settings(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::PersistSettings)?;
Ok(())
}
/// Get 48-bit serial number
pub fn serial_number(&mut self) -> Result<u64, Error<E>> {
let mut buf = [0; 9];
self.delayed_read_cmd(Command::GetSerialNumber, &mut buf)?;
let serial = u64::from(buf[0]) << 40
| u64::from(buf[1]) << 32
| u64::from(buf[3]) << 24
| u64::from(buf[4]) << 16
| u64::from(buf[6]) << 8
| u64::from(buf[7]);
Ok(serial)
}
/// End-of-line test to confirm sensor functionality.
pub fn self_test_is_ok(&mut self) -> Result<bool, Error<E>> {
let mut buf = [0; 3];
self.delayed_read_cmd(Command::PerformSelfTest, &mut buf)?;
let status = u16::from_be_bytes([buf[0], buf[1]]) == 0;
Ok(status)
}
/// Initiates the reset of all configurations stored in the EEPROM and erases the FRC and ASC algorithm history.
pub fn factory_reset(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::PerformFactoryReset)?;
Ok(())
}
/// The reinit command reinitializes the sensor by reloading user settings from EEPROM.
pub fn reinit(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::Reinit)?;
Ok(())
}
/// On-demand measurement of CO₂ concentration, relative humidity and temperature.
/// The sensor output is read with the measurement method.
#[cfg(feature = "scd41")]
    pub fn measure_single_shot(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::MeasureSingleShot)?;
Ok(())
}
/// On-demand measurement of relative humidity and temperature only.
    pub fn measure_single_shot_rht(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::MeasureSingleShotRhtOnly)?;
Ok(())
}
/// Put the sensor from idle to sleep mode to reduce current consumption.
    pub fn power_down(&mut self) -> Result<(), Error<E>> {
self.write_command(Command::PowerDown)?;
Ok(())
}
/// Wake up sensor from sleep mode to idle mode.
pub fn wake_up(&mut self) {
// Sensor does not acknowledge the wake-up call, error is ignored
self.write_command(Command::WakeUp).ok();
}
/// Command for reading values from the sensor
fn delayed_read_cmd(&mut self, cmd: Command, data: &mut [u8]) -> Result<(), Error<E>> {
self.write_command(cmd)?;
i2c::read_words_with_crc(&mut self.i2c, SCD4X_I2C_ADDRESS, data)?;
Ok(())
}
/// Send command with parameter, takes response
fn delayed_read_cmd_with_data(&mut self, cmd: Command, data: u16) -> Result<u16, Error<E>> {
self.write_command_with_data(cmd, data)?;
let mut buf = [0; 3];
i2c::read_words_with_crc(&mut self.i2c, SCD4X_I2C_ADDRESS, &mut buf)?;
Ok(u16::from_be_bytes([buf[0], buf[1]]))
}
/// Writes commands without additional arguments.
fn write_command(&mut self, cmd: Command) -> Result<(), Error<E>> {
let (command, delay, allowed_if_running) = cmd.as_tuple();
if !allowed_if_running && self.is_running {
return Err(Error::NotAllowed);
}
i2c::write_command(&mut self.i2c, SCD4X_I2C_ADDRESS, command).map_err(Error::I2c)?;
self.delay.delay_ms(delay);
Ok(())
}
/// Sets sensor internal parameter
fn write_command_with_data(&mut self, cmd: Command, data: u16) -> Result<(), Error<E>> {
let (command, delay, allowed_if_running) = cmd.as_tuple();
if !allowed_if_running && self.is_running {
return Err(Error::NotAllowed);
}
let c = command.to_be_bytes();
let d = data.to_be_bytes();
let mut buf = [0; 5];
buf[0..2].copy_from_slice(&c);
buf[2..4].copy_from_slice(&d);
buf[4] = crc8::calculate(&d);
self.i2c
.write(SCD4X_I2C_ADDRESS, &buf)
.map_err(Error::I2c)?;
self.delay.delay_ms(delay);
Ok(())
}
}
#[cfg(test)]
mod tests {
use embedded_hal_mock as hal;
use self::hal::delay::MockNoop as DelayMock;
use self::hal::i2c::{Mock as I2cMock, Transaction};
use super::*;
/// Test the get_serial_number function
#[test]
fn test_get_serial_number() {
// Arrange
let (cmd, _, _) = Command::GetSerialNumber.as_tuple();
let expectations = [
Transaction::write(SCD4X_I2C_ADDRESS, cmd.to_be_bytes().to_vec()),
Transaction::read(
SCD4X_I2C_ADDRESS,
vec![0xbe, 0xef, 0x92, 0xbe, 0xef, 0x92, 0xbe, 0xef, 0x92],
),
];
let mock = I2cMock::new(&expectations);
let mut sensor = Scd4x::new(mock, DelayMock);
// Act
let serial = sensor.serial_number().unwrap();
// Assert
assert_eq!(serial, 0xbeefbeefbeef);
}
}
| 35.213592 | 116 | 0.60307 |
50631ec052a4408e256b014d22d1f3aa3bebb0d5 | 9,375 | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#[derive(PartialEq)]
pub struct LetterKey {
pub(crate) lower: &'static str,
pub(crate) upper: &'static str,
pub(crate) alt: &'static str,
#[allow(unused)]
pub(crate) is_alt_accent: bool,
}
#[derive(PartialEq)]
pub enum SpecialKey {
ALT,
DEL,
ENTER,
SHIFT,
SPACE,
}
#[derive(PartialEq)]
pub enum Key {
Letter(LetterKey),
Special(SpecialKey, &'static str),
}
const DEFAULT_KEY: LetterKey = LetterKey { lower: "", upper: "", alt: "", is_alt_accent: false };
const KEY_A: Key = Key::Letter(LetterKey { lower: "a", upper: "A", alt: "æ", ..DEFAULT_KEY });
const KEY_B: Key = Key::Letter(LetterKey { lower: "b", upper: "B", ..DEFAULT_KEY });
const KEY_C: Key = Key::Letter(LetterKey { lower: "c", upper: "C", alt: "ç", ..DEFAULT_KEY });
const KEY_D: Key = Key::Letter(LetterKey { lower: "d", upper: "D", alt: "∂", ..DEFAULT_KEY });
const KEY_E: Key =
Key::Letter(LetterKey {
lower: "e", upper: "E", alt: "´", is_alt_accent: true, ..DEFAULT_KEY
});
const KEY_F: Key = Key::Letter(LetterKey { lower: "f", upper: "F", alt: "ƒ", ..DEFAULT_KEY });
const KEY_G: Key = Key::Letter(LetterKey { lower: "g", upper: "G", alt: "©", ..DEFAULT_KEY });
const KEY_H: Key = Key::Letter(LetterKey { lower: "h", upper: "H", ..DEFAULT_KEY });
const KEY_I: Key =
Key::Letter(LetterKey {
lower: "i", upper: "I", alt: "ˆ", is_alt_accent: true, ..DEFAULT_KEY
});
const KEY_J: Key = Key::Letter(LetterKey { lower: "j", upper: "J", ..DEFAULT_KEY });
const KEY_K: Key =
Key::Letter(LetterKey {
lower: "k", upper: "K", alt: "˚", is_alt_accent: true, ..DEFAULT_KEY
});
const KEY_L: Key = Key::Letter(LetterKey { lower: "l", upper: "L", ..DEFAULT_KEY });
const KEY_M: Key = Key::Letter(LetterKey { lower: "m", upper: "M", ..DEFAULT_KEY });
const KEY_N: Key =
Key::Letter(LetterKey {
lower: "n", upper: "N", alt: "˜", is_alt_accent: true, ..DEFAULT_KEY
});
const KEY_O: Key = Key::Letter(LetterKey { lower: "o", upper: "O", alt: "ø", ..DEFAULT_KEY });
const KEY_P: Key = Key::Letter(LetterKey { lower: "p", upper: "P", alt: "Ø", ..DEFAULT_KEY });
const KEY_Q: Key = Key::Letter(LetterKey { lower: "q", upper: "Q", alt: "œ", ..DEFAULT_KEY });
const KEY_R: Key = Key::Letter(LetterKey { lower: "r", upper: "R", alt: "®", ..DEFAULT_KEY });
const KEY_S: Key = Key::Letter(LetterKey { lower: "s", upper: "S", alt: "ß", ..DEFAULT_KEY });
const KEY_T: Key = Key::Letter(LetterKey { lower: "t", upper: "T", ..DEFAULT_KEY });
const KEY_U: Key =
Key::Letter(LetterKey {
lower: "u", upper: "U", alt: "¨", is_alt_accent: true, ..DEFAULT_KEY
});
const KEY_V: Key = Key::Letter(LetterKey { lower: "v", upper: "V", ..DEFAULT_KEY });
const KEY_W: Key = Key::Letter(LetterKey { lower: "w", upper: "W", alt: "Œ", ..DEFAULT_KEY });
const KEY_X: Key = Key::Letter(LetterKey { lower: "x", upper: "X", alt: "Ç", ..DEFAULT_KEY });
const KEY_Y: Key = Key::Letter(LetterKey { lower: "y", upper: "Y", alt: "¥", ..DEFAULT_KEY });
const KEY_Z: Key = Key::Letter(LetterKey { lower: "z", upper: "Z", alt: "Æ", ..DEFAULT_KEY });
const KEY_0: Key = Key::Letter(LetterKey { lower: "0", upper: ")", alt: "º", ..DEFAULT_KEY });
const KEY_1: Key = Key::Letter(LetterKey { lower: "1", upper: "!", alt: "¡", ..DEFAULT_KEY });
const KEY_2: Key = Key::Letter(LetterKey { lower: "2", upper: "@", alt: "€", ..DEFAULT_KEY });
const KEY_3: Key = Key::Letter(LetterKey { lower: "3", upper: "#", alt: "£", ..DEFAULT_KEY });
const KEY_4: Key = Key::Letter(LetterKey { lower: "4", upper: "$", alt: "¢", ..DEFAULT_KEY });
const KEY_5: Key = Key::Letter(LetterKey { lower: "5", upper: "%", alt: "∞", ..DEFAULT_KEY });
const KEY_6: Key = Key::Letter(LetterKey { lower: "6", upper: "^", alt: "§", ..DEFAULT_KEY });
const KEY_7: Key = Key::Letter(LetterKey { lower: "7", upper: "&", alt: "¶", ..DEFAULT_KEY });
const KEY_8: Key = Key::Letter(LetterKey { lower: "8", upper: "*", alt: "•", ..DEFAULT_KEY });
const KEY_9: Key = Key::Letter(LetterKey { lower: "9", upper: "(", alt: "ª", ..DEFAULT_KEY });
const KEY_BACK_TICK: Key =
Key::Letter(LetterKey { lower: "`", upper: "~", alt: "`", ..DEFAULT_KEY });
const KEY_HYPHEN: Key = Key::Letter(LetterKey { lower: "-", upper: "_", alt: "±", ..DEFAULT_KEY });
const KEY_EQUALS: Key =
Key::Letter(LetterKey { lower: "=", upper: "+", alt: "≠", ..DEFAULT_KEY });
const KEY_L_BRACKET: Key = Key::Letter(LetterKey { lower: "[", upper: "{", ..DEFAULT_KEY });
const KEY_R_BRACKET: Key = Key::Letter(LetterKey { lower: "]", upper: "}", ..DEFAULT_KEY });
const KEY_BACKSLASH: Key = Key::Letter(LetterKey { lower: "\\", upper: "|", ..DEFAULT_KEY });
const KEY_SEMICOLON: Key = Key::Letter(LetterKey { lower: ";", upper: ":", ..DEFAULT_KEY });
const KEY_QUOTE: Key = Key::Letter(LetterKey { lower: "'", upper: "\"", ..DEFAULT_KEY });
const KEY_COMMA: Key = Key::Letter(LetterKey { lower: ",", upper: "<", ..DEFAULT_KEY });
const KEY_FULL_STOP: Key = Key::Letter(LetterKey { lower: ".", upper: ">", ..DEFAULT_KEY });
const KEY_SLASH: Key = Key::Letter(LetterKey { lower: "/", upper: "?", ..DEFAULT_KEY });
const KEY_ALT: Key = Key::Special(SpecialKey::ALT, "alt");
const KEY_DEL: Key = Key::Special(SpecialKey::DEL, "del");
const KEY_ENTER: Key = Key::Special(SpecialKey::ENTER, "enter");
const KEY_SHIFT: Key = Key::Special(SpecialKey::SHIFT, "shift");
const KEY_SPACE: Key = Key::Special(SpecialKey::SPACE, "space");
#[allow(unused)]
pub(crate) const ROW0: &'static [&Key] = &[
&KEY_BACK_TICK,
&KEY_1,
&KEY_2,
&KEY_3,
&KEY_4,
&KEY_5,
&KEY_6,
&KEY_7,
&KEY_8,
&KEY_9,
&KEY_0,
&KEY_HYPHEN,
&KEY_EQUALS,
];
#[allow(unused)]
pub(crate) const ROW1: &'static [&Key] = &[
&KEY_Q,
&KEY_W,
&KEY_E,
&KEY_R,
&KEY_T,
&KEY_Y,
&KEY_U,
&KEY_I,
&KEY_O,
&KEY_P,
&KEY_L_BRACKET,
&KEY_R_BRACKET,
&KEY_BACKSLASH,
];
#[allow(unused)]
const ROW2: &'static [&Key] = &[
&KEY_A,
&KEY_S,
&KEY_D,
&KEY_F,
&KEY_G,
&KEY_H,
&KEY_J,
&KEY_K,
&KEY_L,
&KEY_SEMICOLON,
&KEY_QUOTE,
];
#[allow(unused)]
const ROW3: &'static [&Key] = &[
&KEY_Z,
&KEY_X,
&KEY_C,
&KEY_V,
&KEY_B,
&KEY_N,
&KEY_M,
&KEY_COMMA,
&KEY_FULL_STOP,
&KEY_SLASH,
];
#[allow(unused)]
const SPECIAL_ROW: &'static [&Key] = &[&KEY_SHIFT, &KEY_ALT, &KEY_SPACE, &KEY_DEL, &KEY_ENTER];
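// Rows are listed top to bottom, number row first; the special row carries the
// non-printing keys (shift, alt, space, delete, enter).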
#[allow(unused)]
pub const KEYBOARD: &'static [&'static [&Key]] = &[ROW0, ROW1, ROW2, ROW3, SPECIAL_ROW];
#[allow(unused)]
pub struct Accent {
accent: &'static Key,
pub(crate) lower: &'static str,
pub(crate) upper: &'static str,
}
#[allow(unused)]
pub struct AccentKey {
pub alt_key: &'static Key,
accents: &'static [&'static Accent],
}
const ACCENT_GRAVE: AccentKey = AccentKey {
alt_key: &KEY_BACK_TICK,
accents: &[
&Accent { accent: &KEY_A, lower: "à", upper: "À" },
&Accent { accent: &KEY_E, lower: "è", upper: "È" },
&Accent { accent: &KEY_I, lower: "ì", upper: "Ì" },
&Accent { accent: &KEY_O, lower: "ò", upper: "Ò" },
&Accent { accent: &KEY_U, lower: "ù", upper: "Ù" },
],
};
const ACCENT_ACUTE: AccentKey = AccentKey {
alt_key: &KEY_E,
accents: &[
&Accent { accent: &KEY_A, lower: "á", upper: "Á" },
&Accent { accent: &KEY_E, lower: "é", upper: "É" },
&Accent { accent: &KEY_I, lower: "í", upper: "Í" },
&Accent { accent: &KEY_O, lower: "ó", upper: "Ó" },
&Accent { accent: &KEY_U, lower: "ù", upper: "Ù" },
],
};
const ACCENT_RING: AccentKey =
AccentKey { alt_key: &KEY_K, accents: &[&Accent { accent: &KEY_A, lower: "å", upper: "Å" }] };
const ACCENT_UMLAUT: AccentKey = AccentKey {
alt_key: &KEY_U,
accents: &[
&Accent { accent: &KEY_A, lower: "ä", upper: "Ä" },
&Accent { accent: &KEY_E, lower: "ë", upper: "Ë" },
&Accent { accent: &KEY_I, lower: "ï", upper: "Ï" },
&Accent { accent: &KEY_O, lower: "ö", upper: "Ö" },
&Accent { accent: &KEY_U, lower: "ü", upper: "ü" },
],
};
const ACCENT_CIRCUMFLEX: AccentKey = AccentKey {
alt_key: &KEY_I,
accents: &[
&Accent { accent: &KEY_A, lower: "â", upper: "Â" },
&Accent { accent: &KEY_E, lower: "ê", upper: "Ê" },
&Accent { accent: &KEY_I, lower: "î", upper: "Î" },
&Accent { accent: &KEY_O, lower: "ô", upper: "Ô" },
&Accent { accent: &KEY_U, lower: "û", upper: "Û" },
],
};
const ACCENT_TILDE: AccentKey = AccentKey {
alt_key: &KEY_N,
accents: &[
&Accent { accent: &KEY_A, lower: "ã", upper: "Ã" },
&Accent { accent: &KEY_N, lower: "ñ", upper: "Ñ" },
&Accent { accent: &KEY_O, lower: "õ", upper: "Õ " },
&Accent { accent: &KEY_U, lower: "û", upper: "Û" },
],
};
#[allow(unused)]
pub const ACCENTS: &'static [AccentKey] =
&[ACCENT_GRAVE, ACCENT_CIRCUMFLEX, ACCENT_ACUTE, ACCENT_RING, ACCENT_UMLAUT, ACCENT_TILDE];
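/// Looks up the accented character produced by following the dead key `alt_key`
/// with `pressed_key`, e.g. alt+`e` (acute) then `a` yields "á" / "Á".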
pub fn get_accent(alt_key: &Key, pressed_key: &Key) -> Option<&'static &'static Accent> {
if let Some(accent_key) = ACCENTS.iter().find(|ak| ak.alt_key == alt_key) {
return accent_key.accents.iter().find(|a| a.accent == pressed_key);
};
None
}
| 37.955466 | 99 | 0.589333 |