6790531c93dc7e912799a4a6b30c023a24a59b59
//! A helper trait to improve the ergonomics when working with multiple [`Option`]s. After
//! importing [`TupleCombinator`], you can treat a tuple of `Option`s as one `Option`.
//!
//! # Example
//!
//! ```
//! use tuple_combinator::TupleCombinator;
//!
//! fn main() {
//! let tuples = (Some(1), Some(2), Some(3));
//!
//! assert_eq!(tuples.map(|(a,b,c)| a + b + c), Some(6));
//! assert_eq!(tuples.and_then(|(a,b,c)| Some(a + b - c)), Some(0));
//! assert_eq!(tuples.transpose(), Some((1,2,3)));
//! assert_eq!((Some(1), None).map(|(a, b): (i32, i32)| 100), None);
//! }
//! ```
use std::any::Any;
/// The trait that provides helper functions for tuples. This trait mirrors most of
/// the methods defined in [`Option`].
#[doc(inline)]
pub trait TupleCombinator: Sized {
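/// The tuple of plain values produced when every `Option` in the source tuple is `Some`.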
type Tuple;
/// Transposes a tuple of [`Option`]s into an `Option` of a tuple. This function returns `None`
/// if any of the `Option`s is `None`.
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let left = (Some("foo"), Some(123));
/// assert_eq!(left.transpose(), Some(("foo", 123)));
/// ```
fn transpose(self) -> Option<Self::Tuple>;
/// See [`Option::map`].
///
/// # Examples
///
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let tuples = (Some("foo"), Some("bar"));
/// assert_eq!(tuples.map(|(a, b)| format!("{}{}", a, b)).unwrap(), "foobar");
/// ```
fn map<U, F: FnOnce(Self::Tuple) -> U>(self, f: F) -> Option<U> {
self.transpose().map(f)
}
/// See [`Option::expect`].
///
/// # Examples
///
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let tuples = (Some("foo"), Some(123));
/// assert_eq!(tuples.expect("should not panic"), ("foo", 123));
/// ```
///
/// ```{.should_panic}
/// # use tuple_combinator::TupleCombinator;
/// let tuples: (_, Option<i32>) = (Some("foo"), None);
/// tuples.expect("will panic");
/// ```
fn expect(self, msg: &str) -> Self::Tuple {
self.transpose().expect(msg)
}
/// See [`Option::unwrap`].
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let tuples = (Some("foo"), Some(123));
/// assert_eq!(tuples.unwrap(), ("foo", 123));
/// ```
///
/// This example will panic:
///
/// ```{.should_panic}
/// # use tuple_combinator::TupleCombinator;
/// let tuples: (_, Option<i32>) = (Some("foo"), None);
/// tuples.unwrap();
/// ```
fn unwrap(self) -> Self::Tuple {
self.transpose().unwrap()
}
/// See [`Option::and`].
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let left = (Some("foo"), Some(123));
/// let right = Some(("bar", 456));
/// assert_eq!(left.and(right), right);
///
/// let left_none = (None, Some(123));
/// assert_eq!(left_none.and(right), None);
/// ```
fn and(self, optb: Option<Self::Tuple>) -> Option<Self::Tuple> {
self.transpose().and(optb)
}
/// See [`Option::and_then`].
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let tuples = (Some("foobar"), Some(123));
/// assert_eq!(tuples.and_then(|(a, b)| Some(a.len() + b)), Some(129));
///
/// assert_eq!(tuples.and_then(|(a, b)| if b % 2 != 1 { Some(b) } else { None }), None);
/// ```
fn and_then<U, F: FnOnce(Self::Tuple) -> Option<U>>(self, f: F) -> Option<U> {
self.transpose().and_then(f)
}
/// See [`Option::filter`].
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let tuples = (Some("foobar"), Some(123));
/// assert_eq!(tuples.filter(|(a, b)| b % 2 == 1), Some(("foobar", 123)));
/// assert_eq!(tuples.filter(|(a, b)| b % 2 != 1), None);
/// ```
fn filter<P: FnOnce(&Self::Tuple) -> bool>(self, predicate: P) -> Option<Self::Tuple> {
self.transpose().filter(predicate)
}
/// See [`Option::or`].
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let left = (Some("foo"), Some(123));
/// let right = Some(("bar", 456));
/// assert_eq!(left.or(right), left.transpose());
///
/// let left_none = (None, Some(123));
/// assert_eq!(left_none.or(right), right);
/// ```
fn or(self, optb: Option<Self::Tuple>) -> Option<Self::Tuple> {
self.transpose().or(optb)
}
/// See [`Option::or_else`].
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let left = (Some("foo"), Some(123));
/// let right = Some(("bar", 456));
/// assert_eq!(left.or_else(|| right), left.transpose());
/// assert_eq!((None, Some(456)).or_else(|| right), right);
/// ```
fn or_else<F: FnOnce() -> Option<Self::Tuple>>(self, f: F) -> Option<Self::Tuple> {
self.transpose().or_else(f)
}
/// See [`Option::xor`].
/// ```
/// # use tuple_combinator::TupleCombinator;
/// let left = (Some("foo"), Some(123));
/// let right = Some(("bar", 456));
/// assert_eq!(left.xor(None), left.transpose());
/// assert_eq!(None.xor(left.transpose()), left.transpose());
/// assert_eq!(left.xor(right), None);
/// ```
fn xor(self, optb: Option<Self::Tuple>) -> Option<Self::Tuple> {
self.transpose().xor(optb)
}
}
/// Reduce a tuple of [`Option`]s into results of various forms, in a way comparable to iterators.
/// ```
/// use tuple_combinator::TupleReducer;
///
/// let res = (Some(1), Some(5), Some("rust_tuple")).fold(0, |sum, item| {
/// sum.and_then(|s| {
/// if let Some(raw_i32) = item.downcast_ref::<Option<i32>>() {
/// return raw_i32.as_ref()
/// .and_then(|val| {
/// Some(s + val)
/// });
/// }
///
/// if let Some(raw_str) = item.downcast_ref::<Option<&str>>() {
/// return raw_str.as_ref()
/// .and_then(|val| {
/// Some(s + val.len() as i32)
/// });
/// }
///
/// Some(s)
/// })
/// });
///
/// assert_eq!(res, Some(16));
/// ```
#[doc(inline)]
pub trait TupleReducer: Sized {
/// Fold the tuple to obtain a final outcome. Depending on the implementation of the handler
/// function, the fold can behave differently on various option types or values.
///
/// # Examples
///
/// Reduce tuples of i32 options to the sum of the contained values:
///
/// ```rust
/// use tuple_combinator::TupleReducer;
///
/// let res = (Some(17), Some(20)).fold(5, |sum, item| {
/// sum.and_then(|s| {
/// item.downcast_ref::<Option<i32>>()
/// .and_then(|raw| raw.as_ref())
/// .and_then(|val| {
/// Some(s + val)
/// })
/// })
/// });
///
/// assert_eq!(res, Some(42));
/// ```
fn fold<U, F: Fn(Option<U>, &dyn Any) -> Option<U>>(&self, init: U, f: F) -> Option<U>;
/// `fold_strict` works very much like `fold`, except that only options whose wrapped data
/// type matches the output type will be "folded", i.e. passed to the supplied folding function.
/// This function comes in handy when the caller only cares about the options in the tuple that
/// match the output type.
///
/// # Examples
/// ```rust
/// use tuple_combinator::TupleReducer;
///
/// let res = (Some(40), Some("noise"), None as Option<i32>, Some(2))
/// .fold_strict(0i32, |sum, item| {
/// sum.and_then(|s| {
/// Some(s + item)
/// })
/// });
///
/// assert_eq!(res, Some(42));
/// ```
fn fold_strict<U: Any, F: Fn(Option<U>, &U) -> Option<U>>(&self, init: U, f: F) -> Option<U>;
/// Convert the tuple to a reference slice, so that the caller can use native iteration tools.
/// Note that this is a re-export of the tuple's internal content, hence the slice can't live
/// longer than the tuple itself.
///
/// # Examples
///
/// ```rust
/// use std::any::Any;
/// use tuple_combinator::TupleReducer;
///
/// let mut src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
///
/// // convert the tuples to a slice of `Any` type
/// let slice: Box<[&dyn Any]> = src.ref_slice();
///
/// // the slice has the same amount of elements as in the tuples.
/// assert_eq!(slice.len(), 5);
///
/// // downcast the element to its actual type; wrong type cast will be rejected with a `None`
/// // output from the API call.
/// assert_eq!(slice[0].downcast_ref::<Option<i32>>().unwrap(), &Some(1));
/// assert_eq!(slice[0].downcast_ref::<Option<&str>>(), None);
/// assert_eq!(slice[1].downcast_ref::<Option<&str>>().unwrap(), &None);
///
/// // unlike `mut_slice` API, the line below won't compile even if adding the `mut` keyword
/// // to the `slice` variable, because the source slice is immutable.
/// // let first = slice[0].downcast_mut::<Option<i32>>().unwrap().take();
/// ```
fn ref_slice(&self) -> Box<[&dyn Any]>;
/// Convert the tuple to a reference slice which only includes options wrapping data of the
/// desired type `T`. Options with types other than the given one will be excluded from the slice.
/// Note that if the slice length is 0, the source tuple does not contain any options that can
/// be converted to type `T`.
///
/// # Examples
/// ```rust
/// use tuple_combinator::TupleReducer;
///
/// let src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
/// let slice = src.strict_ref_slice::<i32>();
///
/// // The above variable initialization is equivalent to the following:
/// // let slice: Box<[&Option<i32>]> = src.strict_ref_slice();
///
/// assert_eq!(slice.len(), 3);
/// assert_eq!(
/// slice,
/// vec![&Some(1), &Some(2), &None as &Option<i32>].into_boxed_slice()
/// );
///
/// // The line below won't compile because the immutability of the slice.
/// // let first = slice[0].downcast_mut::<Option<i32>>().unwrap().take();
/// ```
fn strict_ref_slice<T: Any>(&self) -> Box<[&Option<T>]>;
/// This method works similarly to `ref_slice`, except that the members of the slice are mutable,
/// so it is possible to make updates, or take ownership of the underlying tuple data.
/// Note that modifying or altering the slice data will also cause the same data in the tuple to
/// be altered.
///
/// # Examples
///
/// ```rust
/// use std::any::Any;
/// use tuple_combinator::TupleReducer;
///
/// let mut src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
/// let slice: Box<[&mut dyn Any]> = src.mut_slice();
///
/// assert_eq!(slice.len(), 5);
/// assert_eq!(slice[0].downcast_ref::<Option<i32>>().unwrap(), &Some(1));
/// assert_eq!(slice[1].downcast_ref::<Option<&str>>().unwrap(), &None);
///
/// let first = slice[0].downcast_mut::<Option<i32>>().unwrap().take();
/// assert_eq!(first, Some(1));
/// ```
fn mut_slice(&mut self) -> Box<[&mut dyn Any]>;
/// This method works similarly to `strict_ref_slice`, except that the members of the slice are
/// mutable, so it is possible to make updates, or take ownership of the underlying
/// tuple data. Note that modifying or altering the slice data will also cause the same data
/// in the tuple to be altered.
///
/// # Examples
///
/// ```rust
/// use tuple_combinator::TupleReducer;
///
/// let mut src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
/// let slice = src.strict_mut_slice::<i32>();
///
/// // The above variable initialization is equivalent to the following:
/// // let slice: Box<[&mut Option<i32>]> = src.strict_mut_slice();
///
/// assert_eq!(slice.len(), 3);
/// assert_eq!(
/// slice,
/// vec![&mut Some(1), &mut Some(2), &mut None as &mut Option<i32>].into_boxed_slice()
/// );
///
/// // Now you can take the wrapped content out of the tuple/slice and operate on the element.
/// // Note that operations on a slice element have the same effect on the originating tuple,
/// // since slice elements are merely mutable borrows.
/// let first = slice[0].take();
/// assert_eq!(first, Some(1));
/// assert_eq!(slice[0], &mut None);
/// ```
fn strict_mut_slice<T: Any>(&mut self) -> Box<[&mut Option<T>]>;
}
macro_rules! tuple_impls {
( $( $v:ident: $T:ident, )* ) => {
impl<$($T,)*> TupleCombinator for ($(Option<$T>,)*) {
type Tuple = ($($T,)*);
fn transpose(self) -> Option<Self::Tuple> {
if let ($(Some($v),)*) = self {
Some(($($v,)*))
} else {
None
}
}
}
};
}
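// For illustration, an invocation like `tuple_impls! { t1: T1, t2: T2, }` expands to
// roughly the following impl:
//
// impl<T1, T2> TupleCombinator for (Option<T1>, Option<T2>) {
//     type Tuple = (T1, T2);
//     fn transpose(self) -> Option<Self::Tuple> {
//         if let (Some(t1), Some(t2)) = self {
//             Some((t1, t2))
//         } else {
//             None
//         }
//     }
// }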
macro_rules! tuple_impl_reduce {
() => {};
( $( $ntyp:ident => $nidx:tt, )+ ) => {
impl<$( $ntyp, )+> TupleReducer for ( $( Option<$ntyp>, )+ )
where
$( $ntyp: Any, )*
{
fn fold<U, F: Fn(Option<U>, &dyn Any) -> Option<U>>(&self, init: U, f: F) -> Option<U> {
let mut accu = Some(init);
$(
accu = f(accu, &self.$nidx);
)*
accu
}
fn fold_strict<U: Any, F: Fn(Option<U>, &U) -> Option<U>>(&self, init: U, f: F) -> Option<U> {
let mut accu = Some(init);
$(
let opt = (&self.$nidx as &dyn Any)
.downcast_ref::<Option<U>>()
.and_then(|opt| opt.as_ref());
// Avoid using a combinator here, since the closure would cause `accu` to move and
// lead to all sorts of headaches.
if let Some(value) = opt {
accu = f(accu, value);
}
)*
accu
}
fn ref_slice(&self) -> Box<[&dyn Any]> {
// The maximum number of elements in a tuple is 12; that's the upper bound.
let mut vec: Vec<&dyn Any> = Vec::with_capacity(12);
$(
vec.push(&self.$nidx);
)*
vec.into_boxed_slice()
}
fn strict_ref_slice<T: Any>(&self) -> Box<[&Option<T>]> {
// The maximum number of elements in a tuple is 12; that's the upper bound.
let mut vec: Vec<&Option<T>> = Vec::with_capacity(12);
$(
(&self.$nidx as &dyn Any)
.downcast_ref::<Option<T>>()
.and_then(|opt| {
vec.push(opt);
Some(())
});
)*
vec.into_boxed_slice()
}
fn mut_slice(&mut self) -> Box<[&mut dyn Any]> {
// The maximum number of elements in a tuple is 12; that's the upper bound.
let mut vec: Vec<&mut dyn Any> = Vec::with_capacity(12);
$(
vec.push(&mut self.$nidx);
)*
vec.into_boxed_slice()
}
fn strict_mut_slice<T: Any>(&mut self) -> Box<[&mut Option<T>]> {
// The maximum number of elements in a tuple is 12; that's the upper bound.
let mut vec: Vec<&mut Option<T>> = Vec::with_capacity(12);
$(
(&mut self.$nidx as &mut dyn Any)
.downcast_mut::<Option<T>>()
.and_then(|opt| {
vec.push(opt);
Some(())
});
)*
vec.into_boxed_slice()
}
}
};
}
// Impl TupleCombinator
tuple_impls! { t1: T1, }
tuple_impls! { t1: T1, t2: T2, }
tuple_impls! { t1: T1, t2: T2, t3: T3, }
tuple_impls! { t1: T1, t2: T2, t3: T3, t4: T4, }
tuple_impls! { t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, }
tuple_impls! { t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, }
tuple_impls! { t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, }
tuple_impls! { t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, }
tuple_impls! { t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, }
tuple_impls! { t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, }
// Impl TupleReducer
tuple_impl_reduce! { T0 => 0, }
tuple_impl_reduce! { T0 => 0, T1 => 1, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, T3 => 3, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, T3 => 3, T4 => 4, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, T3 => 3, T4 => 4, T5 => 5, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, T3 => 3, T4 => 4, T5 => 5, T6 => 6, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, T3 => 3, T4 => 4, T5 => 5, T6 => 6, T7 => 7, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, T3 => 3, T4 => 4, T5 => 5, T6 => 6, T7 => 7, T8 => 8, }
tuple_impl_reduce! { T0 => 0, T1 => 1, T2 => 2, T3 => 3, T4 => 4, T5 => 5, T6 => 6, T7 => 7, T8 => 8, T9 => 9, }
#[cfg(test)]
mod impl_tests {
use super::TupleReducer;
use std::any::Any;
#[test]
fn fold_sum() {
let res = (Some(17), Some(20)).fold(5, |sum, item| {
sum.and_then(|s| {
item.downcast_ref::<Option<i32>>()
.and_then(|raw| raw.as_ref())
.and_then(|val| Some(s + val))
})
});
assert_eq!(res, Some(42));
}
#[test]
fn fold_mixed() {
let res = (
Some(1),
Some(5),
Some("rust_tuple"),
Some(String::from("tuple_reducer")),
Some(vec![0u8, 1, 42]), // the vec that wraps all the wisdom of this universe
).fold(0, |sum, item| {
sum.and_then(|s| {
if let Some(raw_i32) = item.downcast_ref::<Option<i32>>() {
return raw_i32.as_ref().and_then(|val| Some(s + val));
}
if let Some(raw_str) = item.downcast_ref::<Option<&str>>() {
return raw_str.as_ref().and_then(|val| Some(s + val.len() as i32));
}
if let Some(raw_string) = item.downcast_ref::<Option<String>>() {
return raw_string.as_ref().and_then(|val| Some(s + val.len() as i32));
}
if let Some(raw_vec) = item.downcast_ref::<Option<Vec<u8>>>() {
return raw_vec.as_ref().and_then(|val| Some(s + val.len() as i32));
}
Some(s)
})
});
assert_eq!(res, Some(32));
}
#[test]
fn fold_none_as_nuke() {
let none: Option<i32> = None;
let res = (Some(1), none, Some(5)).fold(0, |sum, item| {
sum.and_then(|s| {
item.downcast_ref::<Option<i32>>()
.and_then(|raw| raw.as_ref())
.and_then(|val| Some(s + val))
})
});
assert_eq!(res, None);
}
#[test]
fn fold_none_as_reset() {
let none: Option<i32> = None;
let init = 0;
let res = (Some(1), none, Some(5)).fold(init, |sum, item| {
item.downcast_ref::<Option<i32>>()
.and_then(|raw| raw.as_ref())
.and_then(|val| {
if let Some(s) = sum {
Some(s + val)
} else {
Some(init + val)
}
})
});
assert_eq!(res, Some(5));
}
#[test]
fn fold_strict_base() {
let res = (Some(40), Some("noise"), None as Option<i32>, Some(2))
.fold_strict(0i32, |sum, item| {
sum.and_then(|s| {
Some(s + item)
})
});
assert_eq!(res, Some(42));
}
#[test]
fn ref_slice_base() {
let src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
// convert the tuples to a slice of `Any` type
let slice: Box<[&dyn Any]> = src.ref_slice();
// the slice has the same amount of elements as in the tuples.
assert_eq!(slice.len(), 5);
// downcast the element to its actual type; wrong type cast will be rejected with a `None`
// output from the API call.
assert_eq!(slice[0].downcast_ref::<Option<i32>>().unwrap(), &Some(1));
assert_eq!(slice[0].downcast_ref::<Option<&str>>(), None);
assert_eq!(slice[1].downcast_ref::<Option<&str>>().unwrap(), &None);
}
#[test]
fn strict_ref_slice_base() {
let src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
let slice = src.strict_ref_slice::<i32>();
// The above variable initialization is equivalent to the following:
// let slice: Box<[&Option<i32>]> = src.strict_ref_slice();
assert_eq!(slice.len(), 3);
assert_eq!(
slice,
vec![&Some(1), &Some(2), &None as &Option<i32>].into_boxed_slice()
);
}
#[test]
fn mut_slice_base() {
let mut src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
let slice: Box<[&mut dyn Any]> = src.mut_slice();
assert_eq!(slice.len(), 5);
assert_eq!(slice[0].downcast_ref::<Option<i32>>().unwrap(), &Some(1));
assert_eq!(slice[1].downcast_ref::<Option<&str>>().unwrap(), &None);
let first = slice[0].downcast_mut::<Option<i32>>().unwrap().take();
assert_eq!(first, Some(1));
}
#[test]
fn strict_mut_slice_base() {
let mut src = (Some(1), None as Option<&str>, Some(2), None as Option<i32>, Some(()));
let slice = src.strict_mut_slice::<i32>();
// The above variable initialization is equivalent to the following:
// let slice: Box<[&mut Option<i32>]> = src.strict_mut_slice();
assert_eq!(slice.len(), 3);
assert_eq!(
slice,
vec![&mut Some(1), &mut Some(2), &mut None as &mut Option<i32>].into_boxed_slice()
);
let first = slice[0].take();
assert_eq!(first, Some(1));
assert_eq!(slice[0], &mut None);
}
}
50e007549cad03719ddbd4e22bfd2a002fd6d83c
use crate::{proto_divine::ProtoDivine, Lang};
use wfts_pedia_ssg::{
component::{
text::{Link, Paragraph},
Component,
},
location::{Fragment, Id, InternalPath, Location},
page::{Page, Section},
site::{Directory, Node},
};
pub fn make(dir: &mut Directory) {
dir.insert(
InternalPath::parse("index.html").unwrap(),
Node::Page(Page {
title: "Proto-Divine Language".to_owned(),
body: vec![
Paragraph(
"This article is about the ancestor of all languages: \
Proto-Divine. This language was spoken by the younger \
gods, and split into several languages as the group of \
gods itself split apart. It was never written, and so \
it is a reconstructed language.",
),
Paragraph(
"The reconstruction is mainly based on the languages of \
the later gods. However, some divine seer magic is also \
used, in order to gather visions from the younger gods \
speaking. Although the younger gods died to raise the \
next generation of gods, some memories were carried on \
through the generations, and those are also important.",
),
]
.to_dyn(),
sections: vec![
Section {
title: "Phonology".to_dyn(),
body: Paragraph(Link {
text: "See this article.",
location: Location::from(
ProtoDivine
.path()
.append(Fragment::new("phonology").unwrap()),
),
})
.to_dyn(),
children: vec![],
id: Id::new("phonology").unwrap(),
},
Section {
title: "Writing System".to_dyn(),
body: Paragraph(Link {
text: "See this article.",
location: Location::from(
ProtoDivine
.path()
.append(Fragment::new("writing").unwrap()),
),
})
.to_dyn(),
children: vec![],
id: Id::new("phonology").unwrap(),
},
Section {
title: "Grammar".to_dyn(),
body: Paragraph(Link {
text: "See this article.",
location: Location::from(
ProtoDivine
.path()
.append(Fragment::new("grammar").unwrap()),
),
})
.to_dyn(),
children: vec![],
id: Id::new("grammar").unwrap(),
},
Section {
title: "Dictionary".to_dyn(),
body: Paragraph(Link {
text: "See this article.",
location: Location::from(
ProtoDivine
.path()
.append(Fragment::new("dictionary").unwrap()),
),
})
.to_dyn(),
children: vec![],
id: Id::new("dictionary").unwrap(),
},
],
}),
);
}
9b72f4866497bec8713dc1f5ef7dd3856038e0b3
use rand_core;
use zeroize::Zeroize;
use crate::strobe::Strobe128;
fn encode_u64(x: u64) -> [u8; 8] {
use byteorder::{ByteOrder, LittleEndian};
let mut buf = [0; 8];
LittleEndian::write_u64(&mut buf, x);
buf
}
fn encode_usize_as_u32(x: usize) -> [u8; 4] {
use byteorder::{ByteOrder, LittleEndian};
assert!(x <= (u32::max_value() as usize));
let mut buf = [0; 4];
LittleEndian::write_u32(&mut buf, x as u32);
buf
}
/// A transcript of a public-coin argument.
///
/// The prover's messages are added to the transcript using
/// [`append_message`](Transcript::append_message), and the verifier's
/// challenges can be computed using
/// [`challenge_bytes`](Transcript::challenge_bytes).
///
/// # Creating and using a Merlin transcript
///
/// To create a Merlin transcript, use [`Transcript::new()`]. This
/// function takes a domain separation label which should be unique to
/// the application.
///
/// To use the transcript with a Merlin-based proof implementation,
/// the prover's side creates a Merlin transcript with an
/// application-specific domain separation label, and passes a `&mut`
/// reference to the transcript to the proving function(s).
///
/// To verify the resulting proof, the verifier creates their own
/// Merlin transcript using the same domain separation label, then
/// passes a `&mut` reference to the verifier's transcript to the
/// verification function.
///
/// # Implementing proofs using Merlin
///
/// For information on the design of Merlin and how to use it to
/// implement a proof system, see the documentation at
/// [merlin.cool](https://merlin.cool), particularly the [Using
/// Merlin](https://merlin.cool/use/index.html) section.
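///
/// # Example
///
/// A minimal sketch of appending a prover message and deriving a verifier
/// challenge; the protocol name, labels, and byte strings below are purely
/// illustrative:
///
/// ```
/// use merlin::Transcript;
///
/// let mut transcript = Transcript::new(b"example protocol");
/// transcript.append_message(b"some label", b"some data");
///
/// let mut challenge = [0u8; 32];
/// transcript.challenge_bytes(b"challenge", &mut challenge);
/// ```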
#[derive(Clone, Zeroize)]
pub struct Transcript {
strobe: Strobe128,
}
impl Transcript {
/// Initialize a new transcript with the supplied `label`, which
/// is used as a domain separator.
///
/// # Note
///
/// This function should be called by a proof library's API
/// consumer (i.e., the application using the proof library), and
/// **not by the proof implementation**. See the [Passing
/// Transcripts](https://merlin.cool/use/passing.html) section of
/// the Merlin website for more details on why.
pub fn new(label: &'static [u8]) -> Transcript {
use crate::constants::MERLIN_PROTOCOL_LABEL;
#[cfg(feature = "debug-transcript")]
{
use std::str::from_utf8;
println!(
"Initialize STROBE-128({})\t# b\"{}\"",
hex::encode(MERLIN_PROTOCOL_LABEL),
from_utf8(MERLIN_PROTOCOL_LABEL).unwrap(),
);
}
let mut transcript = Transcript {
strobe: Strobe128::new(MERLIN_PROTOCOL_LABEL),
};
transcript.append_message(b"dom-sep", label);
transcript
}
/// Append a prover's `message` to the transcript.
///
/// The `label` parameter is metadata about the message, and is
/// also appended to the transcript. See the [Transcript
/// Protocols](https://merlin.cool/use/protocol.html) section of
/// the Merlin website for details on labels.
pub fn append_message(&mut self, label: &'static [u8], message: &[u8]) {
let data_len = encode_usize_as_u32(message.len());
self.strobe.meta_ad(label, false);
self.strobe.meta_ad(&data_len, true);
self.strobe.ad(message, false);
#[cfg(feature = "debug-transcript")]
{
use std::str::from_utf8;
match from_utf8(label) {
Ok(label_str) => {
println!(
"meta-AD : {} || LE32({})\t# b\"{}\"",
hex::encode(label),
message.len(),
label_str
);
}
Err(_) => {
println!(
"meta-AD : {} || LE32({})",
hex::encode(label),
message.len()
);
}
}
match from_utf8(message) {
Ok(message_str) => {
println!(" AD : {}\t# b\"{}\"", hex::encode(message), message_str);
}
Err(_) => {
println!(" AD : {}", hex::encode(message));
}
}
}
}
/// Deprecated. This function was renamed to
/// [`append_message`](Transcript::append_message).
///
/// This is intended to avoid any possible confusion between the
/// transcript-level messages and protocol-level commitments.
#[deprecated(since = "1.1.0", note = "renamed to append_message for clarity.")]
pub fn commit_bytes(&mut self, label: &'static [u8], message: &[u8]) {
self.append_message(label, message);
}
/// Convenience method for appending a `u64` to the transcript.
///
/// The `label` parameter is metadata about the message, and is
/// also appended to the transcript. See the [Transcript
/// Protocols](https://merlin.cool/use/protocol.html) section of
/// the Merlin website for details on labels.
///
/// # Implementation
///
/// Calls `append_message` with the 8-byte little-endian encoding
/// of `x`.
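///
/// # Example
///
/// A short illustrative call; the label and value are arbitrary:
///
/// ```
/// # use merlin::Transcript;
/// let mut transcript = Transcript::new(b"example protocol");
/// transcript.append_u64(b"n", 42);
/// ```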
pub fn append_u64(&mut self, label: &'static [u8], x: u64) {
self.append_message(label, &encode_u64(x));
}
/// Deprecated. This function was renamed to
/// [`append_u64`](Transcript::append_u64).
///
/// This is intended to avoid any possible confusion between the
/// transcript-level messages and protocol-level commitments.
#[deprecated(since = "1.1.0", note = "renamed to append_u64 for clarity.")]
pub fn commit_u64(&mut self, label: &'static [u8], x: u64) {
self.append_u64(label, x);
}
/// Fill the supplied buffer with the verifier's challenge bytes.
///
/// The `label` parameter is metadata about the challenge, and is
/// also appended to the transcript. See the [Transcript
/// Protocols](https://merlin.cool/use/protocol.html) section of
/// the Merlin website for details on labels.
pub fn challenge_bytes(&mut self, label: &'static [u8], dest: &mut [u8]) {
let data_len = encode_usize_as_u32(dest.len());
self.strobe.meta_ad(label, false);
self.strobe.meta_ad(&data_len, true);
self.strobe.prf(dest, false);
#[cfg(feature = "debug-transcript")]
{
use std::str::from_utf8;
match from_utf8(label) {
Ok(label_str) => {
println!(
"meta-AD : {} || LE32({})\t# b\"{}\"",
hex::encode(label),
dest.len(),
label_str
);
}
Err(_) => {
println!("meta-AD : {} || LE32({})", hex::encode(label), dest.len());
}
}
println!(" PRF: {}", hex::encode(dest));
}
}
/// Fork the current [`Transcript`] to construct an RNG whose output is bound
/// to the current transcript state as well as prover's secrets.
///
/// See the [`TranscriptRngBuilder`] documentation for more details.
pub fn build_rng(&self) -> TranscriptRngBuilder {
TranscriptRngBuilder {
strobe: self.strobe.clone(),
}
}
}
/// Constructs a [`TranscriptRng`] by rekeying the [`Transcript`] with
/// prover secrets and an external RNG.
///
/// The prover uses a [`TranscriptRngBuilder`] to rekey with its
/// witness data, before using an external RNG to finalize to a
/// [`TranscriptRng`]. The resulting [`TranscriptRng`] will be a PRF
/// of all of the entire public transcript, the prover's secret
/// witness data, and randomness from the external RNG.
///
/// # Usage
///
/// To construct a [`TranscriptRng`], a prover calls
/// [`Transcript::build_rng()`] to clone the transcript state, then
/// uses [`rekey_with_witness_bytes()`][rekey_with_witness_bytes] to rekey the
/// transcript with the prover's secrets, before finally calling
/// [`finalize()`][finalize]. This rekeys the transcript with the
/// output of an external [`rand_core::RngCore`] instance and returns
/// a finalized [`TranscriptRng`].
///
/// These methods are intended to be chained, passing from a borrowed
/// [`Transcript`] to an owned [`TranscriptRng`] as follows:
/// ```
/// # extern crate merlin;
/// # extern crate rand_core;
/// # use merlin::Transcript;
/// # fn main() {
/// # let mut transcript = Transcript::new(b"TranscriptRng doctest");
/// # let public_data = b"public data";
/// # let witness_data = b"witness data";
/// # let more_witness_data = b"more witness data";
/// transcript.append_message(b"public", public_data);
///
/// let mut rng = transcript
/// .build_rng()
/// .rekey_with_witness_bytes(b"witness1", witness_data)
/// .rekey_with_witness_bytes(b"witness2", more_witness_data)
/// .finalize(&mut rand_core::OsRng);
/// # }
/// ```
/// In this example, the final `rng` is a PRF of `public_data`
/// (as well as all previous `transcript` state), and of the prover's
/// secret `witness_data` and `more_witness_data`, and finally, of the
/// output of the external RNG (`OsRng` in this example).
/// Note that because the [`TranscriptRng`] is produced from
/// [`finalize()`][finalize], it's impossible to forget
/// to rekey the transcript with external randomness.
///
/// # Note
///
/// Protocols that require randomness in multiple places (e.g., to
/// choose blinding factors for a multi-round protocol) should create
/// a fresh [`TranscriptRng`] **each time they need randomness**,
/// rather than reusing a single instance. This ensures that the
/// randomness in each round is bound to the latest transcript state,
/// rather than just the state of the transcript when randomness was
/// first required.
///
/// # Typed Witness Data
///
/// Like the [`Transcript`], the [`TranscriptRngBuilder`] provides a
/// minimal, byte-oriented API, and like the [`Transcript`], this API
/// can be extended to allow rekeying with protocol-specific types
/// using an extension trait. See the [Transcript
/// Protocols](https://merlin.cool/use/protocol.html) section of the
/// Merlin website for more details.
///
/// [rekey_with_witness_bytes]: TranscriptRngBuilder::rekey_with_witness_bytes
/// [finalize]: TranscriptRngBuilder::finalize
pub struct TranscriptRngBuilder {
strobe: Strobe128,
}
impl TranscriptRngBuilder {
/// Rekey the transcript using the provided witness data.
///
/// The `label` parameter is metadata about `witness`.
pub fn rekey_with_witness_bytes(
mut self,
label: &'static [u8],
witness: &[u8],
) -> TranscriptRngBuilder {
let witness_len = encode_usize_as_u32(witness.len());
self.strobe.meta_ad(label, false);
self.strobe.meta_ad(&witness_len, true);
self.strobe.key(witness, false);
self
}
/// Deprecated. This function was renamed to
/// [`rekey_with_witness_bytes`](Transcript::rekey_with_witness_bytes).
///
/// This is intended to avoid any possible confusion between the
/// transcript-level messages and protocol-level commitments.
#[deprecated(
since = "1.1.0",
note = "renamed to rekey_with_witness_bytes for clarity."
)]
pub fn commit_witness_bytes(
self,
label: &'static [u8],
witness: &[u8],
) -> TranscriptRngBuilder {
self.rekey_with_witness_bytes(label, witness)
}
/// Use the supplied external `rng` to rekey the transcript, so
/// that the finalized [`TranscriptRng`] is a PRF bound to
/// randomness from the external RNG, as well as all other
/// transcript data.
pub fn finalize<R>(mut self, rng: &mut R) -> TranscriptRng
where
R: rand_core::RngCore + rand_core::CryptoRng,
{
let random_bytes = {
let mut bytes = [0u8; 32];
rng.fill_bytes(&mut bytes);
bytes
};
self.strobe.meta_ad(b"rng", false);
self.strobe.key(&random_bytes, false);
TranscriptRng {
strobe: self.strobe,
}
}
}
/// An RNG providing synthetic randomness to the prover.
///
/// A [`TranscriptRng`] is constructed from a [`Transcript`] using a
/// [`TranscriptRngBuilder`]; see its documentation for details on
/// how to construct one.
///
/// The transcript RNG construction is described in the [Generating
/// Randomness](https://merlin.cool/transcript/rng.html) section of
/// the Merlin website.
pub struct TranscriptRng {
strobe: Strobe128,
}
impl rand_core::RngCore for TranscriptRng {
fn next_u32(&mut self) -> u32 {
rand_core::impls::next_u32_via_fill(self)
}
fn next_u64(&mut self) -> u64 {
rand_core::impls::next_u64_via_fill(self)
}
fn fill_bytes(&mut self, dest: &mut [u8]) {
let dest_len = encode_usize_as_u32(dest.len());
self.strobe.meta_ad(&dest_len, false);
self.strobe.prf(dest, false);
}
fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {
self.fill_bytes(dest);
Ok(())
}
}
impl rand_core::CryptoRng for TranscriptRng {}
#[cfg(test)]
mod tests {
use strobe_rs::SecParam;
use strobe_rs::Strobe;
use super::*;
/// Test against a full strobe implementation to ensure we match the few
/// operations we're interested in.
struct TestTranscript {
state: Strobe,
}
impl TestTranscript {
/// Strobe init; meta-AD(label)
pub fn new(label: &[u8]) -> TestTranscript {
use crate::constants::MERLIN_PROTOCOL_LABEL;
let mut tt = TestTranscript {
state: Strobe::new(MERLIN_PROTOCOL_LABEL, SecParam::B128),
};
tt.append_message(b"dom-sep", label);
tt
}
/// Strobe op: meta-AD(label || len(message)); AD(message)
pub fn append_message(&mut self, label: &[u8], message: &[u8]) {
// metadata = label || len(message);
let mut metadata: Vec<u8> = Vec::with_capacity(label.len() + 4);
metadata.extend_from_slice(label);
metadata.extend_from_slice(&encode_usize_as_u32(message.len()));
self.state.meta_ad(&metadata, false);
self.state.ad(&message, false);
}
/// Strobe op: meta-AD(label || len(dest)); PRF into challenge_bytes
pub fn challenge_bytes(&mut self, label: &[u8], dest: &mut [u8]) {
let prf_len = dest.len();
// metadata = label || len(challenge_bytes);
let mut metadata: Vec<u8> = Vec::with_capacity(label.len() + 4);
metadata.extend_from_slice(label);
metadata.extend_from_slice(&encode_usize_as_u32(prf_len));
self.state.meta_ad(&metadata, false);
self.state.prf(dest, false);
}
}
/// Test a simple protocol with one message and one challenge
#[test]
fn equivalence_simple() {
let mut real_transcript = Transcript::new(b"test protocol");
let mut test_transcript = TestTranscript::new(b"test protocol");
real_transcript.append_message(b"some label", b"some data");
test_transcript.append_message(b"some label", b"some data");
let mut real_challenge = [0u8; 32];
let mut test_challenge = [0u8; 32];
real_transcript.challenge_bytes(b"challenge", &mut real_challenge);
test_transcript.challenge_bytes(b"challenge", &mut test_challenge);
assert_eq!(real_challenge, test_challenge);
}
/// Test a complex protocol with multiple messages and challenges,
/// with messages long enough to wrap around the sponge state, and
/// with multiple rounds of messages and challenges.
#[test]
fn equivalence_complex() {
let mut real_transcript = Transcript::new(b"test protocol");
let mut test_transcript = TestTranscript::new(b"test protocol");
let data = vec![99; 1024];
real_transcript.append_message(b"step1", b"some data");
test_transcript.append_message(b"step1", b"some data");
let mut real_challenge = [0u8; 32];
let mut test_challenge = [0u8; 32];
for _ in 0..32 {
real_transcript.challenge_bytes(b"challenge", &mut real_challenge);
test_transcript.challenge_bytes(b"challenge", &mut test_challenge);
assert_eq!(real_challenge, test_challenge);
real_transcript.append_message(b"bigdata", &data);
test_transcript.append_message(b"bigdata", &data);
real_transcript.append_message(b"challengedata", &real_challenge);
test_transcript.append_message(b"challengedata", &test_challenge);
}
}
#[test]
fn transcript_rng_is_bound_to_transcript_and_witnesses() {
use curve25519_dalek::scalar::Scalar;
use rand_chacha::ChaChaRng;
use rand_core::SeedableRng;
// Check that the TranscriptRng is bound to the transcript and
// the witnesses. This is done by producing a sequence of
// transcripts that diverge at different points and checking
// that they produce different challenges.
let protocol_label = b"test TranscriptRng collisions";
let commitment1 = b"commitment data 1";
let commitment2 = b"commitment data 2";
let witness1 = b"witness data 1";
let witness2 = b"witness data 2";
let mut t1 = Transcript::new(protocol_label);
let mut t2 = Transcript::new(protocol_label);
let mut t3 = Transcript::new(protocol_label);
let mut t4 = Transcript::new(protocol_label);
t1.append_message(b"com", commitment1);
t2.append_message(b"com", commitment2);
t3.append_message(b"com", commitment2);
t4.append_message(b"com", commitment2);
let mut r1 = t1
.build_rng()
.rekey_with_witness_bytes(b"witness", witness1)
.finalize(&mut ChaChaRng::from_seed([0; 32]));
let mut r2 = t2
.build_rng()
.rekey_with_witness_bytes(b"witness", witness1)
.finalize(&mut ChaChaRng::from_seed([0; 32]));
let mut r3 = t3
.build_rng()
.rekey_with_witness_bytes(b"witness", witness2)
.finalize(&mut ChaChaRng::from_seed([0; 32]));
let mut r4 = t4
.build_rng()
.rekey_with_witness_bytes(b"witness", witness2)
.finalize(&mut ChaChaRng::from_seed([0; 32]));
let s1 = Scalar::random(&mut r1);
let s2 = Scalar::random(&mut r2);
let s3 = Scalar::random(&mut r3);
let s4 = Scalar::random(&mut r4);
// Transcript t1 has different commitments than t2, t3, t4, so
// it should produce distinct challenges from all of them.
assert_ne!(s1, s2);
assert_ne!(s1, s3);
assert_ne!(s1, s4);
// Transcript t2 has different witness variables from t3, t4,
// so it should produce distinct challenges from all of them.
assert_ne!(s2, s3);
assert_ne!(s2, s4);
// Transcripts t3 and t4 have the same commitments and
// witnesses, so they should give different challenges only
// based on the RNG. Checking that they're equal in the
// presence of a bad RNG checks that the different challenges
// above aren't because the RNG is accidentally different.
assert_eq!(s3, s4);
}
}
62f01226fd558bb9e4c063f0c0c36786a97d370b
use nom::{
branch::alt,
bytes::complete::{escaped_transform, is_not, tag, take},
character::complete::{space0, space1},
combinator::{complete, eof, map, recognize, value},
multi::{many0, many1, separated_list0, separated_list1},
sequence::delimited,
IResult,
};
fn unquoted_token(input: &str) -> IResult<&str, String> {
let mut parser = map(recognize(is_not(" ;")), String::from);
parser(input)
}
fn quoted_token<'a>(input: &'a str) -> IResult<&'a str, String> {
let parser = escaped_transform(is_not(r#""\"#), '\\', |control_char: &'a str| {
alt((
value(r#"""#, tag(r#"""#)),
value(r#"\"#, tag(r#"\"#)),
value("\r", tag("r")),
value("\n", tag("n")),
value("\t", tag("t")),
take(1usize), // all other escaped characters are passed through, unmodified
))(control_char)
});
let double_quote = tag("\"");
let mut parser = delimited(&double_quote, parser, alt((&double_quote, eof)));
parser(input)
}
fn token(input: &str) -> IResult<&str, String> {
let mut parser = alt((quoted_token, unquoted_token));
parser(input)
}
fn operation_with_args(input: &str) -> IResult<&str, Vec<String>> {
let mut parser = separated_list1(space1, token);
parser(input)
}
fn semicolon(input: &str) -> IResult<&str, &str> {
delimited(space0, tag(";"), space0)(input)
}
fn operation_sequence(input: &str) -> IResult<&str, Vec<Vec<String>>> {
let parser = separated_list0(many1(semicolon), operation_with_args);
let parser = delimited(many0(semicolon), parser, many0(semicolon));
let mut parser = complete(parser);
parser(input)
}
/// Split a semicolon-separated list of operations into a vector. Each operation is represented by
/// a non-empty sub-vector, where the first element is the name of the operation, and the rest of
/// the elements are the operation's arguments.
///
/// Tokens can be double-quoted. Such tokens can contain spaces and C-like escaped sequences: `\n`
/// for newline, `\r` for carriage return, `\t` for tab, `\"` for double quote, `\\` for backslash.
/// Unsupported sequences are stripped of the escaping, e.g. `\e` turns into `e`.
///
/// This function assumes that the input string:
/// 1. doesn't contain a comment;
/// 2. doesn't contain backticks that need to be processed.
///
/// Returns `None` if the input could not be parsed.
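///
/// # Example
///
/// A minimal sketch mirroring the unit tests below; it is marked `ignore` because
/// the public import path of this function is not assumed here:
///
/// ```ignore
/// let ops = tokenize_operation_sequence(r#"set browser "firefox"; open-in-browser"#).unwrap();
/// assert_eq!(ops, vec![vec!["set", "browser", "firefox"], vec!["open-in-browser"]]);
/// ```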
pub fn tokenize_operation_sequence(input: &str) -> Option<Vec<Vec<String>>> {
match operation_sequence(input) {
Ok((_leftovers, tokens)) => Some(tokens),
Err(_error) => None,
}
}
#[cfg(test)]
mod tests {
use super::tokenize_operation_sequence;
#[test]
fn t_tokenize_operation_sequence_works_for_all_cpp_inputs() {
assert_eq!(
tokenize_operation_sequence("").unwrap(),
Vec::<Vec<String>>::new()
);
assert_eq!(
tokenize_operation_sequence("open").unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence("open-all-unread-in-browser-and-mark-read").unwrap(),
vec![vec!["open-all-unread-in-browser-and-mark-read"]]
);
assert_eq!(
tokenize_operation_sequence("; ; ; ;").unwrap(),
Vec::<Vec<String>>::new()
);
assert_eq!(
tokenize_operation_sequence("open ; next").unwrap(),
vec![vec!["open"], vec!["next"]]
);
assert_eq!(
tokenize_operation_sequence("open ; next ; prev").unwrap(),
vec![vec!["open"], vec!["next"], vec!["prev"]]
);
assert_eq!(
tokenize_operation_sequence("open ; next ; prev ; quit").unwrap(),
vec![vec!["open"], vec!["next"], vec!["prev"], vec!["quit"]]
);
assert_eq!(
tokenize_operation_sequence(r#"set "arg 1""#).unwrap(),
vec![vec!["set", "arg 1"]]
);
assert_eq!(
tokenize_operation_sequence(r#"set "arg 1" ; set "arg 2" "arg 3""#).unwrap(),
vec![vec!["set", "arg 1"], vec!["set", "arg 2", "arg 3"]]
);
assert_eq!(
tokenize_operation_sequence(r#"set browser "firefox"; open-in-browser"#).unwrap(),
vec![vec!["set", "browser", "firefox"], vec!["open-in-browser"]]
);
assert_eq!(
tokenize_operation_sequence("set browser firefox; open-in-browser").unwrap(),
vec![vec!["set", "browser", "firefox"], vec!["open-in-browser"]]
);
assert_eq!(
tokenize_operation_sequence("open-in-browser; quit").unwrap(),
vec![vec!["open-in-browser"], vec!["quit"]]
);
assert_eq!(
tokenize_operation_sequence(r#"open; set browser "firefox --private-window"; quit"#)
.unwrap(),
vec![
vec!["open"],
vec!["set", "browser", "firefox --private-window"],
vec!["quit"]
]
);
assert_eq!(
tokenize_operation_sequence(r#"open ;set browser "firefox --private-window" ;quit"#)
.unwrap(),
vec![
vec!["open"],
vec!["set", "browser", "firefox --private-window"],
vec!["quit"]
]
);
assert_eq!(
tokenize_operation_sequence(r#"open;set browser "firefox --private-window";quit"#)
.unwrap(),
vec![
vec!["open"],
vec!["set", "browser", "firefox --private-window"],
vec!["quit"]
]
);
assert_eq!(
tokenize_operation_sequence("; ;; ; open",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence(";;; ;; ; open",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence(";;; ;; ; open ;",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence(";;; ;; ; open ;; ;",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence(";;; ;; ; open ; ;;;;",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence(";;; open ; ;;;;",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence("; open ;; ;; ;",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence("open ; ;;; ;;",).unwrap(),
vec![vec!["open"]]
);
assert_eq!(
tokenize_operation_sequence(
r#"set browser "sleep 3; do-something ; echo hi"; open-in-browser"#
)
.unwrap(),
vec![
vec!["set", "browser", "sleep 3; do-something ; echo hi"],
vec!["open-in-browser"]
]
);
}
#[test]
fn t_tokenize_operation_sequence_ignores_escaped_sequences_outside_double_quotes() {
assert_eq!(
tokenize_operation_sequence(r#"\t"#).unwrap(),
vec![vec![r#"\t"#]]
);
assert_eq!(
tokenize_operation_sequence(r#"\r"#).unwrap(),
vec![vec![r#"\r"#]]
);
assert_eq!(
tokenize_operation_sequence(r#"\n"#).unwrap(),
vec![vec![r#"\n"#]]
);
assert_eq!(
tokenize_operation_sequence(r#"\v"#).unwrap(),
vec![vec![r#"\v"#]]
);
assert_eq!(
tokenize_operation_sequence(r#"\""#).unwrap(),
vec![vec![r#"\""#]]
);
assert_eq!(
tokenize_operation_sequence(r#"\\"#).unwrap(),
vec![vec![r#"\\"#]]
);
}
#[test]
fn t_tokenize_operation_sequence_expands_escaped_sequences_inside_double_quotes() {
assert_eq!(
tokenize_operation_sequence(r#""\t""#).unwrap(),
vec![vec!["\t"]]
);
assert_eq!(
tokenize_operation_sequence(r#""\r""#).unwrap(),
vec![vec!["\r"]]
);
assert_eq!(
tokenize_operation_sequence(r#""\n""#).unwrap(),
vec![vec!["\n"]]
);
assert_eq!(
tokenize_operation_sequence(r#""\"""#).unwrap(),
vec![vec!["\""]]
);
assert_eq!(
tokenize_operation_sequence(r#""\\""#).unwrap(),
vec![vec!["\\"]]
);
}
#[test]
fn t_tokenize_operation_sequence_passes_through_unsupported_escaped_chars_inside_double_quotes()
{
assert_eq!(
tokenize_operation_sequence(r#""\1""#).unwrap(),
vec![vec!["1"]]
);
assert_eq!(
tokenize_operation_sequence(r#""\W""#).unwrap(),
vec![vec!["W"]]
);
assert_eq!(
tokenize_operation_sequence(r#""\b""#).unwrap(),
vec![vec!["b"]]
);
assert_eq!(
tokenize_operation_sequence(r#""\d""#).unwrap(),
vec![vec!["d"]]
);
assert_eq!(
tokenize_operation_sequence(r#""\x""#).unwrap(),
vec![vec!["x"]]
);
}
#[test]
fn t_tokenize_operation_sequence_implicitly_closes_double_quotes_at_end_of_input() {
assert_eq!(
tokenize_operation_sequence(r#"set "arg 1"#).unwrap(),
vec![vec!["set", "arg 1"]]
);
}
#[test]
fn t_tokenize_operation_sequence_allows_single_character_unquoted() {
assert_eq!(
tokenize_operation_sequence(r#"set a b"#).unwrap(),
vec![vec!["set", "a", "b"]]
);
}
}
e2266c4c59a3410dfbc3fc18df4bdd4f1b5413f6
use super::{headers::RatelimitHeaders, GlobalLockPair};
use crate::routing::Path;
use std::{
collections::HashMap,
sync::{
atomic::{AtomicU64, Ordering},
Arc, Mutex,
},
time::{Duration, Instant},
};
use tokio::{
sync::{
mpsc::{self, UnboundedReceiver, UnboundedSender},
oneshot::{self, Sender},
Mutex as AsyncMutex,
},
time::{sleep, timeout},
};
#[derive(Clone, Debug)]
pub enum TimeRemaining {
Finished,
NotStarted,
Some(Duration),
}
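/// Ratelimit state for a single [`Path`]: the request limit, how many requests
/// remain, when the bucket resets, and a queue of requests waiting for tickets.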
#[derive(Debug)]
pub struct Bucket {
pub limit: AtomicU64,
pub path: Path,
pub queue: BucketQueue,
pub remaining: AtomicU64,
pub reset_after: AtomicU64,
pub started_at: Mutex<Option<Instant>>,
}
impl Bucket {
pub fn new(path: Path) -> Self {
Self {
limit: AtomicU64::new(u64::max_value()),
path,
queue: BucketQueue::default(),
remaining: AtomicU64::new(u64::max_value()),
reset_after: AtomicU64::new(u64::max_value()),
started_at: Mutex::new(None),
}
}
pub fn limit(&self) -> u64 {
self.limit.load(Ordering::Relaxed)
}
pub fn remaining(&self) -> u64 {
self.remaining.load(Ordering::Relaxed)
}
pub fn reset_after(&self) -> u64 {
self.reset_after.load(Ordering::Relaxed)
}
pub fn time_remaining(&self) -> TimeRemaining {
let reset_after = self.reset_after();
let started_at = match *self.started_at.lock().expect("bucket poisoned") {
Some(v) => v,
None => return TimeRemaining::NotStarted,
};
let elapsed = started_at.elapsed();
if elapsed > Duration::from_millis(reset_after) {
return TimeRemaining::Finished;
}
TimeRemaining::Some(Duration::from_millis(reset_after) - elapsed)
}
pub fn try_reset(&self) -> bool {
if self.started_at.lock().expect("bucket poisoned").is_none() {
return false;
}
if let TimeRemaining::Finished = self.time_remaining() {
self.remaining.store(self.limit(), Ordering::Relaxed);
*self.started_at.lock().expect("bucket poisoned") = None;
true
} else {
false
}
}
pub fn update(&self, ratelimits: Option<(u64, u64, u64)>) {
let bucket_limit = self.limit();
{
let mut started_at = self.started_at.lock().expect("bucket poisoned");
if started_at.is_none() {
started_at.replace(Instant::now());
}
}
if let Some((limit, remaining, reset_after)) = ratelimits {
if bucket_limit != limit && bucket_limit == u64::max_value() {
self.reset_after.store(reset_after, Ordering::SeqCst);
self.limit.store(limit, Ordering::SeqCst);
}
self.remaining.store(remaining, Ordering::Relaxed);
} else {
self.remaining.fetch_sub(1, Ordering::Relaxed);
}
}
}
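/// FIFO queue of requests waiting on a bucket, backed by an unbounded channel.
/// Each queued item is a oneshot sender that is later handed a channel on which
/// the response's ratelimit headers (or `None`) are reported back.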
#[derive(Debug)]
pub struct BucketQueue {
rx: AsyncMutex<UnboundedReceiver<Sender<Sender<Option<RatelimitHeaders>>>>>,
tx: UnboundedSender<Sender<Sender<Option<RatelimitHeaders>>>>,
}
impl BucketQueue {
pub fn push(&self, tx: Sender<Sender<Option<RatelimitHeaders>>>) {
let _sent = self.tx.send(tx);
}
pub async fn pop(
&self,
timeout_duration: Duration,
) -> Option<Sender<Sender<Option<RatelimitHeaders>>>> {
let mut rx = self.rx.lock().await;
timeout(timeout_duration, rx.recv()).await.ok().flatten()
}
}
impl Default for BucketQueue {
fn default() -> Self {
let (tx, rx) = mpsc::unbounded_channel();
Self {
rx: AsyncMutex::new(rx),
tx,
}
}
}
pub(super) struct BucketQueueTask {
bucket: Arc<Bucket>,
buckets: Arc<Mutex<HashMap<Path, Arc<Bucket>>>>,
global: Arc<GlobalLockPair>,
path: Path,
}
impl BucketQueueTask {
const WAIT: Duration = Duration::from_secs(10);
pub fn new(
bucket: Arc<Bucket>,
buckets: Arc<Mutex<HashMap<Path, Arc<Bucket>>>>,
global: Arc<GlobalLockPair>,
path: Path,
) -> Self {
Self {
bucket,
buckets,
global,
path,
}
}
pub async fn run(self) {
#[cfg(feature = "tracing")]
let span = tracing::debug_span!("background queue task", path=?self.path);
while let Some(queue_tx) = self.next().await {
let (tx, rx) = oneshot::channel();
if self.global.is_locked() {
self.global.0.lock().await;
}
let _sent = queue_tx.send(tx);
#[cfg(feature = "tracing")]
tracing::debug!(parent: &span, "starting to wait for response headers",);
// TODO: Find a better way of handling nested types.
#[allow(clippy::unnested_or_patterns)]
match timeout(Self::WAIT, rx).await {
Ok(Ok(Some(headers))) => self.handle_headers(&headers).await,
// - None was sent through the channel (request aborted)
// - channel was closed
// - timeout reached
Ok(Err(_)) | Err(_) | Ok(Ok(None)) => {
#[cfg(feature = "tracing")]
tracing::debug!(parent: &span, "receiver timed out");
}
}
}
#[cfg(feature = "tracing")]
tracing::debug!(parent: &span, "bucket appears finished, removing");
self.buckets
.lock()
.expect("ratelimit buckets poisoned")
.remove(&self.path);
}
async fn handle_headers(&self, headers: &RatelimitHeaders) {
let ratelimits = match headers {
RatelimitHeaders::GlobalLimited { reset_after } => {
self.lock_global(Duration::from_secs(*reset_after)).await;
None
}
RatelimitHeaders::None => return,
RatelimitHeaders::Present {
global,
limit,
remaining,
reset_after,
..
} => {
if *global {
self.lock_global(Duration::from_secs(*reset_after)).await;
}
Some((*limit, *remaining, *reset_after))
}
};
#[cfg(feature = "tracing")]
tracing::debug!(path=?self.path, "updating bucket");
self.bucket.update(ratelimits);
}
async fn lock_global(&self, wait: Duration) {
#[cfg(feature = "tracing")]
tracing::debug!(path=?self.path, "request got global ratelimited");
self.global.lock();
let lock = self.global.0.lock().await;
sleep(wait).await;
self.global.unlock();
drop(lock);
}
async fn next(&self) -> Option<Sender<Sender<Option<RatelimitHeaders>>>> {
#[cfg(feature = "tracing")]
tracing::debug!(path=?self.path, "starting to get next in queue");
self.wait_if_needed().await;
self.bucket.queue.pop(Self::WAIT).await
}
async fn wait_if_needed(&self) {
#[cfg(feature = "tracing")]
let span = tracing::debug_span!("waiting for bucket to refresh", path=?self.path);
let wait = {
if self.bucket.remaining() > 0 {
return;
}
#[cfg(feature = "tracing")]
tracing::debug!(parent: &span, "0 tickets remaining, may have to wait");
match self.bucket.time_remaining() {
TimeRemaining::Finished => {
self.bucket.try_reset();
return;
}
TimeRemaining::NotStarted => return,
TimeRemaining::Some(dur) => dur,
}
};
#[cfg(feature = "tracing")]
tracing::debug!(
parent: &span,
milliseconds=%wait.as_millis(),
"waiting for ratelimit to pass",
);
sleep(wait).await;
#[cfg(feature = "tracing")]
tracing::debug!(parent: &span, "done waiting for ratelimit to pass");
self.bucket.try_reset();
}
}
e827a1e1c45e3e40597c22a537c737cd0279aa8c
use anyhow::Result;
use libaoc::{aoc, AocResult, Timer};
use std::cmp::max;
#[aoc("953", "615")]
pub fn solve(timer: &mut Timer, input: &str) -> Result<AocResult> {
let lines: Vec<_> = input.lines().collect();
let mut seats = [[false; 8]; 128];
timer.lap("Parse");
let part1 = lines.iter().fold(0, |acc, &line| {
let mut row = 0;
let mut col = 0;
let mut row_size = 64;
let mut col_size = 4;
for c in line.chars() {
match c {
'F' => row_size /= 2,
'B' => {
// upper
row += row_size;
row_size /= 2;
}
'R' => {
// upper
col += col_size;
col_size /= 2;
}
'L' => col_size /= 2,
_ => panic!(),
}
}
seats[row][col] = true;
max(acc, row * 8 + col)
});
timer.lap("Part 1");
let mut found = false;
let mut part2 = 0;
for (y, row) in seats.iter().enumerate() {
if !found {
found = row.iter().fold(false, |a, x| a | x);
} else {
let res = row
.iter()
.enumerate()
.fold(0, |_, (x, v)| if *v { 0 } else { y * 8 + x });
if res > 0 {
part2 = res;
break;
}
}
}
timer.lap("Part 2");
Ok(AocResult::new(part1, part2))
}
76b8dd60d432ce6fcb29c22d85ba8b045710bd64
use std::fs::{File, OpenOptions};
use std::io;
use std::io::prelude::*;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
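/// Creates the scaffolding for a new lint: a UI test stub under `tests/ui/` and a
/// lint implementation stub under `clippy_lints/src/`.
///
/// All three arguments are validated by clap and expected to be `Some`; for
/// illustration (the values here are hypothetical), a call could look like
/// `create(Some("early"), Some("foo_functions"), Some("pedantic"))`.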
pub fn create(pass: Option<&str>, lint_name: Option<&str>, category: Option<&str>) -> Result<(), io::Error> {
let pass = pass.expect("`pass` argument is validated by clap");
let lint_name = lint_name.expect("`name` argument is validated by clap");
let category = category.expect("`category` argument is validated by clap");
match open_files(lint_name) {
Ok((mut test_file, mut lint_file)) => {
let (pass_type, pass_lifetimes, pass_import, context_import) = match pass {
"early" => ("EarlyLintPass", "", "use syntax::ast::*;", "EarlyContext"),
"late" => ("LateLintPass", "<'_, '_>", "use rustc_hir::*;", "LateContext"),
_ => {
unreachable!("`pass_type` should only ever be `early` or `late`!");
},
};
let camel_case_name = to_camel_case(lint_name);
if let Err(e) = test_file.write_all(get_test_file_contents(lint_name).as_bytes()) {
return Err(io::Error::new(
ErrorKind::Other,
format!("Could not write to test file: {}", e),
));
};
if let Err(e) = lint_file.write_all(
get_lint_file_contents(
pass_type,
pass_lifetimes,
lint_name,
&camel_case_name,
category,
pass_import,
context_import,
)
.as_bytes(),
) {
return Err(io::Error::new(
ErrorKind::Other,
format!("Could not write to lint file: {}", e),
));
}
Ok(())
},
Err(e) => Err(io::Error::new(
ErrorKind::Other,
format!("Unable to create lint: {}", e),
)),
}
}
fn open_files(lint_name: &str) -> Result<(File, File), io::Error> {
let project_root = project_root()?;
let test_file_path = project_root.join("tests").join("ui").join(format!("{}.rs", lint_name));
let lint_file_path = project_root
.join("clippy_lints")
.join("src")
.join(format!("{}.rs", lint_name));
if Path::new(&test_file_path).exists() {
return Err(io::Error::new(
ErrorKind::AlreadyExists,
format!("test file {:?} already exists", test_file_path),
));
}
if Path::new(&lint_file_path).exists() {
return Err(io::Error::new(
ErrorKind::AlreadyExists,
format!("lint file {:?} already exists", lint_file_path),
));
}
let test_file = OpenOptions::new().write(true).create_new(true).open(test_file_path)?;
let lint_file = OpenOptions::new().write(true).create_new(true).open(lint_file_path)?;
Ok((test_file, lint_file))
}
fn project_root() -> Result<PathBuf, io::Error> {
let current_dir = std::env::current_dir()?;
for path in current_dir.ancestors() {
let result = std::fs::read_to_string(path.join("Cargo.toml"));
if let Err(err) = &result {
if err.kind() == io::ErrorKind::NotFound {
continue;
}
}
let content = result?;
if content.contains("[package]\nname = \"clippy\"") {
return Ok(path.to_path_buf());
}
}
Err(io::Error::new(ErrorKind::Other, "Unable to find project root"))
}
fn to_camel_case(name: &str) -> String {
name.split('_')
.map(|s| {
if s.is_empty() {
String::from("")
} else {
[&s[0..1].to_uppercase(), &s[1..]].concat()
}
})
.collect()
}
fn get_test_file_contents(lint_name: &str) -> String {
format!(
"#![warn(clippy::{})]
fn main() {{
// test code goes here
}}
",
lint_name
)
}
fn get_lint_file_contents(
pass_type: &str,
pass_lifetimes: &str,
lint_name: &str,
camel_case_name: &str,
category: &str,
pass_import: &str,
context_import: &str,
) -> String {
format!(
"use rustc_lint::{{LintArray, LintPass, {type}, {context_import}}};
use rustc_session::{{declare_lint_pass, declare_tool_lint}};
{pass_import}
declare_clippy_lint! {{
/// **What it does:**
///
/// **Why is this bad?**
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// // example code
/// ```
pub {name_upper},
{category},
\"default lint description\"
}}
declare_lint_pass!({name_camel} => [{name_upper}]);
impl {type}{lifetimes} for {name_camel} {{}}
",
type=pass_type,
lifetimes=pass_lifetimes,
name_upper=lint_name.to_uppercase(),
name_camel=camel_case_name,
category=category,
pass_import=pass_import,
context_import=context_import
)
}
#[test]
fn test_camel_case() {
let s = "a_lint";
let s2 = to_camel_case(s);
assert_eq!(s2, "ALint");
let name = "a_really_long_new_lint";
let name2 = to_camel_case(name);
assert_eq!(name2, "AReallyLongNewLint");
let name3 = "lint__name";
let name4 = to_camel_case(name3);
assert_eq!(name4, "LintName");
}
| 29.005376 | 109 | 0.534569 |
ccf77f69716e50ec90d9866c7348028e3bb3d70f
| 2,979 |
use crate::file_io;
use std::io::Error;
use serde_json::json;
use chrono::Utc;
/// Generates the JSON for a tracker containing the given data point and an RFC 2822 timestamp,
/// and writes it to the provided filename.
pub fn add_tracker(password: &String, filename: &String, tracker_name: &String, data: &f64) -> Result<String, Error>{
let data = json!({
"trackers": [
json!({
"tracker_name": tracker_name,
"data": [data],
"data_time": [Utc::now().to_rfc2822()]}),
]
});
    file_io::write_json(password, filename, &data)
}
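// Minimal usage sketch (assumption: files are written under `./files/`, as the tests below
// expect). Per `test_add_tracker_append`, repeated calls for the same tracker append a new
// reading and a new RFC 2822 timestamp:
//
//     let password = "abcd".to_string();
//     let filename = "my_trackers".to_string();
//     let tracker = "weight".to_string();
//     add_tracker(&password, &filename, &tracker, &72.5)?;
//     add_tracker(&password, &filename, &tracker, &73.0)?;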
#[cfg(test)]
mod tests{
use super::add_tracker;
use crate::file_io;
use std::fs;
use serde_json::from_str;
// Create a random filename, with a preset password and tracker and return it
fn setup() -> (String, String, String) {
let password = "abcd".to_string();
        // Create random filename
        let filename = format!("temp{}", rand::random::<u64>());
let tracker = "tracker".to_string();
return (password, filename, tracker);
}
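    // Remove the temporary file created during a test.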
fn teardown(file_path: &str) {
fs::remove_file(file_path).unwrap();
}
#[test]
fn test_add_tracker_output(){
let setup_res = setup();
let data = 5;
add_tracker(&setup_res.0, &setup_res.1, &setup_res.2, &(data as f64)).unwrap();
let plaintext = file_io::load_file(&setup_res.0, &setup_res.1).unwrap();
let obj: serde_json::Value = from_str(&plaintext).unwrap();
let file_path = ["./files/", &setup_res.1].concat();
teardown(&file_path);
assert!(obj["trackers"][0]["tracker_name"] == setup_res.2 && obj["trackers"][0]["data"][0] == (data as f64));
}
#[test]
fn test_add_tracker_append(){
let setup_res = setup();
let data = 5;
let data2 = 7;
add_tracker(&setup_res.0, &setup_res.1, &setup_res.2, &(data as f64)).unwrap();
let plaintext = file_io::load_file(&setup_res.0, &setup_res.1).unwrap();
let obj_before: serde_json::Value = from_str(&plaintext).unwrap();
add_tracker(&setup_res.0, &setup_res.1, &setup_res.2, &(data2 as f64)).unwrap();
let plaintext2 = file_io::load_file(&setup_res.0, &setup_res.1).unwrap();
let obj_after: serde_json::Value = from_str(&plaintext2).unwrap();
let file_path = ["./files/", &setup_res.1].concat();
teardown(&file_path);
assert!(obj_before["trackers"][0]["tracker_name"] == setup_res.2);
assert!(obj_before["trackers"][0]["data"][0] == (data as f64));
assert!(obj_after["trackers"][0]["tracker_name"] == setup_res.2);
assert!(obj_after["trackers"][0]["data"][0] == (data as f64));
assert!(obj_after["trackers"][0]["data"][1] == (data2 as f64));
}
}
| 32.380435 | 118 | 0.571333 |
505d35bffe916cece5e1af333e7fc39bf9783db5
| 176 |
#![recursion_limit = "512"]
mod app;
use wasm_bindgen::prelude::*;
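/// Entry point called from JavaScript; mounts the Yew `App` component onto the document body.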
#[wasm_bindgen]
pub fn run_app() -> Result<(), JsValue> {
yew::start_app::<app::App>();
Ok(())
}
| 13.538462 | 41 | 0.585227 |
8a530c1178ed42535311cfd2f98484e3b800e669
| 75,838 |
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub mod event_subscriptions {
use crate::models::*;
use snafu::{ResultExt, Snafu};
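    // Minimal usage sketch (inside an async context; `cfg` is an illustrative, already
    // constructed `crate::OperationConfig`, and the scope is a full ARM resource ID):
    //
    //     let sub = event_subscriptions::get(&cfg, "/subscriptions/<id>/resourceGroups/<rg>", "my-subscription").await?;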
pub async fn get(
operation_config: &crate::OperationConfig,
scope: &str,
event_subscription_name: &str,
) -> std::result::Result<EventSubscription, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}",
operation_config.base_path(),
scope,
event_subscription_name
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscription =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => get::DefaultResponse { status_code }.fail(),
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
scope: &str,
event_subscription_name: &str,
event_subscription_info: &EventSubscription,
) -> std::result::Result<EventSubscription, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}",
operation_config.base_path(),
scope,
event_subscription_name
);
let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the request payload into the request body.
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(event_subscription_info)
                .map_err(|e| create_or_update::Error::SerializeError { source: Box::new(e) })?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: EventSubscription =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => create_or_update::DefaultResponse { status_code }.fail(),
}
}
pub mod create_or_update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
scope: &str,
event_subscription_name: &str,
event_subscription_update_parameters: &EventSubscriptionUpdateParameters,
) -> std::result::Result<EventSubscription, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}",
operation_config.base_path(),
scope,
event_subscription_name
);
let mut url = url::Url::parse(url_str).context(update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the request payload into the request body.
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(event_subscription_update_parameters)
                .map_err(|e| update::Error::SerializeError { source: Box::new(e) })?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(update::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: EventSubscription =
serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => update::DefaultResponse { status_code }.fail(),
}
}
pub mod update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
scope: &str,
event_subscription_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}",
operation_config.base_path(),
scope,
event_subscription_name
);
let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => delete::DefaultResponse { status_code }.fail(),
}
}
pub mod delete {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn get_full_url(
operation_config: &crate::OperationConfig,
scope: &str,
event_subscription_name: &str,
) -> std::result::Result<EventSubscriptionFullUrl, get_full_url::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}/getFullUrl",
operation_config.base_path(),
scope,
event_subscription_name
);
let mut url = url::Url::parse(url_str).context(get_full_url::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get_full_url::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get_full_url::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get_full_url::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionFullUrl =
serde_json::from_slice(rsp_body).context(get_full_url::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => get_full_url::DefaultResponse { status_code }.fail(),
}
}
pub mod get_full_url {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_global_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EventGrid/eventSubscriptions",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).context(list_global_by_subscription::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_global_by_subscription::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_global_by_subscription::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_global_by_subscription::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult =
serde_json::from_slice(rsp_body).context(list_global_by_subscription::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_global_by_subscription::DefaultResponse { status_code }.fail(),
}
}
pub mod list_global_by_subscription {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_global_by_subscription_for_topic_type(
operation_config: &crate::OperationConfig,
subscription_id: &str,
topic_type_name: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_subscription_for_topic_type::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EventGrid/topicTypes/{}/eventSubscriptions",
operation_config.base_path(),
subscription_id,
topic_type_name
);
let mut url = url::Url::parse(url_str).context(list_global_by_subscription_for_topic_type::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_global_by_subscription_for_topic_type::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(list_global_by_subscription_for_topic_type::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_global_by_subscription_for_topic_type::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
.context(list_global_by_subscription_for_topic_type::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_global_by_subscription_for_topic_type::DefaultResponse { status_code }.fail(),
}
}
pub mod list_global_by_subscription_for_topic_type {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_global_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/eventSubscriptions",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).context(list_global_by_resource_group::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_global_by_resource_group::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(list_global_by_resource_group::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_global_by_resource_group::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult =
serde_json::from_slice(rsp_body).context(list_global_by_resource_group::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_global_by_resource_group::DefaultResponse { status_code }.fail(),
}
}
pub mod list_global_by_resource_group {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_global_by_resource_group_for_topic_type(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
topic_type_name: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_resource_group_for_topic_type::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topicTypes/{}/eventSubscriptions",
operation_config.base_path(),
subscription_id,
resource_group_name,
topic_type_name
);
let mut url = url::Url::parse(url_str).context(list_global_by_resource_group_for_topic_type::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_global_by_resource_group_for_topic_type::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(list_global_by_resource_group_for_topic_type::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_global_by_resource_group_for_topic_type::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
.context(list_global_by_resource_group_for_topic_type::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_global_by_resource_group_for_topic_type::DefaultResponse { status_code }.fail(),
}
}
pub mod list_global_by_resource_group_for_topic_type {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_regional_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EventGrid/locations/{}/eventSubscriptions",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).context(list_regional_by_subscription::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_regional_by_subscription::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(list_regional_by_subscription::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_regional_by_subscription::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult =
serde_json::from_slice(rsp_body).context(list_regional_by_subscription::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_regional_by_subscription::DefaultResponse { status_code }.fail(),
}
}
pub mod list_regional_by_subscription {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_regional_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
location: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/locations/{}/eventSubscriptions",
operation_config.base_path(),
subscription_id,
resource_group_name,
location
);
let mut url = url::Url::parse(url_str).context(list_regional_by_resource_group::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_regional_by_resource_group::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(list_regional_by_resource_group::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_regional_by_resource_group::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
.context(list_regional_by_resource_group::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_regional_by_resource_group::DefaultResponse { status_code }.fail(),
}
}
pub mod list_regional_by_resource_group {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_regional_by_subscription_for_topic_type(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
topic_type_name: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_subscription_for_topic_type::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EventGrid/locations/{}/topicTypes/{}/eventSubscriptions",
operation_config.base_path(),
subscription_id,
location,
topic_type_name
);
let mut url = url::Url::parse(url_str).context(list_regional_by_subscription_for_topic_type::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_regional_by_subscription_for_topic_type::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(list_regional_by_subscription_for_topic_type::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_regional_by_subscription_for_topic_type::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
.context(list_regional_by_subscription_for_topic_type::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_regional_by_subscription_for_topic_type::DefaultResponse { status_code }.fail(),
}
}
pub mod list_regional_by_subscription_for_topic_type {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_regional_by_resource_group_for_topic_type(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
location: &str,
topic_type_name: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_resource_group_for_topic_type::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/locations/{}/topicTypes/{}/eventSubscriptions",
operation_config.base_path(),
subscription_id,
resource_group_name,
location,
topic_type_name
);
let mut url = url::Url::parse(url_str).context(list_regional_by_resource_group_for_topic_type::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_regional_by_resource_group_for_topic_type::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(list_regional_by_resource_group_for_topic_type::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_regional_by_resource_group_for_topic_type::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
.context(list_regional_by_resource_group_for_topic_type::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_regional_by_resource_group_for_topic_type::DefaultResponse { status_code }.fail(),
}
}
pub mod list_regional_by_resource_group_for_topic_type {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_by_resource(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
provider_namespace: &str,
resource_type_name: &str,
resource_name: &str,
) -> std::result::Result<EventSubscriptionsListResult, list_by_resource::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/{}/{}/providers/Microsoft.EventGrid/eventSubscriptions",
operation_config.base_path(),
subscription_id,
resource_group_name,
provider_namespace,
resource_type_name,
resource_name
);
let mut url = url::Url::parse(url_str).context(list_by_resource::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_by_resource::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_by_resource::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_by_resource::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventSubscriptionsListResult =
serde_json::from_slice(rsp_body).context(list_by_resource::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_by_resource::DefaultResponse { status_code }.fail(),
}
}
pub mod list_by_resource {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod operations {
use crate::models::*;
use snafu::{ResultExt, Snafu};
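    // Minimal usage sketch (`cfg` is an illustrative, already constructed `crate::OperationConfig`):
    //
    //     let ops = operations::list(&cfg).await?;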
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationsListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.EventGrid/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationsListResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list::DefaultResponse { status_code }.fail(),
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod topics {
use crate::models::*;
use snafu::{ResultExt, Snafu};
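    // Minimal usage sketch (`cfg` is an illustrative, already constructed `crate::OperationConfig`;
    // the subscription, resource group, and topic names are placeholders):
    //
    //     let topic = topics::get(&cfg, "<subscription-id>", "my-rg", "my-topic").await?;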
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
topic_name: &str,
) -> std::result::Result<Topic, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
topic_name
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Topic = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => get::DefaultResponse { status_code }.fail(),
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
topic_name: &str,
topic_info: &Topic,
) -> std::result::Result<Topic, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
topic_name
);
let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the request payload into the request body.
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(topic_info)
                .map_err(|e| create_or_update::Error::SerializeError { source: Box::new(e) })?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: Topic =
serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => create_or_update::DefaultResponse { status_code }.fail(),
}
}
pub mod create_or_update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
topic_name: &str,
topic_update_parameters: &TopicUpdateParameters,
) -> std::result::Result<Topic, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
topic_name
);
let mut url = url::Url::parse(url_str).context(update::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(update::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the request payload into the request body.
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(topic_update_parameters)
                .map_err(|e| update::Error::SerializeError { source: Box::new(e) })?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(update::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: Topic = serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => update::DefaultResponse { status_code }.fail(),
}
}
pub mod update {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
topic_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
topic_name
);
let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => delete::DefaultResponse { status_code }.fail(),
}
}
pub mod delete {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<TopicsListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EventGrid/topics",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).context(list_by_subscription::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_by_subscription::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_by_subscription::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_by_subscription::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: TopicsListResult =
serde_json::from_slice(rsp_body).context(list_by_subscription::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_by_subscription::DefaultResponse { status_code }.fail(),
}
}
pub mod list_by_subscription {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<TopicsListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).context(list_by_resource_group::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_by_resource_group::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_by_resource_group::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_by_resource_group::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: TopicsListResult =
serde_json::from_slice(rsp_body).context(list_by_resource_group::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_by_resource_group::DefaultResponse { status_code }.fail(),
}
}
pub mod list_by_resource_group {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_shared_access_keys(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
topic_name: &str,
) -> std::result::Result<TopicSharedAccessKeys, list_shared_access_keys::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}/listKeys",
operation_config.base_path(),
subscription_id,
resource_group_name,
topic_name
);
let mut url = url::Url::parse(url_str).context(list_shared_access_keys::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_shared_access_keys::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_shared_access_keys::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_shared_access_keys::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: TopicSharedAccessKeys =
serde_json::from_slice(rsp_body).context(list_shared_access_keys::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_shared_access_keys::DefaultResponse { status_code }.fail(),
}
}
pub mod list_shared_access_keys {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn regenerate_key(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
topic_name: &str,
regenerate_key_request: &TopicRegenerateKeyRequest,
) -> std::result::Result<TopicSharedAccessKeys, regenerate_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}/regenerateKey",
operation_config.base_path(),
subscription_id,
resource_group_name,
topic_name
);
let mut url = url::Url::parse(url_str).context(regenerate_key::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(regenerate_key::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        // Serialize the request payload into the request body.
        let req_body = bytes::Bytes::from(
            serde_json::to_vec(regenerate_key_request)
                .map_err(|e| regenerate_key::Error::SerializeError { source: Box::new(e) })?,
        );
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(regenerate_key::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(regenerate_key::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: TopicSharedAccessKeys =
serde_json::from_slice(rsp_body).context(regenerate_key::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => regenerate_key::DefaultResponse { status_code }.fail(),
}
}
pub mod regenerate_key {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_event_types(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
provider_namespace: &str,
resource_type_name: &str,
resource_name: &str,
) -> std::result::Result<EventTypesListResult, list_event_types::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/{}/{}/providers/Microsoft.EventGrid/eventTypes",
operation_config.base_path(),
subscription_id,
resource_group_name,
provider_namespace,
resource_type_name,
resource_name
);
let mut url = url::Url::parse(url_str).context(list_event_types::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_event_types::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_event_types::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_event_types::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventTypesListResult =
serde_json::from_slice(rsp_body).context(list_event_types::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_event_types::DefaultResponse { status_code }.fail(),
}
}
pub mod list_event_types {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod topic_types {
use crate::models::*;
use snafu::{ResultExt, Snafu};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<TopicTypesListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.EventGrid/topicTypes", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: TopicTypesListResult =
serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list::DefaultResponse { status_code }.fail(),
}
}
pub mod list {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn get(operation_config: &crate::OperationConfig, topic_type_name: &str) -> std::result::Result<TopicTypeInfo, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.EventGrid/topicTypes/{}",
operation_config.base_path(),
topic_type_name
);
let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(get::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(get::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: TopicTypeInfo =
serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => get::DefaultResponse { status_code }.fail(),
}
}
pub mod get {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_event_types(
operation_config: &crate::OperationConfig,
topic_type_name: &str,
) -> std::result::Result<EventTypesListResult, list_event_types::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.EventGrid/topicTypes/{}/eventTypes",
operation_config.base_path(),
topic_type_name
);
let mut url = url::Url::parse(url_str).context(list_event_types::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.context(list_event_types::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).context(list_event_types::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.context(list_event_types::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: EventTypesListResult =
serde_json::from_slice(rsp_body).context(list_event_types::DeserializeError { body: rsp_body.clone() })?;
Ok(rsp_value)
}
status_code => list_event_types::DefaultResponse { status_code }.fail(),
}
}
pub mod list_event_types {
use crate::{models, models::*};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse { status_code: http::StatusCode },
ParseUrlError { source: url::ParseError },
BuildRequestError { source: http::Error },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
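// Usage sketch (added for illustration; not part of the generated client). The topic type
// name below is only an example value, and constructing the `OperationConfig` depends on
// the rest of this crate:
//
//     let info = topic_types::get(&operation_config, "Microsoft.Storage.StorageAccounts").await?;
//     let event_types =
//         topic_types::list_event_types(&operation_config, "Microsoft.Storage.StorageAccounts").await?;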
| 50.357238 | 138 | 0.619056 |
649259cc54b1afb8795192fc97190e0ae105ced0
| 4,525 |
#[doc = "Register `ROM_TRAP_ADDR` reader"]
pub struct R(crate::R<ROM_TRAP_ADDR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<ROM_TRAP_ADDR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<ROM_TRAP_ADDR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<ROM_TRAP_ADDR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `ROM_TRAP_ADDR` writer"]
pub struct W(crate::W<ROM_TRAP_ADDR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<ROM_TRAP_ADDR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<ROM_TRAP_ADDR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<ROM_TRAP_ADDR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `ADDR` reader - Trap Address Match Bits"]
pub struct ADDR_R(crate::FieldReader<u16, u16>);
impl ADDR_R {
#[inline(always)]
pub(crate) fn new(bits: u16) -> Self {
ADDR_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ADDR_R {
type Target = crate::FieldReader<u16, u16>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ADDR` writer - Trap Address Match Bits"]
pub struct ADDR_W<'a> {
w: &'a mut W,
}
impl<'a> ADDR_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x3fff << 2)) | ((value as u32 & 0x3fff) << 2);
self.w
}
}
#[doc = "Field `ENABLE` reader - Trap Enable Bit"]
pub struct ENABLE_R(crate::FieldReader<bool, bool>);
impl ENABLE_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
ENABLE_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ENABLE_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ENABLE` writer - Trap Enable Bit"]
pub struct ENABLE_W<'a> {
w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31);
self.w
}
}
impl R {
#[doc = "Bits 2:15 - Trap Address Match Bits"]
#[inline(always)]
pub fn addr(&self) -> ADDR_R {
ADDR_R::new(((self.bits >> 2) & 0x3fff) as u16)
}
#[doc = "Bit 31 - Trap Enable Bit"]
#[inline(always)]
pub fn enable(&self) -> ENABLE_R {
ENABLE_R::new(((self.bits >> 31) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 2:15 - Trap Address Match Bits"]
#[inline(always)]
pub fn addr(&mut self) -> ADDR_W {
ADDR_W { w: self }
}
#[doc = "Bit 31 - Trap Enable Bit"]
#[inline(always)]
pub fn enable(&mut self) -> ENABLE_W {
ENABLE_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "ROM Trap Address\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rom_trap_addr](index.html) module"]
pub struct ROM_TRAP_ADDR_SPEC;
impl crate::RegisterSpec for ROM_TRAP_ADDR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [rom_trap_addr::R](R) reader structure"]
impl crate::Readable for ROM_TRAP_ADDR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [rom_trap_addr::W](W) writer structure"]
impl crate::Writable for ROM_TRAP_ADDR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets ROM_TRAP_ADDR to value 0"]
impl crate::Resettable for ROM_TRAP_ADDR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
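// Usage sketch (added for illustration; the peripheral handle and register accessor names
// below are assumptions based on the usual svd2rust layout, not taken from this crate):
//
//     // Arm the ROM trap at a 14-bit address match value and enable it.
//     periph.rom_trap_addr.write(|w| unsafe { w.addr().bits(0x1234) }.enable().set_bit());
//     // Later, check whether the trap is still enabled.
//     let enabled = periph.rom_trap_addr.read().enable().bit_is_set();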
| 29.966887 | 410 | 0.591823 |
0121e7a73fe04e13b76a8129185c6f64c91343f8
| 1,076 |
pub use kinetis_common::pit::*;
::bobbin_mcu::periph!( PIT, Pit, PIT_PERIPH, PitPeriph, PIT_OWNED, PIT_REF_COUNT, 0x40037000, 0x00, 0x0d);
::bobbin_mcu::channel!(PIT_CH0, PitCh0, pit_ch0, PIT, Pit, PIT_CH0_CH, PitCh, PIT_PERIPH, PIT_CH0_OWNED, PIT_CH0_REF_COUNT, 0);
::bobbin_mcu::channel!(PIT_CH1, PitCh1, pit_ch1, PIT, Pit, PIT_CH1_CH, PitCh, PIT_PERIPH, PIT_CH1_OWNED, PIT_CH1_REF_COUNT, 1);
::bobbin_mcu::channel!(PIT_CH2, PitCh2, pit_ch2, PIT, Pit, PIT_CH2_CH, PitCh, PIT_PERIPH, PIT_CH2_OWNED, PIT_CH2_REF_COUNT, 2);
::bobbin_mcu::channel!(PIT_CH3, PitCh3, pit_ch3, PIT, Pit, PIT_CH3_CH, PitCh, PIT_PERIPH, PIT_CH3_OWNED, PIT_CH3_REF_COUNT, 3);
// Gate { name: None, gate_type: Some("EN"), periph: Some("SIM"), register: Some("SCGC6"), field: Some("PIT"), description: None }
impl ::bobbin_mcu::gate::GateEn for Pit {
#[inline]
fn gate_en(&self) -> ::bobbin_bits::U1 { ::sim::SIM.scgc6().pit() }
#[inline]
fn set_gate_en<V: Into<::bobbin_bits::U1>>(&self, value: V) -> &Self {
::sim::SIM.with_scgc6(|r| r.set_pit(value));
self
}
}
| 53.8 | 130 | 0.692379 |
8f2eef9eb737c7f0d350e890b8cf39b29ecd62dc
| 335 |
use criterion::{criterion_group, criterion_main, Criterion};
use rnglib::{Language, RNG};
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("RNG Fantasy", |b| {
b.iter(|| RNG::new(&Language::Fantasy).unwrap().generate_name())
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| 27.916667 | 72 | 0.707463 |
1ed28624fced89c2e230030f4ed8a28055cd632f
| 2,481 |
#![feature(bench_black_box)]
#![feature(test)]
extern crate swc_node_base;
extern crate test;
use rkyv::Deserialize;
use rplugin::StableAst;
use std::{hint::black_box, path::Path};
use swc_common::input::SourceFileInput;
use swc_ecma_ast::{EsVersion, Program};
use swc_ecma_parser::{lexer::Lexer, Parser};
use test::Bencher;
fn input() -> Program {
testing::run_test(false, |cm, _handler| {
let fm = cm.load_file(Path::new("benches/input.js")).unwrap();
let lexer = Lexer::new(
Default::default(),
EsVersion::latest(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(lexer);
let program = parser.parse_program().unwrap();
Ok(program)
})
.unwrap()
}
#[bench]
fn rkyv_serialize(b: &mut Bencher) {
let program = input();
b.iter(|| {
let v = rkyv::to_bytes::<_, 512>(&program).unwrap();
black_box(v);
})
}
#[bench]
fn rkyv_deserialize(b: &mut Bencher) {
let program = input();
let bytes = rkyv::to_bytes::<_, 512>(&program).unwrap();
b.iter(|| {
let archived = unsafe { rkyv::archived_root::<Program>(&bytes[..]) };
let v: Program = archived.deserialize(&mut rkyv::Infallible).unwrap();
black_box(v);
})
}
#[bench]
fn json_serialize(b: &mut Bencher) {
let program = input();
b.iter(|| {
let v = serde_json::to_string(&program).unwrap();
black_box(v);
})
}
#[bench]
fn json_deserialize(b: &mut Bencher) {
let program = input();
let json_str = serde_json::to_string(&program).unwrap();
eprintln!("json: {}", json_str);
b.iter(|| {
let v: Program = serde_json::from_str(&json_str).unwrap();
black_box(v);
})
}
#[bench]
fn ast_clone(b: &mut Bencher) {
let program = input();
b.iter(|| {
let program = program.clone();
black_box(program);
})
}
#[bench]
fn ast_clone_to_stable(b: &mut Bencher) {
let program = input();
b.iter(|| {
let program = program.clone();
let v = swc_ecma_plugin_ast::Program::from_unstable(program);
black_box(v);
})
}
#[bench]
fn ast_clone_to_stable_then_to_unstable(b: &mut Bencher) {
let program = input();
b.iter(|| {
let program = program.clone();
let stable = swc_ecma_plugin_ast::Program::from_unstable(program);
let v = stable.into_unstable();
black_box(v);
})
}
| 21.763158 | 78 | 0.587263 |
092ac0fdcb0238de2fe669c4e2b191373cba4091
| 24,973 |
#![allow(unsafe_code)]
use std::{collections::HashMap, sync::Arc};
use egui::{
emath::Rect,
epaint::{Color32, Mesh, Primitive, Vertex},
};
use glow::HasContext as _;
use memoffset::offset_of;
use crate::check_for_gl_error;
use crate::misc_util::{compile_shader, link_program};
use crate::post_process::PostProcess;
use crate::shader_version::ShaderVersion;
use crate::vao;
pub use glow::Context;
const VERT_SRC: &str = include_str!("shader/vertex.glsl");
const FRAG_SRC: &str = include_str!("shader/fragment.glsl");
pub type TextureFilter = egui::TextureFilter;
trait TextureFilterExt {
fn glow_code(&self) -> u32;
}
impl TextureFilterExt for TextureFilter {
fn glow_code(&self) -> u32 {
match self {
TextureFilter::Linear => glow::LINEAR,
TextureFilter::Nearest => glow::NEAREST,
}
}
}
/// An OpenGL painter using [`glow`].
///
/// This is responsible for painting egui and managing egui textures.
/// You can access the underlying [`glow::Context`] with [`Self::gl`].
///
/// This struct must be destroyed with [`Painter::destroy`] before dropping, to ensure OpenGL
/// objects have been properly deleted and are not leaked.
pub struct Painter {
gl: Arc<glow::Context>,
max_texture_side: usize,
program: glow::Program,
u_screen_size: glow::UniformLocation,
u_sampler: glow::UniformLocation,
is_webgl_1: bool,
is_embedded: bool,
vao: crate::vao::VertexArrayObject,
srgb_support: bool,
post_process: Option<PostProcess>,
vbo: glow::Buffer,
element_array_buffer: glow::Buffer,
textures: HashMap<egui::TextureId, glow::Texture>,
next_native_tex_id: u64,
/// Stores outdated OpenGL textures that are yet to be deleted
textures_to_destroy: Vec<glow::Texture>,
/// Used to make sure we are destroyed correctly.
destroyed: bool,
}
impl Painter {
/// Create painter.
///
/// Set `pp_fb_extent` to the framebuffer size to enable `sRGB` support on OpenGL ES and WebGL.
///
/// Set `shader_prefix` if you want to turn on shader workaround e.g. `"#define APPLY_BRIGHTENING_GAMMA\n"`
/// (see <https://github.com/emilk/egui/issues/794>).
///
/// # Errors
    /// Returns `Err` in the following cases:
    /// * failed to compile a shader
    /// * failed to create the postprocess stage on WebGL with `sRGB` support
    /// * failed to create a buffer
pub fn new(
gl: Arc<glow::Context>,
pp_fb_extent: Option<[i32; 2]>,
shader_prefix: &str,
) -> Result<Painter, String> {
crate::profile_function!();
crate::check_for_gl_error_even_in_release!(&gl, "before Painter::new");
let max_texture_side = unsafe { gl.get_parameter_i32(glow::MAX_TEXTURE_SIZE) } as usize;
let shader_version = ShaderVersion::get(&gl);
let is_webgl_1 = shader_version == ShaderVersion::Es100;
let header = shader_version.version();
tracing::debug!("Shader header: {:?}.", header);
let srgb_support = gl.supported_extensions().contains("EXT_sRGB");
let (post_process, srgb_support_define) = match (shader_version, srgb_support) {
            // WebGL2 supports sRGB by default
(ShaderVersion::Es300, _) | (ShaderVersion::Es100, true) => unsafe {
// Add sRGB support marker for fragment shader
if let Some(size) = pp_fb_extent {
tracing::debug!("WebGL with sRGB enabled. Turning on post processing for linear framebuffer blending.");
// install post process to correct sRGB color:
(
Some(PostProcess::new(
gl.clone(),
shader_prefix,
is_webgl_1,
size,
)?),
"#define SRGB_SUPPORTED",
)
} else {
tracing::debug!("WebGL or OpenGL ES detected but PostProcess disabled because dimension is None");
(None, "")
}
},
            // WebGL1 without sRGB support: disable postprocess and use the fallback shader
(ShaderVersion::Es100, false) => (None, ""),
            // OpenGL 2.1 or above always supports sRGB, so add the sRGB support marker
_ => (None, "#define SRGB_SUPPORTED"),
};
unsafe {
let vert = compile_shader(
&gl,
glow::VERTEX_SHADER,
&format!(
"{}\n{}\n{}\n{}",
header,
shader_prefix,
shader_version.is_new_shader_interface(),
VERT_SRC
),
)?;
let frag = compile_shader(
&gl,
glow::FRAGMENT_SHADER,
&format!(
"{}\n{}\n{}\n{}\n{}",
header,
shader_prefix,
srgb_support_define,
shader_version.is_new_shader_interface(),
FRAG_SRC
),
)?;
let program = link_program(&gl, [vert, frag].iter())?;
gl.detach_shader(program, vert);
gl.detach_shader(program, frag);
gl.delete_shader(vert);
gl.delete_shader(frag);
let u_screen_size = gl.get_uniform_location(program, "u_screen_size").unwrap();
let u_sampler = gl.get_uniform_location(program, "u_sampler").unwrap();
let vbo = gl.create_buffer()?;
let a_pos_loc = gl.get_attrib_location(program, "a_pos").unwrap();
let a_tc_loc = gl.get_attrib_location(program, "a_tc").unwrap();
let a_srgba_loc = gl.get_attrib_location(program, "a_srgba").unwrap();
let stride = std::mem::size_of::<Vertex>() as i32;
let buffer_infos = vec![
vao::BufferInfo {
location: a_pos_loc,
vector_size: 2,
data_type: glow::FLOAT,
normalized: false,
stride,
offset: offset_of!(Vertex, pos) as i32,
},
vao::BufferInfo {
location: a_tc_loc,
vector_size: 2,
data_type: glow::FLOAT,
normalized: false,
stride,
offset: offset_of!(Vertex, uv) as i32,
},
vao::BufferInfo {
location: a_srgba_loc,
vector_size: 4,
data_type: glow::UNSIGNED_BYTE,
normalized: false,
stride,
offset: offset_of!(Vertex, color) as i32,
},
];
let vao = crate::vao::VertexArrayObject::new(&gl, vbo, buffer_infos);
let element_array_buffer = gl.create_buffer()?;
crate::check_for_gl_error_even_in_release!(&gl, "after Painter::new");
Ok(Painter {
gl,
max_texture_side,
program,
u_screen_size,
u_sampler,
is_webgl_1,
is_embedded: matches!(shader_version, ShaderVersion::Es100 | ShaderVersion::Es300),
vao,
srgb_support,
post_process,
vbo,
element_array_buffer,
textures: Default::default(),
next_native_tex_id: 1 << 32,
textures_to_destroy: Vec::new(),
destroyed: false,
})
}
}
/// Access the shared glow context.
pub fn gl(&self) -> &Arc<glow::Context> {
&self.gl
}
pub fn max_texture_side(&self) -> usize {
self.max_texture_side
}
unsafe fn prepare_painting(
&mut self,
[width_in_pixels, height_in_pixels]: [u32; 2],
pixels_per_point: f32,
) -> (u32, u32) {
self.gl.enable(glow::SCISSOR_TEST);
        // egui outputs meshes in both winding orders
self.gl.disable(glow::CULL_FACE);
self.gl.disable(glow::DEPTH_TEST);
self.gl.color_mask(true, true, true, true);
self.gl.enable(glow::BLEND);
self.gl
.blend_equation_separate(glow::FUNC_ADD, glow::FUNC_ADD);
self.gl.blend_func_separate(
// egui outputs colors with premultiplied alpha:
glow::ONE,
glow::ONE_MINUS_SRC_ALPHA,
// Less important, but this is technically the correct alpha blend function
// when you want to make use of the framebuffer alpha (for screenshots, compositing, etc).
glow::ONE_MINUS_DST_ALPHA,
glow::ONE,
);
if !cfg!(target_arch = "wasm32") {
self.gl.enable(glow::FRAMEBUFFER_SRGB);
check_for_gl_error!(&self.gl, "FRAMEBUFFER_SRGB");
}
let width_in_points = width_in_pixels as f32 / pixels_per_point;
let height_in_points = height_in_pixels as f32 / pixels_per_point;
self.gl
.viewport(0, 0, width_in_pixels as i32, height_in_pixels as i32);
self.gl.use_program(Some(self.program));
self.gl
.uniform_2_f32(Some(&self.u_screen_size), width_in_points, height_in_points);
self.gl.uniform_1_i32(Some(&self.u_sampler), 0);
self.gl.active_texture(glow::TEXTURE0);
self.vao.bind(&self.gl);
self.gl
.bind_buffer(glow::ELEMENT_ARRAY_BUFFER, Some(self.element_array_buffer));
check_for_gl_error!(&self.gl, "prepare_painting");
(width_in_pixels, height_in_pixels)
}
/// You are expected to have cleared the color buffer before calling this.
pub fn paint_and_update_textures(
&mut self,
screen_size_px: [u32; 2],
pixels_per_point: f32,
clipped_primitives: &[egui::ClippedPrimitive],
textures_delta: &egui::TexturesDelta,
) {
crate::profile_function!();
for (id, image_delta) in &textures_delta.set {
self.set_texture(*id, image_delta);
}
self.paint_primitives(screen_size_px, pixels_per_point, clipped_primitives);
for &id in &textures_delta.free {
self.free_texture(id);
}
}
/// Main entry-point for painting a frame.
///
/// You should call `target.clear_color(..)` before
/// and `target.finish()` after this.
///
/// The following OpenGL features will be set:
/// - Scissor test will be enabled
/// - Cull face will be disabled
/// - Blend will be enabled
///
/// The scissor area and blend parameters will be changed.
///
/// As well as this, the following objects will be unset:
/// - Vertex Buffer
/// - Element Buffer
/// - Texture (and active texture will be set to 0)
/// - Program
///
/// Please be mindful of these effects when integrating into your program, and also be mindful
/// of the effects your program might have on this code. Look at the source if in doubt.
pub fn paint_primitives(
&mut self,
screen_size_px: [u32; 2],
pixels_per_point: f32,
clipped_primitives: &[egui::ClippedPrimitive],
) {
crate::profile_function!();
self.assert_not_destroyed();
if let Some(ref mut post_process) = self.post_process {
unsafe {
post_process.begin(screen_size_px[0] as i32, screen_size_px[1] as i32);
post_process.bind();
self.gl.disable(glow::SCISSOR_TEST);
self.gl
.viewport(0, 0, screen_size_px[0] as i32, screen_size_px[1] as i32);
// use the same clear-color as was set for the screen framebuffer.
self.gl.clear(glow::COLOR_BUFFER_BIT);
}
}
let size_in_pixels = unsafe { self.prepare_painting(screen_size_px, pixels_per_point) };
for egui::ClippedPrimitive {
clip_rect,
primitive,
} in clipped_primitives
{
set_clip_rect(&self.gl, size_in_pixels, pixels_per_point, *clip_rect);
match primitive {
Primitive::Mesh(mesh) => {
self.paint_mesh(mesh);
}
Primitive::Callback(callback) => {
if callback.rect.is_positive() {
crate::profile_scope!("callback");
// Transform callback rect to physical pixels:
let rect_min_x = pixels_per_point * callback.rect.min.x;
let rect_min_y = pixels_per_point * callback.rect.min.y;
let rect_max_x = pixels_per_point * callback.rect.max.x;
let rect_max_y = pixels_per_point * callback.rect.max.y;
let rect_min_x = rect_min_x.round() as i32;
let rect_min_y = rect_min_y.round() as i32;
let rect_max_x = rect_max_x.round() as i32;
let rect_max_y = rect_max_y.round() as i32;
unsafe {
self.gl.viewport(
rect_min_x,
size_in_pixels.1 as i32 - rect_max_y,
rect_max_x - rect_min_x,
rect_max_y - rect_min_y,
);
}
let info = egui::PaintCallbackInfo {
viewport: callback.rect,
clip_rect: *clip_rect,
pixels_per_point,
screen_size_px,
};
callback.call(&info, self);
check_for_gl_error!(&self.gl, "callback");
// Restore state:
unsafe {
if let Some(ref mut post_process) = self.post_process {
post_process.bind();
}
self.prepare_painting(screen_size_px, pixels_per_point)
};
}
}
}
}
unsafe {
self.vao.unbind(&self.gl);
self.gl.bind_buffer(glow::ELEMENT_ARRAY_BUFFER, None);
if let Some(ref post_process) = self.post_process {
post_process.end();
}
self.gl.disable(glow::SCISSOR_TEST);
check_for_gl_error!(&self.gl, "painting");
}
}
#[inline(never)] // Easier profiling
fn paint_mesh(&mut self, mesh: &Mesh) {
debug_assert!(mesh.is_valid());
if let Some(texture) = self.get_texture(mesh.texture_id) {
unsafe {
self.gl.bind_buffer(glow::ARRAY_BUFFER, Some(self.vbo));
self.gl.buffer_data_u8_slice(
glow::ARRAY_BUFFER,
bytemuck::cast_slice(&mesh.vertices),
glow::STREAM_DRAW,
);
self.gl
.bind_buffer(glow::ELEMENT_ARRAY_BUFFER, Some(self.element_array_buffer));
self.gl.buffer_data_u8_slice(
glow::ELEMENT_ARRAY_BUFFER,
bytemuck::cast_slice(&mesh.indices),
glow::STREAM_DRAW,
);
self.gl.bind_texture(glow::TEXTURE_2D, Some(texture));
}
unsafe {
self.gl.draw_elements(
glow::TRIANGLES,
mesh.indices.len() as i32,
glow::UNSIGNED_INT,
0,
);
}
check_for_gl_error!(&self.gl, "paint_mesh");
}
}
// ------------------------------------------------------------------------
pub fn set_texture(&mut self, tex_id: egui::TextureId, delta: &egui::epaint::ImageDelta) {
crate::profile_function!();
self.assert_not_destroyed();
let glow_texture = *self
.textures
.entry(tex_id)
.or_insert_with(|| unsafe { self.gl.create_texture().unwrap() });
unsafe {
self.gl.bind_texture(glow::TEXTURE_2D, Some(glow_texture));
}
match &delta.image {
egui::ImageData::Color(image) => {
assert_eq!(
image.width() * image.height(),
image.pixels.len(),
"Mismatch between texture size and texel count"
);
let data: &[u8] = bytemuck::cast_slice(image.pixels.as_ref());
self.upload_texture_srgb(delta.pos, image.size, delta.filter, data);
}
egui::ImageData::Font(image) => {
assert_eq!(
image.width() * image.height(),
image.pixels.len(),
"Mismatch between texture size and texel count"
);
let gamma = if self.is_embedded && self.post_process.is_none() {
1.0 / 2.2
} else {
1.0
};
let data: Vec<u8> = image
.srgba_pixels(gamma)
.flat_map(|a| a.to_array())
.collect();
self.upload_texture_srgb(delta.pos, image.size, delta.filter, &data);
}
};
}
fn upload_texture_srgb(
&mut self,
pos: Option<[usize; 2]>,
[w, h]: [usize; 2],
texture_filter: TextureFilter,
data: &[u8],
) {
assert_eq!(data.len(), w * h * 4);
assert!(
w >= 1 && h >= 1,
"Got a texture image of size {}x{}. A texture must at least be one texel wide.",
w,
h
);
assert!(
w <= self.max_texture_side && h <= self.max_texture_side,
"Got a texture image of size {}x{}, but the maximum supported texture side is only {}",
w,
h,
self.max_texture_side
);
unsafe {
self.gl.tex_parameter_i32(
glow::TEXTURE_2D,
glow::TEXTURE_MAG_FILTER,
texture_filter.glow_code() as i32,
);
self.gl.tex_parameter_i32(
glow::TEXTURE_2D,
glow::TEXTURE_MIN_FILTER,
texture_filter.glow_code() as i32,
);
self.gl.tex_parameter_i32(
glow::TEXTURE_2D,
glow::TEXTURE_WRAP_S,
glow::CLAMP_TO_EDGE as i32,
);
self.gl.tex_parameter_i32(
glow::TEXTURE_2D,
glow::TEXTURE_WRAP_T,
glow::CLAMP_TO_EDGE as i32,
);
check_for_gl_error!(&self.gl, "tex_parameter");
let (internal_format, src_format) = if self.is_webgl_1 {
let format = if self.srgb_support {
glow::SRGB_ALPHA
} else {
glow::RGBA
};
(format, format)
} else {
(glow::SRGB8_ALPHA8, glow::RGBA)
};
self.gl.pixel_store_i32(glow::UNPACK_ALIGNMENT, 1);
let level = 0;
if let Some([x, y]) = pos {
self.gl.tex_sub_image_2d(
glow::TEXTURE_2D,
level,
x as _,
y as _,
w as _,
h as _,
src_format,
glow::UNSIGNED_BYTE,
glow::PixelUnpackData::Slice(data),
);
check_for_gl_error!(&self.gl, "tex_sub_image_2d");
} else {
let border = 0;
self.gl.tex_image_2d(
glow::TEXTURE_2D,
level,
internal_format as _,
w as _,
h as _,
border,
src_format,
glow::UNSIGNED_BYTE,
Some(data),
);
check_for_gl_error!(&self.gl, "tex_image_2d");
}
}
}
pub fn free_texture(&mut self, tex_id: egui::TextureId) {
if let Some(old_tex) = self.textures.remove(&tex_id) {
unsafe { self.gl.delete_texture(old_tex) };
}
}
/// Get the [`glow::Texture`] bound to a [`egui::TextureId`].
pub fn get_texture(&self, texture_id: egui::TextureId) -> Option<glow::Texture> {
self.textures.get(&texture_id).copied()
}
#[allow(clippy::needless_pass_by_value)] // False positive
pub fn register_native_texture(&mut self, native: glow::Texture) -> egui::TextureId {
self.assert_not_destroyed();
let id = egui::TextureId::User(self.next_native_tex_id);
self.next_native_tex_id += 1;
self.textures.insert(id, native);
id
}
#[allow(clippy::needless_pass_by_value)] // False positive
pub fn replace_native_texture(&mut self, id: egui::TextureId, replacing: glow::Texture) {
if let Some(old_tex) = self.textures.insert(id, replacing) {
self.textures_to_destroy.push(old_tex);
}
}
unsafe fn destroy_gl(&self) {
self.gl.delete_program(self.program);
for tex in self.textures.values() {
self.gl.delete_texture(*tex);
}
self.gl.delete_buffer(self.vbo);
self.gl.delete_buffer(self.element_array_buffer);
for t in &self.textures_to_destroy {
self.gl.delete_texture(*t);
}
}
/// This function must be called before [`Painter`] is dropped, as [`Painter`] has some OpenGL objects
/// that should be deleted.
pub fn destroy(&mut self) {
if !self.destroyed {
unsafe {
self.destroy_gl();
if let Some(ref post_process) = self.post_process {
post_process.destroy();
}
}
self.destroyed = true;
}
}
fn assert_not_destroyed(&self) {
        assert!(!self.destroyed, "the egui glow painter has already been destroyed!");
}
}
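// Usage sketch (added for illustration): a typical frame with this painter, assuming the
// embedding (e.g. winit + glutin) supplies `gl`, `screen_size_px`, `pixels_per_point`,
// `clipped_primitives` and `textures_delta`:
//
//     let mut painter = Painter::new(gl.clone(), None, "")?;
//     clear(&gl, screen_size_px, egui::Rgba::TRANSPARENT);
//     painter.paint_and_update_textures(
//         screen_size_px,
//         pixels_per_point,
//         &clipped_primitives,
//         &textures_delta,
//     );
//     // ...and before dropping:
//     painter.destroy();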
pub fn clear(gl: &glow::Context, screen_size_in_pixels: [u32; 2], clear_color: egui::Rgba) {
crate::profile_function!();
unsafe {
gl.disable(glow::SCISSOR_TEST);
gl.viewport(
0,
0,
screen_size_in_pixels[0] as i32,
screen_size_in_pixels[1] as i32,
);
if true {
// verified to be correct on eframe native (on Mac).
gl.clear_color(
clear_color[0] as f32,
clear_color[1] as f32,
clear_color[2] as f32,
clear_color[3] as f32,
);
} else {
let clear_color: Color32 = clear_color.into();
gl.clear_color(
clear_color[0] as f32 / 255.0,
clear_color[1] as f32 / 255.0,
clear_color[2] as f32 / 255.0,
clear_color[3] as f32 / 255.0,
);
}
gl.clear(glow::COLOR_BUFFER_BIT);
}
}
impl Drop for Painter {
fn drop(&mut self) {
if !self.destroyed {
tracing::warn!(
"You forgot to call destroy() on the egui glow painter. Resources will leak!"
);
}
}
}
fn set_clip_rect(
gl: &glow::Context,
size_in_pixels: (u32, u32),
pixels_per_point: f32,
clip_rect: Rect,
) {
// Transform clip rect to physical pixels:
let clip_min_x = pixels_per_point * clip_rect.min.x;
let clip_min_y = pixels_per_point * clip_rect.min.y;
let clip_max_x = pixels_per_point * clip_rect.max.x;
let clip_max_y = pixels_per_point * clip_rect.max.y;
// Make sure clip rect can fit within a `u32`:
let clip_min_x = clip_min_x.clamp(0.0, size_in_pixels.0 as f32);
let clip_min_y = clip_min_y.clamp(0.0, size_in_pixels.1 as f32);
let clip_max_x = clip_max_x.clamp(clip_min_x, size_in_pixels.0 as f32);
let clip_max_y = clip_max_y.clamp(clip_min_y, size_in_pixels.1 as f32);
let clip_min_x = clip_min_x.round() as i32;
let clip_min_y = clip_min_y.round() as i32;
let clip_max_x = clip_max_x.round() as i32;
let clip_max_y = clip_max_y.round() as i32;
unsafe {
gl.scissor(
clip_min_x,
size_in_pixels.1 as i32 - clip_max_y,
clip_max_x - clip_min_x,
clip_max_y - clip_min_y,
);
}
}
| 34.540802 | 124 | 0.521283 |
26ac30fdfbee8a49aea67c6824b561c14ae5a3fa
| 863 |
use std::collections::HashMap;
use crate::{variables::Variables, types::Value};
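/// A stack of variable scopes: each frame is a `Variables` table, and lookups only
/// consult the innermost (most recently pushed) frame.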
pub struct Stack {
data: Vec<Variables>,
}
impl Stack {
pub fn new() -> Self {
Self {
data: Vec::new(),
}
}
pub fn last_mut(&mut self) -> Option<&mut Variables> {
self.data.last_mut()
}
pub fn get(&self, name: &String) -> Option<Value> {
self.data.last()?.get(name)
}
pub fn get_mut(&mut self, name: &String) -> Option<&mut Value> {
self.last_mut()?.get_mut(name)
}
pub fn set(&mut self, name: &String, value: Value) -> Option<()> {
self.last_mut()?.set(name, value)
}
pub fn push(&mut self, frame: HashMap<String, Value>) {
self.data.push(Variables::with_frame(frame));
}
pub fn pop(&mut self) -> Option<Variables> {
self.data.pop()
}
}
| 21.575 | 70 | 0.553882 |
fc0ad8ae85b0d2c853403cfbb7916e09beb8e82b
| 17,325 |
use crate::fp::*;
use crate::fp2::*;
#[cfg(feature = "canon")]
use canonical_derive::Canon;
use core::fmt;
use core::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign};
use subtle::{Choice, ConditionallySelectable, ConstantTimeEq, CtOption};
#[cfg(feature = "serde_req")]
use serde::{
self, de::Visitor, ser::SerializeStruct, Deserialize, Deserializer, Serialize, Serializer,
};
/// This represents an element $c_0 + c_1 v + c_2 v^2$ of $\mathbb{F}_{p^6} = \mathbb{F}_{p^2}[v] / (v^3 - (u + 1))$.
#[cfg_attr(feature = "canon", derive(Canon))]
pub struct Fp6 {
pub c0: Fp2,
pub c1: Fp2,
pub c2: Fp2,
}
impl From<Fp> for Fp6 {
fn from(f: Fp) -> Fp6 {
Fp6 {
c0: Fp2::from(f),
c1: Fp2::zero(),
c2: Fp2::zero(),
}
}
}
impl From<Fp2> for Fp6 {
fn from(f: Fp2) -> Fp6 {
Fp6 {
c0: f,
c1: Fp2::zero(),
c2: Fp2::zero(),
}
}
}
impl PartialEq for Fp6 {
fn eq(&self, other: &Fp6) -> bool {
self.ct_eq(other).into()
}
}
impl Copy for Fp6 {}
impl Clone for Fp6 {
#[inline]
fn clone(&self) -> Self {
*self
}
}
impl Default for Fp6 {
fn default() -> Self {
Fp6::zero()
}
}
impl fmt::Debug for Fp6 {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?} + ({:?})*v + ({:?})*v^2", self.c0, self.c1, self.c2)
}
}
#[cfg(feature = "serde_req")]
impl Serialize for Fp6 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut fp2 = serializer.serialize_struct("struct Fp6", 3)?;
fp2.serialize_field("c0", &self.c0)?;
fp2.serialize_field("c1", &self.c1)?;
fp2.serialize_field("c2", &self.c2)?;
fp2.end()
}
}
#[cfg(feature = "serde_req")]
impl<'de> Deserialize<'de> for Fp6 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
enum Field {
C0,
C1,
C2,
}
impl<'de> Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Field, D::Error>
where
D: Deserializer<'de>,
{
struct FieldVisitor;
impl<'de> Visitor<'de> for FieldVisitor {
type Value = Field;
fn expecting(
&self,
formatter: &mut ::core::fmt::Formatter,
) -> ::core::fmt::Result {
formatter.write_str("struct Fp6")
}
fn visit_str<E>(self, value: &str) -> Result<Field, E>
where
E: serde::de::Error,
{
match value {
"c0" => Ok(Field::C0),
"c1" => Ok(Field::C1),
"c2" => Ok(Field::C2),
_ => Err(serde::de::Error::unknown_field(value, FIELDS)),
}
}
}
deserializer.deserialize_identifier(FieldVisitor)
}
}
struct Fp6Visitor;
impl<'de> Visitor<'de> for Fp6Visitor {
type Value = Fp6;
fn expecting(&self, formatter: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
formatter.write_str("struct Fp6")
}
fn visit_seq<V>(self, mut seq: V) -> Result<Fp6, V::Error>
where
V: serde::de::SeqAccess<'de>,
{
let c0 = seq
.next_element()?
.ok_or_else(|| serde::de::Error::invalid_length(0, &self))?;
let c1 = seq
.next_element()?
.ok_or_else(|| serde::de::Error::invalid_length(0, &self))?;
let c2 = seq
.next_element()?
.ok_or_else(|| serde::de::Error::invalid_length(0, &self))?;
Ok(Fp6 { c0, c1, c2 })
}
}
const FIELDS: &[&str] = &["c0", "c1", "c2"];
deserializer.deserialize_struct("Fp6", FIELDS, Fp6Visitor)
}
}
impl ConditionallySelectable for Fp6 {
#[inline(always)]
fn conditional_select(a: &Self, b: &Self, choice: Choice) -> Self {
Fp6 {
c0: Fp2::conditional_select(&a.c0, &b.c0, choice),
c1: Fp2::conditional_select(&a.c1, &b.c1, choice),
c2: Fp2::conditional_select(&a.c2, &b.c2, choice),
}
}
}
impl ConstantTimeEq for Fp6 {
#[inline(always)]
fn ct_eq(&self, other: &Self) -> Choice {
self.c0.ct_eq(&other.c0) & self.c1.ct_eq(&other.c1) & self.c2.ct_eq(&other.c2)
}
}
impl Fp6 {
#[inline]
pub fn zero() -> Self {
Fp6 {
c0: Fp2::zero(),
c1: Fp2::zero(),
c2: Fp2::zero(),
}
}
#[inline]
pub fn one() -> Self {
Fp6 {
c0: Fp2::one(),
c1: Fp2::zero(),
c2: Fp2::zero(),
}
}
pub fn mul_by_1(&self, c1: &Fp2) -> Fp6 {
let b_b = self.c1 * c1;
let t1 = (self.c1 + self.c2) * c1 - b_b;
let t1 = t1.mul_by_nonresidue();
let t2 = (self.c0 + self.c1) * c1 - b_b;
Fp6 {
c0: t1,
c1: t2,
c2: b_b,
}
}
pub fn mul_by_01(&self, c0: &Fp2, c1: &Fp2) -> Fp6 {
let a_a = self.c0 * c0;
let b_b = self.c1 * c1;
let t1 = (self.c1 + self.c2) * c1 - b_b;
let t1 = t1.mul_by_nonresidue() + a_a;
let t2 = (c0 + c1) * (self.c0 + self.c1) - a_a - b_b;
let t3 = (self.c0 + self.c2) * c0 - a_a + b_b;
Fp6 {
c0: t1,
c1: t2,
c2: t3,
}
}
/// Multiply by quadratic nonresidue v.
pub fn mul_by_nonresidue(&self) -> Self {
// Given a + bv + cv^2, this produces
// av + bv^2 + cv^3
// but because v^3 = u + 1, we have
        // c(u + 1) + av + bv^2
Fp6 {
c0: self.c2.mul_by_nonresidue(),
c1: self.c0,
c2: self.c1,
}
}
/// Raises this element to p.
#[inline(always)]
pub fn frobenius_map(&self) -> Self {
let c0 = self.c0.frobenius_map();
let c1 = self.c1.frobenius_map();
let c2 = self.c2.frobenius_map();
// c1 = c1 * (u + 1)^((p - 1) / 3)
let c1 = c1
* Fp2 {
c0: Fp::zero(),
c1: Fp::from_raw_unchecked([
0xcd03c9e48671f071,
0x5dab22461fcda5d2,
0x587042afd3851b95,
0x8eb60ebe01bacb9e,
0x3f97d6e83d050d2,
0x18f0206554638741,
]),
};
// c2 = c2 * (u + 1)^((2p - 2) / 3)
let c2 = c2
* Fp2 {
c0: Fp::from_raw_unchecked([
0x890dc9e4867545c3,
0x2af322533285a5d5,
0x50880866309b7e2c,
0xa20d1b8c7e881024,
0x14e4f04fe2db9068,
0x14e56d3f1564853a,
]),
c1: Fp::zero(),
};
Fp6 { c0, c1, c2 }
}
#[inline(always)]
pub fn is_zero(&self) -> Choice {
self.c0.is_zero() & self.c1.is_zero() & self.c2.is_zero()
}
#[inline]
pub fn square(&self) -> Self {
let s0 = self.c0.square();
let ab = self.c0 * self.c1;
let s1 = ab + ab;
let s2 = (self.c0 - self.c1 + self.c2).square();
let bc = self.c1 * self.c2;
let s3 = bc + bc;
let s4 = self.c2.square();
Fp6 {
c0: s3.mul_by_nonresidue() + s0,
c1: s4.mul_by_nonresidue() + s1,
c2: s1 + s2 + s3 - s0 - s4,
}
}
#[inline]
pub fn invert(&self) -> CtOption<Self> {
let c0 = (self.c1 * self.c2).mul_by_nonresidue();
let c0 = self.c0.square() - c0;
let c1 = self.c2.square().mul_by_nonresidue();
let c1 = c1 - (self.c0 * self.c1);
let c2 = self.c1.square();
let c2 = c2 - (self.c0 * self.c2);
let tmp = ((self.c1 * c2) + (self.c2 * c1)).mul_by_nonresidue();
let tmp = tmp + (self.c0 * c0);
tmp.invert().map(|t| Fp6 {
c0: t * c0,
c1: t * c1,
c2: t * c2,
})
}
}
impl<'a, 'b> Mul<&'b Fp6> for &'a Fp6 {
type Output = Fp6;
#[inline]
fn mul(self, other: &'b Fp6) -> Self::Output {
let aa = self.c0 * other.c0;
let bb = self.c1 * other.c1;
let cc = self.c2 * other.c2;
let t1 = other.c1 + other.c2;
let tmp = self.c1 + self.c2;
let t1 = t1 * tmp;
let t1 = t1 - bb;
let t1 = t1 - cc;
let t1 = t1.mul_by_nonresidue();
let t1 = t1 + aa;
let t3 = other.c0 + other.c2;
let tmp = self.c0 + self.c2;
let t3 = t3 * tmp;
let t3 = t3 - aa;
let t3 = t3 + bb;
let t3 = t3 - cc;
let t2 = other.c0 + other.c1;
let tmp = self.c0 + self.c1;
let t2 = t2 * tmp;
let t2 = t2 - aa;
let t2 = t2 - bb;
let cc = cc.mul_by_nonresidue();
let t2 = t2 + cc;
Fp6 {
c0: t1,
c1: t2,
c2: t3,
}
}
}
impl<'a, 'b> Add<&'b Fp6> for &'a Fp6 {
type Output = Fp6;
#[inline]
fn add(self, rhs: &'b Fp6) -> Self::Output {
Fp6 {
c0: self.c0 + rhs.c0,
c1: self.c1 + rhs.c1,
c2: self.c2 + rhs.c2,
}
}
}
impl<'a> Neg for &'a Fp6 {
type Output = Fp6;
#[inline]
fn neg(self) -> Self::Output {
Fp6 {
c0: -self.c0,
c1: -self.c1,
c2: -self.c2,
}
}
}
impl Neg for Fp6 {
type Output = Fp6;
#[inline]
fn neg(self) -> Self::Output {
-&self
}
}
impl<'a, 'b> Sub<&'b Fp6> for &'a Fp6 {
type Output = Fp6;
#[inline]
fn sub(self, rhs: &'b Fp6) -> Self::Output {
Fp6 {
c0: self.c0 - rhs.c0,
c1: self.c1 - rhs.c1,
c2: self.c2 - rhs.c2,
}
}
}
impl_binops_additive!(Fp6, Fp6);
impl_binops_multiplicative!(Fp6, Fp6);
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_arithmetic() {
use crate::fp::*;
let a = Fp6 {
c0: Fp2 {
c0: Fp::from_raw_unchecked([
0x47f9cb98b1b82d58,
0x5fe911eba3aa1d9d,
0x96bf1b5f4dd81db3,
0x8100d27cc9259f5b,
0xafa20b9674640eab,
0x9bbcea7d8d9497d,
]),
c1: Fp::from_raw_unchecked([
0x303cb98b1662daa,
0xd93110aa0a621d5a,
0xbfa9820c5be4a468,
0xba3643ecb05a348,
0xdc3534bb1f1c25a6,
0x6c305bb19c0e1c1,
]),
},
c1: Fp2 {
c0: Fp::from_raw_unchecked([
0x46f9cb98b162d858,
0xbe9109cf7aa1d57,
0xc791bc55fece41d2,
0xf84c57704e385ec2,
0xcb49c1d9c010e60f,
0xacdb8e158bfe3c8,
]),
c1: Fp::from_raw_unchecked([
0x8aefcb98b15f8306,
0x3ea1108fe4f21d54,
0xcf79f69fa1b7df3b,
0xe4f54aa1d16b1a3c,
0xba5e4ef86105a679,
0xed86c0797bee5cf,
]),
},
c2: Fp2 {
c0: Fp::from_raw_unchecked([
0xcee5cb98b15c2db4,
0x71591082d23a1d51,
0xd76230e944a17ca4,
0xd19e3dd3549dd5b6,
0xa972dc1701fa66e3,
0x12e31f2dd6bde7d6,
]),
c1: Fp::from_raw_unchecked([
0xad2acb98b1732d9d,
0x2cfd10dd06961d64,
0x7396b86c6ef24e8,
0xbd76e2fdb1bfc820,
0x6afea7f6de94d0d5,
0x10994b0c5744c040,
]),
},
};
let b = Fp6 {
c0: Fp2 {
c0: Fp::from_raw_unchecked([
0xf120cb98b16fd84b,
0x5fb510cff3de1d61,
0xf21a5d069d8c251,
0xaa1fd62f34f2839a,
0x5a1335157f89913f,
0x14a3fe329643c247,
]),
c1: Fp::from_raw_unchecked([
0x3516cb98b16c82f9,
0x926d10c2e1261d5f,
0x1709e01a0cc25fba,
0x96c8c960b8253f14,
0x4927c234207e51a9,
0x18aeb158d542c44e,
]),
},
c1: Fp2 {
c0: Fp::from_raw_unchecked([
0xbf0dcb98b16982fc,
0xa67910b71d1a1d5c,
0xb7c147c2b8fb06ff,
0x1efa710d47d2e7ce,
0xed20a79c7e27653c,
0x2b85294dac1dfba,
]),
c1: Fp::from_raw_unchecked([
0x9d52cb98b18082e5,
0x621d111151761d6f,
0xe79882603b48af43,
0xad31637a4f4da37,
0xaeac737c5ac1cf2e,
0x6e7e735b48b824,
]),
},
c2: Fp2 {
c0: Fp::from_raw_unchecked([
0xe148cb98b17d2d93,
0x94d511043ebe1d6c,
0xef80bca9de324cac,
0xf77c0969282795b1,
0x9dc1009afbb68f97,
0x47931999a47ba2b,
]),
c1: Fp::from_raw_unchecked([
0x253ecb98b179d841,
0xc78d10f72c061d6a,
0xf768f6f3811bea15,
0xe424fc9aab5a512b,
0x8cd58db99cab5001,
0x883e4bfd946bc32,
]),
},
};
let c = Fp6 {
c0: Fp2 {
c0: Fp::from_raw_unchecked([
0x6934cb98b17682ef,
0xfa4510ea194e1d67,
0xff51313d2405877e,
0xd0cdefcc2e8d0ca5,
0x7bea1ad83da0106b,
0xc8e97e61845be39,
]),
c1: Fp::from_raw_unchecked([
0x4779cb98b18d82d8,
0xb5e911444daa1d7a,
0x2f286bdaa6532fc2,
0xbca694f68baeff0f,
0x3d75e6b81a3a7a5d,
0xa44c3c498cc96a3,
]),
},
c1: Fp2 {
c0: Fp::from_raw_unchecked([
0x8b6fcb98b18a2d86,
0xe8a111373af21d77,
0x3710a624493ccd2b,
0xa94f88280ee1ba89,
0x2c8a73d6bb2f3ac7,
0xe4f76ead7cb98aa,
]),
c1: Fp::from_raw_unchecked([
0xcf65cb98b186d834,
0x1b59112a283a1d74,
0x3ef8e06dec266a95,
0x95f87b5992147603,
0x1b9f00f55c23fb31,
0x125a2a1116ca9ab1,
]),
},
c2: Fp2 {
c0: Fp::from_raw_unchecked([
0x135bcb98b18382e2,
0x4e11111d15821d72,
0x46e11ab78f1007fe,
0x82a16e8b1547317d,
0xab38e13fd18bb9b,
0x1664dd3755c99cb8,
]),
c1: Fp::from_raw_unchecked([
0xce65cb98b1318334,
0xc7590fdb7c3a1d2e,
0x6fcb81649d1c8eb3,
0xd44004d1727356a,
0x3746b738a7d0d296,
0x136c144a96b134fc,
]),
},
};
assert_eq!(a.square(), &a * &a);
assert_eq!(b.square(), &b * &b);
assert_eq!(c.square(), &c * &c);
assert_eq!(
(a + b) * c.square(),
&(&(&c * &c) * &a) + &(&(&c * &c) * &b)
);
assert_eq!(
&a.invert().unwrap() * &b.invert().unwrap(),
(&a * &b).invert().unwrap()
);
assert_eq!(&a.invert().unwrap() * &a, Fp6::one());
}
#[test]
#[cfg(feature = "serde_req")]
fn fp6_serde_roundtrip() {
use bincode;
let fp6 = Fp6 {
c0: Fp2::one(),
c1: Fp2::one(),
c2: Fp2::one(),
};
let ser = bincode::serialize(&fp6).unwrap();
let deser: Fp6 = bincode::deserialize(&ser).unwrap();
assert_eq!(fp6, deser);
}
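    #[test]
    fn test_mul_by_nonresidue() {
        // Added sanity check (a minimal sketch): multiplying by the nonresidue `v` via the
        // specialized method should agree with generic multiplication by the element
        // v = 0 + 1*v + 0*v^2. Assumes the `impl_binops_*` macros provide the owned
        // operator impls, as in the upstream bls12_381-style crates.
        let v = Fp6 {
            c0: Fp2::zero(),
            c1: Fp2::one(),
            c2: Fp2::zero(),
        };
        let a = Fp6::one() + Fp6::one() + v;
        assert_eq!(a.mul_by_nonresidue(), a * v);
    }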
}
| 27.54372 | 110 | 0.428629 |
c10ed301387e63f4680768bd6de4982d402493db
| 12,483 |
#![feature(plugin)]
#![plugin(phf_macros)]
extern crate phf;
mod map {
use std::collections::{HashMap, HashSet};
use phf;
#[allow(dead_code)]
static TRAILING_COMMA: phf::Map<&'static str, isize> = phf_map!(
"foo" => 10,
);
#[allow(dead_code)]
static NO_TRAILING_COMMA: phf::Map<&'static str, isize> = phf_map!(
"foo" => 10
);
#[allow(dead_code)]
static BYTE_STRING_KEY: phf::Map<&'static [u8], &'static str> = phf_map!(
b"camembert" => "delicious",
);
#[test]
fn test_two() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"foo" => 10,
"bar" => 11,
);
assert!(Some(&10) == MAP.get(&("foo")));
assert!(Some(&11) == MAP.get(&("bar")));
assert_eq!(None, MAP.get(&("asdf")));
assert_eq!(2, MAP.len());
}
#[test]
fn test_entries() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"foo" => 10,
"bar" => 11,
);
let hash = MAP.entries().map(|(&k, &v)| (k, v)).collect::<HashMap<_, isize>>();
assert!(Some(&10) == hash.get(&("foo")));
assert!(Some(&11) == hash.get(&("bar")));
assert_eq!(2, hash.len());
}
#[test]
fn test_keys() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"foo" => 10,
"bar" => 11,
);
let hash = MAP.keys().map(|&e| e).collect::<HashSet<_>>();
assert!(hash.contains(&("foo")));
assert!(hash.contains(&("bar")));
assert_eq!(2, hash.len());
}
#[test]
fn test_values() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"foo" => 10,
"bar" => 11,
);
let hash = MAP.values().map(|&e| e).collect::<HashSet<isize>>();
assert!(hash.contains(&10));
assert!(hash.contains(&11));
assert_eq!(2, hash.len());
}
#[test]
fn test_large() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"a" => 0,
"b" => 1,
"c" => 2,
"d" => 3,
"e" => 4,
"f" => 5,
"g" => 6,
"h" => 7,
"i" => 8,
"j" => 9,
"k" => 10,
"l" => 11,
"m" => 12,
"n" => 13,
"o" => 14,
"p" => 15,
"q" => 16,
"r" => 17,
"s" => 18,
"t" => 19,
"u" => 20,
"v" => 21,
"w" => 22,
"x" => 23,
"y" => 24,
"z" => 25,
);
assert!(MAP.get(&("a")) == Some(&0));
}
#[test]
fn test_macro_key() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
concat!("foo", "bar") => 1
);
assert!(Some(&1) == MAP.get(&("foobar")));
}
#[test]
fn test_non_static_str_key() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"a" => 0,
);
assert_eq!(Some(&0), MAP.get(&*"a".to_string()));
}
#[test]
fn test_index_ok() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"a" => 0,
);
assert_eq!(0, MAP["a"]);
}
#[test]
#[should_panic]
fn test_index_fail() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"a" => 0,
);
MAP["b"];
}
macro_rules! test_key_type(
($t:ty, $($k:expr => $v:expr),+) => ({
static MAP: phf::Map<$t, isize> = phf_map! {
$($k => $v),+
};
$(
assert_eq!(Some(&$v), MAP.get(&$k));
)+
})
);
#[test]
fn test_array_vals() {
static MAP: phf::Map<&'static str, [u8; 3]> = phf_map!(
"a" => [0u8, 1, 2],
);
assert_eq!(Some(&[0u8, 1, 2]), MAP.get(&("a")));
}
#[test]
fn test_array_keys() {
static MAP: phf::Map<[u8; 2], isize> = phf_map!(
[0u8, 1] => 0,
[2, 3u8] => 1,
[4, 5] => 2,
);
assert_eq!(Some(&0), MAP.get(&[0u8, 1u8]));
}
#[test]
fn test_byte_keys() {
test_key_type!(u8, b'a' => 0, b'b' => 1);
}
#[test]
fn test_char_keys() {
test_key_type!(char, 'a' => 0, 'b' => 1);
}
#[test]
fn test_i8_keys() {
test_key_type!(i8, 0i8 => 0, 1i8 => 1);
}
#[test]
fn test_i16_keys() {
test_key_type!(i16, 0i16 => 0, 1i16 => 1);
}
#[test]
fn test_i32_keys() {
test_key_type!(i32, 0i32 => 0, 1i32 => 1);
}
#[test]
fn test_i64_keys() {
test_key_type!(i64, 0i64 => 0, 1i64 => 1);
}
#[test]
fn test_u8_keys() {
test_key_type!(u8, 0u8 => 0, 1u8 => 1);
}
#[test]
fn test_u16_keys() {
test_key_type!(u16, 0u16 => 0, 1u16 => 1);
}
#[test]
fn test_u32_keys() {
test_key_type!(u32, 0u32 => 0, 1u32 => 1);
}
#[test]
fn test_u64_keys() {
test_key_type!(u64, 0u64 => 0, 1u64 => 1);
}
#[test]
fn test_bool_keys() {
test_key_type!(bool, false => 0, true => 1);
}
#[test]
fn test_into_iterator() {
static MAP: phf::Map<&'static str, isize> = phf_map!(
"foo" => 10,
);
for (k, v) in &MAP {
assert_eq!(&"foo", k);
assert_eq!(&10, v)
}
}
}
mod set {
use std::collections::HashSet;
use phf;
#[allow(dead_code)]
static TRAILING_COMMA: phf::Set<&'static str> = phf_set! {
"foo",
};
#[allow(dead_code)]
static NO_TRAILING_COMMA: phf::Set<&'static str> = phf_set! {
"foo"
};
#[test]
fn test_two() {
static SET: phf::Set<&'static str> = phf_set! {
"hello",
"world",
};
assert!(SET.contains(&"hello"));
assert!(SET.contains(&"world"));
assert!(!SET.contains(&"foo"));
assert_eq!(2, SET.len());
}
#[test]
fn test_iter() {
static SET: phf::Set<&'static str> = phf_set! {
"hello",
"world",
};
let set = SET.iter().map(|e| *e).collect::<HashSet<_>>();
assert!(set.contains(&"hello"));
assert!(set.contains(&"world"));
assert_eq!(2, set.len());
}
#[test]
fn test_non_static_str_contains() {
static SET: phf::Set<&'static str> = phf_set! {
"hello",
"world",
};
assert!(SET.contains(&*"hello".to_string()));
}
#[test]
fn test_into_iterator() {
static SET: phf::Set<&'static str> = phf_set! {
"hello",
};
for e in &SET {
assert_eq!(&"hello", e);
}
}
}
mod ordered_map {
use phf;
#[allow(dead_code)]
static TRAILING_COMMA: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"foo" => 10,
);
#[allow(dead_code)]
static NO_TRAILING_COMMA: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"foo" => 10
);
#[test]
fn test_two() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"foo" => 10,
"bar" => 11,
);
assert!(Some(&10) == MAP.get(&"foo"));
assert!(Some(&11) == MAP.get(&"bar"));
assert_eq!(None, MAP.get(&"asdf"));
assert_eq!(2, MAP.len());
}
#[test]
fn test_get_index() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"foo" => 5,
"bar" => 5,
"baz" => 5,
);
assert_eq!(Some(0), MAP.get_index(&"foo"));
assert_eq!(Some(2), MAP.get_index(&"baz"));
assert_eq!(None, MAP.get_index(&"xyz"));
assert_eq!(Some(0), MAP.get_index(&*"foo".to_string()));
assert_eq!(Some(2), MAP.get_index(&*"baz".to_string()));
assert_eq!(None, MAP.get_index(&*"xyz".to_string()));
}
#[test]
fn test_index() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"foo" => 5,
"bar" => 6,
);
assert_eq!(Some((&"foo", &5)), MAP.index(0));
assert_eq!(Some((&"bar", &6)), MAP.index(1));
assert_eq!(None, MAP.index(2));
}
#[test]
fn test_entries() {
static MAP: phf::OrderedMap<&'static str, i32> = phf_ordered_map!(
"foo" => 10,
"bar" => 11,
"baz" => 12,
);
let vec = MAP.entries().map(|(&k, &v)| (k, v)).collect::<Vec<_>>();
assert_eq!(vec, vec!(("foo", 10), ("bar", 11), ("baz", 12)));
}
#[test]
fn test_keys() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"foo" => 10,
"bar" => 11,
"baz" => 12,
);
let vec = MAP.keys().map(|&e| e).collect::<Vec<_>>();
assert_eq!(vec, vec!("foo", "bar", "baz"));
}
#[test]
fn test_values() {
static MAP: phf::OrderedMap<&'static str, i32> = phf_ordered_map!(
"foo" => 10,
"bar" => 11,
"baz" => 12,
);
let vec = MAP.values().map(|&v| v).collect::<Vec<_>>();
assert_eq!(vec, vec!(10, 11, 12));
}
#[test]
fn test_index_ok() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"a" => 0,
);
assert_eq!(0, MAP["a"]);
}
#[test]
#[should_panic]
fn test_index_fail() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"a" => 0,
);
MAP["b"];
}
#[test]
fn test_non_static_str_key() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"a" => 0,
);
assert_eq!(Some(&0), MAP.get(&*"a".to_string()));
}
#[test]
fn test_into_iterator() {
static MAP: phf::OrderedMap<&'static str, isize> = phf_ordered_map!(
"foo" => 10,
);
for (k, v) in &MAP {
assert_eq!(&"foo", k);
assert_eq!(&10, v)
}
}
}
mod ordered_set {
use phf;
#[allow(dead_code)]
static TRAILING_COMMA: phf::OrderedSet<&'static str> = phf_ordered_set! {
"foo",
};
#[allow(dead_code)]
static NO_TRAILING_COMMA: phf::OrderedSet<&'static str> = phf_ordered_set! {
"foo"
};
#[test]
fn test_two() {
static SET: phf::OrderedSet<&'static str> = phf_ordered_set! {
"hello",
"there",
"world",
};
assert!(SET.contains(&"hello"));
assert!(SET.contains(&"there"));
assert!(SET.contains(&"world"));
assert!(!SET.contains(&"foo"));
assert_eq!(3, SET.len());
}
#[test]
fn test_get_index() {
static SET: phf::OrderedSet<&'static str> = phf_ordered_set! {
"foo",
"bar",
"baz",
};
assert_eq!(Some(0), SET.get_index(&"foo"));
assert_eq!(Some(2), SET.get_index(&"baz"));
assert_eq!(None, SET.get_index(&"xyz"));
assert_eq!(Some(0), SET.get_index(&*"foo".to_string()));
assert_eq!(Some(2), SET.get_index(&*"baz".to_string()));
assert_eq!(None, SET.get_index(&*"xyz".to_string()));
}
#[test]
fn test_index() {
static MAP: phf::OrderedSet<&'static str> = phf_ordered_set!(
"foo",
"bar",
);
assert_eq!(Some(&"foo"), MAP.index(0));
assert_eq!(Some(&"bar"), MAP.index(1));
assert_eq!(None, MAP.index(2));
}
#[test]
fn test_iter() {
static SET: phf::OrderedSet<&'static str> = phf_ordered_set! {
"hello",
"there",
"world",
};
let vec = SET.iter().map(|&e| e).collect::<Vec<_>>();
assert_eq!(vec, vec!("hello", "there", "world"));
}
#[test]
fn test_non_static_str_contains() {
static SET: phf::OrderedSet<&'static str> = phf_ordered_set! {
"hello",
"world",
};
assert!(SET.contains(&*"hello".to_string()));
}
#[test]
fn test_into_iterator() {
static SET: phf::OrderedSet<&'static str> = phf_ordered_set!(
"foo",
);
for e in &SET {
assert_eq!(&"foo", e);
}
}
}
| 24.817097 | 87 | 0.447008 |
563c8593e2920d453cdd63f93dd496c9dca8a543
| 681 |
use crate::formatter_traits::{FormatOptionalTokenAndNode, FormatTokenAndNode};
use crate::{format_elements, FormatElement, FormatResult, Formatter, ToFormatElement};
use rome_js_syntax::{TsNameWithTypeArguments, TsNameWithTypeArgumentsFields};
impl ToFormatElement for TsNameWithTypeArguments {
fn to_format_element(&self, formatter: &Formatter) -> FormatResult<FormatElement> {
let TsNameWithTypeArgumentsFields {
name,
type_arguments,
} = self.as_fields();
let name = name.format(formatter)?;
let type_arguments = type_arguments.format_or_empty(formatter)?;
Ok(format_elements![name, type_arguments])
}
}
| 40.058824 | 87 | 0.732746 |
d511e5ed20f8ec49a22d7c070240c1596a344c03
| 2,404 |
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// Gdb doesn't know about UTF-32 character encoding and will print a rust char as only
// its numerical value.
// compile-flags:-g
// gdb-command:rbreak zzz
// gdb-command:run
// gdb-command:finish
// gdb-command:print *bool_ref
// gdb-check:$1 = true
// gdb-command:print *int_ref
// gdb-check:$2 = -1
// gdb-command:print *char_ref
// gdb-check:$3 = 97
// gdb-command:print *i8_ref
// gdb-check:$4 = 68 'D'
// gdb-command:print *i16_ref
// gdb-check:$5 = -16
// gdb-command:print *i32_ref
// gdb-check:$6 = -32
// gdb-command:print *i64_ref
// gdb-check:$7 = -64
// gdb-command:print *uint_ref
// gdb-check:$8 = 1
// gdb-command:print *u8_ref
// gdb-check:$9 = 100 'd'
// gdb-command:print *u16_ref
// gdb-check:$10 = 16
// gdb-command:print *u32_ref
// gdb-check:$11 = 32
// gdb-command:print *u64_ref
// gdb-check:$12 = 64
// gdb-command:print *f32_ref
// gdb-check:$13 = 2.5
// gdb-command:print *f64_ref
// gdb-check:$14 = 3.5
#![allow(unused_variable)]
fn main() {
let bool_val: bool = true;
let bool_ref: &bool = &bool_val;
let int_val: int = -1;
let int_ref: &int = &int_val;
let char_val: char = 'a';
let char_ref: &char = &char_val;
let i8_val: i8 = 68;
let i8_ref: &i8 = &i8_val;
let i16_val: i16 = -16;
let i16_ref: &i16 = &i16_val;
let i32_val: i32 = -32;
let i32_ref: &i32 = &i32_val;
let uint_val: i64 = -64;
let i64_ref: &i64 = &uint_val;
let uint_val: uint = 1;
let uint_ref: &uint = &uint_val;
let u8_val: u8 = 100;
let u8_ref: &u8 = &u8_val;
let u16_val: u16 = 16;
let u16_ref: &u16 = &u16_val;
let u32_val: u32 = 32;
let u32_ref: &u32 = &u32_val;
let u64_val: u64 = 64;
let u64_ref: &u64 = &u64_val;
let f32_val: f32 = 2.5;
let f32_ref: &f32 = &f32_val;
let f64_val: f64 = 3.5;
let f64_ref: &f64 = &f64_val;
zzz();
}
fn zzz() {()}
| 21.854545 | 86 | 0.633111 |
dd826809c4a9b958d23df83b9c83a536aeb1b68c
| 1,424 |
//! Computation of basic block order in emitted code.
use crate::machinst::*;
/// Simple reverse postorder-based block order emission.
///
/// TODO: use a proper algorithm, such as the bottom-up straight-line-section
/// construction algorithm.
struct BlockRPO {
visited: Vec<bool>,
postorder: Vec<BlockIndex>,
}
impl BlockRPO {
fn new<I: VCodeInst>(vcode: &VCode<I>) -> BlockRPO {
BlockRPO {
visited: vec![false; vcode.num_blocks()],
postorder: Vec::with_capacity(vcode.num_blocks()),
}
}
fn visit<I: VCodeInst>(&mut self, vcode: &VCode<I>, block: BlockIndex) {
self.visited[block as usize] = true;
for succ in vcode.succs(block) {
if !self.visited[succ.get() as usize] {
self.visit(vcode, succ.get());
}
}
if Some(block) != vcode.fallthrough_return_block {
self.postorder.push(block);
}
}
fn rpo<I: VCodeInst>(self, vcode: &VCode<I>) -> Vec<BlockIndex> {
let mut rpo = self.postorder;
rpo.reverse();
if let Some(block) = vcode.fallthrough_return_block {
rpo.push(block);
}
rpo
}
}
/// Compute the final block order.
pub fn compute_final_block_order<I: VCodeInst>(vcode: &VCode<I>) -> Vec<BlockIndex> {
let mut rpo = BlockRPO::new(vcode);
rpo.visit(vcode, vcode.entry());
rpo.rpo(vcode)
}
| 28.48 | 85 | 0.595506 |
26989094fb5bdfa13c53162fdcc4713758894a5f
| 45,480 |
// Copyright (c) SimpleStaking, Viable Systems and Tezedge Contributors
// SPDX-License-Identifier: MIT
use std::fmt::Debug;
/// Rust implementation of messages required for Rust <-> OCaml FFI communication.
use std::{convert::TryFrom, fmt};
use derive_builder::Builder;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crypto::hash::{
BlockHash, BlockMetadataHash, ChainId, ContextHash, FromBytesError, OperationHash,
OperationMetadataHash, OperationMetadataListListHash, ProtocolHash,
};
use tezos_messages::p2p::binary_message::{MessageHash, MessageHashError};
use tezos_messages::p2p::encoding::block_header::{display_fitness, Fitness};
use tezos_messages::p2p::encoding::prelude::{
BlockHeader, Operation, OperationsForBlocksMessage, Path,
};
use url::Url;
pub mod ffi_error_ids {
pub const APPLY_ERROR: &str = "ffi.apply_error";
pub const CALL_ERROR: &str = "ffi.call_error";
pub const CALL_EXCEPTION: &str = "ffi.call_exception";
pub const INCONSISTENT_OPERATIONS_HASH: &str = "ffi.inconsistent_operations_hash";
pub const INCOMPLETE_OPERATIONS: &str = "ffi.incomplete_operations";
pub const PREDECESSOR_MISMATCH: &str = "ffi.predecessor_mismatch";
pub const UNAVAILABLE_PROTOCOL: &str = "ffi.unavailable_protocol";
pub const UNKNOWN_CONTEXT: &str = "ffi.unknown_context";
pub const UNKNOWN_PREDECESSOR_CONTEXT: &str = "ffi.unknown_predecessor_context";
}
pub type RustBytes = Vec<u8>;
/// Test chain information
#[derive(Debug, Serialize, Deserialize)]
pub struct TestChain {
pub chain_id: RustBytes,
pub protocol_hash: RustBytes,
pub expiration_date: String,
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub enum TezosRuntimeLogLevel {
App,
Error,
Warning,
Info,
Debug,
}
// Must be in sync with ffi_config.ml
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct TezosRuntimeConfiguration {
pub log_enabled: bool,
pub log_level: Option<TezosRuntimeLogLevel>,
}
#[derive(Clone, Serialize, Deserialize, Debug, Builder)]
pub struct ApplyBlockRequest {
pub chain_id: ChainId,
pub block_header: BlockHeader,
pub pred_header: BlockHeader,
pub max_operations_ttl: i32,
pub operations: Vec<Vec<Operation>>,
pub predecessor_block_metadata_hash: Option<BlockMetadataHash>,
pub predecessor_ops_metadata_hash: Option<OperationMetadataListListHash>,
}
impl ApplyBlockRequest {
pub fn convert_operations(
block_operations: Vec<OperationsForBlocksMessage>,
) -> Vec<Vec<Operation>> {
block_operations.into_iter().map(|ops| ops.into()).collect()
}
}
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct CycleRollsOwnerSnapshot {
pub cycle: i32,
pub seed_bytes: Vec<u8>,
pub rolls_data: Vec<(Vec<u8>, Vec<i32>)>,
pub last_roll: i32,
}
/// Application block result
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct ApplyBlockResponse {
pub validation_result_message: String,
pub context_hash: ContextHash,
pub protocol_hash: ProtocolHash,
pub next_protocol_hash: ProtocolHash,
pub block_header_proto_json: String,
pub block_header_proto_metadata_bytes: Vec<u8>,
pub operations_proto_metadata_bytes: Vec<Vec<Vec<u8>>>,
pub max_operations_ttl: i32,
pub last_allowed_fork_level: i32,
pub forking_testchain: bool,
pub forking_testchain_data: Option<ForkingTestchainData>,
pub block_metadata_hash: Option<BlockMetadataHash>,
pub ops_metadata_hashes: Option<Vec<Vec<OperationMetadataHash>>>,
// TODO: TE-207 - not needed, can be calculated from ops_metadata_hashes
/// Note: This is calculated from ops_metadata_hashes - we need this in the request.
/// It is calculated as a merkle tree hash, like operation paths.
pub ops_metadata_hash: Option<OperationMetadataListListHash>,
pub cycle_rolls_owner_snapshots: Vec<CycleRollsOwnerSnapshot>,
pub new_protocol_constants_json: Option<String>,
pub new_cycle_eras_json: Option<String>,
pub commit_time: f64,
}
#[derive(Clone, Serialize, Deserialize)]
pub struct PrevalidatorWrapper {
pub chain_id: ChainId,
pub protocol: ProtocolHash,
pub context_fitness: Option<Fitness>,
}
impl fmt::Debug for PrevalidatorWrapper {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"PrevalidatorWrapper[chain_id: {}, protocol: {}, context_fitness: {}]",
self.chain_id.to_base58_check(),
self.protocol.to_base58_check(),
match &self.context_fitness {
Some(fitness) => display_fitness(fitness),
None => "-none-".to_string(),
},
)
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct BeginApplicationRequest {
pub chain_id: ChainId,
pub pred_header: BlockHeader,
pub block_header: BlockHeader,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct BeginApplicationResponse {
pub result: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct BeginConstructionRequest {
pub chain_id: ChainId,
pub predecessor: BlockHeader,
pub protocol_data: Option<Vec<u8>>,
pub predecessor_block_metadata_hash: Option<BlockMetadataHash>,
pub predecessor_ops_metadata_hash: Option<OperationMetadataListListHash>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ValidateOperationRequest {
pub prevalidator: PrevalidatorWrapper,
pub operation: Operation,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ValidateOperationResponse {
pub prevalidator: PrevalidatorWrapper,
pub result: ValidateOperationResult,
pub validate_operation_started_at: f64,
pub validate_operation_ended_at: f64,
}
pub type OperationProtocolDataJson = String;
pub type ErrorListJson = String;
#[derive(Serialize, Deserialize, Clone)]
pub struct OperationProtocolDataJsonWithErrorListJson {
pub protocol_data_json: OperationProtocolDataJson,
pub error_json: ErrorListJson,
}
impl fmt::Debug for OperationProtocolDataJsonWithErrorListJson {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"[error_json: {}, protocol_data_json: (-stripped-)]",
&self.error_json,
)
}
}
trait HasOperationHash {
fn operation_hash(&self) -> &OperationHash;
}
#[derive(Clone, Serialize, Deserialize)]
pub struct Applied {
pub hash: OperationHash,
pub protocol_data_json: OperationProtocolDataJson,
}
impl HasOperationHash for Applied {
fn operation_hash(&self) -> &OperationHash {
&self.hash
}
}
impl fmt::Debug for Applied {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"[hash: {}, protocol_data_json: {}]",
self.hash.to_base58_check(),
&self.protocol_data_json
)
}
}
#[derive(Serialize, Deserialize, Clone)]
pub struct Errored {
pub hash: OperationHash,
pub is_endorsement: Option<bool>,
pub protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson,
}
impl HasOperationHash for Errored {
fn operation_hash(&self) -> &OperationHash {
&self.hash
}
}
impl fmt::Debug for Errored {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"[hash: {}, protocol_data_json_with_error_json: {:?}]",
self.hash.to_base58_check(),
&self.protocol_data_json_with_error_json
)
}
}
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct ValidateOperationResult {
pub applied: Vec<Applied>,
pub refused: Vec<Errored>,
pub branch_refused: Vec<Errored>,
pub branch_delayed: Vec<Errored>,
// TODO: outdated?
}
impl ValidateOperationResult {
/// Merges this result with a new one, returning `true` if something was changed
pub fn merge(&mut self, new_result: ValidateOperationResult) -> bool {
let mut changed = Self::merge_items(&mut self.applied, new_result.applied);
changed |= Self::merge_items(&mut self.refused, new_result.refused);
changed |= Self::merge_items(&mut self.branch_refused, new_result.branch_refused);
changed |= Self::merge_items(&mut self.branch_delayed, new_result.branch_delayed);
changed
}
fn merge_items<ITEM: HasOperationHash>(
result_items: &mut Vec<ITEM>,
new_items: Vec<ITEM>,
) -> bool {
let mut changed = false;
let mut added = false;
for new_item in new_items {
// check if present
let old_value = result_items
.iter()
.position(|old_item| old_item.operation_hash().eq(new_item.operation_hash()));
// replace or add
if let Some(idx) = old_value {
// replace
result_items[idx] = new_item;
changed |= true;
} else {
// add
result_items.push(new_item);
added |= true;
}
}
added || changed
}
pub fn operations_count(&self) -> usize {
self.applied.len()
+ self.branch_delayed.len()
+ self.branch_refused.len()
+ self.refused.len()
}
}
/// Init protocol context result
#[derive(Serialize, Deserialize, Clone)]
pub struct InitProtocolContextResult {
pub supported_protocol_hashes: Vec<ProtocolHash>,
/// Present only if genesis was committed to the context
pub genesis_commit_hash: Option<ContextHash>,
}
impl fmt::Debug for InitProtocolContextResult {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let genesis_commit_hash = match &self.genesis_commit_hash {
Some(hash) => hash.to_base58_check(),
None => "-none-".to_string(),
};
let supported_protocol_hashes = self
.supported_protocol_hashes
.iter()
.map(|ph| ph.to_base58_check())
.collect::<Vec<String>>();
write!(
f,
"genesis_commit_hash: {}, supported_protocol_hashes: {:?}",
&genesis_commit_hash, &supported_protocol_hashes
)
}
}
/// Commit genesis result
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct CommitGenesisResult {
pub block_header_proto_json: String,
pub block_header_proto_metadata_bytes: Vec<u8>,
pub operations_proto_metadata_bytes: Vec<Vec<Vec<u8>>>,
}
/// Forking test chain data
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct ForkingTestchainData {
pub forking_block_hash: BlockHash,
pub test_chain_id: ChainId,
}
/// Represents a trace of errors produced by Tezos.
///
/// `head_error_id` is the id of the main error in the trace, useful for mapping into a Rust error.
/// `trace_json` is the JSON of the trace.
#[derive(Debug)]
pub struct TezosErrorTrace {
pub head_error_id: String,
pub trace_json: String,
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum CallError {
#[error("Failed to call - error_id: {error_id} message: {trace_message:?}!")]
FailedToCall {
error_id: String,
trace_message: String,
},
#[error("Invalid request data - message: {message}!")]
InvalidRequestData { message: String },
#[error("Invalid response data - message: {message}!")]
InvalidResponseData { message: String },
}
impl From<TezosErrorTrace> for CallError {
fn from(error: TezosErrorTrace) -> Self {
CallError::FailedToCall {
error_id: error.head_error_id.clone(),
trace_message: error.trace_json,
}
}
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum TezosStorageInitError {
#[error("OCaml storage init failed, message: {message}!")]
InitializeError { message: String },
}
impl From<TezosErrorTrace> for TezosStorageInitError {
fn from(error: TezosErrorTrace) -> Self {
TezosStorageInitError::InitializeError {
message: error.trace_json,
}
}
}
impl From<FromBytesError> for TezosStorageInitError {
fn from(error: FromBytesError) -> Self {
TezosStorageInitError::InitializeError {
message: format!("Error constructing hash from bytes: {:?}", error),
}
}
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum GetDataError {
#[error("OCaml failed to get data, message: {message}!")]
ReadError { message: String },
}
impl From<TezosErrorTrace> for GetDataError {
fn from(error: TezosErrorTrace) -> Self {
GetDataError::ReadError {
message: error.trace_json,
}
}
}
#[derive(Serialize, Deserialize, Debug, Error, PartialEq)]
pub enum ApplyBlockError {
#[error("Incomplete operations, exptected: {expected}, has actual: {actual}!")]
IncompleteOperations { expected: usize, actual: usize },
#[error("Failed to apply block - message: {message}!")]
FailedToApplyBlock { message: String },
#[error("Unknown predecessor context - try to apply predecessor at first message: {message}!")]
UnknownPredecessorContext { message: String },
#[error("Predecessor does not match - message: {message}!")]
PredecessorMismatch { message: String },
#[error("Invalid request/response data - message: {message}!")]
InvalidRequestResponseData { message: String },
}
// Extracts the parameters from a JSON error coming from the protocol runner like:
// [{"kind":"permanent","id":"ffi.incomplete_operations","expected":4,"actual":1}]
// If cannot be parsed, returns 0 as the values.
fn extract_incomplete_operation_values_from_json(json: &str) -> (usize, usize) {
let json = serde_json::from_str::<serde_json::Value>(json).unwrap_or(serde_json::Value::Null);
let expected = json[0]["expected"].as_u64().unwrap_or(0) as usize;
let actual = json[0]["actual"].as_u64().unwrap_or(0) as usize;
(expected, actual)
}
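// A minimal, hypothetical test sketch (not part of the original suite) illustrating the
// expected/actual extraction above, including the (0, 0) fallback on unparseable JSON.
#[cfg(test)]
mod incomplete_operations_json_sketch {
    #[test]
    fn extracts_expected_and_actual_or_falls_back_to_zero() {
        let json =
            r#"[{"kind":"permanent","id":"ffi.incomplete_operations","expected":4,"actual":1}]"#;
        assert_eq!(
            (4, 1),
            super::extract_incomplete_operation_values_from_json(json)
        );
        // Unparseable input falls back to zeros rather than failing
        assert_eq!(
            (0, 0),
            super::extract_incomplete_operation_values_from_json("not json")
        );
    }
}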
impl From<CallError> for ApplyBlockError {
fn from(error: CallError) -> Self {
match error {
CallError::FailedToCall {
error_id,
trace_message,
} => match error_id.as_str() {
ffi_error_ids::UNKNOWN_PREDECESSOR_CONTEXT => {
ApplyBlockError::UnknownPredecessorContext {
message: trace_message,
}
}
ffi_error_ids::PREDECESSOR_MISMATCH => ApplyBlockError::PredecessorMismatch {
message: trace_message,
},
ffi_error_ids::INCOMPLETE_OPERATIONS => {
let (expected, actual) =
extract_incomplete_operation_values_from_json(&trace_message);
ApplyBlockError::IncompleteOperations { expected, actual }
}
_ => ApplyBlockError::FailedToApplyBlock {
message: trace_message,
},
},
CallError::InvalidRequestData { message } => {
ApplyBlockError::InvalidRequestResponseData { message }
}
CallError::InvalidResponseData { message } => {
ApplyBlockError::InvalidRequestResponseData { message }
}
}
}
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum BeginApplicationError {
#[error("Failed to begin application - message: {message}!")]
FailedToBeginApplication { message: String },
#[error("Unknown predecessor context - try to apply predecessor at first message: {message}!")]
UnknownPredecessorContext { message: String },
#[error("Invalid request/response data - message: {message}!")]
InvalidRequestResponseData { message: String },
}
impl From<CallError> for BeginApplicationError {
fn from(error: CallError) -> Self {
match error {
CallError::FailedToCall {
error_id,
trace_message,
} => match error_id.as_str() {
ffi_error_ids::UNKNOWN_PREDECESSOR_CONTEXT => {
BeginApplicationError::UnknownPredecessorContext {
message: trace_message,
}
}
_ => BeginApplicationError::FailedToBeginApplication {
message: trace_message,
},
},
CallError::InvalidRequestData { message } => {
BeginApplicationError::InvalidRequestResponseData { message }
}
CallError::InvalidResponseData { message } => {
BeginApplicationError::InvalidRequestResponseData { message }
}
}
}
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum BeginConstructionError {
#[error("Failed to begin construction - message: {message}!")]
FailedToBeginConstruction { message: String },
#[error("Unknown predecessor context - try to apply predecessor at first message: {message}!")]
UnknownPredecessorContext { message: String },
#[error("Invalid request/response data - message: {message}!")]
InvalidRequestResponseData { message: String },
}
impl From<CallError> for BeginConstructionError {
fn from(error: CallError) -> Self {
match error {
CallError::FailedToCall {
error_id,
trace_message,
} => match error_id.as_str() {
ffi_error_ids::UNKNOWN_PREDECESSOR_CONTEXT => {
BeginConstructionError::UnknownPredecessorContext {
message: trace_message,
}
}
_ => BeginConstructionError::FailedToBeginConstruction {
message: trace_message,
},
},
CallError::InvalidRequestData { message } => {
BeginConstructionError::InvalidRequestResponseData { message }
}
CallError::InvalidResponseData { message } => {
BeginConstructionError::InvalidRequestResponseData { message }
}
}
}
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum ValidateOperationError {
#[error("Failed to validate operation - message: {message}!")]
FailedToValidateOperation { message: String },
#[error("Invalid request/response data - message: {message}!")]
InvalidRequestResponseData { message: String },
}
impl From<CallError> for ValidateOperationError {
fn from(error: CallError) -> Self {
match error {
CallError::FailedToCall { trace_message, .. } => {
ValidateOperationError::FailedToValidateOperation {
message: trace_message,
}
}
CallError::InvalidRequestData { message } => {
ValidateOperationError::InvalidRequestResponseData { message }
}
CallError::InvalidResponseData { message } => {
ValidateOperationError::InvalidRequestResponseData { message }
}
}
}
}
// NOTE: used by decode_context_data, which is unused at the moment
#[derive(Debug, Error)]
pub enum ContextDataError {
#[error("Resolve/decode context data failed to decode: {message}!")]
DecodeError { message: String },
}
impl From<TezosErrorTrace> for ContextDataError {
fn from(error: TezosErrorTrace) -> Self {
ContextDataError::DecodeError {
message: error.trace_json,
}
}
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum ProtocolDataError {
#[error("Resolve/decode context data failed to decode: {message}!")]
DecodeError { message: String },
}
impl From<TezosErrorTrace> for ProtocolDataError {
fn from(error: TezosErrorTrace) -> Self {
ProtocolDataError::DecodeError {
message: error.trace_json,
}
}
}
impl From<FromBytesError> for ProtocolDataError {
fn from(error: FromBytesError) -> Self {
ProtocolDataError::DecodeError {
message: format!("Error constructing hash from bytes: {:?}", error),
}
}
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum FfiJsonEncoderError {
#[error("FFI JSON encoding error: {message}!")]
EncodeError { message: String },
}
impl From<TezosErrorTrace> for FfiJsonEncoderError {
fn from(error: TezosErrorTrace) -> Self {
FfiJsonEncoderError::EncodeError {
message: error.trace_json,
}
}
}
pub type Json = String;
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct RpcRequest {
pub body: Json,
pub context_path: String,
pub meth: RpcMethod,
pub content_type: Option<String>,
pub accept: Option<String>,
}
impl RpcRequest {
/// Produces a key for the routed requests cache.
///
/// The cache key is just the original path+query, with a bit of normalization
/// and the "/chains/:chan_id/blocks/:block_id" prefix removed if present.
pub fn ffi_rpc_router_cache_key(&self) -> String {
Self::ffi_rpc_router_cache_key_helper(&self.context_path)
}
fn ffi_rpc_router_cache_key_helper(path: &str) -> String {
let base = match Url::parse("http://tezedge.com") {
Ok(base) => base,
Err(_) => return path.to_string(),
};
let parsed = Url::options().base_url(Some(&base)).parse(path).unwrap();
let normalized_path = match parsed.query() {
Some(query) => format!("{}?{}", parsed.path().trim_end_matches('/'), query),
None => parsed.path().trim_end_matches('/').to_string(),
};
let mut segments = match parsed.path_segments() {
Some(segments) => segments,
// Not the subpath we expect, bail-out
None => return normalized_path,
};
// /chains/:chain_id/blocks/:block_id
let (chains, _, blocks, _) = (
segments.next(),
segments.next(),
segments.next(),
segments.next(),
);
match (chains, blocks) {
(Some("chains"), Some("blocks")) => (),
// Not the subpath we expect, bail-out
_ => return normalized_path,
}
let remaining: Vec<_> = segments.filter(|s| !s.is_empty()).collect();
let subpath = remaining.join("/");
// We only care about subpaths, bail-out
if subpath.is_empty() {
return normalized_path;
}
if let Some(query) = parsed.query() {
format!("/{}?{}", subpath, query)
} else {
format!("/{}", subpath)
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct HelpersPreapplyBlockRequest {
pub protocol_rpc_request: ProtocolRpcRequest,
pub predecessor_block_metadata_hash: Option<BlockMetadataHash>,
pub predecessor_ops_metadata_hash: Option<OperationMetadataListListHash>,
pub predecessor_max_operations_ttl: i32,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct HelpersPreapplyResponse {
pub body: Json,
}
#[derive(Serialize, Deserialize, Debug)]
pub enum ProtocolRpcResponse {
RPCConflict(Option<String>),
RPCCreated(Option<String>),
RPCError(Option<String>),
RPCForbidden(Option<String>),
RPCGone(Option<String>),
RPCNoContent,
RPCNotFound(Option<String>),
RPCOk(String),
RPCUnauthorized,
}
fn body_or_empty(body: &Option<String>) -> String {
match body {
Some(body) => body.clone(),
None => "".to_string(),
}
}
impl ProtocolRpcResponse {
pub fn status_code(&self) -> u16 {
// These HTTP codes are mapped from what the `resto` OCaml library defines
match self {
ProtocolRpcResponse::RPCConflict(_) => 409,
ProtocolRpcResponse::RPCCreated(_) => 201,
ProtocolRpcResponse::RPCError(_) => 500,
ProtocolRpcResponse::RPCForbidden(_) => 403,
ProtocolRpcResponse::RPCGone(_) => 410,
ProtocolRpcResponse::RPCNoContent => 204,
ProtocolRpcResponse::RPCNotFound(_) => 404,
ProtocolRpcResponse::RPCOk(_) => 200,
ProtocolRpcResponse::RPCUnauthorized => 401,
}
}
pub fn body_json_string_or_empty(&self) -> String {
match self {
ProtocolRpcResponse::RPCConflict(body) => body_or_empty(body),
ProtocolRpcResponse::RPCCreated(body) => body_or_empty(body),
ProtocolRpcResponse::RPCError(body) => body_or_empty(body),
ProtocolRpcResponse::RPCForbidden(body) => body_or_empty(body),
ProtocolRpcResponse::RPCGone(body) => body_or_empty(body),
ProtocolRpcResponse::RPCNoContent => "".to_string(),
ProtocolRpcResponse::RPCNotFound(body) => body_or_empty(body),
ProtocolRpcResponse::RPCOk(body) => body.clone(),
ProtocolRpcResponse::RPCUnauthorized => "".to_string(),
}
}
}
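// A hypothetical test sketch (not part of the original suite) showing the resto-derived
// status-code mapping and the empty-body fallback implemented above.
#[cfg(test)]
mod protocol_rpc_response_sketch {
    use super::ProtocolRpcResponse;

    #[test]
    fn status_codes_and_body_fallbacks() {
        assert_eq!(200, ProtocolRpcResponse::RPCOk("{}".to_string()).status_code());
        assert_eq!(204, ProtocolRpcResponse::RPCNoContent.status_code());
        assert_eq!(404, ProtocolRpcResponse::RPCNotFound(None).status_code());
        // Missing bodies collapse to an empty string
        assert_eq!(
            "",
            ProtocolRpcResponse::RPCNotFound(None).body_json_string_or_empty()
        );
        assert_eq!(
            "{}",
            ProtocolRpcResponse::RPCOk("{}".to_string()).body_json_string_or_empty()
        );
    }
}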
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum RpcMethod {
DELETE,
GET,
PATCH,
POST,
PUT,
}
impl TryFrom<&str> for RpcMethod {
type Error = String;
fn try_from(s: &str) -> Result<Self, Self::Error> {
let upper = s.to_uppercase();
match upper.as_ref() {
"DELETE" => Ok(RpcMethod::DELETE),
"GET" => Ok(RpcMethod::GET),
"PATCH" => Ok(RpcMethod::PATCH),
"POST" => Ok(RpcMethod::POST),
"PUT" => Ok(RpcMethod::PUT),
other => Err(format!("Invalid RPC method: {:?}", other)),
}
}
}
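// A hypothetical sketch (not part of the original suite) of the case-insensitive parsing
// implemented by the `TryFrom<&str>` conversion above.
#[cfg(test)]
mod rpc_method_sketch {
    use super::RpcMethod;
    use std::convert::TryFrom;

    #[test]
    fn parses_case_insensitively_and_rejects_unknown_verbs() {
        assert!(matches!(RpcMethod::try_from("get"), Ok(RpcMethod::GET)));
        assert!(matches!(RpcMethod::try_from("Put"), Ok(RpcMethod::PUT)));
        assert!(RpcMethod::try_from("TRACE").is_err());
    }
}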
#[derive(Serialize, Deserialize, Debug)]
pub struct RpcArgDesc {
pub name: String,
pub descr: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum ProtocolRpcError {
#[error("RPC: cannot parse body: {0}")]
RPCErrorCannotParseBody(String),
#[error("RPC: cannot parse path: {0:?}, arg_desc={1:?}, message: {2}")]
RPCErrorCannotParsePath(Vec<String>, RpcArgDesc, String),
#[error("RPC: cannot parse query: {0}")]
RPCErrorCannotParseQuery(String),
#[error("RPC: invalid method string: {0}")]
RPCErrorInvalidMethodString(String),
#[error("RPC: method not allowed: {0:?}")]
RPCErrorMethodNotAllowed(Vec<RpcMethod>),
#[error("RPC: service not found")]
RPCErrorServiceNotFound,
#[error("RPC: Failed to call protocol RPC - message: {0}!")]
FailedToCallProtocolRpc(String),
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ProtocolRpcRequest {
pub block_header: BlockHeader,
pub chain_arg: String,
pub chain_id: ChainId,
pub request: RpcRequest,
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum HelpersPreapplyError {
#[error("Failed to call protocol rpc - message: {message}!")]
FailedToCallProtocolRpc { message: String },
#[error("Invalid request data - message: {message}!")]
InvalidRequestData { message: String },
#[error("Invalid response data - message: {message}!")]
InvalidResponseData { message: String },
}
impl From<CallError> for HelpersPreapplyError {
fn from(error: CallError) -> Self {
match error {
CallError::FailedToCall { trace_message, .. } => {
HelpersPreapplyError::FailedToCallProtocolRpc {
message: trace_message,
}
}
CallError::InvalidRequestData { message } => {
HelpersPreapplyError::InvalidRequestData { message }
}
CallError::InvalidResponseData { message } => {
HelpersPreapplyError::InvalidResponseData { message }
}
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ComputePathRequest {
pub operations: Vec<Vec<OperationHash>>,
}
impl TryFrom<&Vec<Vec<Operation>>> for ComputePathRequest {
type Error = MessageHashError;
fn try_from(ops: &Vec<Vec<Operation>>) -> Result<Self, Self::Error> {
let mut operation_hashes = Vec::with_capacity(ops.len());
for inner_ops in ops {
let mut iophs = Vec::with_capacity(inner_ops.len());
for op in inner_ops {
iophs.push(OperationHash::try_from(op.message_hash()?)?);
}
operation_hashes.push(iophs);
}
Ok(ComputePathRequest {
operations: operation_hashes,
})
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ComputePathResponse {
pub operations_hashes_path: Vec<Path>,
}
#[derive(Serialize, Deserialize, Debug, Error)]
pub enum ComputePathError {
#[error("Path computation failed, message: {message}!")]
PathError { message: String },
#[error("Path computation failed, message: {message}!")]
InvalidRequestResponseData { message: String },
}
impl From<CallError> for ComputePathError {
fn from(error: CallError) -> Self {
match error {
CallError::FailedToCall { trace_message, .. } => ComputePathError::PathError {
message: trace_message,
},
CallError::InvalidRequestData { message } => {
ComputePathError::InvalidRequestResponseData { message }
}
CallError::InvalidResponseData { message } => {
ComputePathError::InvalidRequestResponseData { message }
}
}
}
}
/// Error types generated by a tezos protocol.
#[derive(Error, Debug)]
pub enum ProtocolError {
/// Protocol rejected to apply a block.
#[error("Apply block error: {reason}")]
ApplyBlockError { reason: ApplyBlockError },
#[error("Assert encoding for protocol data error: {reason}")]
AssertEncodingForProtocolDataError { reason: ProtocolDataError },
#[error("Begin construction error: {reason}")]
BeginApplicationError { reason: BeginApplicationError },
#[error("Begin construction error: {reason}")]
BeginConstructionError { reason: BeginConstructionError },
#[error("Validate operation error: {reason}")]
ValidateOperationError { reason: ValidateOperationError },
#[error("Protocol rpc call error: {reason}")]
ProtocolRpcError {
reason: ProtocolRpcError,
request_path: String,
},
#[error("Helper Preapply call error: {reason}")]
HelpersPreapplyError { reason: HelpersPreapplyError },
#[error("Compute path call error: {reason}")]
ComputePathError { reason: ComputePathError },
/// OCaml part failed to initialize tezos storage.
#[error("OCaml storage init error: {reason}")]
OcamlStorageInitError { reason: TezosStorageInitError },
/// OCaml part failed to get genesis data.
#[error("Failed to get genesis data: {reason}")]
GenesisResultDataError { reason: GetDataError },
#[error("Failed to decode binary data to json ({caller}): {reason}")]
FfiJsonEncoderError {
caller: String,
reason: FfiJsonEncoderError,
},
#[error("Failed to get key from history: {reason}")]
ContextGetKeyFromHistoryError { reason: String },
#[error("Failed to get values by prefix: {reason}")]
ContextGetKeyValuesByPrefixError { reason: String },
}
#[cfg(test)]
mod tests {
use std::convert::TryInto;
use assert_json_diff::assert_json_eq;
use tezos_context_api::ProtocolOverrides;
use super::*;
#[test]
fn test_validate_operation_result_merge() {
let mut validate_result1 = validate_operation_result(
"onvN8U6QJ6DGJKVYkHXYRtFm3tgBJScj9P5bbPjSZUuFaGzwFuJ",
"opVUxMhZttd858HXEHCgchknnnZFmUExtHrbmVSh1G9Pg24X1Pj",
);
assert_eq!(2, validate_result1.applied.len());
assert_eq!(2, validate_result1.refused.len());
assert_eq!(2, validate_result1.branch_delayed.len());
assert_eq!(2, validate_result1.branch_refused.len());
// merge empty -> no change
assert_eq!(
false,
validate_result1.merge(ValidateOperationResult {
applied: vec![],
refused: vec![],
branch_refused: vec![],
branch_delayed: vec![],
})
);
assert_eq!(2, validate_result1.applied.len());
assert_eq!(2, validate_result1.refused.len());
assert_eq!(2, validate_result1.branch_delayed.len());
assert_eq!(2, validate_result1.branch_refused.len());
// merge
let validate_result2 = validate_operation_result(
"onvN8U6QJ6DGJKVYkHXYRtFm3tgBJScj9P5bbPjSZUuFaGzwFuJ",
"opJ4FdKumPfykAP9ZqwY7rNB8y1SiMupt44RqBDMWL7cmb4xbNr",
);
assert!(validate_result1.merge(validate_result2));
assert_eq!(3, validate_result1.applied.len());
assert_eq!(3, validate_result1.refused.len());
assert_eq!(3, validate_result1.branch_delayed.len());
assert_eq!(3, validate_result1.branch_refused.len());
}
#[test]
fn test_validate_operation_result_merge_items() -> Result<(), anyhow::Error> {
let mut validate_result = ValidateOperationResult {
applied: vec![],
refused: vec![],
branch_refused: vec![],
branch_delayed: vec![],
};
assert_eq!(0, validate_result.applied.len());
assert_eq!(
false,
ValidateOperationResult::merge_items(&mut validate_result.applied, vec![])
);
assert!(ValidateOperationResult::merge_items(
&mut validate_result.applied,
vec![Applied {
hash: "onvN8U6QJ6DGJKVYkHXYRtFm3tgBJScj9P5bbPjSZUuFaGzwFuJ".try_into()?,
protocol_data_json: "protocol_data_json1".to_string(),
},],
));
assert_eq!(1, validate_result.applied.len());
assert_eq!(
"protocol_data_json1",
validate_result.applied[0].protocol_data_json
);
// merge the same -> test change
assert!(ValidateOperationResult::merge_items(
&mut validate_result.applied,
vec![Applied {
hash: "onvN8U6QJ6DGJKVYkHXYRtFm3tgBJScj9P5bbPjSZUuFaGzwFuJ".try_into()?,
protocol_data_json: "protocol_data_json2".to_string(),
},],
));
assert_eq!(1, validate_result.applied.len());
assert_eq!(
"protocol_data_json2",
validate_result.applied[0].protocol_data_json
);
// merge another new one
assert!(ValidateOperationResult::merge_items(
&mut validate_result.applied,
vec![Applied {
hash: "opJ4FdKumPfykAP9ZqwY7rNB8y1SiMupt44RqBDMWL7cmb4xbNr".try_into()?,
protocol_data_json: "protocol_data_json2".to_string(),
},],
));
assert_eq!(2, validate_result.applied.len());
Ok(())
}
fn validate_operation_result(op1: &str, op2: &str) -> ValidateOperationResult {
let applied = vec![
Applied {
hash: op1.try_into().expect("Error"),
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
},
Applied {
hash: op2.try_into().expect("Error"),
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
}
];
let branch_delayed = vec![
Errored {
hash: op1.try_into().expect("Error"),
is_endorsement: None,
protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson {
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
error_json: "[ { \"kind\": \"temporary\",\n \"id\": \"proto.005-PsBabyM1.operation.wrong_endorsement_predecessor\",\n \"expected\": \"BMDb9PfcJmiibDDEbd6bEEDj4XNG4C7QACG6TWqz29c9FxNgDLL\",\n \"provided\": \"BLd8dLs4X5Ve6a8B37kUu7iJkRycWzfSF5MrskY4z8YaideQAp4\" } ]".to_string(),
},
},
Errored {
hash: op2.try_into().expect("Error"),
is_endorsement: None,
protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson {
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
error_json: "[ { \"kind\": \"temporary\",\n \"id\": \"proto.005-PsBabyM1.operation.wrong_endorsement_predecessor\",\n \"expected\": \"BMDb9PfcJmiibDDEbd6bEEDj4XNG4C7QACG6TWqz29c9FxNgDLL\",\n \"provided\": \"BLd8dLs4X5Ve6a8B37kUu7iJkRycWzfSF5MrskY4z8YaideQAp4\" } ]".to_string(),
},
}
];
let branch_refused = vec![
Errored {
hash: op1.try_into().expect("Error"),
is_endorsement: None,
protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson {
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
error_json: "[ { \"kind\": \"temporary\",\n \"id\": \"proto.005-PsBabyM1.operation.wrong_endorsement_predecessor\",\n \"expected\": \"BMDb9PfcJmiibDDEbd6bEEDj4XNG4C7QACG6TWqz29c9FxNgDLL\",\n \"provided\": \"BLd8dLs4X5Ve6a8B37kUu7iJkRycWzfSF5MrskY4z8YaideQAp4\" } ]".to_string(),
},
},
Errored {
hash: op2.try_into().expect("Error"),
is_endorsement: None,
protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson {
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
error_json: "[ { \"kind\": \"temporary\",\n \"id\": \"proto.005-PsBabyM1.operation.wrong_endorsement_predecessor\",\n \"expected\": \"BMDb9PfcJmiibDDEbd6bEEDj4XNG4C7QACG6TWqz29c9FxNgDLL\",\n \"provided\": \"BLd8dLs4X5Ve6a8B37kUu7iJkRycWzfSF5MrskY4z8YaideQAp4\" } ]".to_string(),
},
}
];
let refused = vec![
Errored {
hash: op1.try_into().expect("Error"),
is_endorsement: None,
protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson {
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
error_json: "[ { \"kind\": \"temporary\",\n \"id\": \"proto.005-PsBabyM1.operation.wrong_endorsement_predecessor\",\n \"expected\": \"BMDb9PfcJmiibDDEbd6bEEDj4XNG4C7QACG6TWqz29c9FxNgDLL\",\n \"provided\": \"BLd8dLs4X5Ve6a8B37kUu7iJkRycWzfSF5MrskY4z8YaideQAp4\" } ]".to_string(),
},
},
Errored {
hash: op2.try_into().expect("Error"),
is_endorsement: None,
protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson {
protocol_data_json: "{ \"contents\": [ { \"kind\": \"endorsement\", \"level\": 459020 } ],\n \"signature\":\n \"siguKbKFVDkXo2m1DqZyftSGg7GZRq43EVLSutfX5yRLXXfWYG5fegXsDT6EUUqawYpjYE1GkyCVHfc2kr3hcaDAvWSAhnV9\" }".to_string(),
error_json: "[ { \"kind\": \"temporary\",\n \"id\": \"proto.005-PsBabyM1.operation.wrong_endorsement_predecessor\",\n \"expected\": \"BMDb9PfcJmiibDDEbd6bEEDj4XNG4C7QACG6TWqz29c9FxNgDLL\",\n \"provided\": \"BLd8dLs4X5Ve6a8B37kUu7iJkRycWzfSF5MrskY4z8YaideQAp4\" } ]".to_string(),
},
}
];
ValidateOperationResult {
applied,
branch_delayed,
branch_refused,
refused,
}
}
#[test]
fn test_rpc_format_user_activated_upgrades() -> Result<(), anyhow::Error> {
let expected_json = serde_json::json!(
[
{
"level": 28082,
"replacement_protocol": "PsYLVpVvgbLhAhoqAkMFUo6gudkJ9weNXhUYCiLDzcUpFpkk8Wt"
},
{
"level": 204761,
"replacement_protocol": "PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP"
}
]
);
let protocol_overrides = ProtocolOverrides {
user_activated_upgrades: vec![
(
28082_i32,
"PsYLVpVvgbLhAhoqAkMFUo6gudkJ9weNXhUYCiLDzcUpFpkk8Wt".to_string(),
),
(
204761_i32,
"PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP".to_string(),
),
],
user_activated_protocol_overrides: vec![],
};
assert_json_eq!(
expected_json,
serde_json::to_value(protocol_overrides.user_activated_upgrades_to_rpc_json())?
);
Ok(())
}
#[test]
fn test_rpc_format_user_activated_protocol_overrides() -> Result<(), anyhow::Error> {
let expected_json = serde_json::json!(
[
{
"replaced_protocol": "PsBABY5HQTSkA4297zNHfsZNKtxULfL18y95qb3m53QJiXGmrbU",
"replacement_protocol": "PsBabyM1eUXZseaJdmXFApDSBqj8YBfwELoxZHHW77EMcAbbwAS"
}
]
);
let protocol_overrides = ProtocolOverrides {
user_activated_upgrades: vec![],
user_activated_protocol_overrides: vec![(
"PsBABY5HQTSkA4297zNHfsZNKtxULfL18y95qb3m53QJiXGmrbU".to_string(),
"PsBabyM1eUXZseaJdmXFApDSBqj8YBfwELoxZHHW77EMcAbbwAS".to_string(),
)],
};
assert_json_eq!(
expected_json,
serde_json::to_value(
protocol_overrides.user_activated_protocol_overrides_to_rpc_json()
)?
);
Ok(())
}
#[test]
fn test_rpc_ffi_rpc_router_cache_key_helper() {
let with_prefix_to_remove = "/chains/main/blocks/head/some/subpath/url";
assert_eq!(
"/some/subpath/url".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(with_prefix_to_remove)
);
let without_prefix_to_remove = "/chains/main/something/else/some/subpath/url";
assert_eq!(
without_prefix_to_remove.to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(without_prefix_to_remove)
);
let without_suffix = "/chains/main/blocks/head/";
assert_eq!(
"/chains/main/blocks/head".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(without_suffix)
);
let without_prefix_to_remove_short = "/chains/main/";
assert_eq!(
"/chains/main".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(without_prefix_to_remove_short)
);
let with_prefix_to_remove_and_query =
"/chains/main/blocks/head/some/subpath/url?query=args&with-slash=/slash";
assert_eq!(
"/some/subpath/url?query=args&with-slash=/slash".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(with_prefix_to_remove_and_query)
);
let without_suffix_and_query = "/chains/main/blocks/head/?query=1";
assert_eq!(
"/chains/main/blocks/head?query=1".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(without_suffix_and_query)
);
let without_suffix_and_slashes = "/chains/main/blocks/head//";
assert_eq!(
"/chains/main/blocks/head".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(without_suffix_and_slashes)
);
let without_suffix_and_sharp = "/chains/main/blocks/head/#";
assert_eq!(
"/chains/main/blocks/head".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(without_suffix_and_sharp)
);
let with_prefix_to_remove_with_question_mark = "/chains/main?/blocks/head/some/subpath/url";
assert_eq!(
with_prefix_to_remove_with_question_mark.to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(with_prefix_to_remove_with_question_mark)
);
let with_prefix_to_remove_with_sharp = "/chains/main#/blocks/head/some/subpath/url";
assert_eq!(
"/chains/main".to_string(),
RpcRequest::ffi_rpc_router_cache_key_helper(with_prefix_to_remove_with_sharp)
);
}
}
| 36.915584 | 307 | 0.631684 |
29515fff910aeb922aec69da3785e5726cfbc0b0
| 3,559 |
//! Debug printing.
use std::fmt;
use rustc::hir;
use rustc::ty::TyKind;
use rustc_data_structures::indexed_vec::Idx;
use crate::analysis::labeled_ty::LabeledTy;
use super::{ConcretePerm, Perm};
pub struct Pretty<'lty, 'tcx, L: 'lty>(pub LabeledTy<'lty, 'tcx, L>);
pub fn pretty_slice<'lty, 'tcx, L>(
tys: &'lty [LabeledTy<'lty, 'tcx, L>],
) -> &'lty [Pretty<'lty, 'tcx, L>] {
unsafe { ::std::mem::transmute(tys) }
}
pub struct PrettyLabel<L>(pub L);
impl fmt::Debug for PrettyLabel<ConcretePerm> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self.0 {
ConcretePerm::Read => write!(fmt, "READ"),
ConcretePerm::Write => write!(fmt, "WRITE"),
ConcretePerm::Move => write!(fmt, "MOVE"),
}
}
}
impl<L> fmt::Debug for PrettyLabel<Option<L>>
where
L: Copy,
PrettyLabel<L>: fmt::Debug,
{
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self.0 {
Some(x) => write!(fmt, "{:?}", PrettyLabel(x)),
None => Ok(()),
}
}
}
impl<'tcx> fmt::Debug for PrettyLabel<bool> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if self.0 {
write!(fmt, "T")
} else {
write!(fmt, "F")
}
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct PrintVar<'tcx>(pub Perm<'tcx>);
impl<'tcx> fmt::Debug for PrettyLabel<(ConcretePerm, PrintVar<'tcx>)> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(
fmt,
"{:?}{:?}",
PrettyLabel((self.0).0),
PrettyLabel((self.0).1)
)
}
}
impl<'tcx> fmt::Debug for PrettyLabel<PrintVar<'tcx>> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match (self.0).0 {
Perm::Concrete(_) => Ok(()),
Perm::StaticVar(v) => write!(fmt, "#s{}", v.index()),
Perm::SigVar(v) => write!(fmt, "#f{}", v.index()),
Perm::InstVar(v) => write!(fmt, "#i{}", v.index()),
Perm::LocalVar(v) => write!(fmt, "#l{}", v.index()),
Perm::Min(ps) => {
write!(fmt, "#min(")?;
let mut first = true;
for &p in ps {
match p {
Perm::Concrete(_) => continue,
_ => {}
}
if !first {
write!(fmt, ", ")?;
}
first = false;
write!(fmt, "{:?}", PrettyLabel(PrintVar(p)))?;
}
write!(fmt, ")")
}
}
}
}
impl<'lty, 'tcx, L> fmt::Debug for Pretty<'lty, 'tcx, L>
where
L: Copy + fmt::Debug,
PrettyLabel<L>: fmt::Debug,
{
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self.0.ty.sty {
TyKind::Ref(_, _, m) => write!(
fmt,
"&{}{:?} {:?}",
if m == hir::MutImmutable { "" } else { "mut " },
PrettyLabel(self.0.label),
Pretty(self.0.args[0])
),
TyKind::RawPtr(mty) => write!(
fmt,
"*{} {:?} {:?}",
if mty.mutbl == hir::MutImmutable {
"const"
} else {
"mut"
},
PrettyLabel(self.0.label),
Pretty(self.0.args[0])
),
_ => write!(fmt, "{:?}", self.0.ty),
}
}
}
| 27.804688 | 71 | 0.439449 |
2638f0b6a5bcd2f942c7e7253dcb7f28998f209a
| 1,866 |
mod helper;
mod subcommand;
use ckb_app_config::{cli, ExitCode, Setup};
use ckb_build_info::Version;
pub(crate) const LOG_TARGET_MAIN: &str = "main";
pub fn run_app(version: Version) -> Result<(), ExitCode> {
// Always print backtrace on panic.
::std::env::set_var("RUST_BACKTRACE", "full");
let app_matches = cli::get_matches(&version);
match app_matches.subcommand() {
(cli::CMD_INIT, Some(matches)) => return subcommand::init(Setup::init(&matches)?),
(cli::CMD_CLI, Some(matches)) => {
return match matches.subcommand() {
(cli::CMD_BLAKE160, Some(sub_matches)) => subcommand::cli::blake160(sub_matches),
(cli::CMD_BLAKE256, Some(sub_matches)) => subcommand::cli::blake256(sub_matches),
(cli::CMD_SECP256K1_LOCK, Some(sub_matches)) => {
subcommand::cli::secp256k1_lock(sub_matches)
}
(cli::CMD_HASHES, Some(sub_matches)) => {
subcommand::cli::hashes(Setup::root_dir_from_matches(&matches)?, sub_matches)
}
_ => unreachable!(),
};
}
_ => {
// continue
}
}
let setup = Setup::from_matches(&app_matches)?;
let _guard = setup.setup_app(&version);
match app_matches.subcommand() {
(cli::CMD_RUN, _) => subcommand::run(setup.run()?, version),
(cli::CMD_MINER, _) => subcommand::miner(setup.miner()?),
(cli::CMD_PROF, Some(matches)) => subcommand::profile(setup.prof(&matches)?),
(cli::CMD_EXPORT, Some(matches)) => subcommand::export(setup.export(&matches)?),
(cli::CMD_IMPORT, Some(matches)) => subcommand::import(setup.import(&matches)?),
(cli::CMD_STATS, Some(matches)) => subcommand::stats(setup.stats(&matches)?),
_ => unreachable!(),
}
}
| 39.702128 | 97 | 0.587889 |
abe22fd29d373f906f8f4a43ec1492f5d4a6b5a1
| 2,038 |
use skia_bindings as sb;
use std::ptr;
pub use skia_bindings::GrBackendApi as BackendAPI;
variant_name!(BackendAPI::Dawn, backend_api_naming);
// TODO: this should be a newtype(bool) I guess with implementations
// of From<bool> and Deref?
pub use skia_bindings::GrMipmapped as Mipmapped;
#[deprecated(since = "0.35.0", note = "Use Mipmapped (with a lowercase 'm')")]
pub use skia_bindings::GrMipmapped as MipMapped;
variant_name!(Mipmapped::Yes, mipmapped_naming);
// TODO: this should be a newtype(bool) I guess with implementations
// of From<bool> and Deref?
pub use skia_bindings::GrRenderable as Renderable;
variant_name!(Renderable::No, renderable_naming);
// TODO: this should be a newtype(bool) I guess with implementations
// of From<bool> and Deref?
pub use skia_bindings::GrProtected as Protected;
variant_name!(Protected::Yes, protected_naming);
pub use skia_bindings::GrSurfaceOrigin as SurfaceOrigin;
variant_name!(SurfaceOrigin::BottomLeft, surface_origin_naming);
// Note: BackendState is in gl/types.rs
#[repr(C)]
#[allow(dead_code)]
#[derive(Debug)]
pub struct FlushInfo {
// TODO: wrap access to the following fields in a safe way:
num_semaphores: std::os::raw::c_int,
signal_semaphores: *mut sb::GrBackendSemaphore,
finished_proc: sb::GrGpuFinishedProc,
finished_context: sb::GrGpuFinishedContext,
submitted_proc: sb::GrGpuSubmittedProc,
submitted_context: sb::GrGpuSubmittedContext,
}
impl Default for FlushInfo {
fn default() -> Self {
Self {
num_semaphores: 0,
signal_semaphores: ptr::null_mut(),
finished_proc: None,
finished_context: ptr::null_mut(),
submitted_proc: None,
submitted_context: ptr::null_mut(),
}
}
}
native_transmutable!(sb::GrFlushInfo, FlushInfo, flush_info_layout);
pub use sb::GrSemaphoresSubmitted as SemaphoresSubmitted;
variant_name!(SemaphoresSubmitted::Yes, semaphores_submitted_naming);
// TODO: wrap GrPrepareForExternalIORequests
| 32.870968 | 78 | 0.727674 |
7664251e3417f0f2f90ddf055efbf4dc39d580c8
| 13 |
mod face;
| 4.333333 | 10 | 0.538462 |
ef42ffb73a294bde449f829c61ef4a9f84d6ccbe
| 60,124 |
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct CollateralReport {
/// MsgType = BA
#[serde(flatten)]
pub standard_message_header: super::super::standard_message_header::StandardMessageHeader<'B', 'A'>,
/// Unique Identifier for collateral report
#[serde(rename = "908")]
pub coll_rpt_id: String,
/// Identifier for the collateral inquiry to which this message is a reply
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "909")]
pub coll_inquiry_id: Option<String>,
/// TransactTime
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "60")]
pub transact_time: Option<fix_common::UTCTimestamp>,
/// Differentiates collateral pledged specifically against a position from collateral pledged against an entire portfolio on a
/// valued basis.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1043")]
pub coll_appl_type: Option<CollApplType>,
/// Tells whether security has been restricted.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "291")]
pub financial_status: Option<fix_common::SeparatedValues<FinancialStatus>>,
/// Collateral status
#[serde(rename = "910")]
pub coll_status: CollStatus,
/// TotNumReports
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "911")]
pub tot_num_reports: Option<i32>,
/// LastRptRequested
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "912")]
pub last_rpt_requested: Option<LastRptRequested>,
/// Parties
#[serde(flatten)]
pub parties: Option<super::super::parties::Parties>,
/// Customer Account
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1")]
pub account: Option<String>,
/// Type of account associated with the order (Origin)
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "581")]
pub account_type: Option<AccountType>,
/// Identifier for the order for which collateral is required
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "11")]
pub cl_ord_id: Option<String>,
/// Identifier for the order for which collateral is required
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "37")]
pub order_id: Option<String>,
/// Identifier for the order for which collateral is required
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "198")]
pub secondary_order_id: Option<String>,
/// Identifier for the order for which collateral is required
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "526")]
pub secondary_cl_ord_id: Option<String>,
/// Executions for which collateral is required
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "124")]
pub execs: Option<fix_common::RepeatingValues<Exec>>,
/// Trades for which collateral is required
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "897")]
pub trades: Option<fix_common::RepeatingValues<Trade>>,
/// Insert here the set of "Instrument" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub instrument: Option<super::super::instrument::Instrument>,
/// Insert here the set of "FinancingDetails" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub financing_details: Option<super::super::financing_details::FinancingDetails>,
/// SettlDate
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "64")]
pub settl_date: Option<fix_common::LocalMktDate>,
/// Quantity
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "53")]
pub quantity: Option<f64>,
/// QtyType
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "854")]
pub qty_type: Option<QtyType>,
/// Currency
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "15")]
pub currency: Option<Currency>,
/// Number of legs. Identifies a Multi-leg Execution if present and non-zero.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "555")]
pub legs: Option<fix_common::RepeatingValues<super::super::instrument_leg::InstrumentLeg>>,
/// Number of underlyings
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "711")]
pub underlyings: Option<fix_common::RepeatingValues<super::super::underlying_instrument::UnderlyingInstrument>>,
/// MarginExcess
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "899")]
pub margin_excess: Option<f64>,
/// TotalNetValue
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "900")]
pub total_net_value: Option<f64>,
/// CashOutstanding
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "901")]
pub cash_outstanding: Option<f64>,
/// Insert here the set of "TrdRegTimestamps" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub trd_reg_timestamps: Option<super::super::trd_reg_timestamps::TrdRegTimestamps>,
/// Side
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "54")]
pub side: Option<Side>,
/// Required if any miscellaneous fees are reported. Indicates number of repeating entries. Repeating group. ** Nested Repeating
/// Group follows **
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "136")]
pub misc_fees: Option<fix_common::RepeatingValues<MiscFee>>,
/// Price
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "44")]
pub price: Option<f64>,
/// PriceType
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "423")]
pub price_type: Option<PriceType>,
/// AccruedInterestAmt
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "159")]
pub accrued_interest_amt: Option<f64>,
/// EndAccruedInterestAmt
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "920")]
pub end_accrued_interest_amt: Option<f64>,
/// StartCash
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "921")]
pub start_cash: Option<f64>,
/// EndCash
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "922")]
pub end_cash: Option<f64>,
/// Insert here the set of "SpreadOrBenchmarkCurveData" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub spread_or_benchmark_curve_data: Option<super::super::spread_or_benchmark_curve_data::SpreadOrBenchmarkCurveData>,
/// Insert here the set of "Stipulations" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub stipulations: Option<super::super::stipulations::Stipulations>,
/// Insert here the set of "SettlInstructionsData" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub settl_instructions_data: Option<super::super::settl_instructions_data::SettlInstructionsData>,
/// Trading Session in which trade occurred
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "336")]
pub trading_session_id: Option<String>,
/// Trading Session SubID in which trade occurred
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "625")]
pub trading_session_sub_id: Option<String>,
/// SettlSessID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "716")]
pub settl_sess_id: Option<SettlSessID>,
/// SettlSessSubID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "717")]
pub settl_sess_sub_id: Option<String>,
/// ClearingBusinessDate
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "715")]
pub clearing_business_date: Option<fix_common::LocalMktDate>,
/// Text
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "58")]
pub text: Option<String>,
/// Must be set if <a href="tag_355_EncodedText.html" target="bottom">EncodedText (355)</a> field is specified and must immediately precede it.
#[serde(rename = "354")]
/// Encoded (non-ASCII characters) representation of the <a href="tag_58_Text.html" target="bottom">Text (58)</a> field in the encoded format specified via the <a href="tag_347_MessageEncoding.html" target="bottom">MessageEncoding (347)</a> field.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(alias = "355")]
pub encoded_text: Option<fix_common::EncodedText<355>>,
/// Standard Message Trailer
#[serde(flatten)]
pub standard_message_trailer: super::super::standard_message_trailer::StandardMessageTrailer,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct Exec {
/// Required if <a href="tag_124_NoExecs.html" target="bottom">NoExecs (124)</a> > 0
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "17")]
pub exec_id: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct Trade {
/// Required if <a href="tag_897_NoTrades.html" target="bottom">NoTrades (897)</a> > 0
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "571")]
pub trade_report_id: Option<String>,
/// SecondaryTradeReportID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "818")]
pub secondary_trade_report_id: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct MiscFee {
/// Required if <a href="tag_136_NoMiscFees.html" target="bottom">NoMiscFees (136)</a> > 0
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "137")]
pub misc_fee_amt: Option<f64>,
/// MiscFeeCurr
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "138")]
pub misc_fee_curr: Option<MiscFeeCurr>,
/// Required if <a href="tag_136_NoMiscFees.html" target="bottom">NoMiscFees (136)</a> > 0
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "139")]
pub misc_fee_type: Option<MiscFeeType>,
/// MiscFeeBasis
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "891")]
pub misc_fee_basis: Option<MiscFeeBasis>,
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum CollApplType {
/// Specific Deposit
#[serde(rename = "0")]
SpecificDeposit,
/// General
#[serde(rename = "1")]
General,
}
impl Default for CollApplType {
fn default() -> Self {
CollApplType::SpecificDeposit
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum FinancialStatus {
/// Bankrupt
#[serde(rename = "1")]
Bankrupt,
/// Pending delisting
#[serde(rename = "2")]
PendingDelisting,
/// Restricted
#[serde(rename = "3")]
Restricted,
}
impl Default for FinancialStatus {
fn default() -> Self {
FinancialStatus::Bankrupt
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum CollStatus {
/// Unassigned
#[serde(rename = "0")]
Unassigned,
/// Partially Assigned
#[serde(rename = "1")]
PartiallyAssigned,
/// Assignment Proposed
#[serde(rename = "2")]
AssignmentProposed,
/// Assigned (Accepted)
#[serde(rename = "3")]
Assigned,
/// Challenged
#[serde(rename = "4")]
Challenged,
}
impl Default for CollStatus {
fn default() -> Self {
CollStatus::Unassigned
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum LastRptRequested {
/// Not last message
#[serde(rename = "N")]
NotLastMessage,
/// Last message
#[serde(rename = "Y")]
LastMessage,
}
impl Default for LastRptRequested {
fn default() -> Self {
LastRptRequested::NotLastMessage
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum AccountType {
/// Account is carried on customer Side of Books
#[serde(rename = "1")]
AccountIsCarriedOnCustomerSideOfBooks,
/// Account is carried on non-Customer Side of books
#[serde(rename = "2")]
AccountIsCarriedOnNonCustomerSideOfBooks,
/// House Trader
#[serde(rename = "3")]
HouseTrader,
/// Floor Trader
#[serde(rename = "4")]
FloorTrader,
/// Account is carried on non-customer side of books and is cross margined
#[serde(rename = "6")]
AccountIsCarriedOnNonCustomerSideOfBooksAndIsCrossMargined,
/// Account is house trader and is cross margined
#[serde(rename = "7")]
AccountIsHouseTraderAndIsCrossMargined,
/// Joint Backoffice Account (JBO)
#[serde(rename = "8")]
JointBackofficeAccount,
}
impl Default for AccountType {
fn default() -> Self {
AccountType::AccountIsCarriedOnCustomerSideOfBooks
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum QtyType {
/// Units (shares, par, currency)
#[serde(rename = "0")]
Units,
/// Contracts (if used - must specify <a href="tag_231_ContractMultiplier.html" target="bottom">ContractMultiplier (231)</a> )
#[serde(rename = "1")]
ContractsA,
/// Units of Measure per Time Unit (if used - must specify <a href="tag_996_UnitofMeasure.html" target="bottom">UnitofMeasure (996)</a> and <a href="tag_997_TimeUnit.html" target="bottom">TimeUnit (997)</a> )
#[serde(rename = "2")]
UnitsOfMeasurePerTimeUnitAAndTimeUnit,
}
impl Default for QtyType {
fn default() -> Self {
QtyType::Units
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum Currency {
/// Afghani
#[serde(rename = "AFA")]
Afa,
/// Algerian Dinar
#[serde(rename = "DZD")]
Dzd,
/// Andorran Peseta
#[serde(rename = "ADP")]
Adp,
/// Argentine Peso
#[serde(rename = "ARS")]
Ars,
/// Armenian Dram
#[serde(rename = "AMD")]
Amd,
/// Aruban Guilder
#[serde(rename = "AWG")]
Awg,
/// Australian Dollar
#[serde(rename = "AUD")]
Aud,
/// Azerbaijanian Manat
#[serde(rename = "AZM")]
Azm,
/// Bahamian Dollar
#[serde(rename = "BSD")]
Bsd,
/// Bahraini Dinar
#[serde(rename = "BHD")]
Bhd,
/// Baht
#[serde(rename = "THB")]
Thb,
/// Balboa
#[serde(rename = "PAB")]
Pab,
/// Barbados Dollar
#[serde(rename = "BBD")]
Bbd,
/// Belarussian Ruble
#[serde(rename = "BYB")]
Byb,
/// Belgian Franc
#[serde(rename = "BEF")]
Bef,
/// Belize Dollar
#[serde(rename = "BZD")]
Bzd,
/// Bermudian Dollar
#[serde(rename = "BMD")]
Bmd,
/// Bolivar
#[serde(rename = "VEB")]
Veb,
/// Boliviano
#[serde(rename = "BOB")]
Bob,
/// Brazilian Real
#[serde(rename = "BRL")]
Brl,
/// Brunei Dollar
#[serde(rename = "BND")]
Bnd,
/// Burundi Franc
#[serde(rename = "BIF")]
Bif,
/// CFA Franc BCEAO+
#[serde(rename = "XOF")]
Xof,
/// CFA Franc BEAC#
#[serde(rename = "XAF")]
Xaf,
/// CFP Franc
#[serde(rename = "XPF")]
Xpf,
/// Canadian Dollar
#[serde(rename = "CAD")]
Cad,
/// Cape Verde Escudo
#[serde(rename = "CVE")]
Cve,
/// Cayman Islands Dollar
#[serde(rename = "KYD")]
Kyd,
/// Cedi
#[serde(rename = "GHC")]
Ghc,
/// Chilean Peso
#[serde(rename = "CLP")]
Clp,
/// Colombian Peso
#[serde(rename = "COP")]
Cop,
/// Comoro Franc
#[serde(rename = "KMF")]
Kmf,
/// Convertible Marks
#[serde(rename = "BAM")]
Bam,
/// Cordoba Oro
#[serde(rename = "NIO")]
Nio,
/// Costa Rican Colon
#[serde(rename = "CRC")]
Crc,
/// Cuban Peso
#[serde(rename = "CUP")]
Cup,
/// Cyprus Pound
#[serde(rename = "CYP")]
Cyp,
/// Czech Koruna
#[serde(rename = "CZK")]
Czk,
/// Dalasi
#[serde(rename = "GMD")]
Gmd,
/// Danish Krone
#[serde(rename = "DKK")]
Dkk,
/// Denar
#[serde(rename = "MKD")]
Mkd,
/// Deutsche Mark
#[serde(rename = "DEM")]
Dem,
/// Djibouti Franc
#[serde(rename = "DJF")]
Djf,
/// Dobra
#[serde(rename = "STD")]
Std,
/// Dominican Peso
#[serde(rename = "DOP")]
Dop,
/// Dong
#[serde(rename = "VND")]
Vnd,
/// Drachma
#[serde(rename = "GRD")]
Grd,
/// East Caribbean Dollar
#[serde(rename = "XCD")]
Xcd,
/// Egyptian Pound
#[serde(rename = "EGP")]
Egp,
/// El Salvador Colon
#[serde(rename = "SVC")]
Svc,
/// Ethiopian Birr
#[serde(rename = "ETB")]
Etb,
/// Euro
#[serde(rename = "EUR")]
Eur,
/// Falkland Islands Pound
#[serde(rename = "FKP")]
Fkp,
/// Fiji Dollar
#[serde(rename = "FJD")]
Fjd,
/// Forint
#[serde(rename = "HUF")]
Huf,
/// Franc Congolais
#[serde(rename = "CDF")]
Cdf,
/// French Franc
#[serde(rename = "FRF")]
Frf,
/// Gibraltar Pound
#[serde(rename = "GIP")]
Gip,
/// Gourde
#[serde(rename = "HTG")]
Htg,
/// Guarani
#[serde(rename = "PYG")]
Pyg,
/// Guinea Franc
#[serde(rename = "GNF")]
Gnf,
/// Guinea-Bissau Peso
#[serde(rename = "GWP")]
Gwp,
/// Guyana Dollar
#[serde(rename = "GYD")]
Gyd,
/// Hong Kong Dollar
#[serde(rename = "HKD")]
Hkd,
/// Hryvnia
#[serde(rename = "UAH")]
Uah,
/// Iceland Krona
#[serde(rename = "ISK")]
Isk,
/// Indian Rupee
#[serde(rename = "INR")]
Inr,
/// Iranian Rial
#[serde(rename = "IRR")]
Irr,
/// Iraqi Dinar
#[serde(rename = "IQD")]
Iqd,
/// Irish Pound
#[serde(rename = "IEP")]
Iep,
/// Italian Lira
#[serde(rename = "ITL")]
Itl,
/// Jamaican Dollar
#[serde(rename = "JMD")]
Jmd,
/// Jordanian Dinar
#[serde(rename = "JOD")]
Jod,
/// Kenyan Shilling
#[serde(rename = "KES")]
Kes,
/// Kina
#[serde(rename = "PGK")]
Pgk,
/// Kip
#[serde(rename = "LAK")]
Lak,
/// Kroon
#[serde(rename = "EEK")]
Eek,
/// Kuna
#[serde(rename = "HRK")]
Hrk,
/// Kuwaiti Dinar
#[serde(rename = "KWD")]
Kwd,
/// Kwacha
#[serde(rename = "MWK")]
Mwk,
/// Kwacha
#[serde(rename = "ZMK")]
Zmk,
/// Kwanza Reajustado
#[serde(rename = "AOR")]
Aor,
/// Kyat
#[serde(rename = "MMK")]
Mmk,
/// Lari
#[serde(rename = "GEL")]
Gel,
/// Latvian Lats
#[serde(rename = "LVL")]
Lvl,
/// Lebanese Pound
#[serde(rename = "LBP")]
Lbp,
/// Lek
#[serde(rename = "ALL")]
All,
/// Lempira
#[serde(rename = "HNL")]
Hnl,
/// Leone
#[serde(rename = "SLL")]
Sll,
/// Leu
#[serde(rename = "ROL")]
Rol,
/// Lev
#[serde(rename = "BGL")]
Bgl,
/// Liberian Dollar
#[serde(rename = "LRD")]
Lrd,
/// Libyan Dinar
#[serde(rename = "LYD")]
Lyd,
/// Lilangeni
#[serde(rename = "SZL")]
Szl,
/// Lithuanian Litas
#[serde(rename = "LTL")]
Ltl,
/// Loti
#[serde(rename = "LSL")]
Lsl,
/// Luxembourg Franc
#[serde(rename = "LUF")]
Luf,
/// Malagasy Franc
#[serde(rename = "MGF")]
Mgf,
/// Malaysian Ringgit
#[serde(rename = "MYR")]
Myr,
/// Maltese Lira
#[serde(rename = "MTL")]
Mtl,
/// Manat
#[serde(rename = "TMM")]
Tmm,
/// Markka
#[serde(rename = "FIM")]
Fim,
/// Mauritius Rupee
#[serde(rename = "MUR")]
Mur,
/// Metical
#[serde(rename = "MZM")]
Mzm,
/// Mexican Peso
#[serde(rename = "MXN")]
Mxn,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "MXV")]
Mxv,
/// Moldovan Leu
#[serde(rename = "MDL")]
Mdl,
/// Moroccan Dirham
#[serde(rename = "MAD")]
Mad,
/// Mvdol
#[serde(rename = "BOV")]
Bov,
/// Naira
#[serde(rename = "NGN")]
Ngn,
/// Nakfa
#[serde(rename = "ERN")]
Ern,
/// Namibia Dollar
#[serde(rename = "NAD")]
Nad,
/// Nepalese Rupee
#[serde(rename = "NPR")]
Npr,
/// Netherlands Antillian Guilder
#[serde(rename = "ANG")]
Ang,
/// Netherlands Guilder
#[serde(rename = "NLG")]
Nlg,
/// New Dinar
#[serde(rename = "YUM")]
Yum,
/// New Israeli Sheqel
#[serde(rename = "ILS")]
Ils,
/// New Kwanza
#[serde(rename = "AON")]
Aon,
/// New Taiwan Dollar
#[serde(rename = "TWD")]
Twd,
/// New Zaire
#[serde(rename = "ZRN")]
Zrn,
/// New Zealand Dollar
#[serde(rename = "NZD")]
Nzd,
/// Next day
#[serde(rename = "USN")]
Usn,
/// Ngultrum
#[serde(rename = "BTN")]
Btn,
/// North Korean Won
#[serde(rename = "KPW")]
Kpw,
/// Norwegian Krone
#[serde(rename = "NOK")]
Nok,
/// Nuevo Sol
#[serde(rename = "PEN")]
Pen,
/// Ouguiya
#[serde(rename = "MRO")]
Mro,
/// Pa'anga
#[serde(rename = "TOP")]
Top,
/// Pakistan Rupee
#[serde(rename = "PKR")]
Pkr,
/// Pataca
#[serde(rename = "MOP")]
Mop,
/// Peso Uruguayo
#[serde(rename = "UYU")]
Uyu,
/// Philippine Peso
#[serde(rename = "PHP")]
Php,
/// Portuguese Escudo
#[serde(rename = "PTE")]
Pte,
/// Pound Sterling
#[serde(rename = "GBP")]
Gbp,
/// Pula
#[serde(rename = "BWP")]
Bwp,
/// Qatari Rial
#[serde(rename = "QAR")]
Qar,
/// Quetzal
#[serde(rename = "GTQ")]
Gtq,
/// Rand
#[serde(rename = "ZAR")]
Zar,
/// Rial Omani
#[serde(rename = "OMR")]
Omr,
/// Riel
#[serde(rename = "KHR")]
Khr,
/// Rufiyaa
#[serde(rename = "MVR")]
Mvr,
/// Rupiah
#[serde(rename = "IDR")]
Idr,
/// Russian Ruble
#[serde(rename = "RUB")]
Rub,
/// Russian Ruble
#[serde(rename = "RUR")]
Rur,
/// Rwanda Franc
#[serde(rename = "RWF")]
Rwf,
/// SDR
#[serde(rename = "XDR")]
Xdr,
/// Same day
#[serde(rename = "USS")]
Uss,
/// Saudi Riyal
#[serde(rename = "SAR")]
Sar,
/// Schilling
#[serde(rename = "ATS")]
Ats,
/// Seychelles Rupee
#[serde(rename = "SCR")]
Scr,
/// Singapore Dollar
#[serde(rename = "SGD")]
Sgd,
/// Slovak Koruna
#[serde(rename = "SKK")]
Skk,
/// Solomon Islands Dollar
#[serde(rename = "SBD")]
Sbd,
/// Som
#[serde(rename = "KGS")]
Kgs,
/// Somali Shilling
#[serde(rename = "SOS")]
Sos,
/// Spanish Peseta
#[serde(rename = "ESP")]
Esp,
/// Sri Lanka Rupee
#[serde(rename = "LKR")]
Lkr,
/// St Helena Pound
#[serde(rename = "SHP")]
Shp,
/// Sucre
#[serde(rename = "ECS")]
Ecs,
/// Sudanese Dinar
#[serde(rename = "SDD")]
Sdd,
/// Surinam Guilder
#[serde(rename = "SRG")]
Srg,
/// Swedish Krona
#[serde(rename = "SEK")]
Sek,
/// Swiss Franc
#[serde(rename = "CHF")]
Chf,
/// Syrian Pound
#[serde(rename = "SYP")]
Syp,
/// Tajik Ruble
#[serde(rename = "TJR")]
Tjr,
/// Taka
#[serde(rename = "BDT")]
Bdt,
/// Tala
#[serde(rename = "WST")]
Wst,
/// Tanzanian Shilling
#[serde(rename = "TZS")]
Tzs,
/// Tenge
#[serde(rename = "KZT")]
Kzt,
/// Timor Escudo
#[serde(rename = "TPE")]
Tpe,
/// Tolar
#[serde(rename = "SIT")]
Sit,
/// Trinidad and Tobago Dollar
#[serde(rename = "TTD")]
Ttd,
/// Tugrik
#[serde(rename = "MNT")]
Mnt,
/// Tunisian Dinar
#[serde(rename = "TND")]
Tnd,
/// Turkish Lira
#[serde(rename = "TRL")]
Trl,
/// UAE Dirham
#[serde(rename = "AED")]
Aed,
/// US Dollar
#[serde(rename = "USD")]
Usd,
/// Uganda Shilling
#[serde(rename = "UGX")]
Ugx,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "ECV")]
Ecv,
/// Unidades de fomento
#[serde(rename = "CLF")]
Clf,
/// Uzbekistan Sum
#[serde(rename = "UZS")]
Uzs,
/// Vatu
#[serde(rename = "VUV")]
Vuv,
/// Won
#[serde(rename = "KRW")]
Krw,
/// Yemeni Rial
#[serde(rename = "YER")]
Yer,
/// Yen
#[serde(rename = "JPY")]
Jpy,
/// Yuan Renminbi
#[serde(rename = "CNY")]
Cny,
/// Zimbabwe Dollar
#[serde(rename = "ZWD")]
Zwd,
/// Zloty
#[serde(rename = "PLN")]
Pln,
/// financial Rand
#[serde(rename = "ZAL")]
Zal,
/// Afghani
#[serde(rename = "004")]
N004,
/// Algerian Dinar
#[serde(rename = "01")]
N01,
/// Andorran Peseta
#[serde(rename = "020")]
N020,
/// Argentine Peso
#[serde(rename = "032")]
N032,
/// Armenian Dram
#[serde(rename = "051")]
N051,
/// Aruban Guilder
#[serde(rename = "533")]
N533,
/// Australian Dollar
#[serde(rename = "036")]
N036,
/// Azerbaijanian Manat
#[serde(rename = "031")]
N031,
/// Bahamian Dollar
#[serde(rename = "044")]
N044,
/// Bahraini Dinar
#[serde(rename = "048")]
N048,
/// Baht
#[serde(rename = "764")]
N764,
/// Balboa
#[serde(rename = "590")]
N590,
/// Barbados Dollar
#[serde(rename = "052")]
N052,
/// Belarussian Ruble
#[serde(rename = "112")]
N112,
/// Belgian Franc
#[serde(rename = "056")]
N056,
/// Belize Dollar
#[serde(rename = "084")]
N084,
/// Bermudian Dollar
#[serde(rename = "060")]
N060,
/// Bolivar
#[serde(rename = "862")]
N862,
/// Boliviano
#[serde(rename = "068")]
N068,
/// Brazilian Real
#[serde(rename = "986")]
N986,
/// Brunei Dollar
#[serde(rename = "096")]
N096,
/// Burundi Franc
#[serde(rename = "108")]
N108,
/// CFA Franc BCEAO+
#[serde(rename = "952")]
N952,
/// CFA Franc BEAC#
#[serde(rename = "950")]
N950,
/// CFP Franc
#[serde(rename = "953")]
N953,
/// Canadian Dollar
#[serde(rename = "124")]
N124,
/// Cape Verde Escudo
#[serde(rename = "132")]
N132,
/// Cayman Islands Dollar
#[serde(rename = "136")]
N136,
/// Cedi
#[serde(rename = "288")]
N288,
/// Chilean Peso
#[serde(rename = "152")]
N152,
/// Colombian Peso
#[serde(rename = "170")]
N170,
/// Comoro Franc
#[serde(rename = "174")]
N174,
/// Convertible Marks
#[serde(rename = "977")]
N977,
/// Cordoba Oro
#[serde(rename = "558")]
N558,
/// Costa Rican Colon
#[serde(rename = "188")]
N188,
/// Cuban Peso
#[serde(rename = "192")]
N192,
/// Cyprus Pound
#[serde(rename = "196")]
N196,
/// Czech Koruna
#[serde(rename = "203")]
N203,
/// Dalasi
#[serde(rename = "270")]
N270,
/// Danish Krone
#[serde(rename = "208")]
N208,
/// Denar
#[serde(rename = "807")]
N807,
/// Deutsche Mark
#[serde(rename = "280")]
N280,
/// Djibouti Franc
#[serde(rename = "262")]
N262,
/// Dobra
#[serde(rename = "678")]
N678,
/// Dominican Peso
#[serde(rename = "214")]
N214,
/// Dong
#[serde(rename = "704")]
N704,
/// Drachma
#[serde(rename = "300")]
N300,
/// East Caribbean Dollar
#[serde(rename = "951")]
N951,
/// Egyptian Pound
#[serde(rename = "818")]
N818,
/// El Salvador Colon
#[serde(rename = "222")]
N222,
/// Ethiopian Birr
#[serde(rename = "230")]
N230,
/// Euro
#[serde(rename = "978")]
N978,
/// Falkland Islands Pound
#[serde(rename = "238")]
N238,
/// Fiji Dollar
#[serde(rename = "242")]
N242,
/// Forint
#[serde(rename = "348")]
N348,
/// Franc Congolais
#[serde(rename = "976")]
N976,
/// French Franc
#[serde(rename = "250")]
N250,
/// Gibraltar Pound
#[serde(rename = "292")]
N292,
/// Gourde
#[serde(rename = "332")]
N332,
/// Guarani
#[serde(rename = "600")]
N600,
/// Guinea Franc
#[serde(rename = "324")]
N324,
/// Guinea-Bissau Peso
#[serde(rename = "624")]
N624,
/// Guyana Dollar
#[serde(rename = "328")]
N328,
/// Hong Kong Dollar
#[serde(rename = "344")]
N344,
/// Hryvnia
#[serde(rename = "980")]
N980,
/// Iceland Krona
#[serde(rename = "352")]
N352,
/// Indian Rupee
#[serde(rename = "356")]
N356,
/// Iranian Rial
#[serde(rename = "364")]
N364,
/// Iraqi Dinar
#[serde(rename = "368")]
N368,
/// Irish Pound
#[serde(rename = "372")]
N372,
/// Italian Lira
#[serde(rename = "380")]
N380,
/// Jamaican Dollar
#[serde(rename = "388")]
N388,
/// Jordanian Dinar
#[serde(rename = "400")]
N400,
/// Kenyan Shilling
#[serde(rename = "404")]
N404,
/// Kina
#[serde(rename = "598")]
N598,
/// Kip
#[serde(rename = "418")]
N418,
/// Kroon
#[serde(rename = "233")]
N233,
/// Kuna
#[serde(rename = "191")]
N191,
/// Kuwaiti Dinar
#[serde(rename = "414")]
N414,
/// Kwacha
#[serde(rename = "454")]
N454,
/// Kwacha
#[serde(rename = "894")]
N894,
/// Kwanza Reajustado
#[serde(rename = "982")]
N982,
/// Kyat
#[serde(rename = "104")]
N104,
/// Lari
#[serde(rename = "981")]
N981,
/// Latvian Lats
#[serde(rename = "428")]
N428,
/// Lebanese Pound
#[serde(rename = "422")]
N422,
/// Lek
#[serde(rename = "008")]
N008,
/// Lempira
#[serde(rename = "340")]
N340,
/// Leone
#[serde(rename = "694")]
N694,
/// Leu
#[serde(rename = "642")]
N642,
/// Lev
#[serde(rename = "100")]
N100,
/// Liberian Dollar
#[serde(rename = "430")]
N430,
/// Libyan Dinar
#[serde(rename = "434")]
N434,
/// Lilangeni
#[serde(rename = "748")]
N748,
/// Lithuanian Litas
#[serde(rename = "440")]
N440,
/// Loti
#[serde(rename = "426")]
N426,
/// Luxembourg Franc
#[serde(rename = "442")]
N442,
/// Malagasy Franc
#[serde(rename = "450")]
N450,
/// Malaysian Ringgit
#[serde(rename = "458")]
N458,
/// Maltese Lira
#[serde(rename = "470")]
N470,
/// Manat
#[serde(rename = "795")]
N795,
/// Markka
#[serde(rename = "246")]
N246,
/// Mauritius Rupee
#[serde(rename = "480")]
N480,
/// Metical
#[serde(rename = "508")]
N508,
/// Mexican Peso
#[serde(rename = "484")]
N484,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "979")]
N979,
/// Moldovan Leu
#[serde(rename = "498")]
N498,
/// Moroccan Dirham
#[serde(rename = "504")]
N504,
/// Mvdol
#[serde(rename = "984")]
N984,
/// Naira
#[serde(rename = "566")]
N566,
/// Nakfa
#[serde(rename = "232")]
N232,
/// Namibia Dollar
#[serde(rename = "516")]
N516,
/// Nepalese Rupee
#[serde(rename = "524")]
N524,
/// Netherlands Antillian Guilder
#[serde(rename = "532")]
N532,
/// Netherlands Guilder
#[serde(rename = "528")]
N528,
/// New Dinar
#[serde(rename = "891")]
N891,
/// New Israeli Sheqel
#[serde(rename = "376")]
N376,
/// New Kwanza
#[serde(rename = "02")]
N02,
/// New Taiwan Dollar
#[serde(rename = "901")]
N901,
/// New Zaire
#[serde(rename = "180")]
N180,
/// New Zealand Dollar
#[serde(rename = "554")]
N554,
/// Next day
#[serde(rename = "997")]
N997,
/// Ngultrum
#[serde(rename = "064")]
N064,
/// North Korean Won
#[serde(rename = "408")]
N408,
/// Norwegian Krone
#[serde(rename = "578")]
N578,
/// Nuevo Sol
#[serde(rename = "604")]
N604,
/// Ouguiya
#[serde(rename = "478")]
N478,
/// Pa'anga
#[serde(rename = "776")]
N776,
/// Pakistan Rupee
#[serde(rename = "586")]
N586,
/// Pataca
#[serde(rename = "446")]
N446,
/// Peso Uruguayo
#[serde(rename = "858")]
N858,
/// Philippine Peso
#[serde(rename = "608")]
N608,
/// Portuguese Escudo
#[serde(rename = "620")]
N620,
/// Pound Sterling
#[serde(rename = "826")]
N826,
/// Pula
#[serde(rename = "072")]
N072,
/// Qatari Rial
#[serde(rename = "634")]
N634,
/// Quetzal
#[serde(rename = "320")]
N320,
/// Rand
#[serde(rename = "710")]
N710,
/// Rial Omani
#[serde(rename = "512")]
N512,
/// Riel
#[serde(rename = "116")]
N116,
/// Rufiyaa
#[serde(rename = "462")]
N462,
/// Rupiah
#[serde(rename = "360")]
N360,
/// Russian Ruble
#[serde(rename = "643")]
N643,
/// Russian Ruble
#[serde(rename = "810")]
N810,
/// Rwanda Franc
#[serde(rename = "646")]
N646,
/// SDR
#[serde(rename = "960")]
N960,
/// Same day
#[serde(rename = "998")]
N998,
/// Saudi Riyal
#[serde(rename = "682")]
N682,
/// Schilling
#[serde(rename = "040")]
N040,
/// Seychelles Rupee
#[serde(rename = "690")]
N690,
/// Singapore Dollar
#[serde(rename = "702")]
N702,
/// Slovak Koruna
#[serde(rename = "703")]
N703,
/// Solomon Islands Dollar
#[serde(rename = "090")]
N090,
/// Som
#[serde(rename = "417")]
N417,
/// Somali Shilling
#[serde(rename = "706")]
N706,
/// Spanish Peseta
#[serde(rename = "724")]
N724,
/// Sri Lanka Rupee
#[serde(rename = "144")]
N144,
/// St Helena Pound
#[serde(rename = "654")]
N654,
/// Sucre
#[serde(rename = "218")]
N218,
/// Sudanese Dinar
#[serde(rename = "736")]
N736,
/// Surinam Guilder
#[serde(rename = "740")]
N740,
/// Swedish Krona
#[serde(rename = "752")]
N752,
/// Swiss Franc
#[serde(rename = "756")]
N756,
/// Syrian Pound
#[serde(rename = "760")]
N760,
/// Tajik Ruble
#[serde(rename = "762")]
N762,
/// Taka
#[serde(rename = "050")]
N050,
/// Tala
#[serde(rename = "882")]
N882,
/// Tanzanian Shilling
#[serde(rename = "834")]
N834,
/// Tenge
#[serde(rename = "398")]
N398,
/// Timor Escudo
#[serde(rename = "626")]
N626,
/// Tolar
#[serde(rename = "705")]
N705,
/// Trinidad and Tobago Dollar
#[serde(rename = "780")]
N780,
/// Tugrik
#[serde(rename = "496")]
N496,
/// Tunisian Dinar
#[serde(rename = "788")]
N788,
/// Turkish Lira
#[serde(rename = "792")]
N792,
/// UAE Dirham
#[serde(rename = "784")]
N784,
/// US Dollar
#[serde(rename = "840")]
N840,
/// Uganda Shilling
#[serde(rename = "800")]
N800,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "983")]
N983,
/// Unidades de fomento
#[serde(rename = "990")]
N990,
/// Uzbekistan Sum
#[serde(rename = "860")]
N860,
/// Vatu
#[serde(rename = "548")]
N548,
/// Won
#[serde(rename = "410")]
N410,
/// Yemeni Rial
#[serde(rename = "886")]
N886,
/// Yen
#[serde(rename = "392")]
N392,
/// Yuan Renminbi
#[serde(rename = "156")]
N156,
/// Zimbabwe Dollar
#[serde(rename = "716")]
N716,
/// Zloty
#[serde(rename = "985")]
N985,
/// financial Rand
#[serde(rename = "991")]
N991,
/// Gold
#[serde(rename = "XAU")]
Xau,
/// European Composite Unit (EURCO)
#[serde(rename = "XBA")]
Xba,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "XBB")]
Xbb,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "XBC")]
Xbc,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "XBD")]
Xbd,
/// Palladium
#[serde(rename = "XPD")]
Xpd,
/// Platinum
#[serde(rename = "XPT")]
Xpt,
/// Silver
#[serde(rename = "XAG")]
Xag,
/// UIC-Franc
#[serde(rename = "XFU")]
Xfu,
/// Gold-Franc
#[serde(rename = "XFO")]
Xfo,
/// Codes specifically reserved for testing purposes
#[serde(rename = "XTS")]
Xts,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "XXX")]
Xxx,
/// Gold
#[serde(rename = "959")]
N959,
/// European Composite Unit (EURCO)
#[serde(rename = "955")]
N955,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "956")]
N956,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "957")]
N957,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "958")]
N958,
/// Palladium
#[serde(rename = "964")]
N964,
/// Platinum
#[serde(rename = "962")]
N962,
/// Silver
#[serde(rename = "961")]
N961,
/// Codes specifically reserved for testing purposes
#[serde(rename = "963")]
N963,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "999")]
N999,
}
impl Default for Currency {
fn default() -> Self {
Currency::Afa
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum Side {
/// Buy
#[serde(rename = "1")]
Buy,
/// Sell
#[serde(rename = "2")]
Sell,
/// Buy minus
#[serde(rename = "3")]
BuyMinus,
/// Sell plus
#[serde(rename = "4")]
SellPlus,
/// Sell short
#[serde(rename = "5")]
SellShort,
/// Sell short exempt
#[serde(rename = "6")]
SellShortExempt,
/// Undisclosed (valid for IOI and List Order messages only)
#[serde(rename = "7")]
Undisclosed,
/// Cross (orders where counterparty is an exchange, valid for all messages except IOIs)
#[serde(rename = "8")]
Cross,
/// Cross short
#[serde(rename = "9")]
CrossShort,
/// Cross short exempt
#[serde(rename = "A")]
CrossShortExempt,
/// "As Defined" (for use with multileg instruments)
#[serde(rename = "B")]
AsDefined,
/// "Opposite" (for use with multileg instruments)
#[serde(rename = "C")]
Opposite,
/// Subscribe (e.g. CIV)
#[serde(rename = "D")]
Subscribe,
/// Redeem (e.g. CIV)
#[serde(rename = "E")]
Redeem,
/// Lend (FINANCING - identifies direction of collateral)
#[serde(rename = "F")]
Lend,
/// Borrow (FINANCING - identifies direction of collateral)
#[serde(rename = "G")]
Borrow,
}
impl Default for Side {
fn default() -> Self {
Side::Buy
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum PriceType {
/// Percentage (e.g. percent of par) (often called "dollar price" for fixed income)
#[serde(rename = "1")]
Percentage,
/// Per unit (i.e. per share or contract)
#[serde(rename = "2")]
PerUnit,
/// Fixed Amount (absolute value)
#[serde(rename = "3")]
FixedAmount,
/// Discount - percentage points below par
#[serde(rename = "4")]
DiscountPercentagePointsBelowPar,
/// Premium - percentage points over par
#[serde(rename = "5")]
PremiumPercentagePointsOverPar,
/// Spread
#[serde(rename = "6")]
Spread,
/// TED price
#[serde(rename = "7")]
TedPrice,
/// TED yield
#[serde(rename = "8")]
TedYield,
/// Yield
#[serde(rename = "9")]
Yield,
/// Fixed cabinet trade price (primarily for listed futures and options)
#[serde(rename = "10")]
FixedCabinetTradePrice,
/// Variable cabinet trade price (primarily for listed futures and options)
#[serde(rename = "11")]
VariableCabinetTradePrice,
/// Product ticks in halfs
#[serde(rename = "13")]
ProductTicksInHalfs,
/// Product ticks in fourths
#[serde(rename = "14")]
ProductTicksInFourths,
/// Product ticks in eights
#[serde(rename = "15")]
ProductTicksInEights,
/// Product ticks in sixteenths
#[serde(rename = "16")]
ProductTicksInSixteenths,
/// Product ticks in thirty-seconds
#[serde(rename = "17")]
ProductTicksInThirtySeconds,
/// Product ticks in sixty-forths
#[serde(rename = "18")]
ProductTicksInSixtyForths,
/// Product ticks in one-twenty-eights
#[serde(rename = "19")]
ProductTicksInOneTwentyEights,
}
impl Default for PriceType {
fn default() -> Self {
PriceType::Percentage
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum SettlSessID {
/// Intraday
#[serde(rename = "ITD")]
Intraday,
/// Regular Trading Hours
#[serde(rename = "RTH")]
RegularTradingHours,
/// Electronic Trading Hours
#[serde(rename = "ETH")]
ElectronicTradingHours,
/// End Of Day
#[serde(rename = "EOD")]
EndOfDay,
}
impl Default for SettlSessID {
fn default() -> Self {
SettlSessID::Intraday
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum MiscFeeCurr {
/// Afghani
#[serde(rename = "AFA")]
Afa,
/// Algerian Dinar
#[serde(rename = "DZD")]
Dzd,
/// Andorran Peseta
#[serde(rename = "ADP")]
Adp,
/// Argentine Peso
#[serde(rename = "ARS")]
Ars,
/// Armenian Dram
#[serde(rename = "AMD")]
Amd,
/// Aruban Guilder
#[serde(rename = "AWG")]
Awg,
/// Australian Dollar
#[serde(rename = "AUD")]
Aud,
/// Azerbaijanian Manat
#[serde(rename = "AZM")]
Azm,
/// Bahamian Dollar
#[serde(rename = "BSD")]
Bsd,
/// Bahraini Dinar
#[serde(rename = "BHD")]
Bhd,
/// Baht
#[serde(rename = "THB")]
Thb,
/// Balboa
#[serde(rename = "PAB")]
Pab,
/// Barbados Dollar
#[serde(rename = "BBD")]
Bbd,
/// Belarussian Ruble
#[serde(rename = "BYB")]
Byb,
/// Belgian Franc
#[serde(rename = "BEF")]
Bef,
/// Belize Dollar
#[serde(rename = "BZD")]
Bzd,
/// Bermudian Dollar
#[serde(rename = "BMD")]
Bmd,
/// Bolivar
#[serde(rename = "VEB")]
Veb,
/// Boliviano
#[serde(rename = "BOB")]
Bob,
/// Brazilian Real
#[serde(rename = "BRL")]
Brl,
/// Brunei Dollar
#[serde(rename = "BND")]
Bnd,
/// Burundi Franc
#[serde(rename = "BIF")]
Bif,
/// CFA Franc BCEAO+
#[serde(rename = "XOF")]
Xof,
/// CFA Franc BEAC#
#[serde(rename = "XAF")]
Xaf,
/// CFP Franc
#[serde(rename = "XPF")]
Xpf,
/// Canadian Dollar
#[serde(rename = "CAD")]
Cad,
/// Cape Verde Escudo
#[serde(rename = "CVE")]
Cve,
/// Cayman Islands Dollar
#[serde(rename = "KYD")]
Kyd,
/// Cedi
#[serde(rename = "GHC")]
Ghc,
/// Chilean Peso
#[serde(rename = "CLP")]
Clp,
/// Colombian Peso
#[serde(rename = "COP")]
Cop,
/// Comoro Franc
#[serde(rename = "KMF")]
Kmf,
/// Convertible Marks
#[serde(rename = "BAM")]
Bam,
/// Cordoba Oro
#[serde(rename = "NIO")]
Nio,
/// Costa Rican Colon
#[serde(rename = "CRC")]
Crc,
/// Cuban Peso
#[serde(rename = "CUP")]
Cup,
/// Cyprus Pound
#[serde(rename = "CYP")]
Cyp,
/// Czech Koruna
#[serde(rename = "CZK")]
Czk,
/// Dalasi
#[serde(rename = "GMD")]
Gmd,
/// Danish Krone
#[serde(rename = "DKK")]
Dkk,
/// Denar
#[serde(rename = "MKD")]
Mkd,
/// Deutsche Mark
#[serde(rename = "DEM")]
Dem,
/// Djibouti Franc
#[serde(rename = "DJF")]
Djf,
/// Dobra
#[serde(rename = "STD")]
Std,
/// Dominican Peso
#[serde(rename = "DOP")]
Dop,
/// Dong
#[serde(rename = "VND")]
Vnd,
/// Drachma
#[serde(rename = "GRD")]
Grd,
/// East Caribbean Dollar
#[serde(rename = "XCD")]
Xcd,
/// Egyptian Pound
#[serde(rename = "EGP")]
Egp,
/// El Salvador Colon
#[serde(rename = "SVC")]
Svc,
/// Ethiopian Birr
#[serde(rename = "ETB")]
Etb,
/// Euro
#[serde(rename = "EUR")]
Eur,
/// Falkland Islands Pound
#[serde(rename = "FKP")]
Fkp,
/// Fiji Dollar
#[serde(rename = "FJD")]
Fjd,
/// Forint
#[serde(rename = "HUF")]
Huf,
/// Franc Congolais
#[serde(rename = "CDF")]
Cdf,
/// French Franc
#[serde(rename = "FRF")]
Frf,
/// Gibraltar Pound
#[serde(rename = "GIP")]
Gip,
/// Gourde
#[serde(rename = "HTG")]
Htg,
/// Guarani
#[serde(rename = "PYG")]
Pyg,
/// Guinea Franc
#[serde(rename = "GNF")]
Gnf,
/// Guinea-Bissau Peso
#[serde(rename = "GWP")]
Gwp,
/// Guyana Dollar
#[serde(rename = "GYD")]
Gyd,
/// Hong Kong Dollar
#[serde(rename = "HKD")]
Hkd,
/// Hryvnia
#[serde(rename = "UAH")]
Uah,
/// Iceland Krona
#[serde(rename = "ISK")]
Isk,
/// Indian Rupee
#[serde(rename = "INR")]
Inr,
/// Iranian Rial
#[serde(rename = "IRR")]
Irr,
/// Iraqi Dinar
#[serde(rename = "IQD")]
Iqd,
/// Irish Pound
#[serde(rename = "IEP")]
Iep,
/// Italian Lira
#[serde(rename = "ITL")]
Itl,
/// Jamaican Dollar
#[serde(rename = "JMD")]
Jmd,
/// Jordanian Dinar
#[serde(rename = "JOD")]
Jod,
/// Kenyan Shilling
#[serde(rename = "KES")]
Kes,
/// Kina
#[serde(rename = "PGK")]
Pgk,
/// Kip
#[serde(rename = "LAK")]
Lak,
/// Kroon
#[serde(rename = "EEK")]
Eek,
/// Kuna
#[serde(rename = "HRK")]
Hrk,
/// Kuwaiti Dinar
#[serde(rename = "KWD")]
Kwd,
/// Kwacha
#[serde(rename = "MWK")]
Mwk,
/// Kwacha
#[serde(rename = "ZMK")]
Zmk,
/// Kwanza Reajustado
#[serde(rename = "AOR")]
Aor,
/// Kyat
#[serde(rename = "MMK")]
Mmk,
/// Lari
#[serde(rename = "GEL")]
Gel,
/// Latvian Lats
#[serde(rename = "LVL")]
Lvl,
/// Lebanese Pound
#[serde(rename = "LBP")]
Lbp,
/// Lek
#[serde(rename = "ALL")]
All,
/// Lempira
#[serde(rename = "HNL")]
Hnl,
/// Leone
#[serde(rename = "SLL")]
Sll,
/// Leu
#[serde(rename = "ROL")]
Rol,
/// Lev
#[serde(rename = "BGL")]
Bgl,
/// Liberian Dollar
#[serde(rename = "LRD")]
Lrd,
/// Libyan Dinar
#[serde(rename = "LYD")]
Lyd,
/// Lilangeni
#[serde(rename = "SZL")]
Szl,
/// Lithuanian Litas
#[serde(rename = "LTL")]
Ltl,
/// Loti
#[serde(rename = "LSL")]
Lsl,
/// Luxembourg Franc
#[serde(rename = "LUF")]
Luf,
/// Malagasy Franc
#[serde(rename = "MGF")]
Mgf,
/// Malaysian Ringgit
#[serde(rename = "MYR")]
Myr,
/// Maltese Lira
#[serde(rename = "MTL")]
Mtl,
/// Manat
#[serde(rename = "TMM")]
Tmm,
/// Markka
#[serde(rename = "FIM")]
Fim,
/// Mauritius Rupee
#[serde(rename = "MUR")]
Mur,
/// Metical
#[serde(rename = "MZM")]
Mzm,
/// Mexican Peso
#[serde(rename = "MXN")]
Mxn,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "MXV")]
Mxv,
/// Moldovan Leu
#[serde(rename = "MDL")]
Mdl,
/// Moroccan Dirham
#[serde(rename = "MAD")]
Mad,
/// Mvdol
#[serde(rename = "BOV")]
Bov,
/// Naira
#[serde(rename = "NGN")]
Ngn,
/// Nakfa
#[serde(rename = "ERN")]
Ern,
/// Namibia Dollar
#[serde(rename = "NAD")]
Nad,
/// Nepalese Rupee
#[serde(rename = "NPR")]
Npr,
/// Netherlands Antillian Guilder
#[serde(rename = "ANG")]
Ang,
/// Netherlands Guilder
#[serde(rename = "NLG")]
Nlg,
/// New Dinar
#[serde(rename = "YUM")]
Yum,
/// New Israeli Sheqel
#[serde(rename = "ILS")]
Ils,
/// New Kwanza
#[serde(rename = "AON")]
Aon,
/// New Taiwan Dollar
#[serde(rename = "TWD")]
Twd,
/// New Zaire
#[serde(rename = "ZRN")]
Zrn,
/// New Zealand Dollar
#[serde(rename = "NZD")]
Nzd,
/// Next day
#[serde(rename = "USN")]
Usn,
/// Ngultrum
#[serde(rename = "BTN")]
Btn,
/// North Korean Won
#[serde(rename = "KPW")]
Kpw,
/// Norwegian Krone
#[serde(rename = "NOK")]
Nok,
/// Nuevo Sol
#[serde(rename = "PEN")]
Pen,
/// Ouguiya
#[serde(rename = "MRO")]
Mro,
/// Pa'anga
#[serde(rename = "TOP")]
Top,
/// Pakistan Rupee
#[serde(rename = "PKR")]
Pkr,
/// Pataca
#[serde(rename = "MOP")]
Mop,
/// Peso Uruguayo
#[serde(rename = "UYU")]
Uyu,
/// Philippine Peso
#[serde(rename = "PHP")]
Php,
/// Portuguese Escudo
#[serde(rename = "PTE")]
Pte,
/// Pound Sterling
#[serde(rename = "GBP")]
Gbp,
/// Pula
#[serde(rename = "BWP")]
Bwp,
/// Qatari Rial
#[serde(rename = "QAR")]
Qar,
/// Quetzal
#[serde(rename = "GTQ")]
Gtq,
/// Rand
#[serde(rename = "ZAR")]
Zar,
/// Rial Omani
#[serde(rename = "OMR")]
Omr,
/// Riel
#[serde(rename = "KHR")]
Khr,
/// Rufiyaa
#[serde(rename = "MVR")]
Mvr,
/// Rupiah
#[serde(rename = "IDR")]
Idr,
/// Russian Ruble
#[serde(rename = "RUB")]
Rub,
/// Russian Ruble
#[serde(rename = "RUR")]
Rur,
/// Rwanda Franc
#[serde(rename = "RWF")]
Rwf,
/// SDR
#[serde(rename = "XDR")]
Xdr,
/// Same day
#[serde(rename = "USS")]
Uss,
/// Saudi Riyal
#[serde(rename = "SAR")]
Sar,
/// Schilling
#[serde(rename = "ATS")]
Ats,
/// Seychelles Rupee
#[serde(rename = "SCR")]
Scr,
/// Singapore Dollar
#[serde(rename = "SGD")]
Sgd,
/// Slovak Koruna
#[serde(rename = "SKK")]
Skk,
/// Solomon Islands Dollar
#[serde(rename = "SBD")]
Sbd,
/// Som
#[serde(rename = "KGS")]
Kgs,
/// Somali Shilling
#[serde(rename = "SOS")]
Sos,
/// Spanish Peseta
#[serde(rename = "ESP")]
Esp,
/// Sri Lanka Rupee
#[serde(rename = "LKR")]
Lkr,
/// St Helena Pound
#[serde(rename = "SHP")]
Shp,
/// Sucre
#[serde(rename = "ECS")]
Ecs,
/// Sudanese Dinar
#[serde(rename = "SDD")]
Sdd,
/// Surinam Guilder
#[serde(rename = "SRG")]
Srg,
/// Swedish Krona
#[serde(rename = "SEK")]
Sek,
/// Swiss Franc
#[serde(rename = "CHF")]
Chf,
/// Syrian Pound
#[serde(rename = "SYP")]
Syp,
/// Tajik Ruble
#[serde(rename = "TJR")]
Tjr,
/// Taka
#[serde(rename = "BDT")]
Bdt,
/// Tala
#[serde(rename = "WST")]
Wst,
/// Tanzanian Shilling
#[serde(rename = "TZS")]
Tzs,
/// Tenge
#[serde(rename = "KZT")]
Kzt,
/// Timor Escudo
#[serde(rename = "TPE")]
Tpe,
/// Tolar
#[serde(rename = "SIT")]
Sit,
/// Trinidad and Tobago Dollar
#[serde(rename = "TTD")]
Ttd,
/// Tugrik
#[serde(rename = "MNT")]
Mnt,
/// Tunisian Dinar
#[serde(rename = "TND")]
Tnd,
/// Turkish Lira
#[serde(rename = "TRL")]
Trl,
/// UAE Dirham
#[serde(rename = "AED")]
Aed,
/// US Dollar
#[serde(rename = "USD")]
Usd,
/// Uganda Shilling
#[serde(rename = "UGX")]
Ugx,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "ECV")]
Ecv,
/// Unidades de fomento
#[serde(rename = "CLF")]
Clf,
/// Uzbekistan Sum
#[serde(rename = "UZS")]
Uzs,
/// Vatu
#[serde(rename = "VUV")]
Vuv,
/// Won
#[serde(rename = "KRW")]
Krw,
/// Yemeni Rial
#[serde(rename = "YER")]
Yer,
/// Yen
#[serde(rename = "JPY")]
Jpy,
/// Yuan Renminbi
#[serde(rename = "CNY")]
Cny,
/// Zimbabwe Dollar
#[serde(rename = "ZWD")]
Zwd,
/// Zloty
#[serde(rename = "PLN")]
Pln,
/// financial Rand
#[serde(rename = "ZAL")]
Zal,
/// Afghani
#[serde(rename = "004")]
N004,
/// Algerian Dinar
#[serde(rename = "01")]
N01,
/// Andorran Peseta
#[serde(rename = "020")]
N020,
/// Argentine Peso
#[serde(rename = "032")]
N032,
/// Armenian Dram
#[serde(rename = "051")]
N051,
/// Aruban Guilder
#[serde(rename = "533")]
N533,
/// Australian Dollar
#[serde(rename = "036")]
N036,
/// Azerbaijanian Manat
#[serde(rename = "031")]
N031,
/// Bahamian Dollar
#[serde(rename = "044")]
N044,
/// Bahraini Dinar
#[serde(rename = "048")]
N048,
/// Baht
#[serde(rename = "764")]
N764,
/// Balboa
#[serde(rename = "590")]
N590,
/// Barbados Dollar
#[serde(rename = "052")]
N052,
/// Belarussian Ruble
#[serde(rename = "112")]
N112,
/// Belgian Franc
#[serde(rename = "056")]
N056,
/// Belize Dollar
#[serde(rename = "084")]
N084,
/// Bermudian Dollar
#[serde(rename = "060")]
N060,
/// Bolivar
#[serde(rename = "862")]
N862,
/// Boliviano
#[serde(rename = "068")]
N068,
/// Brazilian Real
#[serde(rename = "986")]
N986,
/// Brunei Dollar
#[serde(rename = "096")]
N096,
/// Burundi Franc
#[serde(rename = "108")]
N108,
/// CFA Franc BCEAO+
#[serde(rename = "952")]
N952,
/// CFA Franc BEAC#
#[serde(rename = "950")]
N950,
/// CFP Franc
#[serde(rename = "953")]
N953,
/// Canadian Dollar
#[serde(rename = "124")]
N124,
/// Cape Verde Escudo
#[serde(rename = "132")]
N132,
/// Cayman Islands Dollar
#[serde(rename = "136")]
N136,
/// Cedi
#[serde(rename = "288")]
N288,
/// Chilean Peso
#[serde(rename = "152")]
N152,
/// Colombian Peso
#[serde(rename = "170")]
N170,
/// Comoro Franc
#[serde(rename = "174")]
N174,
/// Convertible Marks
#[serde(rename = "977")]
N977,
/// Cordoba Oro
#[serde(rename = "558")]
N558,
/// Costa Rican Colon
#[serde(rename = "188")]
N188,
/// Cuban Peso
#[serde(rename = "192")]
N192,
/// Cyprus Pound
#[serde(rename = "196")]
N196,
/// Czech Koruna
#[serde(rename = "203")]
N203,
/// Dalasi
#[serde(rename = "270")]
N270,
/// Danish Krone
#[serde(rename = "208")]
N208,
/// Denar
#[serde(rename = "807")]
N807,
/// Deutsche Mark
#[serde(rename = "280")]
N280,
/// Djibouti Franc
#[serde(rename = "262")]
N262,
/// Dobra
#[serde(rename = "678")]
N678,
/// Dominican Peso
#[serde(rename = "214")]
N214,
/// Dong
#[serde(rename = "704")]
N704,
/// Drachma
#[serde(rename = "300")]
N300,
/// East Caribbean Dollar
#[serde(rename = "951")]
N951,
/// Egyptian Pound
#[serde(rename = "818")]
N818,
/// El Salvador Colon
#[serde(rename = "222")]
N222,
/// Ethiopian Birr
#[serde(rename = "230")]
N230,
/// Euro
#[serde(rename = "978")]
N978,
/// Falkland Islands Pound
#[serde(rename = "238")]
N238,
/// Fiji Dollar
#[serde(rename = "242")]
N242,
/// Forint
#[serde(rename = "348")]
N348,
/// Franc Congolais
#[serde(rename = "976")]
N976,
/// French Franc
#[serde(rename = "250")]
N250,
/// Gibraltar Pound
#[serde(rename = "292")]
N292,
/// Gourde
#[serde(rename = "332")]
N332,
/// Guarani
#[serde(rename = "600")]
N600,
/// Guinea Franc
#[serde(rename = "324")]
N324,
/// Guinea-Bissau Peso
#[serde(rename = "624")]
N624,
/// Guyana Dollar
#[serde(rename = "328")]
N328,
/// Hong Kong Dollar
#[serde(rename = "344")]
N344,
/// Hryvnia
#[serde(rename = "980")]
N980,
/// Iceland Krona
#[serde(rename = "352")]
N352,
/// Indian Rupee
#[serde(rename = "356")]
N356,
/// Iranian Rial
#[serde(rename = "364")]
N364,
/// Iraqi Dinar
#[serde(rename = "368")]
N368,
/// Irish Pound
#[serde(rename = "372")]
N372,
/// Italian Lira
#[serde(rename = "380")]
N380,
/// Jamaican Dollar
#[serde(rename = "388")]
N388,
/// Jordanian Dinar
#[serde(rename = "400")]
N400,
/// Kenyan Shilling
#[serde(rename = "404")]
N404,
/// Kina
#[serde(rename = "598")]
N598,
/// Kip
#[serde(rename = "418")]
N418,
/// Kroon
#[serde(rename = "233")]
N233,
/// Kuna
#[serde(rename = "191")]
N191,
/// Kuwaiti Dinar
#[serde(rename = "414")]
N414,
/// Kwacha
#[serde(rename = "454")]
N454,
/// Kwacha
#[serde(rename = "894")]
N894,
/// Kwanza Reajustado
#[serde(rename = "982")]
N982,
/// Kyat
#[serde(rename = "104")]
N104,
/// Lari
#[serde(rename = "981")]
N981,
/// Latvian Lats
#[serde(rename = "428")]
N428,
/// Lebanese Pound
#[serde(rename = "422")]
N422,
/// Lek
#[serde(rename = "008")]
N008,
/// Lempira
#[serde(rename = "340")]
N340,
/// Leone
#[serde(rename = "694")]
N694,
/// Leu
#[serde(rename = "642")]
N642,
/// Lev
#[serde(rename = "100")]
N100,
/// Liberian Dollar
#[serde(rename = "430")]
N430,
/// Libyan Dinar
#[serde(rename = "434")]
N434,
/// Lilangeni
#[serde(rename = "748")]
N748,
/// Lithuanian Litas
#[serde(rename = "440")]
N440,
/// Loti
#[serde(rename = "426")]
N426,
/// Luxembourg Franc
#[serde(rename = "442")]
N442,
/// Malagasy Franc
#[serde(rename = "450")]
N450,
/// Malaysian Ringgit
#[serde(rename = "458")]
N458,
/// Maltese Lira
#[serde(rename = "470")]
N470,
/// Manat
#[serde(rename = "795")]
N795,
/// Markka
#[serde(rename = "246")]
N246,
/// Mauritius Rupee
#[serde(rename = "480")]
N480,
/// Metical
#[serde(rename = "508")]
N508,
/// Mexican Peso
#[serde(rename = "484")]
N484,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "979")]
N979,
/// Moldovan Leu
#[serde(rename = "498")]
N498,
/// Moroccan Dirham
#[serde(rename = "504")]
N504,
/// Mvdol
#[serde(rename = "984")]
N984,
/// Naira
#[serde(rename = "566")]
N566,
/// Nakfa
#[serde(rename = "232")]
N232,
/// Namibia Dollar
#[serde(rename = "516")]
N516,
/// Nepalese Rupee
#[serde(rename = "524")]
N524,
/// Netherlands Antillian Guilder
#[serde(rename = "532")]
N532,
/// Netherlands Guilder
#[serde(rename = "528")]
N528,
/// New Dinar
#[serde(rename = "891")]
N891,
/// New Israeli Sheqel
#[serde(rename = "376")]
N376,
/// New Kwanza
#[serde(rename = "02")]
N02,
/// New Taiwan Dollar
#[serde(rename = "901")]
N901,
/// New Zaire
#[serde(rename = "180")]
N180,
/// New Zealand Dollar
#[serde(rename = "554")]
N554,
/// Next day
#[serde(rename = "997")]
N997,
/// Ngultrum
#[serde(rename = "064")]
N064,
/// North Korean Won
#[serde(rename = "408")]
N408,
/// Norwegian Krone
#[serde(rename = "578")]
N578,
/// Nuevo Sol
#[serde(rename = "604")]
N604,
/// Ouguiya
#[serde(rename = "478")]
N478,
/// Pa'anga
#[serde(rename = "776")]
N776,
/// Pakistan Rupee
#[serde(rename = "586")]
N586,
/// Pataca
#[serde(rename = "446")]
N446,
/// Peso Uruguayo
#[serde(rename = "858")]
N858,
/// Philippine Peso
#[serde(rename = "608")]
N608,
/// Portuguese Escudo
#[serde(rename = "620")]
N620,
/// Pound Sterling
#[serde(rename = "826")]
N826,
/// Pula
#[serde(rename = "072")]
N072,
/// Qatari Rial
#[serde(rename = "634")]
N634,
/// Quetzal
#[serde(rename = "320")]
N320,
/// Rand
#[serde(rename = "710")]
N710,
/// Rial Omani
#[serde(rename = "512")]
N512,
/// Riel
#[serde(rename = "116")]
N116,
/// Rufiyaa
#[serde(rename = "462")]
N462,
/// Rupiah
#[serde(rename = "360")]
N360,
/// Russian Ruble
#[serde(rename = "643")]
N643,
/// Russian Ruble
#[serde(rename = "810")]
N810,
/// Rwanda Franc
#[serde(rename = "646")]
N646,
/// SDR
#[serde(rename = "960")]
N960,
/// Same day
#[serde(rename = "998")]
N998,
/// Saudi Riyal
#[serde(rename = "682")]
N682,
/// Schilling
#[serde(rename = "040")]
N040,
/// Seychelles Rupee
#[serde(rename = "690")]
N690,
/// Singapore Dollar
#[serde(rename = "702")]
N702,
/// Slovak Koruna
#[serde(rename = "703")]
N703,
/// Solomon Islands Dollar
#[serde(rename = "090")]
N090,
/// Som
#[serde(rename = "417")]
N417,
/// Somali Shilling
#[serde(rename = "706")]
N706,
/// Spanish Peseta
#[serde(rename = "724")]
N724,
/// Sri Lanka Rupee
#[serde(rename = "144")]
N144,
/// St Helena Pound
#[serde(rename = "654")]
N654,
/// Sucre
#[serde(rename = "218")]
N218,
/// Sudanese Dinar
#[serde(rename = "736")]
N736,
/// Surinam Guilder
#[serde(rename = "740")]
N740,
/// Swedish Krona
#[serde(rename = "752")]
N752,
/// Swiss Franc
#[serde(rename = "756")]
N756,
/// Syrian Pound
#[serde(rename = "760")]
N760,
/// Tajik Ruble
#[serde(rename = "762")]
N762,
/// Taka
#[serde(rename = "050")]
N050,
/// Tala
#[serde(rename = "882")]
N882,
/// Tanzanian Shilling
#[serde(rename = "834")]
N834,
/// Tenge
#[serde(rename = "398")]
N398,
/// Timor Escudo
#[serde(rename = "626")]
N626,
/// Tolar
#[serde(rename = "705")]
N705,
/// Trinidad and Tobago Dollar
#[serde(rename = "780")]
N780,
/// Tugrik
#[serde(rename = "496")]
N496,
/// Tunisian Dinar
#[serde(rename = "788")]
N788,
/// Turkish Lira
#[serde(rename = "792")]
N792,
/// UAE Dirham
#[serde(rename = "784")]
N784,
/// US Dollar
#[serde(rename = "840")]
N840,
/// Uganda Shilling
#[serde(rename = "800")]
N800,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "983")]
N983,
/// Unidades de fomento
#[serde(rename = "990")]
N990,
/// Uzbekistan Sum
#[serde(rename = "860")]
N860,
/// Vatu
#[serde(rename = "548")]
N548,
/// Won
#[serde(rename = "410")]
N410,
/// Yemeni Rial
#[serde(rename = "886")]
N886,
/// Yen
#[serde(rename = "392")]
N392,
/// Yuan Renminbi
#[serde(rename = "156")]
N156,
/// Zimbabwe Dollar
#[serde(rename = "716")]
N716,
/// Zloty
#[serde(rename = "985")]
N985,
/// financial Rand
#[serde(rename = "991")]
N991,
/// Gold
#[serde(rename = "XAU")]
Xau,
/// European Composite Unit (EURCO)
#[serde(rename = "XBA")]
Xba,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "XBB")]
Xbb,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "XBC")]
Xbc,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "XBD")]
Xbd,
/// Palladium
#[serde(rename = "XPD")]
Xpd,
/// Platinum
#[serde(rename = "XPT")]
Xpt,
/// Silver
#[serde(rename = "XAG")]
Xag,
/// UIC-Franc
#[serde(rename = "XFU")]
Xfu,
/// Gold-Franc
#[serde(rename = "XFO")]
Xfo,
/// Codes specifically reserved for testing purposes
#[serde(rename = "XTS")]
Xts,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "XXX")]
Xxx,
/// Gold
#[serde(rename = "959")]
N959,
/// European Composite Unit (EURCO)
#[serde(rename = "955")]
N955,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "956")]
N956,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "957")]
N957,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "958")]
N958,
/// Palladium
#[serde(rename = "964")]
N964,
/// Platinum
#[serde(rename = "962")]
N962,
/// Silver
#[serde(rename = "961")]
N961,
/// Codes specifically reserved for testing purposes
#[serde(rename = "963")]
N963,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "999")]
N999,
}
impl Default for MiscFeeCurr {
fn default() -> Self {
MiscFeeCurr::Afa
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum MiscFeeType {
/// Regulatory (e.g. SEC)
#[serde(rename = "1")]
Regulatory,
/// Tax
#[serde(rename = "2")]
Tax,
/// Local Commission
#[serde(rename = "3")]
LocalCommission,
/// Exchange Fees
#[serde(rename = "4")]
ExchangeFees,
/// Stamp
#[serde(rename = "5")]
Stamp,
/// Levy
#[serde(rename = "6")]
Levy,
/// Other
#[serde(rename = "7")]
Other,
/// Markup
#[serde(rename = "8")]
Markup,
/// Consumption Tax
#[serde(rename = "9")]
ConsumptionTax,
/// Per transaction
#[serde(rename = "10")]
PerTransaction,
/// Conversion
#[serde(rename = "11")]
Conversion,
/// Agent
#[serde(rename = "12")]
Agent,
/// Transfer Fee
#[serde(rename = "13")]
TransferFee,
/// Security Lending
#[serde(rename = "14")]
SecurityLending,
}
impl Default for MiscFeeType {
fn default() -> Self {
MiscFeeType::Regulatory
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum MiscFeeBasis {
/// Absolute
#[serde(rename = "0")]
Absolute,
/// Per Unit
#[serde(rename = "1")]
PerUnit,
/// Percentage
#[serde(rename = "2")]
Percentage,
}
impl Default for MiscFeeBasis {
fn default() -> Self {
MiscFeeBasis::Absolute
}
}
| 19.888852 | 258 | 0.611004 |
dbb928b2677f692fa8d71aea2e6ee1276c8728b0
| 10,887 |
//! Use this library to open a path or URL using the program configured on the system.
//!
//! # Usage
//!
//! Open the given URL in the default web browser.
//!
//! ```no_run
//! open::that("http://rust-lang.org").unwrap();
//! ```
//!
//! Alternatively, specify the program to be used to open the path or URL.
//!
//! ```no_run
//! open::with("http://rust-lang.org", "firefox").unwrap();
//! ```
//!
//! # Notes
//!
//! As an operating system program is used, the open operation can fail.
//! Therefore, you are advised to at least check the result and behave
//! accordingly, e.g. by letting the user know that the open operation failed.
//!
//! ```no_run
//! let path = "http://rust-lang.org";
//!
//! match open::that(path) {
//! Ok(()) => println!("Opened '{}' successfully.", path),
//! Err(err) => eprintln!("An error occurred when opening '{}': {}", path, err),
//! }
//! ```
#[cfg(target_os = "windows")]
use windows as os;
#[cfg(target_os = "macos")]
use macos as os;
#[cfg(target_os = "ios")]
use ios as os;
#[cfg(any(
target_os = "linux",
target_os = "android",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "netbsd",
target_os = "openbsd",
target_os = "solaris"
))]
use unix as os;
#[cfg(not(any(
target_os = "linux",
target_os = "android",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "netbsd",
target_os = "openbsd",
target_os = "solaris",
target_os = "ios",
target_os = "macos",
target_os = "windows",
)))]
compile_error!("open is not supported on this platform");
use std::{
ffi::OsStr,
io,
process::{Command, Output, Stdio},
thread,
};
type Result = io::Result<()>;
/// Open path with the default application.
///
/// # Examples
///
/// ```no_run
/// let path = "http://rust-lang.org";
///
/// match open::that(path) {
/// Ok(()) => println!("Opened '{}' successfully.", path),
/// Err(err) => panic!("An error occurred when opening '{}': {}", path, err),
/// }
/// ```
///
/// # Errors
///
/// A [`std::io::Error`] is returned on failure. Because different operating systems
/// handle errors differently, it is recommended not to match on a certain error.
pub fn that<T: AsRef<OsStr> + Sized>(path: T) -> Result {
os::that(path)
}
/// Open path with the given application.
///
/// # Examples
///
/// ```no_run
/// let path = "http://rust-lang.org";
/// let app = "firefox";
///
/// match open::with(path, app) {
/// Ok(()) => println!("Opened '{}' successfully.", path),
/// Err(err) => panic!("An error occurred when opening '{}': {}", path, err),
/// }
/// ```
///
/// # Errors
///
/// A [`std::io::Error`] is returned on failure. Because different operating systems
/// handle errors differently, it is recommended not to match on a certain error.
pub fn with<T: AsRef<OsStr> + Sized>(path: T, app: impl Into<String>) -> Result {
os::with(path, app)
}
/// Open path with the default application in a new thread.
///
/// See documentation of [`that`] for more details.
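///
/// # Examples
///
/// ```no_run
/// // A minimal sketch; the URL is an arbitrary example value.
/// let handle = open::that_in_background("http://rust-lang.org");
/// // Join the spawned thread whenever the result is needed.
/// let result = handle.join().expect("thread panicked");
/// ```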
pub fn that_in_background<T: AsRef<OsStr> + Sized>(path: T) -> thread::JoinHandle<Result> {
let path = path.as_ref().to_os_string();
thread::spawn(|| that(path))
}
/// Open path with the given application in a new thread.
///
/// See documentation of [`with`] for more details.
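///
/// # Examples
///
/// ```no_run
/// // A minimal sketch; "firefox" is an arbitrary example application name.
/// let handle = open::with_in_background("http://rust-lang.org", "firefox");
/// let result = handle.join().expect("thread panicked");
/// ```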
pub fn with_in_background<T: AsRef<OsStr> + Sized>(
path: T,
app: impl Into<String>,
) -> thread::JoinHandle<Result> {
let path = path.as_ref().to_os_string();
let app = app.into();
thread::spawn(|| with(path, app))
}
trait IntoResult<T> {
fn into_result(self) -> T;
}
impl IntoResult<Result> for io::Result<Output> {
fn into_result(self) -> Result {
match self {
Ok(o) if o.status.success() => Ok(()),
Ok(o) => Err(from_output(o)),
Err(err) => Err(err),
}
}
}
#[cfg(windows)]
impl IntoResult<Result> for winapi::ctypes::c_int {
fn into_result(self) -> Result {
match self {
i if i > 32 => Ok(()),
_ => Err(io::Error::last_os_error()),
}
}
}
fn from_output(output: Output) -> io::Error {
let error_msg = match output.stderr.is_empty() {
true => output.status.to_string(),
false => format!(
"{} ({})",
String::from_utf8_lossy(&output.stderr).trim(),
output.status
),
};
io::Error::new(io::ErrorKind::Other, error_msg)
}
trait CommandExt {
fn output_stderr(&mut self) -> io::Result<Output>;
}
impl CommandExt for Command {
fn output_stderr(&mut self) -> io::Result<Output> {
let mut process = self
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::piped())
.spawn()?;
// Consume all stderr - it's open just for a few programs which can't handle it being closed.
use std::io::Read;
let mut stderr = vec![0; 256];
let mut stderr_src = process.stderr.take().expect("piped stderr");
let len = stderr_src.read(&mut stderr).unwrap_or(0);
stderr.truncate(len);
// consume the rest to avoid blocking
std::io::copy(&mut stderr_src, &mut std::io::sink()).ok();
let status = process.wait()?;
Ok(Output {
status,
stderr,
stdout: vec![],
})
}
}
#[cfg(windows)]
mod windows {
use std::{ffi::OsStr, io, os::windows::ffi::OsStrExt, ptr};
use winapi::ctypes::c_int;
use winapi::um::shellapi::ShellExecuteW;
use crate::{IntoResult, Result};
fn convert_path(path: &OsStr) -> io::Result<Vec<u16>> {
let mut maybe_result: Vec<_> = path.encode_wide().collect();
if maybe_result.iter().any(|&u| u == 0) {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"path contains NUL byte(s)",
));
}
maybe_result.push(0);
Ok(maybe_result)
}
pub fn that<T: AsRef<OsStr> + Sized>(path: T) -> Result {
const SW_SHOW: c_int = 5;
let path = convert_path(path.as_ref())?;
let operation: Vec<u16> = OsStr::new("open\0").encode_wide().collect();
let result = unsafe {
ShellExecuteW(
ptr::null_mut(),
operation.as_ptr(),
path.as_ptr(),
ptr::null(),
ptr::null(),
SW_SHOW,
)
};
(result as c_int).into_result()
}
pub fn with<T: AsRef<OsStr> + Sized>(path: T, app: impl Into<String>) -> Result {
const SW_SHOW: c_int = 5;
let path = convert_path(path.as_ref())?;
let operation: Vec<u16> = OsStr::new("open\0").encode_wide().collect();
let app_name: Vec<u16> = OsStr::new(&format!("{}\0", app.into()))
.encode_wide()
.collect();
let result = unsafe {
ShellExecuteW(
ptr::null_mut(),
operation.as_ptr(),
app_name.as_ptr(),
path.as_ptr(),
ptr::null(),
SW_SHOW,
)
};
(result as c_int).into_result()
}
}
#[cfg(target_os = "macos")]
mod macos {
use std::{ffi::OsStr, process::Command};
use crate::{CommandExt, IntoResult, Result};
pub fn that<T: AsRef<OsStr> + Sized>(path: T) -> Result {
Command::new("/usr/bin/open")
.arg(path.as_ref())
.output_stderr()
.into_result()
}
pub fn with<T: AsRef<OsStr> + Sized>(path: T, app: impl Into<String>) -> Result {
Command::new("/usr/bin/open")
.arg(path.as_ref())
.arg("-a")
.arg(app.into())
.output_stderr()
.into_result()
}
}
#[cfg(target_os = "ios")]
mod ios {
use std::{ffi::OsStr, process::Command};
use crate::{CommandExt, IntoResult, Result};
pub fn that<T: AsRef<OsStr> + Sized>(path: T) -> Result {
Command::new("uiopen")
.arg("--url")
.arg(path.as_ref())
.output_stderr()
.into_result()
}
pub fn with<T: AsRef<OsStr> + Sized>(path: T, app: impl Into<String>) -> Result {
Command::new("uiopen")
.arg("--url")
.arg(path.as_ref())
.arg("--bundleid")
.arg(app.into())
.output_stderr()
.into_result()
}
}
#[cfg(any(
target_os = "linux",
target_os = "android",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "netbsd",
target_os = "openbsd",
target_os = "solaris"
))]
mod unix {
use std::{
env,
ffi::{OsStr, OsString},
path::{Path, PathBuf},
process::Command,
};
use crate::{CommandExt, IntoResult, Result};
pub fn that<T: AsRef<OsStr> + Sized>(path: T) -> Result {
let path = path.as_ref();
let open_handlers = [
("xdg-open", &[path] as &[_]),
("gio", &[OsStr::new("open"), path]),
("gnome-open", &[path]),
("kde-open", &[path]),
("wslview", &[&wsl_path(path)]),
];
let mut unsuccessful = None;
let mut io_error = None;
for (command, args) in &open_handlers {
let result = Command::new(command).args(*args).output_stderr();
match result {
Ok(o) if o.status.success() => return Ok(()),
Ok(o) => unsuccessful = unsuccessful.or_else(|| Some(crate::from_output(o))),
Err(err) => io_error = io_error.or(Some(err)),
}
}
Err(unsuccessful
.or(io_error)
.expect("successful cases don't get here"))
}
pub fn with<T: AsRef<OsStr> + Sized>(path: T, app: impl Into<String>) -> Result {
Command::new(app.into())
.arg(path.as_ref())
.output_stderr()
.into_result()
}
// Polyfill to workaround absolute path bug in wslu(wslview). In versions before
// v3.1.1, wslview is unable to find absolute paths. `wsl_path` converts an
// absolute path into a relative path starting from the current directory. If
// the path is already a relative path or the conversion fails the original path
// is returned.
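    // For illustration (hypothetical values): with a current directory of
    // `/home/user`, an absolute input such as `/home/user/docs/report.txt`
    // becomes `docs/report.txt`, while a relative input like `notes.txt`
    // is returned unchanged.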
fn wsl_path<T: AsRef<OsStr>>(path: T) -> OsString {
fn path_relative_to_current_dir<T: AsRef<OsStr>>(path: T) -> Option<PathBuf> {
let path = Path::new(&path);
if path.is_relative() {
return None;
}
let base = env::current_dir().ok()?;
pathdiff::diff_paths(path, base)
}
match path_relative_to_current_dir(&path) {
None => OsString::from(&path),
Some(relative) => OsString::from(relative),
}
}
}
| 27.70229 | 101 | 0.542941 |
e25fe4cfc8dfad40f579dcb364ab4abcf311da37
| 1,481 |
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::BASE1 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
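// A hypothetical usage sketch (illustration only): `periph.base1` stands in for
// whatever peripheral instance owns this BASE1 register in the generated crate.
//
//     periph.base1.write(|w| unsafe { w.bits(0x0000_0001) });
//     periph.base1.modify(|r, w| unsafe { w.bits(r.bits() | 0x2) });
//     let current = periph.base1.read().bits();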
| 22.784615 | 59 | 0.495611 |
036397966c7e7a9701a3ffc2a4bcf362c0272133
| 2,724 |
use std::fs;
use std::io::Write;
struct BmpPixel {
pub r: u8,
pub g: u8,
pub b: u8,
}
pub struct BmpImage {
width: u32,
height: u32,
data: Vec<BmpPixel>
}
impl BmpImage {
pub fn new(width: u32, height: u32) -> BmpImage {
let mut data = Vec::with_capacity((width * height) as usize);
for _ in 0..width*height {
data.push(BmpPixel{r:0, g:0, b:0});
}
BmpImage {
width: width,
height: height,
data: data
}
}
pub fn write_to_file(&self, file_path: String) {
        let size_of_file_header: u32 = 14;
        let size_of_bitmap_header: u32 = 40;
        let size_of_header: u32 = size_of_file_header + size_of_bitmap_header;
        // Each pixel row must be padded to a multiple of 4 bytes.
        let size_of_padding: u32 = (4 - (self.width*3) % 4) % 4;
        let filesize = size_of_header + self.height * (3*self.width + size_of_padding);
        let mut stream = Vec::with_capacity(filesize as usize);
        // File header: "BM" signature followed by the total file size (little-endian).
        stream.push('B' as u8);
        stream.push('M' as u8);
        stream.push(((filesize >> 0) & 0xFF) as u8);
        stream.push(((filesize >> 8) & 0xFF) as u8);
        stream.push(((filesize >> 16) & 0xFF) as u8);
        stream.push(((filesize >> 24) & 0xFF) as u8);
        // Four reserved bytes.
        stream.push(0);
        stream.push(0);
        stream.push(0);
        stream.push(0);
        // Offset from the start of the file to the pixel data.
        stream.push(size_of_header as u8);
        stream.push(0);
        stream.push(0);
        stream.push(0);
        // BITMAPINFOHEADER: header size, then image width and height (little-endian).
        stream.push(size_of_bitmap_header as u8);
        stream.push(0);
        stream.push(0);
        stream.push(0);
        stream.push(((self.width >> 0) & 0xFF) as u8);
        stream.push(((self.width >> 8) & 0xFF) as u8);
        stream.push(((self.width >> 16) & 0xFF) as u8);
        stream.push(((self.width >> 24) & 0xFF) as u8);
        stream.push(((self.height >> 0) & 0xFF) as u8);
        stream.push(((self.height >> 8) & 0xFF) as u8);
        stream.push(((self.height >> 16) & 0xFF) as u8);
        stream.push(((self.height >> 24) & 0xFF) as u8);
        // One color plane, 24 bits per pixel.
        stream.push(1);
        stream.push(0);
        stream.push(24);
        stream.push(0);
        // The remaining header fields (compression, image size, resolution, palette)
        // stay zero, which is valid for an uncompressed 24-bit bitmap.
        while size_of_header as usize > stream.len() {
            stream.push(0);
        }
for row in 0..self.height {
for col in 0..self.width {
let ind = (row*self.width + col) as usize;
let p = &self.data[ind];
stream.push(p.b);
stream.push(p.g);
stream.push(p.r);
}
for _ in 0..size_of_padding {
stream.push(0);
}
}
let mut file = fs::File::create(file_path).unwrap();
        file.write_all(&stream).unwrap();
}
pub fn set_pixel(&mut self, row:u32, col:u32, r:f64, g:f64, b:f64) {
// row is inverted
self.data[((self.height-1-row)*self.width+col) as usize] = BmpPixel{
r: BmpImage::float_to_byte(r),
g: BmpImage::float_to_byte(g),
b: BmpImage::float_to_byte(b)
};
}
pub fn float_to_byte(value: f64) -> u8 {
if value >= 1.0 {
return 255 as u8;
} else if value <= 0.0 {
return 0 as u8;
} else {
return (value * 255 as f64) as u8;
}
}
}
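// Illustrative usage sketch (hypothetical output path):
// let mut img = BmpImage::new(64, 64);
// img.set_pixel(0, 0, 1.0, 0.0, 0.0); // paint the top-left pixel red
// img.write_to_file(String::from("out.bmp"));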
| 24.321429 | 81 | 0.616373 |
33fb5bb813045502a0592c2b6f923617ae2e364b
| 1,913 |
use std::env;
use super::{Context, Module, RootModuleConfig, SegmentConfig};
use crate::configs::nix_shell::NixShellConfig;
// IN_NIX_SHELL should be "pure" or "impure" but lorri uses "1" for "impure"
// https://github.com/target/lorri/issues/140
/// Creates a module showing if inside a nix-shell
///
/// The module will use the `$IN_NIX_SHELL` and `$name` environment variables to
/// determine whether it is inside a nix-shell and, if so, its name.
///
/// The following options are available:
/// - use_name (bool) // print the name of the nix-shell
/// - impure_msg (string) // change the impure msg
/// - pure_msg (string) // change the pure msg
///
/// Will display the following:
/// - name (pure) // use_name == true in a pure nix-shell
/// - name (impure) // use_name == true in an impure nix-shell
/// - pure // use_name == false in a pure nix-shell
/// - impure // use_name == false in an impure nix-shell
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
let mut module = context.new_module("nix_shell");
let config: NixShellConfig = NixShellConfig::try_load(module.config);
module.set_style(config.style);
let shell_type = env::var("IN_NIX_SHELL").ok()?;
let shell_type_segment: SegmentConfig = match shell_type.as_ref() {
"1" | "impure" => config.impure_msg,
"pure" => config.pure_msg,
_ => {
return None;
}
};
if config.use_name {
if let Ok(name) = env::var("name") {
module.create_segment(
"nix_shell",
&shell_type_segment.with_value(&format!("{} ({})", name, shell_type_segment.value)),
);
} else {
module.create_segment("nix_shell", &shell_type_segment);
}
} else {
module.create_segment("nix_shell", &shell_type_segment);
}
Some(module)
}
| 34.781818 | 100 | 0.608991 |
abf5d92c793e2e1936abab1ac3d0abbc2aed2697
| 10,492 |
use super::QueueItem;
use crate::rla;
use crate::rla::ci::{self, CiPlatform};
use regex::bytes::Regex;
use std::collections::VecDeque;
use std::path::PathBuf;
use std::str;
static REPO: &str = "rust-lang/rust";
// We keep track of the last several unique job IDs. This is because
// Azure sends us a notification for every individual builder's
// state (around 70 notifications/job as of this time), but we want
// to only process a given job once.
//
// You might ask -- why is this not a HashSet/HashMap? That would
// also work, but be a little more complicated to remove things
// from. We would need to keep track of order somehow to remove the
// oldest job ID. An attempt at such an API was tried in PR #29, but
// ultimately scrapped as too complicated.
//
// We keep few enough elements in this "set" that a Vec isn't too bad.
//
// Note: Don't raise this number too high, as we do an O(n) scan through it on every
// notification from GitHub (twice).
const KEEP_IDS: usize = 16;
pub struct Worker {
debug_post: Option<(String, u32)>,
index_file: PathBuf,
index: rla::Index,
extract_config: rla::extract::Config,
github: rla::github::Client,
queue: crossbeam::channel::Receiver<QueueItem>,
seen: VecDeque<u64>,
ci: Box<dyn CiPlatform + Send>,
}
impl Worker {
pub fn new(
index_file: PathBuf,
debug_post: Option<String>,
queue: crossbeam::channel::Receiver<QueueItem>,
ci: Box<dyn CiPlatform + Send>,
) -> rla::Result<Worker> {
let debug_post = match debug_post {
None => None,
Some(v) => {
let parts = v.splitn(2, '#').collect::<Vec<_>>();
if parts.len() != 2 {
bail!("Invalid debug-post argument: '{}'", v);
}
let n = parts[1].parse()?;
Some((parts[0].to_owned(), n))
}
};
Ok(Worker {
debug_post,
index: rla::Index::load(&index_file)?,
index_file,
extract_config: Default::default(),
github: rla::github::Client::new()?,
seen: VecDeque::new(),
queue,
ci,
})
}
pub fn main(&mut self) -> rla::Result<()> {
loop {
let item = self.queue.recv()?;
match self.process(item) {
Ok(()) => (),
Err(e) => error!("Processing queue item failed: {}", e),
}
}
}
fn process(&mut self, item: QueueItem) -> rla::Result<()> {
let build_id = match item {
QueueItem::GitHubStatus(ev) => match self.ci.build_id_from_github_status(&ev) {
Some(id) if ev.repository.full_name == REPO => id,
_ => {
info!(
"Ignoring invalid event (ctx: {:?}, url: {:?}).",
ev.context, ev.target_url
);
return Ok(());
}
},
QueueItem::GitHubCheckRun(ev) => match self.ci.build_id_from_github_check(&ev) {
Some(id) if ev.repository.full_name == REPO => id,
_ => {
info!(
"Ignoring invalid event (app id: {:?}, url: {:?}).",
ev.check_run.app.id, ev.check_run.details_url
);
return Ok(());
}
},
};
info!("Processing build #{}...", build_id);
if self.seen.contains(&build_id) {
info!("Ignore recently seen build id");
return Ok(());
}
self.seen.push_front(build_id);
if self.seen.len() > KEEP_IDS {
self.seen.pop_back();
}
let build = self.ci.query_build(build_id)?;
if !build.outcome().is_finished() {
info!("Ignoring in-progress build.");
if let Some(idx) = self.seen.iter().position(|id| *id == build_id) {
// Remove ignored builds, as we haven't reported anything for them and the
// in-progress status might be misleading (e.g., leading edge of a group of
// notifications).
self.seen.remove(idx);
}
return Ok(());
}
if !build.outcome().is_passed() {
self.report_failed(build.as_ref())?;
}
if build.pr_number().is_none() && build.branch_name() == "auto" {
self.learn(build.as_ref())?;
}
Ok(())
}
fn report_failed(&mut self, build: &dyn rla::ci::Build) -> rla::Result<()> {
debug!("Preparing report...");
let job = match build.jobs().iter().find(|j| j.outcome().is_failed()) {
Some(job) => *job,
None => bail!("No failed job found, cannot report."),
};
let log = match ci::download_log(job, self.github.internal()) {
Some(res) => res?,
None => bail!("No log for failed job"),
};
let lines = rla::sanitize::split_lines(&log)
.iter()
.map(|l| rla::index::Sanitized(rla::sanitize::clean(l)))
.collect::<Vec<_>>();
let blocks = rla::extract::extract(&self.extract_config, &self.index, &lines);
let blocks = blocks
.iter()
.map(|block| {
block
.iter()
.map(|line| String::from_utf8_lossy(&line.0).into_owned())
.collect::<Vec<_>>()
.join("\n")
})
.collect::<Vec<_>>();
let extracted = blocks.join("\n---\n");
let commit_info = self
.github
.query_commit("rust-lang/rust", &build.commit_sha())?;
let commit_message = commit_info.commit.message;
let (pr, is_bors) = if let Some(pr) = build.pr_number() {
(pr, false)
} else {
static BORS_MERGE_PREFIX: &str = "Auto merge of #";
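            // Illustrative (hypothetical message): "Auto merge of #12345 - user:branch, r=reviewer"
            // parses to pr = 12345.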
if commit_message.starts_with(BORS_MERGE_PREFIX) {
let s = &commit_message[BORS_MERGE_PREFIX.len()..];
(
s[..s.find(' ').ok_or_else(|| {
format_err!("Invalid bors commit message: '{}'", commit_message)
})?]
.parse()?,
true,
)
} else {
bail!("Could not determine PR number, cannot report.");
}
};
if !is_bors {
let pr_info = self.github.query_pr("rust-lang/rust", pr)?;
if !commit_message.starts_with("Merge ") {
bail!(
"Did not recognize commit {} with message '{}', skipping report.",
build.commit_sha(),
commit_message
);
}
let sha = commit_message.split(' ').nth(1).ok_or_else(|| {
format_err!(
"Did not recognize commit {} with message '{}', skipping report.",
build.commit_sha(),
commit_message
)
})?;
debug!("Extracted head commit sha: '{}'", sha);
if pr_info.head.sha != sha {
info!("Build results outdated, skipping report.");
return Ok(());
}
}
let (repo, pr) = match self.debug_post {
Some((ref repo, pr_override)) => {
warn!(
"Would post to 'rust-lang/rust#{}', debug override to '{}#{}'",
pr, repo, pr_override
);
(repo.as_ref(), pr_override)
}
None => ("rust-lang/rust", pr),
};
let opening = match extract_job_name(&lines) {
Some(job_name) => format!("The job `{}` of your PR", job_name),
None => "Your PR".to_owned(),
};
let log_url = job.log_url().unwrap_or("unknown".into());
let log_url = format!(
"https://rust-lang.github.io/rust-log-analyzer/log-viewer/#{}",
log_url
);
self.github.post_comment(repo, pr, &format!(r#"
{opening} [failed]({html_url}) ([raw log]({log_url})). Through arcane magic we have determined that the following fragments from the build log may contain information about the problem.
<details><summary><i>Click to expand the log.</i></summary>
```plain
{log}
```
</details><p></p>
[I'm a bot](https://github.com/rust-ops/rust-log-analyzer)! I can only do what humans tell me to, so if this was not helpful or you have suggestions for improvements, please ping or otherwise contact **`@TimNN`**. ([Feature Requests](https://github.com/rust-ops/rust-log-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request))
"#, opening = opening, html_url = job.html_url(), log_url = log_url, log = extracted))?;
Ok(())
}
fn learn(&mut self, build: &dyn rla::ci::Build) -> rla::Result<()> {
for job in &build.jobs() {
if !job.outcome().is_passed() {
continue;
}
debug!("Processing {}...", job);
match ci::download_log(*job, self.github.internal()) {
Some(Ok(log)) => {
for line in rla::sanitize::split_lines(&log) {
self.index
.learn(&rla::index::Sanitized(rla::sanitize::clean(line)), 1);
}
}
None => {
warn!(
"Failed to learn from successful {}, download failed; no log",
job
);
}
Some(Err(e)) => {
warn!(
"Failed to learn from successful {}, download failed: {}",
job, e
);
}
}
}
self.index.save(&self.index_file)?;
Ok(())
}
}
fn extract_job_name<I: rla::index::IndexData>(lines: &[I]) -> Option<&str> {
lazy_static! {
static ref JOB_NAME_PATTERN: Regex = Regex::new("\\[CI_JOB_NAME=([^\\]]+)\\]").unwrap();
}
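    // Illustrative (hypothetical job name): a sanitized line containing
    // "[CI_JOB_NAME=x86_64-gnu]" yields Some("x86_64-gnu").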
for line in lines {
if let Some(m) = JOB_NAME_PATTERN.captures(line.sanitized()) {
return str::from_utf8(m.get(1).unwrap().as_bytes()).ok();
}
}
None
}
| 33.954693 | 335 | 0.491231 |
29ba2205cdaa19e15e9f137538f0b8fd46319761
| 727 |
// Copyright 2017 Serde Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[macro_use]
extern crate serde_derive;
#[derive(Deserialize)]
enum Enum {
#[serde(deserialize_with = "deserialize_some_other_variant")]
//~^^^ ERROR: variant `Struct` cannot have both #[serde(deserialize_with)] and a field `f1` marked with #[serde(skip_deserializing)]
Struct {
#[serde(skip_deserializing)]
f1: String,
f2: u8,
},
}
fn main() {}
| 30.291667 | 136 | 0.691884 |
eb2a2923b95e57f48c51a3263d3a7fa671e62ac3
| 812 |
use ggez::graphics::Vector2;
/// *********************************************************************
/// Basic stuff, make some helpers for vector functions.
/// ggez includes the nalgebra math library to provide lots of
/// math stuff. We just add some helpers.
/// **********************************************************************
/// Create a unit vector representing the
/// given angle (in radians)
pub fn vec_from_angle(angle: f32) -> Vector2 {
let vx = angle.sin();
let vy = angle.cos();
Vector2::new(vx, vy)
}
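// Illustrative: vec_from_angle(0.0) yields Vector2::new(0.0, 1.0), i.e. the angle is
// measured from the positive y axis.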
/// Just makes a random `Vector2` with the given max magnitude.
pub fn random_vec(max_magnitude: f32) -> Vector2 {
let angle = rand::random::<f32>() * 2.0 * std::f32::consts::PI;
let mag = rand::random::<f32>() * max_magnitude;
vec_from_angle(angle) * (mag)
}
| 35.304348 | 74 | 0.546798 |
5b43bd5e1b6831abdd50a57424af09b4c85153b5
| 19,184 |
#[doc = "Register `MR` reader"]
pub struct R(crate::R<MR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<MR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<MR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<MR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `MR` writer"]
pub struct W(crate::W<MR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<MR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<MR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<MR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "CLKA, CLKB Divide Factor\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum DIVA_A {
#[doc = "0: CLKA, CLKB clock is turned off"]
CLK_OFF = 0,
#[doc = "1: CLKA, CLKB clock is clock selected by PREA, PREB"]
CLK_DIV1 = 1,
}
impl From<DIVA_A> for u8 {
#[inline(always)]
fn from(variant: DIVA_A) -> Self {
variant as _
}
}
#[doc = "Field `DIVA` reader - CLKA, CLKB Divide Factor"]
pub struct DIVA_R(crate::FieldReader<u8, DIVA_A>);
impl DIVA_R {
pub(crate) fn new(bits: u8) -> Self {
DIVA_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<DIVA_A> {
match self.bits {
0 => Some(DIVA_A::CLK_OFF),
1 => Some(DIVA_A::CLK_DIV1),
_ => None,
}
}
#[doc = "Checks if the value of the field is `CLK_OFF`"]
#[inline(always)]
pub fn is_clk_off(&self) -> bool {
**self == DIVA_A::CLK_OFF
}
#[doc = "Checks if the value of the field is `CLK_DIV1`"]
#[inline(always)]
pub fn is_clk_div1(&self) -> bool {
**self == DIVA_A::CLK_DIV1
}
}
impl core::ops::Deref for DIVA_R {
type Target = crate::FieldReader<u8, DIVA_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DIVA` writer - CLKA, CLKB Divide Factor"]
pub struct DIVA_W<'a> {
w: &'a mut W,
}
impl<'a> DIVA_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: DIVA_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "CLKA, CLKB clock is turned off"]
#[inline(always)]
pub fn clk_off(self) -> &'a mut W {
self.variant(DIVA_A::CLK_OFF)
}
#[doc = "CLKA, CLKB clock is clock selected by PREA, PREB"]
#[inline(always)]
pub fn clk_div1(self) -> &'a mut W {
self.variant(DIVA_A::CLK_DIV1)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0xff) | (value as u32 & 0xff);
self.w
}
}
#[doc = "\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum PREA_A {
#[doc = "0: Master Clock"]
MCK = 0,
#[doc = "1: Master Clock divided by 2"]
MCKDIV2 = 1,
#[doc = "2: Master Clock divided by 4"]
MCKDIV4 = 2,
#[doc = "3: Master Clock divided by 8"]
MCKDIV8 = 3,
#[doc = "4: Master Clock divided by 16"]
MCKDIV16 = 4,
#[doc = "5: Master Clock divided by 32"]
MCKDIV32 = 5,
#[doc = "6: Master Clock divided by 64"]
MCKDIV64 = 6,
#[doc = "7: Master Clock divided by 128"]
MCKDIV128 = 7,
#[doc = "8: Master Clock divided by 256"]
MCKDIV256 = 8,
#[doc = "9: Master Clock divided by 512"]
MCKDIV512 = 9,
#[doc = "10: Master Clock divided by 1024"]
MCKDIV1024 = 10,
}
impl From<PREA_A> for u8 {
#[inline(always)]
fn from(variant: PREA_A) -> Self {
variant as _
}
}
#[doc = "Field `PREA` reader - "]
pub struct PREA_R(crate::FieldReader<u8, PREA_A>);
impl PREA_R {
pub(crate) fn new(bits: u8) -> Self {
PREA_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<PREA_A> {
match self.bits {
0 => Some(PREA_A::MCK),
1 => Some(PREA_A::MCKDIV2),
2 => Some(PREA_A::MCKDIV4),
3 => Some(PREA_A::MCKDIV8),
4 => Some(PREA_A::MCKDIV16),
5 => Some(PREA_A::MCKDIV32),
6 => Some(PREA_A::MCKDIV64),
7 => Some(PREA_A::MCKDIV128),
8 => Some(PREA_A::MCKDIV256),
9 => Some(PREA_A::MCKDIV512),
10 => Some(PREA_A::MCKDIV1024),
_ => None,
}
}
#[doc = "Checks if the value of the field is `MCK`"]
#[inline(always)]
pub fn is_mck(&self) -> bool {
**self == PREA_A::MCK
}
#[doc = "Checks if the value of the field is `MCKDIV2`"]
#[inline(always)]
pub fn is_mckdiv2(&self) -> bool {
**self == PREA_A::MCKDIV2
}
#[doc = "Checks if the value of the field is `MCKDIV4`"]
#[inline(always)]
pub fn is_mckdiv4(&self) -> bool {
**self == PREA_A::MCKDIV4
}
#[doc = "Checks if the value of the field is `MCKDIV8`"]
#[inline(always)]
pub fn is_mckdiv8(&self) -> bool {
**self == PREA_A::MCKDIV8
}
#[doc = "Checks if the value of the field is `MCKDIV16`"]
#[inline(always)]
pub fn is_mckdiv16(&self) -> bool {
**self == PREA_A::MCKDIV16
}
#[doc = "Checks if the value of the field is `MCKDIV32`"]
#[inline(always)]
pub fn is_mckdiv32(&self) -> bool {
**self == PREA_A::MCKDIV32
}
#[doc = "Checks if the value of the field is `MCKDIV64`"]
#[inline(always)]
pub fn is_mckdiv64(&self) -> bool {
**self == PREA_A::MCKDIV64
}
#[doc = "Checks if the value of the field is `MCKDIV128`"]
#[inline(always)]
pub fn is_mckdiv128(&self) -> bool {
**self == PREA_A::MCKDIV128
}
#[doc = "Checks if the value of the field is `MCKDIV256`"]
#[inline(always)]
pub fn is_mckdiv256(&self) -> bool {
**self == PREA_A::MCKDIV256
}
#[doc = "Checks if the value of the field is `MCKDIV512`"]
#[inline(always)]
pub fn is_mckdiv512(&self) -> bool {
**self == PREA_A::MCKDIV512
}
#[doc = "Checks if the value of the field is `MCKDIV1024`"]
#[inline(always)]
pub fn is_mckdiv1024(&self) -> bool {
**self == PREA_A::MCKDIV1024
}
}
impl core::ops::Deref for PREA_R {
type Target = crate::FieldReader<u8, PREA_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `PREA` writer - "]
pub struct PREA_W<'a> {
w: &'a mut W,
}
impl<'a> PREA_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: PREA_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "Master Clock"]
#[inline(always)]
pub fn mck(self) -> &'a mut W {
self.variant(PREA_A::MCK)
}
#[doc = "Master Clock divided by 2"]
#[inline(always)]
pub fn mckdiv2(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV2)
}
#[doc = "Master Clock divided by 4"]
#[inline(always)]
pub fn mckdiv4(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV4)
}
#[doc = "Master Clock divided by 8"]
#[inline(always)]
pub fn mckdiv8(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV8)
}
#[doc = "Master Clock divided by 16"]
#[inline(always)]
pub fn mckdiv16(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV16)
}
#[doc = "Master Clock divided by 32"]
#[inline(always)]
pub fn mckdiv32(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV32)
}
#[doc = "Master Clock divided by 64"]
#[inline(always)]
pub fn mckdiv64(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV64)
}
#[doc = "Master Clock divided by 128"]
#[inline(always)]
pub fn mckdiv128(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV128)
}
#[doc = "Master Clock divided by 256"]
#[inline(always)]
pub fn mckdiv256(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV256)
}
#[doc = "Master Clock divided by 512"]
#[inline(always)]
pub fn mckdiv512(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV512)
}
#[doc = "Master Clock divided by 1024"]
#[inline(always)]
pub fn mckdiv1024(self) -> &'a mut W {
self.variant(PREA_A::MCKDIV1024)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 8)) | ((value as u32 & 0x0f) << 8);
self.w
}
}
#[doc = "CLKA, CLKB Divide Factor\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum DIVB_A {
#[doc = "0: CLKA, CLKB clock is turned off"]
CLK_OFF = 0,
#[doc = "1: CLKA, CLKB clock is clock selected by PREA, PREB"]
CLK_DIV1 = 1,
}
impl From<DIVB_A> for u8 {
#[inline(always)]
fn from(variant: DIVB_A) -> Self {
variant as _
}
}
#[doc = "Field `DIVB` reader - CLKA, CLKB Divide Factor"]
pub struct DIVB_R(crate::FieldReader<u8, DIVB_A>);
impl DIVB_R {
pub(crate) fn new(bits: u8) -> Self {
DIVB_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<DIVB_A> {
match self.bits {
0 => Some(DIVB_A::CLK_OFF),
1 => Some(DIVB_A::CLK_DIV1),
_ => None,
}
}
#[doc = "Checks if the value of the field is `CLK_OFF`"]
#[inline(always)]
pub fn is_clk_off(&self) -> bool {
**self == DIVB_A::CLK_OFF
}
#[doc = "Checks if the value of the field is `CLK_DIV1`"]
#[inline(always)]
pub fn is_clk_div1(&self) -> bool {
**self == DIVB_A::CLK_DIV1
}
}
impl core::ops::Deref for DIVB_R {
type Target = crate::FieldReader<u8, DIVB_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DIVB` writer - CLKA, CLKB Divide Factor"]
pub struct DIVB_W<'a> {
w: &'a mut W,
}
impl<'a> DIVB_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: DIVB_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "CLKA, CLKB clock is turned off"]
#[inline(always)]
pub fn clk_off(self) -> &'a mut W {
self.variant(DIVB_A::CLK_OFF)
}
#[doc = "CLKA, CLKB clock is clock selected by PREA, PREB"]
#[inline(always)]
pub fn clk_div1(self) -> &'a mut W {
self.variant(DIVB_A::CLK_DIV1)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xff << 16)) | ((value as u32 & 0xff) << 16);
self.w
}
}
#[doc = "\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum PREB_A {
#[doc = "0: Master Clock"]
MCK = 0,
#[doc = "1: Master Clock divided by 2"]
MCKDIV2 = 1,
#[doc = "2: Master Clock divided by 4"]
MCKDIV4 = 2,
#[doc = "3: Master Clock divided by 8"]
MCKDIV8 = 3,
#[doc = "4: Master Clock divided by 16"]
MCKDIV16 = 4,
#[doc = "5: Master Clock divided by 32"]
MCKDIV32 = 5,
#[doc = "6: Master Clock divided by 64"]
MCKDIV64 = 6,
#[doc = "7: Master Clock divided by 128"]
MCKDIV128 = 7,
#[doc = "8: Master Clock divided by 256"]
MCKDIV256 = 8,
#[doc = "9: Master Clock divided by 512"]
MCKDIV512 = 9,
#[doc = "10: Master Clock divided by 1024"]
MCKDIV1024 = 10,
}
impl From<PREB_A> for u8 {
#[inline(always)]
fn from(variant: PREB_A) -> Self {
variant as _
}
}
#[doc = "Field `PREB` reader - "]
pub struct PREB_R(crate::FieldReader<u8, PREB_A>);
impl PREB_R {
pub(crate) fn new(bits: u8) -> Self {
PREB_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<PREB_A> {
match self.bits {
0 => Some(PREB_A::MCK),
1 => Some(PREB_A::MCKDIV2),
2 => Some(PREB_A::MCKDIV4),
3 => Some(PREB_A::MCKDIV8),
4 => Some(PREB_A::MCKDIV16),
5 => Some(PREB_A::MCKDIV32),
6 => Some(PREB_A::MCKDIV64),
7 => Some(PREB_A::MCKDIV128),
8 => Some(PREB_A::MCKDIV256),
9 => Some(PREB_A::MCKDIV512),
10 => Some(PREB_A::MCKDIV1024),
_ => None,
}
}
#[doc = "Checks if the value of the field is `MCK`"]
#[inline(always)]
pub fn is_mck(&self) -> bool {
**self == PREB_A::MCK
}
#[doc = "Checks if the value of the field is `MCKDIV2`"]
#[inline(always)]
pub fn is_mckdiv2(&self) -> bool {
**self == PREB_A::MCKDIV2
}
#[doc = "Checks if the value of the field is `MCKDIV4`"]
#[inline(always)]
pub fn is_mckdiv4(&self) -> bool {
**self == PREB_A::MCKDIV4
}
#[doc = "Checks if the value of the field is `MCKDIV8`"]
#[inline(always)]
pub fn is_mckdiv8(&self) -> bool {
**self == PREB_A::MCKDIV8
}
#[doc = "Checks if the value of the field is `MCKDIV16`"]
#[inline(always)]
pub fn is_mckdiv16(&self) -> bool {
**self == PREB_A::MCKDIV16
}
#[doc = "Checks if the value of the field is `MCKDIV32`"]
#[inline(always)]
pub fn is_mckdiv32(&self) -> bool {
**self == PREB_A::MCKDIV32
}
#[doc = "Checks if the value of the field is `MCKDIV64`"]
#[inline(always)]
pub fn is_mckdiv64(&self) -> bool {
**self == PREB_A::MCKDIV64
}
#[doc = "Checks if the value of the field is `MCKDIV128`"]
#[inline(always)]
pub fn is_mckdiv128(&self) -> bool {
**self == PREB_A::MCKDIV128
}
#[doc = "Checks if the value of the field is `MCKDIV256`"]
#[inline(always)]
pub fn is_mckdiv256(&self) -> bool {
**self == PREB_A::MCKDIV256
}
#[doc = "Checks if the value of the field is `MCKDIV512`"]
#[inline(always)]
pub fn is_mckdiv512(&self) -> bool {
**self == PREB_A::MCKDIV512
}
#[doc = "Checks if the value of the field is `MCKDIV1024`"]
#[inline(always)]
pub fn is_mckdiv1024(&self) -> bool {
**self == PREB_A::MCKDIV1024
}
}
impl core::ops::Deref for PREB_R {
type Target = crate::FieldReader<u8, PREB_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `PREB` writer - "]
pub struct PREB_W<'a> {
w: &'a mut W,
}
impl<'a> PREB_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: PREB_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "Master Clock"]
#[inline(always)]
pub fn mck(self) -> &'a mut W {
self.variant(PREB_A::MCK)
}
#[doc = "Master Clock divided by 2"]
#[inline(always)]
pub fn mckdiv2(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV2)
}
#[doc = "Master Clock divided by 4"]
#[inline(always)]
pub fn mckdiv4(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV4)
}
#[doc = "Master Clock divided by 8"]
#[inline(always)]
pub fn mckdiv8(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV8)
}
#[doc = "Master Clock divided by 16"]
#[inline(always)]
pub fn mckdiv16(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV16)
}
#[doc = "Master Clock divided by 32"]
#[inline(always)]
pub fn mckdiv32(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV32)
}
#[doc = "Master Clock divided by 64"]
#[inline(always)]
pub fn mckdiv64(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV64)
}
#[doc = "Master Clock divided by 128"]
#[inline(always)]
pub fn mckdiv128(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV128)
}
#[doc = "Master Clock divided by 256"]
#[inline(always)]
pub fn mckdiv256(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV256)
}
#[doc = "Master Clock divided by 512"]
#[inline(always)]
pub fn mckdiv512(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV512)
}
#[doc = "Master Clock divided by 1024"]
#[inline(always)]
pub fn mckdiv1024(self) -> &'a mut W {
self.variant(PREB_A::MCKDIV1024)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 24)) | ((value as u32 & 0x0f) << 24);
self.w
}
}
impl R {
#[doc = "Bits 0:7 - CLKA, CLKB Divide Factor"]
#[inline(always)]
pub fn diva(&self) -> DIVA_R {
DIVA_R::new((self.bits & 0xff) as u8)
}
#[doc = "Bits 8:11"]
#[inline(always)]
pub fn prea(&self) -> PREA_R {
PREA_R::new(((self.bits >> 8) & 0x0f) as u8)
}
#[doc = "Bits 16:23 - CLKA, CLKB Divide Factor"]
#[inline(always)]
pub fn divb(&self) -> DIVB_R {
DIVB_R::new(((self.bits >> 16) & 0xff) as u8)
}
#[doc = "Bits 24:27"]
#[inline(always)]
pub fn preb(&self) -> PREB_R {
PREB_R::new(((self.bits >> 24) & 0x0f) as u8)
}
}
impl W {
#[doc = "Bits 0:7 - CLKA, CLKB Divide Factor"]
#[inline(always)]
pub fn diva(&mut self) -> DIVA_W {
DIVA_W { w: self }
}
#[doc = "Bits 8:11"]
#[inline(always)]
pub fn prea(&mut self) -> PREA_W {
PREA_W { w: self }
}
#[doc = "Bits 16:23 - CLKA, CLKB Divide Factor"]
#[inline(always)]
pub fn divb(&mut self) -> DIVB_W {
DIVB_W { w: self }
}
#[doc = "Bits 24:27"]
#[inline(always)]
pub fn preb(&mut self) -> PREB_W {
PREB_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "PWM Mode Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [mr](index.html) module"]
pub struct MR_SPEC;
impl crate::RegisterSpec for MR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [mr::R](R) reader structure"]
impl crate::Readable for MR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [mr::W](W) writer structure"]
impl crate::Writable for MR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets MR to value 0"]
impl crate::Resettable for MR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
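// Illustrative sketch (hypothetical PWM peripheral handle `pwm`), assuming the generic
// `modify` API referenced in the MR_SPEC doc above:
// pwm.mr.modify(|_, w| w.prea().mckdiv16().diva().clk_div1());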
| 29.928237 | 400 | 0.553795 |
ab96cb178441bca527db06fe03800e684f1aadf7
| 55,813 |
use std::collections::VecDeque;
use std::fmt;
use std::fs;
use std::io::{BufReader, Cursor};
use std::ops::Range;
use std::path::Path;
use std::sync::mpsc::{sync_channel, Receiver};
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;
use hdf5::{types::VarLenUnicode, File};
use lewton::inside_ogg::OggStreamReader;
use ndarray::prelude::*;
use ndarray::Slice;
use ogg::reading::PacketReader as OggPacketReader;
use rand::prelude::{IteratorRandom, SliceRandom};
use rand::Rng;
use rayon::prelude::*;
use realfft::num_traits::Zero;
use serde::Deserialize;
use thiserror::Error;
use crate::{augmentations::*, transforms::*, util::*, Complex32, DFState};
type Result<T> = std::result::Result<T, DfDatasetError>;
#[derive(Error, Debug)]
pub enum DfDatasetError {
#[error("Dataloading Timeout")]
TimeoutError,
#[error("No Hdf5 datasets found")]
NoDatasetFoundError,
#[error("No Hdf5 dataset type found")]
Hdf5DsTypeNotFoundError,
#[error("{codec:?} codec not supported for file {file:?}")]
CodecNotSupportedError { codec: Codec, file: String },
#[error("Channels not initialized. Have you already called start_epoch()?")]
ChannelsNotInitializedError,
#[error(
"Dataset {split} size ({dataset_size}) smaller than batch size ({batch_size}). Try increasing the dataset sampling factor or decreasing the batch size."
)]
DatasetTooSmall {
split: Split,
dataset_size: usize,
batch_size: usize,
},
#[error("Unsupported during PCM decode: {0}")]
PcmUnspportedDimension(usize),
#[error("Wav Reader Error")]
WarReadError(#[from] crate::wav_utils::WavUtilsError),
#[error("Input Range ({range:?}) larger than dataset size ({size:?})")]
PcmRangeToLarge {
range: Range<usize>,
size: Vec<usize>,
},
#[error("Data Processing Error: {0:?}")]
DataProcessingError(String),
#[error("Multithreading Send Error: {0:?}")]
SendError(String),
#[error("DF Transforms Error")]
TransformError(#[from] crate::transforms::TransformError),
#[error("DF Augmentation Error")]
AugmentationError(#[from] crate::augmentations::AugmentationError),
#[error("DF Utils Error")]
UtilsError(#[from] crate::util::UtilsError),
#[error("Ndarray Shape Error")]
NdarrayShapeError(#[from] ndarray::ShapeError),
#[error("Hdf5 Error")]
Hdf5Error(#[from] hdf5::Error),
#[error("Hdf5 Error Detail")]
Hdf5ErrorDetail { source: hdf5::Error, msg: String },
#[error("IO Error")]
IoError(#[from] std::io::Error),
#[error("Json Decoding Error")]
JsonDecode(#[from] serde_json::Error),
#[error("Threadpool Builder Error")]
ThreadPoolBuildError(#[from] rayon::ThreadPoolBuildError),
#[error("Vorbis Decode Error")]
VorbisError(#[from] lewton::VorbisError),
#[error("Ogg Decode Error")]
OggReadError(#[from] ogg::reading::OggReadError),
#[error("Thread Join Error: {0:?}")]
ThreadJoinError(String),
}
type Signal = Array2<f32>;
fn one() -> f32 {
1.
}
#[derive(Deserialize, Debug, Clone)]
pub struct Hdf5Cfg(
pub String, // file name
#[serde(default = "one")] pub f32, // dataset sampling factor
#[serde(default = "Option::default")] pub Option<usize>, // fallback sampling rate
#[serde(default = "Option::default")] pub Option<usize>, // fallback max freq
);
impl Hdf5Cfg {
pub fn filename(&self) -> &str {
self.0.as_str()
}
pub fn sampling_factor(&self) -> f32 {
self.1
}
pub fn fallback_sr(&self) -> Option<usize> {
self.2
}
pub fn fallback_max_freq(&self) -> Option<usize> {
self.3
}
}
#[derive(Deserialize, Debug)]
pub struct DatasetConfig {
pub train: Vec<Hdf5Cfg>,
pub valid: Vec<Hdf5Cfg>,
pub test: Vec<Hdf5Cfg>,
}
impl DatasetConfig {
pub fn open(path: &str) -> Result<Self> {
let file = fs::File::open(path)?;
let reader = BufReader::new(file);
let cfg = serde_json::from_reader(reader)?;
Ok(cfg)
}
}
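// Illustrative config sketch (hypothetical file names). `Hdf5Cfg` is a tuple struct, so
// each entry deserializes from a JSON array of [filename, sampling_factor, ...optional
// fallbacks], e.g.:
// { "train": [["speech_train.hdf5", 1.0], ["noise_train.hdf5", 1.0]],
//   "valid": [["speech_valid.hdf5", 1.0]], "test": [["speech_test.hdf5", 1.0]] }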
pub struct Datasets<T> {
train: Arc<dyn Dataset<T> + Sync + Send>,
valid: Arc<dyn Dataset<T> + Sync + Send>,
test: Arc<dyn Dataset<T> + Sync + Send>,
}
impl<T> Datasets<T> {
pub fn new(
train: Arc<dyn Dataset<T> + Sync + Send>,
valid: Arc<dyn Dataset<T> + Sync + Send>,
test: Arc<dyn Dataset<T> + Sync + Send>,
) -> Self {
Datasets { train, valid, test }
}
fn get<S: Into<Split>>(&self, split: S) -> &Arc<dyn Dataset<T> + Sync + Send> {
match split.into() {
Split::Train => &self.train,
Split::Valid => &self.valid,
Split::Test => &self.test,
}
}
}
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum Split {
Train = 0,
Valid = 1,
Test = 2,
}
impl fmt::Display for Split {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Train => write!(f, "train"),
Self::Valid => write!(f, "valid"),
Self::Test => write!(f, "test"),
}
}
}
impl From<&str> for Split {
fn from(split: &str) -> Self {
match split {
"train" => Split::Train,
"valid" => Split::Valid,
"test" => Split::Test,
s => panic!("Split '{}' does not exist.", s),
}
}
}
pub struct DataLoader<T>
where
T: Data,
{
datasets: Datasets<T>,
batch_size_train: usize,
batch_size_eval: usize,
num_prefech: usize,
idcs: Arc<Mutex<VecDeque<usize>>>,
current_split: Split,
fill_thread: Option<thread::JoinHandle<Result<()>>>,
out_receiver: Option<Receiver<Result<Sample<T>>>>,
overfit: bool,
}
#[derive(Default)]
pub struct DataLoaderBuilder<T>
where
T: Data,
{
_ds: Option<Datasets<T>>,
_batch_size: Option<usize>,
_batch_size_eval: Option<usize>,
_prefetch: Option<usize>,
_num_threads: Option<usize>,
_overfit: Option<bool>,
}
impl<T> DataLoaderBuilder<T>
where
T: Data,
{
pub fn new(ds: Datasets<T>) -> Self {
DataLoaderBuilder::<T> {
_ds: Some(ds),
_batch_size: None,
_batch_size_eval: None,
_prefetch: None,
_num_threads: None,
_overfit: None,
}
}
pub fn batch_size(mut self, batch_size: usize) -> Self {
self._batch_size = Some(batch_size);
self
}
pub fn batch_size_eval(mut self, batch_size: usize) -> Self {
self._batch_size_eval = Some(batch_size);
self
}
pub fn prefetch(mut self, prefetch: usize) -> Self {
self._prefetch = Some(prefetch);
self
}
pub fn num_threads(mut self, num_threads: usize) -> Self {
self._num_threads = Some(num_threads);
self
}
pub fn overfit(mut self, overfit: bool) -> Self {
self._overfit = Some(overfit);
self
}
pub fn build(self) -> Result<DataLoader<T>> {
let bs = self._batch_size.unwrap_or(1);
let prefetch = self._prefetch.unwrap_or(bs * self._num_threads.unwrap_or(4) * 2);
let mut loader = DataLoader::new(
self._ds.unwrap(),
bs,
self._batch_size_eval,
prefetch,
self._num_threads,
)?;
loader.overfit = self._overfit.unwrap_or(false);
Ok(loader)
}
}
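// Illustrative usage sketch (hypothetical `datasets` value built elsewhere):
// let mut loader = DataLoader::builder(datasets).batch_size(16).num_threads(4).build()?;
// loader.start_epoch("train", 0)?;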
impl<T> DataLoader<T>
where
T: Data,
{
pub fn builder(ds: Datasets<T>) -> DataLoaderBuilder<T> {
DataLoaderBuilder::new(ds)
}
pub fn new(
datasets: Datasets<T>,
batch_size_train: usize,
batch_size_eval: Option<usize>,
num_prefech: usize,
num_threads: Option<usize>,
) -> Result<Self> {
// Register global rayon threadpool. It will only be used for data loader workers.
let mut poolbuilder = rayon::ThreadPoolBuilder::new();
if let Some(num_threads) = num_threads {
poolbuilder = poolbuilder.num_threads(num_threads)
}
match poolbuilder
.thread_name(|idx| format!("DataLoader Worker {}", idx))
.build_global()
{
Ok(()) => (),
Err(e) => {
if e.to_string() != "The global thread pool has already been initialized." {
return Err(e.into());
}
// else: already initialized, do not complain.
}
};
let batch_size_eval = batch_size_eval.unwrap_or(batch_size_train);
Ok(DataLoader {
datasets,
batch_size_train,
batch_size_eval,
num_prefech,
idcs: Arc::new(Mutex::new(VecDeque::new())),
current_split: Split::Train,
fill_thread: None,
out_receiver: None,
overfit: false,
})
}
pub fn dataset_len<S: Into<Split>>(&self, split: S) -> usize {
self.datasets.get(split).len()
}
pub fn len_of<S: Into<Split>>(&self, split: S) -> usize {
let split = split.into();
let bs = self.batch_size(&split);
self.dataset_len(split) / bs
}
pub fn batch_size(&self, split: &Split) -> usize {
if split == &Split::Train {
self.batch_size_train
} else {
self.batch_size_eval
}
}
pub fn start_idx_worker(
&mut self,
split: Split,
seed: u64,
) -> Result<thread::JoinHandle<Result<()>>> {
let bs = self.batch_size(&split);
if self.num_prefech < bs {
eprintln!(
"Warning: Prefetch size ({}) is smaller then batch size ({}).",
self.num_prefech, bs
)
}
let (out_sender, out_receiver) = sync_channel(self.num_prefech);
self.out_receiver = Some(out_receiver);
let ds = Arc::clone(self.datasets.get(split));
let idcs = self.idcs.clone();
let handle = thread::spawn(move || -> Result<()> {
idcs.lock().unwrap().par_drain(..).try_for_each_init(
|| {
seed_from_u64(seed);
},
                // TODO: This closure gets submitted to the thread pool in order. However,
                // get_sample may take different amounts of time resulting in a different return
                // order. This should be the last major thing that reduces reproducibility a little.
// To make sure, we get the samples in the correct order, we could add another
// ordering index and some kind of cache to use in get_batch().
|(), idx| -> Result<()> {
let sample = ds.get_sample(idx);
if let Err(e) = out_sender.send(sample) {
return Err(DfDatasetError::SendError(e.to_string()));
}
Ok(())
},
)?;
Ok(())
});
Ok(handle)
}
pub fn start_epoch<S: Into<Split>>(&mut self, split: S, seed: usize) -> Result<()> {
let split: Split = split.into();
        // Drop the fill thread if it exists
if self.fill_thread.is_some() {
self.join_fill_thread()?;
}
// Prepare for new epoch
self.current_split = split;
if self.batch_size(&split) > self.dataset_len(split) {
return Err(DfDatasetError::DatasetTooSmall {
split,
dataset_size: self.dataset_len(split),
batch_size: self.batch_size(&split),
});
}
seed_from_u64(seed as u64);
{
// Recreate indices to index into the dataset and shuffle them
let mut idcs = self.idcs.lock().unwrap();
if self.overfit {
println!("Overfitting on one batch.");
let bs = self.batch_size(&split);
idcs.clone_from(&(0..bs).cycle().take(self.dataset_len(split)).collect());
} else {
idcs.clone_from(&(0..self.dataset_len(split)).collect());
idcs.make_contiguous().shuffle(&mut thread_rng()?);
}
}
// Start thread to submit dataset jobs for the pool workers
self.fill_thread = Some(self.start_idx_worker(split, seed as u64)?);
Ok(())
}
pub fn get_batch<C>(&mut self) -> Result<Option<DsBatch<T>>>
where
C: Collate<T>,
{
let bs = self.batch_size(&self.current_split);
let mut samples = Vec::with_capacity(bs);
let mut i = 0;
let mut tries = 0;
        let receiver = match self.out_receiver.as_ref() {
None => {
return Err(DfDatasetError::ChannelsNotInitializedError);
}
Some(r) => r,
};
'outer: while i < bs {
            match receiver.recv_timeout(Duration::from_millis(100)) {
Err(_e) => {
let isempty = if let Ok(idcs) = self.idcs.try_lock() {
idcs.is_empty()
} else {
false
};
if isempty {
self.join_fill_thread()?;
return Ok(None);
}
if tries > 1000 {
return Err(DfDatasetError::TimeoutError);
}
tries += 1;
continue 'outer;
}
Ok(s) => samples.push(s?),
}
i += 1;
tries = 0;
}
if samples.is_empty() {
println!("No more samples.");
return Ok(None);
}
let out = C::collate(
samples.as_mut_slice(),
self.datasets.get(self.current_split).max_sample_len(),
)?;
Ok(Some(out))
}
pub fn join_fill_thread(&mut self) -> Result<()> {
// Drop out_receiver so that parallel iter in fill thread will return
drop(self.out_receiver.take());
if let Some(thread) = self.fill_thread.take() {
if let Err(e) =
thread.join().map_err(|e| DfDatasetError::ThreadJoinError(format!("{:?}", e)))?
{
match e {
DfDatasetError::SendError(_) => (),
                    // A SendError is expected here, caused by the out_receiver being dropped above
e => {
eprint!("Error during worker shutdown: {:?}", e);
return Err(e);
}
}
}
}
Ok(())
}
}
pub trait Collate<T: Data> {
fn collate(samples: &mut [Sample<T>], len: usize) -> Result<DsBatch<T>>;
}
impl Collate<f32> for f32 {
fn collate(samples: &mut [Sample<f32>], len: usize) -> Result<DsBatch<f32>> {
let lengths = samples.iter().map(|s| s.speech.len_of(Axis(1))).collect();
let speech = unpack_pad(|s: &mut Sample<f32>| &mut s.speech, samples, len)?;
let noise = unpack_pad(|s: &mut Sample<f32>| &mut s.noise, samples, len)?;
let noisy = unpack_pad(|s: &mut Sample<f32>| &mut s.noisy, samples, len)?;
let max_freq = samples.iter().map(|s| s.max_freq).collect();
let snr = samples.iter().map(|s| s.snr).collect();
let gain = samples.iter().map(|s| s.gain).collect();
let atten = samples.iter().map(|s| s.attenuation.unwrap_or(0)).collect();
Ok(DsBatch {
speech,
noise,
noisy,
feat_erb: None,
feat_spec: None,
lengths,
max_freq,
snr,
gain,
atten,
})
}
}
impl Collate<Complex32> for Complex32 {
fn collate(samples: &mut [Sample<Complex32>], len: usize) -> Result<DsBatch<Complex32>> {
let lengths = samples.iter().map(|s| s.speech.len_of(Axis(1))).collect();
let speech = unpack_pad(|s: &mut Sample<Complex32>| &mut s.speech, samples, len)?;
let noise = unpack_pad(|s: &mut Sample<Complex32>| &mut s.noise, samples, len)?;
let noisy = unpack_pad(|s: &mut Sample<Complex32>| &mut s.noisy, samples, len)?;
let feat_erb = if samples.first().unwrap().feat_erb.is_some() {
Some(unpack_pad(
|s: &mut Sample<Complex32>| s.feat_erb.as_mut().unwrap(),
samples,
len,
)?)
} else {
None
};
let feat_spec = if samples.first().unwrap().feat_spec.is_some() {
Some(unpack_pad(
|s: &mut Sample<Complex32>| s.feat_spec.as_mut().unwrap(),
samples,
len,
)?)
} else {
None
};
let max_freq = samples.iter().map(|s| s.max_freq).collect();
let snr = samples.iter().map(|s| s.snr).collect();
let gain = samples.iter().map(|s| s.gain).collect();
let atten = samples.iter().map(|s| s.attenuation.unwrap_or(0)).collect();
Ok(DsBatch {
speech,
noise,
noisy,
feat_erb,
feat_spec,
lengths,
max_freq,
snr,
gain,
atten,
})
}
}
impl<T> Drop for DataLoader<T>
where
T: Data,
{
fn drop(&mut self) {
self.join_fill_thread().unwrap(); // Stop out_receiver and join fill thread
}
}
pub struct DsBatch<T>
where
T: Data,
{
pub speech: ArrayD<T>,
pub noise: ArrayD<T>,
pub noisy: ArrayD<T>,
pub feat_erb: Option<ArrayD<f32>>,
pub feat_spec: Option<ArrayD<Complex32>>,
pub lengths: Array1<usize>,
pub max_freq: Array1<usize>,
pub snr: Vec<i8>,
pub gain: Vec<i8>,
pub atten: Vec<u8>, // attenuation limit in dB; 0 stands for no limit
}
impl<T> fmt::Debug for DsBatch<T>
where
T: Data,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_fmt(format_args!(
"Dataset Batch with batch_size: '{}, len: '{}', snrs: '{:?}', gain: '{:?}')",
self.speech.len_of(Axis(0)),
self.speech.len_of(Axis(2)),
self.snr,
self.gain
))
}
}
pub trait Data: Sized + Clone + Default + Send + Sync + Zero + 'static {}
impl Data for f32 {}
impl Data for Complex32 {}
pub enum SampleType {
TimeDomain,
FreqDomain,
}
pub struct Sample<T>
where
T: Data,
{
pub speech: ArrayD<T>,
pub noise: ArrayD<T>,
pub noisy: ArrayD<T>,
pub feat_erb: Option<ArrayD<f32>>,
pub feat_spec: Option<ArrayD<Complex32>>,
pub max_freq: usize,
pub snr: i8,
pub gain: i8,
pub attenuation: Option<u8>,
pub idx: usize,
}
impl Sample<f32> {
fn sample_type(&self) -> SampleType {
SampleType::TimeDomain
}
fn get_speech_view(&self) -> Result<ArrayView2<f32>> {
Ok(self.speech.view().into_dimensionality()?)
}
fn get_noise_view(&self) -> Result<ArrayView2<f32>> {
Ok(self.noise.view().into_dimensionality()?)
}
fn get_noisy_view(&self) -> Result<ArrayView2<f32>> {
Ok(self.noisy.view().into_dimensionality()?)
}
fn dim(&self) -> usize {
2
}
}
impl Sample<Complex32> {
fn sample_type(&self) -> SampleType {
SampleType::FreqDomain
}
fn get_speech_view(&self) -> Result<ArrayView3<Complex32>> {
Ok(self.speech.view().into_dimensionality()?)
}
fn get_noise_view(&self) -> Result<ArrayView3<Complex32>> {
Ok(self.noise.view().into_dimensionality()?)
}
fn get_noisy_view(&self) -> Result<ArrayView3<Complex32>> {
Ok(self.noisy.view().into_dimensionality()?)
}
fn dim(&self) -> usize {
3
}
}
impl<T> fmt::Debug for Sample<T>
where
T: Data,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_fmt(format_args!(
"Dataset Sample {} with len: '{}', snr: '{}', gain: '{}')",
self.idx,
self.speech.shape().last().unwrap(),
self.snr,
self.gain
))
}
}
pub trait Dataset<T>
where
T: Data,
{
fn get_sample(&self, idx: usize) -> Result<Sample<T>>;
fn sr(&self) -> usize;
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
fn max_sample_len(&self) -> usize;
fn set_seed(&mut self, seed: u64);
}
#[derive(Clone)]
pub struct DatasetBuilder<'a> {
ds_dir: &'a str,
sr: usize,
fft_size: Option<usize>,
datasets: Vec<Hdf5Cfg>,
max_len_s: Option<f32>,
hop_size: Option<usize>,
nb_erb: Option<usize>,
nb_spec: Option<usize>,
norm_alpha: Option<f32>,
p_atten_lim: Option<f32>,
p_reverb: Option<f32>,
p_fill_speech: Option<f32>,
seed: Option<u64>,
min_nb_freqs: Option<usize>,
}
impl<'a> DatasetBuilder<'a> {
pub fn new(ds_dir: &'a str, sr: usize) -> Self {
DatasetBuilder {
ds_dir,
sr,
datasets: Vec::new(),
max_len_s: None,
fft_size: None,
hop_size: None,
nb_erb: None,
nb_spec: None,
norm_alpha: None,
p_atten_lim: None,
p_reverb: None,
p_fill_speech: None,
seed: None,
min_nb_freqs: None,
}
}
pub fn build_fft_dataset(self) -> Result<FftDataset> {
if self.datasets.is_empty() {
panic!("No datasets provided")
}
let ds = self.clone().build_td_dataset()?;
if self.fft_size.is_none() {
panic!("No fft size provided when building FFT dataset.")
}
let fft_size = self.fft_size.unwrap();
let hop_size = self.hop_size.unwrap_or(fft_size / 2);
let nb_erb = self.nb_erb.unwrap_or(32);
if let Some(b) = self.nb_spec {
let nfreqs = fft_size / 2 + 1;
if b > nfreqs {
let msg = format!("Number of spectrogram bins ({}) is larger then number of available frequency bins ({})", b, nfreqs);
return Err(DfDatasetError::DataProcessingError(msg));
}
}
Ok(FftDataset {
ds,
fft_size,
hop_size,
nb_erb: Some(nb_erb),
nb_spec: self.nb_spec,
norm_alpha: self.norm_alpha,
min_nb_freqs: self.min_nb_freqs,
})
}
pub fn build_td_dataset(mut self) -> Result<TdDataset> {
if self.datasets.is_empty() {
panic!("No datasets provided")
}
let max_samples: usize = (self.max_len_s.unwrap_or(10.) * self.sr as f32).round() as usize;
let mut hdf5_handles = Vec::new();
let mut sp_keys: Vec<(usize, String)> = Vec::new();
let mut ns_keys: Vec<(usize, String)> = Vec::new();
let mut rir_keys: Vec<(usize, String)> = Vec::new();
let mut config: Vec<Hdf5Cfg> = Vec::new();
let mut i = 0;
for cfg in self.datasets.drain(..) {
let name = cfg.filename();
let path = Path::new(self.ds_dir).join(name);
if (!path.is_file())
|| match path.read_link() {
Err(_) => false,
Ok(p) => !p.is_file(),
}
{
eprintln!("Dataset {:?} not found. Skipping.", path);
continue;
}
let ds = Hdf5Dataset::new(path.to_str().unwrap())?;
let n_samples = (cfg.sampling_factor() * ds.len() as f32).round() as usize;
let keys: Vec<(usize, String)> =
ds.keys()?.iter().cycle().take(n_samples).map(|k| (i, k.clone())).collect();
match ds.dstype {
DsType::Speech => sp_keys.extend(keys),
DsType::Noise => ns_keys.extend(keys),
DsType::RIR => rir_keys.extend(keys),
}
hdf5_handles.push(ds);
config.push(cfg);
i += 1;
}
if hdf5_handles.is_empty() {
return Err(DfDatasetError::NoDatasetFoundError);
}
let snrs = vec![-5, 0, 5, 10, 20, 40];
let gains = vec![-6, 0, 6];
let attenuation_range = (6, 40);
let p_atten_lim = self.p_atten_lim.unwrap_or(0.);
let p_fill_speech = self.p_fill_speech.unwrap_or(0.);
let sp_transforms = Compose::new(vec![
Box::new(RandRemoveDc::default_with_prob(0.25)),
Box::new(RandLFilt::default_with_prob(0.25)),
Box::new(RandEQ::default_with_prob(0.25).with_sr(self.sr)),
Box::new(RandResample::default_with_prob(0.1).with_sr(self.sr)),
]);
let ns_transforms = sp_transforms.clone();
let p_reverb = self.p_reverb.unwrap_or(0.);
if p_reverb > 0. && rir_keys.is_empty() {
eprintln!("Warning: Reverb augmentation enabled but no RIRs provided!");
}
let reverb = RandReverbSim::new(p_reverb, self.sr);
let seed = self.seed.unwrap_or(0);
Ok(TdDataset {
config,
hdf5_handles,
max_samples,
sr: self.sr,
sp_keys,
ns_keys,
rir_keys,
snrs,
gains,
attenuation_range,
p_fill_speech,
p_atten_lim,
sp_transforms,
ns_transforms,
reverb,
seed,
})
}
pub fn dataset(mut self, datasets: Vec<Hdf5Cfg>) -> Self {
self.datasets.extend(datasets);
self
}
pub fn max_len(mut self, max_len_s: f32) -> Self {
self.max_len_s = Some(max_len_s);
self
}
pub fn df_params(
mut self,
fft_size: usize,
hop_size: Option<usize>,
nb_erb: Option<usize>,
nb_spec: Option<usize>,
norm_alpha: Option<f32>,
) -> Self {
self.fft_size = Some(fft_size);
self.hop_size = hop_size;
self.nb_erb = nb_erb;
self.nb_spec = nb_spec;
self.norm_alpha = norm_alpha;
self
}
pub fn prob_atten_lim(mut self, p_atten_lim: f32) -> Self {
assert!((0. ..=1.).contains(&p_atten_lim));
self.p_atten_lim = Some(p_atten_lim);
self
}
pub fn prob_reverberation(mut self, p_reverb: f32) -> Self {
assert!((0. ..=1.).contains(&p_reverb));
self.p_reverb = Some(p_reverb);
self
}
pub fn seed(mut self, seed: u64) -> Self {
self.seed = Some(seed);
self
}
pub fn p_sample_full_speech(mut self, p_full: f32) -> Self {
self.p_fill_speech = Some(p_full);
self
}
pub fn min_nb_erb_freqs(mut self, n: usize) -> Self {
self.min_nb_freqs = Some(n);
self
}
}
pub struct FftDataset {
ds: TdDataset,
fft_size: usize,
hop_size: usize,
nb_erb: Option<usize>,
nb_spec: Option<usize>,
norm_alpha: Option<f32>,
min_nb_freqs: Option<usize>,
}
impl Dataset<Complex32> for FftDataset {
fn get_sample(&self, idx: usize) -> Result<Sample<Complex32>> {
let sample: Sample<f32> = self.ds.get_sample(idx)?;
let nb_erb = self.nb_erb.unwrap_or(1);
let mut state = DFState::new(self.sr(), self.fft_size, self.hop_size, nb_erb, 1);
let speech = stft(sample.get_speech_view()?, &mut state, false);
let noise = stft(sample.get_noise_view()?, &mut state, true);
let noisy = stft(sample.get_noisy_view()?, &mut state, true);
let erb = if let Some(_b) = self.nb_erb {
let mut erb = erb(&noisy.view(), true, &state.erb)?;
if let Some(alpha) = self.norm_alpha {
erb_norm(&mut erb.view_mut(), None, alpha)?;
}
Some(erb.into_dyn())
} else {
None
};
let spec = if let Some(b) = self.nb_spec {
let mut spec = noisy.slice_axis(Axis(2), Slice::from(..b)).into_owned();
if let Some(alpha) = self.norm_alpha {
unit_norm(&mut spec.view_mut(), None, alpha)?;
}
Some(spec.into_dyn())
} else {
None
};
Ok(Sample {
speech: speech.into_dyn(),
noise: noise.into_dyn(),
noisy: noisy.into_dyn(),
feat_erb: erb,
feat_spec: spec,
max_freq: sample.max_freq,
gain: sample.gain,
snr: sample.snr,
attenuation: sample.attenuation,
idx: sample.idx,
})
}
fn len(&self) -> usize {
self.ds.sp_keys.len()
}
fn sr(&self) -> usize {
self.ds.sr
}
fn max_sample_len(&self) -> usize {
self.ds.max_samples / self.hop_size
}
fn set_seed(&mut self, seed: u64) {
self.ds.set_seed(seed)
}
}
pub struct TdDataset {
config: Vec<Hdf5Cfg>,
hdf5_handles: Vec<Hdf5Dataset>,
max_samples: usize,
sr: usize,
sp_keys: Vec<(usize, String)>,
ns_keys: Vec<(usize, String)>,
rir_keys: Vec<(usize, String)>,
snrs: Vec<i8>, // in dB; SNR to sample from
gains: Vec<i8>, // in dB; Speech (loudness) to sample from
attenuation_range: (u8, u8), // in dB; Return a target sample containing noise for attenuation limited algorithm
p_atten_lim: f32, // Probability for containing noise in target
p_fill_speech: f32, // Probability to completely fill the speech signal to `max_samples` with a different speech sample
sp_transforms: Compose, // Transforms to augment speech samples
ns_transforms: Compose, // Transforms to augment noise samples
reverb: RandReverbSim, // Separate reverb transform that may be applied to both speech and noise
seed: u64,
}
impl TdDataset {
fn _read_from_hdf5(
&self,
key: &str,
idx: usize,
max_len: Option<usize>,
) -> Result<Array2<f32>> {
let h = &self.hdf5_handles[idx];
let sr = h.sr.unwrap_or_else(|| self.config[idx].fallback_sr().unwrap_or(self.sr));
let slc = if let Some(l) = max_len {
let l_sr = l * sr / self.sr;
let sample_len = h.sample_len(key)?;
let max_len = sample_len.min(l_sr);
let s = sample_len as i64 - max_len as i64;
if s > 0 {
let s = thread_rng()?.gen_range(0..(s as usize));
Some(s..s + l_sr)
} else {
None
}
} else {
None
};
let mut x = if let Some(slc) = slc {
h.read_slc(key, slc)?
} else {
h.read(key)?
};
if sr != self.sr {
x = resample(&x, sr, self.sr, None)?;
if let Some(l) = max_len {
if x.len_of(Axis(1)) > l {
x.slice_axis_inplace(Axis(1), Slice::from(0..l))
}
}
return Ok(x);
}
Ok(x)
}
fn read(&self, idx: usize, key: &str) -> Result<Array2<f32>> {
let x = self._read_from_hdf5(key, idx, None)?;
Ok(x)
}
fn read_max_len(&self, idx: usize, key: &str) -> Result<Array2<f32>> {
let x = match self._read_from_hdf5(key, idx, Some(self.max_samples)) {
Err(e) => {
eprintln!("Error during speech reading get_data(): {:?}", e);
if e.to_string().contains("inflate") {
// Get a different speech then
let idx = thread_rng()?.gen_range(0..self.len());
let (sp_idx, sp_key) = &self.sp_keys[idx];
eprintln!(
"Returning a different speech sample from {}",
self.ds_name(*sp_idx)
);
self.read_max_len(*sp_idx, sp_key)?
} else {
return Err(e);
}
}
Ok(s) => s,
};
debug_assert!(x.len_of(Axis(1)) <= self.max_samples);
Ok(x)
}
fn max_freq(&self, idx: usize) -> Result<usize> {
let ds = &self.hdf5_handles[idx];
let max_freq = match ds.max_freq {
Some(x) if x > 0 => x,
_ => self.config[idx].fallback_max_freq().unwrap_or_else(|| {
ds.sr.unwrap_or_else(|| self.config[idx].fallback_sr().unwrap_or(self.sr)) / 2
}),
};
Ok(max_freq)
}
fn ds_name(&self, idx: usize) -> String {
self.hdf5_handles[idx].name()
}
}
impl Dataset<f32> for TdDataset {
fn get_sample(&self, idx: usize) -> Result<Sample<f32>> {
seed_from_u64(idx as u64 + self.seed);
let mut rng = thread_rng()?;
let (sp_idx, sp_key) = &self.sp_keys[idx];
let mut speech = self.read_max_len(*sp_idx, sp_key)?;
self.sp_transforms.transform(&mut speech)?;
let mut max_freq = self.max_freq(*sp_idx)?;
while speech.len_of(Axis(1)) < self.max_sample_len()
&& self.p_fill_speech > 0.0
&& self.p_fill_speech > rng.gen_range(0f32..1f32)
{
// If too short, maybe sample another speech sample
let (sp_idx, sp_key) = &self.sp_keys.choose(&mut rng).unwrap();
let mut another_speech = self.read_max_len(*sp_idx, sp_key)?;
self.sp_transforms.transform(&mut another_speech)?;
speech.append(Axis(1), another_speech.view())?;
max_freq = max_freq.min(self.max_freq(*sp_idx)?);
}
if speech.len_of(Axis(1)) > self.max_sample_len() {
speech.slice_axis_inplace(Axis(1), Slice::from(..self.max_samples));
}
// Apply low pass to the noise as well
let noise_low_pass = if max_freq < self.sr / 2 {
Some(LpParam {
cut_off: max_freq,
sr: self.sr,
})
} else {
None
};
let mut ch = speech.len_of(Axis(0));
let mut len = speech.len_of(Axis(1));
if len > self.max_samples {
speech.slice_axis_inplace(Axis(1), Slice::from(..self.max_samples));
len = speech.len_of(Axis(1));
}
if ch > 1 {
speech.slice_axis_inplace(Axis(0), Slice::from(..1));
ch = 1;
}
// Sample 2-5 noises and augment each
let n_noises = rng.gen_range(2..6);
let ns_ids = self.ns_keys.iter().choose_multiple(&mut rng, n_noises);
let mut noises = Vec::with_capacity(n_noises);
let mut noise_gains = Vec::with_capacity(n_noises);
for (ns_idx, ns_key) in &ns_ids {
let mut ns = match self.read_max_len(*ns_idx, ns_key) {
Err(e) => {
eprintln!("Error during noise reading get_data(): {}", e);
continue;
}
Ok(n) => n,
};
if ns.len_of(Axis(1)) < 10 {
continue;
}
self.ns_transforms.transform(&mut ns)?;
if ns.len_of(Axis(1)) > self.max_samples {
ns.slice_axis_inplace(Axis(1), Slice::from(..self.max_samples));
}
noises.push(ns);
noise_gains.push(self.gains.choose(&mut rng).unwrap());
}
let noise_gains_f32: Vec<f32> = noise_gains.iter().map(|x| **x as f32).collect();
// Sample SNR and gain
let &snr = self.snrs.choose(&mut rng).unwrap();
let &gain = self.gains.choose(&mut rng).unwrap();
// Sample attenuation limiting during training
let atten = if self.p_atten_lim > 0. && self.p_atten_lim > rng.gen_range(0f32..1f32) {
Some(rng.gen_range(self.attenuation_range.0..self.attenuation_range.1))
} else {
None
};
// Truncate to speech len, combine noises and mix to noisy
let mut noise = combine_noises(ch, len, &mut noises, Some(noise_gains_f32.as_slice()))?;
// Apply reverberation using a randomly sampled RIR
let speech_rev = if !self.rir_keys.is_empty() {
self.reverb.transform(&mut speech, &mut noise, || {
let (rir_idx, rir_key) = self.rir_keys.iter().choose(&mut rng).unwrap();
let rir = self.read(*rir_idx, rir_key)?;
Ok(rir)
})?
} else {
None
};
let (speech, noise, noisy) = mix_audio_signal(
speech,
speech_rev,
noise,
snr as f32,
gain as f32,
atten.map(|a| a as f32),
noise_low_pass,
)?;
Ok(Sample {
speech: speech.into_dyn(),
noise: noise.into_dyn(),
noisy: noisy.into_dyn(),
feat_erb: None,
feat_spec: None,
max_freq,
snr,
gain,
attenuation: atten,
idx,
})
}
fn len(&self) -> usize {
self.sp_keys.len()
}
fn sr(&self) -> usize {
self.sr
}
fn max_sample_len(&self) -> usize {
self.max_samples
}
fn set_seed(&mut self, seed: u64) {
self.seed = seed
}
}
#[derive(Debug)]
pub enum DsType {
Speech = 0,
Noise = 1,
RIR = 2,
}
impl fmt::Display for DsType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
#[derive(Debug, Eq, PartialEq)]
pub enum Codec {
PCM = 0,
Vorbis = 1,
}
impl Default for &Codec {
fn default() -> Self {
&Codec::PCM
}
}
impl Default for Codec {
fn default() -> Self {
Codec::PCM
}
}
#[derive(Debug)]
pub enum DType {
I16 = 0,
F32 = 1,
}
#[derive(Debug)]
pub struct Hdf5Dataset {
file: File,
dstype: DsType,
sr: Option<usize>,
codec: Option<Codec>,
max_freq: Option<usize>,
dtype: Option<DType>,
}
fn get_dstype(file: &File) -> Option<DsType> {
for g in file.member_names().unwrap_or_default() {
match g.to_lowercase().as_str() {
"speech" => return Some(DsType::Speech),
"noise" => return Some(DsType::Noise),
"rir" => return Some(DsType::RIR),
_ => (),
};
}
None
}
impl Hdf5Dataset {
fn new(path: &str) -> Result<Self> {
let file = File::open(path).map_err(move |e: hdf5::Error| -> DfDatasetError {
DfDatasetError::Hdf5ErrorDetail {
source: e,
msg: format!("Error during File::open of dataset {}", path),
}
})?;
match get_dstype(&file) {
None => Err(DfDatasetError::Hdf5DsTypeNotFoundError),
Some(dstype) => {
let sr = match file.attr("sr") {
Err(_e) => None,
Ok(attr) => Some(attr.read_scalar::<usize>().unwrap()),
};
let max_freq = match file.attr("max_freq") {
Err(_e) => None,
Ok(attr) => Some(attr.read_scalar::<usize>().unwrap()),
};
let codec = match file.attr("codec") {
Err(_e) => None,
Ok(attr) => match attr.read_scalar::<VarLenUnicode>().unwrap().as_str() {
"pcm" => Some(Codec::PCM),
"vorbis" => Some(Codec::Vorbis),
_ => None,
},
};
let dtype = match file.attr("dtype") {
Err(_e) => None,
Ok(attr) => match attr.read_scalar::<VarLenUnicode>().unwrap().as_str() {
"float32" => Some(DType::F32),
"int16" => Some(DType::I16),
_ => None,
},
};
Ok(Hdf5Dataset {
file,
dstype,
sr,
max_freq,
codec,
dtype,
})
}
}
}
fn name(&self) -> String {
self.file.filename()
}
fn group(&self) -> Result<hdf5::Group> {
Ok(self.file.group(&self.dstype.to_string().to_lowercase())?)
}
pub fn len(&self) -> usize {
self.group().unwrap().len() as usize
}
pub fn is_empty(&self) -> bool {
self.group().unwrap().is_empty()
}
pub fn keys(&self) -> Result<Vec<String>> {
Ok(self.group()?.member_names()?)
}
pub fn attributes(&self) -> Result<Vec<String>> {
Ok(self.file.attr_names()?)
}
fn fmt_err(
&self,
fn_: &'static str,
key: &str,
) -> impl FnOnce(hdf5::Error) -> DfDatasetError + '_ {
let key = key.to_string();
let name = self.name();
move |e: hdf5::Error| -> DfDatasetError {
DfDatasetError::Hdf5ErrorDetail {
source: e,
msg: format!("Error during {} of dataset {} key {}", fn_, name, key),
}
}
}
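/// Length in samples of the entry `key`. For Vorbis entries only the last ~50 kB are parsed
/// and the final absolute granule position is used as the sample count; for PCM the last
/// shape dimension is returned.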
fn sample_len(&self, key: &str) -> Result<usize> {
let ds = self.group()?.dataset(key)?;
if *self.codec.as_ref().unwrap_or(&Codec::PCM) == Codec::Vorbis {
let s = *ds.shape().last().unwrap(); // length of raw buffer
let lastpages = ds
.read_slice_1d(s![s - 50 * 1024 / 8..])
.map_err(self.fmt_err("sample_len", key))?; // seek to last 50 kB
let mut rdr = OggPacketReader::new(Cursor::new(lastpages.as_slice().unwrap()));
// Ensure that rdr is at the start of an Ogg page
rdr.seek_absgp(None, 0).unwrap();
let mut absgp = 0;
while let Some(pkg) = rdr.read_packet()? {
absgp = pkg.absgp_page();
}
Ok(absgp as usize)
} else {
Ok(*ds.shape().last().unwrap_or(&0))
}
}
fn sample_shape(&self, key: &str) -> Result<Vec<usize>> {
let ds = self.group()?.dataset(key)?;
match *self.codec.as_ref().unwrap_or(&Codec::PCM) {
Codec::PCM => Ok(ds.shape()),
Codec::Vorbis => {
let firstpages =
ds.read_slice_1d(s![..512]).map_err(self.fmt_err("sample_shape", key))?;
let ident_hdr =
lewton::header::read_header_ident(firstpages.as_slice().unwrap()).unwrap();
Ok(vec![ident_hdr.audio_channels.into(), self.sample_len(key)?])
}
}
}
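/// Read a PCM entry as a `(1, samples)` f32 array. An optional range restricts the read;
/// multi-channel data is reduced to a single channel and i16 data is rescaled to [-1, 1].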
pub fn read_pcm(&self, key: &str, r: Option<Range<usize>>) -> Result<Array2<f32>> {
let ds = self.group()?.dataset(key)?;
let mut arr: ArrayD<f32> = if let Some(r) = r {
if r.end > *ds.shape().last().unwrap_or(&0) {
return Err(DfDatasetError::PcmRangeToLarge {
range: r,
size: ds.shape(),
});
}
match ds.ndim() {
1 => ds.read_slice(s![r]).map_err(self.fmt_err("read_pcm", key))?,
2 => ds.read_slice(s![0, r]).map_err(self.fmt_err("read_pcm", key))?, // Just take the first channel for now
n => return Err(DfDatasetError::PcmUnspportedDimension(n)),
}
} else {
ds.read_dyn::<f32>().map_err(self.fmt_err("read_pcm", key))?
};
#[allow(clippy::branches_sharing_code)]
let mut arr = if arr.ndim() == 1 {
let len = arr.len_of(Axis(0));
arr.into_shape((1, len))?
} else {
let ch = arr.len_of(Axis(0));
if ch > 1 {
let idx = thread_rng()?.gen_range(0..ch);
arr.slice_axis_inplace(Axis(0), Slice::from(idx..idx + 1));
}
arr.into_dimensionality()?
};
match self.dtype {
Some(DType::I16) => arr /= std::i16::MAX as f32,
Some(DType::F32) => (),
None => {
if ds.dtype()?.is::<i16>() {
arr /= std::i16::MAX as f32
}
}
}
Ok(arr)
}
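/// Decode a Vorbis entry to a `(1, samples)` f32 array: all packets are decoded, a single
/// channel is kept, an optional sample range is applied, and values are scaled to [-1, 1].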
pub fn read_vorbis(&self, key: &str, r: Option<Range<usize>>) -> Result<Array2<f32>> {
let ds = self.group()?.dataset(key)?;
let encoded = Cursor::new(ds.read_raw::<u8>().map_err(self.fmt_err("read_vorbis", key))?);
let mut srr = OggStreamReader::new(encoded)?;
let ch = srr.ident_hdr.audio_channels as usize;
let mut out: Vec<i16> = Vec::new();
while let Some(mut pck) = srr.read_dec_packet_itl()? {
out.append(&mut pck);
}
let mut out: Array2<i16> = Array2::from_shape_vec((out.len() / ch, ch), out)?;
if ch > 1 {
let idx = thread_rng()?.gen_range(0..ch);
out.slice_axis_inplace(Axis(1), Slice::from(idx..idx + 1));
}
debug_assert_eq!(1, out.len_of(Axis(1)));
if let Some(r) = r {
out.slice_axis_inplace(Axis(0), Slice::from(r));
}
let out = out.mapv(|x| x as f32 / std::i16::MAX as f32);
// Transpose to channels first
let out_len = out.len_of(Axis(0));
Ok(out.into_shape((1, out_len))?)
}
pub fn read(&self, key: &str) -> Result<Array2<f32>> {
match *self.codec.as_ref().unwrap_or_default() {
Codec::PCM => self.read_pcm(key, None),
Codec::Vorbis => self.read_vorbis(key, None),
}
}
pub fn read_slc(&self, key: &str, r: Range<usize>) -> Result<Array2<f32>> {
match *self.codec.as_ref().unwrap_or_default() {
Codec::PCM => self.read_pcm(key, Some(r)),
Codec::Vorbis => self.read_vorbis(key, Some(r)),
}
}
}
struct LpParam {
sr: usize,
cut_off: usize,
}
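/// Combine several noise signals into one `(ch, len)` array: each noise is repeated or cropped
/// to `len` samples, its channel count is adjusted to `ch`, and optional per-noise gains in dB
/// are applied before the noises are combined.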
fn combine_noises(
ch: usize,
len: usize,
noises: &mut [Array2<f32>],
noise_gains: Option<&[f32]>,
) -> Result<Signal> {
let mut rng = thread_rng()?;
// Adjust length of noises to clean length
for ns in noises.iter_mut() {
loop {
if len.checked_sub(ns.len_of(Axis(1))).is_some() {
// TODO: Remove this clone if ndarray supports repeat
ns.append(Axis(1), ns.clone().view())?;
} else {
break;
}
}
let too_large = ns.len_of(Axis(1)).checked_sub(len);
if let Some(too_large) = too_large {
let start: usize = rng.gen_range(0..too_large);
ns.slice_collapse(s![.., start..start + len]);
}
}
// Adjust number of noise channels to clean channels
for ns in noises.iter_mut() {
while ns.len_of(Axis(0)) > ch {
ns.remove_index(Axis(0), rng.gen_range(0..ns.len_of(Axis(0))))
}
while ns.len_of(Axis(0)) < ch {
let r = rng.gen_range(0..ns.len_of(Axis(0)));
let slc = ns.slice(s![r..r + 1, ..]).to_owned();
ns.append(Axis(0), slc.view())?;
}
}
// Apply gain to noises
if let Some(ns_gains) = noise_gains {
for (ns, &g) in noises.iter_mut().zip(ns_gains) {
*ns *= 10f32.powf(g / 20.);
}
}
// Average noises
let noise = Array2::zeros((ch, len));
let noise = noises.iter().fold(noise, |acc, x| acc + x) / ch as f32;
Ok(noise)
}
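/// Mix clean speech (or its reverberant version, if provided) with noise at `snr_db` after
/// applying `gain_db` to the speech. Optionally low-pass filters the noise via resampling,
/// adds an attenuation-limited amount of noise to the target signal, and rescales all signals
/// to avoid clipping. Returns `(clean, noise, noisy)`.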
fn mix_audio_signal(
clean: Array2<f32>,
clean_rev: Option<Array2<f32>>,
mut noise: Array2<f32>,
snr_db: f32,
gain_db: f32,
atten_db: Option<f32>,
noise_resample: Option<LpParam>,
) -> Result<(Signal, Signal, Signal)> {
let len = clean.len_of(Axis(1));
if let Some(re) = noise_resample {
// Low pass filtering via resampling
noise = low_pass_resample(&noise, re.cut_off, re.sr)?;
noise.slice_axis_inplace(Axis(1), Slice::from(..len));
}
// Apply gain to speech
let g = 10f32.powf(gain_db / 20.);
let mut clean_out = &clean * g;
let clean_mix = clean_rev.map(|c| &c * g).unwrap_or_else(|| clean_out.clone());
// For energy calculation use clean speech to also consider direct-to-reverberant ratio
let k = mix_f(clean.view(), noise.view(), snr_db);
if let Some(atten_db) = atten_db {
// Create a mixture with a higher SNR as target signal
let k_target = mix_f(clean.view(), noise.view(), snr_db + atten_db);
for (c, &n) in clean_out.iter_mut().zip(noise.iter()) {
*c += n * k_target;
}
}
// Create mixture at given SNR
noise *= k;
let mut mixture = clean_mix + &noise;
// Guard against clipping
let max = &([&clean_out, &noise, &mixture].iter().map(|x| find_max_abs(x.iter())))
.collect::<std::result::Result<Vec<f32>, crate::util::UtilsError>>()?;
let max = find_max(max)?;
if (max - 1.) > 1e-10 {
let f = 1. / (max + 1e-10);
clean_out *= f;
noise *= f;
mixture *= f;
}
Ok((clean_out, noise, mixture))
}
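/// Zero-pad the array selected by `f` from each sample along `Axis(1)` to length `len`, then
/// stack all samples along a new leading axis into a single batch array.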
fn unpack_pad<Ts, To, F>(mut f: F, samples: &mut [Sample<Ts>], len: usize) -> Result<ArrayD<To>>
where
Ts: Data,
To: Data,
F: FnMut(&mut Sample<Ts>) -> &mut ArrayD<To>,
{
let mut out: Vec<ArrayViewMutD<To>> = Vec::with_capacity(samples.len());
for sample in samples.iter_mut() {
let x: &mut ArrayD<To> = f(sample);
let missing = len.saturating_sub(x.len_of(Axis(1)));
if missing > 0 {
let mut shape: Vec<usize> = x.shape().into();
shape[1] = missing;
let tmp: ArrayD<To> = ArrayD::<To>::zeros(shape);
x.append(Axis(1), tmp.into_dimensionality()?.view())?;
}
out.push(x.view_mut());
}
let out: Vec<ArrayViewD<To>> = out.iter().map(|s| s.view()).collect();
if !out.windows(2).all(|w| w[0].shape() == w[1].shape()) {
eprintln!("Shapes do not match!");
for outs in out.iter() {
eprintln!(" shape: {:?}", outs.shape());
}
}
Ok(ndarray::stack(Axis(0), out.as_slice())?.into_dyn())
}
#[cfg(test)]
mod tests {
use std::time::Instant;
use dirs::home_dir;
use super::*;
use crate::util::seed_from_u64;
use crate::wav_utils::*;
#[test]
pub fn test_mix_audio_signal() -> Result<()> {
seed_from_u64(42);
// 2ch 10 second speech signal
let reader = ReadWav::new("../assets/clean_freesound_33711.wav")?;
let (sr, ch) = (reader.sr as u32, reader.channels as u16);
let clean = reader.samples_arr2()?;
// 1ch, shorter than clean
let noise1 = ReadWav::new("../assets/noise_freesound_573577.wav")?.samples_arr2()?;
// 2ch, longer than clean
let noise2 = ReadWav::new("../assets/noise_freesound_2530.wav")?.samples_arr2()?;
let noise = combine_noises(
ch as usize,
clean.len_of(Axis(1)),
&mut [noise1, noise2],
None,
)?;
let (clean, noise, noisy) =
mix_audio_signal(clean, None, noise, 0., 6., None, None).unwrap();
dbg!(noisy.len());
write_wav_iter("../out/clean.wav", clean.iter(), sr, ch)?;
write_wav_iter("../out/noise.wav", noise.iter(), sr, ch)?;
write_wav_iter("../out/noisy.wav", noisy.iter(), sr, ch)?;
Ok(())
}
#[test]
pub fn test_hdf5_read() -> Result<()> {
let hdf5 = Hdf5Dataset::new(
home_dir().unwrap().join("data/hdf5/EDINBURGH_56.hdf5").to_str().unwrap(),
)?;
let sr = hdf5.sr.unwrap() as u32;
let keys = hdf5.keys()?;
let signal = hdf5.read(&keys[0])?;
dbg!(signal.shape());
let max_len = 3 * sr as usize; // 3 seconds
let key = &keys[0];
let signal = hdf5.read_slc(key, 0..max_len.min(hdf5.sample_len(key)?))?;
dbg!(signal.shape());
let ch = signal.len_of(Axis(0));
write_wav_iter("../out/hdf5_signal.wav", &signal, sr, ch as u16)?;
Ok(())
}
#[test]
pub fn test_hdf5_vorbis_read() -> Result<()> {
seed_from_u64(42);
let hdf5 = Hdf5Dataset::new(
home_dir().unwrap().join("data/hdf5/OWN_NOISES_TRAIN.hdf5").to_str().unwrap(),
)?;
let sr = hdf5.sr.unwrap() as u32;
let keys = hdf5.keys()?;
let key = &keys[0];
let signal = hdf5.read(key)?;
write_wav_arr2("../out/hdf5_signal.wav", signal.view(), sr)?;
dbg!(signal.shape());
let max_len = 3 * sr as usize;
let signal = hdf5.read_slc(key, 0..max_len.min(hdf5.sample_len(key)?))?;
dbg!(signal.shape());
write_wav_arr2("../out/hdf5_signal_slc.wav", signal.view(), sr)?;
Ok(())
}
#[test]
pub fn test_data_loader() -> Result<()> {
println!("******** Start test_data_loader() ********");
seed_from_u64(42);
let batch_size = 1;
let sr = 48000;
let ds_dir = home_dir().unwrap().join("data/hdf5").to_str().unwrap().to_string();
let cfg = DatasetConfig::open("../assets/dataset.cfg")?;
let builder = DatasetBuilder::new(&ds_dir, sr);
let ds = Datasets::new(
Arc::new(builder.clone().dataset(cfg.train).build_td_dataset()?),
Arc::new(builder.clone().dataset(cfg.valid).build_td_dataset()?),
Arc::new(builder.clone().dataset(cfg.test).build_td_dataset()?),
);
let mut loader = DataLoader::builder(ds).batch_size(batch_size).build()?;
loader.start_epoch("train", 1)?;
for i in 0..10 {
let t0 = Instant::now();
let batch = loader.get_batch::<f32>()?.unwrap();
dbg!(i, &batch);
let t1 = Instant::now();
println!("test_data_loader: {:?}", t1 - t0);
write_wav_iter(
"../out/clean.wav",
&batch.speech.slice(s![0, 0, ..]),
sr as u32,
1,
)?;
write_wav_iter(
"../out/noise.wav",
&batch.noise.slice(s![0, 0, ..]),
sr as u32,
1,
)?;
write_wav_iter(
"../out/noisy.wav",
&batch.noisy.slice(s![0, 0, ..]),
sr as u32,
1,
)?;
}
loader.start_epoch("train", 2)?;
for i in 0..2 {
dbg!(i, loader.get_batch::<f32>()?);
}
println!("Dropping loader");
drop(loader);
println!("Done");
Ok(())
}
#[test]
pub fn test_fft_dataset() -> Result<()> {
println!("******** Start test_data_loader() ********");
seed_from_u64(42);
let batch_size = 2;
let fft_size = 960;
let hop_size = Some(480);
let nb_erb = Some(32);
let nb_spec = None;
let norm_alpha = None;
let sr = 48000;
let ds_dir = home_dir().unwrap().join("data/hdf5").to_str().unwrap().to_string();
let cfg = DatasetConfig::open("../assets/dataset.cfg")?;
let builder = DatasetBuilder::new(&ds_dir, sr)
.df_params(fft_size, hop_size, nb_erb, nb_spec, norm_alpha);
let ds = Datasets::new(
Arc::new(builder.clone().dataset(cfg.train).build_fft_dataset()?),
Arc::new(builder.clone().dataset(cfg.valid).build_fft_dataset()?),
Arc::new(builder.clone().dataset(cfg.test).build_fft_dataset()?),
);
let mut loader = DataLoader::builder(ds).num_threads(1).batch_size(batch_size).build()?;
loader.start_epoch("train", 1)?;
for i in 0..2 {
let batch = loader.get_batch::<Complex32>()?;
if let Some(batch) = batch {
dbg!(i, &batch, batch.feat_erb.as_ref().unwrap().shape());
}
}
Ok(())
}
}
| 33.622289 | 160 | 0.525989 |
9cbfb94509526bc3fc70828e1c3c23cb7033b60e
| 5,153 |
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use aptos_types::account_config;
use language_e2e_tests::executor::FakeExecutor;
use move_core_types::{
account_address::AccountAddress,
value::{serialize_values, MoveValue},
};
#[test]
fn test_aptos_initialize() {
let mut executor = FakeExecutor::stdlib_only_genesis();
// DR doesn't have role yet, so role check will fail
let output = executor.try_exec(
"Diem",
"initialize",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(5));
// Grant the DR role
executor.exec(
"Roles",
"grant_diem_root_role",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
// Now initialize, it should all succeed.
executor.exec(
"Diem",
"initialize",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
// The second time you try, though, you'll get an already published error with an
// EMODIFY_CAPABILITY reason.
let output = executor.try_exec(
"Diem",
"initialize",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(262));
}
#[test]
fn test_aptos_initialize_tc_account() {
let mut executor = FakeExecutor::stdlib_only_genesis();
// DR doesn't have role yet, so role check will fail
let output = executor.try_exec(
"Diem",
"initialize",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(5));
// Grant the DR role
executor.exec(
"Roles",
"grant_diem_root_role",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
// Grant the TC role
executor.exec(
"Roles",
"grant_treasury_compliance_role",
vec![],
serialize_values(&vec![
MoveValue::Signer(account_config::treasury_compliance_account_address()),
MoveValue::Signer(account_config::aptos_root_address()),
]),
);
// Try to initialize, invalid sender so role check will fail
let output = executor.try_exec(
"Diem",
"initialize",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::treasury_compliance_account_address(),
)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(2));
// Now initialize, it should all succeed.
executor.exec(
"Diem",
"initialize",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
// Trying to initialize again, this time with the TC account, still fails the sender
// role check.
let output = executor.try_exec(
"Diem",
"initialize",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::treasury_compliance_account_address(),
)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(2));
}
#[test]
fn test_diem_timestamp_time_has_started() {
let mut executor = FakeExecutor::stdlib_only_genesis();
let account_address = AccountAddress::random();
// Invalid address used to call `Timestamp::set_time_has_started`
let output = executor.try_exec(
"Timestamp",
"set_time_has_started",
vec![],
serialize_values(&vec![MoveValue::Signer(account_address)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(2));
executor.exec(
"Timestamp",
"set_time_has_started",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
let output = executor.try_exec(
"Timestamp",
"set_time_has_started",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(1));
}
#[test]
fn test_diem_block_double_init() {
let mut executor = FakeExecutor::stdlib_only_genesis();
executor.exec(
"Block",
"initialize_block_metadata",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
let output = executor.try_exec(
"Block",
"initialize_block_metadata",
vec![],
serialize_values(&vec![MoveValue::Signer(
account_config::aptos_root_address(),
)]),
);
assert_eq!(output.unwrap_err().move_abort_code(), Some(6));
}
| 26.979058 | 95 | 0.601591 |
3a5e93ba6ffdd882ce80143a951985b5aa7c4587
| 11,887 |
// Copyright 2017 Parity Technologies (UK) Ltd.
// Copyright 2020 Netwarps Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use async_trait::async_trait;
use futures::future::BoxFuture;
use futures::{
io::{IoSlice, IoSliceMut},
prelude::*,
};
use pin_project::pin_project;
use std::{io, io::Error, pin::Pin, task::Context, task::Poll};
use libp2prs_traits::{ReadEx, SplitEx, WriteEx};
use crate::identity::Keypair;
use crate::muxing::{IReadWrite, IStreamMuxer, StreamInfo, StreamMuxer, StreamMuxerEx};
use crate::secure_io::SecureInfo;
use crate::transport::{ConnectionInfo, TransportError};
use crate::upgrade::ProtocolName;
use crate::{Multiaddr, PeerId, PublicKey};
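/// Either of two inner I/O types; `AsyncRead`/`AsyncWrite` calls are delegated to whichever
/// variant is active.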
#[pin_project(project = EitherOutputProj)]
#[derive(Debug, Copy, Clone)]
pub enum AsyncEitherOutput<A, B> {
A(#[pin] A),
B(#[pin] B),
}
impl<A, B> AsyncRead for AsyncEitherOutput<A, B>
where
A: AsyncRead,
B: AsyncRead,
{
fn poll_read(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8]) -> Poll<Result<usize, Error>> {
match self.project() {
EitherOutputProj::A(a) => AsyncRead::poll_read(a, cx, buf),
EitherOutputProj::B(b) => AsyncRead::poll_read(b, cx, buf),
}
}
fn poll_read_vectored(self: Pin<&mut Self>, cx: &mut Context<'_>, bufs: &mut [IoSliceMut<'_>]) -> Poll<Result<usize, Error>> {
match self.project() {
EitherOutputProj::A(a) => AsyncRead::poll_read_vectored(a, cx, bufs),
EitherOutputProj::B(b) => AsyncRead::poll_read_vectored(b, cx, bufs),
}
}
}
impl<A, B> AsyncWrite for AsyncEitherOutput<A, B>
where
A: AsyncWrite,
B: AsyncWrite,
{
fn poll_write(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8]) -> Poll<Result<usize, Error>> {
match self.project() {
EitherOutputProj::A(a) => AsyncWrite::poll_write(a, cx, buf),
EitherOutputProj::B(b) => AsyncWrite::poll_write(b, cx, buf),
}
}
fn poll_write_vectored(self: Pin<&mut Self>, cx: &mut Context<'_>, bufs: &[IoSlice<'_>]) -> Poll<Result<usize, Error>> {
match self.project() {
EitherOutputProj::A(a) => AsyncWrite::poll_write_vectored(a, cx, bufs),
EitherOutputProj::B(b) => AsyncWrite::poll_write_vectored(b, cx, bufs),
}
}
fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Error>> {
match self.project() {
EitherOutputProj::A(a) => AsyncWrite::poll_flush(a, cx),
EitherOutputProj::B(b) => AsyncWrite::poll_flush(b, cx),
}
}
fn poll_close(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Error>> {
match self.project() {
EitherOutputProj::A(a) => AsyncWrite::poll_close(a, cx),
EitherOutputProj::B(b) => AsyncWrite::poll_close(b, cx),
}
}
}
#[derive(Debug, Copy, Clone)]
pub enum EitherOutput<A, B> {
A(A),
B(B),
}
#[async_trait]
impl<A, B> ReadEx for EitherOutput<A, B>
where
A: ReadEx,
B: ReadEx,
{
async fn read2(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match self {
EitherOutput::A(a) => ReadEx::read2(a, buf).await,
EitherOutput::B(b) => ReadEx::read2(b, buf).await,
}
}
}
#[async_trait]
impl<A, B> WriteEx for EitherOutput<A, B>
where
A: WriteEx,
B: WriteEx,
{
async fn write2(&mut self, buf: &[u8]) -> io::Result<usize> {
match self {
EitherOutput::A(a) => WriteEx::write2(a, buf).await,
EitherOutput::B(b) => WriteEx::write2(b, buf).await,
}
}
async fn flush2(&mut self) -> io::Result<()> {
match self {
EitherOutput::A(a) => WriteEx::flush2(a).await,
EitherOutput::B(b) => WriteEx::flush2(b).await,
}
}
async fn close2(&mut self) -> io::Result<()> {
match self {
EitherOutput::A(a) => WriteEx::close2(a).await,
EitherOutput::B(b) => WriteEx::close2(b).await,
}
}
}
pub enum EitherReaderWriter<A, B> {
A(A),
B(B),
}
#[async_trait]
impl<A, B> ReadEx for EitherReaderWriter<A, B>
where
A: ReadEx,
B: ReadEx,
{
async fn read2(&mut self, buf: &mut [u8]) -> Result<usize, Error> {
match self {
EitherReaderWriter::A(a) => a.read2(buf).await,
EitherReaderWriter::B(b) => b.read2(buf).await,
}
}
}
#[async_trait]
impl<A, B> WriteEx for EitherReaderWriter<A, B>
where
A: WriteEx,
B: WriteEx,
{
async fn write2(&mut self, buf: &[u8]) -> Result<usize, Error> {
match self {
EitherReaderWriter::A(a) => a.write2(buf).await,
EitherReaderWriter::B(b) => b.write2(buf).await,
}
}
async fn flush2(&mut self) -> Result<(), Error> {
match self {
EitherReaderWriter::A(a) => a.flush2().await,
EitherReaderWriter::B(b) => b.flush2().await,
}
}
async fn close2(&mut self) -> Result<(), Error> {
match self {
EitherReaderWriter::A(a) => a.close2().await,
EitherReaderWriter::B(b) => b.close2().await,
}
}
}
impl<A, B> SplitEx for EitherOutput<A, B>
where
A: SplitEx,
B: SplitEx,
{
type Reader = EitherReaderWriter<A::Reader, B::Reader>;
type Writer = EitherReaderWriter<A::Writer, B::Writer>;
fn split(self) -> (Self::Reader, Self::Writer) {
match self {
EitherOutput::A(a) => {
let (r, w) = a.split();
(EitherReaderWriter::A(r), EitherReaderWriter::A(w))
}
EitherOutput::B(b) => {
let (r, w) = b.split();
(EitherReaderWriter::B(r), EitherReaderWriter::B(w))
}
}
}
}
impl<A, B> SecureInfo for EitherOutput<A, B>
where
A: SecureInfo,
B: SecureInfo,
{
fn local_peer(&self) -> PeerId {
match self {
EitherOutput::A(a) => a.local_peer(),
EitherOutput::B(b) => b.local_peer(),
}
}
fn remote_peer(&self) -> PeerId {
match self {
EitherOutput::A(a) => a.remote_peer(),
EitherOutput::B(b) => b.remote_peer(),
}
}
fn local_priv_key(&self) -> Keypair {
match self {
EitherOutput::A(a) => a.local_priv_key(),
EitherOutput::B(b) => b.local_priv_key(),
}
}
fn remote_pub_key(&self) -> PublicKey {
match self {
EitherOutput::A(a) => a.remote_pub_key(),
EitherOutput::B(b) => b.remote_pub_key(),
}
}
}
impl<A, B> StreamInfo for EitherOutput<A, B>
where
A: StreamInfo,
B: StreamInfo,
{
fn id(&self) -> usize {
match self {
EitherOutput::A(a) => a.id(),
EitherOutput::B(b) => b.id(),
}
}
}
#[async_trait]
impl<A, B> StreamMuxer for EitherOutput<A, B>
where
A: StreamMuxer + Send,
B: StreamMuxer + Send,
{
async fn open_stream(&mut self) -> Result<IReadWrite, TransportError> {
match self {
EitherOutput::A(a) => Ok(a.open_stream().await?),
EitherOutput::B(b) => Ok(b.open_stream().await?),
}
}
async fn accept_stream(&mut self) -> Result<IReadWrite, TransportError> {
match self {
EitherOutput::A(a) => Ok(a.accept_stream().await?),
EitherOutput::B(b) => Ok(b.accept_stream().await?),
}
}
async fn close(&mut self) -> Result<(), TransportError> {
match self {
EitherOutput::A(a) => a.close().await,
EitherOutput::B(b) => b.close().await,
}
}
fn task(&mut self) -> Option<BoxFuture<'static, ()>> {
match self {
EitherOutput::A(a) => a.task(),
EitherOutput::B(b) => b.task(),
}
}
fn box_clone(&self) -> IStreamMuxer {
match self {
EitherOutput::A(a) => a.box_clone(),
EitherOutput::B(b) => b.box_clone(),
}
}
}
impl<A, B> ConnectionInfo for EitherOutput<A, B>
where
A: ConnectionInfo,
B: ConnectionInfo,
{
fn local_multiaddr(&self) -> Multiaddr {
match self {
EitherOutput::A(a) => a.local_multiaddr(),
EitherOutput::B(b) => b.local_multiaddr(),
}
}
fn remote_multiaddr(&self) -> Multiaddr {
match self {
EitherOutput::A(a) => a.remote_multiaddr(),
EitherOutput::B(b) => b.remote_multiaddr(),
}
}
}
impl<A, B> StreamMuxerEx for EitherOutput<A, B>
where
A: StreamMuxer + ConnectionInfo + SecureInfo + std::fmt::Debug,
B: StreamMuxer + ConnectionInfo + SecureInfo + std::fmt::Debug,
{
}
#[derive(Debug, Clone)]
pub enum EitherName<A, B> {
A(A),
B(B),
}
impl<A: ProtocolName, B: ProtocolName> ProtocolName for EitherName<A, B> {
fn protocol_name(&self) -> &[u8] {
match self {
EitherName::A(a) => a.protocol_name(),
EitherName::B(b) => b.protocol_name(),
}
}
}
// #[derive(Debug, Copy, Clone)]
// pub enum EitherTransport<A, B> {
// A(A),
// B(B),
// }
// #[async_trait]
// impl<A, B> Transport for EitherTransport<A, B>
// where
// B: Transport,
// A: Transport,
// {
// type Output = EitherOutput<A::Output, B::Output>;
// type Listener = EitherTransportListener<A::Listener, B::Listener>;
// fn listen_on(self, addr: Multiaddr) -> Result<Self::Listener, TransportError> {
// match self {
// EitherTransport::A(a) => Ok(EitherTransportListener::A(a.listen_on(addr)?)),
// EitherTransport::B(b) => Ok(EitherTransportListener::B(b.listen_on(addr)?)),
// }
// }
// async fn dial(self, addr: Multiaddr) -> Result<Self::Output, TransportError> {
// match self {
// EitherTransport::A(a) => Ok(EitherOutput::A(a.dial(addr).await?)),
// EitherTransport::B(b) => Ok(EitherOutput::B(b.dial(addr).await?)),
// }
// }
// }
// #[derive(Debug, Copy, Clone)]
// pub enum EitherTransportListener<A, B> {
// A(A),
// B(B),
// }
// #[async_trait]
// impl<A, B> TransportListener for EitherTransportListener<A, B>
// where
// B: TransportListener,
// A: TransportListener,
// {
// type Output = EitherOutput<A::Output, B::Output>;
// async fn accept(&mut self) -> Result<Self::Output, TransportError> {
// match self {
// EitherTransportListener::A(a) => Ok(EitherOutput::A(a.accept().await?)),
// EitherTransportListener::B(b) => Ok(EitherOutput::B(b.accept().await?)),
// }
// }
// fn multi_addr(&self) -> Multiaddr {
// match self {
// EitherTransportListener::A(a) => a.multi_addr(),
// EitherTransportListener::B(b) => b.multi_addr(),
// }
// }
// }
| 29.350617 | 130 | 0.574577 |
f8284d4f097be3bb636d71a0f5bfa740d187b9cc
| 9,173 |
use super::{client, server};
use proc_macro2::TokenStream;
use prost_build::{Config, Method, Service};
use quote::ToTokens;
use std::io;
use std::path::{Path, PathBuf};
/// Configure `tonic-build` code generation.
///
/// Use [`compile_protos`] instead if you don't need to tweak anything.
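///
/// A minimal `build.rs` sketch using the builder (the proto paths are placeholders):
///
/// ```no_run
/// fn main() -> Result<(), Box<dyn std::error::Error>> {
///     tonic_build::configure()
///         .build_server(false)
///         .compile(&["proto/helloworld.proto"], &["proto"])?;
///     Ok(())
/// }
/// ```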
pub fn configure() -> Builder {
Builder {
build_client: true,
build_server: true,
out_dir: None,
extern_path: Vec::new(),
field_attributes: Vec::new(),
type_attributes: Vec::new(),
proto_path: "super".to_string(),
#[cfg(feature = "rustfmt")]
format: true,
}
}
/// Simple `.proto` compiling. Use [`configure`] instead if you need more options.
///
/// The include directory will be the parent folder of the specified path.
/// The package name will be the filename without the extension.
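///
/// A minimal `build.rs` sketch (the proto path is a placeholder):
///
/// ```no_run
/// fn main() -> Result<(), Box<dyn std::error::Error>> {
///     tonic_build::compile_protos("proto/helloworld.proto")?;
///     Ok(())
/// }
/// ```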
pub fn compile_protos(proto: impl AsRef<Path>) -> io::Result<()> {
let proto_path: &Path = proto.as_ref();
// directory the main .proto file resides in
let proto_dir = proto_path
.parent()
.expect("proto file should reside in a directory");
self::configure().compile(&[proto_path], &[proto_dir])?;
Ok(())
}
const PROST_CODEC_PATH: &str = "tonic::codec::ProstCodec";
impl crate::Service for Service {
const CODEC_PATH: &'static str = PROST_CODEC_PATH;
type Method = Method;
type Comment = String;
fn name(&self) -> &str {
&self.name
}
fn package(&self) -> &str {
&self.package
}
fn identifier(&self) -> &str {
&self.proto_name
}
fn comment(&self) -> &[Self::Comment] {
&self.comments.leading[..]
}
fn methods(&self) -> &[Self::Method] {
&self.methods[..]
}
}
impl crate::Method for Method {
const CODEC_PATH: &'static str = PROST_CODEC_PATH;
type Comment = String;
fn name(&self) -> &str {
&self.name
}
fn identifier(&self) -> &str {
&self.proto_name
}
fn client_streaming(&self) -> bool {
self.client_streaming
}
fn server_streaming(&self) -> bool {
self.server_streaming
}
fn comment(&self) -> &[Self::Comment] {
&self.comments.leading[..]
}
fn request_response_name(&self, proto_path: &str) -> (TokenStream, TokenStream) {
let request = if self.input_proto_type.starts_with(".google.protobuf")
|| self.input_type.starts_with("::")
{
self.input_type.parse::<TokenStream>().unwrap()
} else {
syn::parse_str::<syn::Path>(&format!("{}::{}", proto_path, self.input_type))
.unwrap()
.to_token_stream()
};
let response = if self.output_proto_type.starts_with(".google.protobuf")
|| self.output_type.starts_with("::")
{
self.output_type.parse::<TokenStream>().unwrap()
} else {
syn::parse_str::<syn::Path>(&format!("{}::{}", proto_path, self.output_type))
.unwrap()
.to_token_stream()
};
(request, response)
}
}
struct ServiceGenerator {
builder: Builder,
clients: TokenStream,
servers: TokenStream,
}
impl ServiceGenerator {
fn new(builder: Builder) -> Self {
ServiceGenerator {
builder,
clients: TokenStream::default(),
servers: TokenStream::default(),
}
}
}
impl prost_build::ServiceGenerator for ServiceGenerator {
fn generate(&mut self, service: prost_build::Service, _buf: &mut String) {
if self.builder.build_server {
let server = server::generate(&service, &self.builder.proto_path);
self.servers.extend(server);
}
if self.builder.build_client {
let client = client::generate(&service, &self.builder.proto_path);
self.clients.extend(client);
}
}
fn finalize(&mut self, buf: &mut String) {
if self.builder.build_client && !self.clients.is_empty() {
let clients = &self.clients;
let client_service = quote::quote! {
#clients
};
let code = format!("{}", client_service);
buf.push_str(&code);
self.clients = TokenStream::default();
}
if self.builder.build_server && !self.servers.is_empty() {
let servers = &self.servers;
let server_service = quote::quote! {
#servers
};
let code = format!("{}", server_service);
buf.push_str(&code);
self.servers = TokenStream::default();
}
}
}
/// Service generator builder.
#[derive(Debug, Clone)]
pub struct Builder {
pub(crate) build_client: bool,
pub(crate) build_server: bool,
pub(crate) extern_path: Vec<(String, String)>,
pub(crate) field_attributes: Vec<(String, String)>,
pub(crate) type_attributes: Vec<(String, String)>,
pub(crate) proto_path: String,
out_dir: Option<PathBuf>,
#[cfg(feature = "rustfmt")]
format: bool,
}
impl Builder {
/// Enable or disable gRPC client code generation.
pub fn build_client(mut self, enable: bool) -> Self {
self.build_client = enable;
self
}
/// Enable or disable gRPC server code generation.
pub fn build_server(mut self, enable: bool) -> Self {
self.build_server = enable;
self
}
/// Enable the output to be formatted by rustfmt.
#[cfg(feature = "rustfmt")]
pub fn format(mut self, run: bool) -> Self {
self.format = run;
self
}
/// Set the output directory to generate code to.
///
/// Defaults to the `OUT_DIR` environment variable.
pub fn out_dir(mut self, out_dir: impl AsRef<Path>) -> Self {
self.out_dir = Some(out_dir.as_ref().to_path_buf());
self
}
/// Declare externally provided Protobuf package or type.
///
/// Passed directly to `prost_build::Config.extern_path`.
/// Note that the Protobuf path and the Rust package path should both be fully qualified,
/// i.e. Protobuf paths should start with "." and Rust paths should start with "::".
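///
/// A hypothetical sketch mapping the well-known protobuf `Timestamp` to the `prost-types`
/// implementation (the proto paths are placeholders):
///
/// ```no_run
/// fn main() -> std::io::Result<()> {
///     tonic_build::configure()
///         .extern_path(".google.protobuf.Timestamp", "::prost_types::Timestamp")
///         .compile(&["proto/service.proto"], &["proto"])?;
///     Ok(())
/// }
/// ```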
pub fn extern_path(mut self, proto_path: impl AsRef<str>, rust_path: impl AsRef<str>) -> Self {
self.extern_path.push((
proto_path.as_ref().to_string(),
rust_path.as_ref().to_string(),
));
self
}
/// Add an additional attribute to matched messages, enums, and one-ofs.
///
/// Passed directly to `prost_build::Config.field_attribute`.
pub fn field_attribute<P: AsRef<str>, A: AsRef<str>>(mut self, path: P, attribute: A) -> Self {
self.field_attributes
.push((path.as_ref().to_string(), attribute.as_ref().to_string()));
self
}
/// Add an additional attribute to matched messages, enums, and one-ofs.
///
/// Passed directly to `prost_build::Config.type_attribute`.
pub fn type_attribute<P: AsRef<str>, A: AsRef<str>>(mut self, path: P, attribute: A) -> Self {
self.type_attributes
.push((path.as_ref().to_string(), attribute.as_ref().to_string()));
self
}
/// Set the path where tonic will search for the Request/Response proto structs,
/// relative to the module where you call `include_proto!`.
///
/// This defaults to `super` since tonic will generate code in a module.
pub fn proto_path(mut self, proto_path: impl AsRef<str>) -> Self {
self.proto_path = proto_path.as_ref().to_string();
self
}
/// Compile the .proto files and execute code generation.
pub fn compile<P>(self, protos: &[P], includes: &[P]) -> io::Result<()>
where
P: AsRef<Path>,
{
self.compile_with_config(Config::new(), protos, includes)
}
/// Compile the .proto files and execute code generation using a
/// custom `prost_build::Config`.
pub fn compile_with_config<P>(
self,
mut config: Config,
protos: &[P],
includes: &[P],
) -> io::Result<()>
where
P: AsRef<Path>,
{
let out_dir = if let Some(out_dir) = self.out_dir.as_ref() {
out_dir.clone()
} else {
PathBuf::from(std::env::var("OUT_DIR").unwrap())
};
#[cfg(feature = "rustfmt")]
let format = self.format;
config.out_dir(out_dir.clone());
for (proto_path, rust_path) in self.extern_path.iter() {
config.extern_path(proto_path, rust_path);
}
for (prost_path, attr) in self.field_attributes.iter() {
config.field_attribute(prost_path, attr);
}
for (prost_path, attr) in self.type_attributes.iter() {
config.type_attribute(prost_path, attr);
}
config.service_generator(Box::new(ServiceGenerator::new(self)));
config.compile_protos(protos, includes)?;
#[cfg(feature = "rustfmt")]
{
if format {
super::fmt(out_dir.to_str().expect("Expected utf8 out_dir"));
}
}
Ok(())
}
}
| 29.306709 | 99 | 0.585632 |
d753f422cbdcc047951a1e941e9ae38271ef6738
| 3,908 |
use crate::{
prelude::Requester,
requests::HasPayload,
types::{ChatId, InputFile, ParseMode, *},
};
/// Default parse mode adaptor, see
/// [`RequesterExt::parse_mode`](crate::requests::RequesterExt::parse_mode).
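///
/// A minimal sketch (assumes a `Bot` type implementing `Requester` and `RequesterExt`
/// in scope; the token is a placeholder):
///
/// ```ignore
/// let bot = Bot::new("TOKEN").parse_mode(ParseMode::MarkdownV2);
/// // Requests sent through `bot` now default to MarkdownV2.
/// ```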
pub struct DefaultParseMode<B> {
bot: B,
mode: ParseMode,
}
impl<B> DefaultParseMode<B> {
/// Creates new [`DefaultParseMode`].
///
/// Note: it's recommended to use [`RequesterExt::parse_mode`] instead.
///
/// [`RequesterExt::parse_mode`]: crate::requests::RequesterExt::parse_mode
pub fn new(bot: B, parse_mode: ParseMode) -> Self {
Self {
bot,
mode: parse_mode,
}
}
/// Allows access to the inner bot.
pub fn inner(&self) -> &B {
&self.bot
}
/// Unwraps the inner bot.
pub fn into_inner(self) -> B {
self.bot
}
}
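// Helpers for `requester_forward!`: `f` builds the inner request and injects the default parse
// mode into its payload, `fty` names the inner requester's associated request type, and `fid`
// forwards the call unchanged.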
macro_rules! f {
($m:ident $this:ident ($($arg:ident : $T:ty),*)) => {
{
let mut req = $this.inner().$m($($arg),*);
req.payload_mut().parse_mode = Some($this.mode);
req
}
};
}
macro_rules! fty {
($T:ident) => {
B::$T
};
}
macro_rules! fid {
($m:ident $this:ident ($($arg:ident : $T:ty),*)) => {
$this.inner().$m($($arg),*)
};
}
impl<B: Requester> Requester for DefaultParseMode<B> {
type Err = B::Err;
requester_forward! {
send_message,
send_photo,
send_video,
send_audio,
send_document,
send_animation,
send_voice,
edit_message_text,
edit_message_text_inline,
edit_message_caption,
edit_message_caption_inline => f, fty
}
type SendPoll = B::SendPoll;
fn send_poll<C, Q, O>(
&self,
chat_id: C,
question: Q,
options: O,
type_: PollType,
) -> Self::SendPoll
where
C: Into<ChatId>,
Q: Into<String>,
O: IntoIterator<Item = String>,
{
let mut req = self.inner().send_poll(chat_id, question, options, type_);
req.payload_mut().explanation_parse_mode = Some(self.mode);
req
}
requester_forward! {
get_me, log_out, close, get_updates, set_webhook, delete_webhook, get_webhook_info,
forward_message, copy_message, send_video_note, send_media_group, send_location,
edit_message_live_location, edit_message_live_location_inline,
stop_message_live_location, stop_message_live_location_inline, send_venue,
send_contact, send_dice, send_chat_action, get_user_profile_photos,
get_file, kick_chat_member, unban_chat_member, restrict_chat_member,
promote_chat_member, set_chat_administrator_custom_title, set_chat_permissions,
export_chat_invite_link, create_chat_invite_link, edit_chat_invite_link,
revoke_chat_invite_link, set_chat_photo, delete_chat_photo, set_chat_title,
set_chat_description, pin_chat_message, unpin_chat_message, unpin_all_chat_messages,
leave_chat, get_chat, get_chat_administrators, get_chat_members_count, get_chat_member,
set_chat_sticker_set, delete_chat_sticker_set, answer_callback_query,
set_my_commands, get_my_commands, answer_inline_query,
edit_message_media, edit_message_media_inline, edit_message_reply_markup,
edit_message_reply_markup_inline, stop_poll, delete_message, send_sticker,
get_sticker_set, upload_sticker_file, create_new_sticker_set,
add_sticker_to_set, set_sticker_position_in_set, delete_sticker_from_set,
set_sticker_set_thumb, send_invoice, answer_shipping_query,
answer_pre_checkout_query, set_passport_data_errors, send_game,
set_game_score, set_game_score_inline, get_game_high_scores,
get_updates_fault_tolerant => fid, fty
}
}
download_forward! {
'w
B
DefaultParseMode<B>
{ this => this.inner() }
}
| 30.771654 | 94 | 0.653019 |
01d38d5ca01528e673eaf6671fbe1de322031ead
| 7,173 |
use crate::session::SessionContext;
use crate::types::{Message, ProviderId};
use crate::write_response;
use anyhow::{anyhow, Context, Result};
use log::{debug, error};
use pattern::*;
use serde_json::json;
use std::path::Path;
use std::path::PathBuf;
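/// Canonicalize `path` and return it as an owned `String`.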
#[inline]
pub fn as_absolute_path<P: AsRef<Path>>(path: P) -> Result<String> {
std::fs::canonicalize(path.as_ref())?
.into_os_string()
.into_string()
.map_err(|e| anyhow!("{:?}, path:{}", e, path.as_ref().display()))
}
/// Preview environment on Vim CursorMoved event.
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub enum OnMove {
Files(PathBuf),
Filer(PathBuf),
History(PathBuf),
Grep { path: PathBuf, lnum: usize },
BLines { path: PathBuf, lnum: usize },
ProjTags { path: PathBuf, lnum: usize },
BufferTags { path: PathBuf, lnum: usize },
}
/// Build the absolute path using cwd and relative path.
pub fn build_abs_path(cwd: &str, curline: String) -> PathBuf {
let mut path: PathBuf = cwd.into();
path.push(&curline);
path
}
impl OnMove {
pub fn new(curline: String, context: &SessionContext) -> Result<Self> {
let context = match context.provider_id.as_str() {
"files" | "git_files" => Self::Files(build_abs_path(&context.cwd, curline)),
"history" => {
if curline.starts_with('~') {
// I know std::env::home_dir() is incorrect in some rare cases[1], but dirs crate has been archived.
//
// [1] https://www.reddit.com/r/rust/comments/ga7f56/why_dirs_and_directories_repositories_have_been/fsjbsac/
#[allow(deprecated)]
let mut path = std::env::home_dir().context("failed to get home_dir")?;
path.push(&curline[2..]);
Self::History(path)
} else {
Self::History(build_abs_path(&context.cwd, curline))
}
}
"filer" => unreachable!("filer has been handled ahead"),
"proj_tags" => {
let (lnum, p) =
extract_proj_tags(&curline).context("Couldn't extract proj tags")?;
let mut path: PathBuf = context.cwd.clone().into();
path.push(&p);
Self::ProjTags { path, lnum }
}
"grep" | "grep2" => {
let (fpath, lnum, _col) =
extract_grep_position(&curline).context("Couldn't extract grep position")?;
let mut path: PathBuf = context.cwd.clone().into();
path.push(&fpath);
Self::Grep { path, lnum }
}
"blines" => {
let lnum = extract_blines_lnum(&curline).context("Couldn't extract buffer lnum")?;
let path = context.start_buffer_path.clone().into();
Self::BLines { path, lnum }
}
"tags" => {
let lnum =
extract_buf_tags_lnum(&curline).context("Couldn't extract buffer tags")?;
let path = context.start_buffer_path.clone().into();
Self::BufferTags { path, lnum }
}
_ => {
return Err(anyhow!(
"Couldn't constructs a OnMove instance, context: {:?}",
context
))
}
};
Ok(context)
}
}
pub struct OnMoveHandler {
pub msg_id: u64,
pub provider_id: ProviderId,
pub size: usize,
pub inner: OnMove,
}
impl OnMoveHandler {
pub fn try_new(msg: Message, context: &SessionContext) -> anyhow::Result<Self> {
let msg_id = msg.id;
let provider_id = context.provider_id.clone();
let curline = msg.get_curline(&provider_id)?;
if provider_id.as_str() == "filer" {
let path = build_abs_path(&msg.get_cwd(), curline);
return Ok(Self {
msg_id,
size: provider_id.get_preview_size(),
provider_id,
inner: OnMove::Filer(path),
});
}
Ok(Self {
msg_id,
size: provider_id.get_preview_size(),
provider_id,
inner: OnMove::new(curline, context)?,
})
}
pub fn handle(&self) -> Result<()> {
use OnMove::*;
match &self.inner {
BLines { path, lnum }
| Grep { path, lnum }
| ProjTags { path, lnum }
| BufferTags { path, lnum } => {
debug!("path:{}, lnum:{}", path.display(), lnum);
self.preview_file_at(&path, *lnum);
}
Filer(path) if path.is_dir() => {
self.preview_directory(&path)?;
}
Files(path) | Filer(path) | History(path) => {
self.preview_file(&path)?;
}
}
Ok(())
}
fn send_response(&self, result: serde_json::value::Value) {
let provider_id: crate::types::ProviderId = self.provider_id.clone().into();
write_response(json!({
"id": self.msg_id,
"provider_id": provider_id,
"result": result
}));
}
fn preview_file_at<P: AsRef<Path>>(&self, path: P, lnum: usize) {
match utility::read_preview_lines(path.as_ref(), lnum, self.size) {
Ok((lines_iter, hi_lnum)) => {
let fname = format!("{}", path.as_ref().display());
let lines = std::iter::once(fname.clone())
.chain(lines_iter)
.collect::<Vec<_>>();
debug!(
"sending msg_id:{}, provider_id:{}",
self.msg_id, self.provider_id
);
self.send_response(json!({
"event": "on_move",
"lines": lines,
"fname": fname,
"hi_lnum": hi_lnum
}));
}
Err(err) => {
error!(
"[{}]Couldn't read first lines of {}, error: {:?}",
self.provider_id,
path.as_ref().display(),
err
);
}
}
}
fn preview_file<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let abs_path = as_absolute_path(path.as_ref())?;
let lines_iter = utility::read_first_lines(path.as_ref(), 2 * self.size)?;
let lines = std::iter::once(abs_path.clone())
.chain(lines_iter)
.collect::<Vec<_>>();
self.send_response(json!({
"event": "on_move",
"lines": lines,
"fname": abs_path
}));
Ok(())
}
fn preview_directory<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let enable_icon = crate::env::global().enable_icon;
let lines = crate::filer::read_dir_entries(&path, enable_icon, Some(2 * self.size))?;
self.send_response(json!({
"event": "on_move",
"lines": lines,
"is_dir": true
}));
Ok(())
}
}
| 34.652174 | 129 | 0.497142 |
223a2fddf52886544f2adadb489c481b240661af
| 11,235 |
use crate::{
ast::{Id, ParameterizedValue},
connector::queryable::{ToColumnNames, ToRow},
};
#[cfg(feature = "chrono-0_4")]
use chrono::{DateTime, NaiveDateTime, Utc};
use postgres::{
types::{FromSql, ToSql, Type as PostgresType},
Statement as PostgresStatement,
};
use rust_decimal::Decimal;
use tokio_postgres::Row as PostgresRow;
#[cfg(feature = "uuid-0_7")]
use uuid::Uuid;
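/// Borrow each parameter as a `&dyn ToSql` trait object for tokio-postgres.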
pub fn conv_params<'a>(
params: &'a [ParameterizedValue<'a>],
) -> Vec<&'a dyn tokio_postgres::types::ToSql> {
params.iter().map(|x| x as &dyn ToSql).collect::<Vec<_>>()
}
#[cfg(feature = "uuid-0_7")]
fn accepts(ty: &PostgresType) -> bool {
<Uuid as FromSql>::accepts(ty)
|| <&str as FromSql>::accepts(ty)
|| <i16 as FromSql>::accepts(ty)
|| <i32 as FromSql>::accepts(ty)
|| <i64 as FromSql>::accepts(ty)
}
#[cfg(not(feature = "uuid-0_7"))]
fn accepts(ty: &PostgresType) -> bool {
<&str as FromSql>::accepts(ty)
|| <i16 as FromSql>::accepts(ty)
|| <i32 as FromSql>::accepts(ty)
|| <i64 as FromSql>::accepts(ty)
}
impl<'a> FromSql<'a> for Id {
fn from_sql(
ty: &PostgresType,
raw: &'a [u8],
) -> Result<Id, Box<dyn std::error::Error + Sync + Send>> {
let res = match *ty {
PostgresType::INT2 => Id::Int(i16::from_sql(ty, raw)? as usize),
PostgresType::INT4 => Id::Int(i32::from_sql(ty, raw)? as usize),
PostgresType::INT8 => Id::Int(i64::from_sql(ty, raw)? as usize),
#[cfg(feature = "uuid-0_7")]
PostgresType::UUID => Id::UUID(Uuid::from_sql(ty, raw)?),
_ => Id::String(String::from_sql(ty, raw)?),
};
Ok(res)
}
fn accepts(ty: &PostgresType) -> bool {
accepts(ty)
}
}
impl ToRow for PostgresRow {
fn to_result_row<'b>(&'b self) -> crate::Result<Vec<ParameterizedValue<'static>>> {
fn convert(row: &PostgresRow, i: usize) -> crate::Result<ParameterizedValue<'static>> {
let result = match *row.columns()[i].type_() {
PostgresType::BOOL => match row.try_get(i)? {
Some(val) => ParameterizedValue::Boolean(val),
None => ParameterizedValue::Null,
},
PostgresType::INT2 => match row.try_get(i)? {
Some(val) => {
let val: i16 = val;
ParameterizedValue::Integer(i64::from(val))
}
None => ParameterizedValue::Null,
},
PostgresType::INT4 => match row.try_get(i)? {
Some(val) => {
let val: i32 = val;
ParameterizedValue::Integer(i64::from(val))
}
None => ParameterizedValue::Null,
},
PostgresType::INT8 => match row.try_get(i)? {
Some(val) => {
let val: i64 = val;
ParameterizedValue::Integer(val)
}
None => ParameterizedValue::Null,
},
PostgresType::NUMERIC => match row.try_get(i)? {
Some(val) => {
let val: Decimal = val;
let val: f64 = val.to_string().parse().unwrap();
ParameterizedValue::Real(val)
}
None => ParameterizedValue::Null,
},
PostgresType::FLOAT4 => match row.try_get(i)? {
Some(val) => {
let val: f32 = val;
ParameterizedValue::Real(f64::from(val))
}
None => ParameterizedValue::Null,
},
PostgresType::FLOAT8 => match row.try_get(i)? {
Some(val) => {
let val: f64 = val;
ParameterizedValue::Real(val)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "chrono-0_4")]
PostgresType::TIMESTAMP => match row.try_get(i)? {
Some(val) => {
let ts: NaiveDateTime = val;
let dt = DateTime::<Utc>::from_utc(ts, Utc);
ParameterizedValue::DateTime(dt)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "uuid-0_7")]
PostgresType::UUID => match row.try_get(i)? {
Some(val) => {
let val: Uuid = val;
ParameterizedValue::Uuid(val)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::INT2_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<i16> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| ParameterizedValue::Integer(i64::from(x)))
.collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::INT4_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<i32> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| ParameterizedValue::Integer(i64::from(x)))
.collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::INT8_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<i64> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| ParameterizedValue::Integer(x as i64))
.collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::FLOAT4_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<f32> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| ParameterizedValue::Real(f64::from(x)))
.collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::FLOAT8_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<f64> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| ParameterizedValue::Real(x as f64))
.collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::BOOL_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<bool> = val;
ParameterizedValue::Array(
val.into_iter().map(ParameterizedValue::Boolean).collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(all(feature = "array", feature = "chrono-0_4"))]
PostgresType::TIMESTAMP_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<NaiveDateTime> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| {
ParameterizedValue::DateTime(DateTime::<Utc>::from_utc(x, Utc))
})
.collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::NUMERIC_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<Decimal> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| ParameterizedValue::Real(x.to_string().parse().unwrap()))
.collect(),
)
}
None => ParameterizedValue::Null,
},
#[cfg(feature = "array")]
PostgresType::TEXT_ARRAY
| PostgresType::NAME_ARRAY
| PostgresType::VARCHAR_ARRAY => match row.try_get(i)? {
Some(val) => {
let val: Vec<&str> = val;
ParameterizedValue::Array(
val.into_iter()
.map(|x| ParameterizedValue::Text(String::from(x).into()))
.collect(),
)
}
None => ParameterizedValue::Null,
},
PostgresType::OID => match row.try_get(i)? {
Some(val) => {
let val: u32 = val;
ParameterizedValue::Integer(i64::from(val))
}
None => ParameterizedValue::Null,
},
PostgresType::CHAR => match row.try_get(i)? {
Some(val) => {
let val: i8 = val;
ParameterizedValue::Char((val as u8) as char)
}
None => ParameterizedValue::Null,
},
_ => match row.try_get(i)? {
Some(val) => {
let val: String = val;
ParameterizedValue::Text(val.into())
}
None => ParameterizedValue::Null,
},
};
Ok(result)
}
let mut row = Vec::new();
for i in 0..self.columns().len() {
row.push(convert(self, i)?);
}
Ok(row)
}
}
impl ToColumnNames for PostgresStatement {
fn to_column_names(&self) -> Vec<String> {
let mut names = Vec::new();
for column in self.columns() {
names.push(String::from(column.name()));
}
names
}
}
| 39.146341 | 99 | 0.401335 |
186826abcec3996a97aaf502a04195269000fac5
| 21,437 |
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use super::{policy::*, types::*, DealProposal, DealState, DEAL_UPDATES_INTERVAL};
use crate::{make_map_with_root, ActorDowncast, BalanceTable, DealID, Map, SetMultimap};
use address::Address;
use cid::Cid;
use clock::{ChainEpoch, EPOCH_UNDEFINED};
use encoding::tuple::*;
use encoding::Cbor;
use ipld_blockstore::BlockStore;
use num_bigint::{bigint_ser, Sign};
use num_traits::Zero;
use std::error::Error as StdError;
use vm::{actor_error, ActorError, ExitCode, TokenAmount};
/// Market actor state
#[derive(Default, Serialize_tuple, Deserialize_tuple)]
pub struct State {
/// Amt<DealID, DealProposal>
pub proposals: Cid,
/// Amt<DealID, DealState>
pub states: Cid,
/// PendingProposals tracks dealProposals that have not yet reached their deal start date.
/// We track them here to ensure that miners can't publish the same deal proposal twice
pub pending_proposals: Cid,
/// Total amount held in escrow, indexed by actor address (including both locked and unlocked amounts).
pub escrow_table: Cid,
/// Amount locked, indexed by actor address.
/// Note: the amounts in this table do not affect the overall amount in escrow:
/// only the _portion_ of the total escrow amount that is locked.
pub locked_table: Cid,
/// Deal id state sequential incrementer
pub next_id: DealID,
/// Metadata cached for efficient iteration over deals.
/// SetMultimap<Address>
pub deal_ops_by_epoch: Cid,
pub last_cron: ChainEpoch,
/// Total Client Collateral that is locked -> unlocked when deal is terminated
#[serde(with = "bigint_ser")]
pub total_client_locked_colateral: TokenAmount,
/// Total Provider Collateral that is locked -> unlocked when deal is terminated
#[serde(with = "bigint_ser")]
pub total_provider_locked_colateral: TokenAmount,
/// Total storage fee that is locked in escrow -> unlocked when payments are made
#[serde(with = "bigint_ser")]
pub total_client_storage_fee: TokenAmount,
}
impl State {
pub fn new(empty_arr: Cid, empty_map: Cid, empty_mset: Cid) -> Self {
Self {
proposals: empty_arr,
states: empty_arr,
pending_proposals: empty_map,
escrow_table: empty_map,
locked_table: empty_map,
next_id: 0,
deal_ops_by_epoch: empty_mset,
last_cron: EPOCH_UNDEFINED,
total_client_locked_colateral: TokenAmount::default(),
total_provider_locked_colateral: TokenAmount::default(),
total_client_storage_fee: TokenAmount::default(),
}
}
pub fn total_locked(&self) -> TokenAmount {
&self.total_client_locked_colateral
+ &self.total_provider_locked_colateral
+ &self.total_client_storage_fee
}
pub(super) fn mutator<'bs, BS: BlockStore>(
&mut self,
store: &'bs BS,
) -> MarketStateMutation<'bs, '_, BS> {
MarketStateMutation::new(self, store)
}
}
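/// Storage fee still owed on `deal` from `slash_epoch` (clamped to the deal's start epoch)
/// until its end epoch.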
fn deal_get_payment_remaining(deal: &DealProposal, mut slash_epoch: ChainEpoch) -> TokenAmount {
assert!(
slash_epoch <= deal.end_epoch,
"Current epoch must be before the end epoch of the deal"
);
// Payments are always for start -> end epoch irrespective of when the deal is slashed.
slash_epoch = std::cmp::max(slash_epoch, deal.start_epoch);
let duration_remaining = deal.end_epoch - slash_epoch;
assert!(duration_remaining >= 0);
&deal.storage_price_per_epoch * duration_remaining as u64
}
impl Cbor for State {}
#[derive(Debug, PartialEq)]
pub(super) enum Permission {
Invalid,
ReadOnly,
Write,
}
pub(super) enum Reason {
ClientCollateral,
ClientStorageFee,
ProviderCollateral,
}
pub(super) struct MarketStateMutation<'bs, 's, BS> {
pub(super) st: &'s mut State,
pub(super) store: &'bs BS,
pub(super) proposal_permit: Permission,
pub(super) deal_proposals: Option<DealArray<'bs, BS>>,
pub(super) state_permit: Permission,
pub(super) deal_states: Option<DealMetaArray<'bs, BS>>,
pub(super) escrow_permit: Permission,
pub(super) escrow_table: Option<BalanceTable<'bs, BS>>,
pub(super) pending_permit: Permission,
pub(super) pending_deals: Option<Map<'bs, BS, DealProposal>>,
pub(super) dpe_permit: Permission,
pub(super) deals_by_epoch: Option<SetMultimap<'bs, BS>>,
pub(super) locked_permit: Permission,
pub(super) locked_table: Option<BalanceTable<'bs, BS>>,
pub(super) total_client_locked_colateral: Option<TokenAmount>,
pub(super) total_provider_locked_colateral: Option<TokenAmount>,
pub(super) total_client_storage_fee: Option<TokenAmount>,
pub(super) next_deal_id: DealID,
}
impl<'bs, 's, BS> MarketStateMutation<'bs, 's, BS>
where
BS: BlockStore,
{
pub(super) fn new(st: &'s mut State, store: &'bs BS) -> Self {
Self {
next_deal_id: st.next_id,
st,
store,
proposal_permit: Permission::Invalid,
deal_proposals: None,
state_permit: Permission::Invalid,
deal_states: None,
escrow_permit: Permission::Invalid,
escrow_table: None,
pending_permit: Permission::Invalid,
pending_deals: None,
dpe_permit: Permission::Invalid,
deals_by_epoch: None,
locked_permit: Permission::Invalid,
locked_table: None,
total_client_locked_colateral: None,
total_provider_locked_colateral: None,
total_client_storage_fee: None,
}
}
pub(super) fn build(&mut self) -> Result<&mut Self, Box<dyn StdError>> {
if self.proposal_permit != Permission::Invalid {
self.deal_proposals = Some(DealArray::load(&self.st.proposals, self.store)?);
}
if self.state_permit != Permission::Invalid {
self.deal_states = Some(DealMetaArray::load(&self.st.states, self.store)?);
}
if self.locked_permit != Permission::Invalid {
self.locked_table = Some(BalanceTable::from_root(self.store, &self.st.locked_table)?);
self.total_client_locked_colateral =
Some(self.st.total_client_locked_colateral.clone());
self.total_client_storage_fee = Some(self.st.total_client_storage_fee.clone());
self.total_provider_locked_colateral =
Some(self.st.total_provider_locked_colateral.clone());
}
if self.escrow_permit != Permission::Invalid {
self.escrow_table = Some(BalanceTable::from_root(self.store, &self.st.escrow_table)?);
}
if self.pending_permit != Permission::Invalid {
self.pending_deals = Some(make_map_with_root(&self.st.pending_proposals, self.store)?);
}
if self.dpe_permit != Permission::Invalid {
self.deals_by_epoch = Some(SetMultimap::from_root(
self.store,
&self.st.deal_ops_by_epoch,
)?);
}
self.next_deal_id = self.st.next_id;
Ok(self)
}
pub(super) fn with_deal_proposals(&mut self, permit: Permission) -> &mut Self {
self.proposal_permit = permit;
self
}
pub(super) fn with_deal_states(&mut self, permit: Permission) -> &mut Self {
self.state_permit = permit;
self
}
pub(super) fn with_escrow_table(&mut self, permit: Permission) -> &mut Self {
self.escrow_permit = permit;
self
}
pub(super) fn with_locked_table(&mut self, permit: Permission) -> &mut Self {
self.locked_permit = permit;
self
}
pub(super) fn with_pending_proposals(&mut self, permit: Permission) -> &mut Self {
self.pending_permit = permit;
self
}
pub(super) fn with_deals_by_epoch(&mut self, permit: Permission) -> &mut Self {
self.dpe_permit = permit;
self
}
pub(super) fn commit_state(&mut self) -> Result<(), Box<dyn StdError>> {
if self.proposal_permit == Permission::Write {
if let Some(s) = &mut self.deal_proposals {
self.st.proposals = s
.flush()
.map_err(|e| e.downcast_wrap("failed to flush deal proposals"))?;
}
}
if self.state_permit == Permission::Write {
if let Some(s) = &mut self.deal_states {
self.st.states = s
.flush()
.map_err(|e| e.downcast_wrap("failed to flush deal states"))?;
}
}
if self.locked_permit == Permission::Write {
if let Some(s) = &mut self.locked_table {
self.st.locked_table = s
.root()
.map_err(|e| e.downcast_wrap("failed to flush locked table"))?;
}
if let Some(s) = &mut self.total_client_locked_colateral {
self.st.total_client_locked_colateral = s.clone();
}
if let Some(s) = &mut self.total_provider_locked_colateral {
self.st.total_provider_locked_colateral = s.clone();
}
if let Some(s) = &mut self.total_client_storage_fee {
self.st.total_client_storage_fee = s.clone();
}
}
if self.escrow_permit == Permission::Write {
if let Some(s) = &mut self.escrow_table {
self.st.escrow_table = s
.root()
.map_err(|e| e.downcast_wrap("failed to flush escrow table"))?;
}
}
if self.pending_permit == Permission::Write {
if let Some(s) = &mut self.pending_deals {
self.st.pending_proposals = s
.flush()
.map_err(|e| e.downcast_wrap("failed to flush escrow table"))?;
}
}
if self.dpe_permit == Permission::Write {
if let Some(s) = &mut self.deals_by_epoch {
self.st.deal_ops_by_epoch = s
.root()
.map_err(|e| e.downcast_wrap("failed to flush escrow table"))?;
}
}
self.st.next_id = self.next_deal_id;
Ok(())
}
////////////////////////////////////////////////////////////////////////////////
// Deal state operations
////////////////////////////////////////////////////////////////////////////////
#[allow(clippy::too_many_arguments)]
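    /// Returns the amount slashed (zero if none), the next epoch at which the deal
    /// should be processed again (`EPOCH_UNDEFINED` if no further processing is
    /// scheduled), and whether the deal is finished and can be removed.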
pub(super) fn update_pending_deal_state(
&mut self,
state: &DealState,
deal: &DealProposal,
epoch: ChainEpoch,
) -> Result<(TokenAmount, ChainEpoch, bool), ActorError> {
let ever_updated = state.last_updated_epoch != EPOCH_UNDEFINED;
let ever_slashed = state.slash_epoch != EPOCH_UNDEFINED;
// if the deal was ever updated, make sure it didn't happen in the future
assert!(!ever_updated || state.last_updated_epoch <= epoch);
        // This would mean the first callback somehow triggered before the deal was
        // scheduled to start; that is not expected to be possible.
if deal.start_epoch > epoch {
return Ok((TokenAmount::zero(), EPOCH_UNDEFINED, false));
}
let payment_end_epoch = if ever_slashed {
            assert!(
                state.slash_epoch <= deal.end_epoch,
                "Slash epoch must be less than or equal to the end epoch"
            );
            assert!(
                epoch >= state.slash_epoch,
                "Current epoch must be at or after the slash epoch"
            );
state.slash_epoch
} else {
deal.end_epoch
};
let payment_start_epoch = if ever_updated && state.last_updated_epoch > deal.start_epoch {
state.last_updated_epoch
} else {
deal.start_epoch
};
let elapsed_end = std::cmp::min(epoch, payment_end_epoch);
let num_epochs_elapsed = elapsed_end - payment_start_epoch;
let total_payment = &deal.storage_price_per_epoch * num_epochs_elapsed as u64;
if total_payment > 0.into() {
self.transfer_balance(&deal.client, &deal.provider, &total_payment)?;
}
if ever_slashed {
// unlock client collateral and locked storage fee
let payment_remaining = deal_get_payment_remaining(&deal, state.slash_epoch);
// Unlock remaining storage fee
self.unlock_balance(&deal.client, &payment_remaining, Reason::ClientStorageFee)
.map_err(|e| {
e.downcast_default(
ExitCode::ErrIllegalState,
"failed to unlock remaining client storage fee",
)
})?;
// Unlock client collateral
self.unlock_balance(
&deal.client,
&deal.client_collateral,
Reason::ClientCollateral,
)
.map_err(|e| {
e.downcast_default(
ExitCode::ErrIllegalState,
"failed to unlock client collateral",
)
})?;
// slash provider collateral
let slashed = deal.provider_collateral.clone();
self.slash_balance(&deal.provider, &slashed, Reason::ProviderCollateral)
.map_err(|e| e.downcast_default(ExitCode::ErrIllegalState, "slashing balance"))?;
return Ok((slashed, EPOCH_UNDEFINED, true));
}
if epoch >= deal.end_epoch {
self.process_deal_expired(&deal, state)?;
return Ok((TokenAmount::zero(), EPOCH_UNDEFINED, true));
}
// We're explicitly not inspecting the end epoch and may process a deal's expiration late,
// in order to prevent an outsider from loading a cron tick by activating too many deals
// with the same end epoch.
let next = epoch + DEAL_UPDATES_INTERVAL;
Ok((TokenAmount::zero(), next, false))
}
/// Deal start deadline elapsed without appearing in a proven sector.
/// Slash a portion of provider's collateral, and unlock remaining collaterals
/// for both provider and client.
pub(super) fn process_deal_init_timed_out(
&mut self,
deal: &DealProposal,
) -> Result<TokenAmount, ActorError> {
self.unlock_balance(
&deal.client,
&deal.total_storage_fee(),
Reason::ClientStorageFee,
)
.map_err(|e| {
e.downcast_default(
ExitCode::ErrIllegalState,
"failure unlocking client storage fee",
)
})?;
self.unlock_balance(
&deal.client,
&deal.client_collateral,
Reason::ClientCollateral,
)
.map_err(|e| {
e.downcast_default(
ExitCode::ErrIllegalState,
"failure unlocking client collateral",
)
})?;
let amount_slashed =
collateral_penalty_for_deal_activation_missed(deal.provider_collateral.clone());
let amount_remaining = deal.provider_balance_requirement() - &amount_slashed;
self.slash_balance(&deal.provider, &amount_slashed, Reason::ProviderCollateral)
.map_err(|e| {
e.downcast_default(ExitCode::ErrIllegalState, "failed to slash balance")
})?;
self.unlock_balance(
&deal.provider,
&amount_remaining,
Reason::ProviderCollateral,
)
.map_err(|e| {
e.downcast_default(
ExitCode::ErrIllegalState,
"failed to unlock deal provider balance",
)
})?;
Ok(amount_slashed)
}
/// Normal expiration. Unlock collaterals for both miner and client.
fn process_deal_expired(
&mut self,
deal: &DealProposal,
state: &DealState,
) -> Result<(), ActorError>
where
BS: BlockStore,
{
assert_ne!(
state.sector_start_epoch, EPOCH_UNDEFINED,
"Sector start epoch must be initialized at this point"
);
self.unlock_balance(
&deal.provider,
&deal.provider_collateral,
Reason::ProviderCollateral,
)
.map_err(|e| {
e.downcast_default(
ExitCode::ErrIllegalArgument,
"failed unlocking deal provider balance",
)
})?;
self.unlock_balance(
&deal.client,
&deal.client_collateral,
Reason::ClientCollateral,
)
.map_err(|e| {
e.downcast_default(
ExitCode::ErrIllegalArgument,
"failed unlocking deal client balance",
)
})?;
Ok(())
}
pub(super) fn generate_storage_deal_id(&mut self) -> DealID {
let ret = self.next_deal_id;
self.next_deal_id += 1;
ret
}
pub(super) fn maybe_lock_balance(
&mut self,
addr: &Address,
amount: &TokenAmount,
) -> Result<(), ActorError> {
assert_ne!(amount.sign(), Sign::Minus);
let prev_locked = self.locked_table.as_ref().unwrap().get(addr).map_err(|e| {
e.downcast_default(ExitCode::ErrIllegalState, "failed to get locked balance")
})?;
let escrow_balance = self.escrow_table.as_ref().unwrap().get(addr).map_err(|e| {
e.downcast_default(ExitCode::ErrIllegalState, "failed to get escrow balance")
})?;
if &prev_locked + amount > escrow_balance {
return Err(actor_error!(ErrInsufficientFunds;
"not enough balance to lock for addr{}: \
escrow balance {} < prev locked {} + amount {}",
addr, escrow_balance, prev_locked, amount));
}
self.locked_table
.as_mut()
.unwrap()
.add(addr, amount)
.map_err(|e| {
e.downcast_default(ExitCode::ErrIllegalState, "failed to add locked balance")
})?;
Ok(())
}
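    // Worked example for the check above (hypothetical balances, not from the original
    // source): with an escrow balance of 100 and 70 already locked, locking another 40
    // fails (70 + 40 > 100), while locking 30 succeeds and brings the locked amount up
    // to exactly the escrow balance.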
pub(super) fn lock_client_and_provider_balances(
&mut self,
proposal: &DealProposal,
) -> Result<(), ActorError> {
self.maybe_lock_balance(&proposal.client, &proposal.client_balance_requirement())
.map_err(|e| e.wrap("failed to lock client funds"))?;
self.maybe_lock_balance(&proposal.provider, &proposal.provider_collateral)
.map_err(|e| e.wrap("failed to lock provider funds"))?;
if let Some(v) = self.total_client_locked_colateral.as_mut() {
*v += &proposal.client_collateral;
}
if let Some(v) = self.total_client_storage_fee.as_mut() {
*v += proposal.total_storage_fee();
}
if let Some(v) = self.total_provider_locked_colateral.as_mut() {
*v += &proposal.provider_collateral;
}
Ok(())
}
fn unlock_balance(
&mut self,
addr: &Address,
amount: &TokenAmount,
lock_reason: Reason,
) -> Result<(), Box<dyn StdError>> {
assert_ne!(amount.sign(), Sign::Minus);
self.locked_table
.as_mut()
.unwrap()
.must_subtract(addr, amount)?;
match lock_reason {
Reason::ClientCollateral => self.total_client_locked_colateral.as_mut().map(|v| {
*v -= amount;
}),
Reason::ClientStorageFee => self.total_client_storage_fee.as_mut().map(|v| {
*v -= amount;
}),
Reason::ProviderCollateral => self.total_provider_locked_colateral.as_mut().map(|v| {
*v -= amount;
}),
};
Ok(())
}
    /// Move funds from the client's locked balance to the provider's available escrow balance.
fn transfer_balance(
&mut self,
from_addr: &Address,
to_addr: &Address,
amount: &TokenAmount,
) -> Result<(), ActorError> {
assert!(amount >= &TokenAmount::from(0));
// Subtract from locked and escrow tables
self.escrow_table
.as_mut()
.unwrap()
.must_subtract(from_addr, &amount)
.map_err(|e| e.downcast_default(ExitCode::ErrIllegalState, "subtract from escrow"))?;
self.unlock_balance(from_addr, &amount, Reason::ClientStorageFee)
.map_err(|e| e.downcast_default(ExitCode::ErrIllegalState, "subtract from locked"))?;
// Add subtracted amount to the recipient
self.escrow_table
.as_mut()
.unwrap()
.add(to_addr, &amount)
.map_err(|e| e.downcast_default(ExitCode::ErrIllegalState, "add to escrow"))?;
Ok(())
}
fn slash_balance(
&mut self,
addr: &Address,
amount: &TokenAmount,
lock_reason: Reason,
) -> Result<(), Box<dyn StdError>> {
assert!(amount >= &TokenAmount::from(0));
// Subtract from locked and escrow tables
self.escrow_table
.as_mut()
.unwrap()
.must_subtract(addr, &amount)?;
self.unlock_balance(addr, amount, lock_reason)
}
}
| 34.026984 | 107 | 0.583944 |
fbc13ba695b8ba39faf6e933e2b45403ecccbd3b
| 3,457 |
#[doc = "Register `OUT_PRI_CH1` reader"]
pub struct R(crate::R<OUT_PRI_CH1_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<OUT_PRI_CH1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<OUT_PRI_CH1_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<OUT_PRI_CH1_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `OUT_PRI_CH1` writer"]
pub struct W(crate::W<OUT_PRI_CH1_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<OUT_PRI_CH1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<OUT_PRI_CH1_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<OUT_PRI_CH1_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `TX_PRI_CH1` reader - The priority of Tx channel 1. The larger of the value, the higher of the priority."]
pub struct TX_PRI_CH1_R(crate::FieldReader<u8, u8>);
impl TX_PRI_CH1_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
TX_PRI_CH1_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for TX_PRI_CH1_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `TX_PRI_CH1` writer - The priority of Tx channel 1. The larger of the value, the higher of the priority."]
pub struct TX_PRI_CH1_W<'a> {
w: &'a mut W,
}
impl<'a> TX_PRI_CH1_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u32 & 0x0f);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - The priority of Tx channel 1. The larger of the value, the higher of the priority."]
#[inline(always)]
pub fn tx_pri_ch1(&self) -> TX_PRI_CH1_R {
TX_PRI_CH1_R::new((self.bits & 0x0f) as u8)
}
}
impl W {
#[doc = "Bits 0:3 - The priority of Tx channel 1. The larger of the value, the higher of the priority."]
#[inline(always)]
pub fn tx_pri_ch1(&mut self) -> TX_PRI_CH1_W {
TX_PRI_CH1_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "DMA_OUT_PRI_CH1_REG.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [out_pri_ch1](index.html) module"]
pub struct OUT_PRI_CH1_SPEC;
impl crate::RegisterSpec for OUT_PRI_CH1_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [out_pri_ch1::R](R) reader structure"]
impl crate::Readable for OUT_PRI_CH1_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [out_pri_ch1::W](W) writer structure"]
impl crate::Writable for OUT_PRI_CH1_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets OUT_PRI_CH1 to value 0"]
impl crate::Resettable for OUT_PRI_CH1_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
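// Usage sketch (illustrative; the peripheral path `pac::DMA` is an assumption, but the
// accessor names follow the generated API above):
//
// let dma = unsafe { &*pac::DMA::ptr() };
// let prio = dma.out_pri_ch1.read().tx_pri_ch1().bits(); // read bits 0:3
// dma.out_pri_ch1.modify(|_, w| unsafe { w.tx_pri_ch1().bits(prio + 1) });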
| 33.240385 | 412 | 0.627133 |
399949acac5cfbfb68f3ab5743352f322cd53337
| 16,012 |
use std::collections::{BTreeMap, BTreeSet};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use abstio::MapName;
use abstutil::Timer;
use map_model::osm::RoadRank;
use map_model::{Block, Map, Perimeter, RoadID, RoadSideID};
use widgetry::Color;
use crate::{colors, App};
/// An opaque ID, won't be contiguous as we adjust boundaries
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct NeighborhoodID(usize);
/// Identifies a single / unmerged block, which never changes
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct BlockID(usize);
// Some states want this
impl widgetry::mapspace::ObjectID for NeighborhoodID {}
impl widgetry::mapspace::ObjectID for BlockID {}
#[derive(Clone, Serialize, Deserialize)]
pub struct Partitioning {
pub map: MapName,
neighborhoods: BTreeMap<NeighborhoodID, (Block, Color)>,
// The single / unmerged blocks never change
single_blocks: Vec<Block>,
neighborhood_id_counter: usize,
// Invariant: This is a surjection, every block belongs to exactly one neighborhood
block_to_neighborhood: BTreeMap<BlockID, NeighborhoodID>,
use_expensive_blockfinding: bool,
}
impl Partitioning {
/// Only valid before the LTN tool has been activated this session
pub fn empty() -> Partitioning {
Partitioning {
map: MapName::new("zz", "temp", "orary"),
neighborhoods: BTreeMap::new(),
single_blocks: Vec::new(),
neighborhood_id_counter: 0,
block_to_neighborhood: BTreeMap::new(),
use_expensive_blockfinding: false,
}
}
pub fn is_empty(&self) -> bool {
self.neighborhoods.is_empty()
}
pub fn seed_using_heuristics(app: &App, timer: &mut Timer) -> Partitioning {
// Try the easy thing first, but then give up
'METHOD: for use_expensive_blockfinding in [false, true] {
let map = &app.map;
timer.start("find single blocks");
let mut single_blocks = Vec::new();
let mut single_block_perims = Vec::new();
for mut perim in Perimeter::find_all_single_blocks(map) {
// TODO Some perimeters don't blockify after collapsing dead-ends. So do this
// upfront, and separately work on any blocks that don't show up.
// https://github.com/a-b-street/abstreet/issues/841
perim.collapse_deadends();
if let Ok(block) = perim.to_block(map) {
single_block_perims.push(block.perimeter.clone());
single_blocks.push(block);
}
}
timer.stop("find single blocks");
timer.start("partition");
let partitions = Perimeter::partition_by_predicate(single_block_perims, |r| {
// "Interior" roads of a neighborhood aren't classified as arterial
map.get_r(r).get_rank() == RoadRank::Local
});
let mut merged = Vec::new();
for perimeters in partitions {
// If we got more than one result back, merging partially failed. Oh well?
let stepwise_debug = false;
merged.extend(Perimeter::merge_all(
map,
perimeters,
stepwise_debug,
use_expensive_blockfinding,
));
}
timer.stop("partition");
timer.start_iter("blockify", merged.len());
let mut blocks = Vec::new();
for perimeter in merged {
timer.next();
match perimeter.to_block(map) {
Ok(block) => {
blocks.push(block);
}
Err(err) => {
warn!("Failed to make a block from a merged perimeter: {}", err);
}
}
}
let mut neighborhoods = BTreeMap::new();
for block in blocks {
neighborhoods.insert(NeighborhoodID(neighborhoods.len()), (block, Color::CLEAR));
}
let neighborhood_id_counter = neighborhoods.len();
let mut p = Partitioning {
map: map.get_name().clone(),
neighborhoods,
single_blocks,
neighborhood_id_counter,
block_to_neighborhood: BTreeMap::new(),
use_expensive_blockfinding,
};
// TODO We could probably build this up as we go
for id in p.all_block_ids() {
if let Some(neighborhood) = p.neighborhood_containing(id) {
p.block_to_neighborhood.insert(id, neighborhood);
} else {
if !use_expensive_blockfinding {
// Try the expensive check, then
error!(
"Block doesn't belong to any neighborhood? Retrying with expensive checks {:?}",
p.get_block(id).perimeter
);
continue 'METHOD;
}
// This will break everything downstream, so bail out immediately
panic!(
"Block doesn't belong to any neighborhood?! {:?}",
p.get_block(id).perimeter
);
}
}
p.recalculate_coloring();
return p;
}
unreachable!()
}
/// True if the coloring changed
pub fn recalculate_coloring(&mut self) -> bool {
let perims: Vec<Perimeter> = self
.neighborhoods
.values()
.map(|pair| pair.0.perimeter.clone())
.collect();
let colors = Perimeter::calculate_coloring(&perims, colors::NEIGHBORHOODS.len())
.unwrap_or_else(|| (0..perims.len()).collect());
let orig_coloring: Vec<Color> = self.neighborhoods.values().map(|pair| pair.1).collect();
for (pair, color_idx) in self.neighborhoods.values_mut().zip(colors.into_iter()) {
pair.1 = colors::NEIGHBORHOODS[color_idx % colors::NEIGHBORHOODS.len()];
}
let new_coloring: Vec<Color> = self.neighborhoods.values().map(|pair| pair.1).collect();
orig_coloring != new_coloring
}
    /// Returns `Ok(Some(new_owner))` when transferring the block removed the old
    /// neighborhood entirely, meaning the caller should switch to the neighborhood that
    /// absorbed the block; returns `Ok(None)` otherwise.
pub fn transfer_block(
&mut self,
map: &Map,
id: BlockID,
old_owner: NeighborhoodID,
new_owner: NeighborhoodID,
) -> Result<Option<NeighborhoodID>> {
assert_ne!(old_owner, new_owner);
// Is the newly expanded neighborhood a valid perimeter?
let new_owner_blocks: Vec<BlockID> = self
.block_to_neighborhood
.iter()
.filter_map(|(block, neighborhood)| {
if *neighborhood == new_owner || *block == id {
Some(*block)
} else {
None
}
})
.collect();
let mut new_neighborhood_blocks = self.make_merged_blocks(map, new_owner_blocks)?;
if new_neighborhood_blocks.len() != 1 {
// This happens when a hole would be created by adding this block. There are probably
// some smaller blocks nearby to add first.
bail!("Couldn't add block -- you may need to add an intermediate block first to avoid a hole, or there's a bug you can't workaround yet");
}
let new_neighborhood_block = new_neighborhood_blocks.pop().unwrap();
// Is the old neighborhood, minus this block, still valid?
// TODO refactor Neighborhood to BlockIDs?
let old_owner_blocks: Vec<BlockID> = self
.block_to_neighborhood
.iter()
.filter_map(|(block, neighborhood)| {
if *neighborhood == old_owner && *block != id {
Some(*block)
} else {
None
}
})
.collect();
if old_owner_blocks.is_empty() {
// We're deleting the old neighborhood!
self.neighborhoods.get_mut(&new_owner).unwrap().0 = new_neighborhood_block;
self.neighborhoods.remove(&old_owner).unwrap();
self.block_to_neighborhood.insert(id, new_owner);
// Tell the caller to recreate this SelectBoundary state, switching to the neighborhood
// we just donated to, since the old is now gone
return Ok(Some(new_owner));
}
let mut old_neighborhood_blocks = self.make_merged_blocks(map, old_owner_blocks.clone())?;
// We might be splitting the old neighborhood into multiple pieces! Pick the largest piece
// as the old_owner (so the UI for trimming a neighborhood is less jarring), and create new
// neighborhoods for the others.
old_neighborhood_blocks.sort_by_key(|block| block.perimeter.interior.len());
self.neighborhoods.get_mut(&old_owner).unwrap().0 = old_neighborhood_blocks.pop().unwrap();
let new_splits = !old_neighborhood_blocks.is_empty();
for split_piece in old_neighborhood_blocks {
let new_neighborhood = NeighborhoodID(self.neighborhood_id_counter);
self.neighborhood_id_counter += 1;
// Temporary color
self.neighborhoods
.insert(new_neighborhood, (split_piece, Color::CLEAR));
}
if new_splits {
// We need to update the owner of all single blocks in these new pieces
for id in old_owner_blocks {
self.block_to_neighborhood
.insert(id, self.neighborhood_containing(id).unwrap());
}
}
self.neighborhoods.get_mut(&new_owner).unwrap().0 = new_neighborhood_block;
self.block_to_neighborhood.insert(id, new_owner);
Ok(None)
}
/// Needs to find an existing neighborhood to take the block, or make a new one
pub fn remove_block_from_neighborhood(
&mut self,
map: &Map,
id: BlockID,
old_owner: NeighborhoodID,
) -> Result<Option<NeighborhoodID>> {
// Find all RoadSideIDs in the block matching the current neighborhood perimeter. Look for
// the first one that borders another neighborhood, and transfer the block there.
// TODO This can get unintuitive -- if we remove a block bordering two other
// neighborhoods, which one should we donate to?
let current_perim_set: BTreeSet<RoadSideID> = self.neighborhoods[&old_owner]
.0
.perimeter
.roads
.iter()
.cloned()
.collect();
for road_side in &self.get_block(id).perimeter.roads {
if !current_perim_set.contains(road_side) {
continue;
}
// Is there another neighborhood that has the other side of this road on its perimeter?
// TODO We could map road -> BlockID then use block_to_neighborhood
let other_side = road_side.other_side();
if let Some((new_owner, _)) = self
.neighborhoods
.iter()
.find(|(_, (block, _))| block.perimeter.roads.contains(&other_side))
{
let new_owner = *new_owner;
return self.transfer_block(map, id, old_owner, new_owner);
}
}
// We didn't find any match, so we're jettisoning a block near the edge of the map (or a
// buggy area missing blocks). Create a new neighborhood with just this block.
let new_owner = NeighborhoodID(self.neighborhood_id_counter);
self.neighborhood_id_counter += 1;
// Temporary color
self.neighborhoods
.insert(new_owner, (self.get_block(id).clone(), Color::CLEAR));
let result = self.transfer_block(map, id, old_owner, new_owner);
if result.is_err() {
// Revert the change above!
self.neighborhoods.remove(&new_owner).unwrap();
}
result
}
}
// Read-only
impl Partitioning {
pub fn neighborhood_block(&self, id: NeighborhoodID) -> &Block {
&self.neighborhoods[&id].0
}
pub fn neighborhood_area_km2(&self, id: NeighborhoodID) -> String {
// Convert from m^2 to km^2
let area = self.neighborhood_block(id).polygon.area() / 1_000_000.0;
format!("~{:.1} km²", area)
}
pub fn neighborhood_color(&self, id: NeighborhoodID) -> Color {
self.neighborhoods[&id].1
}
pub fn all_neighborhoods(&self) -> &BTreeMap<NeighborhoodID, (Block, Color)> {
&self.neighborhoods
}
// Just used for initial creation
fn neighborhood_containing(&self, find_block: BlockID) -> Option<NeighborhoodID> {
// TODO We could probably build this mapping up when we do Perimeter::merge_all
let find_block = self.get_block(find_block);
for (id, (block, _)) in &self.neighborhoods {
if block.perimeter.contains(&find_block.perimeter) {
return Some(*id);
}
}
None
}
pub fn all_single_blocks(&self) -> Vec<(BlockID, &Block)> {
self.single_blocks
.iter()
.enumerate()
.map(|(idx, block)| (BlockID(idx), block))
.collect()
}
pub fn all_block_ids(&self) -> Vec<BlockID> {
(0..self.single_blocks.len()).map(BlockID).collect()
}
pub fn get_block(&self, id: BlockID) -> &Block {
&self.single_blocks[id.0]
}
pub fn block_to_neighborhood(&self, id: BlockID) -> NeighborhoodID {
self.block_to_neighborhood[&id]
}
pub fn all_blocks_in_neighborhood(&self, id: NeighborhoodID) -> Vec<BlockID> {
let mut result = Vec::new();
for (block, n) in &self.block_to_neighborhood {
if *n == id {
result.push(*block);
}
}
result
}
pub fn some_block_in_neighborhood(&self, id: NeighborhoodID) -> BlockID {
for (block, neighborhood) in &self.block_to_neighborhood {
if id == *neighborhood {
return *block;
}
}
unreachable!("{:?} has no blocks", id);
}
/// Blocks on the "frontier" are adjacent to the perimeter, either just inside or outside.
pub fn calculate_frontier(&self, perim: &Perimeter) -> BTreeSet<BlockID> {
let perim_roads: BTreeSet<RoadID> = perim.roads.iter().map(|id| id.road).collect();
let mut frontier = BTreeSet::new();
for (block_id, block) in self.all_single_blocks() {
for road_side_id in &block.perimeter.roads {
// If the perimeter has this RoadSideID on the same side, we're just inside. If it has
// the other side, just on the outside. Either way, on the frontier.
if perim_roads.contains(&road_side_id.road) {
frontier.insert(block_id);
break;
}
}
}
frontier
}
// Possibly returns multiple merged blocks. The input is never "lost" -- if any perimeter fails
// to become a block, fail the whole operation.
fn make_merged_blocks(&self, map: &Map, input: Vec<BlockID>) -> Result<Vec<Block>> {
let mut perimeters = Vec::new();
for id in input {
perimeters.push(self.get_block(id).perimeter.clone());
}
let mut blocks = Vec::new();
let stepwise_debug = false;
for perim in Perimeter::merge_all(
map,
perimeters,
stepwise_debug,
self.use_expensive_blockfinding,
) {
blocks.push(perim.to_block(map)?);
}
Ok(blocks)
}
}
| 38.864078 | 150 | 0.573882 |
615085a3c1e8ffd181b38b831e5323be3ef302f6
| 304 |
#[doc = "Writer for register TASKS_SENSE"]
pub type W = crate::W<u32, super::TASKS_SENSE>;
#[doc = "Register TASKS_SENSE `reset()`'s with value 0"]
impl crate::ResetValue for super::TASKS_SENSE {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
impl W {}
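// Usage sketch (illustrative): nRF "TASKS_*" registers are write-only triggers, so the
// typical pattern is to write 1 through the generated writer, e.g. for a hypothetical
// peripheral handle `p`:
//
// p.TASKS_SENSE.write(|w| unsafe { w.bits(1) });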
| 25.333333 | 56 | 0.625 |
753d6f4a0a7497671bc236b2682180bb722d92dd
| 4,633 |
#[cfg(test)]
mod tests {
use crate::{search_case_sensitive, search_case_insensitive};
#[test]
fn case_sensitive() {
let query = "duct";
let contents = "Rust:\nsafe, fast, productive.\nPick three.\nDuct tape.";
assert_eq!(vec!["safe, fast, productive."], search_case_sensitive(query, contents));
}
#[test]
fn case_insensitive() {
let query = "rUsT";
let contents = "Rust:\nsafe, fast, productive.\nPick three.\nTrust me.";
assert_eq!(vec!["Rust:", "Trust me."], search_case_insensitive(query, contents));
}
}
use std::{fs, error, env};
pub struct Config {
pub query: String,
pub filename: String,
pub case_sensitive: bool
}
impl Config {
// pub fn new(args: &[String]) -> Result<Config, &str> {
// logically there is no link between "args" and output, hence this should be "pub fn new(args: &[String]) -> Result<Config, &'static str> {"
    // but from the compiler's point of view, there could be a link between "&String" in the input and "&str" in the output, hence "args" would have to live at least as long as the output
    // so this should actually have been written as "pub fn new<'a>(args: &'a [String]) -> Result<Config, &'a str> {", but thanks to the lifetime elision rules there was no need to write it explicitly
    pub fn new(mut args: env::Args) -> Result<Config, &'static str> { // now takes an iterator instead of a "String" slice; "mut" because calling "next" changes the iterator's internal state
        // now that there is no reference in the input, the output reference can be declared 'static
// if args.len() < 3 {
// return Err("not enough arguments");
// }
        // let query = args[1].clone(); // previously cloning was done so that ownership was not taken from "args"
// let filename = args[2].clone();
// let case_sensitive = if args.len() >= 4 {
// args[3].clone().parse().or_else(|_|{
// Err("Invalid case-sensitivity argument")
// })
// } else {
// Ok(Config::does_case_sensitive_env_var_exist())
// }?;
args.next(); // skipping the first argument which contains the name of the binary
let query = match args.next(){
Some(query) => query,
None => return Err("not enough arguments")
};
let filename = match args.next(){
Some(filename) => filename,
None => return Err("not enough arguments")
};
let case_sensitive = match args.next(){
Some(case_sensitive) => {
case_sensitive.parse().or_else(|_|{Err("Invalid case-sensitivity argument")})?
},
None => Config::does_case_sensitive_env_var_exist()
};
Ok(Config {query, filename, case_sensitive})
}
fn does_case_sensitive_env_var_exist() -> bool {
env::var("CASE_SENSITIVE").is_ok()
}
}
pub fn run(config: Config) -> Result<(), Box<dyn error::Error>>{
let contents = fs::read_to_string(config.filename)?;
let res = if config.case_sensitive {
search_case_sensitive(&config.query, &contents)
} else {
search_case_insensitive(&config.query, &contents)
};
// let res = config.case_sensitive.then(||search_case_sensitive(&config.query, &contents))// if "true" then closure is executed and it's output is wrapped in "Some" else "None"
// .unwrap_or_else(||search_case_insensitive(&config.query, &contents)); // if false
// for line in res {
// println!("{}", line);
// }
res.iter().for_each(|line| {println!("{}", line);});
Ok(())
}
pub fn search_case_sensitive<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
// let mut res = Vec::new();
// for line in contents.lines() {
// if line.contains(query) {
// res.push(line);
// }
// }
// res
contents.lines().filter(|line| line.contains(query)).collect() // more concise and avoids use of intermediate mutable variable
// functional programming minimizes the amount of mutable state and makes code more clear and concise
// if there is no mutable state, it is easy to make it embarrassingly parallel
}
pub fn search_case_insensitive<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
// let query = query.to_lowercase();
// let mut res = Vec::new();
// for line in contents.lines() {
// if line.to_lowercase().contains(&query) {
// res.push(line);
// }
// }
// res
contents.lines().filter(|line| line.to_lowercase().contains(query.to_lowercase().as_str())).collect()
}
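// Usage sketch (not part of this exercise file; the crate name "minigrep" is an assumption):
// wiring Config::new and run together from a binary's main.rs, invoked as
// `minigrep <query> <filename>`.
//
// fn main() {
//     let config = minigrep::Config::new(std::env::args()).unwrap_or_else(|err| {
//         eprintln!("Problem parsing arguments: {}", err);
//         std::process::exit(1);
//     });
//     if let Err(e) = minigrep::run(config) {
//         eprintln!("Application error: {}", e);
//         std::process::exit(1);
//     }
// }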
| 40.286957 | 185 | 0.604792 |
dd2c891e8011de6a066a9e76f1e3db3314bf8d6c
| 717 |
fn main() {
assert_eq!(Solution::missing_number(vec![3,0,1]), 2);
}
struct Solution {}
impl Solution {
pub fn missing_number(mut nums: Vec<i32>) -> i32 {
nums.push(-1);
let mut i = 0;
while i < nums.len() {
if nums[i] == i as i32 || nums[i] == -1 { i += 1; }
else {
let target = nums[i] as usize;
nums.swap(i, target);
}
}
nums.iter().position(|&x| x == -1).unwrap() as i32
}
}
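// Alternative sketch (not from the original solution): the same answer follows directly
// from the arithmetic-series sum 0 + 1 + ... + n, avoiding the in-place cyclic swaps above.
#[allow(dead_code)]
fn missing_number_by_sum(nums: &[i32]) -> i32 {
    let n = nums.len() as i32;
    let expected = n * (n + 1) / 2; // sum of 0..=n when nothing is missing
    let actual: i32 = nums.iter().sum();
    expected - actual
}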
#[cfg(test)]
mod test {
use crate::*;
#[test]
fn basic() {
assert_eq!(Solution::missing_number(vec![3,0,1]), 2);
assert_eq!(Solution::missing_number(vec![9,6,4,2,3,5,7,0,1]), 8);
}
}
| 22.40625 | 73 | 0.475593 |
91d6c1572c9de16e63c12873275ec91d4132f337
| 24,175 |
use crate::{
builders::{CrateBuilder, VersionBuilder},
new_user,
util::{MockCookieUser, RequestHelper, Response, StatusCode},
OkBool, TestApp,
};
use cargo_registry::{
models::{Email, NewUser, User},
schema::crate_owners,
views::{EncodablePrivateUser, EncodablePublicUser, EncodableVersion, OwnedCrate},
};
use diesel::prelude::*;
#[derive(Deserialize)]
struct AuthResponse {
url: String,
state: String,
}
#[derive(Deserialize)]
pub struct UserShowPublicResponse {
pub user: EncodablePublicUser,
}
#[derive(Deserialize)]
pub struct UserShowPrivateResponse {
pub user: EncodablePrivateUser,
pub owned_crates: Vec<OwnedCrate>,
}
#[derive(Deserialize)]
struct UserStats {
total_downloads: i64,
}
#[derive(Serialize)]
struct EmailNotificationsUpdate {
id: i32,
email_notifications: bool,
}
impl crate::util::MockCookieUser {
fn show_me(&self) -> UserShowPrivateResponse {
let url = "/api/v1/me";
self.get(url).good()
}
fn update_email(&self, email: &str) -> OkBool {
let model = self.as_model();
self.update_email_more_control(model.id, Some(email)).good()
}
// TODO: I don't like the name of this method or the one above; this is starting to look like
// a builder might help? I want to explore alternative abstractions in any case
fn update_email_more_control(&self, user_id: i32, email: Option<&str>) -> Response<OkBool> {
// When updating your email in crates.io, the request goes to the user route with PUT.
// Ember sends all the user attributes. We check to make sure the ID in the URL matches
// the ID of the currently logged in user, then we ignore everything but the email address.
let body = json!({"user": {
"email": email,
"name": "Arbitrary Name",
"login": "arbitrary_login",
"avatar": "https://arbitrary.com/img.jpg",
"url": "https://arbitrary.com",
"kind": null
}});
let url = format!("/api/v1/users/{}", user_id);
self.put(&url, body.to_string().as_bytes())
}
fn confirm_email(&self, email_token: &str) -> OkBool {
let url = format!("/api/v1/confirm/{}", email_token);
self.put(&url, &[]).good()
}
fn update_email_notifications(&self, updates: Vec<EmailNotificationsUpdate>) -> OkBool {
self.put(
"/api/v1/me/email_notifications",
json!(updates).to_string().as_bytes(),
)
.good()
}
}
impl crate::util::MockAnonymousUser {
// TODO: Refactor to get rid of this duplication with the same method implemented on
// MockCookieUser
fn update_email_more_control(&self, user_id: i32, email: Option<&str>) -> Response<OkBool> {
// When updating your email in crates.io, the request goes to the user route with PUT.
// Ember sends all the user attributes. We check to make sure the ID in the URL matches
// the ID of the currently logged in user, then we ignore everything but the email address.
let body = json!({"user": {
"email": email,
"name": "Arbitrary Name",
"login": "arbitrary_login",
"avatar": "https://arbitrary.com/img.jpg",
"url": "https://arbitrary.com",
"kind": null
}});
let url = format!("/api/v1/users/{}", user_id);
self.put(&url, body.to_string().as_bytes())
}
}
#[test]
fn auth_gives_a_token() {
let (_, anon) = TestApp::init().empty();
let json: AuthResponse = anon.get("/api/private/session/begin").good();
assert!(json.url.contains(&json.state));
}
#[test]
fn access_token_needs_data() {
let (_, anon) = TestApp::init().empty();
let json = anon
.get::<()>("/api/private/session/authorize")
.bad_with_status(StatusCode::BAD_REQUEST);
assert!(json.errors[0].detail.contains("invalid state"));
}
#[test]
fn me() {
let url = "/api/v1/me";
let (app, anon) = TestApp::init().empty();
anon.get(url).assert_forbidden();
let user = app.db_new_user("foo");
let json = user.show_me();
assert_eq!(json.owned_crates.len(), 0);
app.db(|conn| {
CrateBuilder::new("foo_my_packages", user.as_model().id).expect_build(conn);
assert_eq!(json.user.email, user.as_model().email(conn).unwrap());
});
let updated_json = user.show_me();
assert_eq!(updated_json.owned_crates.len(), 1);
}
#[test]
fn show() {
let (app, anon, _) = TestApp::init().with_user();
app.db_new_user("bar");
let json: UserShowPublicResponse = anon.get("/api/v1/users/foo").good();
assert_eq!("foo", json.user.login);
let json: UserShowPublicResponse = anon.get("/api/v1/users/bar").good();
assert_eq!("bar", json.user.login);
assert_eq!(Some("https://github.com/bar".into()), json.user.url);
}
#[test]
fn show_latest_user_case_insensitively() {
let (app, anon) = TestApp::init().empty();
app.db(|conn| {
// Please do not delete or modify the setup of this test in order to get it to pass.
// This setup mimics how GitHub works. If someone abandons a GitHub account, the username is
// available for anyone to take. We need to support having multiple user accounts
// with the same gh_login in crates.io. `gh_id` is stable across renames, so that field
// should be used for uniquely identifying GitHub accounts whenever possible. For the
// crates.io/user/:username pages, the best we can do is show the last crates.io account
// created with that username.
t!(NewUser::new(
1,
"foobar",
Some("I was first then deleted my github account"),
None,
"bar"
)
.create_or_update(None, conn));
t!(NewUser::new(
2,
"FOOBAR",
Some("I was second, I took the foobar username on github"),
None,
"bar"
)
.create_or_update(None, conn));
});
let json: UserShowPublicResponse = anon.get("api/v1/users/fOObAr").good();
assert_eq!(
"I was second, I took the foobar username on github",
json.user.name.unwrap()
);
}
#[test]
fn crates_by_user_id() {
let (app, _, user) = TestApp::init().with_user();
let id = user.as_model().id;
app.db(|conn| {
CrateBuilder::new("foo_my_packages", id).expect_build(conn);
});
let response = user.search_by_user_id(id);
assert_eq!(response.crates.len(), 1);
}
#[test]
fn crates_by_user_id_not_including_deleted_owners() {
let (app, anon, user) = TestApp::init().with_user();
let user = user.as_model();
app.db(|conn| {
let krate = CrateBuilder::new("foo_my_packages", user.id).expect_build(conn);
krate
.owner_remove(app.as_inner(), conn, user, "foo")
.unwrap();
});
let response = anon.search_by_user_id(user.id);
assert_eq!(response.crates.len(), 0);
}
#[test]
fn following() {
use cargo_registry::schema::versions;
use diesel::update;
#[derive(Deserialize)]
struct R {
versions: Vec<EncodableVersion>,
meta: Meta,
}
#[derive(Deserialize)]
struct Meta {
more: bool,
}
let (app, _, user) = TestApp::init().with_user();
let user_model = user.as_model();
let user_id = user_model.id;
app.db(|conn| {
CrateBuilder::new("foo_fighters", user_id)
.version(VersionBuilder::new("1.0.0"))
.expect_build(conn);
// Make foo_fighters's version mimic a version published before we started recording who
// published versions
let none: Option<i32> = None;
update(versions::table)
.set(versions::published_by.eq(none))
.execute(conn)
.unwrap();
CrateBuilder::new("bar_fighters", user_id)
.version(VersionBuilder::new("1.0.0"))
.expect_build(conn);
});
let r: R = user.get("/api/v1/me/updates").good();
assert_eq!(r.versions.len(), 0);
assert_eq!(r.meta.more, false);
user.put::<OkBool>("/api/v1/crates/foo_fighters/follow", b"")
.good();
user.put::<OkBool>("/api/v1/crates/bar_fighters/follow", b"")
.good();
let r: R = user.get("/api/v1/me/updates").good();
assert_eq!(r.versions.len(), 2);
assert_eq!(r.meta.more, false);
let foo_version = r
.versions
.iter()
.find(|v| v.krate == "foo_fighters")
.unwrap();
assert!(foo_version.published_by.is_none());
let bar_version = r
.versions
.iter()
.find(|v| v.krate == "bar_fighters")
.unwrap();
assert_eq!(
bar_version.published_by.as_ref().unwrap().login,
user_model.gh_login
);
let r: R = user
.get_with_query("/api/v1/me/updates", "per_page=1")
.good();
assert_eq!(r.versions.len(), 1);
assert_eq!(r.meta.more, true);
user.delete::<OkBool>("/api/v1/crates/bar_fighters/follow")
.good();
let r: R = user
.get_with_query("/api/v1/me/updates", "page=2&per_page=1")
.good();
assert_eq!(r.versions.len(), 0);
assert_eq!(r.meta.more, false);
user.get_with_query::<()>("/api/v1/me/updates", "page=0")
.bad_with_status(StatusCode::BAD_REQUEST);
}
#[test]
fn user_total_downloads() {
use diesel::update;
let (app, anon, user) = TestApp::init().with_user();
let user = user.as_model();
let another_user = app.db_new_user("bar");
let another_user = another_user.as_model();
app.db(|conn| {
let mut krate = CrateBuilder::new("foo_krate1", user.id).expect_build(conn);
krate.downloads = 10;
update(&krate).set(&krate).execute(conn).unwrap();
let mut krate2 = CrateBuilder::new("foo_krate2", user.id).expect_build(conn);
krate2.downloads = 20;
update(&krate2).set(&krate2).execute(conn).unwrap();
let mut another_krate = CrateBuilder::new("bar_krate1", another_user.id).expect_build(conn);
another_krate.downloads = 2;
update(&another_krate)
.set(&another_krate)
.execute(conn)
.unwrap();
let mut no_longer_my_krate = CrateBuilder::new("nacho", user.id).expect_build(conn);
no_longer_my_krate.downloads = 5;
update(&no_longer_my_krate)
.set(&no_longer_my_krate)
.execute(conn)
.unwrap();
no_longer_my_krate
.owner_remove(app.as_inner(), conn, user, &user.gh_login)
.unwrap();
});
let url = format!("/api/v1/users/{}/stats", user.id);
let stats: UserStats = anon.get(&url).good();
// does not include crates user never owned (2) or no longer owns (5)
assert_eq!(stats.total_downloads, 30);
}
#[test]
fn user_total_downloads_no_crates() {
let (_, anon, user) = TestApp::init().with_user();
let user = user.as_model();
let url = format!("/api/v1/users/{}/stats", user.id);
let stats: UserStats = anon.get(&url).good();
assert_eq!(stats.total_downloads, 0);
}
#[test]
fn updating_existing_user_doesnt_change_api_token() {
let (app, _, user, token) = TestApp::init().with_token();
let gh_id = user.as_model().gh_id;
let token = &token.as_model().token;
let user = app.db(|conn| {
// Reuse gh_id but use new gh_login and gh_access_token
t!(NewUser::new(gh_id, "bar", None, None, "bar_token").create_or_update(None, conn));
// Use the original API token to find the now updated user
t!(User::find_by_api_token(conn, token))
});
assert_eq!("bar", user.gh_login);
assert_eq!("bar_token", user.gh_access_token);
}
/* Given a GitHub user, check that if the user logs in,
updates their email, logs out, then logs back in, the
email they added to crates.io will not be overwritten
by the information sent by GitHub.
This bug is problematic if the user's email preferences
are set to private on GitHub, as GitHub will always
   send none as the email and we will end up inadvertently
deleting their email when they sign back in.
*/
#[test]
fn github_without_email_does_not_overwrite_email() {
let (app, _) = TestApp::init().empty();
// Simulate logging in via GitHub with an account that has no email.
// Because faking GitHub is terrible, call what GithubUser::save_to_database does directly.
// Don't use app.db_new_user because it adds a verified email.
let user_without_github_email = app.db(|conn| {
let u = new_user("arbitrary_username");
let u = u.create_or_update(None, conn).unwrap();
MockCookieUser::new(&app, u)
});
let user_without_github_email_model = user_without_github_email.as_model();
let json = user_without_github_email.show_me();
// Check that the setup is correct and the user indeed has no email
assert_eq!(json.user.email, None);
// Add an email address in crates.io
user_without_github_email.update_email("[email protected]");
// Simulate the same user logging in via GitHub again, still with no email in GitHub.
let again_user_without_github_email = app.db(|conn| {
let u = NewUser {
// Use the same github ID to link to the existing account
gh_id: user_without_github_email_model.gh_id,
// new_user uses a None email; the rest of the fields are arbitrary
..new_user("arbitrary_username")
};
let u = u.create_or_update(None, conn).unwrap();
MockCookieUser::new(&app, u)
});
let json = again_user_without_github_email.show_me();
assert_eq!(json.user.email.unwrap(), "[email protected]");
}
/* Given a new user, test that if they sign in with one email, change their email on GitHub, then
sign in again, that the email in crates.io will remain set to the original email used on GitHub.
*/
#[test]
fn github_with_email_does_not_overwrite_email() {
use cargo_registry::schema::emails;
let (app, _, user) = TestApp::init().with_user();
let model = user.as_model();
let original_email = app.db(|conn| {
Email::belonging_to(model)
.select(emails::email)
.first::<String>(&*conn)
.unwrap()
});
let new_github_email = "[email protected]";
// Simulate logging in to crates.io after changing your email in GitHub
let user_with_different_email_in_github = app.db(|conn| {
let u = NewUser {
// Use the same github ID to link to the existing account
gh_id: model.gh_id,
// the rest of the fields are arbitrary
..new_user("arbitrary_username")
};
let u = u.create_or_update(Some(new_github_email), conn).unwrap();
MockCookieUser::new(&app, u)
});
let json = user_with_different_email_in_github.show_me();
assert_eq!(json.user.email, Some(original_email));
}
/* Given a crates.io user, check that the user's email can be
updated in the database (PUT /user/:user_id), then check
that the updated email is sent back to the user (GET /me).
*/
#[test]
fn test_email_get_and_put() {
let (_app, _anon, user) = TestApp::init().with_user();
let json = user.show_me();
assert_eq!(json.user.email.unwrap(), "[email protected]");
user.update_email("[email protected]");
let json = user.show_me();
assert_eq!(json.user.email.unwrap(), "[email protected]");
assert!(!json.user.email_verified);
assert!(json.user.email_verification_sent);
}
/* Given a crates.io user, check to make sure that the user
cannot add to the database an empty string or null as
their email. If an attempt is made, update_user.rs will
return an error indicating that an empty email cannot be
added.
This is checked on the frontend already, but I'd like to
make sure that a user cannot get around that and delete
their email by adding an empty string.
*/
#[test]
fn test_empty_email_not_added() {
let (_app, _anon, user) = TestApp::init().with_user();
let model = user.as_model();
let json = user
.update_email_more_control(model.id, Some(""))
.bad_with_status(StatusCode::BAD_REQUEST);
assert!(
json.errors[0].detail.contains("empty email rejected"),
"{:?}",
json.errors
);
let json = user
.update_email_more_control(model.id, None)
.bad_with_status(StatusCode::BAD_REQUEST);
assert!(
json.errors[0].detail.contains("empty email rejected"),
"{:?}",
json.errors
);
}
/* Check to make sure that neither other signed in users nor anonymous users can edit another
user's email address.
If an attempt is made, update_user.rs will return an error indicating that the current user
does not match the requested user.
*/
#[test]
fn test_other_users_cannot_change_my_email() {
let (app, anon, user) = TestApp::init().with_user();
let another_user = app.db_new_user("not_me");
let another_user_model = another_user.as_model();
let json = user
.update_email_more_control(
another_user_model.id,
Some("[email protected]"),
)
.bad_with_status(StatusCode::BAD_REQUEST);
assert!(
json.errors[0]
.detail
.contains("current user does not match requested user",),
"{:?}",
json.errors
);
anon.update_email_more_control(
another_user_model.id,
Some("[email protected]"),
)
.bad_with_status(StatusCode::FORBIDDEN);
}
/* Given a new user, test that their email can be added
to the email table and a token for the email is generated
and added to the token table. When /confirm/:email_token is
requested, check that the response back is ok, and that
the email_verified field on user is now set to true.
*/
#[test]
fn test_confirm_user_email() {
use cargo_registry::schema::emails;
let (app, _) = TestApp::init().empty();
// Simulate logging in via GitHub. Don't use app.db_new_user because it inserts a verified
// email directly into the database and we want to test the verification flow here.
let email = "[email protected]";
let user = app.db(|conn| {
let u = NewUser {
..new_user("arbitrary_username")
};
let u = u.create_or_update(Some(email), conn).unwrap();
MockCookieUser::new(&app, u)
});
let user_model = user.as_model();
let email_token = app.db(|conn| {
Email::belonging_to(user_model)
.select(emails::token)
.first::<String>(&*conn)
.unwrap()
});
user.confirm_email(&email_token);
let json = user.show_me();
assert_eq!(json.user.email.unwrap(), "[email protected]");
assert!(json.user.email_verified);
assert!(json.user.email_verification_sent);
}
/* Given a user who existed before we added email confirmation,
test that `email_verification_sent` is false so that we don't
make the user think we've sent an email when we haven't.
*/
#[test]
fn test_existing_user_email() {
use cargo_registry::schema::emails;
use chrono::NaiveDateTime;
use diesel::update;
let (app, _) = TestApp::init().empty();
// Simulate logging in via GitHub. Don't use app.db_new_user because it inserts a verified
// email directly into the database and we want to test the verification flow here.
let email = "[email protected]";
let user = app.db(|conn| {
let u = NewUser {
..new_user("arbitrary_username")
};
let u = u.create_or_update(Some(email), conn).unwrap();
update(Email::belonging_to(&u))
// Users created before we added verification will have
// `NULL` in the `token_generated_at` column.
.set(emails::token_generated_at.eq(None::<NaiveDateTime>))
.execute(conn)
.unwrap();
MockCookieUser::new(&app, u)
});
let json = user.show_me();
assert_eq!(json.user.email.unwrap(), "[email protected]");
assert!(!json.user.email_verified);
assert!(!json.user.email_verification_sent);
}
#[test]
fn test_user_owned_crates_doesnt_include_deleted_ownership() {
let (app, _, user) = TestApp::init().with_user();
let user_model = user.as_model();
app.db(|conn| {
let krate = CrateBuilder::new("foo_my_packages", user_model.id).expect_build(conn);
krate
.owner_remove(app.as_inner(), conn, user_model, &user_model.gh_login)
.unwrap();
});
let json = user.show_me();
assert_eq!(json.owned_crates.len(), 0);
}
/* A user should be able to update the email notifications for crates they own. Only the crates that
were sent in the request should be updated to the corresponding `email_notifications` value.
*/
#[test]
fn test_update_email_notifications() {
let (app, _, user) = TestApp::init().with_user();
let my_crates = app.db(|conn| {
vec![
CrateBuilder::new("test_package", user.as_model().id).expect_build(&conn),
CrateBuilder::new("another_package", user.as_model().id).expect_build(&conn),
]
});
let a_id = my_crates.get(0).unwrap().id;
let b_id = my_crates.get(1).unwrap().id;
// Update crate_a: email_notifications = false
// crate_a should be false, crate_b should be true
user.update_email_notifications(vec![EmailNotificationsUpdate {
id: a_id,
email_notifications: false,
}]);
let json = user.show_me();
assert_eq!(
json.owned_crates
.iter()
.find(|c| c.id == a_id)
.unwrap()
.email_notifications,
false
);
assert_eq!(
json.owned_crates
.iter()
.find(|c| c.id == b_id)
.unwrap()
.email_notifications,
true
);
// Update crate_b: email_notifications = false
// Both should be false now
user.update_email_notifications(vec![EmailNotificationsUpdate {
id: b_id,
email_notifications: false,
}]);
let json = user.show_me();
assert_eq!(
json.owned_crates
.iter()
.find(|c| c.id == a_id)
.unwrap()
.email_notifications,
false
);
assert_eq!(
json.owned_crates
.iter()
.find(|c| c.id == b_id)
.unwrap()
.email_notifications,
false
);
// Update crate_a and crate_b: email_notifications = true
// Both should be true
user.update_email_notifications(vec![
EmailNotificationsUpdate {
id: a_id,
email_notifications: true,
},
EmailNotificationsUpdate {
id: b_id,
email_notifications: true,
},
]);
let json = user.show_me();
json.owned_crates.iter().for_each(|c| {
assert!(c.email_notifications);
})
}
/* A user should not be able to update the `email_notifications` value for a crate that is not
owned by them.
*/
#[test]
fn test_update_email_notifications_not_owned() {
let (app, _, user) = TestApp::init().with_user();
let not_my_crate = app.db(|conn| {
let u = new_user("arbitrary_username")
.create_or_update(None, &conn)
.unwrap();
CrateBuilder::new("test_package", u.id).expect_build(&conn)
});
user.update_email_notifications(vec![EmailNotificationsUpdate {
id: not_my_crate.id,
email_notifications: false,
}]);
let email_notifications = app
.db(|conn| {
crate_owners::table
.select(crate_owners::email_notifications)
.filter(crate_owners::crate_id.eq(not_my_crate.id))
.first::<bool>(&*conn)
})
.unwrap();
// There should be no change to the `email_notifications` value for a crate not belonging to me
assert!(email_notifications);
}
| 32.319519 | 100 | 0.623537 |
db911ab8104c0f7c10dbe219d5905a056f782016
| 329 |
#![cfg_attr(RUSTC_WITH_SPECIALIZATION, feature(min_specialization))]
pub mod accounts_data_meter;
pub mod compute_budget;
pub mod instruction_recorder;
pub mod invoke_context;
pub mod log_collector;
pub mod native_loader;
pub mod neon_evm_program;
pub mod pre_account;
pub mod stable_log;
pub mod sysvar_cache;
pub mod timings;
| 23.5 | 68 | 0.829787 |
114d5d4fdf042f04471e6adb05c47140c6002237
| 1,313 |
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[deny(uppercase_variables)];
use std::io::File;
use std::io::IoError;
struct Something {
X: uint //~ ERROR structure field names should start with a lowercase character
}
fn test(Xx: uint) { //~ ERROR variable names should start with a lowercase character
println!("{}", Xx);
}
fn main() {
let Test: uint = 0; //~ ERROR variable names should start with a lowercase character
println!("{}", Test);
let mut f = File::open(&Path::new("something.txt"));
let mut buff = [0u8, ..16];
match f.read(buff) {
Ok(cnt) => println!("read this many bytes: {}", cnt),
Err(IoError{ kind: EndOfFile, .. }) => println!("Got end of file: {}", EndOfFile.to_str()),
//~^ ERROR variable names should start with a lowercase character
}
test(1);
let _ = Something { X: 0 };
}
| 32.02439 | 99 | 0.653465 |
e62695bc4fc6b713114193705837acb15320a4e4
| 4,148 |
use crate::mux::{Tab, TabId};
use std::rc::Rc;
use std::sync::Arc;
use term::Clipboard;
static WIN_ID: ::std::sync::atomic::AtomicUsize = ::std::sync::atomic::AtomicUsize::new(0);
pub type WindowId = usize;
pub struct Window {
id: WindowId,
tabs: Vec<Rc<dyn Tab>>,
active: usize,
clipboard: Option<Arc<dyn Clipboard>>,
invalidated: bool,
}
impl Window {
pub fn new() -> Self {
Self {
id: WIN_ID.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed),
tabs: vec![],
active: 0,
clipboard: None,
invalidated: false,
}
}
pub fn set_clipboard(&mut self, clipboard: &Arc<dyn Clipboard>) {
self.clipboard.replace(Arc::clone(clipboard));
}
pub fn window_id(&self) -> WindowId {
self.id
}
fn check_that_tab_isnt_already_in_window(&self, tab: &Rc<dyn Tab>) {
for t in &self.tabs {
assert_ne!(t.tab_id(), tab.tab_id(), "tab already added to this window");
}
}
fn assign_clipboard_to_tab(&self, tab: &Rc<dyn Tab>) {
if let Some(clip) = self.clipboard.as_ref() {
tab.set_clipboard(clip);
}
}
pub fn insert(&mut self, index: usize, tab: &Rc<dyn Tab>) {
self.check_that_tab_isnt_already_in_window(tab);
self.assign_clipboard_to_tab(tab);
self.tabs.insert(index, Rc::clone(tab));
self.invalidated = true;
}
pub fn push(&mut self, tab: &Rc<dyn Tab>) {
self.check_that_tab_isnt_already_in_window(tab);
self.assign_clipboard_to_tab(tab);
self.tabs.push(Rc::clone(tab));
self.invalidated = true;
}
pub fn is_empty(&self) -> bool {
self.tabs.is_empty()
}
pub fn len(&self) -> usize {
self.tabs.len()
}
pub fn get_by_idx(&self, idx: usize) -> Option<&Rc<dyn Tab>> {
self.tabs.get(idx)
}
pub fn idx_by_id(&self, id: TabId) -> Option<usize> {
for (idx, t) in self.tabs.iter().enumerate() {
if t.tab_id() == id {
return Some(idx);
}
}
None
}
pub fn remove_by_idx(&mut self, idx: usize) -> Rc<dyn Tab> {
self.invalidated = true;
self.tabs.remove(idx)
}
pub fn remove_by_id(&mut self, id: TabId) -> bool {
if let Some(idx) = self.idx_by_id(id) {
self.tabs.remove(idx);
let len = self.tabs.len();
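            // If the removed tab was the active one and the active index now
            // points past the end of the list, clamp it to the last remaining tab.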
if len > 0 && self.active == idx && idx >= len {
self.set_active(len - 1);
}
true
} else {
false
}
}
pub fn check_and_reset_invalidated(&mut self) -> bool {
let res = self.invalidated;
self.invalidated = false;
res
}
pub fn get_active(&self) -> Option<&Rc<dyn Tab>> {
self.get_by_idx(self.active)
}
#[inline]
pub fn get_active_idx(&self) -> usize {
self.active
}
pub fn set_active(&mut self, idx: usize) {
assert!(idx < self.tabs.len());
self.invalidated = true;
self.active = idx;
}
pub fn iter(&self) -> impl Iterator<Item = &Rc<dyn Tab>> {
self.tabs.iter()
}
pub fn prune_dead_tabs(&mut self, live_tab_ids: &[TabId]) {
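        // First pass: drop tabs that report themselves as dead.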
let dead: Vec<TabId> = self
.tabs
.iter()
.filter_map(|tab| {
if tab.is_dead() {
Some(tab.tab_id())
} else {
None
}
})
.collect();
for tab_id in dead {
self.remove_by_id(tab_id);
}
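        // Second pass: drop tabs that the mux no longer lists as live.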
let dead: Vec<TabId> = self
.tabs
.iter()
.filter_map(|tab| {
if live_tab_ids
.iter()
.find(|&&id| id == tab.tab_id())
.is_none()
{
Some(tab.tab_id())
} else {
None
}
})
.collect();
for tab_id in dead {
self.remove_by_id(tab_id);
}
}
}
| 25.604938 | 91 | 0.495661 |
ddd86c4679934d79493a9d20e2755e6d0fd1b164
| 8,002 |
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use context::SharedCrateContext;
use monomorphize::Instance;
use util::nodemap::FxHashMap;
use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::session::config;
use rustc::ty::TyCtxt;
use syntax::attr;
/// The SymbolExportLevel of a symbol specifies from which kinds of crates
/// the symbol will be exported. `C` symbols will be exported from any
/// kind of crate, including cdylibs which export very few things.
/// `Rust` will only be exported if the crate produced is a Rust
/// dylib.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum SymbolExportLevel {
C,
Rust,
}
/// The set of symbols exported from each crate in the crate graph.
pub struct ExportedSymbols {
exports: FxHashMap<CrateNum, Vec<(String, SymbolExportLevel)>>,
}
impl ExportedSymbols {
pub fn empty() -> ExportedSymbols {
ExportedSymbols {
exports: FxHashMap(),
}
}
pub fn compute<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>) -> ExportedSymbols {
let mut local_crate: Vec<_> = scx
.exported_symbols()
.iter()
.map(|&node_id| {
scx.tcx().hir.local_def_id(node_id)
})
.map(|def_id| {
let name = scx.tcx().symbol_name(Instance::mono(scx.tcx(), def_id));
let export_level = export_level(scx, def_id);
debug!("EXPORTED SYMBOL (local): {} ({:?})", name, export_level);
(str::to_owned(&name), export_level)
})
.collect();
if scx.sess().entry_fn.borrow().is_some() {
local_crate.push(("main".to_string(), SymbolExportLevel::C));
}
if let Some(id) = scx.sess().derive_registrar_fn.get() {
let def_id = scx.tcx().hir.local_def_id(id);
let idx = def_id.index;
let disambiguator = scx.sess().local_crate_disambiguator();
let registrar = scx.sess().generate_derive_registrar_symbol(disambiguator, idx);
local_crate.push((registrar, SymbolExportLevel::C));
}
if scx.sess().crate_types.borrow().contains(&config::CrateTypeDylib) {
local_crate.push((metadata_symbol_name(scx.tcx()),
SymbolExportLevel::Rust));
}
let mut exports = FxHashMap();
exports.insert(LOCAL_CRATE, local_crate);
for cnum in scx.sess().cstore.crates() {
debug_assert!(cnum != LOCAL_CRATE);
// If this crate is a plugin and/or a custom derive crate, then
// we're not even going to link those in so we skip those crates.
if scx.sess().cstore.plugin_registrar_fn(cnum).is_some() ||
scx.sess().cstore.derive_registrar_fn(cnum).is_some() {
continue;
}
// Check to see if this crate is a "special runtime crate". These
// crates, implementation details of the standard library, typically
// have a bunch of `pub extern` and `#[no_mangle]` functions as the
// ABI between them. We don't want their symbols to have a `C`
// export level, however, as they're just implementation details.
// Down below we'll hardwire all of the symbols to the `Rust` export
// level instead.
let special_runtime_crate =
scx.sess().cstore.is_allocator(cnum) ||
scx.sess().cstore.is_panic_runtime(cnum) ||
scx.sess().cstore.is_compiler_builtins(cnum);
let crate_exports = scx
.sess()
.cstore
.exported_symbols(cnum)
.iter()
.map(|&def_id| {
let name = scx.tcx().symbol_name(Instance::mono(scx.tcx(), def_id));
let export_level = if special_runtime_crate {
// We can probably do better here by just ensuring that
// it has hidden visibility rather than public
// visibility, as this is primarily here to ensure it's
// not stripped during LTO.
//
// In general though we won't link right if these
// symbols are stripped, and LTO currently strips them.
if &*name == "rust_eh_personality" ||
&*name == "rust_eh_register_frames" ||
&*name == "rust_eh_unregister_frames" {
SymbolExportLevel::C
} else {
SymbolExportLevel::Rust
}
} else {
export_level(scx, def_id)
};
debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
(str::to_owned(&name), export_level)
})
.collect();
exports.insert(cnum, crate_exports);
}
return ExportedSymbols {
exports: exports
};
fn export_level(scx: &SharedCrateContext,
sym_def_id: DefId)
-> SymbolExportLevel {
let attrs = scx.tcx().get_attrs(sym_def_id);
if attr::contains_extern_indicator(scx.sess().diagnostic(), &attrs) {
SymbolExportLevel::C
} else {
SymbolExportLevel::Rust
}
}
}
pub fn exported_symbols(&self,
cnum: CrateNum)
-> &[(String, SymbolExportLevel)] {
match self.exports.get(&cnum) {
Some(exports) => exports,
None => &[]
}
}
pub fn for_each_exported_symbol<F>(&self,
cnum: CrateNum,
export_threshold: SymbolExportLevel,
mut f: F)
where F: FnMut(&str, SymbolExportLevel)
{
for &(ref name, export_level) in self.exported_symbols(cnum) {
if is_below_threshold(export_level, export_threshold) {
f(&name, export_level)
}
}
}
}
pub fn metadata_symbol_name(tcx: TyCtxt) -> String {
format!("rust_metadata_{}_{}",
tcx.crate_name(LOCAL_CRATE),
tcx.crate_disambiguator(LOCAL_CRATE))
}
pub fn crate_export_threshold(crate_type: config::CrateType)
-> SymbolExportLevel {
match crate_type {
config::CrateTypeExecutable |
config::CrateTypeStaticlib |
config::CrateTypeProcMacro |
config::CrateTypeCdylib => SymbolExportLevel::C,
config::CrateTypeRlib |
config::CrateTypeDylib => SymbolExportLevel::Rust,
}
}
pub fn crates_export_threshold(crate_types: &[config::CrateType])
-> SymbolExportLevel {
if crate_types.iter().any(|&crate_type| {
crate_export_threshold(crate_type) == SymbolExportLevel::Rust
}) {
SymbolExportLevel::Rust
} else {
SymbolExportLevel::C
}
}
pub fn is_below_threshold(level: SymbolExportLevel,
threshold: SymbolExportLevel)
-> bool {
if threshold == SymbolExportLevel::Rust {
// We export everything from Rust dylibs
true
} else {
level == SymbolExportLevel::C
}
}
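// A minimal sketch (not part of the original module) of how the helpers above
// interact: a `Rust`-level symbol only crosses the threshold when one of the
// requested crate types is a Rust dylib, while `C`-level symbols always do.
// The specific crate-type values chosen here are illustrative assumptions.
#[cfg(test)]
mod threshold_tests {
    use super::*;
    #[test]
    fn rust_symbols_only_cross_rust_thresholds() {
        // A Rust dylib keeps Rust-level symbols; a cdylib only keeps C-level ones.
        let rust_threshold = crates_export_threshold(&[config::CrateTypeDylib]);
        let c_threshold = crates_export_threshold(&[config::CrateTypeCdylib]);
        assert!(is_below_threshold(SymbolExportLevel::Rust, rust_threshold));
        assert!(!is_below_threshold(SymbolExportLevel::Rust, c_threshold));
        assert!(is_below_threshold(SymbolExportLevel::C, c_threshold));
    }
}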
| 38.104762 | 92 | 0.549613 |
1cc9c3965fcf2ae2efcec86f1a04a1da68b565a1
| 8,542 |
use amethyst_core::specs::prelude::Read;
use amethyst_renderer::{Material, MaterialTextureSet, Sprite, TextureOffset};
use minterpolate::InterpolationPrimitive;
use {AnimationSampling, ApplyData, BlendMethod};
/// Sampler primitive for Material animations
/// Note that material can only ever be animated with `Step`, or a panic will occur.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum MaterialPrimitive {
/// Dynamically altering the texture rendered
Texture(u64),
/// Dynamically altering the section of the texture rendered.
Offset((f32, f32), (f32, f32)),
}
impl InterpolationPrimitive for MaterialPrimitive {
fn add(&self, _: &Self) -> Self {
panic!("Cannot add MaterialPrimitive")
}
fn sub(&self, _: &Self) -> Self {
panic!("Cannot sub MaterialPrimitive")
}
fn mul(&self, _: f32) -> Self {
panic!("Cannot mul MaterialPrimitive")
}
fn dot(&self, _: &Self) -> f32 {
panic!("Cannot dot MaterialPrimitive")
}
fn magnitude2(&self) -> f32 {
panic!("Cannot magnitude2 MaterialPrimitive")
}
fn magnitude(&self) -> f32 {
panic!("Cannot magnitude MaterialPrimitive")
}
fn normalize(&self) -> Self {
panic!("Cannot normalize MaterialPrimitive")
}
}
impl From<Sprite> for MaterialPrimitive {
fn from(sprite: Sprite) -> Self {
let tex_coords = &sprite.tex_coords;
MaterialPrimitive::Offset(
(tex_coords.left, tex_coords.right),
(tex_coords.top, tex_coords.bottom),
)
}
}
impl<'a> From<&'a Sprite> for MaterialPrimitive {
fn from(sprite: &'a Sprite) -> Self {
let tex_coords = &sprite.tex_coords;
MaterialPrimitive::Offset(
(tex_coords.left, tex_coords.right),
(tex_coords.top, tex_coords.bottom),
)
}
}
/// Channels that are animatable on `Material`
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub enum MaterialChannel {
/// Animating the texture used for the albedo
AlbedoTexture,
/// Animating the "window" used to render the albedo.
AlbedoOffset,
/// Animating the texture used for the emission.
EmissionTexture,
/// Animating the "window" used to render the emission.
EmissionOffset,
/// Animating the texture used for the normal
NormalTexture,
/// Animating the "window" used to render the normal.
NormalOffset,
/// Animating the texture used for the metallic
MetallicTexture,
/// Animating the "window" used to render the metallic.
MetallicOffset,
/// Animating the texture used for the roughness
RoughnessTexture,
/// Animating the "window" used to render the roughness.
RoughnessOffset,
/// Animating the texture used for the ambient occlusion
AmbientOcclusionTexture,
/// Animating the "window" used to render the ambient occlusion.
AmbientOcclusionOffset,
/// Animating the texture used for the caveat
CaveatTexture,
/// Animating the "window" used to render the caveat.
CaveatOffset,
}
impl<'a> ApplyData<'a> for Material {
type ApplyData = Read<'a, MaterialTextureSet>;
}
fn offset(offset: &TextureOffset) -> MaterialPrimitive {
MaterialPrimitive::Offset(offset.u, offset.v)
}
fn texture_offset(u: (f32, f32), v: (f32, f32)) -> TextureOffset {
TextureOffset { u, v }
}
impl AnimationSampling for Material {
type Primitive = MaterialPrimitive;
type Channel = MaterialChannel;
fn apply_sample(
&mut self,
channel: &Self::Channel,
data: &Self::Primitive,
extra: &Read<MaterialTextureSet>,
) {
match (*channel, *data) {
(MaterialChannel::AlbedoTexture, MaterialPrimitive::Texture(i)) => {
if let Some(handle) = extra.handle(i) {
self.albedo = handle;
}
}
(MaterialChannel::EmissionTexture, MaterialPrimitive::Texture(i)) => {
if let Some(handle) = extra.handle(i) {
self.emission = handle;
}
}
(MaterialChannel::NormalTexture, MaterialPrimitive::Texture(i)) => {
if let Some(handle) = extra.handle(i) {
self.normal = handle;
}
}
(MaterialChannel::MetallicTexture, MaterialPrimitive::Texture(i)) => {
if let Some(handle) = extra.handle(i) {
self.metallic = handle;
}
}
(MaterialChannel::RoughnessTexture, MaterialPrimitive::Texture(i)) => {
if let Some(handle) = extra.handle(i) {
self.roughness = handle;
}
}
(MaterialChannel::AmbientOcclusionTexture, MaterialPrimitive::Texture(i)) => {
if let Some(handle) = extra.handle(i) {
self.ambient_occlusion = handle;
}
}
(MaterialChannel::CaveatTexture, MaterialPrimitive::Texture(i)) => {
if let Some(handle) = extra.handle(i) {
self.caveat = handle;
}
}
(MaterialChannel::AlbedoOffset, MaterialPrimitive::Offset(u, v)) => {
self.albedo_offset = texture_offset(u, v)
}
(MaterialChannel::EmissionOffset, MaterialPrimitive::Offset(u, v)) => {
self.emission_offset = texture_offset(u, v)
}
(MaterialChannel::NormalOffset, MaterialPrimitive::Offset(u, v)) => {
self.normal_offset = texture_offset(u, v)
}
(MaterialChannel::MetallicOffset, MaterialPrimitive::Offset(u, v)) => {
self.metallic_offset = texture_offset(u, v)
}
(MaterialChannel::RoughnessOffset, MaterialPrimitive::Offset(u, v)) => {
self.roughness_offset = texture_offset(u, v)
}
(MaterialChannel::AmbientOcclusionOffset, MaterialPrimitive::Offset(u, v)) => {
self.ambient_occlusion_offset = texture_offset(u, v)
}
(MaterialChannel::CaveatOffset, MaterialPrimitive::Offset(u, v)) => {
self.caveat_offset = texture_offset(u, v)
}
_ => panic!("Bad combination of data in Material animation"),
}
}
fn current_sample(
&self,
channel: &Self::Channel,
extra: &Read<MaterialTextureSet>,
) -> Self::Primitive {
const ERR_MSG: &str = "Unable to get requested channel from MaterialTextureSet.";
match *channel {
MaterialChannel::AlbedoTexture => {
MaterialPrimitive::Texture(extra.id(&self.albedo).expect(ERR_MSG))
}
MaterialChannel::EmissionTexture => {
MaterialPrimitive::Texture(extra.id(&self.emission).expect(ERR_MSG))
}
MaterialChannel::NormalTexture => {
MaterialPrimitive::Texture(extra.id(&self.normal).expect(ERR_MSG))
}
MaterialChannel::MetallicTexture => {
MaterialPrimitive::Texture(extra.id(&self.metallic).expect(ERR_MSG))
}
MaterialChannel::RoughnessTexture => {
MaterialPrimitive::Texture(extra.id(&self.roughness).expect(ERR_MSG))
}
MaterialChannel::AmbientOcclusionTexture => {
MaterialPrimitive::Texture(extra.id(&self.ambient_occlusion).expect(ERR_MSG))
}
MaterialChannel::CaveatTexture => {
MaterialPrimitive::Texture(extra.id(&self.caveat).expect(ERR_MSG))
}
MaterialChannel::AlbedoOffset => offset(&self.albedo_offset),
MaterialChannel::EmissionOffset => offset(&self.emission_offset),
MaterialChannel::NormalOffset => offset(&self.normal_offset),
MaterialChannel::MetallicOffset => offset(&self.metallic_offset),
MaterialChannel::RoughnessOffset => offset(&self.roughness_offset),
MaterialChannel::AmbientOcclusionOffset => offset(&self.ambient_occlusion_offset),
MaterialChannel::CaveatOffset => offset(&self.caveat_offset),
}
}
fn default_primitive(_: &Self::Channel) -> Self::Primitive {
panic!("Blending is not applicable to Material animation")
}
fn blend_method(&self, _: &Self::Channel) -> Option<BlendMethod> {
None
}
}
| 36.978355 | 94 | 0.601264 |
fe1537fbd021dc15586046888448f01ac0ea898a
| 2,942 |
use crate::abi::FnAbi;
use crate::attributes;
use crate::base;
use crate::context::CodegenCx;
use crate::llvm;
use crate::type_of::LayoutLlvmExt;
use log::debug;
use rustc_codegen_ssa::traits::*;
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_middle::mir::mono::{Linkage, Visibility};
use rustc_middle::ty::layout::{FnAbiExt, LayoutOf};
use rustc_middle::ty::{Instance, TypeFoldable};
pub use rustc_middle::mir::mono::MonoItem;
impl PreDefineMethods<'tcx> for CodegenCx<'ll, 'tcx> {
fn predefine_static(
&self,
def_id: DefId,
linkage: Linkage,
visibility: Visibility,
symbol_name: &str,
) {
let instance = Instance::mono(self.tcx, def_id);
let ty = instance.monomorphic_ty(self.tcx);
let llty = self.layout_of(ty).llvm_type(self);
let g = self.define_global(symbol_name, llty).unwrap_or_else(|| {
self.sess().span_fatal(
self.tcx.def_span(def_id),
&format!("symbol `{}` is already defined", symbol_name),
)
});
unsafe {
llvm::LLVMRustSetLinkage(g, base::linkage_to_llvm(linkage));
llvm::LLVMRustSetVisibility(g, base::visibility_to_llvm(visibility));
}
self.instances.borrow_mut().insert(instance, g);
}
fn predefine_fn(
&self,
instance: Instance<'tcx>,
linkage: Linkage,
visibility: Visibility,
symbol_name: &str,
) {
assert!(!instance.substs.needs_infer() && !instance.substs.has_param_types());
let fn_abi = FnAbi::of_instance(self, instance, &[]);
let lldecl = self.declare_fn(symbol_name, &fn_abi);
unsafe { llvm::LLVMRustSetLinkage(lldecl, base::linkage_to_llvm(linkage)) };
let attrs = self.tcx.codegen_fn_attrs(instance.def_id());
base::set_link_section(lldecl, &attrs);
if linkage == Linkage::LinkOnceODR || linkage == Linkage::WeakODR {
llvm::SetUniqueComdat(self.llmod, lldecl);
}
// If we're compiling the compiler-builtins crate, e.g., the equivalent of
// compiler-rt, then we want to implicitly compile everything with hidden
// visibility as we're going to link this object all over the place but
// don't want the symbols to get exported.
if linkage != Linkage::Internal
&& linkage != Linkage::Private
&& self.tcx.is_compiler_builtins(LOCAL_CRATE)
{
unsafe {
llvm::LLVMRustSetVisibility(lldecl, llvm::Visibility::Hidden);
}
} else {
unsafe {
llvm::LLVMRustSetVisibility(lldecl, base::visibility_to_llvm(visibility));
}
}
debug!("predefine_fn: instance = {:?}", instance);
attributes::from_fn_attrs(self, lldecl, instance, &fn_abi);
self.instances.borrow_mut().insert(instance, lldecl);
}
}
| 34.611765 | 90 | 0.615568 |
381599aa2b7a29da94cb745ddcac81002e7f51b9
| 1,213 |
use amfi::NavRecordIterator;
use std::env;
use std::io::Read;
use std::path::PathBuf;
fn parse<T: Read>(items: NavRecordIterator<T>) -> Result<(), Box<dyn std::error::Error>> {
let mut c = 0;
let mut e = 0;
for item in items {
match item {
Err(error) => {
e += 1;
eprintln!("{}", error)
}
Ok(ref record) => {
c += 1;
#[cfg(feature = "serde")]
println!("{}", serde_json::to_string(&record)?);
#[cfg(not(feature = "serde"))]
println!("{:>10.4} {} {}", record.nav, record.date, record.name);
}
}
}
println!("Total: {} Error: {}", c, e);
Ok(())
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
let args: Vec<String> = env::args().collect();
if args.len() == 2 && args[1] == "--online" {
let navs = amfi::nav_from_url("http://localhost:8000/NAVAll.txt")?;
parse(navs)?;
} else {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push("fixtures/NAVOpen.txt");
let navs = amfi::nav_from_file(path)?;
parse(navs)?;
};
Ok(())
}
| 28.209302 | 90 | 0.472383 |
1eff7b6d7a3980a3b495b89115b6fef14c3b74ff
| 3,617 |
use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
use crate::{
BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat,
StreamConfig, StreamError, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
};
#[derive(Default)]
pub struct Devices;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Device;
pub struct Host;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Stream;
pub struct SupportedInputConfigs;
pub struct SupportedOutputConfigs;
impl Host {
#[allow(dead_code)]
pub fn new() -> Result<Self, crate::HostUnavailable> {
Ok(Host)
}
}
impl Devices {
pub fn new() -> Result<Self, DevicesError> {
Ok(Devices)
}
}
impl DeviceTrait for Device {
type SupportedInputConfigs = SupportedInputConfigs;
type SupportedOutputConfigs = SupportedOutputConfigs;
type Stream = Stream;
#[inline]
fn name(&self) -> Result<String, DeviceNameError> {
Ok("null".to_owned())
}
#[inline]
fn supported_input_configs(
&self,
) -> Result<SupportedInputConfigs, SupportedStreamConfigsError> {
unimplemented!()
}
#[inline]
fn supported_output_configs(
&self,
) -> Result<SupportedOutputConfigs, SupportedStreamConfigsError> {
unimplemented!()
}
#[inline]
fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
unimplemented!()
}
#[inline]
fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
unimplemented!()
}
fn build_input_stream_raw<D, E>(
&self,
_config: &StreamConfig,
_sample_format: SampleFormat,
_data_callback: D,
_error_callback: E,
) -> Result<Self::Stream, BuildStreamError>
where
D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
unimplemented!()
}
/// Create an output stream.
fn build_output_stream_raw<D, E>(
&self,
_config: &StreamConfig,
_sample_format: SampleFormat,
_data_callback: D,
_error_callback: E,
) -> Result<Self::Stream, BuildStreamError>
where
D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
unimplemented!()
}
}
impl HostTrait for Host {
type Devices = Devices;
type Device = Device;
fn is_available() -> bool {
false
}
fn devices(&self) -> Result<Self::Devices, DevicesError> {
Devices::new()
}
fn default_input_device(&self) -> Option<Device> {
None
}
fn default_output_device(&self) -> Option<Device> {
None
}
}
impl StreamTrait for Stream {
fn play(&self) -> Result<(), PlayStreamError> {
unimplemented!()
}
fn pause(&self) -> Result<(), PauseStreamError> {
unimplemented!()
}
}
impl Iterator for Devices {
type Item = Device;
#[inline]
fn next(&mut self) -> Option<Device> {
None
}
}
impl Iterator for SupportedInputConfigs {
type Item = SupportedStreamConfigRange;
#[inline]
fn next(&mut self) -> Option<SupportedStreamConfigRange> {
None
}
}
impl Iterator for SupportedOutputConfigs {
type Item = SupportedStreamConfigRange;
#[inline]
fn next(&mut self) -> Option<SupportedStreamConfigRange> {
None
}
}
| 23.038217 | 96 | 0.638927 |
f8652a02933bf7497e0808b235d5505495192ee9
| 2,457 |
use warp::crypto::exchange::X25519PublicKey;
use warp::crypto::PublicKey;
use warp::multipass::identity::Identity;
use warp::multipass::MultiPass;
use warp::tesseract::Tesseract;
use warp_mp_solana::solana::anchor_client::anchor_lang::prelude::Pubkey;
use warp_mp_solana::SolanaAccount;
fn account() -> anyhow::Result<SolanaAccount> {
let mut tesseract = Tesseract::default();
tesseract
.unlock(b"this is my totally secured password that should nnever be embedded in code")?;
let mut account = SolanaAccount::with_devnet(&tesseract);
account.create_identity(None, None)?;
Ok(account)
}
fn username(ident: &Identity) -> String {
format!("{}#{}", &ident.username(), &ident.short_id())
}
fn main() -> anyhow::Result<()> {
let account_a = account()?;
let account_b = account()?;
let ident_a = account_a.get_own_identity()?;
println!(
"{} with {}",
username(&ident_a),
Pubkey::new(ident_a.public_key().as_ref())
);
let ident_b = account_b.get_own_identity()?;
println!(
"{} with {}",
username(&ident_b),
Pubkey::new(ident_b.public_key().as_ref())
);
let alice_pubkey = ecdh_public_key(&account_a)?;
let bob_pubkey = ecdh_public_key(&account_b)?;
let alice_key = ecdh_key_exchange(&account_a, bob_pubkey).map(hex::encode)?;
let bob_key = ecdh_key_exchange(&account_b, alice_pubkey).map(hex::encode)?;
assert_eq!(&alice_key, &bob_key);
println!("Account A Key: {}", alice_key);
println!("Account B Key: {}", bob_key);
//TODO: Encryption?
Ok(())
}
fn ecdh_public_key(account: &impl MultiPass) -> anyhow::Result<PublicKey> {
let privkey = account.decrypt_private_key(None)?;
let keypair = warp::crypto::signature::Ed25519Keypair::from_bytes(&privkey)?;
let secret = warp::crypto::exchange::X25519Secret::from_ed25519_keypair(&keypair)?;
let pubkey = PublicKey::from_bytes(secret.public_key().to_inner().as_bytes());
Ok(pubkey)
}
fn ecdh_key_exchange(account: &impl MultiPass, public_key: PublicKey) -> anyhow::Result<Vec<u8>> {
let private_key = account.decrypt_private_key(None)?;
let kp = warp::crypto::signature::Ed25519Keypair::from_bytes(&private_key)?;
let secret = warp::crypto::exchange::X25519Secret::from_ed25519_keypair(&kp)?;
let pubkey = X25519PublicKey::from_bytes(public_key.as_ref());
let ecdh_key = secret.key_exchange(pubkey);
Ok(ecdh_key)
}
| 33.657534 | 98 | 0.682133 |
d6641e5eae1c108620ccff26bc8f73c873455af9
| 317 |
fn main() {
    // Define a default classifier from a dataset
let mut model = automl::SupervisedModel::new(
smartcore::dataset::breast_cancer::load_dataset(),
automl::Settings::default_classification(),
);
// Run a model comparison with all models at default settings
model.train();
}
| 28.818182 | 65 | 0.66877 |
8736b02cb5bee44d75085a311be178c3955ade70
| 1,008 |
use std::env;
use std::fs::File;
use std::io::Read;
use std::path::Path;
#[cfg(windows)]
extern crate winres;
#[cfg(windows)]
fn main() {
get_commit();
let mut res = winres::WindowsResource::new();
res.set_icon("assets\\icon.ico");
res.compile().unwrap();
}
#[cfg(not(windows))]
fn main() {
get_commit();
}
// Save the commit hash to an environment variable
fn get_commit() {
// Github Actions commit
let mut commit = if let Ok(commit) = env::var("GITHUB_SHA") {
commit
} else {
// Local commit
if let Ok(mut f) = File::open(Path::new(".git").join("refs").join("heads").join("master")) {
let mut buf = String::new();
f.read_to_string(&mut buf).ok();
buf
} else {
String::new()
}
};
// Trim
if commit.len() > 8 {
commit = commit[..8].to_string()
}
if commit.is_empty() {
commit = "unknown".to_string();
}
println!("cargo:rustc-env=COMMIT={}", commit);
}
| 22.4 | 100 | 0.542659 |
ff6f271296d24dc6d8bbf7ea245ba14ef2242c0f
| 5,035 |
use crate::models::common::*;
use crate::models::problem::{FixedJobPermutation, Job, Multi, Place, Single};
use std::sync::Arc;
pub const DEFAULT_JOB_LOCATION: Location = 0;
pub const DEFAULT_JOB_DURATION: Duration = 0.0;
pub const DEFAULT_JOB_TIME_SPAN: TimeSpan = TimeSpan::Window(TimeWindow { start: 0., end: 1000. });
pub const DEFAULT_ACTIVITY_TIME_WINDOW: TimeWindow = TimeWindow { start: 0., end: 1000. };
pub fn test_place_with_location(location: Option<Location>) -> Place {
Place { location, duration: DEFAULT_JOB_DURATION, times: vec![DEFAULT_JOB_TIME_SPAN] }
}
pub fn test_single() -> Single {
let mut single =
Single { places: vec![test_place_with_location(Some(DEFAULT_JOB_LOCATION))], dimens: Default::default() };
single.dimens.set_id("single");
single
}
pub fn test_single_with_simple_demand(demand: Demand<SingleDimLoad>) -> Arc<Single> {
let mut single = test_single();
single.dimens.set_demand(demand);
Arc::new(single)
}
pub fn test_single_with_id(id: &str) -> Arc<Single> {
let mut single = test_single();
single.dimens.set_id(id);
Arc::new(single)
}
pub fn test_single_with_location(location: Option<Location>) -> Arc<Single> {
Arc::new(Single { places: vec![test_place_with_location(location)], dimens: Default::default() })
}
pub fn test_single_with_id_and_location(id: &str, location: Option<Location>) -> Arc<Single> {
let mut single = Single { places: vec![test_place_with_location(location)], dimens: Default::default() };
single.dimens.set_id(id);
Arc::new(single)
}
pub fn test_single_with_locations(locations: Vec<Option<Location>>) -> Arc<Single> {
Arc::new(Single {
places: locations.into_iter().map(test_place_with_location).collect(),
dimens: Default::default(),
})
}
pub fn test_multi_job_with_locations(locations: Vec<Vec<Option<Location>>>) -> Arc<Multi> {
Multi::bind(Multi::new(locations.into_iter().map(test_single_with_locations).collect(), Default::default()))
}
pub fn get_job_id(job: &Job) -> &String {
job.dimens().get_id().unwrap()
}
pub struct SingleBuilder {
single: Single,
}
impl Default for SingleBuilder {
fn default() -> Self {
Self { single: test_single() }
}
}
impl SingleBuilder {
pub fn id(&mut self, id: &str) -> &mut Self {
self.single.dimens.set_value("id", id.to_string());
self
}
pub fn location(&mut self, loc: Option<Location>) -> &mut Self {
self.single.places.first_mut().unwrap().location = loc;
self
}
pub fn duration(&mut self, dur: Duration) -> &mut Self {
self.single.places.first_mut().unwrap().duration = dur;
self
}
pub fn times(&mut self, times: Vec<TimeWindow>) -> &mut Self {
self.single.places.first_mut().unwrap().times = times.into_iter().map(TimeSpan::Window).collect();
self
}
pub fn demand(&mut self, demand: Demand<SingleDimLoad>) -> &mut Self {
self.single.dimens.set_demand(demand);
self
}
pub fn places(&mut self, places: Vec<(Option<Location>, Duration, Vec<(f64, f64)>)>) -> &mut Self {
self.single.places = places
.into_iter()
.map(|p| Place {
location: p.0,
duration: p.1,
times: p.2.into_iter().map(|(start, end)| TimeSpan::Window(TimeWindow::new(start, end))).collect(),
})
.collect();
self
}
pub fn build(&mut self) -> Single {
std::mem::replace(&mut self.single, test_single())
}
pub fn build_as_job_ref(&mut self) -> Job {
Job::Single(Arc::new(self.build()))
}
}
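// A minimal sketch (not part of the original helpers) of how `SingleBuilder`
// is typically used in a test; the id, location and duration values are
// hypothetical.
#[allow(dead_code)]
fn example_single_job() -> Job {
    SingleBuilder::default()
        .id("job1")
        .location(Some(3))
        .duration(10.)
        .build_as_job_ref()
}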
fn test_multi() -> Multi {
let mut multi =
Multi::new(vec![test_single_with_id("single1"), test_single_with_id("single2")], Default::default());
multi.dimens.set_id("multi");
multi
}
pub struct MultiBuilder {
multi: Multi,
custom_permutator: bool,
}
impl Default for MultiBuilder {
fn default() -> Self {
let mut multi = Multi::new(vec![], Default::default());
multi.dimens.set_id("multi");
Self { multi, custom_permutator: false }
}
}
impl MultiBuilder {
pub fn new_with_permutations(permutations: Vec<Vec<usize>>) -> Self {
Self {
multi: Multi::new_with_permutator(
vec![],
Default::default(),
Box::new(FixedJobPermutation::new(permutations)),
),
custom_permutator: true,
}
}
pub fn id(&mut self, id: &str) -> &mut Self {
self.multi.dimens.set_id(id);
self
}
pub fn job(&mut self, job: Single) -> &mut Self {
self.multi.jobs.push(Arc::new(job));
self
}
pub fn build(&mut self) -> Job {
let multi = std::mem::replace(&mut self.multi, test_multi());
let multi = if !self.custom_permutator { Multi::new(multi.jobs, multi.dimens) } else { multi };
let multi = Multi::bind(multi);
Job::Multi(multi)
}
}
| 30.149701 | 115 | 0.623237 |
26a3855bf4a522474600f475a7a10e72a5737259
| 15,081 |
#![allow(clippy::useless_vec)]
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs;
use std::ops::{Add, AddAssign};
use std::path::{Path, PathBuf};
use std::sync::mpsc::{sync_channel, Receiver, SyncSender};
use lazy_static::lazy_static;
use crate::executor::ThreadPoolExecutor;
use crate::Result;
pub struct Calculator {
filename_receiver: Receiver<PathBuf>,
detail_sender: SyncSender<Detail>,
executor: ThreadPoolExecutor,
}
impl Calculator {
pub fn new(filename_receiver: Receiver<PathBuf>) -> (Self, Receiver<Detail>) {
let (detail_sender, detail_receiver) = sync_channel::<Detail>(32);
let calculator = Self {
filename_receiver,
detail_sender,
executor: ThreadPoolExecutor::new(),
};
(calculator, detail_receiver)
}
pub fn calculate(self) {
let Calculator {
filename_receiver,
detail_sender,
executor,
} = self;
for filename in filename_receiver {
let sender = SyncSender::clone(&detail_sender);
executor.submit(move || {
filename
.extension()
.and_then(|ext| MANAGER.get_by_extension(ext))
.and_then(|info| Self::statistical_detail(filename, info).ok())
.and_then(|detail| sender.send(detail).ok());
});
}
}
#[inline]
fn statistical_detail<P: AsRef<Path> + Sync + Send>(filename: P, info: &Info) -> Result<Detail> {
Self::statistical_detail_impl(filename.as_ref(), info)
}
fn statistical_detail_impl(filename: &Path, info: &Info) -> Result<Detail> {
#[rustfmt::skip]
let Info { language, single, multi, .. } = info;
let content = fs::read_to_string(&filename)?;
let metadata = filename.metadata()?;
let bytes = metadata.len();
let mut blank = 0;
let mut comment = 0;
let mut code = 0;
let mut in_comment: Option<(&str, &str)> = None;
'here: for line in content.lines() {
let line = line.trim();
// empty line
if line.is_empty() {
blank += 1;
continue;
}
// match single line comments
for single in single {
if line.starts_with(single) {
comment += 1;
continue 'here;
}
}
// match multi line comments
for (start, end) in multi {
if let Some(d) = in_comment {
if d != (*start, *end) {
continue;
}
}
                // a multi-line comment may start and end on the same line
let mut same_line = false;
if line.starts_with(start) {
in_comment = match in_comment {
Some(_) => {
comment += 1;
in_comment = None;
continue 'here;
}
None => {
same_line = true;
Some((start, end))
}
}
}
                // This line is inside a multi-line comment
if in_comment.is_some() {
comment += 1;
if line.ends_with(end) {
if same_line {
if line.len() >= (start.len() + end.len()) {
in_comment = None;
}
} else {
in_comment = None;
}
}
continue 'here;
}
}
code += 1;
}
Ok(Detail::new(language, 1, bytes, blank, comment, code))
}
}
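// A minimal sketch (not part of the original module) of how the classifier
// above counts lines; it writes a tiny hypothetical Rust snippet to a
// temporary file, so the file name and its contents are assumptions.
#[cfg(test)]
mod classifier_tests {
    use super::*;
    #[test]
    fn classifies_blank_comment_and_code_lines() {
        let info = Info::new("Rust", vec!["rs"], vec!["//"], vec![("/*", "*/")]);
        let path = std::env::temp_dir().join("classifier_sketch.rs");
        std::fs::write(&path, "// a comment\n\nfn main() {}\n").expect("write temp file");
        let detail = Calculator::statistical_detail(&path, &info)
            .ok()
            .expect("classification failed");
        assert_eq!((detail.blank, detail.comment, detail.code), (1, 1, 1));
    }
}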
#[derive(Debug)]
struct Info {
language: &'static str,
file_ext: Vec<&'static str>,
single: Vec<&'static str>,
multi: Vec<(&'static str, &'static str)>,
}
impl Info {
#[rustfmt::skip]
#[inline]
fn new(language: &'static str, file_ext: Vec<&'static str>, single: Vec<&'static str>, multi: Vec<(&'static str, &'static str)>) -> Self {
Self { language, file_ext, single, multi }
}
}
struct Manager {
languages: HashMap<&'static str, Info>,
ext_to_language: HashMap<&'static str, &'static str>,
}
impl Manager {
#[inline]
fn get_by_extension(&self, ext: &OsStr) -> Option<&Info> {
ext.to_str()
.and_then(|ext| self.ext_to_language.get(ext))
.and_then(|language| self.languages.get(language))
}
}
#[rustfmt::skip]
lazy_static! {
static ref MANAGER: Manager = {
let mut languages = HashMap::<&'static str, Info>::new();
let mut ext_to_language = HashMap::new();
macro_rules! language {
($language: expr, $ext: expr, $single: expr, $multi: expr) => {{
languages.insert($language, Info::new($language, $ext, $single, $multi));
for e in $ext {
ext_to_language.insert(e, $language);
}
}};
($language: expr, $ext: expr, $single: expr) => {
language!($language, $ext, $single, vec![])
};
($language: expr, $ext: expr) => {
language!($language, $ext, vec![], vec![])
};
}
language!("ABAP", vec!["abap"], vec!["*", "\\\""]);
language!("ABNF", vec!["abnf"], vec![";"]);
language!("ActionScript", vec!["as"], vec!["//"], vec![("/*", "*/")]);
language!("Ada", vec!["ada", "adb", "ads", "pad"], vec!["--"]);
language!("Agda", vec!["agda"], vec!["--"], vec![("{-", "-}")]);
language!("Alloy", vec!["als"], vec!["--", "//"], vec![("/*", "*/")]);
language!("Arduino C++", vec!["ino"], vec!["//"], vec![("/*", "*/")]);
language!("Assembly", vec!["asm"], vec![";"]);
language!("GNU Style Assembly", vec!["s"], vec!["//"], vec![("/*", "*/")]);
language!("ASP", vec!["asa", "asp"], vec!["'", "REM"]);
language!("ASP.NET", vec!["asax", "ascx", "asmx", "aspx", "master", "sitemap", "webinfo"], vec![], vec![("<!--", "-->"), ("<%--", "-->")]);
language!("Autoconf", vec!["in"], vec!["#", "dnl"]);
language!("Automake", vec!["am"], vec!["#"]);
language!("Bash", vec!["bash"], vec!["#"]);
language!("Batch", vec!["bat", "btm", "cmd"], vec!["REM", "::"]);
language!("Cabal", vec!["cabal"], vec!["--"], vec![("{-", "-}")]);
language!("C", vec!["c"], vec!["//"], vec![("/*", "*/")]);
language!("Ceylon", vec!["ceylon"], vec!["//"], vec![("/*", "*/")]);
language!("C Header", vec!["h"], vec!["//"], vec![("/*", "*/")]);
language!("Clojure", vec!["clj"], vec![";"]);
language!("ClojureScript", vec!["cljs"], vec![";"]);
language!("ClojureC", vec!["cljc"], vec![";"]);
language!("CMake", vec!["cmake"], vec!["#"]);
language!("Cobol", vec!["cob", "cbl", "ccp", "cobol", "cpy"], vec!["*"]);
language!("CoffeeScript", vec!["coffee", "cjsx"], vec!["#"], vec![("###", "###")]);
language!("Coq", vec!["v"], vec![], vec![("(*", "*)")]);
language!("C++", vec!["cc", "cpp", "cxx", "c++", "pcc", "tpp"], vec!["//"], vec![("/*", "*/")]);
language!("C++ Header", vec!["hh", "hpp", "hxx", "inl", "ipp"], vec!["//"], vec![("/*", "*/")]);
language!("Crystal", vec!["crystal"], vec!["#"]);
language!("C#", vec!["cs", "csx"], vec!["//"], vec![("/*", "*/")]);
language!("CSS", vec!["css"], vec!["//"], vec![("/*", "*/")]);
language!("D", vec!["d"], vec!["//"], vec![("/*", "*/")]);
language!("DAML", vec!["daml"], vec!["--"], vec![("{-", "-}")]);
language!("dart", vec!["dart"], vec!["//"], vec![("/*", "*/")]);
language!("Emacs Lisp", vec!["el"], vec![";"]);
language!("Elixir", vec!["ex", "exs"], vec!["#"]);
language!("Elm", vec!["elm"], vec!["--"], vec![("{-", "-}")]);
language!("Erlang", vec!["erl", "hrl"], vec!["%"]);
language!("FreeMarker", vec!["ftl", "ftlh", "ftlx"], vec![], vec![("<#--", "-->")]);
language!("F#", vec!["fs", "fsi", "fsx", "fsscript"], vec!["//"], vec![("(*", "*)")]);
language!("Go", vec!["go"], vec!["//"], vec![("/*", "*/"), ("/**", "*/")]);
language!("Go HTML", vec!["gohtml"], vec![], vec![("<!--", "-->"), ("{{/*", "*/}}")]);
language!("GraphQL", vec!["gql", "graphql"], vec!["#"]);
language!("Groovy", vec!["groovy", "grt", "gtpl", "gvy"], vec!["//"], vec![("/*", "*/")]);
language!("Gradle", vec!["gradle"], vec!["//"], vec![("/*", "*/"), ("/**", "*/")]);
language!("Haskell", vec!["hs"], vec!["--"], vec![("{-", "-}")]);
language!("Haxe", vec!["hx"], vec!["//"], vec![("/*", "*/")]);
language!("Html", vec!["html", "xhtml", "hml"], vec![], vec![("<!--", "-->")]);
language!("Idris", vec!["idr", "lidr"], vec!["--"], vec![("{-", "-}")]);
language!("Ini", vec!["ini"], vec![";", "#"]);
language!("Java", vec!["java"], vec!["//"], vec![("/*", "*/")]);
language!("JavaScript", vec!["js", "mjs"], vec!["//"], vec![("/*", "*/")]);
language!("JSON", vec!["json"]);
language!("JSX", vec!["jsx"], vec!["//"], vec![("/*", "*/")]);
language!("Julia", vec!["jl"], vec!["#"], vec![("#=", "=#")]);
language!("Jupyter Notebooks", vec!["ipynb"]);
language!("Kotlin", vec!["kt", "kts"], vec!["//"], vec![("/*", "*/")]);
language!("Less", vec!["less"], vec!["//"], vec![("/*", "*/")]);
language!("LLVM", vec!["ll"], vec![";"]);
language!("Lua", vec!["lua"], vec!["--"], vec![("--[[", "]]")]);
language!("Lucius", vec!["lucius"], vec!["//"], vec![("/*", "*/")]);
language!("Markdown", vec!["md", "markdown"]);
language!("Mint", vec!["mint"]);
language!("Nim", vec!["nim"], vec!["#"]);
language!("Nix", vec!["nix"], vec![], vec![("/*", "*/")]);
language!("Objective-C", vec!["m"], vec!["//"], vec![("/*", "*/")]);
language!("Objective-C++", vec!["mm"], vec!["//"], vec![("/*", "*/")]);
language!("OCaml", vec!["ml", "mli", "re", "rei"], vec![], vec![("/*", "*/")]);
language!("Org", vec!["org"], vec!["#"]);
language!("Pascal", vec!["pas", "pp"], vec!["//"], vec![("{", "}"), ("(*", "*)")]);
language!("Perl", vec!["pl", "pm"], vec!["#"], vec![("=pod", "=cut")]);
language!("Pest", vec!["pest"], vec!["//"]);
language!("Plain Text", vec!["text", "txt"]);
language!("Php", vec!["php4", "php5", "php", "phtml"], vec!["#", "//"], vec![("/*", "*/"), ("/**", "*/")]);
language!("PostCSS", vec!["pcss", "sss"], vec!["//"], vec![("/*", "*/")]);
language!("Prolog", vec!["p", "pro"], vec!["%"]);
language!("Protocol Buffer", vec!["proto"], vec!["//"]);
language!("PowerShell", vec!["ps1", "psm1", "psd1", "ps1xml", "cdxml", "pssc", "psc1"], vec!["#"], vec![("<#", "#>")]);
language!("PureScript", vec!["purs"], vec!["--"], vec![("{-", "-}")]);
language!("Python", vec!["py"], vec!["#"], vec![("'''", "'''"), (r#"""#, r#"""#)]);
language!("QCL", vec!["qcl"], vec!["//"], vec![("/*", "*/")]);
language!("R", vec!["r"], vec!["#"]);
language!("Racket", vec!["rkt"], vec![";"], vec![("#|", "|#")]);
language!("Rakefile", vec!["rake"], vec!["#"], vec![("=begin", "=end")]);
language!("Rakudo", vec!["pl6", "pm6"], vec!["#"], vec![("=begin", "=end")]);
language!("Rust", vec!["rs"], vec!["//", "///", "///!"], vec![("/*", "*/")]);
language!("Ruby", vec!["rb"], vec!["#"], vec![("=begin", "=end")]);
language!("Ruby HTML", vec!["erb", "rhtml"], vec![], vec![("<!--", "-->")]);
language!("ReStructuredText", vec!["rst"]);
language!("Sass", vec!["sass", "scss"], vec!["//"], vec![("/*", "*/")]);
language!("Scala", vec!["scala", "sc"], vec!["//"], vec![("/*", "*/")]);
language!("Scheme", vec!["scm", "ss"], vec![";"], vec![("#|", "|#")]);
language!("Shell", vec!["sh"], vec!["#"]);
language!("Solidity", vec!["sol"], vec!["//"], vec![("/*", "*/")]);
language!("SQL", vec!["sql"], vec!["#", "--"], vec![("/*", "*/")]);
language!("Stylus", vec!["styl"], vec!["//"], vec![("/*", "*/")]);
language!("SVG", vec!["svg"], vec![], vec![("<!--", "-->")]);
language!("Swift", vec!["swift"], vec!["//"], vec![("/*", "*/")]);
language!("TCL", vec!["tcl"], vec!["#"]);
language!("TeX", vec!["tex", "sty"], vec!["%"]);
language!("Thrift", vec!["thrift"], vec!["#", "//"], vec![("/*", "*/")]);
language!("Toml", vec!["toml"], vec!["#"]);
language!("TSX", vec!["tsx"], vec!["//"], vec![("/*", "*/")]);
language!("TypeScript", vec!["ts"], vec!["//"], vec![("/*", "*/")]);
language!("VBScript", vec!["vbs"], vec!["'", "REM"]);
language!("Visual Basic", vec!["vb"], vec!["'"]);
language!("Visual Studio Solution", vec!["sln"]);
language!("Visual Studio Project", vec!["vcproj", "vcxproj"], vec![], vec![("<!--", "-->")]);
language!("Vim script", vec!["vim"], vec!["\\\""], vec![("\\\"", "\\\""), ("'", "'")]);
language!("Vue", vec!["vue"], vec!["//"], vec![("<!--", "-->"), ("/*", "*/")]);
language!("WebAssembly", vec!["wat", "wast"], vec![";;"]);
language!("XML", vec!["xml"], vec![], vec![("<!--", "-->"), ("<![CDATA[", "]]>")]);
language!("Yaml", vec!["yml", "yaml"], vec!["#"]);
language!("Zig", vec!["zig"], vec!["//"]);
language!("Zsh", vec!["zsh"], vec!["#"]);
Manager { languages, ext_to_language }
};
}
#[derive(Debug, Copy, Clone)]
pub struct Detail {
pub language: &'static str,
pub files: usize,
pub bytes: u64,
pub blank: usize,
pub comment: usize,
pub code: usize,
}
impl Detail {
pub fn new(language: &'static str, files: usize, bytes: u64, blank: usize, comment: usize, code: usize) -> Self {
Self {
language,
files,
bytes,
blank,
comment,
code,
}
}
}
impl Add for Detail {
type Output = Detail;
fn add(self, rhs: Self) -> Self::Output {
Self {
language: self.language,
files: self.files + rhs.files,
bytes: self.bytes + rhs.bytes,
blank: self.blank + rhs.blank,
comment: self.comment + rhs.comment,
            code: self.code + rhs.code,
}
}
}
impl AddAssign for Detail {
fn add_assign(&mut self, rhs: Self) {
self.files += rhs.files;
self.bytes += rhs.bytes;
self.blank += rhs.blank;
self.comment += rhs.comment;
self.code += rhs.code;
}
}
| 42.84375 | 147 | 0.426165 |
381ff9eb23a131d36e9ee9ea457997a2bad544eb
| 3,822 |
use crate::engine::Command;
use crate::engine::Engine;
use crate::engine::LogIdList;
use crate::LeaderId;
use crate::LogId;
fn log_id(term: u64, index: u64) -> LogId<u64> {
LogId::<u64> {
leader_id: LeaderId { term, node_id: 1 },
index,
}
}
fn eng() -> Engine<u64> {
let mut eng = Engine::<u64>::default();
eng.state.log_ids = LogIdList::new(vec![log_id(2, 2), log_id(4, 4), log_id(4, 6)]);
eng.state.last_purged_log_id = Some(log_id(2, 2));
eng.state.last_log_id = Some(log_id(4, 6));
eng
}
#[test]
fn test_purge_log_already_purged() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(1, 1));
assert_eq!(Some(log_id(2, 2)), eng.state.last_purged_log_id,);
assert_eq!(log_id(2, 2), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(4, 6)), eng.state.last_log_id,);
assert_eq!(0, eng.commands.len());
Ok(())
}
#[test]
fn test_purge_log_equal_prev_last_purged() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(2, 2));
assert_eq!(Some(log_id(2, 2)), eng.state.last_purged_log_id,);
assert_eq!(log_id(2, 2), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(4, 6)), eng.state.last_log_id,);
assert_eq!(0, eng.commands.len());
Ok(())
}
#[test]
fn test_purge_log_same_leader_as_prev_last_purged() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(2, 3));
assert_eq!(Some(log_id(2, 3)), eng.state.last_purged_log_id,);
assert_eq!(log_id(2, 3), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(4, 6)), eng.state.last_log_id,);
assert_eq!(vec![Command::PurgeLog { upto: log_id(2, 3) }], eng.commands);
Ok(())
}
#[test]
fn test_purge_log_to_last_key_log() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(4, 4));
assert_eq!(Some(log_id(4, 4)), eng.state.last_purged_log_id,);
assert_eq!(log_id(4, 4), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(4, 6)), eng.state.last_log_id,);
assert_eq!(vec![Command::PurgeLog { upto: log_id(4, 4) }], eng.commands);
Ok(())
}
#[test]
fn test_purge_log_go_pass_last_key_log() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(4, 5));
assert_eq!(Some(log_id(4, 5)), eng.state.last_purged_log_id,);
assert_eq!(log_id(4, 5), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(4, 6)), eng.state.last_log_id,);
assert_eq!(vec![Command::PurgeLog { upto: log_id(4, 5) }], eng.commands);
Ok(())
}
#[test]
fn test_purge_log_to_last_log_id() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(4, 6));
assert_eq!(Some(log_id(4, 6)), eng.state.last_purged_log_id,);
assert_eq!(log_id(4, 6), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(4, 6)), eng.state.last_log_id,);
assert_eq!(vec![Command::PurgeLog { upto: log_id(4, 6) }], eng.commands);
Ok(())
}
#[test]
fn test_purge_log_go_pass_last_log_id() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(4, 7));
assert_eq!(Some(log_id(4, 7)), eng.state.last_purged_log_id,);
assert_eq!(log_id(4, 7), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(4, 7)), eng.state.last_log_id,);
assert_eq!(vec![Command::PurgeLog { upto: log_id(4, 7) }], eng.commands);
Ok(())
}
#[test]
fn test_purge_log_to_higher_leader_log() -> anyhow::Result<()> {
let mut eng = eng();
eng.purge_log(log_id(5, 7));
assert_eq!(Some(log_id(5, 7)), eng.state.last_purged_log_id,);
assert_eq!(log_id(5, 7), eng.state.log_ids.key_log_ids()[0],);
assert_eq!(Some(log_id(5, 7)), eng.state.last_log_id,);
assert_eq!(vec![Command::PurgeLog { upto: log_id(5, 7) }], eng.commands);
Ok(())
}
| 27.3 | 87 | 0.632653 |
d62a2c3b38f8f0afc7bf7e6e1430b0797093620b
| 114 |
pub mod db;
pub mod insert;
pub mod inserter;
pub mod postgresql_generator;
pub mod table;
pub mod table_builder;
| 16.285714 | 29 | 0.789474 |
4b7cc5a3003b4cb9735dcce27a9afa47c7600b6e
| 196 |
// variables4.rs
// Make me compile! To see a hint, run `rustlings hint variables4`
fn main() {
/**
     * If a variable's type is annotated, it must still be initialized,
     * and the value used to initialize it must match that declared type.
*/
let x: i32 = 2;
println!("数字 {}", x);
}
| 16.333333 | 48 | 0.571429 |
ed04bcdf5b5cdfb3a73bc121ccb4adec025d7522
| 534 |
//reexport Timestamp, so other modules don't need to use stderrlog
pub use stderrlog::Timestamp;
#[derive(Debug)]
pub struct Settings {
pub verbosity: usize,
pub quiet: bool,
pub timestamp: Timestamp,
pub module_path: Option<String>,
pub rom_path: Option<String>,
}
impl Default for Settings {
fn default() -> Settings {
Settings {
verbosity: 0,
quiet: false,
timestamp: Timestamp::Off,
module_path: None,
rom_path: None,
}
}
}
| 22.25 | 66 | 0.597378 |
de28268ea368212ee8593e01a7f087dc20b103c4
| 93,372 |
#![unstable(
feature = "ip",
reason = "extra functionality has not been \
scrutinized to the level that it should \
be to be stable",
issue = "27709"
)]
use crate::cmp::Ordering;
use crate::fmt::{self, Write as FmtWrite};
use crate::hash;
use crate::io::Write as IoWrite;
use crate::mem::transmute;
use crate::sys::net::netc as c;
use crate::sys_common::{AsInner, FromInner};
/// An IP address, either IPv4 or IPv6.
///
/// This enum can contain either an [`Ipv4Addr`] or an [`Ipv6Addr`], see their
/// respective documentation for more details.
///
/// The size of an `IpAddr` instance may vary depending on the target operating
/// system.
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// let localhost_v4 = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
/// let localhost_v6 = IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1));
///
/// assert_eq!("127.0.0.1".parse(), Ok(localhost_v4));
/// assert_eq!("::1".parse(), Ok(localhost_v6));
///
/// assert_eq!(localhost_v4.is_ipv6(), false);
/// assert_eq!(localhost_v4.is_ipv4(), true);
/// ```
#[stable(feature = "ip_addr", since = "1.7.0")]
#[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub enum IpAddr {
/// An IPv4 address.
#[stable(feature = "ip_addr", since = "1.7.0")]
V4(#[stable(feature = "ip_addr", since = "1.7.0")] Ipv4Addr),
/// An IPv6 address.
#[stable(feature = "ip_addr", since = "1.7.0")]
V6(#[stable(feature = "ip_addr", since = "1.7.0")] Ipv6Addr),
}
/// An IPv4 address.
///
/// IPv4 addresses are defined as 32-bit integers in [IETF RFC 791].
/// They are usually represented as four octets.
///
/// See [`IpAddr`] for a type encompassing both IPv4 and IPv6 addresses.
///
/// The size of an `Ipv4Addr` struct may vary depending on the target operating
/// system.
///
/// [IETF RFC 791]: https://tools.ietf.org/html/rfc791
///
/// # Textual representation
///
/// `Ipv4Addr` provides a [`FromStr`] implementation. The four octets are in decimal
/// notation, divided by `.` (this is called "dot-decimal notation").
///
/// [`FromStr`]: crate::str::FromStr
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let localhost = Ipv4Addr::new(127, 0, 0, 1);
/// assert_eq!("127.0.0.1".parse(), Ok(localhost));
/// assert_eq!(localhost.is_loopback(), true);
/// ```
#[derive(Copy)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Ipv4Addr {
inner: c::in_addr,
}
/// An IPv6 address.
///
/// IPv6 addresses are defined as 128-bit integers in [IETF RFC 4291].
/// They are usually represented as eight 16-bit segments.
///
/// See [`IpAddr`] for a type encompassing both IPv4 and IPv6 addresses.
///
/// The size of an `Ipv6Addr` struct may vary depending on the target operating
/// system.
///
/// [IETF RFC 4291]: https://tools.ietf.org/html/rfc4291
///
/// # Textual representation
///
/// `Ipv6Addr` provides a [`FromStr`] implementation. There are many ways to represent
/// an IPv6 address in text, but in general, each segment is written in hexadecimal
/// notation, and segments are separated by `:`. For more information, see
/// [IETF RFC 5952].
///
/// [`FromStr`]: crate::str::FromStr
/// [IETF RFC 5952]: https://tools.ietf.org/html/rfc5952
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let localhost = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1);
/// assert_eq!("::1".parse(), Ok(localhost));
/// assert_eq!(localhost.is_loopback(), true);
/// ```
#[derive(Copy)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Ipv6Addr {
inner: c::in6_addr,
}
#[allow(missing_docs)]
#[derive(Copy, PartialEq, Eq, Clone, Hash, Debug)]
pub enum Ipv6MulticastScope {
InterfaceLocal,
LinkLocal,
RealmLocal,
AdminLocal,
SiteLocal,
OrganizationLocal,
Global,
}
impl IpAddr {
/// Returns [`true`] for the special 'unspecified' address.
///
/// See the documentation for [`Ipv4Addr::is_unspecified()`] and
/// [`Ipv6Addr::is_unspecified()`] for more details.
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)).is_unspecified(), true);
/// assert_eq!(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)).is_unspecified(), true);
/// ```
#[stable(feature = "ip_shared", since = "1.12.0")]
pub fn is_unspecified(&self) -> bool {
match self {
IpAddr::V4(ip) => ip.is_unspecified(),
IpAddr::V6(ip) => ip.is_unspecified(),
}
}
/// Returns [`true`] if this is a loopback address.
///
/// See the documentation for [`Ipv4Addr::is_loopback()`] and
/// [`Ipv6Addr::is_loopback()`] for more details.
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)).is_loopback(), true);
/// assert_eq!(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0x1)).is_loopback(), true);
/// ```
#[stable(feature = "ip_shared", since = "1.12.0")]
pub fn is_loopback(&self) -> bool {
match self {
IpAddr::V4(ip) => ip.is_loopback(),
IpAddr::V6(ip) => ip.is_loopback(),
}
}
/// Returns [`true`] if the address appears to be globally routable.
///
/// See the documentation for [`Ipv4Addr::is_global()`] and
/// [`Ipv6Addr::is_global()`] for more details.
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(80, 9, 12, 3)).is_global(), true);
/// assert_eq!(IpAddr::V6(Ipv6Addr::new(0, 0, 0x1c9, 0, 0, 0xafc8, 0, 0x1)).is_global(), true);
/// ```
pub fn is_global(&self) -> bool {
match self {
IpAddr::V4(ip) => ip.is_global(),
IpAddr::V6(ip) => ip.is_global(),
}
}
/// Returns [`true`] if this is a multicast address.
///
/// See the documentation for [`Ipv4Addr::is_multicast()`] and
/// [`Ipv6Addr::is_multicast()`] for more details.
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(224, 254, 0, 0)).is_multicast(), true);
/// assert_eq!(IpAddr::V6(Ipv6Addr::new(0xff00, 0, 0, 0, 0, 0, 0, 0)).is_multicast(), true);
/// ```
#[stable(feature = "ip_shared", since = "1.12.0")]
pub fn is_multicast(&self) -> bool {
match self {
IpAddr::V4(ip) => ip.is_multicast(),
IpAddr::V6(ip) => ip.is_multicast(),
}
}
/// Returns [`true`] if this address is in a range designated for documentation.
///
/// See the documentation for [`Ipv4Addr::is_documentation()`] and
/// [`Ipv6Addr::is_documentation()`] for more details.
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_documentation(), true);
/// assert_eq!(
/// IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_documentation(),
/// true
/// );
/// ```
pub fn is_documentation(&self) -> bool {
match self {
IpAddr::V4(ip) => ip.is_documentation(),
IpAddr::V6(ip) => ip.is_documentation(),
}
}
/// Returns [`true`] if this address is an [`IPv4` address], and [`false`]
/// otherwise.
///
/// [`true`]: ../../std/primitive.bool.html
/// [`false`]: ../../std/primitive.bool.html
/// [`IPv4` address]: IpAddr::V4
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_ipv4(), true);
/// assert_eq!(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_ipv4(), false);
/// ```
#[stable(feature = "ipaddr_checker", since = "1.16.0")]
pub fn is_ipv4(&self) -> bool {
matches!(self, IpAddr::V4(_))
}
/// Returns [`true`] if this address is an [`IPv6` address], and [`false`]
/// otherwise.
///
/// [`true`]: ../../std/primitive.bool.html
/// [`false`]: ../../std/primitive.bool.html
/// [`IPv6` address]: IpAddr::V6
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_ipv6(), false);
/// assert_eq!(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_ipv6(), true);
/// ```
#[stable(feature = "ipaddr_checker", since = "1.16.0")]
pub fn is_ipv6(&self) -> bool {
matches!(self, IpAddr::V6(_))
}
}
impl Ipv4Addr {
/// Creates a new IPv4 address from four eight-bit octets.
///
/// The result will represent the IP address `a`.`b`.`c`.`d`.
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::new(127, 0, 0, 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_ipv4", since = "1.32.0")]
pub const fn new(a: u8, b: u8, c: u8, d: u8) -> Ipv4Addr {
// `s_addr` is stored as BE on all machine and the array is in BE order.
// So the native endian conversion method is used so that it's never swapped.
Ipv4Addr { inner: c::in_addr { s_addr: u32::from_ne_bytes([a, b, c, d]) } }
}
/// An IPv4 address with the address pointing to localhost: 127.0.0.1.
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::LOCALHOST;
/// assert_eq!(addr, Ipv4Addr::new(127, 0, 0, 1));
/// ```
#[stable(feature = "ip_constructors", since = "1.30.0")]
pub const LOCALHOST: Self = Ipv4Addr::new(127, 0, 0, 1);
/// An IPv4 address representing an unspecified address: 0.0.0.0
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::UNSPECIFIED;
/// assert_eq!(addr, Ipv4Addr::new(0, 0, 0, 0));
/// ```
#[stable(feature = "ip_constructors", since = "1.30.0")]
pub const UNSPECIFIED: Self = Ipv4Addr::new(0, 0, 0, 0);
/// An IPv4 address representing the broadcast address: 255.255.255.255
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::BROADCAST;
/// assert_eq!(addr, Ipv4Addr::new(255, 255, 255, 255));
/// ```
#[stable(feature = "ip_constructors", since = "1.30.0")]
pub const BROADCAST: Self = Ipv4Addr::new(255, 255, 255, 255);
/// Returns the four eight-bit integers that make up this address.
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::new(127, 0, 0, 1);
/// assert_eq!(addr.octets(), [127, 0, 0, 1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn octets(&self) -> [u8; 4] {
// This returns the order we want because s_addr is stored in big-endian.
self.inner.s_addr.to_ne_bytes()
}
/// Returns [`true`] for the special 'unspecified' address (0.0.0.0).
///
/// This property is defined in _UNIX Network Programming, Second Edition_,
/// W. Richard Stevens, p. 891; see also [ip7].
///
/// [`true`]: ../../std/primitive.bool.html
/// [ip7]: http://man7.org/linux/man-pages/man7/ip.7.html
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(0, 0, 0, 0).is_unspecified(), true);
/// assert_eq!(Ipv4Addr::new(45, 22, 13, 197).is_unspecified(), false);
/// ```
#[stable(feature = "ip_shared", since = "1.12.0")]
#[rustc_const_stable(feature = "const_ipv4", since = "1.32.0")]
pub const fn is_unspecified(&self) -> bool {
self.inner.s_addr == 0
}
/// Returns [`true`] if this is a loopback address (127.0.0.0/8).
///
/// This property is defined by [IETF RFC 1122].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 1122]: https://tools.ietf.org/html/rfc1122
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(127, 0, 0, 1).is_loopback(), true);
/// assert_eq!(Ipv4Addr::new(45, 22, 13, 197).is_loopback(), false);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_loopback(&self) -> bool {
self.octets()[0] == 127
}
/// Returns [`true`] if this is a private address.
///
/// The private address ranges are defined in [IETF RFC 1918] and include:
///
/// - 10.0.0.0/8
/// - 172.16.0.0/12
/// - 192.168.0.0/16
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 1918]: https://tools.ietf.org/html/rfc1918
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(10, 0, 0, 1).is_private(), true);
/// assert_eq!(Ipv4Addr::new(10, 10, 10, 10).is_private(), true);
/// assert_eq!(Ipv4Addr::new(172, 16, 10, 10).is_private(), true);
/// assert_eq!(Ipv4Addr::new(172, 29, 45, 14).is_private(), true);
/// assert_eq!(Ipv4Addr::new(172, 32, 0, 2).is_private(), false);
/// assert_eq!(Ipv4Addr::new(192, 168, 0, 2).is_private(), true);
/// assert_eq!(Ipv4Addr::new(192, 169, 0, 2).is_private(), false);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_private(&self) -> bool {
match self.octets() {
[10, ..] => true,
[172, b, ..] if b >= 16 && b <= 31 => true,
[192, 168, ..] => true,
_ => false,
}
}
/// Returns [`true`] if the address is link-local (169.254.0.0/16).
///
/// This property is defined by [IETF RFC 3927].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 3927]: https://tools.ietf.org/html/rfc3927
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(169, 254, 0, 0).is_link_local(), true);
/// assert_eq!(Ipv4Addr::new(169, 254, 10, 65).is_link_local(), true);
/// assert_eq!(Ipv4Addr::new(16, 89, 10, 65).is_link_local(), false);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_link_local(&self) -> bool {
match self.octets() {
[169, 254, ..] => true,
_ => false,
}
}
/// Returns [`true`] if the address appears to be globally routable.
/// See [iana-ipv4-special-registry][ipv4-sr].
///
/// The following return [`false`]:
///
/// - private addresses (see [`Ipv4Addr::is_private()`])
/// - the loopback address (see [`Ipv4Addr::is_loopback()`])
/// - the link-local address (see [`Ipv4Addr::is_link_local()`])
/// - the broadcast address (see [`Ipv4Addr::is_broadcast()`])
/// - addresses used for documentation (see [`Ipv4Addr::is_documentation()`])
/// - the unspecified address (see [`Ipv4Addr::is_unspecified()`]), and the whole
/// 0.0.0.0/8 block
/// - addresses reserved for future protocols (see
/// [`Ipv4Addr::is_ietf_protocol_assignment()`], except
/// `192.0.0.9/32` and `192.0.0.10/32`, which are globally routable)
/// - addresses reserved for future use (see [`Ipv4Addr::is_reserved()`])
/// - addresses reserved for networking devices benchmarking (see
/// [`Ipv4Addr::is_benchmarking()`])
///
/// [`true`]: ../../std/primitive.bool.html
/// [`false`]: ../../std/primitive.bool.html
/// [ipv4-sr]: https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv4Addr;
///
/// // private addresses are not global
/// assert_eq!(Ipv4Addr::new(10, 254, 0, 0).is_global(), false);
/// assert_eq!(Ipv4Addr::new(192, 168, 10, 65).is_global(), false);
/// assert_eq!(Ipv4Addr::new(172, 16, 10, 65).is_global(), false);
///
/// // the 0.0.0.0/8 block is not global
/// assert_eq!(Ipv4Addr::new(0, 1, 2, 3).is_global(), false);
/// // in particular, the unspecified address is not global
/// assert_eq!(Ipv4Addr::new(0, 0, 0, 0).is_global(), false);
///
/// // the loopback address is not global
/// assert_eq!(Ipv4Addr::new(127, 0, 0, 1).is_global(), false);
///
/// // link local addresses are not global
/// assert_eq!(Ipv4Addr::new(169, 254, 45, 1).is_global(), false);
///
/// // the broadcast address is not global
/// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_global(), false);
///
/// // the address space designated for documentation is not global
/// assert_eq!(Ipv4Addr::new(192, 0, 2, 255).is_global(), false);
/// assert_eq!(Ipv4Addr::new(198, 51, 100, 65).is_global(), false);
/// assert_eq!(Ipv4Addr::new(203, 0, 113, 6).is_global(), false);
///
/// // shared addresses are not global
/// assert_eq!(Ipv4Addr::new(100, 100, 0, 0).is_global(), false);
///
/// // addresses reserved for protocol assignment are not global
/// assert_eq!(Ipv4Addr::new(192, 0, 0, 0).is_global(), false);
/// assert_eq!(Ipv4Addr::new(192, 0, 0, 255).is_global(), false);
///
/// // addresses reserved for future use are not global
/// assert_eq!(Ipv4Addr::new(250, 10, 20, 30).is_global(), false);
///
/// // addresses reserved for network devices benchmarking are not global
/// assert_eq!(Ipv4Addr::new(198, 18, 0, 0).is_global(), false);
///
/// // All the other addresses are global
/// assert_eq!(Ipv4Addr::new(1, 1, 1, 1).is_global(), true);
/// assert_eq!(Ipv4Addr::new(80, 9, 12, 3).is_global(), true);
/// ```
pub fn is_global(&self) -> bool {
// check if this address is 192.0.0.9 or 192.0.0.10. These addresses are the only two
// globally routable addresses in the 192.0.0.0/24 range.
if u32::from(*self) == 0xc0000009 || u32::from(*self) == 0xc000000a {
return true;
}
!self.is_private()
&& !self.is_loopback()
&& !self.is_link_local()
&& !self.is_broadcast()
&& !self.is_documentation()
&& !self.is_shared()
&& !self.is_ietf_protocol_assignment()
&& !self.is_reserved()
&& !self.is_benchmarking()
// Make sure the address is not in 0.0.0.0/8
&& self.octets()[0] != 0
}
/// Returns [`true`] if this address is part of the Shared Address Space defined in
/// [IETF RFC 6598] (`100.64.0.0/10`).
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 6598]: https://tools.ietf.org/html/rfc6598
///
/// # Examples
///
/// ```
/// #![feature(ip)]
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(100, 64, 0, 0).is_shared(), true);
/// assert_eq!(Ipv4Addr::new(100, 127, 255, 255).is_shared(), true);
/// assert_eq!(Ipv4Addr::new(100, 128, 0, 0).is_shared(), false);
/// ```
pub fn is_shared(&self) -> bool {
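// 100.64.0.0/10: the first octet is 100 and the top two bits of the second octet are `01`,
// i.e. the second octet is in 64..=127.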
self.octets()[0] == 100 && (self.octets()[1] & 0b1100_0000 == 0b0100_0000)
}
/// Returns [`true`] if this address is part of `192.0.0.0/24`, which is reserved to
/// IANA for IETF protocol assignments, as documented in [IETF RFC 6890].
///
/// Note that parts of this block are in use:
///
/// - `192.0.0.8/32` is the "IPv4 dummy address" (see [IETF RFC 7600])
/// - `192.0.0.9/32` is the "Port Control Protocol Anycast" (see [IETF RFC 7723])
/// - `192.0.0.10/32` is used for NAT traversal (see [IETF RFC 8155])
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 6890]: https://tools.ietf.org/html/rfc6890
/// [IETF RFC 7600]: https://tools.ietf.org/html/rfc7600
/// [IETF RFC 7723]: https://tools.ietf.org/html/rfc7723
/// [IETF RFC 8155]: https://tools.ietf.org/html/rfc8155
///
/// # Examples
///
/// ```
/// #![feature(ip)]
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(192, 0, 0, 0).is_ietf_protocol_assignment(), true);
/// assert_eq!(Ipv4Addr::new(192, 0, 0, 8).is_ietf_protocol_assignment(), true);
/// assert_eq!(Ipv4Addr::new(192, 0, 0, 9).is_ietf_protocol_assignment(), true);
/// assert_eq!(Ipv4Addr::new(192, 0, 0, 255).is_ietf_protocol_assignment(), true);
/// assert_eq!(Ipv4Addr::new(192, 0, 1, 0).is_ietf_protocol_assignment(), false);
/// assert_eq!(Ipv4Addr::new(191, 255, 255, 255).is_ietf_protocol_assignment(), false);
/// ```
pub fn is_ietf_protocol_assignment(&self) -> bool {
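// 192.0.0.0/24: the first three octets are fixed.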
self.octets()[0] == 192 && self.octets()[1] == 0 && self.octets()[2] == 0
}
/// Returns [`true`] if this address is part of the `198.18.0.0/15` range, which is reserved for
/// network devices benchmarking. This range is defined in [IETF RFC 2544] as `192.18.0.0`
/// through `198.19.255.255` but [errata 423] corrects it to `198.18.0.0/15`.
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 2544]: https://tools.ietf.org/html/rfc2544
/// [errata 423]: https://www.rfc-editor.org/errata/eid423
///
/// # Examples
///
/// ```
/// #![feature(ip)]
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(198, 17, 255, 255).is_benchmarking(), false);
/// assert_eq!(Ipv4Addr::new(198, 18, 0, 0).is_benchmarking(), true);
/// assert_eq!(Ipv4Addr::new(198, 19, 255, 255).is_benchmarking(), true);
/// assert_eq!(Ipv4Addr::new(198, 20, 0, 0).is_benchmarking(), false);
/// ```
pub fn is_benchmarking(&self) -> bool {
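// 198.18.0.0/15: masking off the lowest bit of the second octet matches both 18 and 19.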
self.octets()[0] == 198 && (self.octets()[1] & 0xfe) == 18
}
/// Returns [`true`] if this address is reserved by IANA for future use. [IETF RFC 1112]
/// defines the block of reserved addresses as `240.0.0.0/4`. This range normally includes the
/// broadcast address `255.255.255.255`, but this implementation explicitly excludes it, since
/// it is obviously not reserved for future use.
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 1112]: https://tools.ietf.org/html/rfc1112
///
/// # Warning
///
/// As IANA assigns new addresses, this method will be
/// updated. This may result in non-reserved addresses being
/// treated as reserved in code that relies on an outdated version
/// of this method.
///
/// # Examples
///
/// ```
/// #![feature(ip)]
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(240, 0, 0, 0).is_reserved(), true);
/// assert_eq!(Ipv4Addr::new(255, 255, 255, 254).is_reserved(), true);
///
/// assert_eq!(Ipv4Addr::new(239, 255, 255, 255).is_reserved(), false);
/// // The broadcast address is not considered as reserved for future use by this implementation
/// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_reserved(), false);
/// ```
pub fn is_reserved(&self) -> bool {
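// 240.0.0.0/4: the top four bits of the first octet are all set; the broadcast address is excluded.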
self.octets()[0] & 240 == 240 && !self.is_broadcast()
}
/// Returns [`true`] if this is a multicast address (224.0.0.0/4).
///
/// Multicast addresses have a most significant octet between 224 and 239,
/// and are defined by [IETF RFC 5771].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 5771]: https://tools.ietf.org/html/rfc5771
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(224, 254, 0, 0).is_multicast(), true);
/// assert_eq!(Ipv4Addr::new(236, 168, 10, 65).is_multicast(), true);
/// assert_eq!(Ipv4Addr::new(172, 16, 10, 65).is_multicast(), false);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_multicast(&self) -> bool {
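// 224.0.0.0/4: the first octet is in the range 224..=239.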
self.octets()[0] >= 224 && self.octets()[0] <= 239
}
/// Returns [`true`] if this is a broadcast address (255.255.255.255).
///
/// A broadcast address has all octets set to 255 as defined in [IETF RFC 919].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 919]: https://tools.ietf.org/html/rfc919
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_broadcast(), true);
/// assert_eq!(Ipv4Addr::new(236, 168, 10, 65).is_broadcast(), false);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_broadcast(&self) -> bool {
self == &Self::BROADCAST
}
/// Returns [`true`] if this address is in a range designated for documentation.
///
/// This is defined in [IETF RFC 5737]:
///
/// - 192.0.2.0/24 (TEST-NET-1)
/// - 198.51.100.0/24 (TEST-NET-2)
/// - 203.0.113.0/24 (TEST-NET-3)
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 5737]: https://tools.ietf.org/html/rfc5737
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// assert_eq!(Ipv4Addr::new(192, 0, 2, 255).is_documentation(), true);
/// assert_eq!(Ipv4Addr::new(198, 51, 100, 65).is_documentation(), true);
/// assert_eq!(Ipv4Addr::new(203, 0, 113, 6).is_documentation(), true);
/// assert_eq!(Ipv4Addr::new(193, 34, 17, 19).is_documentation(), false);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_documentation(&self) -> bool {
match self.octets() {
[192, 0, 2, _] => true,
[198, 51, 100, _] => true,
[203, 0, 113, _] => true,
_ => false,
}
}
/// Converts this address to an IPv4-compatible [`IPv6` address].
///
/// `a.b.c.d` becomes `::a.b.c.d`
///
/// [`IPv6` address]: Ipv6Addr
///
/// # Examples
///
/// ```
/// use std::net::{Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(
/// Ipv4Addr::new(192, 0, 2, 255).to_ipv6_compatible(),
/// Ipv6Addr::new(0, 0, 0, 0, 0, 0, 49152, 767)
/// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_ipv6_compatible(&self) -> Ipv6Addr {
let [a, b, c, d] = self.octets();
Ipv6Addr::from([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, a, b, c, d])
}
/// Converts this address to an IPv4-mapped [`IPv6` address].
///
/// `a.b.c.d` becomes `::ffff:a.b.c.d`
///
/// [`IPv6` address]: Ipv6Addr
///
/// # Examples
///
/// ```
/// use std::net::{Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(Ipv4Addr::new(192, 0, 2, 255).to_ipv6_mapped(),
/// Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 49152, 767));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_ipv6_mapped(&self) -> Ipv6Addr {
let [a, b, c, d] = self.octets();
Ipv6Addr::from([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xFF, 0xFF, a, b, c, d])
}
}
#[stable(feature = "ip_addr", since = "1.7.0")]
impl fmt::Display for IpAddr {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
IpAddr::V4(ip) => ip.fmt(fmt),
IpAddr::V6(ip) => ip.fmt(fmt),
}
}
}
#[stable(feature = "ip_addr", since = "1.7.0")]
impl fmt::Debug for IpAddr {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, fmt)
}
}
#[stable(feature = "ip_from_ip", since = "1.16.0")]
impl From<Ipv4Addr> for IpAddr {
/// Copies this address to a new `IpAddr::V4`.
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr};
///
/// let addr = Ipv4Addr::new(127, 0, 0, 1);
///
/// assert_eq!(
/// IpAddr::V4(addr),
/// IpAddr::from(addr)
/// )
/// ```
fn from(ipv4: Ipv4Addr) -> IpAddr {
IpAddr::V4(ipv4)
}
}
#[stable(feature = "ip_from_ip", since = "1.16.0")]
impl From<Ipv6Addr> for IpAddr {
/// Copies this address to a new `IpAddr::V6`.
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv6Addr};
///
/// let addr = Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff);
///
/// assert_eq!(
/// IpAddr::V6(addr),
/// IpAddr::from(addr)
/// );
/// ```
fn from(ipv6: Ipv6Addr) -> IpAddr {
IpAddr::V6(ipv6)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for Ipv4Addr {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let octets = self.octets();
// Fast Path: if there's no alignment stuff, write directly to the buffer
if fmt.precision().is_none() && fmt.width().is_none() {
write!(fmt, "{}.{}.{}.{}", octets[0], octets[1], octets[2], octets[3])
} else {
const IPV4_BUF_LEN: usize = 15; // Long enough for the longest possible IPv4 address
let mut buf = [0u8; IPV4_BUF_LEN];
let mut buf_slice = &mut buf[..];
// Note: The call to write should never fail, hence the unwrap
write!(buf_slice, "{}.{}.{}.{}", octets[0], octets[1], octets[2], octets[3]).unwrap();
let len = IPV4_BUF_LEN - buf_slice.len();
// This unsafe is OK because we know what is being written to the buffer
let buf = unsafe { crate::str::from_utf8_unchecked(&buf[..len]) };
fmt.pad(buf)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for Ipv4Addr {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, fmt)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Clone for Ipv4Addr {
fn clone(&self) -> Ipv4Addr {
*self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialEq for Ipv4Addr {
fn eq(&self, other: &Ipv4Addr) -> bool {
self.inner.s_addr == other.inner.s_addr
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialEq<Ipv4Addr> for IpAddr {
fn eq(&self, other: &Ipv4Addr) -> bool {
match self {
IpAddr::V4(v4) => v4 == other,
IpAddr::V6(_) => false,
}
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialEq<IpAddr> for Ipv4Addr {
fn eq(&self, other: &IpAddr) -> bool {
match other {
IpAddr::V4(v4) => self == v4,
IpAddr::V6(_) => false,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Eq for Ipv4Addr {}
#[stable(feature = "rust1", since = "1.0.0")]
impl hash::Hash for Ipv4Addr {
fn hash<H: hash::Hasher>(&self, s: &mut H) {
// `inner` is #[repr(packed)], so we need to copy `s_addr`.
{ self.inner.s_addr }.hash(s)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialOrd for Ipv4Addr {
fn partial_cmp(&self, other: &Ipv4Addr) -> Option<Ordering> {
Some(self.cmp(other))
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialOrd<Ipv4Addr> for IpAddr {
fn partial_cmp(&self, other: &Ipv4Addr) -> Option<Ordering> {
match self {
IpAddr::V4(v4) => v4.partial_cmp(other),
IpAddr::V6(_) => Some(Ordering::Greater),
}
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialOrd<IpAddr> for Ipv4Addr {
fn partial_cmp(&self, other: &IpAddr) -> Option<Ordering> {
match other {
IpAddr::V4(v4) => self.partial_cmp(v4),
IpAddr::V6(_) => Some(Ordering::Less),
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Ord for Ipv4Addr {
fn cmp(&self, other: &Ipv4Addr) -> Ordering {
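// `s_addr` is stored big-endian; convert to host byte order so the numeric comparison
// matches the ordering of the dotted-decimal representation.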
u32::from_be(self.inner.s_addr).cmp(&u32::from_be(other.inner.s_addr))
}
}
impl AsInner<c::in_addr> for Ipv4Addr {
fn as_inner(&self) -> &c::in_addr {
&self.inner
}
}
#[stable(feature = "ip_u32", since = "1.1.0")]
impl From<Ipv4Addr> for u32 {
/// Converts an `Ipv4Addr` into a host byte order `u32`.
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::new(0xca, 0xfe, 0xba, 0xbe);
/// assert_eq!(0xcafebabe, u32::from(addr));
/// ```
fn from(ip: Ipv4Addr) -> u32 {
let ip = ip.octets();
u32::from_be_bytes(ip)
}
}
#[stable(feature = "ip_u32", since = "1.1.0")]
impl From<u32> for Ipv4Addr {
/// Converts a host byte order `u32` into an `Ipv4Addr`.
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::from(0xcafebabe);
/// assert_eq!(Ipv4Addr::new(0xca, 0xfe, 0xba, 0xbe), addr);
/// ```
fn from(ip: u32) -> Ipv4Addr {
Ipv4Addr::from(ip.to_be_bytes())
}
}
#[stable(feature = "from_slice_v4", since = "1.9.0")]
impl From<[u8; 4]> for Ipv4Addr {
/// Creates an `Ipv4Addr` from a four element byte array.
///
/// # Examples
///
/// ```
/// use std::net::Ipv4Addr;
///
/// let addr = Ipv4Addr::from([13u8, 12u8, 11u8, 10u8]);
/// assert_eq!(Ipv4Addr::new(13, 12, 11, 10), addr);
/// ```
fn from(octets: [u8; 4]) -> Ipv4Addr {
Ipv4Addr::new(octets[0], octets[1], octets[2], octets[3])
}
}
#[stable(feature = "ip_from_slice", since = "1.17.0")]
impl From<[u8; 4]> for IpAddr {
/// Creates an `IpAddr::V4` from a four element byte array.
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv4Addr};
///
/// let addr = IpAddr::from([13u8, 12u8, 11u8, 10u8]);
/// assert_eq!(IpAddr::V4(Ipv4Addr::new(13, 12, 11, 10)), addr);
/// ```
fn from(octets: [u8; 4]) -> IpAddr {
IpAddr::V4(Ipv4Addr::from(octets))
}
}
impl Ipv6Addr {
/// Creates a new IPv6 address from eight 16-bit segments.
///
/// The result will represent the IP address `a:b:c:d:e:f:g:h`.
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let addr = Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_ipv6", since = "1.32.0")]
#[allow_internal_unstable(const_fn_transmute)]
pub const fn new(a: u16, b: u16, c: u16, d: u16, e: u16, f: u16, g: u16, h: u16) -> Ipv6Addr {
let addr16 = [
a.to_be(),
b.to_be(),
c.to_be(),
d.to_be(),
e.to_be(),
f.to_be(),
g.to_be(),
h.to_be(),
];
Ipv6Addr {
inner: c::in6_addr {
// All elements in `addr16` are big endian.
// SAFETY: `[u16; 8]` is always safe to transmute to `[u8; 16]`.
s6_addr: unsafe { transmute::<_, [u8; 16]>(addr16) },
},
}
}
/// An IPv6 address representing localhost: `::1`.
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let addr = Ipv6Addr::LOCALHOST;
/// assert_eq!(addr, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1));
/// ```
#[stable(feature = "ip_constructors", since = "1.30.0")]
pub const LOCALHOST: Self = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1);
/// An IPv6 address representing the unspecified address: `::`
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let addr = Ipv6Addr::UNSPECIFIED;
/// assert_eq!(addr, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0));
/// ```
#[stable(feature = "ip_constructors", since = "1.30.0")]
pub const UNSPECIFIED: Self = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0);
/// Returns the eight 16-bit segments that make up this address.
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).segments(),
/// [0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn segments(&self) -> [u16; 8] {
// All elements in `s6_addr` must be big endian.
// SAFETY: `[u8; 16]` is always safe to transmute to `[u16; 8]`.
let [a, b, c, d, e, f, g, h] = unsafe { transmute::<_, [u16; 8]>(self.inner.s6_addr) };
// We want native endian u16
[
u16::from_be(a),
u16::from_be(b),
u16::from_be(c),
u16::from_be(d),
u16::from_be(e),
u16::from_be(f),
u16::from_be(g),
u16::from_be(h),
]
}
/// Returns [`true`] for the special 'unspecified' address (::).
///
/// This property is defined in [IETF RFC 4291].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 4291]: https://tools.ietf.org/html/rfc4291
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unspecified(), false);
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0).is_unspecified(), true);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_unspecified(&self) -> bool {
self.segments() == [0, 0, 0, 0, 0, 0, 0, 0]
}
/// Returns [`true`] if this is a loopback address (::1).
///
/// This property is defined in [IETF RFC 4291].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 4291]: https://tools.ietf.org/html/rfc4291
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_loopback(), false);
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0x1).is_loopback(), true);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_loopback(&self) -> bool {
self.segments() == [0, 0, 0, 0, 0, 0, 0, 1]
}
/// Returns [`true`] if the address appears to be globally routable.
///
/// The following return [`false`]:
///
/// - the loopback address
/// - link-local and unique local unicast addresses
/// - interface-, link-, realm-, admin- and site-local multicast addresses
///
/// [`true`]: ../../std/primitive.bool.html
/// [`false`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_global(), true);
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0x1).is_global(), false);
/// assert_eq!(Ipv6Addr::new(0, 0, 0x1c9, 0, 0, 0xafc8, 0, 0x1).is_global(), true);
/// ```
pub fn is_global(&self) -> bool {
match self.multicast_scope() {
Some(Ipv6MulticastScope::Global) => true,
None => self.is_unicast_global(),
_ => false,
}
}
/// Returns [`true`] if this is a unique local address (`fc00::/7`).
///
/// This property is defined in [IETF RFC 4193].
///
/// [IETF RFC 4193]: https://tools.ietf.org/html/rfc4193
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unique_local(), false);
/// assert_eq!(Ipv6Addr::new(0xfc02, 0, 0, 0, 0, 0, 0, 0).is_unique_local(), true);
/// ```
pub fn is_unique_local(&self) -> bool {
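// fc00::/7: the top seven bits of the first segment are `1111 110`.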
(self.segments()[0] & 0xfe00) == 0xfc00
}
/// Returns [`true`] if the address is a unicast link-local address (`fe80::/64`).
///
/// A common misconception is to think that "unicast link-local addresses start with
/// `fe80::`", but [IETF RFC 4291] actually defines a stricter format for these addresses:
///
/// ```no_rust
/// |   10     |
/// |  bits    |         54 bits         |          64 bits           |
/// +----------+-------------------------+----------------------------+
/// |1111111010|            0            |        interface ID        |
/// +----------+-------------------------+----------------------------+
/// ```
///
/// This method validates the format defined in the RFC and won't recognize addresses
/// such as `fe80:0:0:1::` or `fe81::` as unicast link-local addresses.
/// If you need a less strict validation, use [`Ipv6Addr::is_unicast_link_local()`] instead.
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv6Addr;
///
/// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0, 0, 0, 0);
/// assert!(ip.is_unicast_link_local_strict());
///
/// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0xffff, 0xffff, 0xffff, 0xffff);
/// assert!(ip.is_unicast_link_local_strict());
///
/// let ip = Ipv6Addr::new(0xfe80, 0, 0, 1, 0, 0, 0, 0);
/// assert!(!ip.is_unicast_link_local_strict());
/// assert!(ip.is_unicast_link_local());
///
/// let ip = Ipv6Addr::new(0xfe81, 0, 0, 0, 0, 0, 0, 0);
/// assert!(!ip.is_unicast_link_local_strict());
/// assert!(ip.is_unicast_link_local());
/// ```
///
/// # See also
///
/// - [IETF RFC 4291 section 2.5.6]
/// - [RFC 4291 errata 4406] (which has been rejected but provides useful
/// insight)
/// - [`Ipv6Addr::is_unicast_link_local()`]
///
/// [IETF RFC 4291]: https://tools.ietf.org/html/rfc4291
/// [IETF RFC 4291 section 2.5.6]: https://tools.ietf.org/html/rfc4291#section-2.5.6
/// [RFC 4291 errata 4406]: https://www.rfc-editor.org/errata/eid4406
pub fn is_unicast_link_local_strict(&self) -> bool {
(self.segments()[0] & 0xffff) == 0xfe80
&& (self.segments()[1] & 0xffff) == 0
&& (self.segments()[2] & 0xffff) == 0
&& (self.segments()[3] & 0xffff) == 0
}
/// Returns [`true`] if the address is a unicast link-local address (`fe80::/10`).
///
/// This method returns [`true`] for addresses in the range reserved by [IETF RFC 4291 section 2.4],
/// i.e. addresses with the following format:
///
/// ```no_rust
/// |   10     |
/// |  bits    |         54 bits         |          64 bits           |
/// +----------+-------------------------+----------------------------+
/// |1111111010|     arbitrary value     |        interface ID        |
/// +----------+-------------------------+----------------------------+
/// ```
///
/// As a result, this method considers addresses such as `fe80:0:0:1::` or `fe81::` to be
/// unicast link-local addresses, whereas [`Ipv6Addr::is_unicast_link_local_strict()`] does not.
/// If you need a strict validation fully compliant with the RFC, use
/// [`Ipv6Addr::is_unicast_link_local_strict()`] instead.
///
/// [`true`]: ../../std/primitive.bool.html
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv6Addr;
///
/// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0, 0, 0, 0);
/// assert!(ip.is_unicast_link_local());
///
/// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0xffff, 0xffff, 0xffff, 0xffff);
/// assert!(ip.is_unicast_link_local());
///
/// let ip = Ipv6Addr::new(0xfe80, 0, 0, 1, 0, 0, 0, 0);
/// assert!(ip.is_unicast_link_local());
/// assert!(!ip.is_unicast_link_local_strict());
///
/// let ip = Ipv6Addr::new(0xfe81, 0, 0, 0, 0, 0, 0, 0);
/// assert!(ip.is_unicast_link_local());
/// assert!(!ip.is_unicast_link_local_strict());
/// ```
///
/// # See also
///
/// - [IETF RFC 4291 section 2.4]
/// - [RFC 4291 errata 4406] (which has been rejected but provides useful
/// insight)
///
/// [IETF RFC 4291 section 2.4]: https://tools.ietf.org/html/rfc4291#section-2.4
/// [RFC 4291 errata 4406]: https://www.rfc-editor.org/errata/eid4406
pub fn is_unicast_link_local(&self) -> bool {
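// fe80::/10: only the top ten bits of the first segment are checked.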
(self.segments()[0] & 0xffc0) == 0xfe80
}
/// Returns [`true`] if this is a deprecated unicast site-local address (fec0::/10). The
/// unicast site-local address format is defined in [RFC 4291 section 2.5.7] as:
///
/// ```no_rust
/// |   10     |
/// |  bits    |         54 bits         |          64 bits           |
/// +----------+-------------------------+----------------------------+
/// |1111111011|        subnet ID        |        interface ID        |
/// +----------+-------------------------+----------------------------+
/// ```
///
/// [`true`]: ../../std/primitive.bool.html
/// [RFC 4291 section 2.5.7]: https://tools.ietf.org/html/rfc4291#section-2.5.7
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv6Addr;
///
/// assert_eq!(
/// Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unicast_site_local(),
/// false
/// );
/// assert_eq!(Ipv6Addr::new(0xfec2, 0, 0, 0, 0, 0, 0, 0).is_unicast_site_local(), true);
/// ```
///
/// # Warning
///
/// As per [RFC 3879], the whole `FEC0::/10` prefix is
/// deprecated. New software must not support site-local
/// addresses.
///
/// [RFC 3879]: https://tools.ietf.org/html/rfc3879
pub fn is_unicast_site_local(&self) -> bool {
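// fec0::/10: the top ten bits of the first segment are `1111 1110 11`.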
(self.segments()[0] & 0xffc0) == 0xfec0
}
/// Returns [`true`] if this is an address reserved for documentation
/// (2001:db8::/32).
///
/// This property is defined in [IETF RFC 3849].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 3849]: https://tools.ietf.org/html/rfc3849
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_documentation(), false);
/// assert_eq!(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0).is_documentation(), true);
/// ```
pub fn is_documentation(&self) -> bool {
(self.segments()[0] == 0x2001) && (self.segments()[1] == 0xdb8)
}
/// Returns [`true`] if the address is a globally routable unicast address.
///
/// The following return false:
///
/// - the loopback address
/// - the link-local addresses
/// - unique local addresses
/// - the unspecified address
/// - the address range reserved for documentation
///
/// This method returns [`true`] for site-local addresses as per [RFC 4291 section 2.5.7]
///
/// ```no_rust
/// The special behavior of [the site-local unicast] prefix defined in [RFC3513] must no longer
/// be supported in new implementations (i.e., new implementations must treat this prefix as
/// Global Unicast).
/// ```
///
/// [`true`]: ../../std/primitive.bool.html
/// [RFC 4291 section 2.5.7]: https://tools.ietf.org/html/rfc4291#section-2.5.7
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0).is_unicast_global(), false);
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unicast_global(), true);
/// ```
pub fn is_unicast_global(&self) -> bool {
!self.is_multicast()
&& !self.is_loopback()
&& !self.is_unicast_link_local()
&& !self.is_unique_local()
&& !self.is_unspecified()
&& !self.is_documentation()
}
/// Returns the address's multicast scope if the address is multicast.
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::{Ipv6Addr, Ipv6MulticastScope};
///
/// assert_eq!(
/// Ipv6Addr::new(0xff0e, 0, 0, 0, 0, 0, 0, 0).multicast_scope(),
/// Some(Ipv6MulticastScope::Global)
/// );
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).multicast_scope(), None);
/// ```
pub fn multicast_scope(&self) -> Option<Ipv6MulticastScope> {
if self.is_multicast() {
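// The multicast scope is encoded in the low four bits of the first segment
// (RFC 4291 section 2.7).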
match self.segments()[0] & 0x000f {
1 => Some(Ipv6MulticastScope::InterfaceLocal),
2 => Some(Ipv6MulticastScope::LinkLocal),
3 => Some(Ipv6MulticastScope::RealmLocal),
4 => Some(Ipv6MulticastScope::AdminLocal),
5 => Some(Ipv6MulticastScope::SiteLocal),
8 => Some(Ipv6MulticastScope::OrganizationLocal),
14 => Some(Ipv6MulticastScope::Global),
_ => None,
}
} else {
None
}
}
/// Returns [`true`] if this is a multicast address (ff00::/8).
///
/// This property is defined by [IETF RFC 4291].
///
/// [`true`]: ../../std/primitive.bool.html
/// [IETF RFC 4291]: https://tools.ietf.org/html/rfc4291
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0xff00, 0, 0, 0, 0, 0, 0, 0).is_multicast(), true);
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_multicast(), false);
/// ```
#[stable(since = "1.7.0", feature = "ip_17")]
pub fn is_multicast(&self) -> bool {
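// ff00::/8: the first octet is 0xff.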
(self.segments()[0] & 0xff00) == 0xff00
}
/// Converts this address to an [`IPv4` address] if it's an "IPv4-mapped IPv6 address"
/// defined in [IETF RFC 4291 section 2.5.5.2], otherwise returns [`None`].
///
/// `::ffff:a.b.c.d` becomes `a.b.c.d`.
/// All addresses *not* starting with `::ffff` will return `None`.
///
/// [`IPv4` address]: Ipv4Addr
/// [IETF RFC 4291 section 2.5.5.2]: https://tools.ietf.org/html/rfc4291#section-2.5.5.2
///
/// # Examples
///
/// ```
/// #![feature(ip)]
///
/// use std::net::{Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(Ipv6Addr::new(0xff00, 0, 0, 0, 0, 0, 0, 0).to_ipv4_mapped(), None);
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).to_ipv4_mapped(),
/// Some(Ipv4Addr::new(192, 10, 2, 255)));
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1).to_ipv4_mapped(), None);
/// ```
pub fn to_ipv4_mapped(&self) -> Option<Ipv4Addr> {
match self.octets() {
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, a, b, c, d] => {
Some(Ipv4Addr::new(a, b, c, d))
}
_ => None,
}
}
/// Converts this address to an [`IPv4` address]. Returns [`None`] if this address is
/// neither IPv4-compatible nor IPv4-mapped.
///
/// `::a.b.c.d` and `::ffff:a.b.c.d` become `a.b.c.d`
///
/// [`IPv4` address]: Ipv4Addr
///
/// # Examples
///
/// ```
/// use std::net::{Ipv4Addr, Ipv6Addr};
///
/// assert_eq!(Ipv6Addr::new(0xff00, 0, 0, 0, 0, 0, 0, 0).to_ipv4(), None);
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).to_ipv4(),
/// Some(Ipv4Addr::new(192, 10, 2, 255)));
/// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1).to_ipv4(),
/// Some(Ipv4Addr::new(0, 0, 0, 1)));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_ipv4(&self) -> Option<Ipv4Addr> {
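// The `0 | 0xffff` or-pattern accepts a sixth segment of either 0 (IPv4-compatible)
// or 0xffff (IPv4-mapped).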
if let [0, 0, 0, 0, 0, 0 | 0xffff, ab, cd] = self.segments() {
let [a, b] = ab.to_be_bytes();
let [c, d] = cd.to_be_bytes();
Some(Ipv4Addr::new(a, b, c, d))
} else {
None
}
}
/// Returns the sixteen eight-bit integers the IPv6 address consists of.
///
/// ```
/// use std::net::Ipv6Addr;
///
/// assert_eq!(Ipv6Addr::new(0xff00, 0, 0, 0, 0, 0, 0, 0).octets(),
/// [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
/// ```
#[stable(feature = "ipv6_to_octets", since = "1.12.0")]
#[rustc_const_stable(feature = "const_ipv6", since = "1.32.0")]
pub const fn octets(&self) -> [u8; 16] {
self.inner.s6_addr
}
}
/// Write an Ipv6Addr, conforming to the canonical style described by
/// [RFC 5952](https://tools.ietf.org/html/rfc5952).
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for Ipv6Addr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// If there are no alignment requirements, write out the IP address to
// f. Otherwise, write it to a local buffer, then use f.pad.
if f.precision().is_none() && f.width().is_none() {
let segments = self.segments();
// Special case for :: and ::1; otherwise they get written with the
// IPv4 formatter
if self.is_unspecified() {
f.write_str("::")
} else if self.is_loopback() {
f.write_str("::1")
} else if let Some(ipv4) = self.to_ipv4() {
match segments[5] {
// IPv4 Compatible address
0 => write!(f, "::{}", ipv4),
// IPv4 Mapped address
0xffff => write!(f, "::ffff:{}", ipv4),
_ => unreachable!(),
}
} else {
#[derive(Copy, Clone, Default)]
struct Span {
start: usize,
len: usize,
}
// Find the inner 0 span
let zeroes = {
let mut longest = Span::default();
let mut current = Span::default();
for (i, &segment) in segments.iter().enumerate() {
if segment == 0 {
if current.len == 0 {
current.start = i;
}
current.len += 1;
if current.len > longest.len {
longest = current;
}
} else {
current = Span::default();
}
}
longest
};
/// Write a colon-separated part of the address
#[inline]
fn fmt_subslice(f: &mut fmt::Formatter<'_>, chunk: &[u16]) -> fmt::Result {
if let Some(first) = chunk.first() {
fmt::LowerHex::fmt(first, f)?;
for segment in &chunk[1..] {
f.write_char(':')?;
fmt::LowerHex::fmt(segment, f)?;
}
}
Ok(())
}
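// Per RFC 5952, "::" is only used to compress a run of two or more zero segments.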
if zeroes.len > 1 {
fmt_subslice(f, &segments[..zeroes.start])?;
f.write_str("::")?;
fmt_subslice(f, &segments[zeroes.start + zeroes.len..])
} else {
fmt_subslice(f, &segments)
}
}
} else {
// Slow path: write the address to a local buffer, then use f.pad.
// Defined recursively by using the fast path to write to the
// buffer.
// This is the largest possible size of an IPv6 address
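// (8 groups of 4 hex digits plus 7 separating colons).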
const IPV6_BUF_LEN: usize = (4 * 8) + 7;
let mut buf = [0u8; IPV6_BUF_LEN];
let mut buf_slice = &mut buf[..];
// Note: This call to write should never fail, so unwrap is okay.
write!(buf_slice, "{}", self).unwrap();
let len = IPV6_BUF_LEN - buf_slice.len();
// This is safe because we know exactly what can be in this buffer
let buf = unsafe { crate::str::from_utf8_unchecked(&buf[..len]) };
f.pad(buf)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for Ipv6Addr {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, fmt)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Clone for Ipv6Addr {
fn clone(&self) -> Ipv6Addr {
*self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialEq for Ipv6Addr {
fn eq(&self, other: &Ipv6Addr) -> bool {
self.inner.s6_addr == other.inner.s6_addr
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialEq<IpAddr> for Ipv6Addr {
fn eq(&self, other: &IpAddr) -> bool {
match other {
IpAddr::V4(_) => false,
IpAddr::V6(v6) => self == v6,
}
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialEq<Ipv6Addr> for IpAddr {
fn eq(&self, other: &Ipv6Addr) -> bool {
match self {
IpAddr::V4(_) => false,
IpAddr::V6(v6) => v6 == other,
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Eq for Ipv6Addr {}
#[stable(feature = "rust1", since = "1.0.0")]
impl hash::Hash for Ipv6Addr {
fn hash<H: hash::Hasher>(&self, s: &mut H) {
self.inner.s6_addr.hash(s)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialOrd for Ipv6Addr {
fn partial_cmp(&self, other: &Ipv6Addr) -> Option<Ordering> {
Some(self.cmp(other))
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialOrd<Ipv6Addr> for IpAddr {
fn partial_cmp(&self, other: &Ipv6Addr) -> Option<Ordering> {
match self {
IpAddr::V4(_) => Some(Ordering::Less),
IpAddr::V6(v6) => v6.partial_cmp(other),
}
}
}
#[stable(feature = "ip_cmp", since = "1.16.0")]
impl PartialOrd<IpAddr> for Ipv6Addr {
fn partial_cmp(&self, other: &IpAddr) -> Option<Ordering> {
match other {
IpAddr::V4(_) => Some(Ordering::Greater),
IpAddr::V6(v6) => self.partial_cmp(v6),
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Ord for Ipv6Addr {
fn cmp(&self, other: &Ipv6Addr) -> Ordering {
self.segments().cmp(&other.segments())
}
}
impl AsInner<c::in6_addr> for Ipv6Addr {
fn as_inner(&self) -> &c::in6_addr {
&self.inner
}
}
impl FromInner<c::in6_addr> for Ipv6Addr {
fn from_inner(addr: c::in6_addr) -> Ipv6Addr {
Ipv6Addr { inner: addr }
}
}
#[stable(feature = "i128", since = "1.26.0")]
impl From<Ipv6Addr> for u128 {
/// Converts an `Ipv6Addr` into a host byte order `u128`.
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let addr = Ipv6Addr::new(
/// 0x1020, 0x3040, 0x5060, 0x7080,
/// 0x90A0, 0xB0C0, 0xD0E0, 0xF00D,
/// );
/// assert_eq!(0x102030405060708090A0B0C0D0E0F00D_u128, u128::from(addr));
/// ```
fn from(ip: Ipv6Addr) -> u128 {
let ip = ip.octets();
u128::from_be_bytes(ip)
}
}
#[stable(feature = "i128", since = "1.26.0")]
impl From<u128> for Ipv6Addr {
/// Converts a host byte order `u128` into an `Ipv6Addr`.
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let addr = Ipv6Addr::from(0x102030405060708090A0B0C0D0E0F00D_u128);
/// assert_eq!(
/// Ipv6Addr::new(
/// 0x1020, 0x3040, 0x5060, 0x7080,
/// 0x90A0, 0xB0C0, 0xD0E0, 0xF00D,
/// ),
/// addr);
/// ```
fn from(ip: u128) -> Ipv6Addr {
Ipv6Addr::from(ip.to_be_bytes())
}
}
#[stable(feature = "ipv6_from_octets", since = "1.9.0")]
impl From<[u8; 16]> for Ipv6Addr {
/// Creates an `Ipv6Addr` from a sixteen element byte array.
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let addr = Ipv6Addr::from([
/// 25u8, 24u8, 23u8, 22u8, 21u8, 20u8, 19u8, 18u8,
/// 17u8, 16u8, 15u8, 14u8, 13u8, 12u8, 11u8, 10u8,
/// ]);
/// assert_eq!(
/// Ipv6Addr::new(
/// 0x1918, 0x1716,
/// 0x1514, 0x1312,
/// 0x1110, 0x0f0e,
/// 0x0d0c, 0x0b0a
/// ),
/// addr
/// );
/// ```
fn from(octets: [u8; 16]) -> Ipv6Addr {
let inner = c::in6_addr { s6_addr: octets };
Ipv6Addr::from_inner(inner)
}
}
#[stable(feature = "ipv6_from_segments", since = "1.16.0")]
impl From<[u16; 8]> for Ipv6Addr {
/// Creates an `Ipv6Addr` from an eight element 16-bit array.
///
/// # Examples
///
/// ```
/// use std::net::Ipv6Addr;
///
/// let addr = Ipv6Addr::from([
/// 525u16, 524u16, 523u16, 522u16,
/// 521u16, 520u16, 519u16, 518u16,
/// ]);
/// assert_eq!(
/// Ipv6Addr::new(
/// 0x20d, 0x20c,
/// 0x20b, 0x20a,
/// 0x209, 0x208,
/// 0x207, 0x206
/// ),
/// addr
/// );
/// ```
fn from(segments: [u16; 8]) -> Ipv6Addr {
let [a, b, c, d, e, f, g, h] = segments;
Ipv6Addr::new(a, b, c, d, e, f, g, h)
}
}
#[stable(feature = "ip_from_slice", since = "1.17.0")]
impl From<[u8; 16]> for IpAddr {
/// Creates an `IpAddr::V6` from a sixteen element byte array.
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv6Addr};
///
/// let addr = IpAddr::from([
/// 25u8, 24u8, 23u8, 22u8, 21u8, 20u8, 19u8, 18u8,
/// 17u8, 16u8, 15u8, 14u8, 13u8, 12u8, 11u8, 10u8,
/// ]);
/// assert_eq!(
/// IpAddr::V6(Ipv6Addr::new(
/// 0x1918, 0x1716,
/// 0x1514, 0x1312,
/// 0x1110, 0x0f0e,
/// 0x0d0c, 0x0b0a
/// )),
/// addr
/// );
/// ```
fn from(octets: [u8; 16]) -> IpAddr {
IpAddr::V6(Ipv6Addr::from(octets))
}
}
#[stable(feature = "ip_from_slice", since = "1.17.0")]
impl From<[u16; 8]> for IpAddr {
/// Creates an `IpAddr::V6` from an eight element 16-bit array.
///
/// # Examples
///
/// ```
/// use std::net::{IpAddr, Ipv6Addr};
///
/// let addr = IpAddr::from([
/// 525u16, 524u16, 523u16, 522u16,
/// 521u16, 520u16, 519u16, 518u16,
/// ]);
/// assert_eq!(
/// IpAddr::V6(Ipv6Addr::new(
/// 0x20d, 0x20c,
/// 0x20b, 0x20a,
/// 0x209, 0x208,
/// 0x207, 0x206
/// )),
/// addr
/// );
/// ```
fn from(segments: [u16; 8]) -> IpAddr {
IpAddr::V6(Ipv6Addr::from(segments))
}
}
// Tests for this module
#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
use crate::net::test::{sa4, sa6, tsa};
use crate::net::*;
use crate::str::FromStr;
#[test]
fn test_from_str_ipv4() {
assert_eq!(Ok(Ipv4Addr::new(127, 0, 0, 1)), "127.0.0.1".parse());
assert_eq!(Ok(Ipv4Addr::new(255, 255, 255, 255)), "255.255.255.255".parse());
assert_eq!(Ok(Ipv4Addr::new(0, 0, 0, 0)), "0.0.0.0".parse());
// out of range
let none: Option<Ipv4Addr> = "256.0.0.1".parse().ok();
assert_eq!(None, none);
// too short
let none: Option<Ipv4Addr> = "255.0.0".parse().ok();
assert_eq!(None, none);
// too long
let none: Option<Ipv4Addr> = "255.0.0.1.2".parse().ok();
assert_eq!(None, none);
// no number between dots
let none: Option<Ipv4Addr> = "255.0..1".parse().ok();
assert_eq!(None, none);
}
#[test]
fn test_from_str_ipv6() {
assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)), "0:0:0:0:0:0:0:0".parse());
assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), "0:0:0:0:0:0:0:1".parse());
assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), "::1".parse());
assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)), "::".parse());
assert_eq!(
Ok(Ipv6Addr::new(0x2a02, 0x6b8, 0, 0, 0, 0, 0x11, 0x11)),
"2a02:6b8::11:11".parse()
);
// too long group
let none: Option<Ipv6Addr> = "::00000".parse().ok();
assert_eq!(None, none);
// too short
let none: Option<Ipv6Addr> = "1:2:3:4:5:6:7".parse().ok();
assert_eq!(None, none);
// too long
let none: Option<Ipv6Addr> = "1:2:3:4:5:6:7:8:9".parse().ok();
assert_eq!(None, none);
// triple colon
let none: Option<Ipv6Addr> = "1:2:::6:7:8".parse().ok();
assert_eq!(None, none);
// two double colons
let none: Option<Ipv6Addr> = "1:2::6::8".parse().ok();
assert_eq!(None, none);
// `::` indicating zero groups of zeros
let none: Option<Ipv6Addr> = "1:2:3:4::5:6:7:8".parse().ok();
assert_eq!(None, none);
}
#[test]
fn test_from_str_ipv4_in_ipv6() {
assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 49152, 545)), "::192.0.2.33".parse());
assert_eq!(
Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0xFFFF, 49152, 545)),
"::FFFF:192.0.2.33".parse()
);
assert_eq!(
Ok(Ipv6Addr::new(0x64, 0xff9b, 0, 0, 0, 0, 49152, 545)),
"64:ff9b::192.0.2.33".parse()
);
assert_eq!(
Ok(Ipv6Addr::new(0x2001, 0xdb8, 0x122, 0xc000, 0x2, 0x2100, 49152, 545)),
"2001:db8:122:c000:2:2100:192.0.2.33".parse()
);
// colon after v4
let none: Option<Ipv4Addr> = "::127.0.0.1:".parse().ok();
assert_eq!(None, none);
// not enough groups
let none: Option<Ipv6Addr> = "1.2.3.4.5:127.0.0.1".parse().ok();
assert_eq!(None, none);
// too many groups
let none: Option<Ipv6Addr> = "1.2.3.4.5:6:7:127.0.0.1".parse().ok();
assert_eq!(None, none);
}
#[test]
fn test_from_str_socket_addr() {
assert_eq!(Ok(sa4(Ipv4Addr::new(77, 88, 21, 11), 80)), "77.88.21.11:80".parse());
assert_eq!(
Ok(SocketAddrV4::new(Ipv4Addr::new(77, 88, 21, 11), 80)),
"77.88.21.11:80".parse()
);
assert_eq!(
Ok(sa6(Ipv6Addr::new(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1), 53)),
"[2a02:6b8:0:1::1]:53".parse()
);
assert_eq!(
Ok(SocketAddrV6::new(Ipv6Addr::new(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1), 53, 0, 0)),
"[2a02:6b8:0:1::1]:53".parse()
);
assert_eq!(
Ok(sa6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x7F00, 1), 22)),
"[::127.0.0.1]:22".parse()
);
assert_eq!(
Ok(SocketAddrV6::new(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x7F00, 1), 22, 0, 0)),
"[::127.0.0.1]:22".parse()
);
// without port
let none: Option<SocketAddr> = "127.0.0.1".parse().ok();
assert_eq!(None, none);
// without port
let none: Option<SocketAddr> = "127.0.0.1:".parse().ok();
assert_eq!(None, none);
// wrong brackets around v4
let none: Option<SocketAddr> = "[127.0.0.1]:22".parse().ok();
assert_eq!(None, none);
// port out of range
let none: Option<SocketAddr> = "127.0.0.1:123456".parse().ok();
assert_eq!(None, none);
}
#[test]
fn ipv4_addr_to_string() {
// Short address
assert_eq!(Ipv4Addr::new(1, 1, 1, 1).to_string(), "1.1.1.1");
// Long address
assert_eq!(Ipv4Addr::new(127, 127, 127, 127).to_string(), "127.127.127.127");
// Test padding
assert_eq!(&format!("{:16}", Ipv4Addr::new(1, 1, 1, 1)), "1.1.1.1 ");
assert_eq!(&format!("{:>16}", Ipv4Addr::new(1, 1, 1, 1)), " 1.1.1.1");
}
#[test]
fn ipv6_addr_to_string() {
// ipv4-mapped address
let a1 = Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc000, 0x280);
assert_eq!(a1.to_string(), "::ffff:192.0.2.128");
// ipv4-compatible address
let a1 = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0xc000, 0x280);
assert_eq!(a1.to_string(), "::192.0.2.128");
// v6 address with no zero segments
assert_eq!(Ipv6Addr::new(8, 9, 10, 11, 12, 13, 14, 15).to_string(), "8:9:a:b:c:d:e:f");
// longest possible IPv6 length
assert_eq!(
Ipv6Addr::new(0x1111, 0x2222, 0x3333, 0x4444, 0x5555, 0x6666, 0x7777, 0x8888)
.to_string(),
"1111:2222:3333:4444:5555:6666:7777:8888"
);
// padding
assert_eq!(
&format!("{:20}", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8)),
"1:2:3:4:5:6:7:8 "
);
assert_eq!(
&format!("{:>20}", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8)),
" 1:2:3:4:5:6:7:8"
);
// reduce a single run of zeros
assert_eq!(
"ae::ffff:102:304",
Ipv6Addr::new(0xae, 0, 0, 0, 0, 0xffff, 0x0102, 0x0304).to_string()
);
// don't reduce just a single zero segment
assert_eq!("1:2:3:4:5:6:0:8", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 0, 8).to_string());
// 'any' address
assert_eq!("::", Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0).to_string());
// loopback address
assert_eq!("::1", Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1).to_string());
// ends in zeros
assert_eq!("1::", Ipv6Addr::new(1, 0, 0, 0, 0, 0, 0, 0).to_string());
// two runs of zeros, second one is longer
assert_eq!("1:0:0:4::8", Ipv6Addr::new(1, 0, 0, 4, 0, 0, 0, 8).to_string());
// two runs of zeros, equal length
assert_eq!("1::4:5:0:0:8", Ipv6Addr::new(1, 0, 0, 4, 5, 0, 0, 8).to_string());
}
#[test]
fn ipv4_to_ipv6() {
assert_eq!(
Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x1234, 0x5678),
Ipv4Addr::new(0x12, 0x34, 0x56, 0x78).to_ipv6_mapped()
);
assert_eq!(
Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x1234, 0x5678),
Ipv4Addr::new(0x12, 0x34, 0x56, 0x78).to_ipv6_compatible()
);
}
#[test]
fn ipv6_to_ipv4_mapped() {
assert_eq!(
Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x1234, 0x5678).to_ipv4_mapped(),
Some(Ipv4Addr::new(0x12, 0x34, 0x56, 0x78))
);
assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x1234, 0x5678).to_ipv4_mapped(), None);
}
#[test]
fn ipv6_to_ipv4() {
assert_eq!(
Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x1234, 0x5678).to_ipv4(),
Some(Ipv4Addr::new(0x12, 0x34, 0x56, 0x78))
);
assert_eq!(
Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x1234, 0x5678).to_ipv4(),
Some(Ipv4Addr::new(0x12, 0x34, 0x56, 0x78))
);
assert_eq!(Ipv6Addr::new(0, 0, 1, 0, 0, 0, 0x1234, 0x5678).to_ipv4(), None);
}
#[test]
fn ip_properties() {
macro_rules! ip {
($s:expr) => {
IpAddr::from_str($s).unwrap()
};
}
macro_rules! check {
($s:expr) => {
check!($s, 0);
};
($s:expr, $mask:expr) => {{
let unspec: u8 = 1 << 0;
let loopback: u8 = 1 << 1;
let global: u8 = 1 << 2;
let multicast: u8 = 1 << 3;
let doc: u8 = 1 << 4;
if ($mask & unspec) == unspec {
assert!(ip!($s).is_unspecified());
} else {
assert!(!ip!($s).is_unspecified());
}
if ($mask & loopback) == loopback {
assert!(ip!($s).is_loopback());
} else {
assert!(!ip!($s).is_loopback());
}
if ($mask & global) == global {
assert!(ip!($s).is_global());
} else {
assert!(!ip!($s).is_global());
}
if ($mask & multicast) == multicast {
assert!(ip!($s).is_multicast());
} else {
assert!(!ip!($s).is_multicast());
}
if ($mask & doc) == doc {
assert!(ip!($s).is_documentation());
} else {
assert!(!ip!($s).is_documentation());
}
}};
}
let unspec: u8 = 1 << 0;
let loopback: u8 = 1 << 1;
let global: u8 = 1 << 2;
let multicast: u8 = 1 << 3;
let doc: u8 = 1 << 4;
check!("0.0.0.0", unspec);
check!("0.0.0.1");
check!("0.1.0.0");
check!("10.9.8.7");
check!("127.1.2.3", loopback);
check!("172.31.254.253");
check!("169.254.253.242");
check!("192.0.2.183", doc);
check!("192.1.2.183", global);
check!("192.168.254.253");
check!("198.51.100.0", doc);
check!("203.0.113.0", doc);
check!("203.2.113.0", global);
check!("224.0.0.0", global | multicast);
check!("239.255.255.255", global | multicast);
check!("255.255.255.255");
// make sure benchmarking addresses are not global
check!("198.18.0.0");
check!("198.18.54.2");
check!("198.19.255.255");
// make sure addresses reserved for protocol assignment are not global
check!("192.0.0.0");
check!("192.0.0.255");
check!("192.0.0.100");
// make sure reserved addresses are not global
check!("240.0.0.0");
check!("251.54.1.76");
check!("254.255.255.255");
// make sure shared addresses are not global
check!("100.64.0.0");
check!("100.127.255.255");
check!("100.100.100.0");
check!("::", unspec);
check!("::1", loopback);
check!("::0.0.0.2", global);
check!("1::", global);
check!("fc00::");
check!("fdff:ffff::");
check!("fe80:ffff::");
check!("febf:ffff::");
check!("fec0::", global);
check!("ff01::", multicast);
check!("ff02::", multicast);
check!("ff03::", multicast);
check!("ff04::", multicast);
check!("ff05::", multicast);
check!("ff08::", multicast);
check!("ff0e::", global | multicast);
check!("2001:db8:85a3::8a2e:370:7334", doc);
check!("102:304:506:708:90a:b0c:d0e:f10", global);
}
#[test]
fn ipv4_properties() {
macro_rules! ip {
($s:expr) => {
Ipv4Addr::from_str($s).unwrap()
};
}
macro_rules! check {
($s:expr) => {
check!($s, 0);
};
($s:expr, $mask:expr) => {{
let unspec: u16 = 1 << 0;
let loopback: u16 = 1 << 1;
let private: u16 = 1 << 2;
let link_local: u16 = 1 << 3;
let global: u16 = 1 << 4;
let multicast: u16 = 1 << 5;
let broadcast: u16 = 1 << 6;
let documentation: u16 = 1 << 7;
let benchmarking: u16 = 1 << 8;
let ietf_protocol_assignment: u16 = 1 << 9;
let reserved: u16 = 1 << 10;
let shared: u16 = 1 << 11;
if ($mask & unspec) == unspec {
assert!(ip!($s).is_unspecified());
} else {
assert!(!ip!($s).is_unspecified());
}
if ($mask & loopback) == loopback {
assert!(ip!($s).is_loopback());
} else {
assert!(!ip!($s).is_loopback());
}
if ($mask & private) == private {
assert!(ip!($s).is_private());
} else {
assert!(!ip!($s).is_private());
}
if ($mask & link_local) == link_local {
assert!(ip!($s).is_link_local());
} else {
assert!(!ip!($s).is_link_local());
}
if ($mask & global) == global {
assert!(ip!($s).is_global());
} else {
assert!(!ip!($s).is_global());
}
if ($mask & multicast) == multicast {
assert!(ip!($s).is_multicast());
} else {
assert!(!ip!($s).is_multicast());
}
if ($mask & broadcast) == broadcast {
assert!(ip!($s).is_broadcast());
} else {
assert!(!ip!($s).is_broadcast());
}
if ($mask & documentation) == documentation {
assert!(ip!($s).is_documentation());
} else {
assert!(!ip!($s).is_documentation());
}
if ($mask & benchmarking) == benchmarking {
assert!(ip!($s).is_benchmarking());
} else {
assert!(!ip!($s).is_benchmarking());
}
if ($mask & ietf_protocol_assignment) == ietf_protocol_assignment {
assert!(ip!($s).is_ietf_protocol_assignment());
} else {
assert!(!ip!($s).is_ietf_protocol_assignment());
}
if ($mask & reserved) == reserved {
assert!(ip!($s).is_reserved());
} else {
assert!(!ip!($s).is_reserved());
}
if ($mask & shared) == shared {
assert!(ip!($s).is_shared());
} else {
assert!(!ip!($s).is_shared());
}
}};
}
let unspec: u16 = 1 << 0;
let loopback: u16 = 1 << 1;
let private: u16 = 1 << 2;
let link_local: u16 = 1 << 3;
let global: u16 = 1 << 4;
let multicast: u16 = 1 << 5;
let broadcast: u16 = 1 << 6;
let documentation: u16 = 1 << 7;
let benchmarking: u16 = 1 << 8;
let ietf_protocol_assignment: u16 = 1 << 9;
let reserved: u16 = 1 << 10;
let shared: u16 = 1 << 11;
check!("0.0.0.0", unspec);
check!("0.0.0.1");
check!("0.1.0.0");
check!("10.9.8.7", private);
check!("127.1.2.3", loopback);
check!("172.31.254.253", private);
check!("169.254.253.242", link_local);
check!("192.0.2.183", documentation);
check!("192.1.2.183", global);
check!("192.168.254.253", private);
check!("198.51.100.0", documentation);
check!("203.0.113.0", documentation);
check!("203.2.113.0", global);
check!("224.0.0.0", global | multicast);
check!("239.255.255.255", global | multicast);
check!("255.255.255.255", broadcast);
check!("198.18.0.0", benchmarking);
check!("198.18.54.2", benchmarking);
check!("198.19.255.255", benchmarking);
check!("192.0.0.0", ietf_protocol_assignment);
check!("192.0.0.255", ietf_protocol_assignment);
check!("192.0.0.100", ietf_protocol_assignment);
check!("240.0.0.0", reserved);
check!("251.54.1.76", reserved);
check!("254.255.255.255", reserved);
check!("100.64.0.0", shared);
check!("100.127.255.255", shared);
check!("100.100.100.0", shared);
}
#[test]
fn ipv6_properties() {
macro_rules! ip {
($s:expr) => {
Ipv6Addr::from_str($s).unwrap()
};
}
macro_rules! check {
($s:expr, &[$($octet:expr),*], $mask:expr) => {
assert_eq!($s, ip!($s).to_string());
let octets = &[$($octet),*];
assert_eq!(&ip!($s).octets(), octets);
assert_eq!(Ipv6Addr::from(*octets), ip!($s));
let unspecified: u16 = 1 << 0;
let loopback: u16 = 1 << 1;
let unique_local: u16 = 1 << 2;
let global: u16 = 1 << 3;
let unicast_link_local: u16 = 1 << 4;
let unicast_link_local_strict: u16 = 1 << 5;
let unicast_site_local: u16 = 1 << 6;
let unicast_global: u16 = 1 << 7;
let documentation: u16 = 1 << 8;
let multicast_interface_local: u16 = 1 << 9;
let multicast_link_local: u16 = 1 << 10;
let multicast_realm_local: u16 = 1 << 11;
let multicast_admin_local: u16 = 1 << 12;
let multicast_site_local: u16 = 1 << 13;
let multicast_organization_local: u16 = 1 << 14;
let multicast_global: u16 = 1 << 15;
let multicast: u16 = multicast_interface_local
| multicast_admin_local
| multicast_global
| multicast_link_local
| multicast_realm_local
| multicast_site_local
| multicast_organization_local;
if ($mask & unspecified) == unspecified {
assert!(ip!($s).is_unspecified());
} else {
assert!(!ip!($s).is_unspecified());
}
if ($mask & loopback) == loopback {
assert!(ip!($s).is_loopback());
} else {
assert!(!ip!($s).is_loopback());
}
if ($mask & unique_local) == unique_local {
assert!(ip!($s).is_unique_local());
} else {
assert!(!ip!($s).is_unique_local());
}
if ($mask & global) == global {
assert!(ip!($s).is_global());
} else {
assert!(!ip!($s).is_global());
}
if ($mask & unicast_link_local) == unicast_link_local {
assert!(ip!($s).is_unicast_link_local());
} else {
assert!(!ip!($s).is_unicast_link_local());
}
if ($mask & unicast_link_local_strict) == unicast_link_local_strict {
assert!(ip!($s).is_unicast_link_local_strict());
} else {
assert!(!ip!($s).is_unicast_link_local_strict());
}
if ($mask & unicast_site_local) == unicast_site_local {
assert!(ip!($s).is_unicast_site_local());
} else {
assert!(!ip!($s).is_unicast_site_local());
}
if ($mask & unicast_global) == unicast_global {
assert!(ip!($s).is_unicast_global());
} else {
assert!(!ip!($s).is_unicast_global());
}
if ($mask & documentation) == documentation {
assert!(ip!($s).is_documentation());
} else {
assert!(!ip!($s).is_documentation());
}
if ($mask & multicast) != 0 {
assert!(ip!($s).multicast_scope().is_some());
assert!(ip!($s).is_multicast());
} else {
assert!(ip!($s).multicast_scope().is_none());
assert!(!ip!($s).is_multicast());
}
if ($mask & multicast_interface_local) == multicast_interface_local {
assert_eq!(ip!($s).multicast_scope().unwrap(),
Ipv6MulticastScope::InterfaceLocal);
}
if ($mask & multicast_link_local) == multicast_link_local {
assert_eq!(ip!($s).multicast_scope().unwrap(),
Ipv6MulticastScope::LinkLocal);
}
if ($mask & multicast_realm_local) == multicast_realm_local {
assert_eq!(ip!($s).multicast_scope().unwrap(),
Ipv6MulticastScope::RealmLocal);
}
if ($mask & multicast_admin_local) == multicast_admin_local {
assert_eq!(ip!($s).multicast_scope().unwrap(),
Ipv6MulticastScope::AdminLocal);
}
if ($mask & multicast_site_local) == multicast_site_local {
assert_eq!(ip!($s).multicast_scope().unwrap(),
Ipv6MulticastScope::SiteLocal);
}
if ($mask & multicast_organization_local) == multicast_organization_local {
assert_eq!(ip!($s).multicast_scope().unwrap(),
Ipv6MulticastScope::OrganizationLocal);
}
if ($mask & multicast_global) == multicast_global {
assert_eq!(ip!($s).multicast_scope().unwrap(),
Ipv6MulticastScope::Global);
}
}
}
let unspecified: u16 = 1 << 0;
let loopback: u16 = 1 << 1;
let unique_local: u16 = 1 << 2;
let global: u16 = 1 << 3;
let unicast_link_local: u16 = 1 << 4;
let unicast_link_local_strict: u16 = 1 << 5;
let unicast_site_local: u16 = 1 << 6;
let unicast_global: u16 = 1 << 7;
let documentation: u16 = 1 << 8;
let multicast_interface_local: u16 = 1 << 9;
let multicast_link_local: u16 = 1 << 10;
let multicast_realm_local: u16 = 1 << 11;
let multicast_admin_local: u16 = 1 << 12;
let multicast_site_local: u16 = 1 << 13;
let multicast_organization_local: u16 = 1 << 14;
let multicast_global: u16 = 1 << 15;
check!("::", &[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], unspecified);
check!("::1", &[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], loopback);
check!(
"::0.0.0.2",
&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2],
global | unicast_global
);
check!("1::", &[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], global | unicast_global);
check!("fc00::", &[0xfc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], unique_local);
check!(
"fdff:ffff::",
&[0xfd, 0xff, 0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
unique_local
);
check!(
"fe80:ffff::",
&[0xfe, 0x80, 0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
unicast_link_local
);
check!(
"fe80::",
&[0xfe, 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
unicast_link_local | unicast_link_local_strict
);
check!(
"febf:ffff::",
&[0xfe, 0xbf, 0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
unicast_link_local
);
check!(
"febf::",
&[0xfe, 0xbf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
unicast_link_local
);
check!(
"febf:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
&[
0xfe, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff
],
unicast_link_local
);
check!(
"fe80::ffff:ffff:ffff:ffff",
&[
0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff
],
unicast_link_local | unicast_link_local_strict
);
check!(
"fe80:0:0:1::",
&[0xfe, 0x80, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
unicast_link_local
);
check!(
"fec0::",
&[0xfe, 0xc0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
unicast_site_local | unicast_global | global
);
check!(
"ff01::",
&[0xff, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
multicast_interface_local
);
check!(
"ff02::",
&[0xff, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
multicast_link_local
);
check!(
"ff03::",
&[0xff, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
multicast_realm_local
);
check!(
"ff04::",
&[0xff, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
multicast_admin_local
);
check!(
"ff05::",
&[0xff, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
multicast_site_local
);
check!(
"ff08::",
&[0xff, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
multicast_organization_local
);
check!(
"ff0e::",
&[0xff, 0xe, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
multicast_global | global
);
check!(
"2001:db8:85a3::8a2e:370:7334",
&[0x20, 1, 0xd, 0xb8, 0x85, 0xa3, 0, 0, 0, 0, 0x8a, 0x2e, 3, 0x70, 0x73, 0x34],
documentation
);
check!(
"102:304:506:708:90a:b0c:d0e:f10",
&[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
global | unicast_global
);
}
#[test]
fn to_socket_addr_socketaddr() {
let a = sa4(Ipv4Addr::new(77, 88, 21, 11), 12345);
assert_eq!(Ok(vec![a]), tsa(a));
}
#[test]
fn test_ipv4_to_int() {
let a = Ipv4Addr::new(0x11, 0x22, 0x33, 0x44);
assert_eq!(u32::from(a), 0x11223344);
}
#[test]
fn test_int_to_ipv4() {
let a = Ipv4Addr::new(0x11, 0x22, 0x33, 0x44);
assert_eq!(Ipv4Addr::from(0x11223344), a);
}
#[test]
fn test_ipv6_to_int() {
let a = Ipv6Addr::new(0x1122, 0x3344, 0x5566, 0x7788, 0x99aa, 0xbbcc, 0xddee, 0xff11);
assert_eq!(u128::from(a), 0x112233445566778899aabbccddeeff11u128);
}
#[test]
fn test_int_to_ipv6() {
let a = Ipv6Addr::new(0x1122, 0x3344, 0x5566, 0x7788, 0x99aa, 0xbbcc, 0xddee, 0xff11);
assert_eq!(Ipv6Addr::from(0x112233445566778899aabbccddeeff11u128), a);
}
#[test]
fn ipv4_from_constructors() {
assert_eq!(Ipv4Addr::LOCALHOST, Ipv4Addr::new(127, 0, 0, 1));
assert!(Ipv4Addr::LOCALHOST.is_loopback());
assert_eq!(Ipv4Addr::UNSPECIFIED, Ipv4Addr::new(0, 0, 0, 0));
assert!(Ipv4Addr::UNSPECIFIED.is_unspecified());
assert_eq!(Ipv4Addr::BROADCAST, Ipv4Addr::new(255, 255, 255, 255));
assert!(Ipv4Addr::BROADCAST.is_broadcast());
}
#[test]
fn ipv6_from_constructors() {
assert_eq!(Ipv6Addr::LOCALHOST, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1));
assert!(Ipv6Addr::LOCALHOST.is_loopback());
assert_eq!(Ipv6Addr::UNSPECIFIED, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0));
assert!(Ipv6Addr::UNSPECIFIED.is_unspecified());
}
#[test]
fn ipv4_from_octets() {
assert_eq!(Ipv4Addr::from([127, 0, 0, 1]), Ipv4Addr::new(127, 0, 0, 1))
}
#[test]
fn ipv6_from_segments() {
let from_u16s =
Ipv6Addr::from([0x0011, 0x2233, 0x4455, 0x6677, 0x8899, 0xaabb, 0xccdd, 0xeeff]);
let new = Ipv6Addr::new(0x0011, 0x2233, 0x4455, 0x6677, 0x8899, 0xaabb, 0xccdd, 0xeeff);
assert_eq!(new, from_u16s);
}
#[test]
fn ipv6_from_octets() {
let from_u16s =
Ipv6Addr::from([0x0011, 0x2233, 0x4455, 0x6677, 0x8899, 0xaabb, 0xccdd, 0xeeff]);
let from_u8s = Ipv6Addr::from([
0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd,
0xee, 0xff,
]);
assert_eq!(from_u16s, from_u8s);
}
#[test]
fn cmp() {
let v41 = Ipv4Addr::new(100, 64, 3, 3);
let v42 = Ipv4Addr::new(192, 0, 2, 2);
let v61 = "2001:db8:f00::1002".parse::<Ipv6Addr>().unwrap();
let v62 = "2001:db8:f00::2001".parse::<Ipv6Addr>().unwrap();
assert!(v41 < v42);
assert!(v61 < v62);
assert_eq!(v41, IpAddr::V4(v41));
assert_eq!(v61, IpAddr::V6(v61));
assert!(v41 != IpAddr::V4(v42));
assert!(v61 != IpAddr::V6(v62));
assert!(v41 < IpAddr::V4(v42));
assert!(v61 < IpAddr::V6(v62));
assert!(IpAddr::V4(v41) < v42);
assert!(IpAddr::V6(v61) < v62);
assert!(v41 < IpAddr::V6(v61));
assert!(IpAddr::V4(v41) < v61);
}
#[test]
fn is_v4() {
let ip = IpAddr::V4(Ipv4Addr::new(100, 64, 3, 3));
assert!(ip.is_ipv4());
assert!(!ip.is_ipv6());
}
#[test]
fn is_v6() {
let ip = IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x1234, 0x5678));
assert!(!ip.is_ipv4());
assert!(ip.is_ipv6());
}
}
| 33.928779 | 111 | 0.499154 |
48a51813ba4ab5a44f3b55f8ca77444322eda640
| 8,262 |
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use libc;
use core::prelude::*;
use self::imp::{make_handler, drop_handler};
pub use self::imp::{init, cleanup};
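/// Owns the alternate signal stack installed for stack-overflow detection;
/// dropping it releases the underlying allocation (a no-op on unsupported platforms).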
pub struct Handler {
_data: *mut libc::c_void
}
impl Handler {
pub unsafe fn new() -> Handler {
make_handler()
}
}
impl Drop for Handler {
fn drop(&mut self) {
unsafe {
drop_handler(self);
}
}
}
#[cfg(any(target_os = "linux", target_os = "macos"))]
mod imp {
use core::prelude::*;
use sys_common::stack;
use super::Handler;
use rt::util::report_overflow;
use mem;
use ptr;
use intrinsics;
use self::signal::{siginfo, sigaction, SIGBUS, SIG_DFL,
SA_SIGINFO, SA_ONSTACK, sigaltstack,
SIGSTKSZ};
use libc;
use libc::funcs::posix88::mman::{mmap, munmap};
use libc::consts::os::posix88::{SIGSEGV,
PROT_READ,
PROT_WRITE,
MAP_PRIVATE,
MAP_ANON,
MAP_FAILED};
use sys_common::thread_info;
// This is initialized in init() and only read from afterwards.
static mut PAGE_SIZE: uint = 0;
#[no_stack_check]
unsafe extern fn signal_handler(signum: libc::c_int,
info: *mut siginfo,
_data: *mut libc::c_void) {
// We cannot return from a SIGSEGV or SIGBUS signal.
// See: https://www.gnu.org/software/libc/manual/html_node/Handler-Returns.html
unsafe fn term(signum: libc::c_int) -> ! {
use core::mem::transmute;
signal(signum, transmute(SIG_DFL));
raise(signum);
intrinsics::abort();
}
// We're calling into functions with stack checks
stack::record_sp_limit(0);
let guard = thread_info::stack_guard();
let addr = (*info).si_addr as uint;
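// Only treat this fault as a stack overflow if the faulting address falls
// within the guard page; otherwise defer to the default signal handler.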
if guard == 0 || addr < guard - PAGE_SIZE || addr >= guard {
term(signum);
}
report_overflow();
intrinsics::abort()
}
static mut MAIN_ALTSTACK: *mut libc::c_void = 0 as *mut libc::c_void;
pub unsafe fn init() {
let psize = libc::sysconf(libc::consts::os::sysconf::_SC_PAGESIZE);
if psize == -1 {
panic!("failed to get page size");
}
PAGE_SIZE = psize as uint;
let mut action: sigaction = mem::zeroed();
action.sa_flags = SA_SIGINFO | SA_ONSTACK;
action.sa_sigaction = signal_handler as sighandler_t;
sigaction(SIGSEGV, &action, ptr::null_mut());
sigaction(SIGBUS, &action, ptr::null_mut());
let handler = make_handler();
MAIN_ALTSTACK = handler._data;
mem::forget(handler);
}
pub unsafe fn cleanup() {
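// Wrap the main thread's alternate stack in a temporary Handler so that
// dropping it unmaps the allocation made in init().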
Handler { _data: MAIN_ALTSTACK };
}
pub unsafe fn make_handler() -> Handler {
let alt_stack = mmap(ptr::null_mut(),
signal::SIGSTKSZ,
PROT_READ | PROT_WRITE,
MAP_PRIVATE | MAP_ANON,
-1,
0);
if alt_stack == MAP_FAILED {
panic!("failed to allocate an alternative stack");
}
let mut stack: sigaltstack = mem::zeroed();
stack.ss_sp = alt_stack;
stack.ss_flags = 0;
stack.ss_size = SIGSTKSZ;
sigaltstack(&stack, ptr::null_mut());
Handler { _data: alt_stack }
}
pub unsafe fn drop_handler(handler: &mut Handler) {
munmap(handler._data, SIGSTKSZ);
}
type sighandler_t = *mut libc::c_void;
#[cfg(any(all(target_os = "linux", target_arch = "x86"), // may not match
all(target_os = "linux", target_arch = "x86_64"),
all(target_os = "linux", target_arch = "arm"), // may not match
all(target_os = "linux", target_arch = "aarch64"),
all(target_os = "linux", target_arch = "mips"), // may not match
all(target_os = "linux", target_arch = "mipsel"), // may not match
target_os = "android"))] // may not match
mod signal {
use libc;
use super::sighandler_t;
pub static SA_ONSTACK: libc::c_int = 0x08000000;
pub static SA_SIGINFO: libc::c_int = 0x00000004;
pub static SIGBUS: libc::c_int = 7;
pub static SIGSTKSZ: libc::size_t = 8192;
pub const SIG_DFL: sighandler_t = 0i as sighandler_t;
// This definition is not as accurate as it could be, `si_addr` is
// actually a giant union. Currently we're only interested in that field,
// however.
#[repr(C)]
pub struct siginfo {
si_signo: libc::c_int,
si_errno: libc::c_int,
si_code: libc::c_int,
pub si_addr: *mut libc::c_void
}
#[repr(C)]
pub struct sigaction {
pub sa_sigaction: sighandler_t,
pub sa_mask: sigset_t,
pub sa_flags: libc::c_int,
sa_restorer: *mut libc::c_void,
}
#[cfg(any(all(stage0, target_word_size = "32"),
all(not(stage0), target_pointer_width = "32")))]
#[repr(C)]
pub struct sigset_t {
__val: [libc::c_ulong; 32],
}
#[cfg(any(all(stage0, target_word_size = "64"),
all(not(stage0), target_pointer_width = "64")))]
#[repr(C)]
pub struct sigset_t {
__val: [libc::c_ulong; 16],
}
#[repr(C)]
pub struct sigaltstack {
pub ss_sp: *mut libc::c_void,
pub ss_flags: libc::c_int,
pub ss_size: libc::size_t
}
}
#[cfg(target_os = "macos")]
mod signal {
use libc;
use super::sighandler_t;
pub const SA_ONSTACK: libc::c_int = 0x0001;
pub const SA_SIGINFO: libc::c_int = 0x0040;
pub const SIGBUS: libc::c_int = 10;
pub const SIGSTKSZ: libc::size_t = 131072;
pub const SIG_DFL: sighandler_t = 0i as sighandler_t;
pub type sigset_t = u32;
// This structure has more fields, but we're not all that interested in
// them.
#[repr(C)]
pub struct siginfo {
pub si_signo: libc::c_int,
pub si_errno: libc::c_int,
pub si_code: libc::c_int,
pub pid: libc::pid_t,
pub uid: libc::uid_t,
pub status: libc::c_int,
pub si_addr: *mut libc::c_void
}
#[repr(C)]
pub struct sigaltstack {
pub ss_sp: *mut libc::c_void,
pub ss_size: libc::size_t,
pub ss_flags: libc::c_int
}
#[repr(C)]
pub struct sigaction {
pub sa_sigaction: sighandler_t,
pub sa_mask: sigset_t,
pub sa_flags: libc::c_int,
}
}
extern {
pub fn signal(signum: libc::c_int, handler: sighandler_t) -> sighandler_t;
pub fn raise(signum: libc::c_int) -> libc::c_int;
pub fn sigaction(signum: libc::c_int,
act: *const sigaction,
oldact: *mut sigaction) -> libc::c_int;
pub fn sigaltstack(ss: *const sigaltstack,
oss: *mut sigaltstack) -> libc::c_int;
}
}
#[cfg(not(any(target_os = "linux",
target_os = "macos")))]
mod imp {
use libc;
pub unsafe fn init() {
}
pub unsafe fn cleanup() {
}
pub unsafe fn make_handler() -> super::Handler {
super::Handler { _data: 0i as *mut libc::c_void }
}
pub unsafe fn drop_handler(_handler: &mut super::Handler) {
}
}
| 29.402135 | 87 | 0.540547 |
0ad82d99a48c560fa8d851cd318e33d4ba15d3f3
| 1,988 |
use anyhow::{anyhow, Result};
use itertools::Itertools;
use std::collections::HashMap;
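/// Walks the orbit map upwards from `satellite`, returning every body it
/// directly or indirectly orbits, ordered from nearest to farthest.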
fn get_orbiting<'a>(orbits: &HashMap<&'a str, &'a str>, mut satellite: &'a str) -> Vec<&'a str> {
let mut orbiting = Vec::new();
while let Some(&o) = orbits.get(satellite) {
orbiting.push(o);
satellite = o;
}
orbiting
}
#[aoc_generator(day6)]
pub fn input_generator(input: &str) -> Vec<Vec<String>> {
input
.lines()
.map(|l| l.split(')').map(|x| x.to_owned()).collect())
.collect()
}
#[aoc(day6, part1)]
fn answer_1<'a>(input: &'a [Vec<String>]) -> Result<usize> {
let mut tree: HashMap<&'a str, Vec<&'a str>> = HashMap::new();
for pairs in input {
let orbits = tree.entry(&pairs[0]).or_insert_with(Vec::new);
orbits.push(&pairs[1]);
}
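// Walk the orbit tree from the universal Center of Mass (COM); `count` is the
// depth of each child, which equals the number of direct and indirect orbits
// that child contributes to the total.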
let mut to_visit = vec![("COM", 1)];
let mut total_orbits = 0;
while let Some((x, count)) = to_visit.pop() {
if let Some(orbits) = tree.get(x) {
for o in orbits {
total_orbits += count;
to_visit.push((o, count + 1));
}
}
}
Ok(total_orbits)
}
#[aoc(day6, part2)]
fn answer_2<'a>(input: &'a [Vec<String>]) -> Result<usize> {
let mut satellites: HashMap<&'a str, &'a str> = HashMap::new();
input.iter().for_each(|pairs| {
satellites.insert(&pairs[1], &pairs[0]);
});
let you = get_orbiting(&satellites, "YOU");
let san = get_orbiting(&satellites, "SAN");
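// The minimal number of transfers is the sum of the distances from YOU and
// SAN to their closest common ancestor in the two orbit chains.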
you.iter()
.enumerate()
.cartesian_product(san.iter().enumerate())
.find_map(|((i, x), (j, y))| if x == y { Some(i + j) } else { None })
.ok_or_else(|| anyhow!("path not found"))
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn examples_1() {
assert_eq!(
42,
answer_1(&input_generator(
r#"COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L"#
))
.unwrap()
);
}
}
| 22.088889 | 97 | 0.522636 |
56510861dfb2aac1878e9075dfffa8e9588f3b10
| 20,936 |
use hir::HirDisplay;
use ra_db::FileId;
use ra_syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make, ArgListOwner, AstNode, ModuleItemOwner,
},
SyntaxKind, SyntaxNode, TextSize,
};
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
assist_config::SnippetCap,
utils::{render_snippet, Cursor},
AssistContext, AssistId, AssistKind, Assists,
};
// Assist: generate_function
//
// Adds a stub function with a signature matching the function under the cursor.
//
// ```
// struct Baz;
// fn baz() -> Baz { Baz }
// fn foo() {
// bar<|>("", baz());
// }
//
// ```
// ->
// ```
// struct Baz;
// fn baz() -> Baz { Baz }
// fn foo() {
// bar("", baz());
// }
//
// fn bar(arg: &str, baz: Baz) {
// ${0:todo!()}
// }
//
// ```
pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?;
let path = path_expr.path()?;
if ctx.sema.resolve_path(&path).is_some() {
// The function call already resolves, no need to add a function
return None;
}
let target_module = match path.qualifier() {
Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => Some(module),
_ => return None,
},
None => None,
};
let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?;
let target = call.syntax().text_range();
acc.add(
AssistId("generate_function", AssistKind::Generate),
format!("Generate `{}` function", function_builder.fn_name),
target,
|builder| {
let function_template = function_builder.render();
builder.edit_file(function_template.file);
let new_fn = function_template.to_string(ctx.config.snippet_cap);
match ctx.config.snippet_cap {
Some(cap) => builder.insert_snippet(cap, function_template.insert_offset, new_fn),
None => builder.insert(function_template.insert_offset, new_fn),
}
},
)
}
struct FunctionTemplate {
insert_offset: TextSize,
placeholder_expr: ast::MacroCall,
leading_ws: String,
fn_def: ast::Fn,
trailing_ws: String,
file: FileId,
}
impl FunctionTemplate {
fn to_string(&self, cap: Option<SnippetCap>) -> String {
let f = match cap {
Some(cap) => render_snippet(
cap,
self.fn_def.syntax(),
Cursor::Replace(self.placeholder_expr.syntax()),
),
None => self.fn_def.to_string(),
};
format!("{}{}{}", self.leading_ws, f, self.trailing_ws)
}
}
struct FunctionBuilder {
target: GeneratedFunctionTarget,
fn_name: ast::Name,
type_params: Option<ast::GenericParamList>,
params: ast::ParamList,
file: FileId,
needs_pub: bool,
}
impl FunctionBuilder {
/// Prepares a generated function that matches `call`.
/// The function is generated in `target_module` or next to `call`.
fn from_call(
ctx: &AssistContext,
call: &ast::CallExpr,
path: &ast::Path,
target_module: Option<hir::Module>,
) -> Option<Self> {
let mut file = ctx.frange.file_id;
let target = match &target_module {
Some(target_module) => {
let module_source = target_module.definition_source(ctx.db());
let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
file = in_file;
target
}
None => next_space_for_fn_after_call_site(&call)?,
};
let needs_pub = target_module.is_some();
let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?;
let fn_name = fn_name(&path)?;
let (type_params, params) = fn_args(ctx, target_module, &call)?;
Some(Self { target, fn_name, type_params, params, file, needs_pub })
}
fn render(self) -> FunctionTemplate {
let placeholder_expr = make::expr_todo();
let fn_body = make::block_expr(vec![], Some(placeholder_expr));
let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None };
let mut fn_def =
make::fn_def(visibility, self.fn_name, self.type_params, self.params, fn_body);
let leading_ws;
let trailing_ws;
let insert_offset = match self.target {
GeneratedFunctionTarget::BehindItem(it) => {
let indent = IndentLevel::from_node(&it);
leading_ws = format!("\n\n{}", indent);
fn_def = fn_def.indent(indent);
trailing_ws = String::new();
it.text_range().end()
}
GeneratedFunctionTarget::InEmptyItemList(it) => {
let indent = IndentLevel::from_node(it.syntax());
leading_ws = format!("\n{}", indent + 1);
fn_def = fn_def.indent(indent + 1);
trailing_ws = format!("\n{}", indent);
it.syntax().text_range().start() + TextSize::of('{')
}
};
let placeholder_expr =
fn_def.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
FunctionTemplate {
insert_offset,
placeholder_expr,
leading_ws,
fn_def,
trailing_ws,
file: self.file,
}
}
}
enum GeneratedFunctionTarget {
BehindItem(SyntaxNode),
InEmptyItemList(ast::ItemList),
}
impl GeneratedFunctionTarget {
fn syntax(&self) -> &SyntaxNode {
match self {
GeneratedFunctionTarget::BehindItem(it) => it,
GeneratedFunctionTarget::InEmptyItemList(it) => it.syntax(),
}
}
}
fn fn_name(call: &ast::Path) -> Option<ast::Name> {
let name = call.segment()?.syntax().to_string();
Some(make::name(&name))
}
/// Computes the type variables and arguments required for the generated function
fn fn_args(
ctx: &AssistContext,
target_module: hir::Module,
call: &ast::CallExpr,
) -> Option<(Option<ast::GenericParamList>, ast::ParamList)> {
let mut arg_names = Vec::new();
let mut arg_types = Vec::new();
for arg in call.arg_list()?.args() {
arg_names.push(match fn_arg_name(&arg) {
Some(name) => name,
None => String::from("arg"),
});
arg_types.push(match fn_arg_type(ctx, target_module, &arg) {
Some(ty) => ty,
None => String::from("()"),
});
}
deduplicate_arg_names(&mut arg_names);
let params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| make::param(name, ty));
Some((None, make::param_list(params)))
}
/// Makes duplicate argument names unique by appending incrementing numbers.
///
/// ```
/// let mut names: Vec<String> =
/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()];
/// deduplicate_arg_names(&mut names);
/// let expected: Vec<String> =
/// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()];
/// assert_eq!(names, expected);
/// ```
fn deduplicate_arg_names(arg_names: &mut Vec<String>) {
let arg_name_counts = arg_names.iter().fold(FxHashMap::default(), |mut m, name| {
*m.entry(name).or_insert(0) += 1;
m
});
let duplicate_arg_names: FxHashSet<String> = arg_name_counts
.into_iter()
.filter(|(_, count)| *count >= 2)
.map(|(name, _)| name.clone())
.collect();
let mut counter_per_name = FxHashMap::default();
for arg_name in arg_names.iter_mut() {
if duplicate_arg_names.contains(arg_name) {
let counter = counter_per_name.entry(arg_name.clone()).or_insert(1);
arg_name.push('_');
arg_name.push_str(&counter.to_string());
*counter += 1;
}
}
}
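/// Derives a parameter name from an argument expression by taking its last
/// name reference, so `bar(baz())` yields `baz`; for cast expressions the
/// inner expression is used instead of the target type.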
fn fn_arg_name(fn_arg: &ast::Expr) -> Option<String> {
match fn_arg {
ast::Expr::CastExpr(cast_expr) => fn_arg_name(&cast_expr.expr()?),
_ => Some(
fn_arg
.syntax()
.descendants()
.filter(|d| ast::NameRef::can_cast(d.kind()))
.last()?
.to_string(),
),
}
}
fn fn_arg_type(
ctx: &AssistContext,
target_module: hir::Module,
fn_arg: &ast::Expr,
) -> Option<String> {
let ty = ctx.sema.type_of_expr(fn_arg)?;
if ty.is_unknown() {
return None;
}
if let Ok(rendered) = ty.display_source_code(ctx.db(), target_module.into()) {
Some(rendered)
} else {
None
}
}
/// Returns the position inside the current mod or file,
/// directly after the current block.
/// We want to write the generated function directly after
/// fns, impls or macro calls, but inside mods.
fn next_space_for_fn_after_call_site(expr: &ast::CallExpr) -> Option<GeneratedFunctionTarget> {
let mut ancestors = expr.syntax().ancestors().peekable();
let mut last_ancestor: Option<SyntaxNode> = None;
while let Some(next_ancestor) = ancestors.next() {
match next_ancestor.kind() {
SyntaxKind::SOURCE_FILE => {
break;
}
SyntaxKind::ITEM_LIST => {
if ancestors.peek().map(|a| a.kind()) == Some(SyntaxKind::MODULE) {
break;
}
}
_ => {}
}
last_ancestor = Some(next_ancestor);
}
last_ancestor.map(GeneratedFunctionTarget::BehindItem)
}
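/// Returns the target file and the position at which the generated function
/// should be inserted: behind the last item of the module, or inside the
/// braces of an otherwise empty item list.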
fn next_space_for_fn_in_module(
db: &dyn hir::db::AstDatabase,
module_source: &hir::InFile<hir::ModuleSource>,
) -> Option<(FileId, GeneratedFunctionTarget)> {
let file = module_source.file_id.original_file(db);
let assist_item = match &module_source.value {
hir::ModuleSource::SourceFile(it) => {
if let Some(last_item) = it.items().last() {
GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
} else {
GeneratedFunctionTarget::BehindItem(it.syntax().clone())
}
}
hir::ModuleSource::Module(it) => {
if let Some(last_item) = it.item_list().and_then(|it| it.items().last()) {
GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
} else {
GeneratedFunctionTarget::InEmptyItemList(it.item_list()?)
}
}
};
Some((file, assist_item))
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn add_function_with_no_args() {
check_assist(
generate_function,
r"
fn foo() {
bar<|>();
}
",
r"
fn foo() {
bar();
}
fn bar() {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_from_method() {
// This ensures that the function is correctly generated
// in the next outer mod or file
check_assist(
generate_function,
r"
impl Foo {
fn foo() {
bar<|>();
}
}
",
r"
impl Foo {
fn foo() {
bar();
}
}
fn bar() {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_directly_after_current_block() {
// The new fn should not be created at the end of the file or module
check_assist(
generate_function,
r"
fn foo1() {
bar<|>();
}
fn foo2() {}
",
r"
fn foo1() {
bar();
}
fn bar() {
${0:todo!()}
}
fn foo2() {}
",
)
}
#[test]
fn add_function_with_no_args_in_same_module() {
check_assist(
generate_function,
r"
mod baz {
fn foo() {
bar<|>();
}
}
",
r"
mod baz {
fn foo() {
bar();
}
fn bar() {
${0:todo!()}
}
}
",
)
}
#[test]
fn add_function_with_function_call_arg() {
check_assist(
generate_function,
r"
struct Baz;
fn baz() -> Baz { todo!() }
fn foo() {
bar<|>(baz());
}
",
r"
struct Baz;
fn baz() -> Baz { todo!() }
fn foo() {
bar(baz());
}
fn bar(baz: Baz) {
${0:todo!()}
}
",
);
}
#[test]
fn add_function_with_method_call_arg() {
check_assist(
generate_function,
r"
struct Baz;
impl Baz {
fn foo(&self) -> Baz {
ba<|>r(self.baz())
}
fn baz(&self) -> Baz {
Baz
}
}
",
r"
struct Baz;
impl Baz {
fn foo(&self) -> Baz {
bar(self.baz())
}
fn baz(&self) -> Baz {
Baz
}
}
fn bar(baz: Baz) {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_with_string_literal_arg() {
check_assist(
generate_function,
r#"
fn foo() {
<|>bar("bar")
}
"#,
r#"
fn foo() {
bar("bar")
}
fn bar(arg: &str) {
${0:todo!()}
}
"#,
)
}
#[test]
fn add_function_with_char_literal_arg() {
check_assist(
generate_function,
r#"
fn foo() {
<|>bar('x')
}
"#,
r#"
fn foo() {
bar('x')
}
fn bar(arg: char) {
${0:todo!()}
}
"#,
)
}
#[test]
fn add_function_with_int_literal_arg() {
check_assist(
generate_function,
r"
fn foo() {
<|>bar(42)
}
",
r"
fn foo() {
bar(42)
}
fn bar(arg: i32) {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_with_cast_int_literal_arg() {
check_assist(
generate_function,
r"
fn foo() {
<|>bar(42 as u8)
}
",
r"
fn foo() {
bar(42 as u8)
}
fn bar(arg: u8) {
${0:todo!()}
}
",
)
}
#[test]
fn name_of_cast_variable_is_used() {
// Ensures that the name of the cast type isn't used
// in the generated function signature.
check_assist(
generate_function,
r"
fn foo() {
let x = 42;
bar<|>(x as u8)
}
",
r"
fn foo() {
let x = 42;
bar(x as u8)
}
fn bar(x: u8) {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_with_variable_arg() {
check_assist(
generate_function,
r"
fn foo() {
let worble = ();
<|>bar(worble)
}
",
r"
fn foo() {
let worble = ();
bar(worble)
}
fn bar(worble: ()) {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_with_impl_trait_arg() {
check_assist(
generate_function,
r"
trait Foo {}
fn foo() -> impl Foo {
todo!()
}
fn baz() {
<|>bar(foo())
}
",
r"
trait Foo {}
fn foo() -> impl Foo {
todo!()
}
fn baz() {
bar(foo())
}
fn bar(foo: impl Foo) {
${0:todo!()}
}
",
)
}
#[test]
fn borrowed_arg() {
check_assist(
generate_function,
r"
struct Baz;
fn baz() -> Baz { todo!() }
fn foo() {
bar<|>(&baz())
}
",
r"
struct Baz;
fn baz() -> Baz { todo!() }
fn foo() {
bar(&baz())
}
fn bar(baz: &Baz) {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_with_qualified_path_arg() {
check_assist(
generate_function,
r"
mod Baz {
pub struct Bof;
pub fn baz() -> Bof { Bof }
}
fn foo() {
<|>bar(Baz::baz())
}
",
r"
mod Baz {
pub struct Bof;
pub fn baz() -> Bof { Bof }
}
fn foo() {
bar(Baz::baz())
}
fn bar(baz: Baz::Bof) {
${0:todo!()}
}
",
)
}
#[test]
#[ignore]
// FIXME fix printing the generics of a `Ty` to make this test pass
fn add_function_with_generic_arg() {
check_assist(
generate_function,
r"
fn foo<T>(t: T) {
<|>bar(t)
}
",
r"
fn foo<T>(t: T) {
bar(t)
}
fn bar<T>(t: T) {
${0:todo!()}
}
",
)
}
#[test]
#[ignore]
// FIXME Fix function type printing to make this test pass
fn add_function_with_fn_arg() {
check_assist(
generate_function,
r"
struct Baz;
impl Baz {
fn new() -> Self { Baz }
}
fn foo() {
<|>bar(Baz::new);
}
",
r"
struct Baz;
impl Baz {
fn new() -> Self { Baz }
}
fn foo() {
bar(Baz::new);
}
fn bar(arg: fn() -> Baz) {
${0:todo!()}
}
",
)
}
#[test]
#[ignore]
// FIXME Fix closure type printing to make this test pass
fn add_function_with_closure_arg() {
check_assist(
generate_function,
r"
fn foo() {
let closure = |x: i64| x - 1;
<|>bar(closure)
}
",
r"
fn foo() {
let closure = |x: i64| x - 1;
bar(closure)
}
fn bar(closure: impl Fn(i64) -> i64) {
${0:todo!()}
}
",
)
}
#[test]
fn unresolvable_types_default_to_unit() {
check_assist(
generate_function,
r"
fn foo() {
<|>bar(baz)
}
",
r"
fn foo() {
bar(baz)
}
fn bar(baz: ()) {
${0:todo!()}
}
",
)
}
#[test]
fn arg_names_dont_overlap() {
check_assist(
generate_function,
r"
struct Baz;
fn baz() -> Baz { Baz }
fn foo() {
<|>bar(baz(), baz())
}
",
r"
struct Baz;
fn baz() -> Baz { Baz }
fn foo() {
bar(baz(), baz())
}
fn bar(baz_1: Baz, baz_2: Baz) {
${0:todo!()}
}
",
)
}
#[test]
fn arg_name_counters_start_at_1_per_name() {
check_assist(
generate_function,
r#"
struct Baz;
fn baz() -> Baz { Baz }
fn foo() {
<|>bar(baz(), baz(), "foo", "bar")
}
"#,
r#"
struct Baz;
fn baz() -> Baz { Baz }
fn foo() {
bar(baz(), baz(), "foo", "bar")
}
fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) {
${0:todo!()}
}
"#,
)
}
#[test]
fn add_function_in_module() {
check_assist(
generate_function,
r"
mod bar {}
fn foo() {
bar::my_fn<|>()
}
",
r"
mod bar {
pub(crate) fn my_fn() {
${0:todo!()}
}
}
fn foo() {
bar::my_fn()
}
",
)
}
#[test]
#[ignore]
// Ignored until local imports are supported.
// See https://github.com/rust-analyzer/rust-analyzer/issues/1165
fn qualified_path_uses_correct_scope() {
check_assist(
generate_function,
"
mod foo {
pub struct Foo;
}
fn bar() {
use foo::Foo;
let foo = Foo;
baz<|>(foo)
}
",
"
mod foo {
pub struct Foo;
}
fn bar() {
use foo::Foo;
let foo = Foo;
baz(foo)
}
fn baz(foo: foo::Foo) {
${0:todo!()}
}
",
)
}
#[test]
fn add_function_in_module_containing_other_items() {
check_assist(
generate_function,
r"
mod bar {
fn something_else() {}
}
fn foo() {
bar::my_fn<|>()
}
",
r"
mod bar {
fn something_else() {}
pub(crate) fn my_fn() {
${0:todo!()}
}
}
fn foo() {
bar::my_fn()
}
",
)
}
#[test]
fn add_function_in_nested_module() {
check_assist(
generate_function,
r"
mod bar {
mod baz {}
}
fn foo() {
bar::baz::my_fn<|>()
}
",
r"
mod bar {
mod baz {
pub(crate) fn my_fn() {
${0:todo!()}
}
}
}
fn foo() {
bar::baz::my_fn()
}
",
)
}
#[test]
fn add_function_in_another_file() {
check_assist(
generate_function,
r"
//- /main.rs
mod foo;
fn main() {
foo::bar<|>()
}
//- /foo.rs
",
r"
pub(crate) fn bar() {
${0:todo!()}
}",
)
}
#[test]
fn add_function_not_applicable_if_function_already_exists() {
check_assist_not_applicable(
generate_function,
r"
fn foo() {
bar<|>();
}
fn bar() {}
",
)
}
#[test]
fn add_function_not_applicable_if_unresolved_variable_in_call_is_selected() {
check_assist_not_applicable(
// bar is resolved, but baz isn't.
// The assist is only active if the cursor is on an unresolved path,
// but the assist should only be offered if the path is a function call.
generate_function,
r"
fn foo() {
bar(b<|>az);
}
fn bar(baz: ()) {}
",
)
}
#[test]
#[ignore]
fn create_method_with_no_args() {
check_assist(
generate_function,
r"
struct Foo;
impl Foo {
fn foo(&self) {
self.bar()<|>;
}
}
",
r"
struct Foo;
impl Foo {
fn foo(&self) {
self.bar();
}
fn bar(&self) {
todo!();
}
}
",
)
}
}
| 19.769594 | 98 | 0.515906 |
096841b4cc416473ec5f084d19d07588de7c81ec
| 22,535 |
use crate::evaluate::evaluate_baseline_expr;
use crate::futures::ThreadedReceiver;
use crate::prelude::*;
use std::io::Write;
use std::ops::Deref;
use std::process::{Command, Stdio};
use std::sync::mpsc;
use bytes::{BufMut, Bytes, BytesMut};
use futures::executor::block_on_stream;
// use futures::stream::StreamExt;
use futures_codec::FramedRead;
use log::trace;
use nu_errors::ShellError;
use nu_protocol::hir::ExternalCommand;
use nu_protocol::{Primitive, Scope, ShellTypeName, UntaggedValue, Value};
use nu_source::Tag;
pub enum StringOrBinary {
String(String),
Binary(Vec<u8>),
}
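/// Codec that yields UTF-8 strings whenever the buffered bytes are valid text
/// and falls back to passing them through as raw binary otherwise.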
pub struct MaybeTextCodec;
impl futures_codec::Encoder for MaybeTextCodec {
type Item = StringOrBinary;
type Error = std::io::Error;
fn encode(&mut self, item: Self::Item, dst: &mut BytesMut) -> Result<(), Self::Error> {
match item {
StringOrBinary::String(s) => {
dst.reserve(s.len());
dst.put(s.as_bytes());
Ok(())
}
StringOrBinary::Binary(b) => {
dst.reserve(b.len());
dst.put(Bytes::from(b));
Ok(())
}
}
}
}
impl futures_codec::Decoder for MaybeTextCodec {
type Item = StringOrBinary;
type Error = std::io::Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
let v: Vec<u8> = src.to_vec();
match String::from_utf8(v) {
Ok(s) => {
src.clear();
if s.is_empty() {
Ok(None)
} else {
Ok(Some(StringOrBinary::String(s)))
}
}
Err(err) => {
// Note: a single UTF-8 encoded character is at most 6 bytes in the original design (4 under the current spec).
// If we fail somewhere earlier than the last 6 bytes, we know that we're failing to understand the string
// encoding and not just seeing a partial character. When this happens, let's fall back to assuming it's a binary buffer.
if src.is_empty() {
Ok(None)
} else if src.len() > 6 && (src.len() - err.utf8_error().valid_up_to() > 6) {
// Fall back to assuming binary
let buf = src.to_vec();
src.clear();
Ok(Some(StringOrBinary::Binary(buf)))
} else {
// Looks like a utf-8 string, so let's assume that
let buf = src.split_to(err.utf8_error().valid_up_to() + 1);
String::from_utf8(buf.to_vec())
.map(|x| Some(StringOrBinary::String(x)))
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))
}
}
}
}
}
pub(crate) async fn run_external_command(
command: ExternalCommand,
context: &mut Context,
input: InputStream,
scope: &Scope,
is_last: bool,
) -> Result<InputStream, ShellError> {
trace!(target: "nu::run::external", "-> {}", command.name);
if !did_find_command(&command.name).await {
return Err(ShellError::labeled_error(
"Command not found",
"command not found",
&command.name_tag,
));
}
run_with_stdin(command, context, input, scope, is_last).await
}
async fn run_with_stdin(
command: ExternalCommand,
context: &mut Context,
input: InputStream,
scope: &Scope,
is_last: bool,
) -> Result<InputStream, ShellError> {
let path = context.shell_manager.path();
let input = trace_stream!(target: "nu::trace_stream::external::stdin", "input" = input);
let mut command_args = vec![];
for arg in command.args.iter() {
let value = evaluate_baseline_expr(arg, &context.registry, scope).await?;
// Skip any arguments that don't really exist, treating them as optional
// FIXME: we may want to preserve the gap in the future, though it's hard to say
// what value we would put in its place.
if value.value.is_none() {
continue;
}
// Clean up any argument before it is handed to the external command:
let trimmed_value_string = value.as_string()?.trim_end_matches('\n').to_string();
let value_string;
#[cfg(not(windows))]
{
value_string = trimmed_value_string
.replace('$', "\\$")
.replace('"', "\\\"")
.to_string()
}
#[cfg(windows)]
{
value_string = trimmed_value_string
}
command_args.push(value_string);
}
let process_args = command_args
.iter()
.map(|arg| {
let arg = expand_tilde(arg.deref(), dirs::home_dir);
#[cfg(not(windows))]
{
if argument_contains_whitespace(&arg) && !argument_is_quoted(&arg) {
add_quotes(&arg)
} else {
arg.as_ref().to_string()
}
}
#[cfg(windows)]
{
if let Some(unquoted) = remove_quotes(&arg) {
unquoted.to_string()
} else {
arg.as_ref().to_string()
}
}
})
.collect::<Vec<String>>();
// Find if there is a filter to apply to this
let filters = config::filters()?;
let filter_commands = filters
.find(MatchScheme::ExactCommand(command.name.clone()))
.map(|x| nu_parser::classify_block(&x.output_pipeline, context.registry()));
let external_input_stream = spawn(
&command,
&path,
&process_args[..],
input,
is_last && filter_commands.is_none(),
scope,
);
if let Some(block) = filter_commands {
if let Ok(stream) = external_input_stream {
crate::commands::classified::block::run_block(&block.block, context, stream, scope)
.await
} else {
external_input_stream
}
} else {
external_input_stream
}
}
fn spawn(
command: &ExternalCommand,
path: &str,
args: &[String],
input: InputStream,
is_last: bool,
scope: &Scope,
) -> Result<InputStream, ShellError> {
let command = command.clone();
let mut process = {
#[cfg(windows)]
{
let mut process = Command::new("cmd");
process.arg("/c");
process.arg(&command.name);
for arg in args {
// Clean the args before we use them:
let arg = arg.replace("|", "\\|");
process.arg(&arg);
}
process
}
#[cfg(not(windows))]
{
let cmd_with_args = vec![command.name.clone(), args.join(" ")].join(" ");
let mut process = Command::new("sh");
process.arg("-c").arg(cmd_with_args);
process
}
};
process.current_dir(path);
trace!(target: "nu::run::external", "cwd = {:?}", &path);
process.env_clear();
process.envs(scope.env.iter());
// We want stdout regardless of what
// we are doing ($it case or pipe stdin)
if !is_last {
process.stdout(Stdio::piped());
trace!(target: "nu::run::external", "set up stdout pipe");
}
// Open a stdin pipe since we have contents to write to it
if !input.is_empty() {
process.stdin(Stdio::piped());
trace!(target: "nu::run::external", "set up stdin pipe");
}
trace!(target: "nu::run::external", "built command {:?}", process);
// TODO Switch to async_std::process once it's stabilized
if let Ok(mut child) = process.spawn() {
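// A rendezvous channel collects values (output and errors) from the
// stdin-writer and stdout-reader threads below and feeds them back to the
// pipeline as this command's output stream.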
let (tx, rx) = mpsc::sync_channel(0);
let mut stdin = child.stdin.take();
let stdin_write_tx = tx.clone();
let stdout_read_tx = tx;
let stdin_name_tag = command.name_tag.clone();
let stdout_name_tag = command.name_tag;
std::thread::spawn(move || {
if !input.is_empty() {
let mut stdin_write = stdin
.take()
.expect("Internal error: could not get stdin pipe for external command");
for value in block_on_stream(input) {
match &value.value {
UntaggedValue::Primitive(Primitive::Nothing) => continue,
UntaggedValue::Primitive(Primitive::String(s))
| UntaggedValue::Primitive(Primitive::Line(s)) => {
if let Err(e) = stdin_write.write(s.as_bytes()) {
let message = format!("Unable to write to stdin (error = {})", e);
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
message,
"application may have closed before completing pipeline",
&stdin_name_tag,
)),
tag: stdin_name_tag,
}));
return Err(());
}
}
UntaggedValue::Primitive(Primitive::Binary(b)) => {
if let Err(e) = stdin_write.write(b) {
let message = format!("Unable to write to stdin (error = {})", e);
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
message,
"application may have closed before completing pipeline",
&stdin_name_tag,
)),
tag: stdin_name_tag,
}));
return Err(());
}
}
unsupported => {
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
format!(
"Received unexpected type from pipeline ({})",
unsupported.type_name()
),
"expected a string",
stdin_name_tag.clone(),
)),
tag: stdin_name_tag,
}));
return Err(());
}
};
}
}
Ok(())
});
std::thread::spawn(move || {
if !is_last {
let stdout = if let Some(stdout) = child.stdout.take() {
stdout
} else {
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
"Can't redirect the stdout for external command",
"can't redirect stdout",
&stdout_name_tag,
)),
tag: stdout_name_tag,
}));
return Err(());
};
let file = futures::io::AllowStdIo::new(stdout);
let stream = FramedRead::new(file, MaybeTextCodec);
for line in block_on_stream(stream) {
match line {
Ok(line) => match line {
StringOrBinary::String(s) => {
let result = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Primitive(Primitive::String(s.clone())),
tag: stdout_name_tag.clone(),
}));
if result.is_err() {
break;
}
}
StringOrBinary::Binary(b) => {
let result = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Primitive(Primitive::Binary(
b.into_iter().collect(),
)),
tag: stdout_name_tag.clone(),
}));
if result.is_err() {
break;
}
}
},
Err(e) => {
// Once the child has exited, its stdout pipe has likely been closed, so a
// read error is expected here. Only report it if the child actually
// failed; otherwise, don't emit an error.
let should_error = match child.wait() {
Ok(exit_status) => !exit_status.success(),
Err(_) => true,
};
if should_error {
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
format!("Unable to read from stdout ({})", e),
"unable to read from stdout",
&stdout_name_tag,
)),
tag: stdout_name_tag.clone(),
}));
}
return Ok(());
}
}
}
}
// We can give an error when we see a non-zero exit code, but this differs
// from what other shells do.
let external_failed = match child.wait() {
Err(_) => true,
Ok(exit_status) => !exit_status.success(),
};
if external_failed {
let cfg = crate::data::config::config(Tag::unknown());
if let Ok(cfg) = cfg {
if cfg.contains_key("nonzero_exit_errors") {
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
"External command failed",
"command failed",
&stdout_name_tag,
)),
tag: stdout_name_tag.clone(),
}));
}
}
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::external_non_zero()),
tag: stdout_name_tag,
}));
}
Ok(())
});
let stream = ThreadedReceiver::new(rx);
Ok(stream.to_input_stream())
} else {
Err(ShellError::labeled_error(
"Failed to spawn process",
"failed to spawn",
&command.name_tag,
))
}
}
async fn did_find_command(name: &str) -> bool {
#[cfg(not(windows))]
{
which::which(name).is_ok()
}
#[cfg(windows)]
{
if which::which(name).is_ok() {
true
} else {
let cmd_builtins = [
"call", "cls", "color", "date", "dir", "echo", "find", "hostname", "pause",
"start", "time", "title", "ver", "copy", "mkdir", "rename", "rd", "rmdir", "type",
"mklink",
];
cmd_builtins.contains(&name)
}
}
}
fn expand_tilde<SI: ?Sized, P, HD>(input: &SI, home_dir: HD) -> std::borrow::Cow<str>
where
SI: AsRef<str>,
P: AsRef<std::path::Path>,
HD: FnOnce() -> Option<P>,
{
shellexpand::tilde_with_context(input, home_dir)
}
#[allow(unused)]
pub fn argument_contains_whitespace(argument: &str) -> bool {
argument.chars().any(|c| c.is_whitespace())
}
fn argument_is_quoted(argument: &str) -> bool {
if argument.len() < 2 {
return false;
}
(argument.starts_with('"') && argument.ends_with('"'))
|| (argument.starts_with('\'') && argument.ends_with('\''))
}
#[allow(unused)]
fn add_quotes(argument: &str) -> String {
format!("\"{}\"", argument)
}
#[allow(unused)]
fn remove_quotes(argument: &str) -> Option<&str> {
if !argument_is_quoted(argument) {
return None;
}
let size = argument.len();
Some(&argument[1..size - 1])
}
#[allow(unused)]
fn shell_os_paths() -> Vec<std::path::PathBuf> {
let mut original_paths = vec![];
if let Some(paths) = std::env::var_os("PATH") {
original_paths = std::env::split_paths(&paths).collect::<Vec<_>>();
}
original_paths
}
#[cfg(test)]
mod tests {
use super::{
add_quotes, argument_contains_whitespace, argument_is_quoted, expand_tilde, remove_quotes,
run_external_command, Context, InputStream,
};
use futures::executor::block_on;
use nu_errors::ShellError;
use nu_protocol::Scope;
use nu_test_support::commands::ExternalBuilder;
// async fn read(mut stream: OutputStream) -> Option<Value> {
// match stream.try_next().await {
// Ok(val) => {
// if let Some(val) = val {
// val.raw_value()
// } else {
// None
// }
// }
// Err(_) => None,
// }
// }
async fn non_existent_run() -> Result<(), ShellError> {
let cmd = ExternalBuilder::for_name("i_dont_exist.exe").build();
let input = InputStream::empty();
let mut ctx = Context::basic().expect("There was a problem creating a basic context.");
assert!(
run_external_command(cmd, &mut ctx, input, &Scope::empty(), false)
.await
.is_err()
);
Ok(())
}
// async fn failure_run() -> Result<(), ShellError> {
// let cmd = ExternalBuilder::for_name("fail").build();
// let mut ctx = Context::basic().expect("There was a problem creating a basic context.");
// let stream = run_external_command(cmd, &mut ctx, None, false)
// .await?
// .expect("There was a problem running the external command.");
// match read(stream.into()).await {
// Some(Value {
// value: UntaggedValue::Error(_),
// ..
// }) => {}
// None | _ => panic!("Command didn't fail."),
// }
// Ok(())
// }
// #[test]
// fn identifies_command_failed() -> Result<(), ShellError> {
// block_on(failure_run())
// }
#[test]
fn identifies_command_not_found() -> Result<(), ShellError> {
block_on(non_existent_run())
}
#[test]
fn checks_contains_whitespace_from_argument_to_be_passed_in() {
assert_eq!(argument_contains_whitespace("andrés"), false);
assert_eq!(argument_contains_whitespace("and rés"), true);
assert_eq!(argument_contains_whitespace(r#"and\ rés"#), true);
}
#[test]
fn checks_quotes_from_argument_to_be_passed_in() {
assert_eq!(argument_is_quoted(""), false);
assert_eq!(argument_is_quoted("'"), false);
assert_eq!(argument_is_quoted("'a"), false);
assert_eq!(argument_is_quoted("a"), false);
assert_eq!(argument_is_quoted("a'"), false);
assert_eq!(argument_is_quoted("''"), true);
assert_eq!(argument_is_quoted(r#"""#), false);
assert_eq!(argument_is_quoted(r#""a"#), false);
assert_eq!(argument_is_quoted(r#"a"#), false);
assert_eq!(argument_is_quoted(r#"a""#), false);
assert_eq!(argument_is_quoted(r#""""#), true);
assert_eq!(argument_is_quoted("'andrés"), false);
assert_eq!(argument_is_quoted("andrés'"), false);
assert_eq!(argument_is_quoted(r#""andrés"#), false);
assert_eq!(argument_is_quoted(r#"andrés""#), false);
assert_eq!(argument_is_quoted("'andrés'"), true);
assert_eq!(argument_is_quoted(r#""andrés""#), true);
}
#[test]
fn adds_quotes_to_argument_to_be_passed_in() {
assert_eq!(add_quotes("andrés"), "\"andrés\"");
//assert_eq!(add_quotes("\"andrés\""), "\"andrés\"");
}
#[test]
fn strips_quotes_from_argument_to_be_passed_in() {
assert_eq!(remove_quotes(""), None);
assert_eq!(remove_quotes("'"), None);
assert_eq!(remove_quotes("'a"), None);
assert_eq!(remove_quotes("a"), None);
assert_eq!(remove_quotes("a'"), None);
assert_eq!(remove_quotes("''"), Some(""));
assert_eq!(remove_quotes(r#"""#), None);
assert_eq!(remove_quotes(r#""a"#), None);
assert_eq!(remove_quotes(r#"a"#), None);
assert_eq!(remove_quotes(r#"a""#), None);
assert_eq!(remove_quotes(r#""""#), Some(""));
assert_eq!(remove_quotes("'andrés"), None);
assert_eq!(remove_quotes("andrés'"), None);
assert_eq!(remove_quotes(r#""andrés"#), None);
assert_eq!(remove_quotes(r#"andrés""#), None);
assert_eq!(remove_quotes("'andrés'"), Some("andrés"));
assert_eq!(remove_quotes(r#""andrés""#), Some("andrés"));
}
#[test]
fn expands_tilde_if_starts_with_tilde_character() {
assert_eq!(
expand_tilde("~", || Some(std::path::Path::new("the_path_to_nu_light"))),
"the_path_to_nu_light"
);
}
#[test]
fn does_not_expand_tilde_if_tilde_is_not_first_character() {
assert_eq!(
expand_tilde("1~1", || Some(std::path::Path::new("the_path_to_nu_light"))),
"1~1"
);
}
}
| 34.669231 | 145 | 0.479743 |
6a54e53f146382c174207819b94d7feabfd0bfba
| 1,361 |
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(private_in_public)]
#![allow(unused)]
struct SemiPriv;
mod m1 {
struct Priv;
impl ::SemiPriv {
pub fn f(_: Priv) {} //~ ERROR private type in public interface
//~^ WARNING hard error
}
impl Priv {
pub fn f(_: Priv) {} // ok
}
}
mod m2 {
struct Priv;
impl ::std::ops::Deref for ::SemiPriv {
type Target = Priv; //~ ERROR private type in public interface
//~^ WARNING hard error
fn deref(&self) -> &Self::Target { unimplemented!() }
}
impl ::std::ops::Deref for Priv {
type Target = Priv; // ok
fn deref(&self) -> &Self::Target { unimplemented!() }
}
}
trait SemiPrivTrait {
type Assoc;
}
mod m3 {
struct Priv;
impl ::SemiPrivTrait for () {
type Assoc = Priv; //~ ERROR private type in public interface
//~^ WARNING hard error
}
}
fn main() {}
| 24.745455 | 71 | 0.618663 |
b95bf9a0642e13c10426ce12d973071ed29df860
| 14,334 |
use std::sync::{Arc, Mutex, RwLock};
use std::collections::{BTreeMap, HashMap};
use std::path::PathBuf;
use time;
use mysql::Pool;
use rocket::State;
use rocket::response::status;
use rocket_contrib::{Json, Value};
use regex::Regex;
use worker;
use models::{self, QueryString, Session, SmsFactory};
use error::E;
#[error(502)]
fn bad_gateway() -> E {
E::Unknown
}
#[error(500)]
fn internal_server_error() -> E {
E::Unknown
}
#[error(400)]
fn bad_request() -> E {
E::Unknown
}
/// ### send authorization sms
/// - /api/sms
/// - Content-Type: application/json
/// - post
/// ```js
/// {
/// "mobile": "1xxxxxxxxxx"
/// }
/// ```
/// - http 200:
/// ```js
/// {
/// "interval": 123
/// }
/// ```
/// - http 400:
/// ```js
/// {
/// "err": 123,
/// "msg": "error message"
/// }
/// ```
#[post("/sms", format = "application/json", data = "<data>")]
fn sms(
data: Json<Value>,
mysql_pool: State<Pool>,
sms_fac_lock: State<Mutex<SmsFactory>>,
) -> Result<Json<Value>, E> {
let sms_fac = sms_fac_lock.lock()?;
let mobile = data["mobile"].as_str()?;
if !Regex::new(r"^1\d{10}$")?.is_match(mobile) {
return Err(E::SmsMobileInvalid);
}
let interval = sms_fac.gen_code(&mysql_pool, mobile)?;
Ok(Json(json!({
"interval": interval,
})))
}
/// ### verify authorization sms code
/// - /api/sms/auth
/// - Content-Type: application/json
/// - post
/// ```js
/// {
/// "mobile": "1xxxxxxxxxx",
/// "code": 1234
/// }
/// ```
/// - http 200:
/// ```js
/// {
/// "user": {
/// "id": 123,
/// "name": "user name",
/// "is_signup": true
/// },
/// "access_token": "xxxx"
/// }
/// ```
/// - http 400:
/// ```js
/// {
/// "err": 123,
/// "msg": "error message"
/// }
/// ```
#[post("/sms/auth", format = "application/json", data = "<data>")]
fn sms_auth(
data: Json<Value>,
mysql_pool: State<Pool>,
sms_fac_lock: State<Mutex<SmsFactory>>,
) -> Result<Json<Value>, E> {
let sms_fac = sms_fac_lock.lock()?;
let mobile = data["mobile"].as_str()?;
let code = data["code"].as_i64()?;
if !Regex::new(r"^1\d{10}$")?.is_match(mobile) {
return Err(E::SmsMobileInvalid);
}
sms_fac
.check_code(&mysql_pool, mobile, code as u32)
.and_then(|_| {
// create session
let sess = Session::new(&mysql_pool, mobile)?;
Ok(Json(json!({
"access_token": Session::id_to_access_token(&sess.id)?
})))
})
}
/// ### fetch session owner's info
/// - /api/me?access_token={access_token}
/// - Content-Type: application/json
/// - get
/// - http 200:
/// ```js
/// {
/// "id": 123,
/// "name": "abc",
/// "created": 123
/// }
/// ```
/// - http 400:
/// ```js
/// {
/// "err": 123,
/// "msg": "error message"
/// }
/// ```
#[get("/me")]
fn me_get(
qs: QueryString,
mysql_pool: State<Pool>,
worker_state_lock: State<Arc<RwLock<worker::State>>>,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let worker_state = &*(worker_state_lock.read().unwrap());
Ok(Json(json!({
"id": user.id,
"name": user.name,
"created": user.created,
"usd2cny_rate": worker_state.usd2cny_rate
})))
}
/// ### register session owner
/// - /api/me?access_token={access_token}
/// - Content-Type: application/json
/// - post
/// ```js
/// {
/// "name": "abc"
/// }
/// ```
/// - http 200:
/// ```js
/// {
/// "id": 123,
/// "name": "abc",
/// "created": 123
/// }
/// ```
/// - http 400:
/// ```js
/// {
/// "err": 123,
/// "msg": "error message"
/// }
/// ```
#[post("/me", data = "<data>")]
fn me_post(qs: QueryString, mysql_pool: State<Pool>, data: Json<Value>) -> Result<Json<Value>, E> {
let mut sess = Session::from_query_string(&mysql_pool, &qs)?;
let name = data["name"].as_str()?;
sess.signup(&mysql_pool, name)?;
let user = sess.user()?;
Ok(Json(json!({
"id": user.id,
"name": user.name,
"created": user.created
})))
}
/// ### user's current coin states
/// - /api/states?access_token={access_token}
/// - Content-Type: application/json
/// - get
/// - http 200:
/// ```js
/// {
/// "balance": 123,
/// "states":
/// [
/// {
/// "coin_id": "abc",
/// "amount": 12.3,
/// "created": 123,
/// "value_cny": 12.3, //invalid state if this is None
/// "coin": { //invalid state if this is None
/// "id": "abc",
/// "name": "abc",
/// "symbol": "abc",
/// "price_usd": 12.3,
/// "volume_usd": 12.3,
/// "market_cap_usd": 12.3,
/// "percent_change_24h": 12.3, //percent
/// "rank": 123
/// }
/// },
/// ...
/// ]
/// }
/// ```
/// - http 400:
/// ```js
/// {
/// "err": 123,
/// "msg": "error message"
/// }
/// ```
#[get("/states")]
fn states(
qs: QueryString,
mysql_pool: State<Pool>,
worker_state_lock: State<Arc<RwLock<worker::State>>>,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let worker_state = &*(worker_state_lock.read().unwrap());
let balance = user.balance(&mysql_pool)?;
let mut user_states = user.states(&mysql_pool, worker_state, None)?;
user_states.reverse();
let mut rt_states_list = vec![];
let mut got_coins = vec![];
for state in user_states {
if !got_coins.contains(&state.coin_id) {
got_coins.push(state.coin_id.clone());
let coin = state.coin.unwrap();
let coin_json = json!({
"id": coin.id.clone(),
"name": coin.name.clone(),
"symbol": coin.symbol.clone(),
"price_usd": coin.price_usd,
"volume_usd": coin.volume_usd,
"market_cap_usd": coin.market_cap_usd,
"percent_change_24h": coin.percent_change_24h,
"rank": coin.rank,
"no": coin.no,
});
rt_states_list.push(json!({
"coin_id": state.coin_id,
"amount": state.amount,
"created": state.created,
"value_cny": coin.price_usd * state.amount * worker_state.usd2cny_rate,
"coin": coin_json
}));
}
}
let sum = rt_states_list
.iter()
.fold(0.0, |acc, x| acc + x["value_cny"].as_f64().unwrap());
println!("Sum: ¥{}", sum);
Ok(Json(json!({
"balance": balance,
"states": rt_states_list,
})))
}
#[get("/states/<coin_id>")]
fn coin_states(
qs: QueryString,
mysql_pool: State<Pool>,
worker_state_lock: State<Arc<RwLock<worker::State>>>,
coin_id: String,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let worker_state = &*(worker_state_lock.read().unwrap());
let ret = user.states(&mysql_pool, worker_state, Some(&coin_id))?;
let data: Vec<Value> = ret.iter().map(|record| {
json!({
"id": record.id,
"amount": record.amount,
"created": record.created,
})
}).collect();
Ok(Json(json!(data)))
}
#[put("/states", format = "application/json", data = "<data>")]
fn put_states(
qs: QueryString,
mysql_pool: State<Pool>,
data: Json<Value>,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let id = data["id"].as_i64()?;
let coin_id = data["coin_id"].as_str()?;
let created = data["created"].as_i64()?;
let amount = data["amount"].as_f64()?;
user.put_states(&mysql_pool, id, coin_id, created, amount)?;
Ok(Json(json!(null)))
}
#[delete("/states", format = "application/json", data = "<data>")]
fn delete_states(
qs: QueryString,
mysql_pool: State<Pool>,
data: Json<Value>,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let id = data["id"].as_i64()?;
user.del_states(&mysql_pool, id)?;
Ok(Json(json!(null)))
}
#[put("/balance", format = "application/json", data = "<data>")]
fn put_balance(
qs: QueryString,
mysql_pool: State<Pool>,
data: Json<Value>,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let id = data["id"].as_i64()?;
let created = data["created"].as_i64()?;
let amount = data["amount"].as_f64()?;
user.put_balance(&mysql_pool, id, created, amount)?;
Ok(Json(json!(null)))
}
#[delete("/balance", format = "application/json", data = "<data>")]
fn delete_balance(
qs: QueryString,
mysql_pool: State<Pool>,
data: Json<Value>,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let id = data["id"].as_i64()?;
user.del_balance(&mysql_pool, id)?;
Ok(Json(json!(null)))
}
/// ### user portfolio historical value
/// - /api/states/history?access_token={access_token}
/// - Content-Type: application/json
/// - get
/// - http 200:
/// ```js
/// [
/// [123, 12.3],
/// ...
/// ]
/// ```
/// - http 400:
/// ```js
/// {
/// "err": 123,
/// "msg": "error message"
/// }
/// ```
#[get("/states/history")]
fn states_history(
qs: QueryString,
mysql_pool: State<Pool>,
worker_state_lock: State<Arc<RwLock<worker::State>>>,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
let user = sess.user()?;
let worker_state = &*(worker_state_lock.read().unwrap());
// all states order by created time asc
let user_states = user.states(&mysql_pool, worker_state, None)?;
let end_ts = time::get_time().sec;
let mut origin_ts = 0i64;
// all states group by coin type and map to state points {COIN => [(ASC TIMESTAMP, AMOUNT)]}
let mut coin_to_states = HashMap::<String, Vec<(i64, f64)>>::new();
for state in user_states.iter() {
if origin_ts == 0i64 {
origin_ts = state.created;
}
if !coin_to_states.contains_key(&state.coin_id) {
coin_to_states.insert(state.coin_id.clone(), vec![]);
}
let vec = coin_to_states.get_mut(&state.coin_id)?;
vec.push((state.created, state.amount));
}
println!("USER STATES GROUP BY COIN: {:?}", coin_to_states);
// {ASC TIMESTAMP => (TIMESTAMP, VALUE)}
let mut mix_points = BTreeMap::<i64, (i64, f64)>::new();
// each type of coin
for (coin_id, states) in coin_to_states {
// get the coin historical points among timestamp window
// {ASC TIMESTAMP => (PRICE, AMOUNT)}
let coin_points = models::coin_history(&mysql_pool, &coin_id, origin_ts, end_ts, &states)?;
for (ts, item) in coin_points {
let value_cny = item.0 * item.1 * worker_state.usd2cny_rate;
if !mix_points.contains_key(&ts) {
mix_points.insert(ts, (ts, value_cny));
} else {
let exist_item = mix_points.get_mut(&ts).unwrap();
exist_item.1 = exist_item.1 + value_cny;
}
}
}
let ret: Vec<(i64, f64)> = mix_points.values().cloned().collect();
Ok(Json(json!(ret)))
}
/// ### get coin detail
/// - /api/coins/<coin_id>?access_token={access_token}
/// - Content-Type: application/json
/// - get
/// - http 200:
/// ```js
/// {
/// "id": "abc",
/// "name": "abc",
/// "symbol": "abc",
/// "rank": 123,
/// "price_usd": 12.3,
/// "volume_usd": 12.3,
/// "market_cap_usd": 12.3,
/// "percent_change_24h": 12.3,
/// "percent_change_1h": 12.3,
/// "history": [
/// [123, 12.3],
/// ...
/// ]
/// }
/// ```
/// - http 400:
/// ```js
/// {
/// "err": 123,
/// "msg": "error message"
/// }
/// ```
#[get("/coins/<coin_id>")]
fn coin(
qs: QueryString,
mysql_pool: State<Pool>,
worker_state_lock: State<Arc<RwLock<worker::State>>>,
coin_id: String,
) -> Result<Json<Value>, E> {
let sess = Session::from_query_string(&mysql_pool, &qs)?;
sess.user()?;
let worker_state = &*(worker_state_lock.read().unwrap());
let coin = worker_state.coins.iter().find(|&x| x.id == coin_id);
if coin.is_none() {
return Err(E::CoinNotFound);
}
let coin = coin.unwrap();
let end_ts = time::get_time().sec;
let origin_ts = end_ts - 30 * 24 * 3600;
let states = vec![(origin_ts, 0.0)];
let points = models::coin_history(&mysql_pool, &coin_id, origin_ts, end_ts, &states)?;
let history: Vec<(&i64, f64)> = points
.iter()
.map(|(k, item)| (k, item.0 * worker_state.usd2cny_rate))
.collect();
Ok(Json(json!({
"id": coin.id,
"name": coin.name,
"symbol": coin.symbol,
"rank": coin.rank,
"price_usd": coin.price_usd,
"price_cny": coin.price_cny,
"volume_usd": coin.volume_usd,
"market_cap_usd": coin.market_cap_usd,
"percent_change_24h": coin.percent_change_24h,
"percent_change_7d": coin.percent_change_7d,
"history": history,
"no": coin.no,
})))
}
#[get("/coins")]
fn coins(worker_state_lock: State<Arc<RwLock<worker::State>>>) -> Result<Json<Value>, E> {
let worker_state = &*(worker_state_lock.read().unwrap());
let mut arr = vec![];
for coin in worker_state.coins.iter() {
arr.push(json!({
"id": coin.id,
"name": coin.name,
"symbol": coin.symbol,
"rank": coin.rank,
"price_usd": coin.price_usd,
"volume_usd": coin.volume_usd,
"market_cap_usd": coin.market_cap_usd,
"percent_change_24h": coin.percent_change_24h,
"percent_change_1h": coin.percent_change_1h,
"no": coin.no,
}))
}
    arr.sort_by(|a, b| a["rank"].as_i64().cmp(&b["rank"].as_i64()));
Ok(Json(json!(arr)))
}
#[options("/<_path..>", rank = 1)]
fn options_all(_path: PathBuf) -> status::NoContent {
status::NoContent
}
| 26.792523 | 99 | 0.540742 |
48e94387f6593abb31756a64c5bee5fb5eae46ac
| 3,563 |
#![allow(clippy::default_trait_access)]
//! GPU POD data types.
use amethyst_assets::{AssetStorage, Handle};
use amethyst_core::math::Point3;
use amethyst_rendy::{
pod::IntoPod,
rendy::{
hal::format::Format,
mesh::{AsVertex, VertexFormat},
},
resources::Tint as TintComponent,
sprite::SpriteSheet,
Texture,
};
use glsl_layout::*;
/// `TileMapArgs`
/// ```glsl,ignore
/// uniform TileMapArgs {
/// uniform mat4 proj;
/// uniform mat4 view;
/// uniform mat4 map_coordinate_transform;
/// uniform mat4 map_transform;
/// };
/// ```
#[derive(Clone, Copy, Debug, AsStd140)]
#[repr(C, align(16))]
pub struct TileMapArgs {
/// Projection matrix
pub proj: mat4,
/// View matrix
pub view: mat4,
    /// Map coordinate transform matrix
    pub map_coordinate_transform: mat4,
    /// Map transform matrix
    pub map_transform: mat4,
/// Sprite Dimensions. Because we assume tiles are uniform for a map, we can store these here.
pub sprite_dimensions: vec2,
}
/// Tile Vertex Data
/// ```glsl,ignore
/// vec2 dir_x;
/// vec2 dir_y;
/// vec2 pos;
/// vec2 u_offset;
/// vec2 v_offset;
/// float depth;
/// vec4 tint;
/// ```
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, AsStd140)]
#[repr(C, align(4))]
pub struct TileArgs {
/// Upper-left coordinate of the sprite in the spritesheet
pub u_offset: vec2,
/// Bottom-right coordinate of the sprite in the spritesheet
pub v_offset: vec2,
    /// Tint for this sprite
pub tint: vec4,
/// Tile coordinate
pub tile_coordinate: uvec3,
}
impl AsVertex for TileArgs {
fn vertex() -> VertexFormat {
VertexFormat::new((
(Format::Rg32Sfloat, "u_offset"),
(Format::Rg32Sfloat, "v_offset"),
(Format::Rgba32Sfloat, "tint"),
(Format::Rgb32Uint, "tile_coordinate"),
))
}
}
impl TileArgs {
#[allow(clippy::cast_precision_loss)]
/// Extracts POD vertex data from the provided storages for a tile.
///
/// # Arguments
/// * `tex_storage` - `Texture` Storage
/// * `sprite_sheet` - `SpriteSheet` Storage
/// * `sprite_number` - The number index of the sprite in the sprite sheet.
/// * `tint` - An optional `TintComponent` reference for tinting this tile, if applicable.
/// * `tile_coordinate` - The Point3<u32> position of this tile (in Tile Coordinate Space)
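    /// * `flip` - Whether to flip the sprite's texture coordinates, as `(flip_horizontal, flip_vertical)`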
pub fn from_data<'a>(
tex_storage: &AssetStorage<Texture>,
sprite_sheet: &'a SpriteSheet,
sprite_number: usize,
tint: Option<&TintComponent>,
tile_coordinate: &Point3<u32>,
flip: (bool, bool),
) -> Option<(Self, &'a Handle<Texture>)> {
if !tex_storage.contains(&sprite_sheet.texture) {
return None;
}
let sprite = &sprite_sheet.sprites[sprite_number];
let mut u_offset: vec2 = [sprite.tex_coords.left, sprite.tex_coords.right].into();
let mut v_offset: vec2 = [sprite.tex_coords.top, sprite.tex_coords.bottom].into();
if flip.0 {
u_offset = [sprite.tex_coords.right, sprite.tex_coords.left].into();
}
if flip.1 {
v_offset = [sprite.tex_coords.bottom, sprite.tex_coords.top].into();
}
Some((
Self {
u_offset,
v_offset,
tint: tint.map_or([1.0; 4].into(), |t| t.0.into_pod()),
tile_coordinate: [tile_coordinate.x, tile_coordinate.y, tile_coordinate.z].into(),
},
&sprite_sheet.texture,
))
}
}
| 29.691667 | 98 | 0.607353 |
33db881e3f1283ab0eaad9437a384bc47bf75dd0
| 5,023 |
/**
* Holo-REA satisfaction zome library API
*
* Contains helper methods that can be used to manipulate `Satisfaction` data
* structures in either the local Holochain zome, or a separate DNA-local zome.
*
* Contains functionality for the "destination" side of an "indirect remote index" pair
* (@see `hdk_records` README).
*
* @package Holo-REA
*/
use hdk::prelude::*;
use hdk_records::{
RecordAPIResult, DataIntegrityError,
records::{
create_record,
read_record_entry,
read_record_entry_by_header,
update_record,
delete_record,
},
local_indexes::{
query_index,
},
foreign_indexes::{
create_foreign_index,
update_foreign_index,
},
};
use hc_zome_rea_economic_event_storage_consts::{EVENT_SATISFIES_LINK_TAG};
use hc_zome_rea_satisfaction_storage_consts::*;
use hc_zome_rea_satisfaction_storage::*;
use hc_zome_rea_satisfaction_rpc::*;
use hc_zome_rea_satisfaction_lib::construct_response;
pub fn handle_create_satisfaction<S>(entry_def_id: S, satisfaction: CreateRequest) -> RecordAPIResult<ResponseData>
where S: AsRef<str>
{
let (revision_id, satisfaction_address, entry_resp): (_,_, EntryData) = create_record(&entry_def_id, satisfaction.to_owned())?;
// link entries in the local DNA
let _results = create_foreign_index(
read_foreign_index_zome,
&SATISFACTION_SATISFIEDBY_INDEXING_API_METHOD,
&satisfaction_address,
read_foreign_event_index_zome,
&EVENT_INDEXING_API_METHOD,
satisfaction.get_satisfied_by(),
)?;
// :TODO: figure out if necessary/desirable to do bidirectional bridging between observation and other planning DNAs
construct_response(&satisfaction_address, &revision_id, &entry_resp)
}
pub fn handle_get_satisfaction<S>(entry_def_id: S, address: SatisfactionAddress) -> RecordAPIResult<ResponseData>
where S: AsRef<str>
{
let (revision, base_address, entry) = read_record_entry::<EntryData, EntryStorage, _,_>(&entry_def_id, address.as_ref())?;
construct_response(&base_address, &revision, &entry)
}
pub fn handle_update_satisfaction<S>(entry_def_id: S, satisfaction: UpdateRequest) -> RecordAPIResult<ResponseData>
where S: AsRef<str>
{
let (revision_id, base_address, new_entry, prev_entry): (_, SatisfactionAddress, EntryData, EntryData) = update_record(&entry_def_id, &satisfaction.get_revision_id(), satisfaction.to_owned())?;
if new_entry.satisfied_by != prev_entry.satisfied_by {
let _results = update_foreign_index(
read_foreign_index_zome,
&SATISFACTION_SATISFIEDBY_INDEXING_API_METHOD,
&base_address,
read_foreign_event_index_zome,
&EVENT_INDEXING_API_METHOD,
vec![new_entry.satisfied_by.clone()].as_slice(), vec![prev_entry.satisfied_by].as_slice(),
)?;
}
construct_response(&base_address, &revision_id, &new_entry)
}
pub fn handle_delete_satisfaction(revision_id: RevisionHash) -> RecordAPIResult<bool>
{
// read any referencing indexes
let (base_address, entry) = read_record_entry_by_header::<EntryData, EntryStorage, _>(&revision_id)?;
// handle link fields
let _results = update_foreign_index(
read_foreign_index_zome,
&SATISFACTION_SATISFIEDBY_INDEXING_API_METHOD,
&base_address,
read_foreign_event_index_zome,
&EVENT_INDEXING_API_METHOD,
vec![].as_slice(), vec![entry.satisfied_by].as_slice(),
)?;
delete_record::<EntryStorage, _>(&revision_id)
}
/// Properties accessor for zome config.
fn read_foreign_index_zome(conf: DnaConfigSliceObservation) -> Option<String> {
Some(conf.satisfaction.index_zome)
}
/// Properties accessor for zome config.
fn read_foreign_event_index_zome(conf: DnaConfigSliceObservation) -> Option<String> {
Some(conf.satisfaction.economic_event_index_zome)
}
const READ_FN_NAME: &str = "get_satisfaction";
pub fn generate_query_handler<S, C, F>(
foreign_zome_name_from_config: F,
event_entry_def_id: S,
) -> impl FnOnce(&QueryParams) -> RecordAPIResult<Vec<ResponseData>>
where S: AsRef<str>,
C: std::fmt::Debug,
SerializedBytes: TryInto<C, Error = SerializedBytesError>,
F: Fn(C) -> Option<String>,
{
move |params| {
let mut entries_result: RecordAPIResult<Vec<RecordAPIResult<ResponseData>>> = Err(DataIntegrityError::EmptyQuery);
match ¶ms.satisfied_by {
Some(satisfied_by) => {
entries_result = query_index::<ResponseData, SatisfactionAddress, C,F,_,_,_,_>(
&event_entry_def_id,
satisfied_by, EVENT_SATISFIES_LINK_TAG,
&foreign_zome_name_from_config, &READ_FN_NAME,
);
},
_ => (),
};
// :TODO: return errors for UI, rather than filtering
Ok(entries_result?.iter()
.cloned()
.filter_map(Result::ok)
.collect())
}
}
| 35.125874 | 197 | 0.697392 |
bbeb30971a03b8a45279abd99e1c44cbc29b344a
| 653 |
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let v = ~[-1f, 0f, 1f, 2f, 3f];
// Trailing expressions don't require parentheses:
let y = do vec::foldl(0f, v) |x, y| { x + *y } + 10f;
assert y == 15f;
}
| 34.368421 | 68 | 0.678407 |
fe877e39bc6bda1c585c32b02e5fba52cb6a03a7
| 12,776 |
use core::convert::TryFrom;
use embedded_time::duration::Milliseconds;
use embedded_time::fixed_point::FixedPoint;
use crate::bus::{InterfaceNumber, StringIndex, UsbBus};
use crate::device;
use crate::endpoint::{Endpoint, EndpointDirection};
use crate::{Result, UsbError};
/// Standard descriptor types
#[allow(missing_docs)]
pub mod descriptor_type {
pub const DEVICE: u8 = 1;
pub const CONFIGURATION: u8 = 2;
pub const STRING: u8 = 3;
pub const INTERFACE: u8 = 4;
pub const ENDPOINT: u8 = 5;
pub const IAD: u8 = 11;
pub const BOS: u8 = 15;
pub const CAPABILITY: u8 = 16;
}
/// String descriptor language IDs.
pub mod lang_id {
/// English (US)
///
/// Recommended for use as the first language ID for compatibility.
pub const ENGLISH_US: u16 = 0x0409;
}
/// Standard capability descriptor types
#[allow(missing_docs)]
pub mod capability_type {
pub const WIRELESS_USB: u8 = 1;
pub const USB_2_0_EXTENSION: u8 = 2;
pub const SS_USB_DEVICE: u8 = 3;
pub const CONTAINER_ID: u8 = 4;
pub const PLATFORM: u8 = 5;
}
/// A writer for USB descriptors.
pub struct DescriptorWriter<'a> {
buf: &'a mut [u8],
position: usize,
num_interfaces_mark: Option<usize>,
num_endpoints_mark: Option<usize>,
write_iads: bool,
}
impl DescriptorWriter<'_> {
pub(crate) fn new(buf: &mut [u8]) -> DescriptorWriter<'_> {
DescriptorWriter {
buf,
position: 0,
num_interfaces_mark: None,
num_endpoints_mark: None,
write_iads: false,
}
}
/// Gets the current position in the buffer, i.e. the number of bytes written so far.
pub fn position(&self) -> usize {
self.position
}
/// Writes an arbitrary (usually class-specific) descriptor.
pub fn write(&mut self, descriptor_type: u8, descriptor: &[u8]) -> Result<()> {
let length = descriptor.len();
if (self.position + 2 + length) > self.buf.len() || (length + 2) > 255 {
return Err(UsbError::BufferOverflow);
}
self.buf[self.position] = (length + 2) as u8;
self.buf[self.position + 1] = descriptor_type;
let start = self.position + 2;
self.buf[start..start + length].copy_from_slice(descriptor);
self.position = start + length;
Ok(())
}
pub(crate) fn device(&mut self, config: &device::Config) -> Result<()> {
self.write(
descriptor_type::DEVICE,
&[
0x10,
0x02, // bcdUSB 2.1
config.device_class, // bDeviceClass
config.device_sub_class, // bDeviceSubClass
config.device_protocol, // bDeviceProtocol
config.max_packet_size_0, // bMaxPacketSize0
config.vendor_id as u8,
(config.vendor_id >> 8) as u8, // idVendor
config.product_id as u8,
(config.product_id >> 8) as u8, // idProduct
config.device_release as u8,
(config.device_release >> 8) as u8, // bcdDevice
config.manufacturer.map_or(0, |_| 1), // iManufacturer
config.product.map_or(0, |_| 2), // iProduct
config.serial_number.map_or(0, |_| 3), // iSerialNumber
1, // bNumConfigurations
],
)
}
pub(crate) fn configuration(&mut self, config: &device::Config) -> Result<()> {
self.num_interfaces_mark = Some(self.position + 4);
self.write_iads = config.composite_with_iads;
self.write(
descriptor_type::CONFIGURATION,
&[
0,
0, // wTotalLength
0, // bNumInterfaces
device::CONFIGURATION_VALUE, // bConfigurationValue
0, // iConfiguration
0x80 | if config.self_powered { 0x40 } else { 0x00 }
| if config.supports_remote_wakeup {
0x20
} else {
0x00
}, // bmAttributes
config.max_power, // bMaxPower
],
)
}
pub(crate) fn end_class(&mut self) {
self.num_endpoints_mark = None;
}
pub(crate) fn end_configuration(&mut self) {
let position = self.position as u16;
self.buf[2..4].copy_from_slice(&position.to_le_bytes());
}
    /// Writes an interface association descriptor. Call from `UsbClass::get_configuration_descriptors`
/// before writing the USB class or function's interface descriptors if your class has more than
/// one interface and wants to play nicely with composite devices on Windows. If the USB device
/// hosting the class was not configured as composite with IADs enabled, calling this function
/// does nothing, so it is safe to call from libraries.
///
/// # Arguments
///
/// * `first_interface` - Number of the function's first interface, previously allocated with
/// [`UsbBusAllocator::interface`](crate::bus::UsbBusAllocator::interface).
/// * `interface_count` - Number of interfaces in the function.
/// * `function_class` - Class code assigned by USB.org. Use `0xff` for vendor-specific devices
/// that do not conform to any class.
/// * `function_sub_class` - Sub-class code. Depends on class.
/// * `function_protocol` - Protocol code. Depends on class and sub-class.
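    ///
    /// A sketch of a typical call (not from this crate's sources; the interface
    /// number field and class codes are placeholders for values a class would
    /// normally own):
    ///
    /// ```ignore
    /// // Group two interfaces starting at `self.comm_if` into one function.
    /// writer.iad(self.comm_if, 2, 0x02, 0x02, 0x00)?;
    /// ```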
pub fn iad(
&mut self,
first_interface: InterfaceNumber,
interface_count: u8,
function_class: u8,
function_sub_class: u8,
function_protocol: u8,
) -> Result<()> {
if !self.write_iads {
return Ok(());
}
self.write(
descriptor_type::IAD,
&[
first_interface.into(), // bFirstInterface
interface_count, // bInterfaceCount
function_class,
function_sub_class,
function_protocol,
0,
],
)?;
Ok(())
}
    /// Writes an interface descriptor.
///
/// # Arguments
///
/// * `number` - Interface number previously allocated with
/// [`UsbBusAllocator::interface`](crate::bus::UsbBusAllocator::interface).
/// * `interface_class` - Class code assigned by USB.org. Use `0xff` for vendor-specific devices
/// that do not conform to any class.
/// * `interface_sub_class` - Sub-class code. Depends on class.
/// * `interface_protocol` - Protocol code. Depends on class and sub-class.
pub fn interface(
&mut self,
number: InterfaceNumber,
interface_class: u8,
interface_sub_class: u8,
interface_protocol: u8,
) -> Result<()> {
self.interface_alt(
number,
device::DEFAULT_ALTERNATE_SETTING,
interface_class,
interface_sub_class,
interface_protocol,
None,
)
}
    /// Writes an interface descriptor with a specific alternate setting and
/// interface string identifier.
///
/// # Arguments
///
/// * `number` - Interface number previously allocated with
/// [`UsbBusAllocator::interface`](crate::bus::UsbBusAllocator::interface).
/// * `alternate_setting` - Number of the alternate setting
/// * `interface_class` - Class code assigned by USB.org. Use `0xff` for vendor-specific devices
/// that do not conform to any class.
/// * `interface_sub_class` - Sub-class code. Depends on class.
/// * `interface_protocol` - Protocol code. Depends on class and sub-class.
/// * `interface_string` - Index of string descriptor describing this interface
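    ///
    /// A sketch (not from this crate's sources; the interface number and string
    /// index are placeholders previously allocated from the bus allocator):
    ///
    /// ```ignore
    /// // Alternate setting 1 of a vendor-specific interface with a name string.
    /// writer.interface_alt(self.data_if, 1, 0xff, 0x00, 0x00, Some(self.name_str))?;
    /// ```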
pub fn interface_alt(
&mut self,
number: InterfaceNumber,
alternate_setting: u8,
interface_class: u8,
interface_sub_class: u8,
interface_protocol: u8,
interface_string: Option<StringIndex>,
) -> Result<()> {
if alternate_setting == device::DEFAULT_ALTERNATE_SETTING {
match self.num_interfaces_mark {
Some(mark) => self.buf[mark] += 1,
None => return Err(UsbError::InvalidState),
};
}
let str_index = interface_string.map_or(0, Into::into);
self.num_endpoints_mark = Some(self.position + 4);
self.write(
descriptor_type::INTERFACE,
&[
number.into(), // bInterfaceNumber
alternate_setting, // bAlternateSetting
0, // bNumEndpoints
interface_class, // bInterfaceClass
interface_sub_class, // bInterfaceSubClass
interface_protocol, // bInterfaceProtocol
str_index, // iInterface
],
)?;
Ok(())
}
/// Writes an endpoint descriptor.
///
/// # Arguments
///
/// * `endpoint` - Endpoint previously allocated with
/// [`UsbBusAllocator`](crate::bus::UsbBusAllocator).
pub fn endpoint<'e, B: UsbBus, D: EndpointDirection>(
&mut self,
endpoint: &Endpoint<'e, B, D>,
) -> Result<()> {
match self.num_endpoints_mark {
Some(mark) => self.buf[mark] += 1,
None => return Err(UsbError::InvalidState),
};
let mps = endpoint.max_packet_size();
self.write(
descriptor_type::ENDPOINT,
&[
endpoint.address().into(), // bEndpointAddress
endpoint.ep_type() as u8, // bmAttributes
mps as u8,
(mps >> 8) as u8, // wMaxPacketSize
Milliseconds::<u32>::try_from(endpoint.interval())
.unwrap()
.integer() as u8, // bInterval
],
)?;
Ok(())
}
/// Writes a string descriptor.
pub(crate) fn string(&mut self, string: &str) -> Result<()> {
let mut pos = self.position;
if pos + 2 > self.buf.len() {
return Err(UsbError::BufferOverflow);
}
self.buf[pos] = 0; // length placeholder
self.buf[pos + 1] = descriptor_type::STRING;
pos += 2;
for c in string.encode_utf16() {
            // Two bytes are needed per UTF-16 code unit; check both before slicing.
            if pos + 2 > self.buf.len() {
return Err(UsbError::BufferOverflow);
}
self.buf[pos..pos + 2].copy_from_slice(&c.to_le_bytes());
pos += 2;
}
self.buf[self.position] = (pos - self.position) as u8;
self.position = pos;
Ok(())
}
}
/// A writer for the Binary Object Store (BOS) descriptor.
pub struct BosWriter<'w, 'a: 'w> {
writer: &'w mut DescriptorWriter<'a>,
num_caps_mark: Option<usize>,
}
impl<'w, 'a: 'w> BosWriter<'w, 'a> {
pub(crate) fn new(writer: &'w mut DescriptorWriter<'a>) -> Self {
Self {
writer,
num_caps_mark: None,
}
}
pub(crate) fn bos(&mut self) -> Result<()> {
self.num_caps_mark = Some(self.writer.position + 4);
self.writer.write(
descriptor_type::BOS,
&[
0x00, 0x00, // wTotalLength
0x00, // bNumDeviceCaps
],
)?;
self.capability(capability_type::USB_2_0_EXTENSION, &[0; 4])?;
Ok(())
}
/// Writes capability descriptor to a BOS
///
/// # Arguments
///
/// * `capability_type` - Type of a capability
/// * `data` - Binary data of the descriptor
pub fn capability(&mut self, capability_type: u8, data: &[u8]) -> Result<()> {
match self.num_caps_mark {
Some(mark) => self.writer.buf[mark] += 1,
None => return Err(UsbError::InvalidState),
}
let mut start = self.writer.position;
let blen = data.len();
if (start + blen + 3) > self.writer.buf.len() || (blen + 3) > 255 {
return Err(UsbError::BufferOverflow);
}
self.writer.buf[start] = (blen + 3) as u8;
self.writer.buf[start + 1] = descriptor_type::CAPABILITY;
self.writer.buf[start + 2] = capability_type;
start += 3;
self.writer.buf[start..start + blen].copy_from_slice(data);
self.writer.position = start + blen;
Ok(())
}
pub(crate) fn end_bos(&mut self) {
self.num_caps_mark = None;
let position = self.writer.position as u16;
self.writer.buf[2..4].copy_from_slice(&position.to_le_bytes());
}
}
| 32.675192 | 102 | 0.55479 |
9cc4552fac43329749992c318ccf3d846cf88dbb
| 3,556 |
#![deny(warnings, rust_2018_idioms)]
use futures::{Future, Poll};
use linkerd2_error::Error;
use linkerd2_stack::Proxy;
use std::time::Duration;
use tokio_connect::Connect;
use tokio_timer as timer;
pub mod error;
mod failfast;
mod idle;
mod probe_ready;
pub use self::{
failfast::{FailFast, FailFastError, FailFastLayer},
idle::{Idle, IdleError, IdleLayer},
probe_ready::{ProbeReady, ProbeReadyLayer},
};
/// A timeout that wraps an underlying operation.
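///
/// A sketch of both constructors (not from the original source; `my_service`
/// and `other_service` are placeholders for any wrapped service or connector):
///
/// ```ignore
/// let limited = Timeout::new(my_service, Duration::from_secs(10));
/// let unlimited = Timeout::passthru(other_service); // never times out
/// ```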
#[derive(Debug, Clone)]
pub struct Timeout<T> {
inner: T,
duration: Option<Duration>,
}
pub enum TimeoutFuture<F> {
Passthru(F),
Timeout(timer::Timeout<F>, Duration),
}
//===== impl Timeout =====
impl<T> Timeout<T> {
/// Construct a new `Timeout` wrapping `inner`.
pub fn new(inner: T, duration: Duration) -> Self {
Timeout {
inner,
duration: Some(duration),
}
}
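    /// Construct a `Timeout` that never times out and simply forwards the
    /// operation to `inner`.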
pub fn passthru(inner: T) -> Self {
Timeout {
inner,
duration: None,
}
}
}
impl<P, S, Req> Proxy<Req, S> for Timeout<P>
where
P: Proxy<Req, S>,
S: tower::Service<P::Request>,
{
type Request = P::Request;
type Response = P::Response;
type Error = Error;
type Future = TimeoutFuture<P::Future>;
fn proxy(&self, svc: &mut S, req: Req) -> Self::Future {
let inner = self.inner.proxy(svc, req);
match self.duration {
None => TimeoutFuture::Passthru(inner),
Some(t) => TimeoutFuture::Timeout(timer::Timeout::new(inner, t), t),
}
}
}
impl<S, Req> tower::Service<Req> for Timeout<S>
where
S: tower::Service<Req>,
S::Error: Into<Error>,
{
type Response = S::Response;
type Error = Error;
type Future = TimeoutFuture<S::Future>;
fn poll_ready(&mut self) -> Poll<(), Self::Error> {
self.inner.poll_ready().map_err(Into::into)
}
fn call(&mut self, req: Req) -> Self::Future {
let inner = self.inner.call(req);
match self.duration {
None => TimeoutFuture::Passthru(inner),
Some(t) => TimeoutFuture::Timeout(timer::Timeout::new(inner, t), t),
}
}
}
impl<C> Connect for Timeout<C>
where
C: Connect,
C::Error: Into<Error>,
{
type Connected = C::Connected;
type Error = Error;
type Future = TimeoutFuture<C::Future>;
fn connect(&self) -> Self::Future {
let inner = self.inner.connect();
match self.duration {
None => TimeoutFuture::Passthru(inner),
Some(t) => TimeoutFuture::Timeout(timer::Timeout::new(inner, t), t),
}
}
}
impl<F> Future for TimeoutFuture<F>
where
F: Future,
F::Error: Into<Error>,
{
type Item = F::Item;
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
match self {
TimeoutFuture::Passthru(f) => f.poll().map_err(Into::into),
TimeoutFuture::Timeout(f, duration) => f.poll().map_err(|error| {
if error.is_timer() {
return error
.into_timer()
.expect("error.into_timer() must succeed if error.is_timer()")
.into();
}
if error.is_elapsed() {
return error::ResponseTimeout(*duration).into();
}
error
.into_inner()
.expect("if error is not elapsed or timer, must be inner")
.into()
}),
}
}
}
| 25.042254 | 86 | 0.55315 |
f8fab05300c8ba7d36ea4420905ce002af23dc1e
| 4,064 |
#![allow(dead_code, unused_variables, unused_mut)]
use std::collections::HashMap;
use std::fs::read_to_string;
fn part1(input_file: String) {
let mut answer: usize = 0;
let mut lines: Vec<&str> = input_file.split("\n").collect();
if lines.last().unwrap().is_empty() {
lines.pop();
}
let opening_brackets: Vec<char> = vec!['(', '[', '{', '<'];
let mut closing_brackets_lookup: HashMap<char, char> = HashMap::new();
closing_brackets_lookup.insert(')', '(');
closing_brackets_lookup.insert(']', '[');
closing_brackets_lookup.insert('}', '{');
closing_brackets_lookup.insert('>', '<');
let mut points_lookup: HashMap<char, usize> = HashMap::new();
points_lookup.insert(')', 3);
points_lookup.insert(']', 57);
points_lookup.insert('}', 1197);
points_lookup.insert('>', 25137);
let mut work_vec: Vec<char> = vec![];
for line in &lines {
for chr in line.chars() {
if opening_brackets.contains(&chr) {
work_vec.push(chr);
} else {
if work_vec.last().unwrap() == &closing_brackets_lookup[&chr] {
work_vec.pop();
} else {
answer += points_lookup[&chr];
break;
}
}
}
work_vec.clear();
}
println!("Answer: {}", answer);
}
fn part2(input_file: String) {
let answer: usize;
let mut lines: Vec<&str> = input_file.split("\n").collect();
let mut incomplete: Vec<&str> = vec![];
if lines.last().unwrap().is_empty() {
lines.pop();
}
let opening_brackets: Vec<char> = vec!['(', '[', '{', '<'];
let mut closing_brackets_lookup: HashMap<char, char> = HashMap::new();
closing_brackets_lookup.insert(')', '(');
closing_brackets_lookup.insert(']', '[');
closing_brackets_lookup.insert('}', '{');
closing_brackets_lookup.insert('>', '<');
let mut opening_brackets_lookup: HashMap<char, char> = HashMap::new();
opening_brackets_lookup.insert('(', ')');
opening_brackets_lookup.insert('[', ']');
opening_brackets_lookup.insert('{', '}');
opening_brackets_lookup.insert('<', '>');
let mut points_lookup: HashMap<char, usize> = HashMap::new();
points_lookup.insert(')', 1);
points_lookup.insert(']', 2);
points_lookup.insert('}', 3);
points_lookup.insert('>', 4);
let mut work_vec: Vec<char> = vec![];
for line in &lines {
let mut corrupted: bool = false;
for chr in line.chars() {
if opening_brackets.contains(&chr) {
work_vec.push(chr);
} else {
if work_vec.last().unwrap() == &closing_brackets_lookup[&chr] {
work_vec.pop();
} else {
corrupted = true;
}
}
}
if !corrupted {
incomplete.push(line);
}
work_vec.clear();
}
let mut complete_vec: Vec<char> = vec![];
let mut scores_vec: Vec<usize> = vec![];
for line in &incomplete {
for chr in line.chars() {
if opening_brackets.contains(&chr) {
work_vec.push(chr);
} else {
if work_vec.last().unwrap() == &closing_brackets_lookup[&chr] {
work_vec.pop();
}
}
}
for chr in work_vec.iter().rev() {
complete_vec.push(opening_brackets_lookup[chr]);
}
let mut score: usize = 0;
for chr in &complete_vec {
score *= 5;
score += points_lookup[chr];
}
scores_vec.push(score);
work_vec.clear();
complete_vec.clear();
}
scores_vec.sort();
answer = scores_vec[(scores_vec.len() - 1) / 2];
println!("Answer: {}", answer);
}
fn main() {
let part: i32 = 2;
let input_file: String = read_to_string("input.txt").unwrap();
if part == 1 {
part1(input_file);
} else if part == 2 {
part2(input_file);
}
}
| 32.774194 | 79 | 0.535679 |
62b74400a01b56ecb7bfbbb10c2c9431de6e9ada
| 2,205 |
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
peer_manager::{PeerManager, PeerScoreUpdateType},
PeerId,
};
use solana_libra_channel;
use solana_libra_network::validator_network::StateSynchronizerSender;
use std::collections::HashMap;
#[test]
fn test_peer_manager() {
let peers = vec![
PeerId::random(),
PeerId::random(),
PeerId::random(),
PeerId::random(),
];
let mut peer_manager = PeerManager::new(peers.clone());
let (network_reqs_tx, _) = solana_libra_channel::new_test(8);
let sender = StateSynchronizerSender::new(network_reqs_tx);
for peer_id in peers.clone() {
peer_manager.enable_peer(peer_id, sender.clone());
}
for _ in 0..50 {
peer_manager.update_score(&peers[0], PeerScoreUpdateType::InvalidChunk);
}
let mut pick_counts = HashMap::new();
for _ in 0..1000 {
let (picked_peer_id, _) = peer_manager.pick_peer().unwrap();
let counter = pick_counts.entry(picked_peer_id).or_insert(0);
*counter += 1;
}
// unwrap_or needed because the peer with bad score may never be picked, and may be
// missing from pick_counts
assert!(pick_counts.get(&peers[0]).unwrap_or(&0) < pick_counts.get(&peers[1]).unwrap());
assert!(pick_counts.get(&peers[0]).unwrap_or(&0) < pick_counts.get(&peers[2]).unwrap());
assert!(pick_counts.get(&peers[0]).unwrap_or(&0) < pick_counts.get(&peers[3]).unwrap());
}
#[test]
fn test_remove_requests() {
let peers = vec![PeerId::random(), PeerId::random()];
let mut peer_manager = PeerManager::new(peers.clone());
peer_manager.process_request(1, peers[0]);
peer_manager.process_request(3, peers[1]);
peer_manager.process_request(5, peers[0]);
peer_manager.process_request(10, peers[0]);
peer_manager.process_request(12, peers[1]);
peer_manager.remove_requests(5);
assert!(!peer_manager.has_requested(1, peers[0]));
assert!(!peer_manager.has_requested(3, peers[1]));
assert!(!peer_manager.has_requested(5, peers[0]));
assert!(peer_manager.has_requested(10, peers[0]));
assert!(peer_manager.has_requested(12, peers[1]));
}
| 34.453125 | 92 | 0.677098 |
e544c9490a0f0eae7aac712a045054ddac97a5cd
| 20,592 |
// Take a look at the license at the top of the repository in the LICENSE file.
use glib::object::IsA;
use glib::translate::*;
use glib::Error;
use libc::{c_uchar, c_void};
use std::io::Read;
use std::mem;
use std::path::Path;
use std::pin::Pin;
use std::ptr;
use std::slice;
use std::future::Future;
use crate::{Colorspace, Pixbuf, PixbufFormat};
impl Pixbuf {
#[doc(alias = "gdk_pixbuf_new_from_data")]
pub fn from_mut_slice<T: AsMut<[u8]>>(
data: T,
colorspace: Colorspace,
has_alpha: bool,
bits_per_sample: i32,
width: i32,
height: i32,
row_stride: i32,
) -> Pixbuf {
unsafe extern "C" fn destroy<T: AsMut<[u8]>>(_: *mut c_uchar, data: *mut c_void) {
let _data: Box<T> = Box::from_raw(data as *mut T); // the data will be destroyed now
}
assert!(width > 0, "width must be greater than 0");
assert!(height > 0, "height must be greater than 0");
assert!(row_stride > 0, "row_stride must be greater than 0");
assert!(
bits_per_sample == 8,
"bits_per_sample == 8 is the only supported value"
);
let width = width as usize;
let height = height as usize;
let row_stride = row_stride as usize;
let bits_per_sample = bits_per_sample as usize;
let n_channels = if has_alpha { 4 } else { 3 };
let last_row_len = width * ((n_channels * bits_per_sample + 7) / 8);
let mut data: Box<T> = Box::new(data);
let ptr = {
let data: &mut [u8] = (*data).as_mut();
assert!(
data.len() >= ((height - 1) * row_stride + last_row_len) as usize,
"data.len() must fit the width, height, and row_stride"
);
data.as_mut_ptr()
};
unsafe {
from_glib_full(ffi::gdk_pixbuf_new_from_data(
ptr,
colorspace.to_glib(),
has_alpha.to_glib(),
bits_per_sample as i32,
width as i32,
height as i32,
row_stride as i32,
Some(destroy::<T>),
Box::into_raw(data) as *mut _,
))
}
}
// rustdoc-stripper-ignore-next
/// Creates a `Pixbuf` from a type implementing `Read` (like `File`).
///
/// ```no_run
/// use std::fs::File;
/// use gdk_pixbuf::Pixbuf;
///
/// let f = File::open("some_file.png").expect("failed to open image");
/// let pixbuf = Pixbuf::from_read(f).expect("failed to load image");
/// ```
pub fn from_read<R: Read + Send + 'static>(r: R) -> Result<Pixbuf, Error> {
Pixbuf::from_stream(&gio::ReadInputStream::new(r), None::<&gio::Cancellable>)
}
#[doc(alias = "gdk_pixbuf_new_from_file")]
#[doc(alias = "gdk_pixbuf_new_from_file_utf8")]
pub fn from_file<T: AsRef<Path>>(filename: T) -> Result<Pixbuf, Error> {
#[cfg(not(windows))]
use ffi::gdk_pixbuf_new_from_file;
#[cfg(windows)]
use ffi::gdk_pixbuf_new_from_file_utf8 as gdk_pixbuf_new_from_file;
unsafe {
let mut error = ptr::null_mut();
let ptr = gdk_pixbuf_new_from_file(filename.as_ref().to_glib_none().0, &mut error);
if error.is_null() {
Ok(from_glib_full(ptr))
} else {
Err(from_glib_full(error))
}
}
}
#[doc(alias = "gdk_pixbuf_new_from_file_at_size")]
#[doc(alias = "gdk_pixbuf_new_from_file_at_size_utf8")]
pub fn from_file_at_size<T: AsRef<Path>>(
filename: T,
width: i32,
height: i32,
) -> Result<Pixbuf, Error> {
#[cfg(not(windows))]
use ffi::gdk_pixbuf_new_from_file_at_size;
#[cfg(windows)]
use ffi::gdk_pixbuf_new_from_file_at_size_utf8 as gdk_pixbuf_new_from_file_at_size;
unsafe {
let mut error = ptr::null_mut();
let ptr = gdk_pixbuf_new_from_file_at_size(
filename.as_ref().to_glib_none().0,
width,
height,
&mut error,
);
if error.is_null() {
Ok(from_glib_full(ptr))
} else {
Err(from_glib_full(error))
}
}
}
#[doc(alias = "gdk_pixbuf_new_from_file_at_scale")]
#[doc(alias = "gdk_pixbuf_new_from_file_at_scale_utf8")]
pub fn from_file_at_scale<T: AsRef<Path>>(
filename: T,
width: i32,
height: i32,
preserve_aspect_ratio: bool,
) -> Result<Pixbuf, Error> {
#[cfg(not(windows))]
use ffi::gdk_pixbuf_new_from_file_at_scale;
#[cfg(windows)]
use ffi::gdk_pixbuf_new_from_file_at_scale_utf8 as gdk_pixbuf_new_from_file_at_scale;
unsafe {
let mut error = ptr::null_mut();
let ptr = gdk_pixbuf_new_from_file_at_scale(
filename.as_ref().to_glib_none().0,
width,
height,
preserve_aspect_ratio.to_glib(),
&mut error,
);
if error.is_null() {
Ok(from_glib_full(ptr))
} else {
Err(from_glib_full(error))
}
}
}
#[doc(alias = "gdk_pixbuf_new_from_stream_async")]
pub fn from_stream_async<
P: IsA<gio::InputStream>,
Q: IsA<gio::Cancellable>,
R: FnOnce(Result<Pixbuf, Error>) + Send + 'static,
>(
stream: &P,
cancellable: Option<&Q>,
callback: R,
) {
let cancellable = cancellable.map(|p| p.as_ref());
let user_data: Box<R> = Box::new(callback);
unsafe extern "C" fn from_stream_async_trampoline<
R: FnOnce(Result<Pixbuf, Error>) + Send + 'static,
>(
_source_object: *mut glib::gobject_ffi::GObject,
res: *mut gio::ffi::GAsyncResult,
user_data: glib::ffi::gpointer,
) {
let mut error = ptr::null_mut();
let ptr = ffi::gdk_pixbuf_new_from_stream_finish(res, &mut error);
let result = if error.is_null() {
Ok(from_glib_full(ptr))
} else {
Err(from_glib_full(error))
};
let callback: Box<R> = Box::from_raw(user_data as *mut _);
callback(result);
}
let callback = from_stream_async_trampoline::<R>;
unsafe {
ffi::gdk_pixbuf_new_from_stream_async(
stream.as_ref().to_glib_none().0,
cancellable.to_glib_none().0,
Some(callback),
Box::into_raw(user_data) as *mut _,
);
}
}
pub fn from_stream_async_future<P: IsA<gio::InputStream> + Clone + 'static>(
stream: &P,
) -> Pin<Box<dyn Future<Output = Result<Pixbuf, Error>> + 'static>> {
let stream = stream.clone();
Box::pin(gio::GioFuture::new(&(), move |_obj, send| {
let cancellable = gio::Cancellable::new();
Self::from_stream_async(&stream, Some(&cancellable), move |res| {
send.resolve(res);
});
cancellable
}))
}
#[doc(alias = "gdk_pixbuf_new_from_stream_at_scale_async")]
pub fn from_stream_at_scale_async<
P: IsA<gio::InputStream>,
Q: IsA<gio::Cancellable>,
R: FnOnce(Result<Pixbuf, Error>) + Send + 'static,
>(
stream: &P,
width: i32,
height: i32,
preserve_aspect_ratio: bool,
cancellable: Option<&Q>,
callback: R,
) {
let cancellable = cancellable.map(|p| p.as_ref());
let user_data: Box<R> = Box::new(callback);
unsafe extern "C" fn from_stream_at_scale_async_trampoline<
R: FnOnce(Result<Pixbuf, Error>) + Send + 'static,
>(
_source_object: *mut glib::gobject_ffi::GObject,
res: *mut gio::ffi::GAsyncResult,
user_data: glib::ffi::gpointer,
) {
let mut error = ptr::null_mut();
let ptr = ffi::gdk_pixbuf_new_from_stream_finish(res, &mut error);
let result = if error.is_null() {
Ok(from_glib_full(ptr))
} else {
Err(from_glib_full(error))
};
let callback: Box<R> = Box::from_raw(user_data as *mut _);
callback(result);
}
let callback = from_stream_at_scale_async_trampoline::<R>;
unsafe {
ffi::gdk_pixbuf_new_from_stream_at_scale_async(
stream.as_ref().to_glib_none().0,
width,
height,
preserve_aspect_ratio.to_glib(),
cancellable.to_glib_none().0,
Some(callback),
Box::into_raw(user_data) as *mut _,
);
}
}
pub fn from_stream_at_scale_async_future<P: IsA<gio::InputStream> + Clone + 'static>(
stream: &P,
width: i32,
height: i32,
preserve_aspect_ratio: bool,
) -> Pin<Box<dyn Future<Output = Result<Pixbuf, Error>> + 'static>> {
let stream = stream.clone();
Box::pin(gio::GioFuture::new(&(), move |_obj, send| {
let cancellable = gio::Cancellable::new();
Self::from_stream_at_scale_async(
&stream,
width,
height,
preserve_aspect_ratio,
Some(&cancellable),
move |res| {
send.resolve(res);
},
);
cancellable
}))
}
#[allow(clippy::mut_from_ref)]
#[allow(clippy::missing_safety_doc)]
#[doc(alias = "gdk_pixbuf_get_pixels_with_length")]
pub unsafe fn pixels(&self) -> &mut [u8] {
let mut len = 0;
let ptr = ffi::gdk_pixbuf_get_pixels_with_length(self.to_glib_none().0, &mut len);
slice::from_raw_parts_mut(ptr, len as usize)
}
pub fn put_pixel(&self, x: u32, y: u32, red: u8, green: u8, blue: u8, alpha: u8) {
assert!(
x < self.width() as u32,
"x must be less than the pixbuf's width"
);
assert!(
y < self.height() as u32,
"y must be less than the pixbuf's height"
);
unsafe {
let x = x as usize;
let y = y as usize;
let n_channels = self.n_channels() as usize;
assert!(n_channels == 3 || n_channels == 4);
let rowstride = self.rowstride() as usize;
let pixels = self.pixels();
let pos = y * rowstride + x * n_channels;
pixels[pos] = red;
pixels[pos + 1] = green;
pixels[pos + 2] = blue;
if n_channels == 4 {
pixels[pos + 3] = alpha;
}
}
}
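    /// Reads the image format, width, and height of a file without fully
    /// decoding it, returning `None` if the format cannot be determined.
    ///
    /// ```no_run
    /// use gdk_pixbuf::Pixbuf;
    ///
    /// // A sketch; the file name is an assumption.
    /// if let Some((_format, width, height)) = Pixbuf::file_info("some_file.png") {
    ///     println!("{}x{}", width, height);
    /// }
    /// ```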
#[doc(alias = "gdk_pixbuf_get_file_info")]
pub fn file_info<T: AsRef<Path>>(filename: T) -> Option<(PixbufFormat, i32, i32)> {
unsafe {
let mut width = mem::MaybeUninit::uninit();
let mut height = mem::MaybeUninit::uninit();
let ret = ffi::gdk_pixbuf_get_file_info(
filename.as_ref().to_glib_none().0,
width.as_mut_ptr(),
height.as_mut_ptr(),
);
if !ret.is_null() {
Some((
from_glib_none(ret),
width.assume_init(),
height.assume_init(),
))
} else {
None
}
}
}
#[cfg(any(feature = "v2_32", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
#[doc(alias = "gdk_pixbuf_get_file_info_async")]
pub fn file_info_async<
P: IsA<gio::Cancellable>,
Q: FnOnce(Result<Option<(PixbufFormat, i32, i32)>, Error>) + Send + 'static,
T: AsRef<Path>,
>(
filename: T,
cancellable: Option<&P>,
callback: Q,
) {
let cancellable = cancellable.map(|p| p.as_ref());
let user_data: Box<Q> = Box::new(callback);
unsafe extern "C" fn get_file_info_async_trampoline<
Q: FnOnce(Result<Option<(PixbufFormat, i32, i32)>, Error>) + Send + 'static,
>(
_source_object: *mut glib::gobject_ffi::GObject,
res: *mut gio::ffi::GAsyncResult,
user_data: glib::ffi::gpointer,
) {
let mut error = ptr::null_mut();
let mut width = mem::MaybeUninit::uninit();
let mut height = mem::MaybeUninit::uninit();
let ret = ffi::gdk_pixbuf_get_file_info_finish(
res,
width.as_mut_ptr(),
height.as_mut_ptr(),
&mut error,
);
let result = if !error.is_null() {
Err(from_glib_full(error))
} else if ret.is_null() {
Ok(None)
} else {
Ok(Some((
from_glib_none(ret),
width.assume_init(),
height.assume_init(),
)))
};
let callback: Box<Q> = Box::from_raw(user_data as *mut _);
callback(result);
}
let callback = get_file_info_async_trampoline::<Q>;
unsafe {
ffi::gdk_pixbuf_get_file_info_async(
filename.as_ref().to_glib_none().0,
cancellable.to_glib_none().0,
Some(callback),
Box::into_raw(user_data) as *mut _,
);
}
}
#[cfg(any(feature = "v2_32", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
#[allow(clippy::type_complexity)]
pub fn file_info_async_future<T: AsRef<Path> + Clone + 'static>(
filename: T,
) -> Pin<Box<dyn Future<Output = Result<Option<(PixbufFormat, i32, i32)>, Error>> + 'static>>
{
Box::pin(gio::GioFuture::new(&(), move |_obj, send| {
let cancellable = gio::Cancellable::new();
Self::file_info_async(filename, Some(&cancellable), move |res| {
send.resolve(res);
});
cancellable
}))
}
#[doc(alias = "gdk_pixbuf_save_to_bufferv")]
pub fn save_to_bufferv(&self, type_: &str, options: &[(&str, &str)]) -> Result<Vec<u8>, Error> {
unsafe {
let mut buffer = ptr::null_mut();
let mut buffer_size = mem::MaybeUninit::uninit();
let mut error = ptr::null_mut();
let option_keys: Vec<&str> = options.iter().map(|o| o.0).collect();
let option_values: Vec<&str> = options.iter().map(|o| o.1).collect();
let _ = ffi::gdk_pixbuf_save_to_bufferv(
self.to_glib_none().0,
&mut buffer,
buffer_size.as_mut_ptr(),
type_.to_glib_none().0,
option_keys.to_glib_none().0,
option_values.to_glib_none().0,
&mut error,
);
if error.is_null() {
Ok(FromGlibContainer::from_glib_full_num(
buffer,
buffer_size.assume_init() as usize,
))
} else {
Err(from_glib_full(error))
}
}
}
#[cfg(any(feature = "v2_36", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_36")))]
pub fn save_to_streamv<P: IsA<gio::OutputStream>, Q: IsA<gio::Cancellable>>(
&self,
stream: &P,
type_: &str,
options: &[(&str, &str)],
cancellable: Option<&Q>,
) -> Result<(), Error> {
let cancellable = cancellable.map(|p| p.as_ref());
unsafe {
let mut error = ptr::null_mut();
let option_keys: Vec<&str> = options.iter().map(|o| o.0).collect();
let option_values: Vec<&str> = options.iter().map(|o| o.1).collect();
let _ = ffi::gdk_pixbuf_save_to_streamv(
self.to_glib_none().0,
stream.as_ref().to_glib_none().0,
type_.to_glib_none().0,
option_keys.to_glib_none().0,
option_values.to_glib_none().0,
cancellable.to_glib_none().0,
&mut error,
);
if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
}
}
}
#[cfg(any(feature = "v2_36", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_36")))]
#[doc(alias = "gdk_pixbuf_save_to_streamv_async")]
pub fn save_to_streamv_async<
P: IsA<gio::OutputStream>,
Q: IsA<gio::Cancellable>,
R: FnOnce(Result<(), Error>) + Send + 'static,
>(
&self,
stream: &P,
type_: &str,
options: &[(&str, &str)],
cancellable: Option<&Q>,
callback: R,
) {
let cancellable = cancellable.map(|p| p.as_ref());
let user_data: Box<R> = Box::new(callback);
unsafe extern "C" fn save_to_streamv_async_trampoline<
R: FnOnce(Result<(), Error>) + Send + 'static,
>(
_source_object: *mut glib::gobject_ffi::GObject,
res: *mut gio::ffi::GAsyncResult,
user_data: glib::ffi::gpointer,
) {
let mut error = ptr::null_mut();
let _ = ffi::gdk_pixbuf_save_to_stream_finish(res, &mut error);
let result = if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
};
let callback: Box<R> = Box::from_raw(user_data as *mut _);
callback(result);
}
let callback = save_to_streamv_async_trampoline::<R>;
unsafe {
let option_keys: Vec<&str> = options.iter().map(|o| o.0).collect();
let option_values: Vec<&str> = options.iter().map(|o| o.1).collect();
ffi::gdk_pixbuf_save_to_streamv_async(
self.to_glib_none().0,
stream.as_ref().to_glib_none().0,
type_.to_glib_none().0,
option_keys.to_glib_none().0,
option_values.to_glib_none().0,
cancellable.to_glib_none().0,
Some(callback),
Box::into_raw(user_data) as *mut _,
);
}
}
#[cfg(any(feature = "v2_36", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_36")))]
pub fn save_to_streamv_async_future<P: IsA<gio::OutputStream> + Clone + 'static>(
&self,
stream: &P,
type_: &str,
options: &[(&str, &str)],
) -> Pin<Box<dyn Future<Output = Result<(), Error>> + 'static>> {
let stream = stream.clone();
let type_ = String::from(type_);
let options = options
.iter()
.map(|&(k, v)| (String::from(k), String::from(v)))
.collect::<Vec<(String, String)>>();
Box::pin(gio::GioFuture::new(self, move |obj, send| {
let cancellable = gio::Cancellable::new();
let options = options
.iter()
.map(|&(ref k, ref v)| (k.as_str(), v.as_str()))
.collect::<Vec<(&str, &str)>>();
obj.save_to_streamv_async(
&stream,
&type_,
options.as_slice(),
Some(&cancellable),
move |res| {
send.resolve(res);
},
);
cancellable
}))
}
#[doc(alias = "gdk_pixbuf_savev")]
pub fn savev<T: AsRef<Path>>(
&self,
filename: T,
type_: &str,
options: &[(&str, &str)],
) -> Result<(), Error> {
unsafe {
let mut error = ptr::null_mut();
let option_keys: Vec<&str> = options.iter().map(|o| o.0).collect();
let option_values: Vec<&str> = options.iter().map(|o| o.1).collect();
let _ = ffi::gdk_pixbuf_savev(
self.to_glib_none().0,
filename.as_ref().to_glib_none().0,
type_.to_glib_none().0,
option_keys.to_glib_none().0,
option_values.to_glib_none().0,
&mut error,
);
if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
}
}
}
}
| 34.783784 | 100 | 0.510441 |
22dfbec16c62627f62de8429ce21e0a224ef3de2
| 4,353 |
use solana_sdk::pubkey::Pubkey;
#[derive(Debug)]
pub struct PubkeyBinCalculator16 {
// how many bits from the first 2 bytes to shift away to ignore when calculating bin
shift_bits: u32,
}
impl PubkeyBinCalculator16 {
const fn num_bits<T>() -> usize {
std::mem::size_of::<T>() * 8
}
fn log_2(x: u32) -> u32 {
assert!(x > 0);
Self::num_bits::<u32>() as u32 - x.leading_zeros() - 1
}
pub fn new(bins: usize) -> Self {
const MAX_BITS: u32 = 16;
assert!(bins > 0);
let max_plus_1 = 1 << MAX_BITS;
assert!(bins <= max_plus_1);
assert!(bins.is_power_of_two());
let bits = Self::log_2(bins as u32);
Self {
shift_bits: MAX_BITS - bits,
}
}
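    /// Maps a pubkey to a bin index using its first two bytes (big-endian).
    ///
    /// Worked example (illustrative, not from the original source): with 256
    /// bins `shift_bits` is `16 - log2(256) = 8`, so the bin is simply the
    /// first byte of the pubkey; with 65536 bins both bytes are used as-is.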
pub fn bin_from_pubkey(&self, pubkey: &Pubkey) -> usize {
let as_ref = pubkey.as_ref();
        (as_ref[0] as usize * 256 + as_ref[1] as usize) >> self.shift_bits
}
}
#[cfg(test)]
pub mod tests {
use super::*;
#[test]
fn test_pubkey_bins_log2() {
assert_eq!(PubkeyBinCalculator16::num_bits::<u8>(), 8);
assert_eq!(PubkeyBinCalculator16::num_bits::<u32>(), 32);
for i in 0..32 {
assert_eq!(PubkeyBinCalculator16::log_2(2u32.pow(i)), i);
}
}
#[test]
fn test_pubkey_bins() {
for i in 0..=16 {
let bins = 2u32.pow(i);
let calc = PubkeyBinCalculator16::new(bins as usize);
assert_eq!(calc.shift_bits, 16 - i, "i: {}", i);
}
}
#[test]
fn test_pubkey_bins_pubkeys() {
let mut pk = Pubkey::new(&[0; 32]);
for i in 0..=8 {
let bins = 2usize.pow(i);
let calc = PubkeyBinCalculator16::new(bins);
let shift_bits = calc.shift_bits - 8; // we are only dealing with first byte
pk.as_mut()[0] = 0;
assert_eq!(0, calc.bin_from_pubkey(&pk));
pk.as_mut()[0] = 0xff;
assert_eq!(bins - 1, calc.bin_from_pubkey(&pk));
for bin in 0..bins {
pk.as_mut()[0] = (bin << shift_bits) as u8;
assert_eq!(
bin,
calc.bin_from_pubkey(&pk),
"bin: {}/{}, bits: {}, val: {}",
bin,
bins,
shift_bits,
pk.as_ref()[0]
);
if bin > 0 {
pk.as_mut()[0] = ((bin << shift_bits) - 1) as u8;
assert_eq!(bin - 1, calc.bin_from_pubkey(&pk));
}
}
}
for i in 9..=16 {
let mut pk = Pubkey::new(&[0; 32]);
let bins = 2usize.pow(i);
let calc = PubkeyBinCalculator16::new(bins);
let shift_bits = calc.shift_bits;
pk.as_mut()[1] = 0;
assert_eq!(0, calc.bin_from_pubkey(&pk));
pk.as_mut()[0] = 0xff;
pk.as_mut()[1] = 0xff;
assert_eq!(bins - 1, calc.bin_from_pubkey(&pk));
let mut pk = Pubkey::new(&[0; 32]);
for bin in 0..bins {
let mut target = (bin << shift_bits) as u16;
pk.as_mut()[0] = (target / 256) as u8;
pk.as_mut()[1] = (target % 256) as u8;
assert_eq!(
bin,
calc.bin_from_pubkey(&pk),
"bin: {}/{}, bits: {}, val: {}",
bin,
bins,
shift_bits,
pk.as_ref()[0]
);
if bin > 0 {
target -= 1;
pk.as_mut()[0] = (target / 256) as u8;
pk.as_mut()[1] = (target % 256) as u8;
assert_eq!(bin - 1, calc.bin_from_pubkey(&pk));
}
}
}
}
#[test]
#[should_panic(expected = "bins.is_power_of_two()")]
fn test_pubkey_bins_illegal_bins3() {
PubkeyBinCalculator16::new(3);
}
#[test]
#[should_panic(expected = "bins <= max_plus_1")]
fn test_pubkey_bins_illegal_bins2() {
PubkeyBinCalculator16::new(65537);
}
#[test]
#[should_panic(expected = "bins > 0")]
fn test_pubkey_bins_illegal_bins() {
PubkeyBinCalculator16::new(0);
}
}
| 30.02069 | 88 | 0.469331 |
91505034c78466be912d131b97dcd3b1e15b3c92
| 6,736 |
//! The `rpc_service` module implements the Solana JSON RPC service.
use crate::bank_forks::BankForks;
use crate::cluster_info::ClusterInfo;
use crate::rpc::*;
use crate::service::Service;
use crate::storage_stage::StorageState;
use jsonrpc_core::MetaIoHandler;
use jsonrpc_http_server::{
hyper, AccessControlAllowOrigin, DomainsValidation, RequestMiddleware, RequestMiddlewareAction,
ServerBuilder,
};
use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, RwLock};
use std::thread::{self, sleep, Builder, JoinHandle};
use std::time::Duration;
use tokio::prelude::Future;
pub struct JsonRpcService {
thread_hdl: JoinHandle<()>,
#[cfg(test)]
pub request_processor: Arc<RwLock<JsonRpcRequestProcessor>>, // Used only by test_rpc_new()...
}
#[derive(Default)]
struct RpcRequestMiddleware {
ledger_path: PathBuf,
}
impl RpcRequestMiddleware {
pub fn new(ledger_path: PathBuf) -> Self {
Self { ledger_path }
}
fn not_found() -> hyper::Response<hyper::Body> {
hyper::Response::builder()
.status(hyper::StatusCode::NOT_FOUND)
.body(hyper::Body::empty())
.unwrap()
}
fn internal_server_error() -> hyper::Response<hyper::Body> {
hyper::Response::builder()
.status(hyper::StatusCode::INTERNAL_SERVER_ERROR)
.body(hyper::Body::empty())
.unwrap()
}
fn get(&self, filename: &str) -> RequestMiddlewareAction {
let filename = self.ledger_path.join(filename);
RequestMiddlewareAction::Respond {
should_validate_hosts: true,
response: Box::new(
tokio_fs::file::File::open(filename)
.and_then(|file| {
let buf: Vec<u8> = Vec::new();
tokio_io::io::read_to_end(file, buf)
.and_then(|item| Ok(hyper::Response::new(item.1.into())))
.or_else(|_| Ok(RpcRequestMiddleware::internal_server_error()))
})
.or_else(|_| Ok(RpcRequestMiddleware::not_found())),
),
}
}
}
impl RequestMiddleware for RpcRequestMiddleware {
fn on_request(&self, request: hyper::Request<hyper::Body>) -> RequestMiddlewareAction {
trace!("request uri: {}", request.uri());
match request.uri().path() {
"/snapshot.tar.bz2" => self.get("snapshot.tar.bz2"),
"/genesis.tar.bz2" => self.get("genesis.tar.bz2"),
_ => RequestMiddlewareAction::Proceed {
should_continue_on_invalid_cors: false,
request,
},
}
}
}
impl JsonRpcService {
pub fn new(
cluster_info: &Arc<RwLock<ClusterInfo>>,
rpc_addr: SocketAddr,
storage_state: StorageState,
config: JsonRpcConfig,
bank_forks: Arc<RwLock<BankForks>>,
ledger_path: &Path,
exit: &Arc<AtomicBool>,
) -> Self {
info!("rpc bound to {:?}", rpc_addr);
info!("rpc configuration: {:?}", config);
let request_processor = Arc::new(RwLock::new(JsonRpcRequestProcessor::new(
storage_state,
config,
bank_forks,
exit,
)));
let request_processor_ = request_processor.clone();
let cluster_info = cluster_info.clone();
let exit_ = exit.clone();
let ledger_path = ledger_path.to_path_buf();
let thread_hdl = Builder::new()
.name("solana-jsonrpc".to_string())
.spawn(move || {
let mut io = MetaIoHandler::default();
let rpc = RpcSolImpl;
io.extend_with(rpc.to_delegate());
let server =
ServerBuilder::with_meta_extractor(io, move |_req: &hyper::Request<hyper::Body>| Meta {
request_processor: request_processor_.clone(),
cluster_info: cluster_info.clone(),
}).threads(4)
.cors(DomainsValidation::AllowOnly(vec![
AccessControlAllowOrigin::Any,
]))
.request_middleware(RpcRequestMiddleware::new(ledger_path))
.start_http(&rpc_addr);
if let Err(e) = server {
warn!("JSON RPC service unavailable error: {:?}. \nAlso, check that port {} is not already in use by another application", e, rpc_addr.port());
return;
}
while !exit_.load(Ordering::Relaxed) {
sleep(Duration::from_millis(100));
}
server.unwrap().close();
})
.unwrap();
Self {
thread_hdl,
#[cfg(test)]
request_processor,
}
}
}
impl Service for JsonRpcService {
type JoinReturnType = ();
fn join(self) -> thread::Result<()> {
self.thread_hdl.join()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::contact_info::ContactInfo;
use crate::genesis_utils::{create_genesis_block, GenesisBlockInfo};
use solana_runtime::bank::Bank;
use solana_sdk::signature::KeypairUtil;
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
#[test]
fn test_rpc_new() {
let GenesisBlockInfo {
genesis_block,
mint_keypair,
..
} = create_genesis_block(10_000);
let exit = Arc::new(AtomicBool::new(false));
let bank = Bank::new(&genesis_block);
let cluster_info = Arc::new(RwLock::new(ClusterInfo::new_with_invalid_keypair(
ContactInfo::default(),
)));
let rpc_addr = SocketAddr::new(
IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)),
solana_netutil::find_available_port_in_range((10000, 65535)).unwrap(),
);
let bank_forks = Arc::new(RwLock::new(BankForks::new(bank.slot(), bank)));
let rpc_service = JsonRpcService::new(
&cluster_info,
rpc_addr,
StorageState::default(),
JsonRpcConfig::default(),
bank_forks,
&PathBuf::from("farf"),
&exit,
);
let thread = rpc_service.thread_hdl.thread();
assert_eq!(thread.name().unwrap(), "solana-jsonrpc");
assert_eq!(
10_000,
rpc_service
.request_processor
.read()
.unwrap()
.get_balance(&mint_keypair.pubkey())
);
exit.store(true, Ordering::Relaxed);
rpc_service.join().unwrap();
}
}
| 33.512438 | 163 | 0.556562 |
1662917d0f988c47a418f8d46a2d6727493721a8
| 2,751 |
use crate::common::*;
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub(crate) struct Matter {
pub(crate) name: String,
pub(crate) tags: Option<Vec<String>>,
pub(crate) links: Option<Vec<String>>,
}
impl Matter {
pub(crate) fn new(name: &str, tags: Option<Vec<String>>, links: Option<Vec<String>>) -> Self {
Self {
name: name.to_owned(),
tags,
links,
}
}
/// Return the default YAML frontmatter as bytes.
pub(crate) fn default(name: &str) -> Result<Vec<u8>> {
Ok(
Self::into(Matter::new(name, None, None))?
.as_bytes()
.to_owned(),
)
}
/// Parse a string `content` into a `Matter` instance.
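  ///
  /// A sketch mirroring the round-trip cases in the tests below:
  ///
  /// ```ignore
  /// let matter = Matter::from("---\nname: a\ntags: []\nlinks: []\n---\n")?;
  /// assert_eq!(matter.name, "a");
  /// ```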
pub(crate) fn from(content: &str) -> Result<Self> {
Ok(serde_yaml::from_str(
content
.strip_prefix("---\n")
.unwrap_or(content)
.strip_suffix("---\n")
.unwrap_or(content),
)?)
}
/// Parse a `Matter` struct into a string.
pub(crate) fn into(matter: Matter) -> Result<String> {
Ok(format!("{}---\n", serde_yaml::to_string(&matter)?))
}
}
#[cfg(test)]
mod tests {
use super::*;
fn cases<'a>() -> Vec<(Matter, &'a str)> {
vec![
(
Matter {
name: "a".into(),
tags: Some(vec![String::from("code"), String::from("software")]),
links: Some(vec![String::from("b"), String::from("c")]),
},
indoc! {"
---
name: a
tags:
- code
- software
links:
- b
- c
---
"},
),
(
Matter {
name: "b".into(),
tags: Some(vec![]),
links: Some(vec![String::from("b"), String::from("c")]),
},
indoc! {"
---
name: b
tags: []
links:
- b
- c
---
"},
),
(
Matter {
name: "c".into(),
tags: Some(vec![String::from("code"), String::from("software")]),
links: Some(vec![]),
},
indoc! {"
---
name: c
tags:
- code
- software
links: []
---
"},
),
(
Matter {
name: "d".into(),
tags: Some(vec![]),
links: Some(vec![]),
},
indoc! {"
---
name: d
tags: []
links: []
---
"},
),
]
}
#[test]
fn serialize() {
for (have, want) in cases() {
assert_eq!(Matter::into(have).unwrap(), want);
}
}
#[test]
fn deserialize() {
for (want, have) in cases() {
assert_eq!(Matter::from(have).unwrap(), want);
}
}
}
| 20.840909 | 96 | 0.427844 |
7ad1e44e027a265b4c80d679501f4cd4d368764b
| 71,809 |
/*
* MailSlurp API
*
* MailSlurp is an API for sending and receiving emails from dynamically allocated email addresses. It's designed for developers and QA teams to test applications, process inbound emails, send templated notifications, attachments, and more. ## Resources - [Homepage](https://www.mailslurp.com) - Get an [API KEY](https://app.mailslurp.com/sign-up/) - Generated [SDK Clients](https://www.mailslurp.com/docs/) - [Examples](https://github.com/mailslurp/examples) repository
*
* The version of the OpenAPI document: 6.5.2
*
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
/// struct for passing parameters to the method `create_inbox`
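///
/// Illustrative construction (not part of the generated client); optional
/// fields that are not needed can simply be left as `None`:
///
/// ```ignore
/// let params = CreateInboxParams {
///     allow_team_access: None,
///     description: None,
///     email_address: None,
///     expires_at: None,
///     expires_in: None,
///     favourite: None,
///     inbox_type: None,
///     name: Some("test-inbox".to_string()),
///     tags: None,
///     use_domain_pool: Some(true),
/// };
/// ```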
#[derive(Clone, Debug)]
pub struct CreateInboxParams {
/// DEPRECATED (team access is always true). Grant team access to this inbox and the emails that belong to it for team members of your organization.
pub allow_team_access: Option<bool>,
/// Optional description of the inbox for labelling purposes. Is shown in the dashboard and can be used with
pub description: Option<String>,
/// A custom email address to use with the inbox. Defaults to null. When null MailSlurp will assign a random email address to the inbox such as `[email protected]`. If you use the `useDomainPool` option when the email address is null it will generate an email address with a more varied domain ending such as `[email protected]` or `[email protected]`. When a custom email address is provided the address is split into a domain and the domain is queried against your user. If you have created the domain in the MailSlurp dashboard and verified it you can use any email address that ends with the domain. Note domain types must match the inbox type - so `SMTP` inboxes will only work with `SMTP` type domains. Send an email to this address and the inbox will receive and store it for you. To retrieve the email use the Inbox and Email Controller endpoints with the inbox ID.
pub email_address: Option<String>,
    /// Optional inbox expiration date. If null then this inbox is permanent and the emails in it won't be deleted. If an expiration date is provided or is required by your plan the inbox will be closed when the expiration time is reached. Expired inboxes still contain their emails but can no longer send or receive emails. An ExpiredInboxRecord is created when an inbox expires; the email address and inbox ID are recorded. The expiresAt property is a timestamp string in ISO DateTime Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX.
pub expires_at: Option<String>,
/// Number of milliseconds that inbox should exist for
pub expires_in: Option<i64>,
/// Is the inbox a favorite. Marking an inbox as a favorite is typically done in the dashboard for quick access or filtering
pub favourite: Option<bool>,
/// HTTP (default) or SMTP inbox type. HTTP inboxes are best for testing while SMTP inboxes are more reliable for public inbound email consumption. When using custom domains the domain type must match the inbox type. HTTP inboxes are processed by AWS SES while SMTP inboxes use a custom mail server running at `mx.mailslurp.com`.
pub inbox_type: Option<String>,
/// Optional name of the inbox. Displayed in the dashboard for easier search and used as the sender name when sending emails.
pub name: Option<String>,
/// Tags that inbox has been tagged with. Tags can be added to inboxes to group different inboxes within an account. You can also search for inboxes by tag in the dashboard UI.
pub tags: Option<Vec<String>>,
    /// Use the MailSlurp domain name pool with this inbox when creating the email address. Defaults to null. If enabled the inbox will be an email address with a domain randomly chosen from a list of the MailSlurp domains. This is useful when the default `@mailslurp.com` email addresses used with inboxes are blocked or considered spam by a provider or receiving service. When domain pool is enabled an email address will be generated ending in `@mailslurp.{world,info,xyz,...}`. This means a TLD is randomly selected from a list of `.biz`, `.info`, `.xyz`, etc. to add variance to the generated email addresses. When null or false MailSlurp uses the default behavior of `@mailslurp.com` or custom email address provided by the emailAddress field. Note this feature is only available for `HTTP` inbox types.
pub use_domain_pool: Option<bool>
}
/// struct for passing parameters to the method `create_inbox_ruleset`
#[derive(Clone, Debug)]
pub struct CreateInboxRulesetParams {
/// inboxId
pub inbox_id: String,
/// createInboxRulesetOptions
pub create_inbox_ruleset_options: crate::models::CreateInboxRulesetOptions
}
/// struct for passing parameters to the method `create_inbox_with_options`
#[derive(Clone, Debug)]
pub struct CreateInboxWithOptionsParams {
/// createInboxDto
pub create_inbox_dto: crate::models::CreateInboxDto
}
/// struct for passing parameters to the method `delete_inbox`
#[derive(Clone, Debug)]
pub struct DeleteInboxParams {
/// inboxId
pub inbox_id: String
}
/// struct for passing parameters to the method `get_all_inboxes`
#[derive(Clone, Debug)]
pub struct GetAllInboxesParams {
/// Optional filter by created before given date time
pub before: Option<String>,
/// Optionally filter results for favourites only
pub favourite: Option<bool>,
/// Optional page index in list pagination
pub page: Option<i32>,
/// Optionally filter by search words partial matching ID, tags, name, and email address
pub search: Option<String>,
/// Optional filter by created after given date time
pub since: Option<String>,
/// Optional page size in list pagination
pub size: Option<i32>,
/// Optional createdAt sort direction ASC or DESC
pub sort: Option<String>,
/// Optionally filter by tags. Will return inboxes that include given tags
pub tag: Option<String>,
/// DEPRECATED. Optionally filter by team access.
pub team_access: Option<bool>
}
/// struct for passing parameters to the method `get_emails`
#[derive(Clone, Debug)]
pub struct GetEmailsParams {
/// Id of inbox that emails belongs to
pub inbox_id: String,
/// Exclude emails received after this ISO 8601 date time
pub before: Option<String>,
/// delayTimeout
pub delay_timeout: Option<i64>,
/// Limit the result set, ordered by received date time sort direction. Maximum 100. For more listing options see the email controller
pub limit: Option<i32>,
/// Minimum acceptable email count. Will cause request to hang (and retry) until minCount is satisfied or retryTimeout is reached.
pub min_count: Option<i64>,
/// Maximum milliseconds to spend retrying inbox database until minCount emails are returned
pub retry_timeout: Option<i64>,
/// Exclude emails received before this ISO 8601 date time
pub since: Option<String>,
    /// Alias for limit. Assessed before any explicitly passed limit.
pub size: Option<i32>,
/// Sort the results by received date and direction ASC or DESC
pub sort: Option<String>,
/// unreadOnly
pub unread_only: Option<bool>
}
/// struct for passing parameters to the method `get_inbox`
#[derive(Clone, Debug)]
pub struct GetInboxParams {
/// inboxId
pub inbox_id: String
}
/// struct for passing parameters to the method `get_inbox_emails_paginated`
#[derive(Clone, Debug)]
pub struct GetInboxEmailsPaginatedParams {
/// Id of inbox that emails belongs to
pub inbox_id: String,
/// Optional filter by received before given date time
pub before: Option<String>,
/// Optional page index in inbox emails list pagination
pub page: Option<i32>,
/// Optional filter by received after given date time
pub since: Option<String>,
/// Optional page size in inbox emails list pagination
pub size: Option<i32>,
/// Optional createdAt sort direction ASC or DESC
pub sort: Option<String>
}
/// struct for passing parameters to the method `get_inbox_sent_emails`
#[derive(Clone, Debug)]
pub struct GetInboxSentEmailsParams {
/// inboxId
pub inbox_id: String,
/// Optional filter by sent before given date time
pub before: Option<String>,
/// Optional page index in inbox sent email list pagination
pub page: Option<i32>,
/// Optional sent email search
pub search_filter: Option<String>,
/// Optional filter by sent after given date time
pub since: Option<String>,
/// Optional page size in inbox sent email list pagination
pub size: Option<i32>,
/// Optional createdAt sort direction ASC or DESC
pub sort: Option<String>
}
/// struct for passing parameters to the method `get_inboxes`
#[derive(Clone, Debug)]
pub struct GetInboxesParams {
/// Optional filter by created before given date time
pub before: Option<String>,
/// Optional filter by created after given date time
pub since: Option<String>,
    /// Optional result size limit. Note an automatic limit of 100 results is applied. See the paginated `getAllInboxes` for larger queries.
pub size: Option<i32>,
/// Optional createdAt sort direction ASC or DESC
pub sort: Option<String>
}
/// struct for passing parameters to the method `get_organization_inboxes`
#[derive(Clone, Debug)]
pub struct GetOrganizationInboxesParams {
/// Optional filter by created before given date time
pub before: Option<String>,
/// Optional page index in list pagination
pub page: Option<i32>,
/// Optional search filter
pub search_filter: Option<String>,
/// Optional filter by created after given date time
pub since: Option<String>,
/// Optional page size in list pagination
pub size: Option<i32>,
/// Optional createdAt sort direction ASC or DESC
pub sort: Option<String>
}
/// struct for passing parameters to the method `list_inbox_rulesets`
#[derive(Clone, Debug)]
pub struct ListInboxRulesetsParams {
/// inboxId
pub inbox_id: String,
/// Optional filter by created before given date time
pub before: Option<String>,
/// Optional page index in inbox ruleset list pagination
pub page: Option<i32>,
/// Optional search filter
pub search_filter: Option<String>,
/// Optional filter by created after given date time
pub since: Option<String>,
/// Optional page size in inbox ruleset list pagination
pub size: Option<i32>,
/// Optional createdAt sort direction ASC or DESC
pub sort: Option<String>
}
/// struct for passing parameters to the method `list_inbox_tracking_pixels`
#[derive(Clone, Debug)]
pub struct ListInboxTrackingPixelsParams {
/// inboxId
pub inbox_id: String,
/// Optional filter by created before given date time
pub before: Option<String>,
/// Optional page index in inbox tracking pixel list pagination
pub page: Option<i32>,
/// Optional search filter
pub search_filter: Option<String>,
/// Optional filter by created after given date time
pub since: Option<String>,
/// Optional page size in inbox tracking pixel list pagination
pub size: Option<i32>,
/// Optional createdAt sort direction ASC or DESC
pub sort: Option<String>
}
/// struct for passing parameters to the method `send_email`
#[derive(Clone, Debug)]
pub struct SendEmailParams {
/// ID of the inbox you want to send the email from
pub inbox_id: String,
/// Options for the email
pub send_email_options: Option<crate::models::SendEmailOptions>
}
/// struct for passing parameters to the method `send_email_and_confirm`
#[derive(Clone, Debug)]
pub struct SendEmailAndConfirmParams {
/// ID of the inbox you want to send the email from
pub inbox_id: String,
/// Options for the email
pub send_email_options: Option<crate::models::SendEmailOptions>
}
/// struct for passing parameters to the method `send_test_email`
#[derive(Clone, Debug)]
pub struct SendTestEmailParams {
/// inboxId
pub inbox_id: String
}
/// struct for passing parameters to the method `set_inbox_favourited`
#[derive(Clone, Debug)]
pub struct SetInboxFavouritedParams {
/// inboxId
pub inbox_id: String,
/// setInboxFavouritedOptions
pub set_inbox_favourited_options: crate::models::SetInboxFavouritedOptions
}
/// struct for passing parameters to the method `update_inbox`
#[derive(Clone, Debug)]
pub struct UpdateInboxParams {
/// inboxId
pub inbox_id: String,
/// updateInboxOptions
pub update_inbox_options: crate::models::UpdateInboxOptions
}
/// struct for typed errors of method `create_inbox`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CreateInboxError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `create_inbox_ruleset`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CreateInboxRulesetError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `create_inbox_with_defaults`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CreateInboxWithDefaultsError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `create_inbox_with_options`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CreateInboxWithOptionsError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `delete_all_inboxes`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DeleteAllInboxesError {
Status401(),
Status403(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `delete_inbox`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DeleteInboxError {
Status401(),
Status403(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_all_inboxes`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetAllInboxesError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_emails`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetEmailsError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_inbox`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetInboxError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_inbox_emails_paginated`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetInboxEmailsPaginatedError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_inbox_sent_emails`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetInboxSentEmailsError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_inbox_tags`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetInboxTagsError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_inboxes`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetInboxesError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_organization_inboxes`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetOrganizationInboxesError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `list_inbox_rulesets`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ListInboxRulesetsError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `list_inbox_tracking_pixels`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ListInboxTrackingPixelsError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `send_email`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum SendEmailError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `send_email_and_confirm`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum SendEmailAndConfirmError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `send_test_email`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum SendTestEmailError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `set_inbox_favourited`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum SetInboxFavouritedError {
Status401(),
Status403(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `update_inbox`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum UpdateInboxError {
Status401(),
Status403(),
UnknownValue(serde_json::Value),
}
/// Create a new inbox with a randomized email address to send and receive from. Pass the emailAddress parameter if you wish to use a specific email address. Creating an inbox is required before sending or receiving emails. If writing tests, it is recommended that you create a new inbox during each test method so that it is unique and empty.
pub async fn create_inbox(configuration: &configuration::Configuration, params: CreateInboxParams) -> Result<crate::models::Inbox, Error<CreateInboxError>> {
// unbox the parameters
let allow_team_access = params.allow_team_access;
let description = params.description;
let email_address = params.email_address;
let expires_at = params.expires_at;
let expires_in = params.expires_in;
let favourite = params.favourite;
let inbox_type = params.inbox_type;
let name = params.name;
let tags = params.tags;
let use_domain_pool = params.use_domain_pool;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_str) = allow_team_access {
local_var_req_builder = local_var_req_builder.query(&[("allowTeamAccess", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = description {
local_var_req_builder = local_var_req_builder.query(&[("description", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = email_address {
local_var_req_builder = local_var_req_builder.query(&[("emailAddress", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = expires_at {
local_var_req_builder = local_var_req_builder.query(&[("expiresAt", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = expires_in {
local_var_req_builder = local_var_req_builder.query(&[("expiresIn", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = favourite {
local_var_req_builder = local_var_req_builder.query(&[("favourite", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = inbox_type {
local_var_req_builder = local_var_req_builder.query(&[("inboxType", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = name {
local_var_req_builder = local_var_req_builder.query(&[("name", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = tags {
local_var_req_builder = local_var_req_builder.query(&[("tags", &local_var_str.into_iter().map(|p| p.to_string()).collect::<Vec<String>>().join(",").to_string())]);
}
if let Some(ref local_var_str) = use_domain_pool {
local_var_req_builder = local_var_req_builder.query(&[("useDomainPool", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<CreateInboxError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
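// Illustrative sketch, not part of the generated client: one way the `create_inbox`
// call above might be used. Every optional field of `CreateInboxParams` is left as
// `None` so the API chooses its defaults; the inbox name "example-inbox" is a placeholder.
#[allow(dead_code)]
async fn example_create_inbox(configuration: &configuration::Configuration) -> Result<crate::models::Inbox, Error<CreateInboxError>> {
    let params = CreateInboxParams {
        allow_team_access: None,
        description: None,
        email_address: None,
        expires_at: None,
        expires_in: None,
        favourite: None,
        inbox_type: None,
        name: Some("example-inbox".to_string()),
        tags: None,
        use_domain_pool: None,
    };
    create_inbox(configuration, params).await
}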
/// Create a new inbox rule for forwarding, blocking, and allowing emails when sending and receiving
pub async fn create_inbox_ruleset(configuration: &configuration::Configuration, params: CreateInboxRulesetParams) -> Result<crate::models::InboxRulesetDto, Error<CreateInboxRulesetError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let create_inbox_ruleset_options = params.create_inbox_ruleset_options;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/rulesets", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&create_inbox_ruleset_options);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<CreateInboxRulesetError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
pub async fn create_inbox_with_defaults(configuration: &configuration::Configuration) -> Result<crate::models::Inbox, Error<CreateInboxWithDefaultsError>> {
// unbox the parameters
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/withDefaults", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<CreateInboxWithDefaultsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Additional endpoint that allows inbox creation with request body options. Can be more flexible than other methods for some clients.
pub async fn create_inbox_with_options(configuration: &configuration::Configuration, params: CreateInboxWithOptionsParams) -> Result<crate::models::Inbox, Error<CreateInboxWithOptionsError>> {
// unbox the parameters
let create_inbox_dto = params.create_inbox_dto;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/withOptions", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&create_inbox_dto);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<CreateInboxWithOptionsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Permanently delete all inboxes and associated email addresses. This will also delete all emails within the inboxes. Be careful as inboxes cannot be recovered once deleted. Note: deleting inboxes will not impact your usage limits. Monthly inbox creation limits are based on how many inboxes were created in the last 30 days, not how many inboxes you currently have.
pub async fn delete_all_inboxes(configuration: &configuration::Configuration) -> Result<(), Error<DeleteAllInboxesError>> {
// unbox the parameters
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes", configuration.base_path);
let mut local_var_req_builder = local_var_client.delete(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
Ok(())
} else {
let local_var_entity: Option<DeleteAllInboxesError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Permanently delete an inbox and associated email address as well as all emails within the given inbox. This action cannot be undone. Note: deleting an inbox will not affect your account usage. Monthly inbox usage is based on how many inboxes you create within 30 days, not how many exist at time of request.
pub async fn delete_inbox(configuration: &configuration::Configuration, params: DeleteInboxParams) -> Result<(), Error<DeleteInboxError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.delete(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
Ok(())
} else {
let local_var_entity: Option<DeleteInboxError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// List inboxes in paginated form. The results are available on the `content` property of the returned object. This method allows for page index (zero based), page size (how many results to return), and a sort direction (based on createdAt time). You can also filter by whether an inbox is favorited or use an email address pattern. This method is the recommended way to query inboxes. The alternative `getInboxes` method returns a full list of inboxes but is limited to 100 results.
pub async fn get_all_inboxes(configuration: &configuration::Configuration, params: GetAllInboxesParams) -> Result<crate::models::PageInboxProjection, Error<GetAllInboxesError>> {
// unbox the parameters
let before = params.before;
let favourite = params.favourite;
let page = params.page;
let search = params.search;
let since = params.since;
let size = params.size;
let sort = params.sort;
let tag = params.tag;
let team_access = params.team_access;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/paginated", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = favourite {
local_var_req_builder = local_var_req_builder.query(&[("favourite", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = page {
local_var_req_builder = local_var_req_builder.query(&[("page", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = search {
local_var_req_builder = local_var_req_builder.query(&[("search", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = tag {
local_var_req_builder = local_var_req_builder.query(&[("tag", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = team_access {
local_var_req_builder = local_var_req_builder.query(&[("teamAccess", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetAllInboxesError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
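// Illustrative sketch, not part of the generated client: fetch the first page of
// inboxes, newest first, using the paginated endpoint above. The page size of 20
// and the "DESC" sort direction are placeholder choices.
#[allow(dead_code)]
async fn example_list_inboxes_page(configuration: &configuration::Configuration) -> Result<crate::models::PageInboxProjection, Error<GetAllInboxesError>> {
    let params = GetAllInboxesParams {
        before: None,
        favourite: None,
        page: Some(0),
        search: None,
        since: None,
        size: Some(20),
        sort: Some("DESC".to_string()),
        tag: None,
        team_access: None,
    };
    get_all_inboxes(configuration, params).await
}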
/// List emails that an inbox has received. Only emails that are sent to the inbox's email address will appear in the inbox. It may take several seconds for any email you send to an inbox's email address to appear in the inbox. To make this endpoint wait for a minimum number of emails, use the `minCount` parameter. The server will retry the inbox database until the `minCount` is satisfied or the `retryTimeout` is reached.
pub async fn get_emails(configuration: &configuration::Configuration, params: GetEmailsParams) -> Result<Vec<crate::models::EmailPreview>, Error<GetEmailsError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let before = params.before;
let delay_timeout = params.delay_timeout;
let limit = params.limit;
let min_count = params.min_count;
let retry_timeout = params.retry_timeout;
let since = params.since;
let size = params.size;
let sort = params.sort;
let unread_only = params.unread_only;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/emails", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = delay_timeout {
local_var_req_builder = local_var_req_builder.query(&[("delayTimeout", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = limit {
local_var_req_builder = local_var_req_builder.query(&[("limit", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = min_count {
local_var_req_builder = local_var_req_builder.query(&[("minCount", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = retry_timeout {
local_var_req_builder = local_var_req_builder.query(&[("retryTimeout", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = unread_only {
local_var_req_builder = local_var_req_builder.query(&[("unreadOnly", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetEmailsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
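// Illustrative sketch, not part of the generated client: block until at least one
// unread email arrives by combining `min_count` with `retry_timeout`, as described
// in the doc comment on `get_emails`. The 30 second timeout is a placeholder value.
#[allow(dead_code)]
async fn example_wait_for_first_email(configuration: &configuration::Configuration, inbox_id: String) -> Result<Vec<crate::models::EmailPreview>, Error<GetEmailsError>> {
    let params = GetEmailsParams {
        inbox_id,
        before: None,
        delay_timeout: None,
        limit: None,
        min_count: Some(1),
        retry_timeout: Some(30_000),
        since: None,
        size: None,
        sort: Some("ASC".to_string()),
        unread_only: Some(true),
    };
    get_emails(configuration, params).await
}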
/// Returns an inbox's properties, including its email address and ID.
pub async fn get_inbox(configuration: &configuration::Configuration, params: GetInboxParams) -> Result<crate::models::Inbox, Error<GetInboxError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetInboxError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Get a paginated list of emails in an inbox. Does not hold connections open.
pub async fn get_inbox_emails_paginated(configuration: &configuration::Configuration, params: GetInboxEmailsPaginatedParams) -> Result<crate::models::PageEmailPreview, Error<GetInboxEmailsPaginatedError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let before = params.before;
let page = params.page;
let since = params.since;
let size = params.size;
let sort = params.sort;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/emails/paginated", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = page {
local_var_req_builder = local_var_req_builder.query(&[("page", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetInboxEmailsPaginatedError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Returns an inbox's sent email receipts. Call individual sent email endpoints for more details. Note for privacy reasons the full body of sent emails is never stored. An MD5 hash hex is available for comparison instead.
pub async fn get_inbox_sent_emails(configuration: &configuration::Configuration, params: GetInboxSentEmailsParams) -> Result<crate::models::PageSentEmailProjection, Error<GetInboxSentEmailsError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let before = params.before;
let page = params.page;
let search_filter = params.search_filter;
let since = params.since;
let size = params.size;
let sort = params.sort;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/sent", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = page {
local_var_req_builder = local_var_req_builder.query(&[("page", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = search_filter {
local_var_req_builder = local_var_req_builder.query(&[("searchFilter", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetInboxSentEmailsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Get all inbox tags
pub async fn get_inbox_tags(configuration: &configuration::Configuration) -> Result<Vec<String>, Error<GetInboxTagsError>> {
// unbox the parameters
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/tags", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetInboxTagsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// List the inboxes you have created. Note use of the more advanced `getAllInboxes` is recommended; it allows paginated access using page, size, and sort parameters.
pub async fn get_inboxes(configuration: &configuration::Configuration, params: GetInboxesParams) -> Result<Vec<crate::models::Inbox>, Error<GetInboxesError>> {
// unbox the parameters
let before = params.before;
let since = params.since;
let size = params.size;
let sort = params.sort;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetInboxesError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// List organization inboxes in paginated form. These are inboxes created with `allowTeamAccess` flag enabled. Organization inboxes are `readOnly` for non-admin users. The results are available on the `content` property of the returned object. This method allows for page index (zero based), page size (how many results to return), and a sort direction (based on createdAt time).
pub async fn get_organization_inboxes(configuration: &configuration::Configuration, params: GetOrganizationInboxesParams) -> Result<crate::models::PageOrganizationInboxProjection, Error<GetOrganizationInboxesError>> {
// unbox the parameters
let before = params.before;
let page = params.page;
let search_filter = params.search_filter;
let since = params.since;
let size = params.size;
let sort = params.sort;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/organization", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = page {
local_var_req_builder = local_var_req_builder.query(&[("page", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = search_filter {
local_var_req_builder = local_var_req_builder.query(&[("searchFilter", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetOrganizationInboxesError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// List all rulesets attached to an inbox
pub async fn list_inbox_rulesets(configuration: &configuration::Configuration, params: ListInboxRulesetsParams) -> Result<crate::models::PageInboxRulesetDto, Error<ListInboxRulesetsError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let before = params.before;
let page = params.page;
let search_filter = params.search_filter;
let since = params.since;
let size = params.size;
let sort = params.sort;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/rulesets", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = page {
local_var_req_builder = local_var_req_builder.query(&[("page", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = search_filter {
local_var_req_builder = local_var_req_builder.query(&[("searchFilter", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<ListInboxRulesetsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// List all tracking pixels sent from an inbox
pub async fn list_inbox_tracking_pixels(configuration: &configuration::Configuration, params: ListInboxTrackingPixelsParams) -> Result<crate::models::PageTrackingPixelProjection, Error<ListInboxTrackingPixelsError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let before = params.before;
let page = params.page;
let search_filter = params.search_filter;
let since = params.since;
let size = params.size;
let sort = params.sort;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/tracking-pixels", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = before {
local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = page {
local_var_req_builder = local_var_req_builder.query(&[("page", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = search_filter {
local_var_req_builder = local_var_req_builder.query(&[("searchFilter", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = since {
local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = size {
local_var_req_builder = local_var_req_builder.query(&[("size", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<ListInboxTrackingPixelsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Send an email from an inbox's email address. The request body should contain the `SendEmailOptions` that include recipients, attachments, body, etc. See `SendEmailOptions` for all available properties. Note the `inboxId` refers to the inbox's id, not the inbox's email address. See https://www.mailslurp.com/guides/ for more information on how to send emails. This method does not return a sent email entity due to legacy reasons. To send and get a sent email as the returned response, use the sister method `sendEmailAndConfirm`.
pub async fn send_email(configuration: &configuration::Configuration, params: SendEmailParams) -> Result<(), Error<SendEmailError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let send_email_options = params.send_email_options;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&send_email_options);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
Ok(())
} else {
let local_var_entity: Option<SendEmailError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Sister method of the standard `sendEmail` method, with the benefit of returning a `SentEmail` entity that confirms the successful sending of the email and links to the sent object created for it.
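///
/// A brief sketch (illustrative; `SendEmailAndConfirmParams` construction mirrors the `send_email`
/// example above, and the `id` field on the returned `SentEmailDto` is an assumption):
///
/// ```ignore
/// let params = inbox_controller_api::SendEmailAndConfirmParams {
///     inbox_id: inbox_id.clone(),
///     send_email_options: options,
/// };
/// let sent = inbox_controller_api::send_email_and_confirm(configuration, params).await?;
/// println!("sent email id: {:?}", sent.id);
/// ```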
pub async fn send_email_and_confirm(configuration: &configuration::Configuration, params: SendEmailAndConfirmParams) -> Result<crate::models::SentEmailDto, Error<SendEmailAndConfirmError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let send_email_options = params.send_email_options;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/confirm", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&send_email_options);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<SendEmailAndConfirmError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Send an inbox a test email to test email receiving is working
pub async fn send_test_email(configuration: &configuration::Configuration, params: SendTestEmailParams) -> Result<(), Error<SendTestEmailError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/send-test-email", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
Ok(())
} else {
let local_var_entity: Option<SendTestEmailError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Set and return new favourite state for an inbox
pub async fn set_inbox_favourited(configuration: &configuration::Configuration, params: SetInboxFavouritedParams) -> Result<crate::models::Inbox, Error<SetInboxFavouritedError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let set_inbox_favourited_options = params.set_inbox_favourited_options;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}/favourite", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.put(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&set_inbox_favourited_options);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<SetInboxFavouritedError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Update editable fields on an inbox
pub async fn update_inbox(configuration: &configuration::Configuration, params: UpdateInboxParams) -> Result<crate::models::Inbox, Error<UpdateInboxError>> {
// unbox the parameters
let inbox_id = params.inbox_id;
let update_inbox_options = params.update_inbox_options;
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/inboxes/{inboxId}", configuration.base_path, inboxId=inbox_id);
let mut local_var_req_builder = local_var_client.patch(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&update_inbox_options);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<UpdateInboxError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
| 47.211703 | 878 | 0.725048 |
de0401320759420c4c1f1c82f9cc39d5f3cfd928
| 110,695 |
use serde::{Deserialize, Serialize};
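/// Order Cancel/Replace Request (MsgType = G): modifies the parameters of a previously
/// submitted order, identified by `orig_cl_ord_id` (tag 41).
///
/// A minimal construction sketch (field values are illustrative; it relies on the `Default`
/// derive below, i.e. on every component type implementing `Default`):
///
/// ```ignore
/// let request = OrderCancelReplaceRequest {
///     orig_cl_ord_id: "ORD-0001".to_string(), // tag 41: the order being replaced
///     cl_ord_id: "ORD-0002".to_string(),      // tag 11: id of this replacement request
///     side: Side::Buy,                        // tag 54: should match the original order
///     ord_type: OrdType::Limit,               // tag 40
///     price: Some(101.25),                    // tag 44: required for limit order types
///     ..Default::default()
/// };
/// // Serde serialization emits the FIX tag numbers ("41", "11", "54", "40", "44", ...) as field names.
/// ```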
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct OrderCancelReplaceRequest {
/// MsgType = G
#[serde(flatten)]
pub standard_message_header: super::super::standard_message_header::StandardMessageHeader<'G', ' '>,
/// Unique identifier of most recent order as assigned by sell-side (broker, exchange, ECN).
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "37")]
pub order_id: Option<String>,
/// Insert here the set of "Parties" (firm identification) fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub parties: Option<super::super::parties::Parties>,
/// TradeOriginationDate
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "229")]
pub trade_origination_date: Option<fix_common::LocalMktDate>,
/// TradeDate
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "75")]
pub trade_date: Option<fix_common::LocalMktDate>,
/// ClOrdID of the previous non rejected order (NOT the initial order of the day) when canceling or replacing an order.
#[serde(rename = "41")]
pub orig_cl_ord_id: String,
/// Unique identifier of replacement order as assigned by institution or by the intermediary with closest association with the
/// investor. Note that this identifier will be used in the <a href="tag_11_ClOrdID.html" target="bottom">ClOrdID (11)</a> field of the Cancel Reject message if the replacement request is rejected.
#[serde(rename = "11")]
pub cl_ord_id: String,
/// SecondaryClOrdID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "526")]
pub secondary_cl_ord_id: Option<String>,
/// ClOrdLinkID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "583")]
pub cl_ord_link_id: Option<String>,
/// Required for List Orders
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "66")]
pub list_id: Option<String>,
/// TransactTime of the last state change that occurred to the original order
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "586")]
pub orig_ord_mod_time: Option<fix_common::UTCTimestamp>,
/// Account
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1")]
pub account: Option<String>,
/// AcctIDSource
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "660")]
pub acct_id_source: Option<AcctIDSource>,
/// AccountType
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "581")]
pub account_type: Option<AccountType>,
/// DayBookingInst
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "589")]
pub day_booking_inst: Option<DayBookingInst>,
/// BookingUnit
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "590")]
pub booking_unit: Option<BookingUnit>,
/// PreallocMethod
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "591")]
pub prealloc_method: Option<PreallocMethod>,
/// Used to assign an overall allocation id to the block of preallocations
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "70")]
pub alloc_id: Option<String>,
/// Number of repeating groups for pre-trade allocation
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "78")]
pub allocs: Option<fix_common::RepeatingValues<Alloc>>,
/// SettlType
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "63")]
pub settl_type: Option<SettlType>,
/// Takes precedence over the <a href="tag_63_SettlType.html" target="bottom">SettlType (63)</a> value and is conditionally required/omitted for specific <a href="tag_63_SettlType.html" target="bottom">SettlType (63)</a> values.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "64")]
pub settl_date: Option<fix_common::LocalMktDate>,
/// CashMargin
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "544")]
pub cash_margin: Option<CashMargin>,
/// ClearingFeeIndicator
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "635")]
pub clearing_fee_indicator: Option<ClearingFeeIndicator>,
/// HandlInst
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "21")]
pub handl_inst: Option<HandlInst>,
/// Can contain multiple instructions, space delimited. Replacement order must be created with new parameters (i.e. original order
/// values will not be brought forward to replacement order unless redefined within this message).
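/// For example (illustrative), a space-delimited tag-value encoding of "18=G 6" carries both "All or none" and "Participate don't initiate".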
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "18")]
pub exec_inst: Option<fix_common::SeparatedValues<ExecInst>>,
/// MinQty
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "110")]
pub min_qty: Option<f64>,
/// MatchIncrement
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "1089")]
pub match_increment: Option<f64>,
/// MaxPriceLevels
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "1090")]
pub max_price_levels: Option<i32>,
/// Insert here the set of "DisplayInstruction" fields defined in "common components of application messages".
#[serde(flatten)]
pub display_instruction: Option<super::super::display_instruction::DisplayInstruction>,
/// (Deprecated in FIX.5.0)
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "111")]
pub max_floor: Option<f64>,
/// ExDestination
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "100")]
pub ex_destination: Option<String>,
/// ExDestinationIDSource
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1133")]
pub ex_destination_id_source: Option<ExDestinationIDSource>,
/// Specifies the number of repeating TradingSessionIDs
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "386")]
pub trading_sessions: Option<fix_common::RepeatingValues<TradingSession>>,
/// Insert here the set of "Instrument" fields defined in "Common Components of Application Messages". Must match original order
#[serde(flatten)]
pub instrument: super::super::instrument::Instrument,
/// Insert here the set of "FinancingDetails" fields defined in "Common Components of Application Messages". Must match original
/// order
#[serde(flatten)]
pub financing_details: Option<super::super::financing_details::FinancingDetails>,
/// Number of underlyings
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "711")]
pub underlyings: Option<fix_common::RepeatingValues<super::super::underlying_instrument::UnderlyingInstrument>>,
/// Should match original order's side, however, if bilaterally agreed to the following groups could potentially be interchanged:
/// (Buy and Buy Minus); (Sell, Sell Plus, Sell Short, and Sell Short Exempt); (Cross, Cross Short, and Cross Short Exempt)
#[serde(rename = "54")]
pub side: Side,
/// Time this order request was initiated/released by the trader or trading system.
#[serde(rename = "60")]
pub transact_time: fix_common::UTCTimestamp,
/// QtyType
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "854")]
pub qty_type: Option<QtyType>,
/// Insert here the set of "OrderQtyData" fields defined in "Common Components of Application Messages". Note: <a href="tag_38_OrderQty.html" target="bottom">OrderQty (38)</a> value should be the "Total Intended Order Quantity" (including the amount already executed for this chain of orders)
#[serde(flatten)]
pub order_qty_data: super::super::order_qty_data::OrderQtyData,
/// OrdType
#[serde(rename = "40")]
pub ord_type: OrdType,
/// PriceType
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "423")]
pub price_type: Option<PriceType>,
/// Required for limit OrdTypes. For F/X orders, should be the "all-in" rate (spot rate adjusted for forward points). Can be used
/// to specify a limit price for a pegged order, previously indicated, etc.
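/// For example (illustrative numbers): a 1.1000 spot with +0.0025 of forward points would be sent as an all-in limit price of 1.1025.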
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "44")]
pub price: Option<f64>,
/// PriceProtectionScope
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1092")]
pub price_protection_scope: Option<PriceProtectionScope>,
/// Required for <a href="tag_40_OrdType.html" target="bottom">OrdType (40)</a> = "Stop" or <a href="tag_40_OrdType.html" target="bottom">OrdType (40)</a> = "Stop limit".
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "99")]
pub stop_px: Option<f64>,
/// Insert here the set of "TriggeringInstruction" fields defined in "common components of application messages".
#[serde(flatten)]
pub triggering_instruction: Option<super::super::triggering_instruction::TriggeringInstruction>,
/// Insert here the set of "SpreadOrBenchmarkCurveData" (Fixed Income spread or benchmark curve) fields defined in "Common Components
/// of Application Messages".
#[serde(flatten)]
pub spread_or_benchmark_curve_data: Option<super::super::spread_or_benchmark_curve_data::SpreadOrBenchmarkCurveData>,
/// Insert here the set of "YieldData" (yield-related) fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub yield_data: Option<super::super::yield_data::YieldData>,
/// Insert here the set of "PegInstruction" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub peg_instructions: Option<super::super::peg_instructions::PegInstructions>,
/// Insert here the set of "DiscretionInstruction" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub discretion_instructions: Option<super::super::discretion_instructions::DiscretionInstructions>,
/// The target strategy of the order
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "847")]
pub target_strategy: Option<TargetStrategy>,
/// Indicates number of strategy parameters
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "957")]
pub strategy_parameters: Option<fix_common::RepeatingValues<StrategyParameter>>,
/// (Deprecated in FIX.5.0)For further specification of the TargetStrategy
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "848")]
pub target_strategy_parameters: Option<String>,
/// (Deprecated in FIX.5.0)Mandatory for a TargetStrategy=Participate order and specifies the target participation rate. For other
/// order types optionally specifies a volume limit (i.e. do not be more than this percent of the market volume)
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "849")]
pub participation_rate: Option<f32>,
/// ComplianceID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "376")]
pub compliance_id: Option<String>,
/// SolicitedFlag
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "377")]
pub solicited_flag: Option<SolicitedFlag>,
/// Must match original order.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "15")]
pub currency: Option<Currency>,
/// Absence of this field indicates Day order
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "59")]
pub time_in_force: Option<TimeInForce>,
/// Can specify the time at which the order should be considered valid
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "168")]
pub effective_time: Option<fix_common::UTCTimestamp>,
/// Conditionally required if <a href="tag_59_TimeInForce.html" target="bottom">TimeInForce (59)</a> = GTD and <a href="tag_126_ExpireTime.html" target="bottom">ExpireTime (126)</a> is not specified.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "432")]
pub expire_date: Option<fix_common::LocalMktDate>,
/// Conditionally required if <a href="tag_59_TimeInForce.html" target="bottom">TimeInForce (59)</a> = GTD and <a href="tag_432_ExpireDate.html" target="bottom">ExpireDate (432)</a> is not specified.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "126")]
pub expire_time: Option<fix_common::UTCTimestamp>,
/// States whether executions are booked out or accumulated on a partially filled GT order
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "427")]
pub gt_booking_inst: Option<GTBookingInst>,
/// Insert here the set of "CommissionData" fields defined in "Common Components of Application Messages".
#[serde(flatten)]
pub commission_data: Option<super::super::commission_data::CommissionData>,
/// OrderCapacity
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "528")]
pub order_capacity: Option<OrderCapacity>,
/// OrderRestrictions
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "529")]
pub order_restrictions: Option<fix_common::SeparatedValues<OrderRestrictions>>,
/// PreTradeAnonymity
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1091")]
pub pre_trade_anonymity: Option<fix_common::Boolean>,
/// CustOrderCapacity
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "582")]
pub cust_order_capacity: Option<CustOrderCapacity>,
/// Indicates that broker is requested to execute a Forex accommodation trade in conjunction with the security trade.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "121")]
pub forex_req: Option<ForexReq>,
/// Required if <a href="tag_121_ForexReq.html" target="bottom">ForexReq (121)</a> = Y.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "120")]
pub settl_currency: Option<SettlCurrency>,
/// Method for booking out this order. Used when notifying a broker that an order to be settled by that broker is to be booked
/// out as an OTC derivative (e.g. CFD or similar). Absence of this field implies regular booking.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "775")]
pub booking_type: Option<BookingType>,
/// Text
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "58")]
pub text: Option<String>,
/// EncodedTextLen (354): must be set if the <a href="tag_355_EncodedText.html" target="bottom">EncodedText (355)</a> field is specified and must immediately precede it.
/// EncodedText (355): encoded (non-ASCII characters) representation of the <a href="tag_58_Text.html" target="bottom">Text (58)</a> field in the encoded format specified via the <a href="tag_347_MessageEncoding.html" target="bottom">MessageEncoding (347)</a> field.
#[serde(rename = "354")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(alias = "355")]
pub encoded_text: Option<fix_common::EncodedText<355>>,
/// (Deprecated in FIX.5.0)Can be used with <a href="tag_40_OrdType.html" target="bottom">OrdType (40)</a> = "Forex - Swap" to specify the "value date" for the future portion of a F/X swap.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "193")]
pub settl_date_2: Option<fix_common::LocalMktDate>,
/// (Deprecated in FIX.5.0)Can be used with <a href="tag_40_OrdType.html" target="bottom">OrdType (40)</a> = "Forex - Swap" to specify the order quantity for the future portion of a F/X swap.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "192")]
pub order_qty_2: Option<f64>,
/// (Deprecated in FIX.5.0)Can be used with <a href="tag_40_OrdType.html" target="bottom">OrdType (40)</a> = "Forex - Swap" to specify the price for the future portion of a F/X swap.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "640")]
pub price_2: Option<f64>,
/// For use in derivatives omnibus accounting
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "77")]
pub position_effect: Option<PositionEffect>,
/// For use with derivatives, such as options
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "203")]
pub covered_or_uncovered: Option<CoveredOrUncovered>,
/// (Deprecated in FIX.5.0)
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "210")]
pub max_show: Option<f64>,
/// Required for short sell orders
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "114")]
pub locate_reqd: Option<LocateReqd>,
/// For CIV - Optional
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "480")]
pub cancellation_rights: Option<CancellationRights>,
/// MoneyLaunderingStatus
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "481")]
pub money_laundering_status: Option<MoneyLaunderingStatus>,
/// Reference to <a href="message_Registration_Instructions_o.html" target="main">Registration Instructions (o)</a> message for this Order.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "513")]
pub regist_id: Option<String>,
/// Supplementary registration information for this Order
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "494")]
pub designation: Option<String>,
/// ManualOrderIndicator
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1028")]
pub manual_order_indicator: Option<fix_common::Boolean>,
/// CustDirectedOrder
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1029")]
pub cust_directed_order: Option<fix_common::Boolean>,
/// ReceivedDeptID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1030")]
pub received_dept_id: Option<String>,
/// CustOrderHandlingInst
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1031")]
pub cust_order_handling_inst: Option<fix_common::SeparatedValues<CustOrderHandlingInst>>,
/// OrderHandlingInstSource
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "1032")]
pub order_handling_inst_source: Option<OrderHandlingInstSource>,
/// TrdRegTimestamps
#[serde(flatten)]
pub trd_reg_timestamps: Option<super::super::trd_reg_timestamps::TrdRegTimestamps>,
/// Standard Message Trailer
#[serde(flatten)]
pub standard_message_trailer: super::super::standard_message_trailer::StandardMessageTrailer,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct Alloc {
/// Required if <a href="tag_78_NoAllocs.html" target="bottom">NoAllocs (78)</a> > 0. Must be first field in repeating group.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "79")]
pub alloc_account: Option<String>,
/// AllocAcctIDSource
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "661")]
pub alloc_acct_id_source: Option<AllocAcctIDSource>,
/// AllocSettlCurrency
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "736")]
pub alloc_settl_currency: Option<AllocSettlCurrency>,
/// IndividualAllocID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "467")]
pub individual_alloc_id: Option<String>,
/// AllocQty
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(deserialize_with = "fix_common::workarounds::from_opt_str")]// https://github.com/serde-rs/serde/issues/1183
#[serde(default)]
#[serde(rename = "80")]
pub alloc_qty: Option<f64>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct TradingSession {
/// Required if <a href="tag_386_NoTradingSessions.html" target="bottom">NoTradingSessions (386)</a> is > 0.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "336")]
pub trading_session_id: Option<String>,
/// TradingSessionSubID
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "625")]
pub trading_session_sub_id: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]
pub struct StrategyParameter {
/// Name of parameter
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "958")]
pub strategy_parameter_name: Option<String>,
/// Datatype of the parameter.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "959")]
pub strategy_parameter_type: Option<StrategyParameterType>,
/// Value of the parameter
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "960")]
pub strategy_parameter_value: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum AcctIDSource {
/// BIC
#[serde(rename = "1")]
Bic,
/// SID code
#[serde(rename = "2")]
SidCode,
/// TFM (GSPTA)
#[serde(rename = "3")]
Tfm,
/// OMGEO (AlertID)
#[serde(rename = "4")]
Omgeo,
/// DTCC code
#[serde(rename = "5")]
DtccCode,
/// Other (custom or proprietary)
#[serde(rename = "99")]
Other,
}
impl Default for AcctIDSource {
fn default() -> Self {
AcctIDSource::Bic
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum AccountType {
/// Account is carried on customer Side of Books
#[serde(rename = "1")]
AccountIsCarriedOnCustomerSideOfBooks,
/// Account is carried on non-Customer Side of books
#[serde(rename = "2")]
AccountIsCarriedOnNonCustomerSideOfBooks,
/// House Trader
#[serde(rename = "3")]
HouseTrader,
/// Floor Trader
#[serde(rename = "4")]
FloorTrader,
/// Account is carried on non-customer side of books and is cross margined
#[serde(rename = "6")]
AccountIsCarriedOnNonCustomerSideOfBooksAndIsCrossMargined,
/// Account is house trader and is cross margined
#[serde(rename = "7")]
AccountIsHouseTraderAndIsCrossMargined,
/// Joint Backoffice Account (JBO)
#[serde(rename = "8")]
JointBackofficeAccount,
}
impl Default for AccountType {
fn default() -> Self {
AccountType::AccountIsCarriedOnCustomerSideOfBooks
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum DayBookingInst {
/// Can trigger booking without reference to the order initiator ("auto")
#[serde(rename = "0")]
CanTriggerBookingWithoutReferenceToTheOrderInitiator,
/// Speak with order initiator before booking ("speak first")
#[serde(rename = "1")]
SpeakWithOrderInitiatorBeforeBooking,
/// Accumulate
#[serde(rename = "2")]
Accumulate,
}
impl Default for DayBookingInst {
fn default() -> Self {
DayBookingInst::CanTriggerBookingWithoutReferenceToTheOrderInitiator
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum BookingUnit {
/// Each partial execution is a bookable unit
#[serde(rename = "0")]
EachPartialExecutionIsABookableUnit,
/// Aggregate partial executions on this order, and book one trade per order
#[serde(rename = "1")]
AggregatePartialExecutionsOnThisOrderAndBookOneTradePerOrder,
/// Aggregate executions for this symbol, side, and settlement date
#[serde(rename = "2")]
AggregateExecutionsForThisSymbolSideAndSettlementDate,
}
impl Default for BookingUnit {
fn default() -> Self {
BookingUnit::EachPartialExecutionIsABookableUnit
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum PreallocMethod {
/// Pro-rata
#[serde(rename = "0")]
ProRata,
/// Do not pro-rata - discuss first
#[serde(rename = "1")]
DoNotProRataDiscussFirst,
}
impl Default for PreallocMethod {
fn default() -> Self {
PreallocMethod::ProRata
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum SettlType {
/// Regular / FX Spot settlement (T+1 or T+2 depending on currency)
#[serde(rename = "0")]
RegularFxSpotSettlement,
/// Cash (TOD / T+0)
#[serde(rename = "1")]
Cash,
/// Next Day (TOM / T+1)
#[serde(rename = "2")]
NextDay,
/// T+2
#[serde(rename = "3")]
T2,
/// T+3
#[serde(rename = "4")]
T3,
/// T+4
#[serde(rename = "5")]
T4,
/// Future
#[serde(rename = "6")]
Future,
/// When And If Issued
#[serde(rename = "7")]
WhenAndIfIssued,
/// Sellers Option
#[serde(rename = "8")]
SellersOption,
/// T+5
#[serde(rename = "9")]
T5,
/// Broken date - for FX expressing non-standard tenor, <a href="tag_64_SettlDate.html" target="bottom">SettlDate (64)</a> must be specified
#[serde(rename = "B")]
BrokenDateForFxExpressingNonStandardTenorSettlDateMustBeSpecified,
/// FX Spot Next settlement (Spot+1, aka next day)
#[serde(rename = "C")]
FxSpotNextSettlement,
}
impl Default for SettlType {
fn default() -> Self {
SettlType::RegularFxSpotSettlement
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum CashMargin {
/// Cash
#[serde(rename = "1")]
Cash,
/// Margin Open
#[serde(rename = "2")]
MarginOpen,
/// Margin Close
#[serde(rename = "3")]
MarginClose,
}
impl Default for CashMargin {
fn default() -> Self {
CashMargin::Cash
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum ClearingFeeIndicator {
/// 1st year delegate trading for own account
#[serde(rename = "1")]
N1StYearDelegateTradingForOwnAccount,
/// 2nd year delegate trading for own account
#[serde(rename = "2")]
N2NdYearDelegateTradingForOwnAccount,
/// 3rd year delegate trading for own account
#[serde(rename = "3")]
N3RdYearDelegateTradingForOwnAccount,
/// 4th year delegate trading for own account
#[serde(rename = "4")]
N4ThYearDelegateTradingForOwnAccount,
/// 5th year delegate trading for own account
#[serde(rename = "5")]
N5ThYearDelegateTradingForOwnAccount,
/// 6th year delegate trading for own account
#[serde(rename = "9")]
N6ThYearDelegateTradingForOwnAccount,
/// CBOE Member
#[serde(rename = "B")]
CboeMember,
/// Non-member and Customer
#[serde(rename = "C")]
NonMemberAndCustomer,
/// Equity Member and Clearing Member
#[serde(rename = "E")]
EquityMemberAndClearingMember,
/// Full and Associate Member trading for own account and as floor brokers
#[serde(rename = "F")]
FullAndAssociateMemberTradingForOwnAccountAndAsFloorBrokers,
/// 106.H and 106.J firms
#[serde(rename = "H")]
N106HAnd106JFirms,
/// GIM, IDEM and COM Membership Interest Holders
#[serde(rename = "I")]
GimIdemAndComMembershipInterestHolders,
/// Lessee 106.F Employees
#[serde(rename = "L")]
Lessee106FEmployees,
/// All other ownership types
#[serde(rename = "M")]
AllOtherOwnershipTypes,
}
impl Default for ClearingFeeIndicator {
fn default() -> Self {
ClearingFeeIndicator::N1StYearDelegateTradingForOwnAccount
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum HandlInst {
/// Automated execution order, private, no Broker intervention
#[serde(rename = "1")]
AutomatedExecutionOrderPrivateNoBrokerIntervention,
/// Automated execution order, public, Broker intervention OK
#[serde(rename = "2")]
AutomatedExecutionOrderPublicBrokerInterventionOk,
/// Manual order, best execution
#[serde(rename = "3")]
ManualOrderBestExecution,
}
impl Default for HandlInst {
fn default() -> Self {
HandlInst::AutomatedExecutionOrderPrivateNoBrokerIntervention
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum ExecInst {
/// Stay on offerside
#[serde(rename = "0")]
StayOnOfferside,
/// Not held
#[serde(rename = "1")]
NotHeld,
/// Work
#[serde(rename = "2")]
Work,
/// Go along
#[serde(rename = "3")]
GoAlong,
/// Over the day
#[serde(rename = "4")]
OverTheDay,
/// Held
#[serde(rename = "5")]
Held,
/// Participate don't initiate
#[serde(rename = "6")]
ParticipateDonTInitiate,
/// Strict scale
#[serde(rename = "7")]
StrictScale,
/// Try to scale
#[serde(rename = "8")]
TryToScale,
/// Stay on bidside
#[serde(rename = "9")]
StayOnBidside,
/// No cross (cross is forbidden)
#[serde(rename = "A")]
NoCross,
/// OK to cross
#[serde(rename = "B")]
OkToCross,
/// Call first
#[serde(rename = "C")]
CallFirst,
/// Percent of volume (indicates that the sender does not want to be all of the volume on the floor vs. a specific percentage)
#[serde(rename = "D")]
PercentOfVolume,
/// Do not increase - DNI
#[serde(rename = "E")]
DoNotIncreaseDni,
/// Do not reduce - DNR
#[serde(rename = "F")]
DoNotReduceDnr,
/// All or none - AON
#[serde(rename = "G")]
AllOrNoneAon,
/// Reinstate on System Failure (mutually exclusive with Q)
#[serde(rename = "H")]
ReinstateOnSystemFailure,
/// Institutions only
#[serde(rename = "I")]
InstitutionsOnly,
/// Reinstate on Trading Halt (mutually exclusive with K)
#[serde(rename = "J")]
ReinstateOnTradingHalt,
/// Cancel on Trading Halt (mutually exclusive with J)
#[serde(rename = "K")]
CancelOnTradingHalt,
/// Last peg (last sale)
#[serde(rename = "L")]
LastPeg,
/// Mid-price peg (midprice of inside quote)
#[serde(rename = "M")]
MidPricePeg,
/// Non-negotiable
#[serde(rename = "N")]
NonNegotiable,
/// Opening peg
#[serde(rename = "O")]
OpeningPeg,
/// Market peg
#[serde(rename = "P")]
MarketPeg,
/// Cancel on System Failure (mutually exclusive with H)
#[serde(rename = "Q")]
CancelOnSystemFailure,
/// Primary peg (primary market - buy at bid/sell at offer)
#[serde(rename = "R")]
PrimaryPeg,
/// Suspend
#[serde(rename = "S")]
Suspend,
/// Fixed Peg to Local best bid or offer at time of order
#[serde(rename = "T")]
FixedPegToLocalBestBidOrOfferAtTimeOfOrder,
/// Customer Display Instruction (Rule11Ac1-1/4)
#[serde(rename = "U")]
CustomerDisplayInstruction,
/// Netting (for Forex)
#[serde(rename = "V")]
Netting,
/// Peg to VWAP
#[serde(rename = "W")]
PegToVwap,
/// Trade Along
#[serde(rename = "X")]
TradeAlong,
/// Try to Stop
#[serde(rename = "Y")]
TryToStop,
/// Cancel if Not Best
#[serde(rename = "Z")]
CancelIfNotBest,
/// Trailing Stop Peg
#[serde(rename = "a")]
TrailingStopPeg,
/// Strict Limit (No Price Improvement)
#[serde(rename = "b")]
StrictLimit,
/// Ignore Price Validity Checks
#[serde(rename = "c")]
IgnorePriceValidityChecks,
/// Peg to Limit Price
#[serde(rename = "d")]
PegToLimitPrice,
/// Work to Target Strategy
#[serde(rename = "e")]
WorkToTargetStrategy,
/// Intermarket Sweep
#[serde(rename = "f")]
IntermarketSweep,
/// External Routing Allowed
#[serde(rename = "g")]
ExternalRoutingAllowed,
/// External Routing Not Allowed
#[serde(rename = "h")]
ExternalRoutingNotAllowed,
/// Imbalance Only
#[serde(rename = "i")]
ImbalanceOnly,
/// Single execution requested for block trade
#[serde(rename = "j")]
SingleExecutionRequestedForBlockTrade,
/// Best Execution
#[serde(rename = "k")]
BestExecution,
}
impl Default for ExecInst {
fn default() -> Self {
ExecInst::StayOnOfferside
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum ExDestinationIDSource {
/// BIC (Bank Identification Code) (ISO 9362)
#[serde(rename = "B")]
Bic,
/// Generally accepted market participant identifier (e.g. NASD mnemonic)
#[serde(rename = "C")]
GenerallyAcceptedMarketParticipantIdentifier,
/// Proprietary / Custom code
#[serde(rename = "D")]
ProprietaryCustomCode,
/// ISO Country Code
#[serde(rename = "E")]
IsoCountryCode,
/// MIC (ISO 10383 - Market Identifier Code)
#[serde(rename = "G")]
Mic,
}
impl Default for ExDestinationIDSource {
fn default() -> Self {
ExDestinationIDSource::Bic
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum Side {
/// Buy
#[serde(rename = "1")]
Buy,
/// Sell
#[serde(rename = "2")]
Sell,
/// Buy minus
#[serde(rename = "3")]
BuyMinus,
/// Sell plus
#[serde(rename = "4")]
SellPlus,
/// Sell short
#[serde(rename = "5")]
SellShort,
/// Sell short exempt
#[serde(rename = "6")]
SellShortExempt,
/// Undisclosed (valid for IOI and List Order messages only)
#[serde(rename = "7")]
Undisclosed,
/// Cross (orders where counterparty is an exchange, valid for all messages except IOIs)
#[serde(rename = "8")]
Cross,
/// Cross short
#[serde(rename = "9")]
CrossShort,
/// Cross short exempt
#[serde(rename = "A")]
CrossShortExempt,
/// "As Defined" (for use with multileg instruments)
#[serde(rename = "B")]
AsDefined,
/// "Opposite" (for use with multileg instruments)
#[serde(rename = "C")]
Opposite,
/// Subscribe (e.g. CIV)
#[serde(rename = "D")]
Subscribe,
/// Redeem (e.g. CIV)
#[serde(rename = "E")]
Redeem,
/// Lend (FINANCING - identifies direction of collateral)
#[serde(rename = "F")]
Lend,
/// Borrow (FINANCING - identifies direction of collateral)
#[serde(rename = "G")]
Borrow,
}
impl Default for Side {
fn default() -> Self {
Side::Buy
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum QtyType {
/// Units (shares, par, currency)
#[serde(rename = "0")]
Units,
/// Contracts (if used - must specify <a href="tag_231_ContractMultiplier.html" target="bottom">ContractMultiplier (231)</a> )
#[serde(rename = "1")]
ContractsA,
/// Units of Measure per Time Unit (if used - must specify <a href="tag_996_UnitofMeasure.html" target="bottom">UnitofMeasure (996)</a> and <a href="tag_997_TimeUnit.html" target="bottom">TimeUnit (997)</a> )
#[serde(rename = "2")]
UnitsOfMeasurePerTimeUnitAAndTimeUnit,
}
impl Default for QtyType {
fn default() -> Self {
QtyType::Units
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum OrdType {
/// Market
#[serde(rename = "1")]
Market,
/// Limit
#[serde(rename = "2")]
Limit,
/// Stop / Stop Loss
#[serde(rename = "3")]
StopStopLoss,
/// Stop Limit
#[serde(rename = "4")]
StopLimit,
/// Market On Close (No longer used)
#[serde(rename = "5")]
MarketOnClose,
/// With Or Without
#[serde(rename = "6")]
WithOrWithout,
/// Limit Or Better
#[serde(rename = "7")]
LimitOrBetter,
/// Limit With Or Without
#[serde(rename = "8")]
LimitWithOrWithout,
/// On Basis
#[serde(rename = "9")]
OnBasis,
/// On Close (No longer used)
#[serde(rename = "A")]
OnClose,
/// Limit On Close (No longer used)
#[serde(rename = "B")]
LimitOnClose,
/// Forex Market (No longer used)
#[serde(rename = "C")]
ForexMarket,
/// Previously Quoted
#[serde(rename = "D")]
PreviouslyQuoted,
/// Previously Indicated
#[serde(rename = "E")]
PreviouslyIndicated,
/// Forex Limit (No longer used)
#[serde(rename = "F")]
ForexLimit,
/// Forex Swap
#[serde(rename = "G")]
ForexSwap,
/// Forex Previously Quoted (No longer used)
#[serde(rename = "H")]
ForexPreviouslyQuoted,
/// Funari (Limit day order with unexecuted portion handled as Market On Close. E.g. Japan)
#[serde(rename = "I")]
Funari,
/// Market If Touched (MIT)
#[serde(rename = "J")]
MarketIfTouched,
/// Market With Left Over as Limit (market order with unexecuted quantity becoming limit order at last price)
#[serde(rename = "K")]
MarketWithLeftOverAsLimit,
/// Previous Fund Valuation Point (Historic pricing; for CIV)
#[serde(rename = "L")]
PreviousFundValuationPoint,
/// Next Fund Valuation Point (Forward pricing; for CIV)
#[serde(rename = "M")]
NextFundValuationPoint,
/// Pegged
#[serde(rename = "P")]
Pegged,
/// Counter-order selection
#[serde(rename = "Q")]
CounterOrderSelection,
}
impl Default for OrdType {
fn default() -> Self {
OrdType::Market
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum PriceType {
/// Percentage (e.g. percent of par) (often called "dollar price" for fixed income)
#[serde(rename = "1")]
Percentage,
/// Per unit (i.e. per share or contract)
#[serde(rename = "2")]
PerUnit,
/// Fixed Amount (absolute value)
#[serde(rename = "3")]
FixedAmount,
/// Discount - percentage points below par
#[serde(rename = "4")]
DiscountPercentagePointsBelowPar,
/// Premium - percentage points over par
#[serde(rename = "5")]
PremiumPercentagePointsOverPar,
/// Spread
#[serde(rename = "6")]
Spread,
/// TED price
#[serde(rename = "7")]
TedPrice,
/// TED yield
#[serde(rename = "8")]
TedYield,
/// Yield
#[serde(rename = "9")]
Yield,
/// Fixed cabinet trade price (primarily for listed futures and options)
#[serde(rename = "10")]
FixedCabinetTradePrice,
/// Variable cabinet trade price (primarily for listed futures and options)
#[serde(rename = "11")]
VariableCabinetTradePrice,
/// Product ticks in halfs
#[serde(rename = "13")]
ProductTicksInHalfs,
/// Product ticks in fourths
#[serde(rename = "14")]
ProductTicksInFourths,
/// Product ticks in eights
#[serde(rename = "15")]
ProductTicksInEights,
/// Product ticks in sixteenths
#[serde(rename = "16")]
ProductTicksInSixteenths,
/// Product ticks in thirty-seconds
#[serde(rename = "17")]
ProductTicksInThirtySeconds,
/// Product ticks in sixty-fourths
#[serde(rename = "18")]
ProductTicksInSixtyForths,
/// Product ticks in one-twenty-eights
#[serde(rename = "19")]
ProductTicksInOneTwentyEights,
}
impl Default for PriceType {
fn default() -> Self {
PriceType::Percentage
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum PriceProtectionScope {
/// None
#[serde(rename = "0")]
None,
/// Local (Exchange, ECN, ATS)
#[serde(rename = "1")]
Local,
/// National (Across all national markets)
#[serde(rename = "2")]
National,
/// Global (Across all markets)
#[serde(rename = "3")]
Global,
}
impl Default for PriceProtectionScope {
fn default() -> Self {
PriceProtectionScope::None
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum TargetStrategy {
/// VWAP
#[serde(rename = "1")]
Vwap,
/// Participate (i.e. aim to be x percent of the market volume)
#[serde(rename = "2")]
Participate,
/// Minimize market impact
#[serde(rename = "3")]
MininizeMarketImpact,
}
impl Default for TargetStrategy {
fn default() -> Self {
TargetStrategy::Vwap
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum SolicitedFlag {
/// Was not solicited
#[serde(rename = "N")]
WasNotSolicited,
/// Was solicited
#[serde(rename = "Y")]
WasSolicited,
}
impl Default for SolicitedFlag {
fn default() -> Self {
SolicitedFlag::WasNotSolicited
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum Currency {
/// Afghani
#[serde(rename = "AFA")]
Afa,
/// Algerian Dinar
#[serde(rename = "DZD")]
Dzd,
/// Andorran Peseta
#[serde(rename = "ADP")]
Adp,
/// Argentine Peso
#[serde(rename = "ARS")]
Ars,
/// Armenian Dram
#[serde(rename = "AMD")]
Amd,
/// Aruban Guilder
#[serde(rename = "AWG")]
Awg,
/// Australian Dollar
#[serde(rename = "AUD")]
Aud,
/// Azerbaijanian Manat
#[serde(rename = "AZM")]
Azm,
/// Bahamian Dollar
#[serde(rename = "BSD")]
Bsd,
/// Bahraini Dinar
#[serde(rename = "BHD")]
Bhd,
/// Baht
#[serde(rename = "THB")]
Thb,
/// Balboa
#[serde(rename = "PAB")]
Pab,
/// Barbados Dollar
#[serde(rename = "BBD")]
Bbd,
/// Belarussian Ruble
#[serde(rename = "BYB")]
Byb,
/// Belgian Franc
#[serde(rename = "BEF")]
Bef,
/// Belize Dollar
#[serde(rename = "BZD")]
Bzd,
/// Bermudian Dollar
#[serde(rename = "BMD")]
Bmd,
/// Bolivar
#[serde(rename = "VEB")]
Veb,
/// Boliviano
#[serde(rename = "BOB")]
Bob,
/// Brazilian Real
#[serde(rename = "BRL")]
Brl,
/// Brunei Dollar
#[serde(rename = "BND")]
Bnd,
/// Burundi Franc
#[serde(rename = "BIF")]
Bif,
/// CFA Franc BCEAO+
#[serde(rename = "XOF")]
Xof,
/// CFA Franc BEAC#
#[serde(rename = "XAF")]
Xaf,
/// CFP Franc
#[serde(rename = "XPF")]
Xpf,
/// Canadian Dollar
#[serde(rename = "CAD")]
Cad,
/// Cape Verde Escudo
#[serde(rename = "CVE")]
Cve,
/// Cayman Islands Dollar
#[serde(rename = "KYD")]
Kyd,
/// Cedi
#[serde(rename = "GHC")]
Ghc,
/// Chilean Peso
#[serde(rename = "CLP")]
Clp,
/// Colombian Peso
#[serde(rename = "COP")]
Cop,
/// Comoro Franc
#[serde(rename = "KMF")]
Kmf,
/// Convertible Marks
#[serde(rename = "BAM")]
Bam,
/// Cordoba Oro
#[serde(rename = "NIO")]
Nio,
/// Costa Rican Colon
#[serde(rename = "CRC")]
Crc,
/// Cuban Peso
#[serde(rename = "CUP")]
Cup,
/// Cyprus Pound
#[serde(rename = "CYP")]
Cyp,
/// Czech Koruna
#[serde(rename = "CZK")]
Czk,
/// Dalasi
#[serde(rename = "GMD")]
Gmd,
/// Danish Krone
#[serde(rename = "DKK")]
Dkk,
/// Denar
#[serde(rename = "MKD")]
Mkd,
/// Deutsche Mark
#[serde(rename = "DEM")]
Dem,
/// Djibouti Franc
#[serde(rename = "DJF")]
Djf,
/// Dobra
#[serde(rename = "STD")]
Std,
/// Dominican Peso
#[serde(rename = "DOP")]
Dop,
/// Dong
#[serde(rename = "VND")]
Vnd,
/// Drachma
#[serde(rename = "GRD")]
Grd,
/// East Caribbean Dollar
#[serde(rename = "XCD")]
Xcd,
/// Egyptian Pound
#[serde(rename = "EGP")]
Egp,
/// El Salvador Colon
#[serde(rename = "SVC")]
Svc,
/// Ethiopian Birr
#[serde(rename = "ETB")]
Etb,
/// Euro
#[serde(rename = "EUR")]
Eur,
/// Falkland Islands Pound
#[serde(rename = "FKP")]
Fkp,
/// Fiji Dollar
#[serde(rename = "FJD")]
Fjd,
/// Forint
#[serde(rename = "HUF")]
Huf,
/// Franc Congolais
#[serde(rename = "CDF")]
Cdf,
/// French Franc
#[serde(rename = "FRF")]
Frf,
/// Gibraltar Pound
#[serde(rename = "GIP")]
Gip,
/// Gourde
#[serde(rename = "HTG")]
Htg,
/// Guarani
#[serde(rename = "PYG")]
Pyg,
/// Guinea Franc
#[serde(rename = "GNF")]
Gnf,
/// Guinea-Bissau Peso
#[serde(rename = "GWP")]
Gwp,
/// Guyana Dollar
#[serde(rename = "GYD")]
Gyd,
/// Hong Kong Dollar
#[serde(rename = "HKD")]
Hkd,
/// Hryvnia
#[serde(rename = "UAH")]
Uah,
/// Iceland Krona
#[serde(rename = "ISK")]
Isk,
/// Indian Rupee
#[serde(rename = "INR")]
Inr,
/// Iranian Rial
#[serde(rename = "IRR")]
Irr,
/// Iraqi Dinar
#[serde(rename = "IQD")]
Iqd,
/// Irish Pound
#[serde(rename = "IEP")]
Iep,
/// Italian Lira
#[serde(rename = "ITL")]
Itl,
/// Jamaican Dollar
#[serde(rename = "JMD")]
Jmd,
/// Jordanian Dinar
#[serde(rename = "JOD")]
Jod,
/// Kenyan Shilling
#[serde(rename = "KES")]
Kes,
/// Kina
#[serde(rename = "PGK")]
Pgk,
/// Kip
#[serde(rename = "LAK")]
Lak,
/// Kroon
#[serde(rename = "EEK")]
Eek,
/// Kuna
#[serde(rename = "HRK")]
Hrk,
/// Kuwaiti Dinar
#[serde(rename = "KWD")]
Kwd,
/// Kwacha
#[serde(rename = "MWK")]
Mwk,
/// Kwacha
#[serde(rename = "ZMK")]
Zmk,
/// Kwanza Reajustado
#[serde(rename = "AOR")]
Aor,
/// Kyat
#[serde(rename = "MMK")]
Mmk,
/// Lari
#[serde(rename = "GEL")]
Gel,
/// Latvian Lats
#[serde(rename = "LVL")]
Lvl,
/// Lebanese Pound
#[serde(rename = "LBP")]
Lbp,
/// Lek
#[serde(rename = "ALL")]
All,
/// Lempira
#[serde(rename = "HNL")]
Hnl,
/// Leone
#[serde(rename = "SLL")]
Sll,
/// Leu
#[serde(rename = "ROL")]
Rol,
/// Lev
#[serde(rename = "BGL")]
Bgl,
/// Liberian Dollar
#[serde(rename = "LRD")]
Lrd,
/// Libyan Dinar
#[serde(rename = "LYD")]
Lyd,
/// Lilangeni
#[serde(rename = "SZL")]
Szl,
/// Lithuanian Litas
#[serde(rename = "LTL")]
Ltl,
/// Loti
#[serde(rename = "LSL")]
Lsl,
/// Luxembourg Franc
#[serde(rename = "LUF")]
Luf,
/// Malagasy Franc
#[serde(rename = "MGF")]
Mgf,
/// Malaysian Ringgit
#[serde(rename = "MYR")]
Myr,
/// Maltese Lira
#[serde(rename = "MTL")]
Mtl,
/// Manat
#[serde(rename = "TMM")]
Tmm,
/// Markka
#[serde(rename = "FIM")]
Fim,
/// Mauritius Rupee
#[serde(rename = "MUR")]
Mur,
/// Metical
#[serde(rename = "MZM")]
Mzm,
/// Mexican Peso
#[serde(rename = "MXN")]
Mxn,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "MXV")]
Mxv,
/// Moldovan Leu
#[serde(rename = "MDL")]
Mdl,
/// Moroccan Dirham
#[serde(rename = "MAD")]
Mad,
/// Mvdol
#[serde(rename = "BOV")]
Bov,
/// Naira
#[serde(rename = "NGN")]
Ngn,
/// Nakfa
#[serde(rename = "ERN")]
Ern,
/// Namibia Dollar
#[serde(rename = "NAD")]
Nad,
/// Nepalese Rupee
#[serde(rename = "NPR")]
Npr,
/// Netherlands Antillian Guilder
#[serde(rename = "ANG")]
Ang,
/// Netherlands Guilder
#[serde(rename = "NLG")]
Nlg,
/// New Dinar
#[serde(rename = "YUM")]
Yum,
/// New Israeli Sheqel
#[serde(rename = "ILS")]
Ils,
/// New Kwanza
#[serde(rename = "AON")]
Aon,
/// New Taiwan Dollar
#[serde(rename = "TWD")]
Twd,
/// New Zaire
#[serde(rename = "ZRN")]
Zrn,
/// New Zealand Dollar
#[serde(rename = "NZD")]
Nzd,
/// Next day
#[serde(rename = "USN")]
Usn,
/// Ngultrum
#[serde(rename = "BTN")]
Btn,
/// North Korean Won
#[serde(rename = "KPW")]
Kpw,
/// Norwegian Krone
#[serde(rename = "NOK")]
Nok,
/// Nuevo Sol
#[serde(rename = "PEN")]
Pen,
/// Ouguiya
#[serde(rename = "MRO")]
Mro,
/// Pa'anga
#[serde(rename = "TOP")]
Top,
/// Pakistan Rupee
#[serde(rename = "PKR")]
Pkr,
/// Pataca
#[serde(rename = "MOP")]
Mop,
/// Peso Uruguayo
#[serde(rename = "UYU")]
Uyu,
/// Philippine Peso
#[serde(rename = "PHP")]
Php,
/// Portuguese Escudo
#[serde(rename = "PTE")]
Pte,
/// Pound Sterling
#[serde(rename = "GBP")]
Gbp,
/// Pula
#[serde(rename = "BWP")]
Bwp,
/// Qatari Rial
#[serde(rename = "QAR")]
Qar,
/// Quetzal
#[serde(rename = "GTQ")]
Gtq,
/// Rand
#[serde(rename = "ZAR")]
Zar,
/// Rial Omani
#[serde(rename = "OMR")]
Omr,
/// Riel
#[serde(rename = "KHR")]
Khr,
/// Rufiyaa
#[serde(rename = "MVR")]
Mvr,
/// Rupiah
#[serde(rename = "IDR")]
Idr,
/// Russian Ruble
#[serde(rename = "RUB")]
Rub,
/// Russian Ruble
#[serde(rename = "RUR")]
Rur,
/// Rwanda Franc
#[serde(rename = "RWF")]
Rwf,
/// SDR
#[serde(rename = "XDR")]
Xdr,
/// Same day
#[serde(rename = "USS")]
Uss,
/// Saudi Riyal
#[serde(rename = "SAR")]
Sar,
/// Schilling
#[serde(rename = "ATS")]
Ats,
/// Seychelles Rupee
#[serde(rename = "SCR")]
Scr,
/// Singapore Dollar
#[serde(rename = "SGD")]
Sgd,
/// Slovak Koruna
#[serde(rename = "SKK")]
Skk,
/// Solomon Islands Dollar
#[serde(rename = "SBD")]
Sbd,
/// Som
#[serde(rename = "KGS")]
Kgs,
/// Somali Shilling
#[serde(rename = "SOS")]
Sos,
/// Spanish Peseta
#[serde(rename = "ESP")]
Esp,
/// Sri Lanka Rupee
#[serde(rename = "LKR")]
Lkr,
/// St Helena Pound
#[serde(rename = "SHP")]
Shp,
/// Sucre
#[serde(rename = "ECS")]
Ecs,
/// Sudanese Dinar
#[serde(rename = "SDD")]
Sdd,
/// Surinam Guilder
#[serde(rename = "SRG")]
Srg,
/// Swedish Krona
#[serde(rename = "SEK")]
Sek,
/// Swiss Franc
#[serde(rename = "CHF")]
Chf,
/// Syrian Pound
#[serde(rename = "SYP")]
Syp,
/// Tajik Ruble
#[serde(rename = "TJR")]
Tjr,
/// Taka
#[serde(rename = "BDT")]
Bdt,
/// Tala
#[serde(rename = "WST")]
Wst,
/// Tanzanian Shilling
#[serde(rename = "TZS")]
Tzs,
/// Tenge
#[serde(rename = "KZT")]
Kzt,
/// Timor Escudo
#[serde(rename = "TPE")]
Tpe,
/// Tolar
#[serde(rename = "SIT")]
Sit,
/// Trinidad and Tobago Dollar
#[serde(rename = "TTD")]
Ttd,
/// Tugrik
#[serde(rename = "MNT")]
Mnt,
/// Tunisian Dinar
#[serde(rename = "TND")]
Tnd,
/// Turkish Lira
#[serde(rename = "TRL")]
Trl,
/// UAE Dirham
#[serde(rename = "AED")]
Aed,
/// US Dollar
#[serde(rename = "USD")]
Usd,
/// Uganda Shilling
#[serde(rename = "UGX")]
Ugx,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "ECV")]
Ecv,
/// Unidades de fomento
#[serde(rename = "CLF")]
Clf,
/// Uzbekistan Sum
#[serde(rename = "UZS")]
Uzs,
/// Vatu
#[serde(rename = "VUV")]
Vuv,
/// Won
#[serde(rename = "KRW")]
Krw,
/// Yemeni Rial
#[serde(rename = "YER")]
Yer,
/// Yen
#[serde(rename = "JPY")]
Jpy,
/// Yuan Renminbi
#[serde(rename = "CNY")]
Cny,
/// Zimbabwe Dollar
#[serde(rename = "ZWD")]
Zwd,
/// Zloty
#[serde(rename = "PLN")]
Pln,
/// financial Rand
#[serde(rename = "ZAL")]
Zal,
/// Afghani
#[serde(rename = "004")]
N004,
/// Algerian Dinar
#[serde(rename = "01")]
N01,
/// Andorran Peseta
#[serde(rename = "020")]
N020,
/// Argentine Peso
#[serde(rename = "032")]
N032,
/// Armenian Dram
#[serde(rename = "051")]
N051,
/// Aruban Guilder
#[serde(rename = "533")]
N533,
/// Australian Dollar
#[serde(rename = "036")]
N036,
/// Azerbaijanian Manat
#[serde(rename = "031")]
N031,
/// Bahamian Dollar
#[serde(rename = "044")]
N044,
/// Bahraini Dinar
#[serde(rename = "048")]
N048,
/// Baht
#[serde(rename = "764")]
N764,
/// Balboa
#[serde(rename = "590")]
N590,
/// Barbados Dollar
#[serde(rename = "052")]
N052,
/// Belarussian Ruble
#[serde(rename = "112")]
N112,
/// Belgian Franc
#[serde(rename = "056")]
N056,
/// Belize Dollar
#[serde(rename = "084")]
N084,
/// Bermudian Dollar
#[serde(rename = "060")]
N060,
/// Bolivar
#[serde(rename = "862")]
N862,
/// Boliviano
#[serde(rename = "068")]
N068,
/// Brazilian Real
#[serde(rename = "986")]
N986,
/// Brunei Dollar
#[serde(rename = "096")]
N096,
/// Burundi Franc
#[serde(rename = "108")]
N108,
/// CFA Franc BCEAO+
#[serde(rename = "952")]
N952,
/// CFA Franc BEAC#
#[serde(rename = "950")]
N950,
/// CFP Franc
#[serde(rename = "953")]
N953,
/// Canadian Dollar
#[serde(rename = "124")]
N124,
/// Cape Verde Escudo
#[serde(rename = "132")]
N132,
/// Cayman Islands Dollar
#[serde(rename = "136")]
N136,
/// Cedi
#[serde(rename = "288")]
N288,
/// Chilean Peso
#[serde(rename = "152")]
N152,
/// Colombian Peso
#[serde(rename = "170")]
N170,
/// Comoro Franc
#[serde(rename = "174")]
N174,
/// Convertible Marks
#[serde(rename = "977")]
N977,
/// Cordoba Oro
#[serde(rename = "558")]
N558,
/// Costa Rican Colon
#[serde(rename = "188")]
N188,
/// Cuban Peso
#[serde(rename = "192")]
N192,
/// Cyprus Pound
#[serde(rename = "196")]
N196,
/// Czech Koruna
#[serde(rename = "203")]
N203,
/// Dalasi
#[serde(rename = "270")]
N270,
/// Danish Krone
#[serde(rename = "208")]
N208,
/// Denar
#[serde(rename = "807")]
N807,
/// Deutsche Mark
#[serde(rename = "280")]
N280,
/// Djibouti Franc
#[serde(rename = "262")]
N262,
/// Dobra
#[serde(rename = "678")]
N678,
/// Dominican Peso
#[serde(rename = "214")]
N214,
/// Dong
#[serde(rename = "704")]
N704,
/// Drachma
#[serde(rename = "300")]
N300,
/// East Caribbean Dollar
#[serde(rename = "951")]
N951,
/// Egyptian Pound
#[serde(rename = "818")]
N818,
/// El Salvador Colon
#[serde(rename = "222")]
N222,
/// Ethiopian Birr
#[serde(rename = "230")]
N230,
/// Euro
#[serde(rename = "978")]
N978,
/// Falkland Islands Pound
#[serde(rename = "238")]
N238,
/// Fiji Dollar
#[serde(rename = "242")]
N242,
/// Forint
#[serde(rename = "348")]
N348,
/// Franc Congolais
#[serde(rename = "976")]
N976,
/// French Franc
#[serde(rename = "250")]
N250,
/// Gibraltar Pound
#[serde(rename = "292")]
N292,
/// Gourde
#[serde(rename = "332")]
N332,
/// Guarani
#[serde(rename = "600")]
N600,
/// Guinea Franc
#[serde(rename = "324")]
N324,
/// Guinea-Bissau Peso
#[serde(rename = "624")]
N624,
/// Guyana Dollar
#[serde(rename = "328")]
N328,
/// Hong Kong Dollar
#[serde(rename = "344")]
N344,
/// Hryvnia
#[serde(rename = "980")]
N980,
/// Iceland Krona
#[serde(rename = "352")]
N352,
/// Indian Rupee
#[serde(rename = "356")]
N356,
/// Iranian Rial
#[serde(rename = "364")]
N364,
/// Iraqi Dinar
#[serde(rename = "368")]
N368,
/// Irish Pound
#[serde(rename = "372")]
N372,
/// Italian Lira
#[serde(rename = "380")]
N380,
/// Jamaican Dollar
#[serde(rename = "388")]
N388,
/// Jordanian Dinar
#[serde(rename = "400")]
N400,
/// Kenyan Shilling
#[serde(rename = "404")]
N404,
/// Kina
#[serde(rename = "598")]
N598,
/// Kip
#[serde(rename = "418")]
N418,
/// Kroon
#[serde(rename = "233")]
N233,
/// Kuna
#[serde(rename = "191")]
N191,
/// Kuwaiti Dinar
#[serde(rename = "414")]
N414,
/// Kwacha
#[serde(rename = "454")]
N454,
/// Kwacha
#[serde(rename = "894")]
N894,
/// Kwanza Reajustado
#[serde(rename = "982")]
N982,
/// Kyat
#[serde(rename = "104")]
N104,
/// Lari
#[serde(rename = "981")]
N981,
/// Latvian Lats
#[serde(rename = "428")]
N428,
/// Lebanese Pound
#[serde(rename = "422")]
N422,
/// Lek
#[serde(rename = "008")]
N008,
/// Lempira
#[serde(rename = "340")]
N340,
/// Leone
#[serde(rename = "694")]
N694,
/// Leu
#[serde(rename = "642")]
N642,
/// Lev
#[serde(rename = "100")]
N100,
/// Liberian Dollar
#[serde(rename = "430")]
N430,
/// Libyan Dinar
#[serde(rename = "434")]
N434,
/// Lilangeni
#[serde(rename = "748")]
N748,
/// Lithuanian Litas
#[serde(rename = "440")]
N440,
/// Loti
#[serde(rename = "426")]
N426,
/// Luxembourg Franc
#[serde(rename = "442")]
N442,
/// Malagasy Franc
#[serde(rename = "450")]
N450,
/// Malaysian Ringgit
#[serde(rename = "458")]
N458,
/// Maltese Lira
#[serde(rename = "470")]
N470,
/// Manat
#[serde(rename = "795")]
N795,
/// Markka
#[serde(rename = "246")]
N246,
/// Mauritius Rupee
#[serde(rename = "480")]
N480,
/// Metical
#[serde(rename = "508")]
N508,
/// Mexican Peso
#[serde(rename = "484")]
N484,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "979")]
N979,
/// Moldovan Leu
#[serde(rename = "498")]
N498,
/// Moroccan Dirham
#[serde(rename = "504")]
N504,
/// Mvdol
#[serde(rename = "984")]
N984,
/// Naira
#[serde(rename = "566")]
N566,
/// Nakfa
#[serde(rename = "232")]
N232,
/// Namibia Dollar
#[serde(rename = "516")]
N516,
/// Nepalese Rupee
#[serde(rename = "524")]
N524,
/// Netherlands Antillian Guilder
#[serde(rename = "532")]
N532,
/// Netherlands Guilder
#[serde(rename = "528")]
N528,
/// New Dinar
#[serde(rename = "891")]
N891,
/// New Israeli Sheqel
#[serde(rename = "376")]
N376,
/// New Kwanza
#[serde(rename = "02")]
N02,
/// New Taiwan Dollar
#[serde(rename = "901")]
N901,
/// New Zaire
#[serde(rename = "180")]
N180,
/// New Zealand Dollar
#[serde(rename = "554")]
N554,
/// Next day
#[serde(rename = "997")]
N997,
/// Ngultrum
#[serde(rename = "064")]
N064,
/// North Korean Won
#[serde(rename = "408")]
N408,
/// Norwegian Krone
#[serde(rename = "578")]
N578,
/// Nuevo Sol
#[serde(rename = "604")]
N604,
/// Ouguiya
#[serde(rename = "478")]
N478,
/// Pa'anga
#[serde(rename = "776")]
N776,
/// Pakistan Rupee
#[serde(rename = "586")]
N586,
/// Pataca
#[serde(rename = "446")]
N446,
/// Peso Uruguayo
#[serde(rename = "858")]
N858,
/// Philippine Peso
#[serde(rename = "608")]
N608,
/// Portuguese Escudo
#[serde(rename = "620")]
N620,
/// Pound Sterling
#[serde(rename = "826")]
N826,
/// Pula
#[serde(rename = "072")]
N072,
/// Qatari Rial
#[serde(rename = "634")]
N634,
/// Quetzal
#[serde(rename = "320")]
N320,
/// Rand
#[serde(rename = "710")]
N710,
/// Rial Omani
#[serde(rename = "512")]
N512,
/// Riel
#[serde(rename = "116")]
N116,
/// Rufiyaa
#[serde(rename = "462")]
N462,
/// Rupiah
#[serde(rename = "360")]
N360,
/// Russian Ruble
#[serde(rename = "643")]
N643,
/// Russian Ruble
#[serde(rename = "810")]
N810,
/// Rwanda Franc
#[serde(rename = "646")]
N646,
/// SDR
#[serde(rename = "960")]
N960,
/// Same day
#[serde(rename = "998")]
N998,
/// Saudi Riyal
#[serde(rename = "682")]
N682,
/// Schilling
#[serde(rename = "040")]
N040,
/// Seychelles Rupee
#[serde(rename = "690")]
N690,
/// Singapore Dollar
#[serde(rename = "702")]
N702,
/// Slovak Koruna
#[serde(rename = "703")]
N703,
/// Solomon Islands Dollar
#[serde(rename = "090")]
N090,
/// Som
#[serde(rename = "417")]
N417,
/// Somali Shilling
#[serde(rename = "706")]
N706,
/// Spanish Peseta
#[serde(rename = "724")]
N724,
/// Sri Lanka Rupee
#[serde(rename = "144")]
N144,
/// St Helena Pound
#[serde(rename = "654")]
N654,
/// Sucre
#[serde(rename = "218")]
N218,
/// Sudanese Dinar
#[serde(rename = "736")]
N736,
/// Surinam Guilder
#[serde(rename = "740")]
N740,
/// Swedish Krona
#[serde(rename = "752")]
N752,
/// Swiss Franc
#[serde(rename = "756")]
N756,
/// Syrian Pound
#[serde(rename = "760")]
N760,
/// Tajik Ruble
#[serde(rename = "762")]
N762,
/// Taka
#[serde(rename = "050")]
N050,
/// Tala
#[serde(rename = "882")]
N882,
/// Tanzanian Shilling
#[serde(rename = "834")]
N834,
/// Tenge
#[serde(rename = "398")]
N398,
/// Timor Escudo
#[serde(rename = "626")]
N626,
/// Tolar
#[serde(rename = "705")]
N705,
/// Trinidad and Tobago Dollar
#[serde(rename = "780")]
N780,
/// Tugrik
#[serde(rename = "496")]
N496,
/// Tunisian Dinar
#[serde(rename = "788")]
N788,
/// Turkish Lira
#[serde(rename = "792")]
N792,
/// UAE Dirham
#[serde(rename = "784")]
N784,
/// US Dollar
#[serde(rename = "840")]
N840,
/// Uganda Shilling
#[serde(rename = "800")]
N800,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "983")]
N983,
/// Unidades de fomento
#[serde(rename = "990")]
N990,
/// Uzbekistan Sum
#[serde(rename = "860")]
N860,
/// Vatu
#[serde(rename = "548")]
N548,
/// Won
#[serde(rename = "410")]
N410,
/// Yemeni Rial
#[serde(rename = "886")]
N886,
/// Yen
#[serde(rename = "392")]
N392,
/// Yuan Renminbi
#[serde(rename = "156")]
N156,
/// Zimbabwe Dollar
#[serde(rename = "716")]
N716,
/// Zloty
#[serde(rename = "985")]
N985,
    /// Financial Rand
#[serde(rename = "991")]
N991,
/// Gold
#[serde(rename = "XAU")]
Xau,
/// European Composite Unit (EURCO)
#[serde(rename = "XBA")]
Xba,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "XBB")]
Xbb,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "XBC")]
Xbc,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "XBD")]
Xbd,
/// Palladium
#[serde(rename = "XPD")]
Xpd,
/// Platinum
#[serde(rename = "XPT")]
Xpt,
/// Silver
#[serde(rename = "XAG")]
Xag,
/// UIC-Franc
#[serde(rename = "XFU")]
Xfu,
/// Gold-Franc
#[serde(rename = "XFO")]
Xfo,
/// Codes specifically reserved for testing purposes
#[serde(rename = "XTS")]
Xts,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "XXX")]
Xxx,
/// Gold
#[serde(rename = "959")]
N959,
/// European Composite Unit (EURCO)
#[serde(rename = "955")]
N955,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "956")]
N956,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "957")]
N957,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "958")]
N958,
/// Palladium
#[serde(rename = "964")]
N964,
/// Platinum
#[serde(rename = "962")]
N962,
/// Silver
#[serde(rename = "961")]
N961,
/// Codes specifically reserved for testing purposes
#[serde(rename = "963")]
N963,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "999")]
N999,
}
impl Default for Currency {
fn default() -> Self {
Currency::Afa
}
}
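// Illustrative sketch, not part of the generated definitions above: it assumes
// `Currency` carries the same derives as the other enums in this file
// (Serialize, Deserialize, Debug, PartialEq) and that `serde_json` is available
// as a dev-dependency. It shows that the alphabetic ISO code and the numeric
// ISO code deserialize into distinct variants.
#[cfg(test)]
mod currency_code_example {
    use super::Currency;

    #[test]
    fn alpha_and_numeric_codes_map_to_distinct_variants() {
        // "USD" hits the alphabetic variant...
        let alpha: Currency = serde_json::from_str("\"USD\"").unwrap();
        assert_eq!(alpha, Currency::Usd);
        // ...while the ISO numeric code "840" has its own variant.
        let numeric: Currency = serde_json::from_str("\"840\"").unwrap();
        assert_eq!(numeric, Currency::N840);
        // Serialization produces the renamed code string, not the variant name.
        assert_eq!(serde_json::to_string(&alpha).unwrap(), "\"USD\"");
    }
}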
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum TimeInForce {
/// Day (or session)
#[serde(rename = "0")]
Day,
/// Good Till Cancel (GTC)
#[serde(rename = "1")]
GoodTillCancel,
/// At the Opening (OPG)
#[serde(rename = "2")]
AtTheOpening,
/// Immediate Or Cancel (IOC)
#[serde(rename = "3")]
ImmediateOrCancel,
/// Fill Or Kill (FOK)
#[serde(rename = "4")]
FillOrKill,
/// Good Till Crossing (GTX)
#[serde(rename = "5")]
GoodTillCrossing,
/// Good Till Date (GTD)
#[serde(rename = "6")]
GoodTillDate,
/// At the Close
#[serde(rename = "7")]
AtTheClose,
}
impl Default for TimeInForce {
fn default() -> Self {
TimeInForce::Day
}
}
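// Hedged usage sketch (serde_json is assumed as a dev-dependency; this module is
// not part of the generated output): the #[serde(rename)] values above are the
// single-character FIX codes, so TimeInForce round-trips through "0".."7"
// rather than the Rust variant names.
#[cfg(test)]
mod time_in_force_example {
    use super::TimeInForce;

    #[test]
    fn round_trips_through_the_fix_code() {
        let encoded = serde_json::to_string(&TimeInForce::GoodTillCancel).unwrap();
        assert_eq!(encoded, "\"1\"");
        let decoded: TimeInForce = serde_json::from_str(&encoded).unwrap();
        assert_eq!(decoded, TimeInForce::GoodTillCancel);
        // The Default impl above falls back to the first variant, Day ("0").
        assert_eq!(TimeInForce::default(), TimeInForce::Day);
    }
}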
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum GTBookingInst {
/// Book out all trades on day of execution
#[serde(rename = "0")]
BookOutAllTradesOnDayOfExecution,
/// Accumulate executions until order is filled or expires
#[serde(rename = "1")]
AccumulateExecutionsUntilOrderIsFilledOrExpires,
/// Accumulate until verbally notified otherwise
#[serde(rename = "2")]
AccumulateUntilVerballyNotifiedOtherwise,
}
impl Default for GTBookingInst {
fn default() -> Self {
GTBookingInst::BookOutAllTradesOnDayOfExecution
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum OrderCapacity {
/// Agency
#[serde(rename = "A")]
Agency,
/// Proprietary
#[serde(rename = "G")]
Proprietary,
/// Individual
#[serde(rename = "I")]
Individual,
    /// Principal (Note for CMS purposes, "Principal" includes "Proprietary")
#[serde(rename = "P")]
Principal,
/// Riskless Principal
#[serde(rename = "R")]
RisklessPrincipal,
/// Agent for Other Member
#[serde(rename = "W")]
AgentForOtherMember,
}
impl Default for OrderCapacity {
fn default() -> Self {
OrderCapacity::Agency
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum OrderRestrictions {
/// Program Trade
#[serde(rename = "1")]
ProgramTrade,
/// Index Arbitrage
#[serde(rename = "2")]
IndexArbitrage,
/// Non-Index Arbitrage
#[serde(rename = "3")]
NonIndexArbitrage,
/// Competing Market Maker
#[serde(rename = "4")]
CompetingMarketMaker,
/// Acting as Market Maker or Specialist in the security
#[serde(rename = "5")]
ActingAsMarketMakerOrSpecialistInTheSecurity,
/// Acting as Market Maker or Specialist in the underlying security of a derivative security
#[serde(rename = "6")]
ActingAsMarketMakerOrSpecialistInTheUnderlyingSecurityOfADerivativeSecurity,
    /// Foreign Entity (of foreign government or regulatory jurisdiction)
#[serde(rename = "7")]
ForeignEntity,
/// External Market Participant
#[serde(rename = "8")]
ExternalMarketParticipant,
/// External Inter-connected Market Linkage
#[serde(rename = "9")]
ExternalInterConnectedMarketLinkage,
/// Riskless Arbitrage
#[serde(rename = "A")]
RisklessArbitrage,
}
impl Default for OrderRestrictions {
fn default() -> Self {
OrderRestrictions::ProgramTrade
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum CustOrderCapacity {
/// Member trading for their own account
#[serde(rename = "1")]
MemberTradingForTheirOwnAccount,
/// Clearing Firm trading for its proprietary account
#[serde(rename = "2")]
ClearingFirmTradingForItsProprietaryAccount,
/// Member trading for another member
#[serde(rename = "3")]
MemberTradingForAnotherMember,
/// All other
#[serde(rename = "4")]
AllOther,
}
impl Default for CustOrderCapacity {
fn default() -> Self {
CustOrderCapacity::MemberTradingForTheirOwnAccount
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum ForexReq {
/// Execute Forex after security trade
#[serde(rename = "Y")]
ExecuteForexAfterSecurityTrade,
/// Do not execute Forex after security trade
#[serde(rename = "N")]
DoNotExecuteForexAfterSecurityTrade,
}
impl Default for ForexReq {
fn default() -> Self {
ForexReq::ExecuteForexAfterSecurityTrade
}
}
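// Sketch only (serde_json assumed, not generated code): ForexReq models the FIX
// "Y"/"N" flag as a two-variant enum rather than a bool, which keeps call sites
// explicit about what "Y" means for this particular field.
#[cfg(test)]
mod forex_req_example {
    use super::ForexReq;

    #[test]
    fn parses_the_yes_and_no_codes() {
        let yes: ForexReq = serde_json::from_str("\"Y\"").unwrap();
        assert_eq!(yes, ForexReq::ExecuteForexAfterSecurityTrade);
        let no: ForexReq = serde_json::from_str("\"N\"").unwrap();
        assert_eq!(no, ForexReq::DoNotExecuteForexAfterSecurityTrade);
    }
}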
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum SettlCurrency {
/// Afghani
#[serde(rename = "AFA")]
Afa,
/// Algerian Dinar
#[serde(rename = "DZD")]
Dzd,
/// Andorran Peseta
#[serde(rename = "ADP")]
Adp,
/// Argentine Peso
#[serde(rename = "ARS")]
Ars,
/// Armenian Dram
#[serde(rename = "AMD")]
Amd,
/// Aruban Guilder
#[serde(rename = "AWG")]
Awg,
/// Australian Dollar
#[serde(rename = "AUD")]
Aud,
/// Azerbaijanian Manat
#[serde(rename = "AZM")]
Azm,
/// Bahamian Dollar
#[serde(rename = "BSD")]
Bsd,
/// Bahraini Dinar
#[serde(rename = "BHD")]
Bhd,
/// Baht
#[serde(rename = "THB")]
Thb,
/// Balboa
#[serde(rename = "PAB")]
Pab,
/// Barbados Dollar
#[serde(rename = "BBD")]
Bbd,
/// Belarussian Ruble
#[serde(rename = "BYB")]
Byb,
/// Belgian Franc
#[serde(rename = "BEF")]
Bef,
/// Belize Dollar
#[serde(rename = "BZD")]
Bzd,
/// Bermudian Dollar
#[serde(rename = "BMD")]
Bmd,
/// Bolivar
#[serde(rename = "VEB")]
Veb,
/// Boliviano
#[serde(rename = "BOB")]
Bob,
/// Brazilian Real
#[serde(rename = "BRL")]
Brl,
/// Brunei Dollar
#[serde(rename = "BND")]
Bnd,
/// Burundi Franc
#[serde(rename = "BIF")]
Bif,
/// CFA Franc BCEAO+
#[serde(rename = "XOF")]
Xof,
/// CFA Franc BEAC#
#[serde(rename = "XAF")]
Xaf,
/// CFP Franc
#[serde(rename = "XPF")]
Xpf,
/// Canadian Dollar
#[serde(rename = "CAD")]
Cad,
/// Cape Verde Escudo
#[serde(rename = "CVE")]
Cve,
/// Cayman Islands Dollar
#[serde(rename = "KYD")]
Kyd,
/// Cedi
#[serde(rename = "GHC")]
Ghc,
/// Chilean Peso
#[serde(rename = "CLP")]
Clp,
/// Colombian Peso
#[serde(rename = "COP")]
Cop,
/// Comoro Franc
#[serde(rename = "KMF")]
Kmf,
/// Convertible Marks
#[serde(rename = "BAM")]
Bam,
/// Cordoba Oro
#[serde(rename = "NIO")]
Nio,
/// Costa Rican Colon
#[serde(rename = "CRC")]
Crc,
/// Cuban Peso
#[serde(rename = "CUP")]
Cup,
/// Cyprus Pound
#[serde(rename = "CYP")]
Cyp,
/// Czech Koruna
#[serde(rename = "CZK")]
Czk,
/// Dalasi
#[serde(rename = "GMD")]
Gmd,
/// Danish Krone
#[serde(rename = "DKK")]
Dkk,
/// Denar
#[serde(rename = "MKD")]
Mkd,
/// Deutsche Mark
#[serde(rename = "DEM")]
Dem,
/// Djibouti Franc
#[serde(rename = "DJF")]
Djf,
/// Dobra
#[serde(rename = "STD")]
Std,
/// Dominican Peso
#[serde(rename = "DOP")]
Dop,
/// Dong
#[serde(rename = "VND")]
Vnd,
/// Drachma
#[serde(rename = "GRD")]
Grd,
/// East Caribbean Dollar
#[serde(rename = "XCD")]
Xcd,
/// Egyptian Pound
#[serde(rename = "EGP")]
Egp,
/// El Salvador Colon
#[serde(rename = "SVC")]
Svc,
/// Ethiopian Birr
#[serde(rename = "ETB")]
Etb,
/// Euro
#[serde(rename = "EUR")]
Eur,
/// Falkland Islands Pound
#[serde(rename = "FKP")]
Fkp,
/// Fiji Dollar
#[serde(rename = "FJD")]
Fjd,
/// Forint
#[serde(rename = "HUF")]
Huf,
/// Franc Congolais
#[serde(rename = "CDF")]
Cdf,
/// French Franc
#[serde(rename = "FRF")]
Frf,
/// Gibraltar Pound
#[serde(rename = "GIP")]
Gip,
/// Gourde
#[serde(rename = "HTG")]
Htg,
/// Guarani
#[serde(rename = "PYG")]
Pyg,
/// Guinea Franc
#[serde(rename = "GNF")]
Gnf,
/// Guinea-Bissau Peso
#[serde(rename = "GWP")]
Gwp,
/// Guyana Dollar
#[serde(rename = "GYD")]
Gyd,
/// Hong Kong Dollar
#[serde(rename = "HKD")]
Hkd,
/// Hryvnia
#[serde(rename = "UAH")]
Uah,
/// Iceland Krona
#[serde(rename = "ISK")]
Isk,
/// Indian Rupee
#[serde(rename = "INR")]
Inr,
/// Iranian Rial
#[serde(rename = "IRR")]
Irr,
/// Iraqi Dinar
#[serde(rename = "IQD")]
Iqd,
/// Irish Pound
#[serde(rename = "IEP")]
Iep,
/// Italian Lira
#[serde(rename = "ITL")]
Itl,
/// Jamaican Dollar
#[serde(rename = "JMD")]
Jmd,
/// Jordanian Dinar
#[serde(rename = "JOD")]
Jod,
/// Kenyan Shilling
#[serde(rename = "KES")]
Kes,
/// Kina
#[serde(rename = "PGK")]
Pgk,
/// Kip
#[serde(rename = "LAK")]
Lak,
/// Kroon
#[serde(rename = "EEK")]
Eek,
/// Kuna
#[serde(rename = "HRK")]
Hrk,
/// Kuwaiti Dinar
#[serde(rename = "KWD")]
Kwd,
/// Kwacha
#[serde(rename = "MWK")]
Mwk,
/// Kwacha
#[serde(rename = "ZMK")]
Zmk,
/// Kwanza Reajustado
#[serde(rename = "AOR")]
Aor,
/// Kyat
#[serde(rename = "MMK")]
Mmk,
/// Lari
#[serde(rename = "GEL")]
Gel,
/// Latvian Lats
#[serde(rename = "LVL")]
Lvl,
/// Lebanese Pound
#[serde(rename = "LBP")]
Lbp,
/// Lek
#[serde(rename = "ALL")]
All,
/// Lempira
#[serde(rename = "HNL")]
Hnl,
/// Leone
#[serde(rename = "SLL")]
Sll,
/// Leu
#[serde(rename = "ROL")]
Rol,
/// Lev
#[serde(rename = "BGL")]
Bgl,
/// Liberian Dollar
#[serde(rename = "LRD")]
Lrd,
/// Libyan Dinar
#[serde(rename = "LYD")]
Lyd,
/// Lilangeni
#[serde(rename = "SZL")]
Szl,
/// Lithuanian Litas
#[serde(rename = "LTL")]
Ltl,
/// Loti
#[serde(rename = "LSL")]
Lsl,
/// Luxembourg Franc
#[serde(rename = "LUF")]
Luf,
/// Malagasy Franc
#[serde(rename = "MGF")]
Mgf,
/// Malaysian Ringgit
#[serde(rename = "MYR")]
Myr,
/// Maltese Lira
#[serde(rename = "MTL")]
Mtl,
/// Manat
#[serde(rename = "TMM")]
Tmm,
/// Markka
#[serde(rename = "FIM")]
Fim,
/// Mauritius Rupee
#[serde(rename = "MUR")]
Mur,
/// Metical
#[serde(rename = "MZM")]
Mzm,
/// Mexican Peso
#[serde(rename = "MXN")]
Mxn,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "MXV")]
Mxv,
/// Moldovan Leu
#[serde(rename = "MDL")]
Mdl,
/// Moroccan Dirham
#[serde(rename = "MAD")]
Mad,
/// Mvdol
#[serde(rename = "BOV")]
Bov,
/// Naira
#[serde(rename = "NGN")]
Ngn,
/// Nakfa
#[serde(rename = "ERN")]
Ern,
/// Namibia Dollar
#[serde(rename = "NAD")]
Nad,
/// Nepalese Rupee
#[serde(rename = "NPR")]
Npr,
/// Netherlands Antillian Guilder
#[serde(rename = "ANG")]
Ang,
/// Netherlands Guilder
#[serde(rename = "NLG")]
Nlg,
/// New Dinar
#[serde(rename = "YUM")]
Yum,
/// New Israeli Sheqel
#[serde(rename = "ILS")]
Ils,
/// New Kwanza
#[serde(rename = "AON")]
Aon,
/// New Taiwan Dollar
#[serde(rename = "TWD")]
Twd,
/// New Zaire
#[serde(rename = "ZRN")]
Zrn,
/// New Zealand Dollar
#[serde(rename = "NZD")]
Nzd,
/// Next day
#[serde(rename = "USN")]
Usn,
/// Ngultrum
#[serde(rename = "BTN")]
Btn,
/// North Korean Won
#[serde(rename = "KPW")]
Kpw,
/// Norwegian Krone
#[serde(rename = "NOK")]
Nok,
/// Nuevo Sol
#[serde(rename = "PEN")]
Pen,
/// Ouguiya
#[serde(rename = "MRO")]
Mro,
/// Pa'anga
#[serde(rename = "TOP")]
Top,
/// Pakistan Rupee
#[serde(rename = "PKR")]
Pkr,
/// Pataca
#[serde(rename = "MOP")]
Mop,
/// Peso Uruguayo
#[serde(rename = "UYU")]
Uyu,
/// Philippine Peso
#[serde(rename = "PHP")]
Php,
/// Portuguese Escudo
#[serde(rename = "PTE")]
Pte,
/// Pound Sterling
#[serde(rename = "GBP")]
Gbp,
/// Pula
#[serde(rename = "BWP")]
Bwp,
/// Qatari Rial
#[serde(rename = "QAR")]
Qar,
/// Quetzal
#[serde(rename = "GTQ")]
Gtq,
/// Rand
#[serde(rename = "ZAR")]
Zar,
/// Rial Omani
#[serde(rename = "OMR")]
Omr,
/// Riel
#[serde(rename = "KHR")]
Khr,
/// Rufiyaa
#[serde(rename = "MVR")]
Mvr,
/// Rupiah
#[serde(rename = "IDR")]
Idr,
/// Russian Ruble
#[serde(rename = "RUB")]
Rub,
/// Russian Ruble
#[serde(rename = "RUR")]
Rur,
/// Rwanda Franc
#[serde(rename = "RWF")]
Rwf,
/// SDR
#[serde(rename = "XDR")]
Xdr,
/// Same day
#[serde(rename = "USS")]
Uss,
/// Saudi Riyal
#[serde(rename = "SAR")]
Sar,
/// Schilling
#[serde(rename = "ATS")]
Ats,
/// Seychelles Rupee
#[serde(rename = "SCR")]
Scr,
/// Singapore Dollar
#[serde(rename = "SGD")]
Sgd,
/// Slovak Koruna
#[serde(rename = "SKK")]
Skk,
/// Solomon Islands Dollar
#[serde(rename = "SBD")]
Sbd,
/// Som
#[serde(rename = "KGS")]
Kgs,
/// Somali Shilling
#[serde(rename = "SOS")]
Sos,
/// Spanish Peseta
#[serde(rename = "ESP")]
Esp,
/// Sri Lanka Rupee
#[serde(rename = "LKR")]
Lkr,
/// St Helena Pound
#[serde(rename = "SHP")]
Shp,
/// Sucre
#[serde(rename = "ECS")]
Ecs,
/// Sudanese Dinar
#[serde(rename = "SDD")]
Sdd,
/// Surinam Guilder
#[serde(rename = "SRG")]
Srg,
/// Swedish Krona
#[serde(rename = "SEK")]
Sek,
/// Swiss Franc
#[serde(rename = "CHF")]
Chf,
/// Syrian Pound
#[serde(rename = "SYP")]
Syp,
/// Tajik Ruble
#[serde(rename = "TJR")]
Tjr,
/// Taka
#[serde(rename = "BDT")]
Bdt,
/// Tala
#[serde(rename = "WST")]
Wst,
/// Tanzanian Shilling
#[serde(rename = "TZS")]
Tzs,
/// Tenge
#[serde(rename = "KZT")]
Kzt,
/// Timor Escudo
#[serde(rename = "TPE")]
Tpe,
/// Tolar
#[serde(rename = "SIT")]
Sit,
/// Trinidad and Tobago Dollar
#[serde(rename = "TTD")]
Ttd,
/// Tugrik
#[serde(rename = "MNT")]
Mnt,
/// Tunisian Dinar
#[serde(rename = "TND")]
Tnd,
/// Turkish Lira
#[serde(rename = "TRL")]
Trl,
/// UAE Dirham
#[serde(rename = "AED")]
Aed,
/// US Dollar
#[serde(rename = "USD")]
Usd,
/// Uganda Shilling
#[serde(rename = "UGX")]
Ugx,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "ECV")]
Ecv,
/// Unidades de fomento
#[serde(rename = "CLF")]
Clf,
/// Uzbekistan Sum
#[serde(rename = "UZS")]
Uzs,
/// Vatu
#[serde(rename = "VUV")]
Vuv,
/// Won
#[serde(rename = "KRW")]
Krw,
/// Yemeni Rial
#[serde(rename = "YER")]
Yer,
/// Yen
#[serde(rename = "JPY")]
Jpy,
/// Yuan Renminbi
#[serde(rename = "CNY")]
Cny,
/// Zimbabwe Dollar
#[serde(rename = "ZWD")]
Zwd,
/// Zloty
#[serde(rename = "PLN")]
Pln,
    /// Financial Rand
#[serde(rename = "ZAL")]
Zal,
/// Afghani
#[serde(rename = "004")]
N004,
/// Algerian Dinar
#[serde(rename = "01")]
N01,
/// Andorran Peseta
#[serde(rename = "020")]
N020,
/// Argentine Peso
#[serde(rename = "032")]
N032,
/// Armenian Dram
#[serde(rename = "051")]
N051,
/// Aruban Guilder
#[serde(rename = "533")]
N533,
/// Australian Dollar
#[serde(rename = "036")]
N036,
/// Azerbaijanian Manat
#[serde(rename = "031")]
N031,
/// Bahamian Dollar
#[serde(rename = "044")]
N044,
/// Bahraini Dinar
#[serde(rename = "048")]
N048,
/// Baht
#[serde(rename = "764")]
N764,
/// Balboa
#[serde(rename = "590")]
N590,
/// Barbados Dollar
#[serde(rename = "052")]
N052,
/// Belarussian Ruble
#[serde(rename = "112")]
N112,
/// Belgian Franc
#[serde(rename = "056")]
N056,
/// Belize Dollar
#[serde(rename = "084")]
N084,
/// Bermudian Dollar
#[serde(rename = "060")]
N060,
/// Bolivar
#[serde(rename = "862")]
N862,
/// Boliviano
#[serde(rename = "068")]
N068,
/// Brazilian Real
#[serde(rename = "986")]
N986,
/// Brunei Dollar
#[serde(rename = "096")]
N096,
/// Burundi Franc
#[serde(rename = "108")]
N108,
/// CFA Franc BCEAO+
#[serde(rename = "952")]
N952,
/// CFA Franc BEAC#
#[serde(rename = "950")]
N950,
/// CFP Franc
#[serde(rename = "953")]
N953,
/// Canadian Dollar
#[serde(rename = "124")]
N124,
/// Cape Verde Escudo
#[serde(rename = "132")]
N132,
/// Cayman Islands Dollar
#[serde(rename = "136")]
N136,
/// Cedi
#[serde(rename = "288")]
N288,
/// Chilean Peso
#[serde(rename = "152")]
N152,
/// Colombian Peso
#[serde(rename = "170")]
N170,
/// Comoro Franc
#[serde(rename = "174")]
N174,
/// Convertible Marks
#[serde(rename = "977")]
N977,
/// Cordoba Oro
#[serde(rename = "558")]
N558,
/// Costa Rican Colon
#[serde(rename = "188")]
N188,
/// Cuban Peso
#[serde(rename = "192")]
N192,
/// Cyprus Pound
#[serde(rename = "196")]
N196,
/// Czech Koruna
#[serde(rename = "203")]
N203,
/// Dalasi
#[serde(rename = "270")]
N270,
/// Danish Krone
#[serde(rename = "208")]
N208,
/// Denar
#[serde(rename = "807")]
N807,
/// Deutsche Mark
#[serde(rename = "280")]
N280,
/// Djibouti Franc
#[serde(rename = "262")]
N262,
/// Dobra
#[serde(rename = "678")]
N678,
/// Dominican Peso
#[serde(rename = "214")]
N214,
/// Dong
#[serde(rename = "704")]
N704,
/// Drachma
#[serde(rename = "300")]
N300,
/// East Caribbean Dollar
#[serde(rename = "951")]
N951,
/// Egyptian Pound
#[serde(rename = "818")]
N818,
/// El Salvador Colon
#[serde(rename = "222")]
N222,
/// Ethiopian Birr
#[serde(rename = "230")]
N230,
/// Euro
#[serde(rename = "978")]
N978,
/// Falkland Islands Pound
#[serde(rename = "238")]
N238,
/// Fiji Dollar
#[serde(rename = "242")]
N242,
/// Forint
#[serde(rename = "348")]
N348,
/// Franc Congolais
#[serde(rename = "976")]
N976,
/// French Franc
#[serde(rename = "250")]
N250,
/// Gibraltar Pound
#[serde(rename = "292")]
N292,
/// Gourde
#[serde(rename = "332")]
N332,
/// Guarani
#[serde(rename = "600")]
N600,
/// Guinea Franc
#[serde(rename = "324")]
N324,
/// Guinea-Bissau Peso
#[serde(rename = "624")]
N624,
/// Guyana Dollar
#[serde(rename = "328")]
N328,
/// Hong Kong Dollar
#[serde(rename = "344")]
N344,
/// Hryvnia
#[serde(rename = "980")]
N980,
/// Iceland Krona
#[serde(rename = "352")]
N352,
/// Indian Rupee
#[serde(rename = "356")]
N356,
/// Iranian Rial
#[serde(rename = "364")]
N364,
/// Iraqi Dinar
#[serde(rename = "368")]
N368,
/// Irish Pound
#[serde(rename = "372")]
N372,
/// Italian Lira
#[serde(rename = "380")]
N380,
/// Jamaican Dollar
#[serde(rename = "388")]
N388,
/// Jordanian Dinar
#[serde(rename = "400")]
N400,
/// Kenyan Shilling
#[serde(rename = "404")]
N404,
/// Kina
#[serde(rename = "598")]
N598,
/// Kip
#[serde(rename = "418")]
N418,
/// Kroon
#[serde(rename = "233")]
N233,
/// Kuna
#[serde(rename = "191")]
N191,
/// Kuwaiti Dinar
#[serde(rename = "414")]
N414,
/// Kwacha
#[serde(rename = "454")]
N454,
/// Kwacha
#[serde(rename = "894")]
N894,
/// Kwanza Reajustado
#[serde(rename = "982")]
N982,
/// Kyat
#[serde(rename = "104")]
N104,
/// Lari
#[serde(rename = "981")]
N981,
/// Latvian Lats
#[serde(rename = "428")]
N428,
/// Lebanese Pound
#[serde(rename = "422")]
N422,
/// Lek
#[serde(rename = "008")]
N008,
/// Lempira
#[serde(rename = "340")]
N340,
/// Leone
#[serde(rename = "694")]
N694,
/// Leu
#[serde(rename = "642")]
N642,
/// Lev
#[serde(rename = "100")]
N100,
/// Liberian Dollar
#[serde(rename = "430")]
N430,
/// Libyan Dinar
#[serde(rename = "434")]
N434,
/// Lilangeni
#[serde(rename = "748")]
N748,
/// Lithuanian Litas
#[serde(rename = "440")]
N440,
/// Loti
#[serde(rename = "426")]
N426,
/// Luxembourg Franc
#[serde(rename = "442")]
N442,
/// Malagasy Franc
#[serde(rename = "450")]
N450,
/// Malaysian Ringgit
#[serde(rename = "458")]
N458,
/// Maltese Lira
#[serde(rename = "470")]
N470,
/// Manat
#[serde(rename = "795")]
N795,
/// Markka
#[serde(rename = "246")]
N246,
/// Mauritius Rupee
#[serde(rename = "480")]
N480,
/// Metical
#[serde(rename = "508")]
N508,
/// Mexican Peso
#[serde(rename = "484")]
N484,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "979")]
N979,
/// Moldovan Leu
#[serde(rename = "498")]
N498,
/// Moroccan Dirham
#[serde(rename = "504")]
N504,
/// Mvdol
#[serde(rename = "984")]
N984,
/// Naira
#[serde(rename = "566")]
N566,
/// Nakfa
#[serde(rename = "232")]
N232,
/// Namibia Dollar
#[serde(rename = "516")]
N516,
/// Nepalese Rupee
#[serde(rename = "524")]
N524,
/// Netherlands Antillian Guilder
#[serde(rename = "532")]
N532,
/// Netherlands Guilder
#[serde(rename = "528")]
N528,
/// New Dinar
#[serde(rename = "891")]
N891,
/// New Israeli Sheqel
#[serde(rename = "376")]
N376,
/// New Kwanza
#[serde(rename = "02")]
N02,
/// New Taiwan Dollar
#[serde(rename = "901")]
N901,
/// New Zaire
#[serde(rename = "180")]
N180,
/// New Zealand Dollar
#[serde(rename = "554")]
N554,
/// Next day
#[serde(rename = "997")]
N997,
/// Ngultrum
#[serde(rename = "064")]
N064,
/// North Korean Won
#[serde(rename = "408")]
N408,
/// Norwegian Krone
#[serde(rename = "578")]
N578,
/// Nuevo Sol
#[serde(rename = "604")]
N604,
/// Ouguiya
#[serde(rename = "478")]
N478,
/// Pa'anga
#[serde(rename = "776")]
N776,
/// Pakistan Rupee
#[serde(rename = "586")]
N586,
/// Pataca
#[serde(rename = "446")]
N446,
/// Peso Uruguayo
#[serde(rename = "858")]
N858,
/// Philippine Peso
#[serde(rename = "608")]
N608,
/// Portuguese Escudo
#[serde(rename = "620")]
N620,
/// Pound Sterling
#[serde(rename = "826")]
N826,
/// Pula
#[serde(rename = "072")]
N072,
/// Qatari Rial
#[serde(rename = "634")]
N634,
/// Quetzal
#[serde(rename = "320")]
N320,
/// Rand
#[serde(rename = "710")]
N710,
/// Rial Omani
#[serde(rename = "512")]
N512,
/// Riel
#[serde(rename = "116")]
N116,
/// Rufiyaa
#[serde(rename = "462")]
N462,
/// Rupiah
#[serde(rename = "360")]
N360,
/// Russian Ruble
#[serde(rename = "643")]
N643,
/// Russian Ruble
#[serde(rename = "810")]
N810,
/// Rwanda Franc
#[serde(rename = "646")]
N646,
/// SDR
#[serde(rename = "960")]
N960,
/// Same day
#[serde(rename = "998")]
N998,
/// Saudi Riyal
#[serde(rename = "682")]
N682,
/// Schilling
#[serde(rename = "040")]
N040,
/// Seychelles Rupee
#[serde(rename = "690")]
N690,
/// Singapore Dollar
#[serde(rename = "702")]
N702,
/// Slovak Koruna
#[serde(rename = "703")]
N703,
/// Solomon Islands Dollar
#[serde(rename = "090")]
N090,
/// Som
#[serde(rename = "417")]
N417,
/// Somali Shilling
#[serde(rename = "706")]
N706,
/// Spanish Peseta
#[serde(rename = "724")]
N724,
/// Sri Lanka Rupee
#[serde(rename = "144")]
N144,
/// St Helena Pound
#[serde(rename = "654")]
N654,
/// Sucre
#[serde(rename = "218")]
N218,
/// Sudanese Dinar
#[serde(rename = "736")]
N736,
/// Surinam Guilder
#[serde(rename = "740")]
N740,
/// Swedish Krona
#[serde(rename = "752")]
N752,
/// Swiss Franc
#[serde(rename = "756")]
N756,
/// Syrian Pound
#[serde(rename = "760")]
N760,
/// Tajik Ruble
#[serde(rename = "762")]
N762,
/// Taka
#[serde(rename = "050")]
N050,
/// Tala
#[serde(rename = "882")]
N882,
/// Tanzanian Shilling
#[serde(rename = "834")]
N834,
/// Tenge
#[serde(rename = "398")]
N398,
/// Timor Escudo
#[serde(rename = "626")]
N626,
/// Tolar
#[serde(rename = "705")]
N705,
/// Trinidad and Tobago Dollar
#[serde(rename = "780")]
N780,
/// Tugrik
#[serde(rename = "496")]
N496,
/// Tunisian Dinar
#[serde(rename = "788")]
N788,
/// Turkish Lira
#[serde(rename = "792")]
N792,
/// UAE Dirham
#[serde(rename = "784")]
N784,
/// US Dollar
#[serde(rename = "840")]
N840,
/// Uganda Shilling
#[serde(rename = "800")]
N800,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "983")]
N983,
/// Unidades de fomento
#[serde(rename = "990")]
N990,
/// Uzbekistan Sum
#[serde(rename = "860")]
N860,
/// Vatu
#[serde(rename = "548")]
N548,
/// Won
#[serde(rename = "410")]
N410,
/// Yemeni Rial
#[serde(rename = "886")]
N886,
/// Yen
#[serde(rename = "392")]
N392,
/// Yuan Renminbi
#[serde(rename = "156")]
N156,
/// Zimbabwe Dollar
#[serde(rename = "716")]
N716,
/// Zloty
#[serde(rename = "985")]
N985,
    /// Financial Rand
#[serde(rename = "991")]
N991,
/// Gold
#[serde(rename = "XAU")]
Xau,
/// European Composite Unit (EURCO)
#[serde(rename = "XBA")]
Xba,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "XBB")]
Xbb,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "XBC")]
Xbc,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "XBD")]
Xbd,
/// Palladium
#[serde(rename = "XPD")]
Xpd,
/// Platinum
#[serde(rename = "XPT")]
Xpt,
/// Silver
#[serde(rename = "XAG")]
Xag,
/// UIC-Franc
#[serde(rename = "XFU")]
Xfu,
/// Gold-Franc
#[serde(rename = "XFO")]
Xfo,
/// Codes specifically reserved for testing purposes
#[serde(rename = "XTS")]
Xts,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "XXX")]
Xxx,
/// Gold
#[serde(rename = "959")]
N959,
/// European Composite Unit (EURCO)
#[serde(rename = "955")]
N955,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "956")]
N956,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "957")]
N957,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "958")]
N958,
/// Palladium
#[serde(rename = "964")]
N964,
/// Platinum
#[serde(rename = "962")]
N962,
/// Silver
#[serde(rename = "961")]
N961,
/// Codes specifically reserved for testing purposes
#[serde(rename = "963")]
N963,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "999")]
N999,
}
impl Default for SettlCurrency {
fn default() -> Self {
SettlCurrency::Afa
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum BookingType {
/// Regular booking
#[serde(rename = "0")]
RegularBooking,
/// CFD (Contract for difference)
#[serde(rename = "1")]
Cfd,
/// Total Return Swap
#[serde(rename = "2")]
TotalReturnSwap,
}
impl Default for BookingType {
fn default() -> Self {
BookingType::RegularBooking
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum PositionEffect {
/// Close
#[serde(rename = "C")]
Close,
/// FIFO
#[serde(rename = "F")]
Fifo,
/// Open
#[serde(rename = "O")]
Open,
/// Rolled
#[serde(rename = "R")]
Rolled,
}
impl Default for PositionEffect {
fn default() -> Self {
PositionEffect::Close
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum CoveredOrUncovered {
/// Covered
#[serde(rename = "0")]
Covered,
/// Uncovered
#[serde(rename = "1")]
Uncovered,
}
impl Default for CoveredOrUncovered {
fn default() -> Self {
CoveredOrUncovered::Covered
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum LocateReqd {
/// Indicates the broker is responsible for locating the stock
#[serde(rename = "Y")]
IndicatesTheBrokerIsResponsibleForLocatingTheStock,
/// Indicates the broker is not required to locate
#[serde(rename = "N")]
IndicatesTheBrokerIsNotRequiredToLocate,
}
impl Default for LocateReqd {
fn default() -> Self {
LocateReqd::IndicatesTheBrokerIsResponsibleForLocatingTheStock
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum CancellationRights {
/// Yes
#[serde(rename = "Y")]
Yes,
/// No - execution only
#[serde(rename = "N")]
NoExecutionOnly,
/// No - waiver agreement
#[serde(rename = "M")]
NoWaiverAgreement,
/// No - institutional
#[serde(rename = "O")]
NoInstitutional,
}
impl Default for CancellationRights {
fn default() -> Self {
CancellationRights::Yes
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum MoneyLaunderingStatus {
/// Passed
#[serde(rename = "Y")]
Passed,
/// Not Checked
#[serde(rename = "N")]
NotChecked,
/// Exempt - Below The Limit
#[serde(rename = "1")]
ExemptBelowTheLimit,
/// Exempt - Client Money Type Exemption
#[serde(rename = "2")]
ExemptClientMoneyTypeExemption,
/// Exempt - Authorised Credit or Financial Institution
#[serde(rename = "3")]
ExemptAuthorisedCreditOrFinancialInstitution,
}
impl Default for MoneyLaunderingStatus {
fn default() -> Self {
MoneyLaunderingStatus::Passed
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum CustOrderHandlingInst {
/// Add-on Order
#[serde(rename = "ADD")]
AddOnOrder,
/// All or None
#[serde(rename = "AON")]
AllOrNone,
/// Cash Not Held
#[serde(rename = "CNH")]
CashNotHeld,
/// Directed Order
#[serde(rename = "DIR")]
DirectedOrder,
/// Exchange for Physical Transaction
#[serde(rename = "E.W")]
ExchangeForPhysicalTransaction,
/// Fill or Kill
#[serde(rename = "FOK")]
FillOrKill,
/// Imbalance Only
#[serde(rename = "IO")]
ImbalanceOnly,
/// Immediate or Cancel
#[serde(rename = "IOC")]
ImmediateOrCancel,
/// Limit On Open
#[serde(rename = "LOO")]
LimitOnOpen,
/// Limit on Close
#[serde(rename = "LOC")]
LimitOnClose,
/// Market at Open
#[serde(rename = "MAO")]
MarketAtOpen,
/// Market at Close
#[serde(rename = "MAC")]
MarketAtClose,
/// Market on Open
#[serde(rename = "MOO")]
MarketOnOpen,
/// Market On Close
#[serde(rename = "MOC")]
MarketOnClose,
/// Minimum Quantity
#[serde(rename = "MQT")]
MinimumQuantity,
/// Not Held
#[serde(rename = "NH")]
NotHeld,
/// Over the Day
#[serde(rename = "OVD")]
OverTheDay,
/// Pegged
#[serde(rename = "PEG")]
Pegged,
/// Reserve Size Order
#[serde(rename = "RSV")]
ReserveSizeOrder,
/// Stop Stock Transaction
#[serde(rename = "S.W")]
StopStockTransaction,
/// Scale
#[serde(rename = "SCL")]
Scale,
/// Time Order
#[serde(rename = "TMO")]
TimeOrder,
/// Trailing Stop
#[serde(rename = "TS")]
TrailingStop,
/// Work
#[serde(rename = "WRK")]
Work,
}
impl Default for CustOrderHandlingInst {
fn default() -> Self {
CustOrderHandlingInst::AddOnOrder
}
}
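// Illustrative only (serde_json assumed, not part of the generated code): unlike
// the single-character enums above, CustOrderHandlingInst uses multi-character
// codes such as "FOK" and "E.W", so deserialization matches on the raw code.
#[cfg(test)]
mod cust_order_handling_inst_example {
    use super::CustOrderHandlingInst;

    #[test]
    fn parses_multi_character_codes() {
        let fok: CustOrderHandlingInst = serde_json::from_str("\"FOK\"").unwrap();
        assert_eq!(fok, CustOrderHandlingInst::FillOrKill);
        let efp: CustOrderHandlingInst = serde_json::from_str("\"E.W\"").unwrap();
        assert_eq!(efp, CustOrderHandlingInst::ExchangeForPhysicalTransaction);
    }
}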
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum OrderHandlingInstSource {
/// NASD OATS
#[serde(rename = "1")]
NasdOats,
}
impl Default for OrderHandlingInstSource {
fn default() -> Self {
OrderHandlingInstSource::NasdOats
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum AllocAcctIDSource {
/// BIC
#[serde(rename = "1")]
Bic,
/// SID code
#[serde(rename = "2")]
SidCode,
/// TFM (GSPTA)
#[serde(rename = "3")]
Tfm,
/// OMGEO (AlertID)
#[serde(rename = "4")]
Omgeo,
/// DTCC code
#[serde(rename = "5")]
DtccCode,
/// Other (custom or proprietary)
#[serde(rename = "99")]
Other,
}
impl Default for AllocAcctIDSource {
fn default() -> Self {
AllocAcctIDSource::Bic
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum AllocSettlCurrency {
/// Afghani
#[serde(rename = "AFA")]
Afa,
/// Algerian Dinar
#[serde(rename = "DZD")]
Dzd,
/// Andorran Peseta
#[serde(rename = "ADP")]
Adp,
/// Argentine Peso
#[serde(rename = "ARS")]
Ars,
/// Armenian Dram
#[serde(rename = "AMD")]
Amd,
/// Aruban Guilder
#[serde(rename = "AWG")]
Awg,
/// Australian Dollar
#[serde(rename = "AUD")]
Aud,
/// Azerbaijanian Manat
#[serde(rename = "AZM")]
Azm,
/// Bahamian Dollar
#[serde(rename = "BSD")]
Bsd,
/// Bahraini Dinar
#[serde(rename = "BHD")]
Bhd,
/// Baht
#[serde(rename = "THB")]
Thb,
/// Balboa
#[serde(rename = "PAB")]
Pab,
/// Barbados Dollar
#[serde(rename = "BBD")]
Bbd,
/// Belarussian Ruble
#[serde(rename = "BYB")]
Byb,
/// Belgian Franc
#[serde(rename = "BEF")]
Bef,
/// Belize Dollar
#[serde(rename = "BZD")]
Bzd,
/// Bermudian Dollar
#[serde(rename = "BMD")]
Bmd,
/// Bolivar
#[serde(rename = "VEB")]
Veb,
/// Boliviano
#[serde(rename = "BOB")]
Bob,
/// Brazilian Real
#[serde(rename = "BRL")]
Brl,
/// Brunei Dollar
#[serde(rename = "BND")]
Bnd,
/// Burundi Franc
#[serde(rename = "BIF")]
Bif,
/// CFA Franc BCEAO+
#[serde(rename = "XOF")]
Xof,
/// CFA Franc BEAC#
#[serde(rename = "XAF")]
Xaf,
/// CFP Franc
#[serde(rename = "XPF")]
Xpf,
/// Canadian Dollar
#[serde(rename = "CAD")]
Cad,
/// Cape Verde Escudo
#[serde(rename = "CVE")]
Cve,
/// Cayman Islands Dollar
#[serde(rename = "KYD")]
Kyd,
/// Cedi
#[serde(rename = "GHC")]
Ghc,
/// Chilean Peso
#[serde(rename = "CLP")]
Clp,
/// Colombian Peso
#[serde(rename = "COP")]
Cop,
/// Comoro Franc
#[serde(rename = "KMF")]
Kmf,
/// Convertible Marks
#[serde(rename = "BAM")]
Bam,
/// Cordoba Oro
#[serde(rename = "NIO")]
Nio,
/// Costa Rican Colon
#[serde(rename = "CRC")]
Crc,
/// Cuban Peso
#[serde(rename = "CUP")]
Cup,
/// Cyprus Pound
#[serde(rename = "CYP")]
Cyp,
/// Czech Koruna
#[serde(rename = "CZK")]
Czk,
/// Dalasi
#[serde(rename = "GMD")]
Gmd,
/// Danish Krone
#[serde(rename = "DKK")]
Dkk,
/// Denar
#[serde(rename = "MKD")]
Mkd,
/// Deutsche Mark
#[serde(rename = "DEM")]
Dem,
/// Djibouti Franc
#[serde(rename = "DJF")]
Djf,
/// Dobra
#[serde(rename = "STD")]
Std,
/// Dominican Peso
#[serde(rename = "DOP")]
Dop,
/// Dong
#[serde(rename = "VND")]
Vnd,
/// Drachma
#[serde(rename = "GRD")]
Grd,
/// East Caribbean Dollar
#[serde(rename = "XCD")]
Xcd,
/// Egyptian Pound
#[serde(rename = "EGP")]
Egp,
/// El Salvador Colon
#[serde(rename = "SVC")]
Svc,
/// Ethiopian Birr
#[serde(rename = "ETB")]
Etb,
/// Euro
#[serde(rename = "EUR")]
Eur,
/// Falkland Islands Pound
#[serde(rename = "FKP")]
Fkp,
/// Fiji Dollar
#[serde(rename = "FJD")]
Fjd,
/// Forint
#[serde(rename = "HUF")]
Huf,
/// Franc Congolais
#[serde(rename = "CDF")]
Cdf,
/// French Franc
#[serde(rename = "FRF")]
Frf,
/// Gibraltar Pound
#[serde(rename = "GIP")]
Gip,
/// Gourde
#[serde(rename = "HTG")]
Htg,
/// Guarani
#[serde(rename = "PYG")]
Pyg,
/// Guinea Franc
#[serde(rename = "GNF")]
Gnf,
/// Guinea-Bissau Peso
#[serde(rename = "GWP")]
Gwp,
/// Guyana Dollar
#[serde(rename = "GYD")]
Gyd,
/// Hong Kong Dollar
#[serde(rename = "HKD")]
Hkd,
/// Hryvnia
#[serde(rename = "UAH")]
Uah,
/// Iceland Krona
#[serde(rename = "ISK")]
Isk,
/// Indian Rupee
#[serde(rename = "INR")]
Inr,
/// Iranian Rial
#[serde(rename = "IRR")]
Irr,
/// Iraqi Dinar
#[serde(rename = "IQD")]
Iqd,
/// Irish Pound
#[serde(rename = "IEP")]
Iep,
/// Italian Lira
#[serde(rename = "ITL")]
Itl,
/// Jamaican Dollar
#[serde(rename = "JMD")]
Jmd,
/// Jordanian Dinar
#[serde(rename = "JOD")]
Jod,
/// Kenyan Shilling
#[serde(rename = "KES")]
Kes,
/// Kina
#[serde(rename = "PGK")]
Pgk,
/// Kip
#[serde(rename = "LAK")]
Lak,
/// Kroon
#[serde(rename = "EEK")]
Eek,
/// Kuna
#[serde(rename = "HRK")]
Hrk,
/// Kuwaiti Dinar
#[serde(rename = "KWD")]
Kwd,
/// Kwacha
#[serde(rename = "MWK")]
Mwk,
/// Kwacha
#[serde(rename = "ZMK")]
Zmk,
/// Kwanza Reajustado
#[serde(rename = "AOR")]
Aor,
/// Kyat
#[serde(rename = "MMK")]
Mmk,
/// Lari
#[serde(rename = "GEL")]
Gel,
/// Latvian Lats
#[serde(rename = "LVL")]
Lvl,
/// Lebanese Pound
#[serde(rename = "LBP")]
Lbp,
/// Lek
#[serde(rename = "ALL")]
All,
/// Lempira
#[serde(rename = "HNL")]
Hnl,
/// Leone
#[serde(rename = "SLL")]
Sll,
/// Leu
#[serde(rename = "ROL")]
Rol,
/// Lev
#[serde(rename = "BGL")]
Bgl,
/// Liberian Dollar
#[serde(rename = "LRD")]
Lrd,
/// Libyan Dinar
#[serde(rename = "LYD")]
Lyd,
/// Lilangeni
#[serde(rename = "SZL")]
Szl,
/// Lithuanian Litas
#[serde(rename = "LTL")]
Ltl,
/// Loti
#[serde(rename = "LSL")]
Lsl,
/// Luxembourg Franc
#[serde(rename = "LUF")]
Luf,
/// Malagasy Franc
#[serde(rename = "MGF")]
Mgf,
/// Malaysian Ringgit
#[serde(rename = "MYR")]
Myr,
/// Maltese Lira
#[serde(rename = "MTL")]
Mtl,
/// Manat
#[serde(rename = "TMM")]
Tmm,
/// Markka
#[serde(rename = "FIM")]
Fim,
/// Mauritius Rupee
#[serde(rename = "MUR")]
Mur,
/// Metical
#[serde(rename = "MZM")]
Mzm,
/// Mexican Peso
#[serde(rename = "MXN")]
Mxn,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "MXV")]
Mxv,
/// Moldovan Leu
#[serde(rename = "MDL")]
Mdl,
/// Moroccan Dirham
#[serde(rename = "MAD")]
Mad,
/// Mvdol
#[serde(rename = "BOV")]
Bov,
/// Naira
#[serde(rename = "NGN")]
Ngn,
/// Nakfa
#[serde(rename = "ERN")]
Ern,
/// Namibia Dollar
#[serde(rename = "NAD")]
Nad,
/// Nepalese Rupee
#[serde(rename = "NPR")]
Npr,
/// Netherlands Antillian Guilder
#[serde(rename = "ANG")]
Ang,
/// Netherlands Guilder
#[serde(rename = "NLG")]
Nlg,
/// New Dinar
#[serde(rename = "YUM")]
Yum,
/// New Israeli Sheqel
#[serde(rename = "ILS")]
Ils,
/// New Kwanza
#[serde(rename = "AON")]
Aon,
/// New Taiwan Dollar
#[serde(rename = "TWD")]
Twd,
/// New Zaire
#[serde(rename = "ZRN")]
Zrn,
/// New Zealand Dollar
#[serde(rename = "NZD")]
Nzd,
/// Next day
#[serde(rename = "USN")]
Usn,
/// Ngultrum
#[serde(rename = "BTN")]
Btn,
/// North Korean Won
#[serde(rename = "KPW")]
Kpw,
/// Norwegian Krone
#[serde(rename = "NOK")]
Nok,
/// Nuevo Sol
#[serde(rename = "PEN")]
Pen,
/// Ouguiya
#[serde(rename = "MRO")]
Mro,
/// Pa'anga
#[serde(rename = "TOP")]
Top,
/// Pakistan Rupee
#[serde(rename = "PKR")]
Pkr,
/// Pataca
#[serde(rename = "MOP")]
Mop,
/// Peso Uruguayo
#[serde(rename = "UYU")]
Uyu,
/// Philippine Peso
#[serde(rename = "PHP")]
Php,
/// Portuguese Escudo
#[serde(rename = "PTE")]
Pte,
/// Pound Sterling
#[serde(rename = "GBP")]
Gbp,
/// Pula
#[serde(rename = "BWP")]
Bwp,
/// Qatari Rial
#[serde(rename = "QAR")]
Qar,
/// Quetzal
#[serde(rename = "GTQ")]
Gtq,
/// Rand
#[serde(rename = "ZAR")]
Zar,
/// Rial Omani
#[serde(rename = "OMR")]
Omr,
/// Riel
#[serde(rename = "KHR")]
Khr,
/// Rufiyaa
#[serde(rename = "MVR")]
Mvr,
/// Rupiah
#[serde(rename = "IDR")]
Idr,
/// Russian Ruble
#[serde(rename = "RUB")]
Rub,
/// Russian Ruble
#[serde(rename = "RUR")]
Rur,
/// Rwanda Franc
#[serde(rename = "RWF")]
Rwf,
/// SDR
#[serde(rename = "XDR")]
Xdr,
/// Same day
#[serde(rename = "USS")]
Uss,
/// Saudi Riyal
#[serde(rename = "SAR")]
Sar,
/// Schilling
#[serde(rename = "ATS")]
Ats,
/// Seychelles Rupee
#[serde(rename = "SCR")]
Scr,
/// Singapore Dollar
#[serde(rename = "SGD")]
Sgd,
/// Slovak Koruna
#[serde(rename = "SKK")]
Skk,
/// Solomon Islands Dollar
#[serde(rename = "SBD")]
Sbd,
/// Som
#[serde(rename = "KGS")]
Kgs,
/// Somali Shilling
#[serde(rename = "SOS")]
Sos,
/// Spanish Peseta
#[serde(rename = "ESP")]
Esp,
/// Sri Lanka Rupee
#[serde(rename = "LKR")]
Lkr,
/// St Helena Pound
#[serde(rename = "SHP")]
Shp,
/// Sucre
#[serde(rename = "ECS")]
Ecs,
/// Sudanese Dinar
#[serde(rename = "SDD")]
Sdd,
/// Surinam Guilder
#[serde(rename = "SRG")]
Srg,
/// Swedish Krona
#[serde(rename = "SEK")]
Sek,
/// Swiss Franc
#[serde(rename = "CHF")]
Chf,
/// Syrian Pound
#[serde(rename = "SYP")]
Syp,
/// Tajik Ruble
#[serde(rename = "TJR")]
Tjr,
/// Taka
#[serde(rename = "BDT")]
Bdt,
/// Tala
#[serde(rename = "WST")]
Wst,
/// Tanzanian Shilling
#[serde(rename = "TZS")]
Tzs,
/// Tenge
#[serde(rename = "KZT")]
Kzt,
/// Timor Escudo
#[serde(rename = "TPE")]
Tpe,
/// Tolar
#[serde(rename = "SIT")]
Sit,
/// Trinidad and Tobago Dollar
#[serde(rename = "TTD")]
Ttd,
/// Tugrik
#[serde(rename = "MNT")]
Mnt,
/// Tunisian Dinar
#[serde(rename = "TND")]
Tnd,
/// Turkish Lira
#[serde(rename = "TRL")]
Trl,
/// UAE Dirham
#[serde(rename = "AED")]
Aed,
/// US Dollar
#[serde(rename = "USD")]
Usd,
/// Uganda Shilling
#[serde(rename = "UGX")]
Ugx,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "ECV")]
Ecv,
/// Unidades de fomento
#[serde(rename = "CLF")]
Clf,
/// Uzbekistan Sum
#[serde(rename = "UZS")]
Uzs,
/// Vatu
#[serde(rename = "VUV")]
Vuv,
/// Won
#[serde(rename = "KRW")]
Krw,
/// Yemeni Rial
#[serde(rename = "YER")]
Yer,
/// Yen
#[serde(rename = "JPY")]
Jpy,
/// Yuan Renminbi
#[serde(rename = "CNY")]
Cny,
/// Zimbabwe Dollar
#[serde(rename = "ZWD")]
Zwd,
/// Zloty
#[serde(rename = "PLN")]
Pln,
    /// Financial Rand
#[serde(rename = "ZAL")]
Zal,
/// Afghani
#[serde(rename = "004")]
N004,
/// Algerian Dinar
#[serde(rename = "01")]
N01,
/// Andorran Peseta
#[serde(rename = "020")]
N020,
/// Argentine Peso
#[serde(rename = "032")]
N032,
/// Armenian Dram
#[serde(rename = "051")]
N051,
/// Aruban Guilder
#[serde(rename = "533")]
N533,
/// Australian Dollar
#[serde(rename = "036")]
N036,
/// Azerbaijanian Manat
#[serde(rename = "031")]
N031,
/// Bahamian Dollar
#[serde(rename = "044")]
N044,
/// Bahraini Dinar
#[serde(rename = "048")]
N048,
/// Baht
#[serde(rename = "764")]
N764,
/// Balboa
#[serde(rename = "590")]
N590,
/// Barbados Dollar
#[serde(rename = "052")]
N052,
/// Belarussian Ruble
#[serde(rename = "112")]
N112,
/// Belgian Franc
#[serde(rename = "056")]
N056,
/// Belize Dollar
#[serde(rename = "084")]
N084,
/// Bermudian Dollar
#[serde(rename = "060")]
N060,
/// Bolivar
#[serde(rename = "862")]
N862,
/// Boliviano
#[serde(rename = "068")]
N068,
/// Brazilian Real
#[serde(rename = "986")]
N986,
/// Brunei Dollar
#[serde(rename = "096")]
N096,
/// Burundi Franc
#[serde(rename = "108")]
N108,
/// CFA Franc BCEAO+
#[serde(rename = "952")]
N952,
/// CFA Franc BEAC#
#[serde(rename = "950")]
N950,
/// CFP Franc
#[serde(rename = "953")]
N953,
/// Canadian Dollar
#[serde(rename = "124")]
N124,
/// Cape Verde Escudo
#[serde(rename = "132")]
N132,
/// Cayman Islands Dollar
#[serde(rename = "136")]
N136,
/// Cedi
#[serde(rename = "288")]
N288,
/// Chilean Peso
#[serde(rename = "152")]
N152,
/// Colombian Peso
#[serde(rename = "170")]
N170,
/// Comoro Franc
#[serde(rename = "174")]
N174,
/// Convertible Marks
#[serde(rename = "977")]
N977,
/// Cordoba Oro
#[serde(rename = "558")]
N558,
/// Costa Rican Colon
#[serde(rename = "188")]
N188,
/// Cuban Peso
#[serde(rename = "192")]
N192,
/// Cyprus Pound
#[serde(rename = "196")]
N196,
/// Czech Koruna
#[serde(rename = "203")]
N203,
/// Dalasi
#[serde(rename = "270")]
N270,
/// Danish Krone
#[serde(rename = "208")]
N208,
/// Denar
#[serde(rename = "807")]
N807,
/// Deutsche Mark
#[serde(rename = "280")]
N280,
/// Djibouti Franc
#[serde(rename = "262")]
N262,
/// Dobra
#[serde(rename = "678")]
N678,
/// Dominican Peso
#[serde(rename = "214")]
N214,
/// Dong
#[serde(rename = "704")]
N704,
/// Drachma
#[serde(rename = "300")]
N300,
/// East Caribbean Dollar
#[serde(rename = "951")]
N951,
/// Egyptian Pound
#[serde(rename = "818")]
N818,
/// El Salvador Colon
#[serde(rename = "222")]
N222,
/// Ethiopian Birr
#[serde(rename = "230")]
N230,
/// Euro
#[serde(rename = "978")]
N978,
/// Falkland Islands Pound
#[serde(rename = "238")]
N238,
/// Fiji Dollar
#[serde(rename = "242")]
N242,
/// Forint
#[serde(rename = "348")]
N348,
/// Franc Congolais
#[serde(rename = "976")]
N976,
/// French Franc
#[serde(rename = "250")]
N250,
/// Gibraltar Pound
#[serde(rename = "292")]
N292,
/// Gourde
#[serde(rename = "332")]
N332,
/// Guarani
#[serde(rename = "600")]
N600,
/// Guinea Franc
#[serde(rename = "324")]
N324,
/// Guinea-Bissau Peso
#[serde(rename = "624")]
N624,
/// Guyana Dollar
#[serde(rename = "328")]
N328,
/// Hong Kong Dollar
#[serde(rename = "344")]
N344,
/// Hryvnia
#[serde(rename = "980")]
N980,
/// Iceland Krona
#[serde(rename = "352")]
N352,
/// Indian Rupee
#[serde(rename = "356")]
N356,
/// Iranian Rial
#[serde(rename = "364")]
N364,
/// Iraqi Dinar
#[serde(rename = "368")]
N368,
/// Irish Pound
#[serde(rename = "372")]
N372,
/// Italian Lira
#[serde(rename = "380")]
N380,
/// Jamaican Dollar
#[serde(rename = "388")]
N388,
/// Jordanian Dinar
#[serde(rename = "400")]
N400,
/// Kenyan Shilling
#[serde(rename = "404")]
N404,
/// Kina
#[serde(rename = "598")]
N598,
/// Kip
#[serde(rename = "418")]
N418,
/// Kroon
#[serde(rename = "233")]
N233,
/// Kuna
#[serde(rename = "191")]
N191,
/// Kuwaiti Dinar
#[serde(rename = "414")]
N414,
/// Kwacha
#[serde(rename = "454")]
N454,
/// Kwacha
#[serde(rename = "894")]
N894,
/// Kwanza Reajustado
#[serde(rename = "982")]
N982,
/// Kyat
#[serde(rename = "104")]
N104,
/// Lari
#[serde(rename = "981")]
N981,
/// Latvian Lats
#[serde(rename = "428")]
N428,
/// Lebanese Pound
#[serde(rename = "422")]
N422,
/// Lek
#[serde(rename = "008")]
N008,
/// Lempira
#[serde(rename = "340")]
N340,
/// Leone
#[serde(rename = "694")]
N694,
/// Leu
#[serde(rename = "642")]
N642,
/// Lev
#[serde(rename = "100")]
N100,
/// Liberian Dollar
#[serde(rename = "430")]
N430,
/// Libyan Dinar
#[serde(rename = "434")]
N434,
/// Lilangeni
#[serde(rename = "748")]
N748,
/// Lithuanian Litas
#[serde(rename = "440")]
N440,
/// Loti
#[serde(rename = "426")]
N426,
/// Luxembourg Franc
#[serde(rename = "442")]
N442,
/// Malagasy Franc
#[serde(rename = "450")]
N450,
/// Malaysian Ringgit
#[serde(rename = "458")]
N458,
/// Maltese Lira
#[serde(rename = "470")]
N470,
/// Manat
#[serde(rename = "795")]
N795,
/// Markka
#[serde(rename = "246")]
N246,
/// Mauritius Rupee
#[serde(rename = "480")]
N480,
/// Metical
#[serde(rename = "508")]
N508,
/// Mexican Peso
#[serde(rename = "484")]
N484,
/// Mexican Unidad de Inversion (UDI)
#[serde(rename = "979")]
N979,
/// Moldovan Leu
#[serde(rename = "498")]
N498,
/// Moroccan Dirham
#[serde(rename = "504")]
N504,
/// Mvdol
#[serde(rename = "984")]
N984,
/// Naira
#[serde(rename = "566")]
N566,
/// Nakfa
#[serde(rename = "232")]
N232,
/// Namibia Dollar
#[serde(rename = "516")]
N516,
/// Nepalese Rupee
#[serde(rename = "524")]
N524,
/// Netherlands Antillian Guilder
#[serde(rename = "532")]
N532,
/// Netherlands Guilder
#[serde(rename = "528")]
N528,
/// New Dinar
#[serde(rename = "891")]
N891,
/// New Israeli Sheqel
#[serde(rename = "376")]
N376,
/// New Kwanza
#[serde(rename = "02")]
N02,
/// New Taiwan Dollar
#[serde(rename = "901")]
N901,
/// New Zaire
#[serde(rename = "180")]
N180,
/// New Zealand Dollar
#[serde(rename = "554")]
N554,
/// Next day
#[serde(rename = "997")]
N997,
/// Ngultrum
#[serde(rename = "064")]
N064,
/// North Korean Won
#[serde(rename = "408")]
N408,
/// Norwegian Krone
#[serde(rename = "578")]
N578,
/// Nuevo Sol
#[serde(rename = "604")]
N604,
/// Ouguiya
#[serde(rename = "478")]
N478,
/// Pa'anga
#[serde(rename = "776")]
N776,
/// Pakistan Rupee
#[serde(rename = "586")]
N586,
/// Pataca
#[serde(rename = "446")]
N446,
/// Peso Uruguayo
#[serde(rename = "858")]
N858,
/// Philippine Peso
#[serde(rename = "608")]
N608,
/// Portuguese Escudo
#[serde(rename = "620")]
N620,
/// Pound Sterling
#[serde(rename = "826")]
N826,
/// Pula
#[serde(rename = "072")]
N072,
/// Qatari Rial
#[serde(rename = "634")]
N634,
/// Quetzal
#[serde(rename = "320")]
N320,
/// Rand
#[serde(rename = "710")]
N710,
/// Rial Omani
#[serde(rename = "512")]
N512,
/// Riel
#[serde(rename = "116")]
N116,
/// Rufiyaa
#[serde(rename = "462")]
N462,
/// Rupiah
#[serde(rename = "360")]
N360,
/// Russian Ruble
#[serde(rename = "643")]
N643,
/// Russian Ruble
#[serde(rename = "810")]
N810,
/// Rwanda Franc
#[serde(rename = "646")]
N646,
/// SDR
#[serde(rename = "960")]
N960,
/// Same day
#[serde(rename = "998")]
N998,
/// Saudi Riyal
#[serde(rename = "682")]
N682,
/// Schilling
#[serde(rename = "040")]
N040,
/// Seychelles Rupee
#[serde(rename = "690")]
N690,
/// Singapore Dollar
#[serde(rename = "702")]
N702,
/// Slovak Koruna
#[serde(rename = "703")]
N703,
/// Solomon Islands Dollar
#[serde(rename = "090")]
N090,
/// Som
#[serde(rename = "417")]
N417,
/// Somali Shilling
#[serde(rename = "706")]
N706,
/// Spanish Peseta
#[serde(rename = "724")]
N724,
/// Sri Lanka Rupee
#[serde(rename = "144")]
N144,
/// St Helena Pound
#[serde(rename = "654")]
N654,
/// Sucre
#[serde(rename = "218")]
N218,
/// Sudanese Dinar
#[serde(rename = "736")]
N736,
/// Surinam Guilder
#[serde(rename = "740")]
N740,
/// Swedish Krona
#[serde(rename = "752")]
N752,
/// Swiss Franc
#[serde(rename = "756")]
N756,
/// Syrian Pound
#[serde(rename = "760")]
N760,
/// Tajik Ruble
#[serde(rename = "762")]
N762,
/// Taka
#[serde(rename = "050")]
N050,
/// Tala
#[serde(rename = "882")]
N882,
/// Tanzanian Shilling
#[serde(rename = "834")]
N834,
/// Tenge
#[serde(rename = "398")]
N398,
/// Timor Escudo
#[serde(rename = "626")]
N626,
/// Tolar
#[serde(rename = "705")]
N705,
/// Trinidad and Tobago Dollar
#[serde(rename = "780")]
N780,
/// Tugrik
#[serde(rename = "496")]
N496,
/// Tunisian Dinar
#[serde(rename = "788")]
N788,
/// Turkish Lira
#[serde(rename = "792")]
N792,
/// UAE Dirham
#[serde(rename = "784")]
N784,
/// US Dollar
#[serde(rename = "840")]
N840,
/// Uganda Shilling
#[serde(rename = "800")]
N800,
/// Unidad de Valor Constante (UVC)
#[serde(rename = "983")]
N983,
/// Unidades de fomento
#[serde(rename = "990")]
N990,
/// Uzbekistan Sum
#[serde(rename = "860")]
N860,
/// Vatu
#[serde(rename = "548")]
N548,
/// Won
#[serde(rename = "410")]
N410,
/// Yemeni Rial
#[serde(rename = "886")]
N886,
/// Yen
#[serde(rename = "392")]
N392,
/// Yuan Renminbi
#[serde(rename = "156")]
N156,
/// Zimbabwe Dollar
#[serde(rename = "716")]
N716,
/// Zloty
#[serde(rename = "985")]
N985,
    /// Financial Rand
#[serde(rename = "991")]
N991,
/// Gold
#[serde(rename = "XAU")]
Xau,
/// European Composite Unit (EURCO)
#[serde(rename = "XBA")]
Xba,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "XBB")]
Xbb,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "XBC")]
Xbc,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "XBD")]
Xbd,
/// Palladium
#[serde(rename = "XPD")]
Xpd,
/// Platinum
#[serde(rename = "XPT")]
Xpt,
/// Silver
#[serde(rename = "XAG")]
Xag,
/// UIC-Franc
#[serde(rename = "XFU")]
Xfu,
/// Gold-Franc
#[serde(rename = "XFO")]
Xfo,
/// Codes specifically reserved for testing purposes
#[serde(rename = "XTS")]
Xts,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "XXX")]
Xxx,
/// Gold
#[serde(rename = "959")]
N959,
/// European Composite Unit (EURCO)
#[serde(rename = "955")]
N955,
/// European Monetary Unit (E.M.U.-6)
#[serde(rename = "956")]
N956,
/// European Unit of Account 9 (E.U.A.- 9)
#[serde(rename = "957")]
N957,
/// European Unit of Account 17 (E.U.A.- 17)
#[serde(rename = "958")]
N958,
/// Palladium
#[serde(rename = "964")]
N964,
/// Platinum
#[serde(rename = "962")]
N962,
/// Silver
#[serde(rename = "961")]
N961,
/// Codes specifically reserved for testing purposes
#[serde(rename = "963")]
N963,
/// Codes assigned for transactions where no currency is involved
#[serde(rename = "999")]
N999,
}
impl Default for AllocSettlCurrency {
fn default() -> Self {
AllocSettlCurrency::Afa
}
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
pub enum StrategyParameterType {
/// Int
#[serde(rename = "1")]
Int,
/// Length
#[serde(rename = "2")]
Length,
/// NumInGroup
#[serde(rename = "3")]
NumInGroup,
/// SeqNum
#[serde(rename = "4")]
SeqNum,
/// TagNum
#[serde(rename = "5")]
TagNum,
/// Float
#[serde(rename = "6")]
Float,
/// Qty
#[serde(rename = "7")]
Qty,
/// Price
#[serde(rename = "8")]
Price,
/// PriceOffset
#[serde(rename = "9")]
PriceOffset,
/// Amt
#[serde(rename = "10")]
Amt,
/// Percentage
#[serde(rename = "11")]
Percentage,
/// Char
#[serde(rename = "12")]
Char,
/// Boolean
#[serde(rename = "13")]
Boolean,
/// String
#[serde(rename = "14")]
String,
/// MultipleCharValue
#[serde(rename = "15")]
MultipleCharValue,
/// Currency
#[serde(rename = "16")]
Currency,
/// Exchange
#[serde(rename = "17")]
Exchange,
/// Month-Year
#[serde(rename = "18")]
MonthYear,
/// UTCTimeStamp
#[serde(rename = "19")]
UtcTimeStamp,
/// UTCTimeOnly
#[serde(rename = "20")]
UtcTimeOnly,
/// LocalMktDate
#[serde(rename = "21")]
LocalMktDate,
/// UTCDateOnly
#[serde(rename = "22")]
UtcDateOnly,
/// Data
#[serde(rename = "23")]
Data,
/// MultipleStringValue
#[serde(rename = "24")]
MultipleStringValue,
}
impl Default for StrategyParameterType {
fn default() -> Self {
StrategyParameterType::Int
}
}
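// Sketch (serde_json assumed, not generated): StrategyParameterType encodes the
// FIX data-type codes "1".."24" as strings, and its Default mirrors the other
// enums in this file by picking the first variant.
#[cfg(test)]
mod strategy_parameter_type_example {
    use super::StrategyParameterType;

    #[test]
    fn numeric_code_round_trip_and_default() {
        let ty: StrategyParameterType = serde_json::from_str("\"8\"").unwrap();
        assert_eq!(ty, StrategyParameterType::Price);
        assert_eq!(serde_json::to_string(&ty).unwrap(), "\"8\"");
        assert_eq!(StrategyParameterType::default(), StrategyParameterType::Int);
    }
}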
| 21.07272 | 298 | 0.627851 |
795688ee105b74a3db33edf10a051d93c06e06a3
| 52,352 |
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
use std::fmt::Write;
/// See [`DescribeAcceleratorOfferingsInput`](crate::input::DescribeAcceleratorOfferingsInput)
pub mod describe_accelerator_offerings_input {
/// A builder for [`DescribeAcceleratorOfferingsInput`](crate::input::DescribeAcceleratorOfferingsInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) location_type: std::option::Option<crate::model::LocationType>,
pub(crate) accelerator_types: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl Builder {
/// <p> The location type that you want to describe accelerator type offerings for. It can assume the following values: region: will return the accelerator type offering at the regional level. availability-zone: will return the accelerator type offering at the availability zone level. availability-zone-id: will return the accelerator type offering at the availability zone level returning the availability zone id. </p>
pub fn location_type(mut self, input: crate::model::LocationType) -> Self {
self.location_type = Some(input);
self
}
/// <p> The location type that you want to describe accelerator type offerings for. It can assume the following values: region: will return the accelerator type offering at the regional level. availability-zone: will return the accelerator type offering at the availability zone level. availability-zone-id: will return the accelerator type offering at the availability zone level returning the availability zone id. </p>
pub fn set_location_type(
mut self,
input: std::option::Option<crate::model::LocationType>,
) -> Self {
self.location_type = input;
self
}
/// Appends an item to `accelerator_types`.
///
/// To override the contents of this collection use [`set_accelerator_types`](Self::set_accelerator_types).
///
/// <p> The list of accelerator types to describe. </p>
pub fn accelerator_types(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.accelerator_types.unwrap_or_default();
v.push(input.into());
self.accelerator_types = Some(v);
self
}
/// <p> The list of accelerator types to describe. </p>
pub fn set_accelerator_types(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.accelerator_types = input;
self
}
/// Consumes the builder and constructs a [`DescribeAcceleratorOfferingsInput`](crate::input::DescribeAcceleratorOfferingsInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::DescribeAcceleratorOfferingsInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::DescribeAcceleratorOfferingsInput {
location_type: self.location_type,
accelerator_types: self.accelerator_types,
})
}
}
}
#[doc(hidden)]
pub type DescribeAcceleratorOfferingsInputOperationOutputAlias =
crate::operation::DescribeAcceleratorOfferings;
#[doc(hidden)]
pub type DescribeAcceleratorOfferingsInputOperationRetryAlias =
aws_http::retry::AwsErrorRetryPolicy;
impl DescribeAcceleratorOfferingsInput {
/// Consumes the builder and constructs an Operation<[`DescribeAcceleratorOfferings`](crate::operation::DescribeAcceleratorOfferings)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::DescribeAcceleratorOfferings,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::DescribeAcceleratorOfferingsInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/describe-accelerator-offerings")
.expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::DescribeAcceleratorOfferingsInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/json",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
        #[allow(clippy::useless_conversion)]
        let body = aws_smithy_http::body::SdkBody::from(
            crate::operation_ser::serialize_operation_crate_operation_describe_accelerator_offerings(&self)?,
        );
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
request
.properties_mut()
.insert(aws_smithy_http::http_versions::DEFAULT_HTTP_VERSION_LIST.clone());
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::DescribeAcceleratorOfferings::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"DescribeAcceleratorOfferings",
"elasticinference",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`DescribeAcceleratorOfferingsInput`](crate::input::DescribeAcceleratorOfferingsInput)
pub fn builder() -> crate::input::describe_accelerator_offerings_input::Builder {
crate::input::describe_accelerator_offerings_input::Builder::default()
}
}
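// Construction sketch: `DescribeAcceleratorOfferingsInput::builder().accelerator_types("eia2.medium").build()?`
// produces the input; the accelerator type string here is purely illustrative.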
/// See [`DescribeAcceleratorsInput`](crate::input::DescribeAcceleratorsInput)
pub mod describe_accelerators_input {
/// A builder for [`DescribeAcceleratorsInput`](crate::input::DescribeAcceleratorsInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) accelerator_ids: std::option::Option<std::vec::Vec<std::string::String>>,
pub(crate) filters: std::option::Option<std::vec::Vec<crate::model::Filter>>,
pub(crate) max_results: std::option::Option<i32>,
pub(crate) next_token: std::option::Option<std::string::String>,
}
impl Builder {
/// Appends an item to `accelerator_ids`.
///
/// To override the contents of this collection use [`set_accelerator_ids`](Self::set_accelerator_ids).
///
/// <p> The IDs of the accelerators to describe. </p>
pub fn accelerator_ids(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.accelerator_ids.unwrap_or_default();
v.push(input.into());
self.accelerator_ids = Some(v);
self
}
/// <p> The IDs of the accelerators to describe. </p>
pub fn set_accelerator_ids(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.accelerator_ids = input;
self
}
/// Appends an item to `filters`.
///
/// To override the contents of this collection use [`set_filters`](Self::set_filters).
///
/// <p> One or more filters. Filter names and values are case-sensitive. Valid filter names are: accelerator-types: can provide a list of accelerator type names to filter for. instance-id: can provide a list of EC2 instance ids to filter for. </p>
pub fn filters(mut self, input: crate::model::Filter) -> Self {
let mut v = self.filters.unwrap_or_default();
v.push(input);
self.filters = Some(v);
self
}
/// <p> One or more filters. Filter names and values are case-sensitive. Valid filter names are: accelerator-types: can provide a list of accelerator type names to filter for. instance-id: can provide a list of EC2 instance ids to filter for. </p>
pub fn set_filters(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::Filter>>,
) -> Self {
self.filters = input;
self
}
/// <p> The total number of items to return in the command's output. If the total number of items available is more than the value specified, a NextToken is provided in the command's output. To resume pagination, provide the NextToken value in the starting-token argument of a subsequent command. Do not use the NextToken response element directly outside of the AWS CLI. </p>
pub fn max_results(mut self, input: i32) -> Self {
self.max_results = Some(input);
self
}
/// <p> The total number of items to return in the command's output. If the total number of items available is more than the value specified, a NextToken is provided in the command's output. To resume pagination, provide the NextToken value in the starting-token argument of a subsequent command. Do not use the NextToken response element directly outside of the AWS CLI. </p>
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.max_results = input;
self
}
/// <p> A token to specify where to start paginating. This is the NextToken from a previously truncated response. </p>
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.next_token = Some(input.into());
self
}
/// <p> A token to specify where to start paginating. This is the NextToken from a previously truncated response. </p>
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.next_token = input;
self
}
/// Consumes the builder and constructs a [`DescribeAcceleratorsInput`](crate::input::DescribeAcceleratorsInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::DescribeAcceleratorsInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::DescribeAcceleratorsInput {
accelerator_ids: self.accelerator_ids,
filters: self.filters,
max_results: self.max_results.unwrap_or_default(),
next_token: self.next_token,
})
}
}
}
#[doc(hidden)]
pub type DescribeAcceleratorsInputOperationOutputAlias = crate::operation::DescribeAccelerators;
#[doc(hidden)]
pub type DescribeAcceleratorsInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl DescribeAcceleratorsInput {
/// Consumes the builder and constructs an Operation<[`DescribeAccelerators`](crate::operation::DescribeAccelerators)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::DescribeAccelerators,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::DescribeAcceleratorsInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/describe-accelerators").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::DescribeAcceleratorsInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/json",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_describe_accelerators(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
request
.properties_mut()
.insert(aws_smithy_http::http_versions::DEFAULT_HTTP_VERSION_LIST.clone());
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::DescribeAccelerators::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"DescribeAccelerators",
"elasticinference",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`DescribeAcceleratorsInput`](crate::input::DescribeAcceleratorsInput)
pub fn builder() -> crate::input::describe_accelerators_input::Builder {
crate::input::describe_accelerators_input::Builder::default()
}
}
/// See [`DescribeAcceleratorTypesInput`](crate::input::DescribeAcceleratorTypesInput)
pub mod describe_accelerator_types_input {
/// A builder for [`DescribeAcceleratorTypesInput`](crate::input::DescribeAcceleratorTypesInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {}
impl Builder {
/// Consumes the builder and constructs a [`DescribeAcceleratorTypesInput`](crate::input::DescribeAcceleratorTypesInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::DescribeAcceleratorTypesInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::DescribeAcceleratorTypesInput {})
}
}
}
#[doc(hidden)]
pub type DescribeAcceleratorTypesInputOperationOutputAlias =
crate::operation::DescribeAcceleratorTypes;
#[doc(hidden)]
pub type DescribeAcceleratorTypesInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl DescribeAcceleratorTypesInput {
/// Consumes the builder and constructs an Operation<[`DescribeAcceleratorTypes`](crate::operation::DescribeAcceleratorTypes)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::DescribeAcceleratorTypes,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::DescribeAcceleratorTypesInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/describe-accelerator-types").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::DescribeAcceleratorTypesInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("GET").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from("");
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
request
.properties_mut()
.insert(aws_smithy_http::http_versions::DEFAULT_HTTP_VERSION_LIST.clone());
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::DescribeAcceleratorTypes::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"DescribeAcceleratorTypes",
"elasticinference",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`DescribeAcceleratorTypesInput`](crate::input::DescribeAcceleratorTypesInput)
pub fn builder() -> crate::input::describe_accelerator_types_input::Builder {
crate::input::describe_accelerator_types_input::Builder::default()
}
}
/// See [`ListTagsForResourceInput`](crate::input::ListTagsForResourceInput)
pub mod list_tags_for_resource_input {
/// A builder for [`ListTagsForResourceInput`](crate::input::ListTagsForResourceInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) resource_arn: std::option::Option<std::string::String>,
}
impl Builder {
/// <p> The ARN of the Elastic Inference Accelerator to list the tags for. </p>
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.resource_arn = Some(input.into());
self
}
/// <p> The ARN of the Elastic Inference Accelerator to list the tags for. </p>
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.resource_arn = input;
self
}
/// Consumes the builder and constructs a [`ListTagsForResourceInput`](crate::input::ListTagsForResourceInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::ListTagsForResourceInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::ListTagsForResourceInput {
resource_arn: self.resource_arn,
})
}
}
}
#[doc(hidden)]
pub type ListTagsForResourceInputOperationOutputAlias = crate::operation::ListTagsForResource;
#[doc(hidden)]
pub type ListTagsForResourceInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl ListTagsForResourceInput {
/// Consumes the builder and constructs an Operation<[`ListTagsForResource`](crate::operation::ListTagsForResource)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::ListTagsForResource,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::ListTagsForResourceInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
let input_1 = &_input.resource_arn;
let input_1 = input_1.as_ref().ok_or(
aws_smithy_http::operation::BuildError::MissingField {
field: "resource_arn",
details: "cannot be empty or unset",
},
)?;
let resource_arn = aws_smithy_http::label::fmt_string(input_1, false);
if resource_arn.is_empty() {
return Err(aws_smithy_http::operation::BuildError::MissingField {
field: "resource_arn",
details: "cannot be empty or unset",
});
}
write!(output, "/tags/{resourceArn}", resourceArn = resource_arn)
.expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::ListTagsForResourceInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("GET").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from("");
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
request
.properties_mut()
.insert(aws_smithy_http::http_versions::DEFAULT_HTTP_VERSION_LIST.clone());
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::ListTagsForResource::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"ListTagsForResource",
"elasticinference",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`ListTagsForResourceInput`](crate::input::ListTagsForResourceInput)
pub fn builder() -> crate::input::list_tags_for_resource_input::Builder {
crate::input::list_tags_for_resource_input::Builder::default()
}
}
/// See [`TagResourceInput`](crate::input::TagResourceInput)
pub mod tag_resource_input {
/// A builder for [`TagResourceInput`](crate::input::TagResourceInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) resource_arn: std::option::Option<std::string::String>,
pub(crate) tags: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
}
impl Builder {
/// <p> The ARN of the Elastic Inference Accelerator to tag. </p>
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.resource_arn = Some(input.into());
self
}
/// <p> The ARN of the Elastic Inference Accelerator to tag. </p>
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.resource_arn = input;
self
}
/// Adds a key-value pair to `tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// <p> The tags to add to the Elastic Inference Accelerator. </p>
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
let mut hash_map = self.tags.unwrap_or_default();
hash_map.insert(k.into(), v.into());
self.tags = Some(hash_map);
self
}
/// <p> The tags to add to the Elastic Inference Accelerator. </p>
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.tags = input;
self
}
/// Consumes the builder and constructs a [`TagResourceInput`](crate::input::TagResourceInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::TagResourceInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::TagResourceInput {
resource_arn: self.resource_arn,
tags: self.tags,
})
}
}
}
#[doc(hidden)]
pub type TagResourceInputOperationOutputAlias = crate::operation::TagResource;
#[doc(hidden)]
pub type TagResourceInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl TagResourceInput {
/// Consumes the builder and constructs an Operation<[`TagResource`](crate::operation::TagResource)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::TagResource,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::TagResourceInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
let input_2 = &_input.resource_arn;
let input_2 = input_2.as_ref().ok_or(
aws_smithy_http::operation::BuildError::MissingField {
field: "resource_arn",
details: "cannot be empty or unset",
},
)?;
let resource_arn = aws_smithy_http::label::fmt_string(input_2, false);
if resource_arn.is_empty() {
return Err(aws_smithy_http::operation::BuildError::MissingField {
field: "resource_arn",
details: "cannot be empty or unset",
});
}
write!(output, "/tags/{resourceArn}", resourceArn = resource_arn)
.expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::TagResourceInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/json",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_tag_resource(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
request
.properties_mut()
.insert(aws_smithy_http::http_versions::DEFAULT_HTTP_VERSION_LIST.clone());
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::TagResource::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"TagResource",
"elasticinference",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`TagResourceInput`](crate::input::TagResourceInput)
pub fn builder() -> crate::input::tag_resource_input::Builder {
crate::input::tag_resource_input::Builder::default()
}
}
/// See [`UntagResourceInput`](crate::input::UntagResourceInput)
pub mod untag_resource_input {
/// A builder for [`UntagResourceInput`](crate::input::UntagResourceInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) resource_arn: std::option::Option<std::string::String>,
pub(crate) tag_keys: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl Builder {
/// <p> The ARN of the Elastic Inference Accelerator to untag. </p>
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.resource_arn = Some(input.into());
self
}
/// <p> The ARN of the Elastic Inference Accelerator to untag. </p>
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.resource_arn = input;
self
}
/// Appends an item to `tag_keys`.
///
/// To override the contents of this collection use [`set_tag_keys`](Self::set_tag_keys).
///
/// <p> The list of tags to remove from the Elastic Inference Accelerator. </p>
pub fn tag_keys(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.tag_keys.unwrap_or_default();
v.push(input.into());
self.tag_keys = Some(v);
self
}
/// <p> The list of tags to remove from the Elastic Inference Accelerator. </p>
pub fn set_tag_keys(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.tag_keys = input;
self
}
/// Consumes the builder and constructs a [`UntagResourceInput`](crate::input::UntagResourceInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::UntagResourceInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::UntagResourceInput {
resource_arn: self.resource_arn,
tag_keys: self.tag_keys,
})
}
}
}
#[doc(hidden)]
pub type UntagResourceInputOperationOutputAlias = crate::operation::UntagResource;
#[doc(hidden)]
pub type UntagResourceInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl UntagResourceInput {
/// Consumes the builder and constructs an Operation<[`UntagResource`](crate::operation::UntagResource)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::UntagResource,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::UntagResourceInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
let input_3 = &_input.resource_arn;
let input_3 = input_3.as_ref().ok_or(
aws_smithy_http::operation::BuildError::MissingField {
field: "resource_arn",
details: "cannot be empty or unset",
},
)?;
let resource_arn = aws_smithy_http::label::fmt_string(input_3, false);
if resource_arn.is_empty() {
return Err(aws_smithy_http::operation::BuildError::MissingField {
field: "resource_arn",
details: "cannot be empty or unset",
});
}
write!(output, "/tags/{resourceArn}", resourceArn = resource_arn)
.expect("formatting should succeed");
Ok(())
}
fn uri_query(
_input: &crate::input::UntagResourceInput,
mut output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
let mut query = aws_smithy_http::query::Writer::new(&mut output);
if let Some(inner_4) = &_input.tag_keys {
for inner_5 in inner_4 {
query.push_kv("tagKeys", &aws_smithy_http::query::fmt_string(&inner_5));
}
}
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::UntagResourceInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
uri_query(input, &mut uri)?;
Ok(builder.method("DELETE").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from("");
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
request
.properties_mut()
.insert(aws_smithy_http::http_versions::DEFAULT_HTTP_VERSION_LIST.clone());
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::UntagResource::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"UntagResource",
"elasticinference",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`UntagResourceInput`](crate::input::UntagResourceInput)
pub fn builder() -> crate::input::untag_resource_input::Builder {
crate::input::untag_resource_input::Builder::default()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct UntagResourceInput {
/// <p> The ARN of the Elastic Inference Accelerator to untag. </p>
pub resource_arn: std::option::Option<std::string::String>,
/// <p> The list of tags to remove from the Elastic Inference Accelerator. </p>
pub tag_keys: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl UntagResourceInput {
/// <p> The ARN of the Elastic Inference Accelerator to untag. </p>
pub fn resource_arn(&self) -> std::option::Option<&str> {
self.resource_arn.as_deref()
}
/// <p> The list of tags to remove from the Elastic Inference Accelerator. </p>
pub fn tag_keys(&self) -> std::option::Option<&[std::string::String]> {
self.tag_keys.as_deref()
}
}
impl std::fmt::Debug for UntagResourceInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("UntagResourceInput");
formatter.field("resource_arn", &self.resource_arn);
formatter.field("tag_keys", &self.tag_keys);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TagResourceInput {
/// <p> The ARN of the Elastic Inference Accelerator to tag. </p>
pub resource_arn: std::option::Option<std::string::String>,
/// <p> The tags to add to the Elastic Inference Accelerator. </p>
pub tags:
std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
}
impl TagResourceInput {
/// <p> The ARN of the Elastic Inference Accelerator to tag. </p>
pub fn resource_arn(&self) -> std::option::Option<&str> {
self.resource_arn.as_deref()
}
/// <p> The tags to add to the Elastic Inference Accelerator. </p>
pub fn tags(
&self,
) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
{
self.tags.as_ref()
}
}
impl std::fmt::Debug for TagResourceInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("TagResourceInput");
formatter.field("resource_arn", &self.resource_arn);
formatter.field("tags", &self.tags);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ListTagsForResourceInput {
/// <p> The ARN of the Elastic Inference Accelerator to list the tags for. </p>
pub resource_arn: std::option::Option<std::string::String>,
}
impl ListTagsForResourceInput {
/// <p> The ARN of the Elastic Inference Accelerator to list the tags for. </p>
pub fn resource_arn(&self) -> std::option::Option<&str> {
self.resource_arn.as_deref()
}
}
impl std::fmt::Debug for ListTagsForResourceInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ListTagsForResourceInput");
formatter.field("resource_arn", &self.resource_arn);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DescribeAcceleratorTypesInput {}
impl std::fmt::Debug for DescribeAcceleratorTypesInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("DescribeAcceleratorTypesInput");
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DescribeAcceleratorsInput {
/// <p> The IDs of the accelerators to describe. </p>
pub accelerator_ids: std::option::Option<std::vec::Vec<std::string::String>>,
/// <p> One or more filters. Filter names and values are case-sensitive. Valid filter names are: accelerator-types: can provide a list of accelerator type names to filter for. instance-id: can provide a list of EC2 instance ids to filter for. </p>
pub filters: std::option::Option<std::vec::Vec<crate::model::Filter>>,
/// <p> The total number of items to return in the command's output. If the total number of items available is more than the value specified, a NextToken is provided in the command's output. To resume pagination, provide the NextToken value in the starting-token argument of a subsequent command. Do not use the NextToken response element directly outside of the AWS CLI. </p>
pub max_results: i32,
/// <p> A token to specify where to start paginating. This is the NextToken from a previously truncated response. </p>
pub next_token: std::option::Option<std::string::String>,
}
impl DescribeAcceleratorsInput {
/// <p> The IDs of the accelerators to describe. </p>
pub fn accelerator_ids(&self) -> std::option::Option<&[std::string::String]> {
self.accelerator_ids.as_deref()
}
/// <p> One or more filters. Filter names and values are case-sensitive. Valid filter names are: accelerator-types: can provide a list of accelerator type names to filter for. instance-id: can provide a list of EC2 instance ids to filter for. </p>
pub fn filters(&self) -> std::option::Option<&[crate::model::Filter]> {
self.filters.as_deref()
}
/// <p> The total number of items to return in the command's output. If the total number of items available is more than the value specified, a NextToken is provided in the command's output. To resume pagination, provide the NextToken value in the starting-token argument of a subsequent command. Do not use the NextToken response element directly outside of the AWS CLI. </p>
pub fn max_results(&self) -> i32 {
self.max_results
}
/// <p> A token to specify where to start paginating. This is the NextToken from a previously truncated response. </p>
pub fn next_token(&self) -> std::option::Option<&str> {
self.next_token.as_deref()
}
}
impl std::fmt::Debug for DescribeAcceleratorsInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("DescribeAcceleratorsInput");
formatter.field("accelerator_ids", &self.accelerator_ids);
formatter.field("filters", &self.filters);
formatter.field("max_results", &self.max_results);
formatter.field("next_token", &self.next_token);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DescribeAcceleratorOfferingsInput {
/// <p> The location type that you want to describe accelerator type offerings for. It can assume the following values: region: will return the accelerator type offering at the regional level. availability-zone: will return the accelerator type offering at the availability zone level. availability-zone-id: will return the accelerator type offering at the availability zone level returning the availability zone id. </p>
pub location_type: std::option::Option<crate::model::LocationType>,
/// <p> The list of accelerator types to describe. </p>
pub accelerator_types: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl DescribeAcceleratorOfferingsInput {
/// <p> The location type that you want to describe accelerator type offerings for. It can assume the following values: region: will return the accelerator type offering at the regional level. availability-zone: will return the accelerator type offering at the availability zone level. availability-zone-id: will return the accelerator type offering at the availability zone level returning the availability zone id. </p>
pub fn location_type(&self) -> std::option::Option<&crate::model::LocationType> {
self.location_type.as_ref()
}
/// <p> The list of accelerator types to describe. </p>
pub fn accelerator_types(&self) -> std::option::Option<&[std::string::String]> {
self.accelerator_types.as_deref()
}
}
impl std::fmt::Debug for DescribeAcceleratorOfferingsInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("DescribeAcceleratorOfferingsInput");
formatter.field("location_type", &self.location_type);
formatter.field("accelerator_types", &self.accelerator_types);
formatter.finish()
}
}
| 46.784629 | 429 | 0.619021 |
62dbc21495a5405a0ff0ee2bd4221c86c4c32fdc
| 723 |
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(Copy(Bad))]
//~^ ERROR malformed `derive` entry
struct Test1;
#[derive(Copy="bad")]
//~^ ERROR malformed `derive` entry
struct Test2;
#[derive()]
//~^ WARNING empty trait list
struct Test3;
#[derive]
//~^ WARNING empty trait list
struct Test4;
| 27.807692 | 68 | 0.721992 |
acfbfb5d1e7563d0cda2bde4208fc24407d3692d
| 2,482 |
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::DR {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct DRR {
bits: u32,
}
impl DRR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _DRW<'a> {
w: &'a mut W,
}
impl<'a> _DRW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bits(self, value: u32) -> &'a mut W {
const MASK: u32 = 4294967295;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:31 - Data register bits"]
#[inline]
pub fn dr(&self) -> DRR {
let bits = {
const MASK: u32 = 4294967295;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u32
};
DRR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 4294967295 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:31 - Data register bits"]
#[inline]
pub fn dr(&mut self) -> _DRW {
_DRW { w: self }
}
}
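// Usage sketch, assuming `periph.dr` is the DR register instance on the owning peripheral:
// `periph.dr.modify(|r, w| w.dr().bits(r.dr().bits() | 0x1));`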
| 23.415094 | 59 | 0.490733 |
114662a8e2cf098644a2a87d0eb549f81dd4a36c
| 5,728 |
use anyhow::Context;
use core::settings::S3Settings;
use rusoto_core::{
credential::{AwsCredentials, StaticProvider},
HttpClient, Region, RusotoError,
};
use rusoto_s3::{DeleteObjectRequest, GetObjectError, GetObjectRequest, PutObjectRequest, S3};
use shared::media::{self, media_key, FileKind, MediaLibrary, PngImageFile};
use tokio::io::AsyncReadExt;
use uuid::Uuid;
#[derive(Clone)]
pub struct Client {
creds: AwsCredentials,
region: Region,
bucket: String,
client: Option<rusoto_s3::S3Client>,
}
impl Client {
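    /// Builds an S3 client for the configured endpoint and bucket using static
    /// credentials. When `use_client` is false, no underlying Rusoto client is
    /// created and the media operations below turn into no-ops.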
pub fn new(s3_settings: S3Settings) -> anyhow::Result<Self> {
let S3Settings {
endpoint,
bucket,
access_key_id,
secret_access_key,
use_client,
} = s3_settings;
let region = Region::Custom {
name: "auto".to_owned(),
endpoint,
};
let creds = AwsCredentials::new(access_key_id, secret_access_key, None, None);
let credentials_provider = StaticProvider::from(creds.clone());
let client = if use_client {
Some(rusoto_s3::S3Client::new_with(
HttpClient::new()?,
credentials_provider,
region.clone(),
))
} else {
None
};
Ok(Self {
region,
creds,
bucket,
client,
})
}
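    /// Uploads the resized and thumbnail PNG renditions of an image, running both
    /// uploads concurrently.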
pub async fn upload_png_images_resized_thumb(
&self,
library: MediaLibrary,
image: Uuid,
resized: Vec<u8>,
thumbnail: Vec<u8>,
) -> anyhow::Result<()> {
let upload = |data, file| self.upload_media(data, library, image, FileKind::ImagePng(file));
let resized = upload(resized, PngImageFile::Resized);
let thumbnail = upload(thumbnail, PngImageFile::Thumbnail);
futures::future::try_join(resized, thumbnail).await?;
Ok(())
}
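    /// Uploads the original, resized, and thumbnail PNG renditions of an image,
    /// running all three uploads concurrently.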
pub async fn upload_png_images(
&self,
library: MediaLibrary,
image: Uuid,
original: Vec<u8>,
resized: Vec<u8>,
thumbnail: Vec<u8>,
) -> anyhow::Result<()> {
let upload = |data, file| self.upload_media(data, library, image, FileKind::ImagePng(file));
let original = upload(original, PngImageFile::Original);
let resized = upload(resized, PngImageFile::Resized);
let thumbnail = upload(thumbnail, PngImageFile::Thumbnail);
futures::future::try_join3(original, resized, thumbnail).await?;
Ok(())
}
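    /// Deletes a single media file. Failures are logged and reported to Sentry
    /// rather than propagated to the caller.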
pub async fn delete_media(&self, library: MediaLibrary, file: FileKind, id: Uuid) {
let key = media_key(library, id, file);
if let Err(err) = self.try_delete(key.clone()).await {
log::warn!("failed to delete {} from s3: {}", key, err);
sentry::with_scope(
|scope| scope.set_level(Some(sentry::Level::Warning)),
|| {
sentry::add_breadcrumb(sentry::Breadcrumb {
ty: "info".to_owned(),
data: {
let mut map = sentry::protocol::Map::new();
map.insert("key".to_owned(), key.clone().into());
map
},
..Default::default()
});
sentry::integrations::anyhow::capture_anyhow(&err);
},
);
}
}
// note: does nothing if object doesn't exist, or if the client is disabled.
async fn try_delete(&self, key: String) -> anyhow::Result<()> {
if let Some(client) = self.client.as_ref() {
client
.delete_object(DeleteObjectRequest {
key,
bucket: self.bucket.clone(),
..DeleteObjectRequest::default()
})
.await
.context("failed to delete object from s3")?;
}
Ok(())
}
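    /// Uploads a single media file with its content type; succeeds without doing
    /// anything when the client is disabled.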
pub async fn upload_media(
&self,
data: Vec<u8>,
library: MediaLibrary,
id: Uuid,
file_kind: FileKind,
) -> anyhow::Result<()> {
let client = match &self.client {
Some(client) => client,
None => return Ok(()),
};
client
.put_object(PutObjectRequest {
bucket: self.bucket.clone(),
key: media::media_key(library, id, file_kind),
content_type: Some(file_kind.content_type().to_owned()),
body: Some(data.into()),
..PutObjectRequest::default()
})
.await?;
Ok(())
}
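    /// Downloads a media file. Returns `Ok(None)` when the client is disabled and
    /// `Ok(Some(None))` when the object does not exist in the bucket.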
pub async fn download_media_file(
&self,
library: MediaLibrary,
id: Uuid,
file_kind: FileKind,
) -> anyhow::Result<Option<Option<Vec<u8>>>> {
let client = match &self.client {
Some(client) => client,
None => return Ok(None),
};
let resp = client
.get_object(GetObjectRequest {
bucket: self.bucket.clone(),
key: media::media_key(library, id, file_kind),
..GetObjectRequest::default()
})
.await;
let resp = match resp {
Ok(resp) => resp,
Err(RusotoError::Service(GetObjectError::NoSuchKey(_))) => return Ok(Some(None)),
Err(e) => return Err(e.into()),
};
let mut body = vec![];
resp.body
.ok_or_else(|| anyhow::anyhow!("missing response"))?
.into_async_read()
.read_to_end(&mut body)
.await?;
Ok(Some(Some(body)))
}
}
| 29.678756 | 100 | 0.516934 |
91cbe4c6941a63746938af4a662ec64bf009096c
| 1,587 |
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use serde::{de, ser};
use std::fmt;
use thiserror::Error;
pub type Result<T, E = Error> = std::result::Result<T, E>;
#[derive(Clone, Debug, Error, PartialEq)]
pub enum Error {
#[error("unexpected end of input")]
Eof,
#[error("I/O error: {0}")]
Io(String),
#[error("exceeded max sequence length")]
ExceededMaxLen(usize),
#[error("expected boolean")]
ExpectedBoolean,
#[error("expected map key")]
ExpectedMapKey,
#[error("expected map value")]
ExpectedMapValue,
#[error("keys of serialized maps must be unique and in increasing order")]
NonCanonicalMap,
#[error("expected option type")]
ExpectedOption,
#[error("{0}")]
Custom(String),
#[error("sequence missing length")]
MissingLen,
#[error("not supported: {0}")]
NotSupported(&'static str),
#[error("remaining input")]
RemainingInput,
#[error("malformed utf8")]
Utf8,
#[error("ULEB128 encoding was not minimal in size")]
NonCanonicalUleb128Encoding,
#[error("ULEB128-encoded integer did not fit in the target size")]
IntegerOverflowDuringUleb128Decoding,
}
impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Self {
Error::Io(err.to_string())
}
}
impl ser::Error for Error {
fn custom<T: fmt::Display>(msg: T) -> Self {
Error::Custom(msg.to_string())
}
}
impl de::Error for Error {
fn custom<T: fmt::Display>(msg: T) -> Self {
Error::Custom(msg.to_string())
}
}
| 26.016393 | 78 | 0.63012 |
695e8de661c35f1c64acc3d36d5c14092176b5c2
| 695 |
use std::io;
use std::path::PathBuf;
use std::process::exit;
use std::process::Command;
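/// Runs the external `certbot` binary with `--config <config_file>` plus any
/// additional arguments, exiting the current process with certbot's exit code
/// (or 1 if it was killed by a signal) when the command fails.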
pub(crate) fn run_certbot(config_file: PathBuf, additional_args: &[String]) -> io::Result<()> {
const CERTBOT_EXECUTABLE: &str = "certbot";
let exit_status = Command::new(CERTBOT_EXECUTABLE)
.arg("--config")
.arg(config_file)
.args(additional_args)
.status()?;
if !exit_status.success() {
if let Some(code) = exit_status.code() {
eprintln!("certbot exited with non-zero exit code: {}", code);
exit(code);
} else {
eprintln!("certbot was killed by signal");
exit(1);
}
}
Ok(())
}
| 25.740741 | 95 | 0.569784 |
feb00990cf1c5ad145039aca91fb92ecf945ef34
| 153 |
use matrix::StackMatrix;
const M: usize = 3;
const N: usize = 3;
fn main() {
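    // The last row deliberately has fewer than N elements, so this presumably
    // exercises StackMatrix's compile-time shape checking.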
let _ = StackMatrix::<i32, M, N>::new([[1, 2, 3], [4, 5, 6], [7]]);
}
| 17 | 71 | 0.522876 |
18bcc92586cd4a59ce6bca10e4ddff3f1dc3cded
| 6,152 |
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::compiler::{as_module, compile_units};
use move_binary_format::errors::VMResult;
use move_core_types::{
account_address::AccountAddress,
identifier::Identifier,
language_storage::{ModuleId, TypeTag},
value::{MoveStruct, MoveValue},
vm_status::StatusCode,
};
use move_vm_runtime::move_vm::MoveVM;
use move_vm_test_utils::InMemoryStorage;
use move_vm_types::gas_schedule::GasStatus;
const TEST_ADDR: AccountAddress = AccountAddress::new([42; AccountAddress::LENGTH]);
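/// Compiles and publishes a test module whose `foo` function takes the given type
/// parameters and parameter types, then invokes it through the VM with the supplied
/// type arguments and serialized argument values.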
fn run(
ty_params: &[&str],
params: &[&str],
ty_args: Vec<TypeTag>,
args: Vec<MoveValue>,
) -> VMResult<()> {
let ty_params = ty_params
.iter()
.map(|var| format!("{}: copy + drop", var))
.collect::<Vec<_>>()
.join(", ");
let params = params
.iter()
.enumerate()
.map(|(idx, ty)| format!("_x{}: {}", idx, ty))
.collect::<Vec<_>>()
.join(", ");
let code = format!(
r#"
module 0x{}::M {{
struct Foo has copy, drop {{ x: u64 }}
struct Bar<T> has copy, drop {{ x: T }}
fun foo<{}>({}) {{ }}
}}
"#,
TEST_ADDR, ty_params, params
);
let mut units = compile_units(&code).unwrap();
let m = as_module(units.pop().unwrap());
let mut blob = vec![];
m.serialize(&mut blob).unwrap();
let mut storage = InMemoryStorage::new();
let module_id = ModuleId::new(TEST_ADDR, Identifier::new("M").unwrap());
storage.publish_or_overwrite_module(module_id.clone(), blob);
let vm = MoveVM::new(vec![]).unwrap();
let mut sess = vm.new_session(&storage);
let fun_name = Identifier::new("foo").unwrap();
let mut gas_status = GasStatus::new_unmetered();
let args: Vec<_> = args
.into_iter()
.map(|val| val.simple_serialize().unwrap())
.collect();
sess.execute_function_bypass_visibility(&module_id, &fun_name, ty_args, args, &mut gas_status)?;
Ok(())
}
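/// Asserts that calling `foo` (non-generic) with these parameter types and argument
/// values fails with the expected status code.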
fn expect_err(params: &[&str], args: Vec<MoveValue>, expected_status: StatusCode) {
assert!(run(&[], params, vec![], args).unwrap_err().major_status() == expected_status);
}
fn expect_err_generic(
ty_params: &[&str],
params: &[&str],
ty_args: Vec<TypeTag>,
args: Vec<MoveValue>,
expected_status: StatusCode,
) {
assert!(
run(ty_params, params, ty_args, args)
.unwrap_err()
.major_status()
== expected_status
);
}
fn expect_ok(params: &[&str], args: Vec<MoveValue>) {
run(&[], params, vec![], args).unwrap()
}
fn expect_ok_generic(
ty_params: &[&str],
params: &[&str],
ty_args: Vec<TypeTag>,
args: Vec<MoveValue>,
) {
run(ty_params, params, ty_args, args).unwrap()
}
#[test]
fn expected_0_args_got_0() {
expect_ok(&[], vec![])
}
#[test]
fn expected_0_args_got_1() {
expect_err(
&[],
vec![MoveValue::U64(0)],
StatusCode::NUMBER_OF_ARGUMENTS_MISMATCH,
)
}
#[test]
fn expected_1_arg_got_0() {
expect_err(&["u64"], vec![], StatusCode::NUMBER_OF_ARGUMENTS_MISMATCH)
}
#[test]
fn expected_2_arg_got_1() {
expect_err(
&["u64", "bool"],
vec![MoveValue::U64(0)],
StatusCode::NUMBER_OF_ARGUMENTS_MISMATCH,
)
}
#[test]
fn expected_2_arg_got_3() {
expect_err(
&["u64", "bool"],
vec![
MoveValue::U64(0),
MoveValue::Bool(true),
MoveValue::Bool(false),
],
StatusCode::NUMBER_OF_ARGUMENTS_MISMATCH,
)
}
#[test]
fn expected_u64_got_u64() {
expect_ok(&["u64"], vec![MoveValue::U64(0)])
}
#[test]
#[allow(non_snake_case)]
fn expected_Foo_got_Foo() {
expect_ok(
&["Foo"],
vec![MoveValue::Struct(MoveStruct::new(vec![MoveValue::U64(0)]))],
)
}
#[test]
fn expected_signer_ref_got_signer() {
expect_ok(&["&signer"], vec![MoveValue::Signer(TEST_ADDR)])
}
#[test]
fn expected_u64_signer_ref_got_u64_signer() {
expect_ok(
&["u64", "&signer"],
vec![MoveValue::U64(0), MoveValue::Signer(TEST_ADDR)],
)
}
#[test]
fn expected_u64_got_bool() {
expect_err(
&["u64"],
vec![MoveValue::Bool(false)],
StatusCode::FAILED_TO_DESERIALIZE_ARGUMENT,
)
}
#[test]
fn param_type_u64_ref() {
expect_ok(&["&u64"], vec![MoveValue::U64(0)])
}
#[test]
#[allow(non_snake_case)]
fn expected_T__T_got_u64__u64() {
expect_ok_generic(&["T"], &["T"], vec![TypeTag::U64], vec![MoveValue::U64(0)])
}
#[test]
#[allow(non_snake_case)]
fn expected_A_B__A_u64_vector_B_got_u8_u128__u8_u64_vector_u128() {
expect_ok_generic(
&["A", "B"],
&["A", "u64", "vector<B>"],
vec![TypeTag::U8, TypeTag::U128],
vec![
MoveValue::U8(0),
MoveValue::U64(0),
MoveValue::Vector(vec![MoveValue::U128(0), MoveValue::U128(0)]),
],
)
}
#[test]
#[allow(non_snake_case)]
fn expected_T__Bar_T_got_bool__Bar_bool() {
expect_ok_generic(
&["T"],
&["Bar<T>"],
vec![TypeTag::Bool],
vec![MoveValue::Struct(MoveStruct::new(vec![MoveValue::Bool(
false,
)]))],
)
}
#[test]
#[allow(non_snake_case)]
fn expected_T__T_got_bool__bool() {
expect_ok_generic(
&["T"],
&["T"],
vec![TypeTag::Bool],
vec![MoveValue::Bool(false)],
)
}
#[test]
#[allow(non_snake_case)]
fn expected_T__T_got_bool__u64() {
expect_err_generic(
&["T"],
&["T"],
vec![TypeTag::Bool],
vec![MoveValue::U64(0)],
StatusCode::FAILED_TO_DESERIALIZE_ARGUMENT,
)
}
#[test]
#[allow(non_snake_case)]
fn expected_T__T_ref_got_u64__u64() {
expect_ok_generic(&["T"], &["&T"], vec![TypeTag::U64], vec![MoveValue::U64(0)])
}
#[test]
#[allow(non_snake_case)]
fn expected_T__Bar_T_got_bool__Bar_u64() {
expect_err_generic(
&["T"],
&["Bar<T>"],
vec![TypeTag::Bool],
vec![MoveValue::Struct(MoveStruct::new(vec![MoveValue::U64(0)]))],
StatusCode::FAILED_TO_DESERIALIZE_ARGUMENT,
)
}
| 23.480916 | 100 | 0.589077 |
1c100a1947adacc05651a2daaec753da3a810eff
| 267 |
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR MIT
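// Exercises nested closure captures: z = f(100) = 2 + 100 = 102, and
// g(z) = z + f(z) = 102 + 104 = 206.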
pub fn main() {
let x = 2;
let f = |y| x + y;
let z = f(100);
let g = |y| z + f(y);
assert!(z == 102);
assert!(g(z) == 206);
}
| 24.272727 | 69 | 0.543071 |
fbd74c9dfe7fde419e3ae9e027292b8baa42726e
| 7,129 |
use super::{Ast, Malvi, Result, SAst};
use std::rc::Rc;
use crate::im::Vector;
impl Malvi {
pub fn stdfn_part2(&mut self) {
declare_macros_for_builtins!(self);
builtin_func!("list", |_,_,x|Ok(Ast::Round(x)));
builtin_func1!("list?", |_,_,x:Rc<Ast>|Ok(match *x {
Ast::Round(..) => True!(),
_ => False!(),
}));
builtin_func1!("count", |_,_,x:Rc<Ast>|Ok(match &*x {
Ast::Round(x) => Int!(x.len() as i64),
Ast::Square(x) => Int!(x.len() as i64),
Ast::Curly(x) => Int!(x.len() as i64),
Nil!() => Int!(0),
_ => bail!("Can't count elements of this"),
}));
builtin_func1!("empty?", |_,_,x:Rc<Ast>|Ok(match &*x {
Ast::Round(x) => Ast::Simple(SAst::Bool(x.is_empty())),
Ast::Square(x) =>Ast::Simple(SAst::Bool(x.is_empty())),
Ast::Curly(x) => Ast::Simple(SAst::Bool(x.is_empty())),
_ => bail!("Can't check emptiness of this"),
}));
builtin_macro!("if", |m,env,mut x:Vector<Rc<Ast>>| {
if x.len() != 3 && x.len() != 2 {
                bail!("`if` takes exactly two or three arguments");
}
let cond = x.pop_front().unwrap();
let iftrue = x.pop_front().unwrap();
let iffalse = x.pop_front().unwrap_or(Rc::new(Nil!()));
let iftrue = ||Ok(Ast::EvalMeAgain{obj:iftrue, env:env.clone()});
let iffalse = ||Ok(Ast::EvalMeAgain{obj:iffalse, env:env.clone()});
match m.eval_impl(env,&cond)? {
True!() => iftrue(),
False!() => iffalse(),
Int!(_) => iftrue(),
| Ast::Round(..)
| Ast::Square(..)
| Ast::Curly(..)
=> iftrue(),
Ast::Simple(SAst::StrLit(_)) => iftrue(),
Nil!() => iffalse(),
_ => bail!("Wrong type used in `if` conditional"),
}
});
builtin_func2!("=", |_,_,arg1:Rc<Ast>,arg2:Rc<Ast>|
Ok(Ast::Simple(SAst::Bool(mal_eq(&arg1, &arg2)?))));
builtin_func2!(">", |_,_,arg1:Rc<Ast>,arg2:Rc<Ast>| Ok(match (&*arg1,&*arg2){
(Int!(x),Int!(y)) if x>y => True!(),
(Int!(_),Int!(_)) => False!(),
(_,_) => bail!("Can only compare integers"),
}));
builtin_func2!(">=", |_,_,arg1:Rc<Ast>,arg2:Rc<Ast>| Ok(match (&*arg1,&*arg2){
(Int!(x),Int!(y)) if x>=y => True!(),
(Int!(_),Int!(_)) => False!(),
(_,_) => bail!("Can only compare integers"),
}));
builtin_func2!("<", |_,_,arg1:Rc<Ast>,arg2:Rc<Ast>| Ok(match (&*arg1,&*arg2){
(Int!(x),Int!(y)) if x<y => True!(),
(Int!(_),Int!(_)) => False!(),
(_,_) => bail!("Can only compare integers"),
}));
builtin_func2!("<=", |_,_,arg1:Rc<Ast>,arg2:Rc<Ast>| Ok(match (&*arg1,&*arg2){
(Int!(x),Int!(y)) if x<=y => True!(),
(Int!(_),Int!(_)) => False!(),
(_,_) => bail!("Can only compare integers"),
}));
builtin_func!("prn",|m,_env,args:Vector<Rc<Ast>>| {
let mut first = true;
for x in args {
if !first {
print!(" ")
};
print!("{}", super::BoundAstRef(&*x, m, crate::DisplayMode::PrStr));
first = false;
}
println!();
Ok(Nil!())
});
builtin_func!("pr-str",|m,_env,args:Vector<Rc<Ast>>| {
let mut s = String::new();
let mut first = true;
use ::std::fmt::Write;
for x in args {
if !first {
                    let _ = write!(s, " ");
};
                let _ = write!(s, "{}", super::BoundAstRef(&*x, m, crate::DisplayMode::PrStr));
first = false;
}
Ok(StrLit!(s))
});
builtin_func!("str",|m,_env,args:Vector<Rc<Ast>>| {
let mut s = String::new();
use ::std::fmt::Write;
for x in args {
                let _ = write!(s, "{}", super::BoundAstRef(&*x, m, crate::DisplayMode::Str));
};
Ok(StrLit!(s))
});
builtin_func!("println",|m,_env,args:Vector<Rc<Ast>>| {
let mut first = true;
for x in args {
if !first {
print!(" ")
};
print!("{}", super::BoundAstRef(&*x, m, crate::DisplayMode::Str));
first = false;
}
println!();
Ok(Nil!())
});
}
}
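/// Structural equality backing the `=` builtin: integers, booleans, symbols,
/// atoms, string literals and nil compare by value; round and square sequences
/// compare element-wise and are interchangeable with each other; maps compare
/// by key/value pairs. Functions, macros and TCO thunks cannot be compared and
/// produce an error.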
fn mal_eq(arg1: &Rc<Ast>, arg2: &Rc<Ast>) -> Result<bool> {
Ok(match (&**arg1,&**arg2){
(Ast::UserFunction{..}, _) => bail!("Can't compare functions"),
(_, Ast::UserFunction{..}) => bail!("Can't compare functions"),
(Ast::BuiltinFunction(..),_) => bail!("Can't compare functions"),
(_, Ast::BuiltinFunction(..)) => bail!("Can't compare functions"),
(Ast::BuiltinMacro(..),_) => bail!("Can't compare macros"),
(_, Ast::BuiltinMacro(..)) => bail!("Can't compare macros"),
(Ast::EvalMeAgain{..},_) => bail!("Can't compare TCO thunks"),
(_, Ast::EvalMeAgain{..}) => bail!("Can't compare TCO thunks"),
(Int!(x),Int!(y)) if x==y => true,
(Int!(_),_) => false,
| (Ast::Round(x),Ast::Round(y))
| (Ast::Square(x),Ast::Square(y))
| (Ast::Round(x),Ast::Square(y))
| (Ast::Square(x),Ast::Round(y))
=> {
if x.len() != y.len() {
return Ok(false);
};
for (a1,a2) in x.iter().zip(y.iter()) {
if !mal_eq(a1,a2)? {
return Ok(false);
};
};
true
}
| (Ast::Curly(x),Ast::Curly(y))
=> {
if x.len() != y.len() {
return Ok(false);
};
for (s1,a1) in x.iter() {
if let Some(a2) = y.get(s1) {
if !mal_eq(a1,a2)? {
return Ok(false);
};
} else {
return Ok(false);
}
};
true
}
| (Ast::Round(_),_)
| (Ast::Square(_),_)
=> false,
| (Ast::Curly(_),_)
=> false,
(Nil!(), Nil!()) => true,
(Nil!(), _) => false,
(Ast::Simple(SAst::Symbol(x)),Ast::Simple(SAst::Symbol(y)))
if x == y => true,
(Ast::Simple(SAst::Symbol(_)),_) => false,
(Ast::Simple(SAst::Bool(x)),Ast::Simple(SAst::Bool(y)))
if x == y => true,
(Ast::Simple(SAst::Bool(_)),_) => false,
(Ast::Simple(SAst::Atom(x)),Ast::Simple(SAst::Atom(y)))
if x == y => true,
(Ast::Simple(SAst::Atom(_)),_) => false,
(Ast::Simple(SAst::StrLit(x)),Ast::Simple(SAst::StrLit(y)))
if x == y => true,
(Ast::Simple(SAst::StrLit(_)),_) => false,
})
}
| 36.187817 | 87 | 0.41759 |
fe26965213fdb7396849a5c52ff60c55f1822dd7
| 4,197 |
use std::str::FromStr;
use config::defaults::DEFAULT_VARIANT;
use {Error, ErrorKind};
impl Default for Variant {
/// Returns [`Variant::Argon2id`](enum.Variant.html#variant.Argon2id)
fn default() -> Variant {
DEFAULT_VARIANT
}
}
impl FromStr for Variant {
///
type Err = Error;
/// Performs the following mapping:
/// * `"argon2d"` => `Ok(Variant::Argon2d)`<br/>
/// * `"argon2i"` => `Ok(Variant::Argon2i)`<br/>
/// * `"argon2id"` => `Ok(Variant::Argon2id)`<br/>
/// * anything else => an error
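    ///
    /// A minimal illustrative sketch (marked `ignore`; it assumes `Variant` is in
    /// scope and that the error type implements `Debug`):
    ///
    /// ```ignore
    /// use std::str::FromStr;
    ///
    /// assert_eq!(Variant::from_str("argon2id").unwrap(), Variant::Argon2id);
    /// assert!(Variant::from_str("argon2x").is_err());
    /// ```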
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"argon2d" => Ok(Variant::Argon2d),
"argon2i" => Ok(Variant::Argon2i),
"argon2id" => Ok(Variant::Argon2id),
_ => {
Err(Error::new(ErrorKind::VariantEncodeError).add_context(format!("String: {}", s)))
}
}
}
}
/// Enum representing the various variants of the Argon2 algorithm (
/// [`Argon2d`](enum.Variant.html#variant.Argon2d),
/// [`Argon2i`](enum.Variant.html#variant.Argon2i), and
/// [`Argon2id`](enum.Variant.html#variant.Argon2id)).
///
/// According to the [latest (as of 5/18) Argon2 RFC](https://tools.ietf.org/html/draft-irtf-cfrg-argon2-03) ...
/// "Argon2 has one primary variant: Argon2id, and two supplementary
/// variants: Argon2d and Argon2i. Argon2d uses data-dependent memory
/// access, which makes it suitable for ... applications with no threats from
/// side-channel timing attacks. Argon2i uses data-independent memory access,
/// which is preferred for password hashing and password-based key derivation.
/// Argon2id works as Argon2i for the first half of the first iteration over the memory,
/// and as Argon2d for the rest, thus providing both side-channel attack
/// protection and brute-force cost savings due to time-memory tradeoffs." If you do not
/// know which variant to use, use the default, which is [`Argon2id`](enum.Variant.html#variant.Argon2id))
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
pub enum Variant {
    /// Variant of the Argon2 algorithm that is faster and uses data-dependent memory access,
    /// which makes it suitable for applications with no threats from side-channel timing attacks.
    /// Do <b><u>not</u></b> use this unless you have a specific reason to.
Argon2d = 0,
/// Variant of the Argon2 algorithm that uses data-independent memory access, which is
    /// preferred for password hashing and password-based key derivation. Do <b><u>not</u></b> use
/// this unless you have a specific reason to.
Argon2i = 1,
/// Default variant of the Argon2 algorithm that works as Argon2i for the first half of the
/// first iteration over the memory, and as Argon2d for the rest, thus providing both
/// side-channel attack protection and brute-force cost savings due to time-memory tradeoffs.
/// Use this unless you have a specific reason not to.
Argon2id = 2,
}
impl Variant {
/// Performs the following mapping:
    /// * `Variant::Argon2d` => `"argon2d"`<br/>
    /// * `Variant::Argon2i` => `"argon2i"`<br/>
    /// * `Variant::Argon2id` => `"argon2id"`
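    ///
    /// A minimal illustrative sketch (marked `ignore`; assumes `Variant` is in scope):
    ///
    /// ```ignore
    /// assert_eq!(Variant::Argon2id.as_str(), "argon2id");
    /// ```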
pub fn as_str(&self) -> &'static str {
match *self {
Variant::Argon2d => "argon2d",
Variant::Argon2i => "argon2i",
Variant::Argon2id => "argon2id",
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_send() {
fn assert_send<T: Send>() {}
assert_send::<Variant>();
}
#[test]
fn test_sync() {
fn assert_sync<T: Sync>() {}
assert_sync::<Variant>();
}
#[cfg(feature = "serde")]
#[test]
fn test_serialize() {
use serde;
fn assert_serialize<T: serde::Serialize>() {}
assert_serialize::<Variant>();
}
#[cfg(feature = "serde")]
#[test]
fn test_deserialize() {
use serde;
fn assert_deserialize<'de, T: serde::Deserialize<'de>>() {}
assert_deserialize::<Variant>();
}
}
| 36.181034 | 112 | 0.629497 |
7623c3afefe94ade7e0e5ebb328c2c9e4534d432
| 6,070 |
mod utils;
use crypto_market_type::MarketType;
use crypto_msg_parser::{extract_symbol, extract_timestamp, parse_l2, parse_trade, TradeSide};
use crypto_msg_type::MessageType;
const EXCHANGE_NAME: &str = "bithumb";
#[test]
fn trade() {
let raw_msg = r#"{"code":"00006","data":[{"p":"59023.7500000000","s":"sell","symbol":"BTC-USDT","t":"1616271104","v":"0.002873","ver":"19894683"},{"p":"59017.5100000000","s":"sell","symbol":"BTC-USDT","t":"1616271104","v":"0.001587","ver":"19894682"}],"timestamp":1616271105098,"topic":"TRADE"}"#;
let trades = &parse_trade(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap();
assert_eq!(trades.len(), 2);
for trade in trades.iter() {
crate::utils::check_trade_fields(
EXCHANGE_NAME,
MarketType::Spot,
"BTC/USDT".to_string(),
extract_symbol(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap(),
trade,
raw_msg,
);
assert_eq!(trade.side, TradeSide::Sell);
}
assert_eq!(
1616271105098,
extract_timestamp(EXCHANGE_NAME, MarketType::Spot, raw_msg)
.unwrap()
.unwrap()
);
let raw_msg = r#"{"code":"00007","data":{"p":"1674.7700000000","symbol":"ETH-USDT","ver":"15186035","s":"buy","t":"1616487024","v":"0.065614"},"topic":"TRADE","timestamp":1616487024837}"#;
let trades = &parse_trade(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap();
assert_eq!(trades.len(), 1);
let trade = &trades[0];
assert_eq!(&trade.trade_id, "15186035");
crate::utils::check_trade_fields(
EXCHANGE_NAME,
MarketType::Spot,
"ETH/USDT".to_string(),
extract_symbol(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap(),
trade,
raw_msg,
);
assert_eq!(trade.quantity_base, 0.065614);
assert_eq!(trade.side, TradeSide::Buy);
}
#[test]
fn l2_orderbook_snapshot() {
let raw_msg = r#"{"code":"00006","data":{"b":[["35909.4500000000","0.007308"],["35905.3800000000","0.015820"],["35898.7500000000","0.016811"]],"s":[["34578.8700000000","0.000000"],["35927.4900000000","0.019198"],["35934.6800000000","0.016004"]],"symbol":"BTC-USDT","ver":"509670288"},"timestamp":1622446974153,"topic":"ORDERBOOK"}"#;
let orderbook = &parse_l2(EXCHANGE_NAME, MarketType::Spot, raw_msg, None).unwrap()[0];
assert_eq!(orderbook.asks.len(), 3);
assert_eq!(orderbook.bids.len(), 3);
assert!(orderbook.snapshot);
crate::utils::check_orderbook_fields(
EXCHANGE_NAME,
MarketType::Spot,
MessageType::L2Event,
"BTC/USDT".to_string(),
extract_symbol(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap(),
orderbook,
raw_msg,
);
assert_eq!(
1622446974153,
extract_timestamp(EXCHANGE_NAME, MarketType::Spot, raw_msg)
.unwrap()
.unwrap()
);
assert_eq!(orderbook.timestamp, 1622446974153);
assert_eq!(orderbook.seq_id, Some(509670288));
assert_eq!(orderbook.bids[0].price, 35909.45);
assert_eq!(orderbook.bids[0].quantity_base, 0.007308);
assert_eq!(orderbook.bids[0].quantity_quote, 35909.45 * 0.007308);
assert_eq!(orderbook.bids[2].price, 35898.75);
assert_eq!(orderbook.bids[2].quantity_base, 0.016811);
assert_eq!(orderbook.bids[2].quantity_quote, 35898.75 * 0.016811);
assert_eq!(orderbook.asks[0].price, 34578.87);
assert_eq!(orderbook.asks[0].quantity_base, 0.0);
assert_eq!(orderbook.asks[0].quantity_quote, 0.0);
assert_eq!(orderbook.asks[2].price, 35934.68);
assert_eq!(orderbook.asks[2].quantity_base, 0.016004);
assert_eq!(orderbook.asks[2].quantity_quote, 35934.68 * 0.016004);
}
#[test]
fn l2_orderbook_update() {
let raw_msg = r#"{"code":"00007","data":{"symbol":"BTC-USDT","b":[["34613.4400000000","0.015396"]],"ver":"509670303","s":[]},"topic":"ORDERBOOK","timestamp":1622446975394}"#;
let orderbook = &parse_l2(EXCHANGE_NAME, MarketType::Spot, raw_msg, None).unwrap()[0];
assert_eq!(orderbook.asks.len(), 0);
assert_eq!(orderbook.bids.len(), 1);
assert!(!orderbook.snapshot);
crate::utils::check_orderbook_fields(
EXCHANGE_NAME,
MarketType::Spot,
MessageType::L2Event,
"BTC/USDT".to_string(),
extract_symbol(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap(),
orderbook,
raw_msg,
);
assert_eq!(
1622446975394,
extract_timestamp(EXCHANGE_NAME, MarketType::Spot, raw_msg)
.unwrap()
.unwrap()
);
assert_eq!(orderbook.timestamp, 1622446975394);
assert_eq!(orderbook.seq_id, Some(509670303));
assert_eq!(orderbook.bids[0].price, 34613.44);
assert_eq!(orderbook.bids[0].quantity_base, 0.015396);
assert_eq!(orderbook.bids[0].quantity_quote, 34613.44 * 0.015396);
}
#[test]
fn ticker() {
let raw_msg = r#"{"code":"00007","data":{"p":"-0.0512","symbol":"BTC-USDT","ver":"70013048","vol":"22818095.72371200","c":"29951.93","t":"22818095.72371200","v":"747.110521","h":"32252.34","l":"29250.95"},"topic":"TICKER","timestamp":1654161207269}"#;
assert_eq!(
1654161207269,
extract_timestamp(EXCHANGE_NAME, MarketType::Spot, raw_msg)
.unwrap()
.unwrap()
);
assert_eq!(
"BTC-USDT",
extract_symbol(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap()
);
}
#[test]
fn l2_snapshot() {
let raw_msg = r#"{"data":{"symbol":"BTC-USDT","b":[["30402.440000000000","0.001458"],["30370.910000000000","0.002482"],["30338.010000000000","0.000540"]],"ver":"876388569","s":[["30651.830000000000","0.003630"],["30686.780000000000","0.003420"],["30698.550000000000","0.004859"]]},"code":"0","msg":"success","timestamp":1654234202305,"startTime":null}"#;
assert_eq!(
1654234202305,
extract_timestamp(EXCHANGE_NAME, MarketType::Spot, raw_msg)
.unwrap()
.unwrap()
);
assert_eq!(
"BTC-USDT",
extract_symbol(EXCHANGE_NAME, MarketType::Spot, raw_msg).unwrap()
);
}
| 36.787879 | 358 | 0.631796 |
0a98d75255f9b5fc898bf7050a6716ac6beeb321
| 18,827 |
#[cfg(all(
target_arch = "x86",
target_feature = "sse2",
not(feature = "scalar-math")
))]
use std::arch::x86::*;
#[cfg(all(
target_arch = "x86_64",
target_feature = "sse2",
not(feature = "scalar-math")
))]
use std::arch::x86_64::*;
use super::{scalar_acos, scalar_sin_cos, Mat3, Mat4, Vec3, Vec4};
use std::{
cmp::Ordering,
fmt,
ops::{Mul, MulAssign, Neg},
};
/// A quaternion representing an orientation.
///
/// This quaternion is intended to be of unit length but may denormalize due to
/// floating point "error creep" which can occur when successive quaternion
/// operations are applied.
///
/// This type is 16 byte aligned.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Quat(pub(crate) Vec4);
#[inline]
pub fn quat(x: f32, y: f32, z: f32, w: f32) -> Quat {
Quat::from_xyzw(x, y, z, w)
}
impl Quat {
/// Creates a new rotation quaternion.
///
/// This should generally not be called manually unless you know what you are doing. Use one of
/// the other constructors instead such as `identity` or `from_axis_angle`.
///
/// `from_xyzw` is mostly used by unit tests and `serde` deserialization.
#[inline]
pub fn from_xyzw(x: f32, y: f32, z: f32, w: f32) -> Self {
Self(Vec4::new(x, y, z, w))
}
#[inline]
#[deprecated(since = "0.8.3", note = "please use `Quat::from_xyzw` instead")]
pub fn new(x: f32, y: f32, z: f32, w: f32) -> Self {
Self(Vec4::new(x, y, z, w))
}
#[inline]
pub fn identity() -> Self {
Self(Vec4::new(0.0, 0.0, 0.0, 1.0))
}
/// Creates a new rotation quaternion from an unaligned `&[f32]`.
///
/// # Preconditions
///
/// The resulting quaternion is expected to be of unit length.
///
/// # Panics
///
/// Panics if `slice` length is less than 4.
#[inline]
pub fn from_slice_unaligned(slice: &[f32]) -> Self {
let q = Self(Vec4::from_slice_unaligned(slice));
glam_assert!(q.is_normalized());
q
}
/// Writes the quaternion to an unaligned `&mut [f32]`.
///
/// # Panics
///
/// Panics if `slice` length is less than 4.
#[inline]
pub fn write_to_slice_unaligned(self, slice: &mut [f32]) {
self.0.write_to_slice_unaligned(slice)
}
    /// Creates a new quaternion from a normalized rotation axis and angle
    /// (in radians).
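    ///
    /// A minimal illustrative sketch (marked `ignore`; assumes the crate root is `glam`):
    ///
    /// ```ignore
    /// use glam::{Quat, Vec3};
    ///
    /// let q = Quat::from_axis_angle(Vec3::unit_z(), std::f32::consts::FRAC_PI_2);
    /// assert!(q.is_normalized());
    /// ```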
#[inline]
pub fn from_axis_angle(axis: Vec3, angle: f32) -> Self {
glam_assert!(axis.is_normalized());
let (s, c) = scalar_sin_cos(angle * 0.5);
Self((axis * s).extend(c))
}
/// Creates a new quaternion from the angle (in radians) around the x axis.
#[inline]
pub fn from_rotation_x(angle: f32) -> Self {
let (s, c) = scalar_sin_cos(angle * 0.5);
Self::from_xyzw(s, 0.0, 0.0, c)
}
/// Creates a new quaternion from the angle (in radians) around the y axis.
#[inline]
pub fn from_rotation_y(angle: f32) -> Self {
let (s, c) = scalar_sin_cos(angle * 0.5);
Self::from_xyzw(0.0, s, 0.0, c)
}
/// Creates a new quaternion from the angle (in radians) around the z axis.
#[inline]
pub fn from_rotation_z(angle: f32) -> Self {
let (s, c) = scalar_sin_cos(angle * 0.5);
Self::from_xyzw(0.0, 0.0, s, c)
}
#[inline]
/// Create a quaternion from the given yaw (around y), pitch (around x) and roll (around z)
/// in radians.
pub fn from_rotation_ypr(yaw: f32, pitch: f32, roll: f32) -> Self {
// Self::from_rotation_y(yaw) * Self::from_rotation_x(pitch) * Self::from_rotation_z(roll)
let (y0, w0) = scalar_sin_cos(yaw * 0.5);
let (x1, w1) = scalar_sin_cos(pitch * 0.5);
let (z2, w2) = scalar_sin_cos(roll * 0.5);
let x3 = w0 * x1;
let y3 = y0 * w1;
let z3 = -y0 * x1;
let w3 = w0 * w1;
let x4 = x3 * w2 + y3 * z2;
let y4 = -x3 * z2 + y3 * w2;
let z4 = w3 * z2 + z3 * w2;
let w4 = w3 * w2 - z3 * z2;
Self(Vec4::new(x4, y4, z4, w4))
}
#[inline]
fn from_rotation_axes(x_axis: Vec3, y_axis: Vec3, z_axis: Vec3) -> Self {
// from DirectXMath XMQuaternionRotationMatrix
// TODO: sse2 version
let (m00, m01, m02) = x_axis.into();
let (m10, m11, m12) = y_axis.into();
let (m20, m21, m22) = z_axis.into();
if m22 <= 0.0 {
// x^2 + y^2 >= z^2 + w^2
let dif10 = m11 - m00;
let omm22 = 1.0 - m22;
if dif10 <= 0.0 {
// x^2 >= y^2
let four_xsq = omm22 - dif10;
let inv4x = 0.5 / four_xsq.sqrt();
Self::from_xyzw(
four_xsq * inv4x,
(m01 + m10) * inv4x,
(m02 + m20) * inv4x,
(m12 - m21) * inv4x,
)
} else {
// y^2 >= x^2
let four_ysq = omm22 + dif10;
let inv4y = 0.5 / four_ysq.sqrt();
Self::from_xyzw(
(m01 + m10) * inv4y,
four_ysq * inv4y,
(m12 + m21) * inv4y,
(m20 - m02) * inv4y,
)
}
} else {
// z^2 + w^2 >= x^2 + y^2
let sum10 = m11 + m00;
let opm22 = 1.0 + m22;
if sum10 <= 0.0 {
// z^2 >= w^2
let four_zsq = opm22 - sum10;
let inv4z = 0.5 / four_zsq.sqrt();
Self::from_xyzw(
(m02 + m20) * inv4z,
(m12 + m21) * inv4z,
four_zsq * inv4z,
(m01 - m10) * inv4z,
)
} else {
// w^2 >= z^2
let four_wsq = opm22 + sum10;
let inv4w = 0.5 / four_wsq.sqrt();
Self::from_xyzw(
(m12 - m21) * inv4w,
(m20 - m02) * inv4w,
(m01 - m10) * inv4w,
four_wsq * inv4w,
)
}
}
}
/// Creates a new quaternion from a 3x3 rotation matrix.
#[inline]
pub fn from_rotation_mat3(mat: &Mat3) -> Self {
Self::from_rotation_axes(mat.x_axis(), mat.y_axis(), mat.z_axis())
}
/// Creates a new quaternion from a 3x3 rotation matrix inside a homogeneous
/// 4x4 matrix.
#[inline]
pub fn from_rotation_mat4(mat: &Mat4) -> Self {
Self::from_rotation_axes(
mat.x_axis().truncate(),
mat.y_axis().truncate(),
mat.z_axis().truncate(),
)
}
/// Returns the rotation axis and angle of `self`.
#[inline]
pub fn to_axis_angle(self) -> (Vec3, f32) {
const EPSILON: f32 = 1.0e-8;
const EPSILON_SQUARED: f32 = EPSILON * EPSILON;
let (x, y, z, w) = self.0.into();
let angle = scalar_acos(w) * 2.0;
let scale_sq = (1.0 - w * w).max(0.0);
if scale_sq >= EPSILON_SQUARED {
(Vec3::new(x, y, z) / scale_sq.sqrt(), angle)
} else {
(Vec3::unit_x(), angle)
}
}
/// Returns the quaternion conjugate of `self`. For a unit quaternion the
/// conjugate is also the inverse.
#[inline]
pub fn conjugate(self) -> Self {
#[cfg(not(all(target_feature = "sse2", not(feature = "scalar-math"))))]
{
Self::from_xyzw(-(self.0).0, -(self.0).1, -(self.0).2, (self.0).3)
}
#[cfg(all(target_feature = "sse2", not(feature = "scalar-math")))]
unsafe {
Self(Vec4(_mm_xor_ps(
(self.0).0,
_mm_set_ps(0.0, -0.0, -0.0, -0.0),
)))
}
}
/// Computes the dot product of `self` and `other`. The dot product is
    /// equal to the cosine of the angle between two quaternion rotations.
#[inline]
pub fn dot(self, other: Self) -> f32 {
self.0.dot(other.0)
}
/// Computes the length of `self`.
#[inline]
pub fn length(self) -> f32 {
self.0.length()
}
/// Computes the squared length of `self`.
///
/// This is generally faster than `Quat::length()` as it avoids a square
/// root operation.
#[inline]
pub fn length_squared(self) -> f32 {
self.0.length_squared()
}
/// Computes `1.0 / Quat::length()`.
///
/// For valid results, `self` must _not_ be of length zero.
#[inline]
pub fn length_reciprocal(self) -> f32 {
1.0 / self.0.length()
}
/// Returns `self` normalized to length 1.0.
///
/// For valid results, `self` must _not_ be of length zero.
#[inline]
pub fn normalize(self) -> Self {
let inv_len = self.0.length_reciprocal();
Self(self.0.mul(inv_len))
}
    /// Returns whether `self` is of length `1.0` or not.
///
/// Uses a precision threshold of `1e-6`.
#[inline]
pub fn is_normalized(self) -> bool {
is_normalized!(self)
}
#[inline]
pub fn is_near_identity(self) -> bool {
// from rtm quat_near_identity
const THRESHOLD_ANGLE: f32 = 0.002_847_144_6;
// Because of floating point precision, we cannot represent very small rotations.
// The closest f32 to 1.0 that is not 1.0 itself yields:
// 0.99999994.acos() * 2.0 = 0.000690533954 rad
//
// An error threshold of 1.e-6 is used by default.
// (1.0 - 1.e-6).acos() * 2.0 = 0.00284714461 rad
// (1.0 - 1.e-7).acos() * 2.0 = 0.00097656250 rad
//
// We don't really care about the angle value itself, only if it's close to 0.
// This will happen whenever quat.w is close to 1.0.
// If the quat.w is close to -1.0, the angle will be near 2*PI which is close to
// a negative 0 rotation. By forcing quat.w to be positive, we'll end up with
// the shortest path.
let positive_w_angle = scalar_acos(self.0.w().abs()) * 2.0;
positive_w_angle < THRESHOLD_ANGLE
}
/// Returns true if the absolute difference of all elements between `self`
/// and `other` is less than or equal to `max_abs_diff`.
///
    /// This can be used to compare if two `Quat`s contain similar elements. It
    /// works best when comparing with a known value. The `max_abs_diff` that
    /// should be used depends on the values being compared against.
///
/// For more on floating point comparisons see
/// https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/
#[inline]
pub fn abs_diff_eq(self, other: Self, max_abs_diff: f32) -> bool {
self.0.abs_diff_eq(other.0, max_abs_diff)
}
/// Performs a linear interpolation between `self` and `other` based on
/// the value `s`.
///
/// When `s` is `0.0`, the result will be equal to `self`. When `s`
/// is `1.0`, the result will be equal to `other`.
#[inline]
pub fn lerp(self, end: Self, s: f32) -> Self {
glam_assert!(self.is_normalized());
glam_assert!(end.is_normalized());
#[cfg(all(target_feature = "sse2", not(feature = "scalar-math")))]
unsafe {
let start = self.0;
let end = end.0;
let dot = start.dot_as_vec4(end);
            // Calculate the bias: if the dot product is positive or zero there is no bias,
            // but if it is negative, we want to flip the 'end' rotation XYZW components
let bias = _mm_and_ps(dot.into(), _mm_set_ps1(-0.0));
let interpolated = Vec4(_mm_add_ps(
_mm_mul_ps(
_mm_sub_ps(_mm_xor_ps(end.into(), bias), start.0),
_mm_set_ps1(s),
),
start.0,
));
Self(interpolated.normalize())
}
#[cfg(not(all(target_feature = "sse2", not(feature = "scalar-math"))))]
{
let start = self.0;
let end = end.0;
let dot = start.dot(end);
let bias = if dot >= 0.0 { 1.0 } else { -1.0 };
let interpolated = start + (s * ((end * bias) - start));
Self(interpolated.normalize())
}
}
#[inline]
/// Multiplies a quaternion and a 3D vector, rotating it.
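    ///
    /// A minimal illustrative sketch (marked `ignore`; assumes the crate root is `glam`,
    /// and the comparison tolerates floating point rounding): rotating the X axis a
    /// quarter turn about Z yields (approximately) the Y axis.
    ///
    /// ```ignore
    /// use glam::{Quat, Vec3};
    ///
    /// let q = Quat::from_rotation_z(std::f32::consts::FRAC_PI_2);
    /// let v = q.mul_vec3(Vec3::unit_x());
    /// assert!((v - Vec3::unit_y()).length() < 1e-6);
    /// ```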
pub fn mul_vec3(self, other: Vec3) -> Vec3 {
glam_assert!(self.is_normalized());
#[cfg(any(not(target_feature = "sse2"), feature = "scalar-math"))]
{
let w = self.0.w();
let b = self.0.truncate();
let b2 = b.dot(b);
other * (w * w - b2) + b * (other.dot(b) * 2.0) + b.cross(other) * (w * 2.0)
}
#[cfg(all(target_feature = "sse2", not(feature = "scalar-math")))]
{
let w = self.0.dup_w().truncate();
let two = Vec3::splat(2.0);
let b = self.0.truncate();
let b2 = b.dot_as_vec3(b);
other * (w * w - b2) + b * (other.dot_as_vec3(b) * two) + b.cross(other) * (w * two)
}
}
#[inline]
/// Multiplies two quaternions.
/// Note that due to floating point rounding the result may not be perfectly normalized.
pub fn mul_quat(self, other: Self) -> Self {
glam_assert!(self.is_normalized());
glam_assert!(other.is_normalized());
#[cfg(any(not(target_feature = "sse2"), feature = "scalar-math"))]
{
let (x0, y0, z0, w0) = self.0.into();
let (x1, y1, z1, w1) = other.0.into();
Self::from_xyzw(
w0 * x1 + x0 * w1 + y0 * z1 - z0 * y1,
w0 * y1 - x0 * z1 + y0 * w1 + z0 * x1,
w0 * z1 + x0 * y1 - y0 * x1 + z0 * w1,
w0 * w1 - x0 * x1 - y0 * y1 - z0 * z1,
)
}
#[cfg(all(target_feature = "sse2", not(feature = "scalar-math")))]
unsafe {
// from rtm quat_mul
let lhs = self.0.into();
let rhs = other.0.into();
let control_wzyx = _mm_set_ps(-1.0, 1.0, -1.0, 1.0);
let control_zwxy = _mm_set_ps(-1.0, -1.0, 1.0, 1.0);
let control_yxwz = _mm_set_ps(-1.0, 1.0, 1.0, -1.0);
let r_xxxx = _mm_shuffle_ps(lhs, lhs, 0b00_00_00_00);
let r_yyyy = _mm_shuffle_ps(lhs, lhs, 0b01_01_01_01);
let r_zzzz = _mm_shuffle_ps(lhs, lhs, 0b10_10_10_10);
let r_wwww = _mm_shuffle_ps(lhs, lhs, 0b11_11_11_11);
let lxrw_lyrw_lzrw_lwrw = _mm_mul_ps(r_wwww, rhs);
let l_wzyx = _mm_shuffle_ps(rhs, rhs, 0b00_01_10_11);
let lwrx_lzrx_lyrx_lxrx = _mm_mul_ps(r_xxxx, l_wzyx);
let l_zwxy = _mm_shuffle_ps(l_wzyx, l_wzyx, 0b10_11_00_01);
let lwrx_nlzrx_lyrx_nlxrx = _mm_mul_ps(lwrx_lzrx_lyrx_lxrx, control_wzyx);
let lzry_lwry_lxry_lyry = _mm_mul_ps(r_yyyy, l_zwxy);
let l_yxwz = _mm_shuffle_ps(l_zwxy, l_zwxy, 0b00_01_10_11);
let lzry_lwry_nlxry_nlyry = _mm_mul_ps(lzry_lwry_lxry_lyry, control_zwxy);
let lyrz_lxrz_lwrz_lzrz = _mm_mul_ps(r_zzzz, l_yxwz);
let result0 = _mm_add_ps(lxrw_lyrw_lzrw_lwrw, lwrx_nlzrx_lyrx_nlxrx);
let nlyrz_lxrz_lwrz_wlzrz = _mm_mul_ps(lyrz_lxrz_lwrz_lzrz, control_yxwz);
let result1 = _mm_add_ps(lzry_lwry_nlxry_nlyry, nlyrz_lxrz_lwrz_wlzrz);
Self(Vec4(_mm_add_ps(result0, result1)))
}
}
/// Returns element `x`.
#[inline]
pub fn x(self) -> f32 {
self.0.x()
}
/// Returns element `y`.
#[inline]
pub fn y(self) -> f32 {
self.0.y()
}
/// Returns element `z`.
#[inline]
pub fn z(self) -> f32 {
self.0.z()
}
/// Returns element `w`.
#[inline]
pub fn w(self) -> f32 {
self.0.w()
}
}
impl fmt::Debug for Quat {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
#[cfg(all(target_feature = "sse2", not(feature = "scalar-math")))]
return fmt.debug_tuple("Quat").field(&(self.0).0).finish();
#[cfg(any(not(target_feature = "sse2"), feature = "scalar-math"))]
return fmt
.debug_tuple("Quat")
.field(&self.0.x())
.field(&self.0.y())
.field(&self.0.z())
.field(&self.0.w())
.finish();
}
}
impl fmt::Display for Quat {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let (x, y, z, w) = self.0.into();
write!(fmt, "[{}, {}, {}, {}]", x, y, z, w)
}
}
impl Mul<Quat> for Quat {
type Output = Self;
#[inline]
fn mul(self, other: Self) -> Self {
self.mul_quat(other)
}
}
impl MulAssign<Quat> for Quat {
#[inline]
fn mul_assign(&mut self, other: Self) {
*self = self.mul_quat(other);
}
}
impl Mul<Vec3> for Quat {
type Output = Vec3;
#[inline]
fn mul(self, other: Vec3) -> Vec3 {
self.mul_vec3(other)
}
}
impl Neg for Quat {
type Output = Self;
#[inline]
fn neg(self) -> Self {
Self(-1.0 * self.0)
}
}
impl Default for Quat {
#[inline]
fn default() -> Self {
Self::identity()
}
}
impl PartialEq for Quat {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.0.cmpeq(other.0).all()
}
}
impl PartialOrd for Quat {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.as_ref().partial_cmp(other.as_ref())
}
}
impl AsRef<[f32; 4]> for Quat {
#[inline]
fn as_ref(&self) -> &[f32; 4] {
self.0.as_ref()
}
}
impl AsMut<[f32; 4]> for Quat {
#[inline]
fn as_mut(&mut self) -> &mut [f32; 4] {
self.0.as_mut()
}
}
impl From<Vec4> for Quat {
#[inline]
fn from(v: Vec4) -> Self {
Self(v)
}
}
impl From<Quat> for Vec4 {
#[inline]
fn from(q: Quat) -> Self {
q.0
}
}
impl From<(f32, f32, f32, f32)> for Quat {
#[inline]
fn from(t: (f32, f32, f32, f32)) -> Self {
Quat::from_xyzw(t.0, t.1, t.2, t.3)
}
}
impl From<Quat> for (f32, f32, f32, f32) {
#[inline]
fn from(q: Quat) -> Self {
q.0.into()
}
}
impl From<[f32; 4]> for Quat {
#[inline]
fn from(a: [f32; 4]) -> Self {
Self(a.into())
}
}
impl From<Quat> for [f32; 4] {
#[inline]
fn from(q: Quat) -> Self {
q.0.into()
}
}
#[cfg(all(target_feature = "sse2", not(feature = "scalar-math")))]
impl From<Quat> for __m128 {
// TODO: write test
#[cfg_attr(tarpaulin, skip)]
#[inline]
fn from(q: Quat) -> Self {
(q.0).0
}
}
#[cfg(all(target_feature = "sse2", not(feature = "scalar-math")))]
impl From<__m128> for Quat {
#[inline]
fn from(t: __m128) -> Self {
Self(Vec4(t))
}
}
| 30.563312 | 99 | 0.525256 |
d56e563eb664847e49accc77f32388f406f3f400
| 2,858 |
use carbonbot::{crawl_other, create_writer_threads};
use crypto_crawler::*;
use log::*;
use std::{env, str::FromStr};
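// Illustrative invocation (hypothetical values, not taken from this repository):
//   DATA_DIR=/data REDIS_URL=redis://localhost:6379/0 carbonbot binance spot trade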
pub fn crawl(
exchange: &'static str,
market_type: MarketType,
msg_type: MessageType,
data_dir: Option<String>,
redis_url: Option<String>,
) {
let (tx, rx) = std::sync::mpsc::channel::<Message>();
let writer_threads = create_writer_threads(rx, data_dir, redis_url);
if msg_type == MessageType::Candlestick {
crawl_candlestick(exchange, market_type, None, tx, None);
} else if msg_type == MessageType::OpenInterest {
crawl_open_interest(exchange, market_type, tx, None);
} else if msg_type == MessageType::Other {
crawl_other(exchange, market_type, tx, None);
} else {
let crawl_func = match msg_type {
MessageType::BBO => crawl_bbo,
MessageType::Trade => crawl_trade,
MessageType::L2Event => crawl_l2_event,
MessageType::L3Event => crawl_l3_event,
MessageType::L2Snapshot => crawl_l2_snapshot,
MessageType::L2TopK => crawl_l2_topk,
MessageType::L3Snapshot => crawl_l3_snapshot,
MessageType::Ticker => crawl_ticker,
MessageType::FundingRate => crawl_funding_rate,
_ => panic!("Not implemented"),
};
crawl_func(exchange, market_type, None, tx, None);
}
for thread in writer_threads {
thread.join().unwrap();
}
}
fn main() {
env_logger::init();
let args: Vec<String> = env::args().collect();
if args.len() != 4 {
println!("Usage: carbonbot <exchange> <market_type> <msg_type>");
return;
}
let exchange: &'static str = Box::leak(args[1].clone().into_boxed_str());
let market_type = MarketType::from_str(&args[2]);
if market_type.is_err() {
println!("Unknown market type: {}", &args[2]);
return;
}
let market_type = market_type.unwrap();
let msg_type = MessageType::from_str(&args[3]);
if msg_type.is_err() {
println!("Unknown msg type: {}", &args[3]);
return;
}
let msg_type = msg_type.unwrap();
let data_dir = if std::env::var("DATA_DIR").is_err() {
info!("The DATA_DIR environment variable does not exist");
None
} else {
let url = std::env::var("DATA_DIR").unwrap();
Some(url)
};
let redis_url = if std::env::var("REDIS_URL").is_err() {
info!("The REDIS_URL environment variable does not exist");
None
} else {
let url = std::env::var("REDIS_URL").unwrap();
Some(url)
};
if data_dir.is_none() && redis_url.is_none() {
        panic!("The environment variables DATA_DIR and REDIS_URL are not set; at least one of them should be set");
}
crawl(exchange, market_type, msg_type, data_dir, redis_url);
}
| 32.11236 | 114 | 0.609867 |