hexsha (stringlengths, 40–40) | size (int64, 4–1.05M) | content (stringlengths, 4–1.05M) | avg_line_length (float64, 1.33–100) | max_line_length (int64, 1–1k) | alphanum_fraction (float64, 0.25–1)
---|---|---|---|---|---
d785749afc9c20ad47e8a91c4b38e919f1a921fb | 1,126 |
// Checks that declaring a lang item with the wrong number of generic
// arguments produces errors instead of crashing the compiler
// (issues #83893 and #87573, and part of #9307).
#![feature(lang_items, no_core)]
#![no_core]
#![crate_type = "lib"]
#[lang = "sized"]
trait MySized {}
#[lang = "add"]
trait MyAdd<'a, T> {}
//~^^ ERROR: `add` language item must be applied to a trait with 1 generic argument [E0718]
#[lang = "drop_in_place"]
//~^ ERROR `drop_in_place` language item must be applied to a function with at least 1 generic
fn my_ptr_drop() {}
#[lang = "index"]
trait MyIndex<'a, T> {}
//~^^ ERROR: `index` language item must be applied to a trait with 1 generic argument [E0718]
#[lang = "phantom_data"]
//~^ ERROR `phantom_data` language item must be applied to a struct with 1 generic argument
struct MyPhantomData<T, U>;
//~^ ERROR parameter `T` is never used
//~| ERROR parameter `U` is never used
fn ice() {
// Use add
let r = 5;
let a = 6;
r + a;
// Use drop in place
my_ptr_drop();
// Use index
let arr = [0; 5];
let _ = arr[2];
// Use phantomdata
let _ = MyPhantomData::<(), i32>;
}
| 25.022222 | 94 | 0.642096 |
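For contrast, a hedged sketch of the trait shapes the E0718 messages above ask for; the arities are inferred from the error text, not from the compiler's real lang-item definitions.

#[lang = "add"]
trait MyAdd<Rhs> {}   // exactly one generic argument, as E0718 demands

#[lang = "index"]
trait MyIndex<Idx> {} // likewise exactly one generic argument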
2f307f03f9ea649bc979668eb396fb7ec510d11d | 2,059 |
use super::util::SinkExt;
use crate::buffers::Acker;
use crate::event::{self, Event};
use futures::{future, Sink};
use serde::{Deserialize, Serialize};
use tokio::codec::{FramedWrite, LinesCodec};
use tokio::io;
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "lowercase")]
pub enum Target {
Stdout,
Stderr,
}
impl Default for Target {
fn default() -> Self {
Target::Stdout
}
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct ConsoleSinkConfig {
#[serde(default)]
pub target: Target,
pub encoding: Option<Encoding>,
}
#[derive(Deserialize, Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum Encoding {
Text,
Json,
}
#[typetag::serde(name = "console")]
impl crate::topology::config::SinkConfig for ConsoleSinkConfig {
fn build(&self, acker: Acker) -> Result<(super::RouterSink, super::Healthcheck), String> {
let encoding = self.encoding.clone();
let output: Box<dyn io::AsyncWrite + Send> = match self.target {
Target::Stdout => Box::new(io::stdout()),
Target::Stderr => Box::new(io::stderr()),
};
let sink = FramedWrite::new(output, LinesCodec::new())
.stream_ack(acker)
.sink_map_err(|_| ())
.with(move |event| encode_event(event, &encoding));
Ok((Box::new(sink), Box::new(future::ok(()))))
}
}
fn encode_event(event: Event, encoding: &Option<Encoding>) -> Result<String, ()> {
let log = event.into_log();
if (log.is_structured() && encoding != &Some(Encoding::Text))
|| encoding == &Some(Encoding::Json)
{
let bytes =
serde_json::to_vec(&log.all_fields()).map_err(|e| panic!("Error encoding: {}", e))?;
String::from_utf8(bytes).map_err(|e| panic!("Unable to convert json to utf8: {}", e))
} else {
let s = log
.get(&event::MESSAGE)
.map(|v| v.to_string_lossy())
.unwrap_or_else(|| "".into());
Ok(s)
}
}
| 28.205479 | 96 | 0.600777 |
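A minimal sketch of deserializing the sink config above; the JSON literal is an assumption, but the casing follows the serde attributes (`lowercase` for `Target`, `snake_case` for `Encoding`).

// Hypothetical input; any missing `encoding` falls back to None, and a
// missing `target` falls back to Target::Stdout via #[serde(default)].
let config: ConsoleSinkConfig =
    serde_json::from_str(r#"{ "target": "stderr", "encoding": "json" }"#).unwrap();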
bb7b8443d3ad5b7512dc11585a8f3f012e0d9d17 | 39,884 |
#[doc = "Reader of register SYNC"]
pub type R = crate::R<u32, super::SYNC>;
#[doc = "Writer for register SYNC"]
pub type W = crate::W<u32, super::SYNC>;
#[doc = "Register SYNC `reset()`'s with value 0"]
impl crate::ResetValue for super::SYNC {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Synchronize GPTM Timer 0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCT0_A {
#[doc = "0: GPTM0 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM0 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM0 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM0 is triggered"]
TATB,
}
impl From<SYNCT0_A> for u8 {
#[inline(always)]
fn from(variant: SYNCT0_A) -> Self {
match variant {
SYNCT0_A::NONE => 0,
SYNCT0_A::TA => 1,
SYNCT0_A::TB => 2,
SYNCT0_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCT0`"]
pub type SYNCT0_R = crate::R<u8, SYNCT0_A>;
impl SYNCT0_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCT0_A {
match self.bits {
0 => SYNCT0_A::NONE,
1 => SYNCT0_A::TA,
2 => SYNCT0_A::TB,
3 => SYNCT0_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCT0_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCT0_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCT0_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCT0_A::TATB
}
}
#[doc = "Write proxy for field `SYNCT0`"]
pub struct SYNCT0_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCT0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCT0_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM0 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCT0_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM0 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCT0_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM0 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCT0_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM0 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCT0_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03);
self.w
}
}
#[doc = "Synchronize GPTM Timer 1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCT1_A {
#[doc = "0: GPTM1 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM1 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM1 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM1 is triggered"]
TATB,
}
impl From<SYNCT1_A> for u8 {
#[inline(always)]
fn from(variant: SYNCT1_A) -> Self {
match variant {
SYNCT1_A::NONE => 0,
SYNCT1_A::TA => 1,
SYNCT1_A::TB => 2,
SYNCT1_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCT1`"]
pub type SYNCT1_R = crate::R<u8, SYNCT1_A>;
impl SYNCT1_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCT1_A {
match self.bits {
0 => SYNCT1_A::NONE,
1 => SYNCT1_A::TA,
2 => SYNCT1_A::TB,
3 => SYNCT1_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCT1_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCT1_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCT1_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCT1_A::TATB
}
}
#[doc = "Write proxy for field `SYNCT1`"]
pub struct SYNCT1_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCT1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCT1_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM1 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCT1_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM1 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCT1_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM1 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCT1_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM1 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCT1_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2);
self.w
}
}
#[doc = "Synchronize GPTM Timer 2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCT2_A {
#[doc = "0: GPTM2 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM2 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM2 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM2 is triggered"]
TATB,
}
impl From<SYNCT2_A> for u8 {
#[inline(always)]
fn from(variant: SYNCT2_A) -> Self {
match variant {
SYNCT2_A::NONE => 0,
SYNCT2_A::TA => 1,
SYNCT2_A::TB => 2,
SYNCT2_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCT2`"]
pub type SYNCT2_R = crate::R<u8, SYNCT2_A>;
impl SYNCT2_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCT2_A {
match self.bits {
0 => SYNCT2_A::NONE,
1 => SYNCT2_A::TA,
2 => SYNCT2_A::TB,
3 => SYNCT2_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCT2_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCT2_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCT2_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCT2_A::TATB
}
}
#[doc = "Write proxy for field `SYNCT2`"]
pub struct SYNCT2_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCT2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCT2_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM2 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCT2_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM2 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCT2_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM2 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCT2_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM2 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCT2_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 4)) | (((value as u32) & 0x03) << 4);
self.w
}
}
#[doc = "Synchronize GPTM Timer 3\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCT3_A {
#[doc = "0: GPTM3 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM3 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM3 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM3 is triggered"]
TATB,
}
impl From<SYNCT3_A> for u8 {
#[inline(always)]
fn from(variant: SYNCT3_A) -> Self {
match variant {
SYNCT3_A::NONE => 0,
SYNCT3_A::TA => 1,
SYNCT3_A::TB => 2,
SYNCT3_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCT3`"]
pub type SYNCT3_R = crate::R<u8, SYNCT3_A>;
impl SYNCT3_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCT3_A {
match self.bits {
0 => SYNCT3_A::NONE,
1 => SYNCT3_A::TA,
2 => SYNCT3_A::TB,
3 => SYNCT3_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCT3_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCT3_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCT3_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCT3_A::TATB
}
}
#[doc = "Write proxy for field `SYNCT3`"]
pub struct SYNCT3_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCT3_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCT3_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM3 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCT3_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM3 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCT3_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM3 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCT3_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM3 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCT3_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);
self.w
}
}
#[doc = "Synchronize GPTM Timer 4\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCT4_A {
#[doc = "0: GPTM4 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM4 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM4 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM4 is triggered"]
TATB,
}
impl From<SYNCT4_A> for u8 {
#[inline(always)]
fn from(variant: SYNCT4_A) -> Self {
match variant {
SYNCT4_A::NONE => 0,
SYNCT4_A::TA => 1,
SYNCT4_A::TB => 2,
SYNCT4_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCT4`"]
pub type SYNCT4_R = crate::R<u8, SYNCT4_A>;
impl SYNCT4_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCT4_A {
match self.bits {
0 => SYNCT4_A::NONE,
1 => SYNCT4_A::TA,
2 => SYNCT4_A::TB,
3 => SYNCT4_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCT4_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCT4_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCT4_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCT4_A::TATB
}
}
#[doc = "Write proxy for field `SYNCT4`"]
pub struct SYNCT4_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCT4_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCT4_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM4 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCT4_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM4 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCT4_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM4 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCT4_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM4 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCT4_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
self.w
}
}
#[doc = "Synchronize GPTM Timer 5\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCT5_A {
#[doc = "0: GPTM5 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM5 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM5 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM5 is triggered"]
TATB,
}
impl From<SYNCT5_A> for u8 {
#[inline(always)]
fn from(variant: SYNCT5_A) -> Self {
match variant {
SYNCT5_A::NONE => 0,
SYNCT5_A::TA => 1,
SYNCT5_A::TB => 2,
SYNCT5_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCT5`"]
pub type SYNCT5_R = crate::R<u8, SYNCT5_A>;
impl SYNCT5_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCT5_A {
match self.bits {
0 => SYNCT5_A::NONE,
1 => SYNCT5_A::TA,
2 => SYNCT5_A::TB,
3 => SYNCT5_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCT5_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCT5_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCT5_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCT5_A::TATB
}
}
#[doc = "Write proxy for field `SYNCT5`"]
pub struct SYNCT5_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCT5_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCT5_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM5 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCT5_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM5 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCT5_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM5 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCT5_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM5 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCT5_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);
self.w
}
}
#[doc = "Synchronize GPTM 32/64-Bit Timer 0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCWT0_A {
#[doc = "0: GPTM 32/64-Bit Timer 0 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM 32/64-Bit Timer 0 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM 32/64-Bit Timer 0 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 0 is triggered"]
TATB,
}
impl From<SYNCWT0_A> for u8 {
#[inline(always)]
fn from(variant: SYNCWT0_A) -> Self {
match variant {
SYNCWT0_A::NONE => 0,
SYNCWT0_A::TA => 1,
SYNCWT0_A::TB => 2,
SYNCWT0_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCWT0`"]
pub type SYNCWT0_R = crate::R<u8, SYNCWT0_A>;
impl SYNCWT0_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCWT0_A {
match self.bits {
0 => SYNCWT0_A::NONE,
1 => SYNCWT0_A::TA,
2 => SYNCWT0_A::TB,
3 => SYNCWT0_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCWT0_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCWT0_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCWT0_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCWT0_A::TATB
}
}
#[doc = "Write proxy for field `SYNCWT0`"]
pub struct SYNCWT0_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCWT0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCWT0_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM 32/64-Bit Timer 0 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCWT0_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM 32/64-Bit Timer 0 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCWT0_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM 32/64-Bit Timer 0 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCWT0_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 0 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCWT0_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 12)) | (((value as u32) & 0x03) << 12);
self.w
}
}
#[doc = "Synchronize GPTM 32/64-Bit Timer 1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCWT1_A {
#[doc = "0: GPTM 32/64-Bit Timer 1 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM 32/64-Bit Timer 1 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM 32/64-Bit Timer 1 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 1 is triggered"]
TATB,
}
impl From<SYNCWT1_A> for u8 {
#[inline(always)]
fn from(variant: SYNCWT1_A) -> Self {
match variant {
SYNCWT1_A::NONE => 0,
SYNCWT1_A::TA => 1,
SYNCWT1_A::TB => 2,
SYNCWT1_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCWT1`"]
pub type SYNCWT1_R = crate::R<u8, SYNCWT1_A>;
impl SYNCWT1_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCWT1_A {
match self.bits {
0 => SYNCWT1_A::NONE,
1 => SYNCWT1_A::TA,
2 => SYNCWT1_A::TB,
3 => SYNCWT1_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCWT1_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCWT1_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCWT1_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCWT1_A::TATB
}
}
#[doc = "Write proxy for field `SYNCWT1`"]
pub struct SYNCWT1_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCWT1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCWT1_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM 32/64-Bit Timer 1 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCWT1_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM 32/64-Bit Timer 1 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCWT1_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM 32/64-Bit Timer 1 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCWT1_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 1 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCWT1_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 14)) | (((value as u32) & 0x03) << 14);
self.w
}
}
#[doc = "Synchronize GPTM 32/64-Bit Timer 2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCWT2_A {
#[doc = "0: GPTM 32/64-Bit Timer 2 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM 32/64-Bit Timer 2 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM 32/64-Bit Timer 2 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 2 is triggered"]
TATB,
}
impl From<SYNCWT2_A> for u8 {
#[inline(always)]
fn from(variant: SYNCWT2_A) -> Self {
match variant {
SYNCWT2_A::NONE => 0,
SYNCWT2_A::TA => 1,
SYNCWT2_A::TB => 2,
SYNCWT2_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCWT2`"]
pub type SYNCWT2_R = crate::R<u8, SYNCWT2_A>;
impl SYNCWT2_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCWT2_A {
match self.bits {
0 => SYNCWT2_A::NONE,
1 => SYNCWT2_A::TA,
2 => SYNCWT2_A::TB,
3 => SYNCWT2_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCWT2_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCWT2_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCWT2_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCWT2_A::TATB
}
}
#[doc = "Write proxy for field `SYNCWT2`"]
pub struct SYNCWT2_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCWT2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCWT2_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM 32/64-Bit Timer 2 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCWT2_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM 32/64-Bit Timer 2 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCWT2_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM 32/64-Bit Timer 2 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCWT2_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 2 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCWT2_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16);
self.w
}
}
#[doc = "Synchronize GPTM 32/64-Bit Timer 3\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCWT3_A {
#[doc = "0: GPTM 32/64-Bit Timer 3 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM 32/64-Bit Timer 3 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM 32/64-Bit Timer 3 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 3 is triggered"]
TATB,
}
impl From<SYNCWT3_A> for u8 {
#[inline(always)]
fn from(variant: SYNCWT3_A) -> Self {
match variant {
SYNCWT3_A::NONE => 0,
SYNCWT3_A::TA => 1,
SYNCWT3_A::TB => 2,
SYNCWT3_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCWT3`"]
pub type SYNCWT3_R = crate::R<u8, SYNCWT3_A>;
impl SYNCWT3_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCWT3_A {
match self.bits {
0 => SYNCWT3_A::NONE,
1 => SYNCWT3_A::TA,
2 => SYNCWT3_A::TB,
3 => SYNCWT3_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCWT3_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCWT3_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCWT3_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCWT3_A::TATB
}
}
#[doc = "Write proxy for field `SYNCWT3`"]
pub struct SYNCWT3_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCWT3_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCWT3_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM 32/64-Bit Timer 3 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCWT3_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM 32/64-Bit Timer 3 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCWT3_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM 32/64-Bit Timer 3 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCWT3_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 3 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCWT3_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 18)) | (((value as u32) & 0x03) << 18);
self.w
}
}
#[doc = "Synchronize GPTM 32/64-Bit Timer 4\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCWT4_A {
#[doc = "0: GPTM 32/64-Bit Timer 4 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM 32/64-Bit Timer 4 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM 32/64-Bit Timer 4 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 4 is triggered"]
TATB,
}
impl From<SYNCWT4_A> for u8 {
#[inline(always)]
fn from(variant: SYNCWT4_A) -> Self {
match variant {
SYNCWT4_A::NONE => 0,
SYNCWT4_A::TA => 1,
SYNCWT4_A::TB => 2,
SYNCWT4_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCWT4`"]
pub type SYNCWT4_R = crate::R<u8, SYNCWT4_A>;
impl SYNCWT4_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCWT4_A {
match self.bits {
0 => SYNCWT4_A::NONE,
1 => SYNCWT4_A::TA,
2 => SYNCWT4_A::TB,
3 => SYNCWT4_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCWT4_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCWT4_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCWT4_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCWT4_A::TATB
}
}
#[doc = "Write proxy for field `SYNCWT4`"]
pub struct SYNCWT4_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCWT4_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCWT4_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM 32/64-Bit Timer 4 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCWT4_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM 32/64-Bit Timer 4 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCWT4_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM 32/64-Bit Timer 4 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCWT4_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 4 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCWT4_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 20)) | (((value as u32) & 0x03) << 20);
self.w
}
}
#[doc = "Synchronize GPTM 32/64-Bit Timer 5\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYNCWT5_A {
#[doc = "0: GPTM 32/64-Bit Timer 5 is not affected"]
NONE,
#[doc = "1: A timeout event for Timer A of GPTM 32/64-Bit Timer 5 is triggered"]
TA,
#[doc = "2: A timeout event for Timer B of GPTM 32/64-Bit Timer 5 is triggered"]
TB,
#[doc = "3: A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 5 is triggered"]
TATB,
}
impl From<SYNCWT5_A> for u8 {
#[inline(always)]
fn from(variant: SYNCWT5_A) -> Self {
match variant {
SYNCWT5_A::NONE => 0,
SYNCWT5_A::TA => 1,
SYNCWT5_A::TB => 2,
SYNCWT5_A::TATB => 3,
}
}
}
#[doc = "Reader of field `SYNCWT5`"]
pub type SYNCWT5_R = crate::R<u8, SYNCWT5_A>;
impl SYNCWT5_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYNCWT5_A {
match self.bits {
0 => SYNCWT5_A::NONE,
1 => SYNCWT5_A::TA,
2 => SYNCWT5_A::TB,
3 => SYNCWT5_A::TATB,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == SYNCWT5_A::NONE
}
#[doc = "Checks if the value of the field is `TA`"]
#[inline(always)]
pub fn is_ta(&self) -> bool {
*self == SYNCWT5_A::TA
}
#[doc = "Checks if the value of the field is `TB`"]
#[inline(always)]
pub fn is_tb(&self) -> bool {
*self == SYNCWT5_A::TB
}
#[doc = "Checks if the value of the field is `TATB`"]
#[inline(always)]
pub fn is_tatb(&self) -> bool {
*self == SYNCWT5_A::TATB
}
}
#[doc = "Write proxy for field `SYNCWT5`"]
pub struct SYNCWT5_W<'a> {
w: &'a mut W,
}
impl<'a> SYNCWT5_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYNCWT5_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "GPTM 32/64-Bit Timer 5 is not affected"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(SYNCWT5_A::NONE)
}
#[doc = "A timeout event for Timer A of GPTM 32/64-Bit Timer 5 is triggered"]
#[inline(always)]
pub fn ta(self) -> &'a mut W {
self.variant(SYNCWT5_A::TA)
}
#[doc = "A timeout event for Timer B of GPTM 32/64-Bit Timer 5 is triggered"]
#[inline(always)]
pub fn tb(self) -> &'a mut W {
self.variant(SYNCWT5_A::TB)
}
#[doc = "A timeout event for both Timer A and Timer B of GPTM 32/64-Bit Timer 5 is triggered"]
#[inline(always)]
pub fn tatb(self) -> &'a mut W {
self.variant(SYNCWT5_A::TATB)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);
self.w
}
}
impl R {
#[doc = "Bits 0:1 - Synchronize GPTM Timer 0"]
#[inline(always)]
pub fn synct0(&self) -> SYNCT0_R {
SYNCT0_R::new((self.bits & 0x03) as u8)
}
#[doc = "Bits 2:3 - Synchronize GPTM Timer 1"]
#[inline(always)]
pub fn synct1(&self) -> SYNCT1_R {
SYNCT1_R::new(((self.bits >> 2) & 0x03) as u8)
}
#[doc = "Bits 4:5 - Synchronize GPTM Timer 2"]
#[inline(always)]
pub fn synct2(&self) -> SYNCT2_R {
SYNCT2_R::new(((self.bits >> 4) & 0x03) as u8)
}
#[doc = "Bits 6:7 - Synchronize GPTM Timer 3"]
#[inline(always)]
pub fn synct3(&self) -> SYNCT3_R {
SYNCT3_R::new(((self.bits >> 6) & 0x03) as u8)
}
#[doc = "Bits 8:9 - Synchronize GPTM Timer 4"]
#[inline(always)]
pub fn synct4(&self) -> SYNCT4_R {
SYNCT4_R::new(((self.bits >> 8) & 0x03) as u8)
}
#[doc = "Bits 10:11 - Synchronize GPTM Timer 5"]
#[inline(always)]
pub fn synct5(&self) -> SYNCT5_R {
SYNCT5_R::new(((self.bits >> 10) & 0x03) as u8)
}
#[doc = "Bits 12:13 - Synchronize GPTM 32/64-Bit Timer 0"]
#[inline(always)]
pub fn syncwt0(&self) -> SYNCWT0_R {
SYNCWT0_R::new(((self.bits >> 12) & 0x03) as u8)
}
#[doc = "Bits 14:15 - Synchronize GPTM 32/64-Bit Timer 1"]
#[inline(always)]
pub fn syncwt1(&self) -> SYNCWT1_R {
SYNCWT1_R::new(((self.bits >> 14) & 0x03) as u8)
}
#[doc = "Bits 16:17 - Synchronize GPTM 32/64-Bit Timer 2"]
#[inline(always)]
pub fn syncwt2(&self) -> SYNCWT2_R {
SYNCWT2_R::new(((self.bits >> 16) & 0x03) as u8)
}
#[doc = "Bits 18:19 - Synchronize GPTM 32/64-Bit Timer 3"]
#[inline(always)]
pub fn syncwt3(&self) -> SYNCWT3_R {
SYNCWT3_R::new(((self.bits >> 18) & 0x03) as u8)
}
#[doc = "Bits 20:21 - Synchronize GPTM 32/64-Bit Timer 4"]
#[inline(always)]
pub fn syncwt4(&self) -> SYNCWT4_R {
SYNCWT4_R::new(((self.bits >> 20) & 0x03) as u8)
}
#[doc = "Bits 22:23 - Synchronize GPTM 32/64-Bit Timer 5"]
#[inline(always)]
pub fn syncwt5(&self) -> SYNCWT5_R {
SYNCWT5_R::new(((self.bits >> 22) & 0x03) as u8)
}
}
impl W {
#[doc = "Bits 0:1 - Synchronize GPTM Timer 0"]
#[inline(always)]
pub fn synct0(&mut self) -> SYNCT0_W {
SYNCT0_W { w: self }
}
#[doc = "Bits 2:3 - Synchronize GPTM Timer 1"]
#[inline(always)]
pub fn synct1(&mut self) -> SYNCT1_W {
SYNCT1_W { w: self }
}
#[doc = "Bits 4:5 - Synchronize GPTM Timer 2"]
#[inline(always)]
pub fn synct2(&mut self) -> SYNCT2_W {
SYNCT2_W { w: self }
}
#[doc = "Bits 6:7 - Synchronize GPTM Timer 3"]
#[inline(always)]
pub fn synct3(&mut self) -> SYNCT3_W {
SYNCT3_W { w: self }
}
#[doc = "Bits 8:9 - Synchronize GPTM Timer 4"]
#[inline(always)]
pub fn synct4(&mut self) -> SYNCT4_W {
SYNCT4_W { w: self }
}
#[doc = "Bits 10:11 - Synchronize GPTM Timer 5"]
#[inline(always)]
pub fn synct5(&mut self) -> SYNCT5_W {
SYNCT5_W { w: self }
}
#[doc = "Bits 12:13 - Synchronize GPTM 32/64-Bit Timer 0"]
#[inline(always)]
pub fn syncwt0(&mut self) -> SYNCWT0_W {
SYNCWT0_W { w: self }
}
#[doc = "Bits 14:15 - Synchronize GPTM 32/64-Bit Timer 1"]
#[inline(always)]
pub fn syncwt1(&mut self) -> SYNCWT1_W {
SYNCWT1_W { w: self }
}
#[doc = "Bits 16:17 - Synchronize GPTM 32/64-Bit Timer 2"]
#[inline(always)]
pub fn syncwt2(&mut self) -> SYNCWT2_W {
SYNCWT2_W { w: self }
}
#[doc = "Bits 18:19 - Synchronize GPTM 32/64-Bit Timer 3"]
#[inline(always)]
pub fn syncwt3(&mut self) -> SYNCWT3_W {
SYNCWT3_W { w: self }
}
#[doc = "Bits 20:21 - Synchronize GPTM 32/64-Bit Timer 4"]
#[inline(always)]
pub fn syncwt4(&mut self) -> SYNCWT4_W {
SYNCWT4_W { w: self }
}
#[doc = "Bits 22:23 - Synchronize GPTM 32/64-Bit Timer 5"]
#[inline(always)]
pub fn syncwt5(&mut self) -> SYNCWT5_W {
SYNCWT5_W { w: self }
}
}
| 30.656418 | 101 | 0.544078 |
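A hedged sketch of driving this register through the generated write proxies; the peripheral handle `gptm` and how it is obtained from the PAC are assumptions, but the builder chain follows the svd2rust API defined above.

// Trigger a Timer A timeout event on GPTM0 and timeout events on both
// timers of 32/64-bit GPTM5; each field setter returns &mut W, so the
// calls chain inside a single write.
gptm.sync.write(|w| w.synct0().ta().syncwt5().tatb());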
8f0e3448362a4aca7fa531f043ef4032ead9879c | 4,901 |
use std::fs::File;
use std::io::{Cursor, Read, Write};
use std::path::{Path, PathBuf};
use rocket::http::hyper::header::{Charset, ContentDisposition, DispositionParam, DispositionType};
use rocket::http::uri::{SegmentError, Segments};
use rocket::http::{ContentType, Status};
use rocket::request::FromSegments;
use rocket::response::{NamedFile, Responder};
use rocket::{response, Request, Response};
use walkdir::WalkDir;
use zip::write::FileOptions;
use zip::ZipWriter;
use crate::response_status::ResponseStatus;
pub struct Downloader {
pub obex_path: PathBuf,
}
pub struct UnsafePathBuf(pub PathBuf);
impl<'a> FromSegments<'a> for UnsafePathBuf {
type Error = SegmentError;
fn from_segments(segments: Segments<'a>) -> Result<UnsafePathBuf, SegmentError> {
        Ok(UnsafePathBuf(segments.into_path_buf(true)?))
}
}
pub struct DownloadResponse {
pub path: Option<PathBuf>,
pub data: Option<Vec<u8>>,
}
impl DownloadResponse {
pub fn from(path: PathBuf) -> Self {
DownloadResponse {
path: Some(path),
data: None,
}
}
pub fn from_zip(path: PathBuf, data: Vec<u8>) -> Self {
DownloadResponse {
path: Some(path),
data: Some(data),
}
}
}
impl<'r> Responder<'r> for DownloadResponse {
fn respond_to(self, req: &Request) -> response::Result<'r> {
if self.data.is_some() {
Response::build()
.header(ContentType::ZIP)
.header(ContentDisposition {
disposition: DispositionType::Attachment,
parameters: vec![DispositionParam::Filename(
Charset::Us_Ascii,
None,
Vec::from(self.path.unwrap().file_name().unwrap().to_str().unwrap()),
)],
})
.sized_body(Cursor::new(self.data.unwrap()))
.ok()
} else if self.path.is_some() {
let mut response = NamedFile::open(self.path.unwrap())
.ok()
.unwrap()
.respond_to(req)?;
if !response.headers().contains("Content-Type") {
response.set_header(ContentType::new("text", "plain"));
}
Ok(response)
} else {
Err(Status::InternalServerError)
}
}
}
impl Downloader {
pub fn download(&self, path: PathBuf) -> Result<DownloadResponse, ResponseStatus> {
let relative_path_str =
urlencoding::decode(path.to_str().unwrap_or("/")).map_err(ResponseStatus::from)?;
let absolute_path = self.obex_path.join(Path::new(relative_path_str.as_ref()));
let abs_path_without_zip =
PathBuf::from(absolute_path.to_str().unwrap().trim_end_matches(".zip"));
if relative_path_str.ends_with(".zip") && abs_path_without_zip.is_dir() {
self.get_dir(abs_path_without_zip, absolute_path)
} else if absolute_path.exists() {
Ok(DownloadResponse::from(absolute_path))
} else if relative_path_str.ends_with(".zip") {
Err(ResponseStatus {
status: Status::BadRequest,
message: format!(
"No such file ({}) or directory ({}",
relative_path_str,
abs_path_without_zip.to_str().unwrap()
),
})
} else {
Err(ResponseStatus {
status: Status::BadRequest,
message: format!("No such file {}", relative_path_str),
})
}
}
fn get_dir(
&self,
directory: PathBuf,
requested_path: PathBuf,
) -> Result<DownloadResponse, ResponseStatus> {
let mut data: Vec<u8> = Vec::new();
{
let mut zip = ZipWriter::new(Cursor::new(&mut data));
for entry in WalkDir::new(&directory) {
let entry = entry.map_err(ResponseStatus::from)?;
if !entry.path().is_dir() {
let mut next_file = File::open(entry.path())?;
zip.start_file(
entry
.path()
.strip_prefix(directory.as_path())
.unwrap()
.to_str()
.unwrap(),
FileOptions::default(),
)
.map_err(ResponseStatus::from)?;
let mut entry_data: Vec<u8> = Vec::new();
next_file.read_to_end(&mut entry_data)?;
                // write_all guarantees the whole buffer lands in the archive
                zip.write_all(entry_data.as_slice())?;
}
}
zip.finish().map_err(ResponseStatus::from)?;
}
Ok(DownloadResponse::from_zip(requested_path, data))
}
}
| 33.114865 | 98 | 0.5303 |
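A hedged sketch of wiring `Downloader` into a Rocket 0.4 route; the mount path and managed-state setup are assumptions, not taken from this file.

// Hypothetical route: Downloader is registered via .manage(...) at startup.
#[get("/obex/<path..>")]
fn get_file(
    path: UnsafePathBuf,
    downloader: rocket::State<Downloader>,
) -> Result<DownloadResponse, ResponseStatus> {
    downloader.download(path.0)
}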
76f038383856d841a1afcbbae3525aa220355889 | 4,925 |
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::glib_wrapper! {
pub struct ColorChooser(Interface<ffi::GtkColorChooser>);
match fn {
get_type => || ffi::gtk_color_chooser_get_type(),
}
}
pub const NONE_COLOR_CHOOSER: Option<&ColorChooser> = None;
pub trait ColorChooserExt: 'static {
fn get_rgba(&self) -> gdk::RGBA;
fn get_use_alpha(&self) -> bool;
fn set_rgba(&self, color: &gdk::RGBA);
fn set_use_alpha(&self, use_alpha: bool);
fn connect_color_activated<F: Fn(&Self, &gdk::RGBA) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_use_alpha_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<ColorChooser>> ColorChooserExt for O {
fn get_rgba(&self) -> gdk::RGBA {
unsafe {
let mut color = gdk::RGBA::uninitialized();
ffi::gtk_color_chooser_get_rgba(
self.as_ref().to_glib_none().0,
color.to_glib_none_mut().0,
);
color
}
}
fn get_use_alpha(&self) -> bool {
unsafe {
from_glib(ffi::gtk_color_chooser_get_use_alpha(
self.as_ref().to_glib_none().0,
))
}
}
fn set_rgba(&self, color: &gdk::RGBA) {
unsafe {
ffi::gtk_color_chooser_set_rgba(self.as_ref().to_glib_none().0, color.to_glib_none().0);
}
}
fn set_use_alpha(&self, use_alpha: bool) {
unsafe {
ffi::gtk_color_chooser_set_use_alpha(
self.as_ref().to_glib_none().0,
use_alpha.to_glib(),
);
}
}
fn connect_color_activated<F: Fn(&Self, &gdk::RGBA) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn color_activated_trampoline<P, F: Fn(&P, &gdk::RGBA) + 'static>(
this: *mut ffi::GtkColorChooser,
color: *mut gdk::ffi::GdkRGBA,
f: glib::ffi::gpointer,
) where
P: IsA<ColorChooser>,
{
let f: &F = &*(f as *const F);
f(
&ColorChooser::from_glib_borrow(this).unsafe_cast_ref(),
&from_glib_borrow(color),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"color-activated\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
color_activated_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_rgba_trampoline<P, F: Fn(&P) + 'static>(
this: *mut ffi::GtkColorChooser,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<ColorChooser>,
{
let f: &F = &*(f as *const F);
f(&ColorChooser::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::rgba\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_rgba_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_use_alpha_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_use_alpha_trampoline<P, F: Fn(&P) + 'static>(
this: *mut ffi::GtkColorChooser,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<ColorChooser>,
{
let f: &F = &*(f as *const F);
f(&ColorChooser::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::use-alpha\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_use_alpha_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for ColorChooser {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("ColorChooser")
}
}
| 31.570513 | 100 | 0.527107 |
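A hedged sketch against any widget implementing `ColorChooser` (for example a GTK color button); constructing the widget itself happens outside this file and is assumed here.

// `chooser` is assumed to be some widget where `IsA<ColorChooser>` holds.
chooser.set_use_alpha(true);
chooser.connect_color_activated(|_chooser, rgba| {
    println!("color activated: {:?}", rgba);
});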
697a93fdc59c7516352aa5d9d540214a887f6423 | 1,209 |
// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png")]
//! Amazon Kinesis Firehose API Reference. Amazon Kinesis Firehose is a fully managed service that delivers real-time streaming data to destinations such as Amazon Simple Storage Service (Amazon S3), Amazon Elasticsearch Service (Amazon ES), and Amazon Redshift.
//!
//! If you're using the service, you're probably looking for [KinesisFirehoseClient](struct.KinesisFirehoseClient.html) and [KinesisFirehose](trait.KinesisFirehose.html).
extern crate futures;
extern crate rusoto_core;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
mod generated;
mod custom;
pub use generated::*;
pub use custom::*;
| 37.78125 | 296 | 0.638544 |
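A minimal sketch of constructing the client named in the doc comment above, following the usual rusoto constructor convention; the region choice is arbitrary.

let client = KinesisFirehoseClient::new(rusoto_core::Region::UsWest2);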
79428275dcf6f3d83c8085ad7192d1007e2d25c0 | 575 |
use rune_tests::*;
#[test]
fn test_binop_override() {
// The right hand side of the `is` expression requires a type, and therefore
// won't be used as an empty tuple constructor.
let out: (bool, bool, bool, bool) = rune! {
struct Timeout;
pub fn main() {
let timeout = Timeout;
(
timeout is Timeout,
timeout is not Timeout,
!(timeout is Timeout),
!(timeout is not Timeout),
)
}
};
assert_eq!(out, (true, false, false, true));
}
| 25 | 80 | 0.514783 |
09957ab5e52db14864bd0fa458465555d2088498 | 1,679 |
// Copyright 2020-2021 The Datafuse Authors.
//
// SPDX-License-Identifier: Apache-2.0.
pub use transform_aggregator_final::AggregatorFinalTransform;
pub use transform_aggregator_partial::AggregatorPartialTransform;
pub use transform_expression::ExpressionTransform;
pub use transform_expression_executor::ExpressionExecutor;
pub use transform_filter::FilterTransform;
pub use transform_groupby_final::GroupByFinalTransform;
pub use transform_groupby_partial::GroupByPartialTransform;
pub use transform_limit::LimitTransform;
pub use transform_limit_by::LimitByTransform;
pub use transform_projection::ProjectionTransform;
pub use transform_remote::RemoteTransform;
pub use transform_sort_merge::SortMergeTransform;
pub use transform_sort_partial::SortPartialTransform;
pub use transform_source::SourceTransform;
#[cfg(test)]
mod transform_aggregator_final_test;
#[cfg(test)]
mod transform_aggregator_partial_test;
#[cfg(test)]
mod transform_expression_test;
#[cfg(test)]
mod transform_filter_test;
#[cfg(test)]
mod transform_groupby_final_test;
#[cfg(test)]
mod transform_groupby_partial_test;
#[cfg(test)]
mod transform_limit_by_test;
#[cfg(test)]
mod transform_limit_test;
#[cfg(test)]
mod transform_projection_test;
#[cfg(test)]
mod transform_sort_test;
#[cfg(test)]
mod transform_source_test;
mod transform_aggregator_final;
mod transform_aggregator_partial;
mod transform_expression;
mod transform_expression_executor;
mod transform_filter;
mod transform_groupby_final;
mod transform_groupby_partial;
mod transform_limit;
mod transform_limit_by;
mod transform_projection;
mod transform_remote;
mod transform_sort_merge;
mod transform_sort_partial;
mod transform_source;
| 29.45614 | 65 | 0.849911 |
abd1cc627a9ea67b6b60781661aab48c1e25413b | 1,182 |
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UnsignedMessage {
#[prost(string, tag = "1")]
pub to: std::string::String,
#[prost(string, tag = "2")]
pub from: std::string::String,
#[prost(uint64, tag = "3")]
pub nonce: u64,
#[prost(string, tag = "4")]
pub value: std::string::String,
#[prost(int64, tag = "5")]
pub gas_limit: i64,
#[prost(string, tag = "6")]
pub gas_fee_cap: std::string::String,
#[prost(string, tag = "7")]
pub gas_premium: std::string::String,
#[prost(uint64, tag = "8")]
pub method: u64,
#[prost(string, tag = "9")]
pub params: std::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Signature {
#[prost(uint32, tag = "1")]
pub r#type: u32,
#[prost(string, tag = "2")]
pub data: std::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SignedMessage {
#[prost(string, tag = "1")]
pub cid: std::string::String,
#[prost(message, optional, tag = "2")]
pub message: ::std::option::Option<UnsignedMessage>,
#[prost(message, optional, tag = "3")]
pub signature: ::std::option::Option<Signature>,
}
| 31.105263 | 56 | 0.593909 |
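A minimal sketch of building and encoding one of the prost messages above; the field values are placeholders, not real Filecoin data.

use prost::Message;

let msg = UnsignedMessage {
    to: "t01234".to_string(),   // placeholder address
    from: "t01000".to_string(), // placeholder address
    nonce: 1,
    value: "1000".to_string(),
    gas_limit: 1_000_000,
    gas_fee_cap: "100".to_string(),
    gas_premium: "100".to_string(),
    method: 0,
    params: String::new(),
};
let mut buf = Vec::new();
msg.encode(&mut buf).expect("encoding into a Vec cannot fail");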
0e837297562ff484aa44880f9a07633ec8ec6798 | 10,051 |
/// # Defense Data
/// Converts pitch-by-pitch data into a defense database with the relevant fields. This module
/// provides one struct for the serializable defense record, as well as a `From` impl for the
/// `Pitch`-to-`Defense` conversion. Specifically, each pitch that has an "in play" event is
/// captured, and one record is created per fielder on record: the eight positional fielders
/// plus the pitcher, so up to nine per pitch.
///
use serde::{Deserialize, Serialize};
use crate::game::Pitch;
use crate::boxscore::Pos;
use crate::play_by_play::{Trajectory, Hardness};
use crate::schedule::GameType;
use crate::players::{Player, SideCode, SideDescription};
use std::collections::hash_map::HashMap;
use tree_buf::{Read, Write};
#[derive(Deserialize, Serialize, Debug, Read, Write, PartialEq)]
pub struct Defense {
pub game_date: String,
pub game_type: GameType,
pub fielder: u32,
pub fielder_name: String,
pub fielder_dob: String,
pub fielder_draft_pick_number: Option<u16>,
pub fielder_throws_code: Option<SideCode>,
pub fielder_throws_desc: Option<SideDescription>,
pub fielder_height_str: Option<String>,
pub fielder_height_in: u8,
pub fielder_weight: Option<u16>,
pub fielder_college_name: Option<String>,
pub fielder_birth_country: Option<String>,
pub fielder_mlb_debut: String,
pub fielded_by_id: Option<u32>,
pub fielded_by_name: String,
pub fielded_by_pos: Option<Pos>,
pub position: Pos,
pub outs_start: u8,
pub outs_end: u8,
pub balls_start: u8,
pub strikes_start: u8,
pub base_value_start: u8,
pub base_value_end: u8,
pub double_play_opp: bool,
pub runs: u8,
pub in_play_result: Option<crate::play_by_play::Event>,
pub batter_bats: crate::play_by_play::SideCode,
pub batter_bats_desc: Option<crate::play_by_play::SideDescription>,
pub batter: u32,
pub batter_name: String,
pub pitcher: u32,
pub pitcher_name: String,
pub pitcher_throws: crate::play_by_play::SideCode,
pub pitcher_throws_desc: Option<crate::play_by_play::SideDescription>,
pub hit_data_trajectory: Option<Trajectory>,
pub hit_data_contact_quality: Option<Hardness>,
pub hit_data_launch_angle: Option<f32>,
pub hit_data_exit_velocity: Option<f32>,
pub hit_data_total_distance: Option<f32>,
pub hit_data_spray_angle: Option<f32>,
pub hit_data_calc_distance: Option<f32>,
pub sport_id: u32,
pub sport_code: String,
pub sport_name: String,
pub sport_abbr: String,
pub sport_affilliation: crate::sports::MLB,
pub sport_level_of_play: u8,
pub league_name: Option<String>,
pub team_id: u32,
pub team_name: String,
pub parent_team_id: u32,
pub parent_team_name: String,
pub venue_id: u32,
pub venue_name: String,
pub venue_city: String,
pub venue_capacity: Option<u32>,
pub venue_surface: Option<crate::venues::SurfaceType>,
pub venue_roof: Option<crate::venues::RoofType>,
pub venue_left_line: Option<u16>,
pub venue_left: Option<u16>,
pub venue_left_center: Option<u16>,
pub venue_center: Option<u16>,
pub venue_right_center: Option<u16>,
pub venue_right: Option<u16>,
pub venue_right_line: Option<u16>,
pub venue_retrosheet_id: String,
}
pub (crate) struct DefenseData <'d> {
pub (crate) pitch: Pitch,
pub (crate) players: &'d HashMap<u32, Player>,
}
// fn get_fielder (id: u32, players: &HashMap<u32, Player>) -> Player {
// // We check in the main From impl that we have an id before we pull the data.
// let player = players.get(&id).unwrap().to_owned();
// player
// }
impl <'d> From<DefenseData<'d>> for Vec<Defense> {
fn from (data: DefenseData) -> Vec<Defense> {
let pitch = data.pitch;
let players = data.players;
let batter_bats = pitch.batter_bats;
let batter_bats_desc = pitch.batter_bats_desc;
let batter = pitch.batter;
let batter_name = pitch.batter_name;
let pitcher = pitch.pitcher;
let pitcher_name = pitch.pitcher_name;
let mut fielders: Vec<(u32, Pos)> = Vec::with_capacity(9);
if let Some(id) = pitch.catcher_id {fielders.push((id, Pos::Catcher))};
if let Some(id) = pitch.first_base_id {fielders.push((id, Pos::FirstBase))};
if let Some(id) = pitch.second_base_id {fielders.push((id, Pos::SecondBase))};
if let Some(id) = pitch.third_base_id {fielders.push((id, Pos::ThirdBase))};
if let Some(id) = pitch.short_stop_id {fielders.push((id, Pos::ShortStop))};
if let Some(id) = pitch.left_field_id {fielders.push((id, Pos::LeftField))};
if let Some(id) = pitch.right_field_id {fielders.push((id, Pos::RightField))};
if let Some(id) = pitch.center_field_id {fielders.push((id, Pos::CenterField))};
//We always have a pitcher on record
fielders.push((pitcher, Pos::Pitcher));
let mut fielder_data: Vec<Defense> = Vec::new();
for fielder in fielders {
// We'll ignore all records where we don't have fielder metadata.
            // This should be somewhat rare. So far this only occurs for
            // the following ids: 580899, 581659
let meta = match players.get(&fielder.0) {
Some (player) => player.to_owned(),
None => {
println!("Couldn't find metadata for: {}", &fielder.0);
return vec![];
},
};
let fielder_dob = match meta.birth_date {
Some(date) => date.to_string(),
None => "".to_string(),
};
let fielder_mlb_debut = match meta.mlb_debut_date {
Some (date) => date.to_string(),
None => "".to_string(),
};
fielder_data.push(
Defense {
game_date: pitch.game_date.clone(),
game_type: pitch.game_type,
fielder: fielder.0,
fielder_name: meta.name,
fielder_dob,
fielder_draft_pick_number: meta.draft_pick_number,
fielder_throws_code: meta.throws_code,
fielder_throws_desc: meta.throws_description,
fielder_height_str: meta.height_str,
fielder_height_in: meta.height_in,
fielder_weight: meta.weight,
fielder_college_name: meta.college_name,
fielder_birth_country: meta.birth_country,
fielder_mlb_debut,
fielded_by_id: pitch.fielded_by_id,
fielded_by_name: pitch.fielded_by_name.clone(),
fielded_by_pos: pitch.fielded_by_pos,
outs_start: pitch.outs_start,
outs_end: pitch.outs_end,
balls_start: pitch.balls_start,
strikes_start: pitch.strikes_start,
base_value_start: pitch.base_value_start,
base_value_end: pitch.base_value_end,
double_play_opp: pitch.double_play_opportunity,
runs: pitch.runs_scored,
in_play_result: pitch.in_play_result,
position: fielder.1,
batter_bats,
batter_bats_desc,
batter,
batter_name: batter_name.clone(),
pitcher,
pitcher_throws: pitch.pitcher_throws,
pitcher_throws_desc: pitch.pitcher_throws_desc,
pitcher_name: pitcher_name.clone(),
hit_data_trajectory: pitch.hit_data_trajectory,
hit_data_contact_quality: pitch.hit_data_contact_quality,
hit_data_launch_angle: pitch.hit_data_launch_angle,
hit_data_exit_velocity: pitch.hit_data_exit_velocity,
hit_data_total_distance: pitch.hit_data_total_distance,
hit_data_spray_angle: pitch.hit_data_spray_angle,
hit_data_calc_distance: pitch.hit_data_calc_distance,
sport_id: pitch.sport_id,
sport_name: pitch.sport_name.clone(),
sport_code: pitch.sport_code.clone(),
sport_abbr: pitch.sport_abbr.clone(),
sport_affilliation: pitch.sport_affilliation,
sport_level_of_play: pitch.sport_level_of_play,
league_name: pitch.league_name_home.clone(),
team_id: pitch.pitcher_team_id,
parent_team_id: pitch.pitcher_parent_team_id,
team_name: pitch.pitcher_team_name.clone(),
parent_team_name: pitch.pitcher_parent_team_name.clone(),
venue_id: pitch.venue_id,
venue_name: pitch.venue_name.clone(),
venue_city: pitch.venue_city.clone(),
venue_capacity: pitch.venue_capacity,
venue_surface: pitch.venue_surface,
venue_roof: pitch.venue_roof,
venue_left_line: pitch.venue_left_line,
venue_left: pitch.venue_left,
venue_left_center: pitch.venue_left_center,
venue_center: pitch.venue_center,
venue_right_center: pitch.venue_right_center,
venue_right: pitch.venue_right,
venue_right_line: pitch.venue_right_line,
venue_retrosheet_id: pitch.venue_retrosheet_id.clone(),
}
)
}
// Only return the data if we have a ball in play. Could arguably save a little work
// by checking for this earlier, but I think it's cleaner this way.
match pitch.in_play {
1 => fielder_data,
_ => vec![],
}
}
}
| 41.533058 | 175 | 0.606507 |
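A hedged sketch of the call site this `From` impl enables; where `pitch` and `players` come from is crate-internal and assumed here.

// `pitch` is a fully populated Pitch; `players` maps player id -> metadata.
let records: Vec<Defense> = DefenseData { pitch, players: &players }.into();
// The Vec is empty when the pitch was not put in play or fielder metadata
// is missing, per the match at the end of the impl above.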
f43ff0421a9a76d4cef0f1e36261f3f3592fa811 | 713 |
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test - #2093
fn let_in<T>(x: T, f: |T|) {}
fn main() {
let_in(3u, |i| { assert!(i == 3); });
//~^ ERROR expected `uint` but found `int`
let_in(3, |i| { assert!(i == 3u); });
//~^ ERROR expected `int` but found `uint`
}
| 33.952381 | 68 | 0.659187 |
1a32fb8772261bd160a3cd07a290013deed2d1eb | 2,535 | use crate::{Error, IaqCore, Measurement};
use embedded_hal::blocking::i2c::Read;
const DEV_ADDR: u8 = 0x5A;
impl<E, I2C> IaqCore<I2C>
where
I2C: Read<Error = E>,
{
/// Create new instance of the iAQ-Core device.
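    ///
    /// A minimal usage sketch (the `i2c` handle here is an assumed
    /// platform-specific bus implementing the blocking `Read` trait):
    ///
    /// ```ignore
    /// let mut sensor = IaqCore::new(i2c);
    /// // Block until the device leaves its warm-up/busy state.
    /// let measurement = nb::block!(sensor.data()).unwrap();
    /// println!("CO2: {} ppm, TVOC: {} ppb", measurement.co2, measurement.tvoc);
    /// let i2c = sensor.destroy(); // release the I²C bus when done
    /// ```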
pub fn new(i2c: I2C) -> Self {
IaqCore { i2c }
}
/// Destroy driver instance, return I²C bus instance.
pub fn destroy(self) -> I2C {
self.i2c
}
/// Get all data from the sensor measurement
///
/// Returns `nb::Error::WouldBlock` in case the device reports a busy or warm up status.
pub fn data(&mut self) -> nb::Result<Measurement, Error<E>> {
let mut data = [0; 9];
self.read(&mut data)?;
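        // Frame layout as decoded below: bytes 0-1 carry the CO2 prediction,
        // byte 2 the status (checked inside `read`), bytes 4-6 the sensor
        // resistance, and bytes 7-8 the TVOC prediction.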
Ok(Measurement {
co2: (u16::from(data[0]) << 8) | u16::from(data[1]),
tvoc: (u16::from(data[7]) << 8) | u16::from(data[8]),
resistance: (u32::from(data[4]) << 16) | (u32::from(data[5]) << 8) | u32::from(data[6]),
})
}
/// Get the CO2 (ppm) equivalent prediction value
///
/// Returns `nb::Error::WouldBlock` in case the device reports a busy or warm up status.
pub fn co2(&mut self) -> nb::Result<u16, Error<E>> {
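        // CO2 sits in bytes 0-1; byte 2 must be read too so that
        // `check_status` can inspect the status byte.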
let mut data = [0; 3];
self.read(&mut data)?;
Ok((u16::from(data[0]) << 8) | u16::from(data[1]))
}
/// Get the TVOC (ppb) equivalent prediction value
///
/// Returns `nb::Error::WouldBlock` in case the device reports a busy or warm up status.
pub fn tvoc(&mut self) -> nb::Result<u16, Error<E>> {
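        // TVOC sits in bytes 7-8, so the full 9-byte frame must be read.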
let mut data = [0; 9];
self.read(&mut data)?;
Ok((u16::from(data[7]) << 8) | u16::from(data[8]))
}
/// Get the sensor resistance in Ohm
///
/// Returns `nb::Error::WouldBlock` in case the device reports a busy or warm up status.
pub fn resistance(&mut self) -> nb::Result<u32, Error<E>> {
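        // Resistance sits in bytes 4-6; reading 7 bytes covers it and the status byte.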
let mut data = [0; 7];
self.read(&mut data)?;
Ok((u32::from(data[4]) << 16) | (u32::from(data[5]) << 8) | u32::from(data[6]))
}
fn read(&mut self, data: &mut [u8]) -> nb::Result<(), Error<E>> {
self.i2c
.read(DEV_ADDR, data)
.map_err(Error::I2C)
.map_err(nb::Error::Other)?;
Self::check_status(data[2])
}
fn check_status(status: u8) -> nb::Result<(), Error<E>> {
if status == 0x80 {
Err(nb::Error::Other(Error::Device))
} else if status == 0 {
Ok(())
} else {
// warm up or busy
Err(nb::Error::WouldBlock)
}
}
}
| 32.088608 | 100 | 0.531755 |
4aab43cbec701cfd0a87bcfb33e3d275172ea4c5 | 160,913 | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
#[macro_use]
extern crate log;
#[macro_use]
extern crate syntax;
extern crate syntax_pos;
extern crate rustc_errors as errors;
extern crate arena;
#[macro_use]
extern crate rustc;
use self::Namespace::*;
use self::TypeParameters::*;
use self::RibKind::*;
use rustc::hir::map::{Definitions, DefCollector};
use rustc::hir::{self, PrimTy, TyBool, TyChar, TyFloat, TyInt, TyUint, TyStr};
use rustc::middle::cstore::{CrateStore, CrateLoader};
use rustc::session::Session;
use rustc::lint;
use rustc::hir::def::*;
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, DefId};
use rustc::ty;
use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet, DefIdMap};
use syntax::codemap::{dummy_spanned, respan};
use syntax::ext::hygiene::{Mark, SyntaxContext};
use syntax::ast::{self, Name, NodeId, Ident, SpannedIdent, FloatTy, IntTy, UintTy};
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::ext::base::MacroKind;
use syntax::symbol::{Symbol, keywords};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit::{self, FnKind, Visitor};
use syntax::attr;
use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind};
use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, Generics};
use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind};
use syntax::ast::{Local, Mutability, Pat, PatKind, Path};
use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind};
use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue};
use syntax_pos::{Span, DUMMY_SP, MultiSpan};
use errors::DiagnosticBuilder;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::collections::BTreeSet;
use std::fmt;
use std::mem::replace;
use std::rc::Rc;
use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver};
use macros::{InvocationData, LegacyBinding, LegacyScope, MacroBinding};
// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
mod diagnostics;
mod macros;
mod check_unused;
mod build_reduced_graph;
mod resolve_imports;
/// A free importable item suggested in case of resolution failure.
struct ImportSuggestion {
path: Path,
}
/// A field or associated item from self type suggested in case of resolution failure.
enum AssocSuggestion {
Field,
MethodWithSelf,
AssocItem,
}
#[derive(Eq)]
struct BindingError {
name: Name,
origin: BTreeSet<Span>,
target: BTreeSet<Span>,
}
impl PartialOrd for BindingError {
fn partial_cmp(&self, other: &BindingError) -> Option<cmp::Ordering> {
Some(self.cmp(other))
}
}
impl PartialEq for BindingError {
fn eq(&self, other: &BindingError) -> bool {
self.name == other.name
}
}
impl Ord for BindingError {
fn cmp(&self, other: &BindingError) -> cmp::Ordering {
self.name.cmp(&other.name)
}
}
enum ResolutionError<'a> {
/// error E0401: can't use type parameters from outer function
TypeParametersFromOuterFunction,
/// error E0403: the name is already used for a type parameter in this type parameter list
NameAlreadyUsedInTypeParameterList(Name, &'a Span),
/// error E0407: method is not a member of trait
MethodNotMemberOfTrait(Name, &'a str),
/// error E0437: type is not a member of trait
TypeNotMemberOfTrait(Name, &'a str),
/// error E0438: const is not a member of trait
ConstNotMemberOfTrait(Name, &'a str),
/// error E0408: variable `{}` is not bound in all patterns
VariableNotBoundInPattern(&'a BindingError),
/// error E0409: variable `{}` is bound in inconsistent ways within the same match arm
VariableBoundWithDifferentMode(Name, Span),
/// error E0415: identifier is bound more than once in this parameter list
IdentifierBoundMoreThanOnceInParameterList(&'a str),
/// error E0416: identifier is bound more than once in the same pattern
IdentifierBoundMoreThanOnceInSamePattern(&'a str),
/// error E0426: use of undeclared label
UndeclaredLabel(&'a str, Option<Name>),
/// error E0429: `self` imports are only allowed within a { } list
SelfImportsOnlyAllowedWithin,
/// error E0430: `self` import can only appear once in the list
SelfImportCanOnlyAppearOnceInTheList,
/// error E0431: `self` import can only appear in an import list with a non-empty prefix
SelfImportOnlyInImportListWithNonEmptyPrefix,
/// error E0432: unresolved import
UnresolvedImport(Option<(Span, &'a str, &'a str)>),
/// error E0433: failed to resolve
FailedToResolve(&'a str),
/// error E0434: can't capture dynamic environment in a fn item
CannotCaptureDynamicEnvironmentInFnItem,
/// error E0435: attempt to use a non-constant value in a constant
AttemptToUseNonConstantValueInConstant,
/// error E0530: X bindings cannot shadow Ys
BindingShadowsSomethingUnacceptable(&'a str, Name, &'a NameBinding<'a>),
/// error E0128: type parameters with a default cannot use forward declared identifiers
ForwardDeclaredTyParam,
}
fn resolve_error<'sess, 'a>(resolver: &'sess Resolver,
span: Span,
resolution_error: ResolutionError<'a>) {
resolve_struct_error(resolver, span, resolution_error).emit();
}
fn resolve_struct_error<'sess, 'a>(resolver: &'sess Resolver,
span: Span,
resolution_error: ResolutionError<'a>)
-> DiagnosticBuilder<'sess> {
match resolution_error {
ResolutionError::TypeParametersFromOuterFunction => {
let mut err = struct_span_err!(resolver.session,
span,
E0401,
"can't use type parameters from outer function; \
try using a local type parameter instead");
err.span_label(span, "use of type variable from outer function");
err
}
ResolutionError::NameAlreadyUsedInTypeParameterList(name, first_use_span) => {
let mut err = struct_span_err!(resolver.session,
span,
E0403,
"the name `{}` is already used for a type parameter \
in this type parameter list",
name);
err.span_label(span, "already used");
err.span_label(first_use_span.clone(), format!("first use of `{}`", name));
err
}
ResolutionError::MethodNotMemberOfTrait(method, trait_) => {
let mut err = struct_span_err!(resolver.session,
span,
E0407,
"method `{}` is not a member of trait `{}`",
method,
trait_);
err.span_label(span, format!("not a member of trait `{}`", trait_));
err
}
ResolutionError::TypeNotMemberOfTrait(type_, trait_) => {
let mut err = struct_span_err!(resolver.session,
span,
E0437,
"type `{}` is not a member of trait `{}`",
type_,
trait_);
err.span_label(span, format!("not a member of trait `{}`", trait_));
err
}
ResolutionError::ConstNotMemberOfTrait(const_, trait_) => {
let mut err = struct_span_err!(resolver.session,
span,
E0438,
"const `{}` is not a member of trait `{}`",
const_,
trait_);
err.span_label(span, format!("not a member of trait `{}`", trait_));
err
}
ResolutionError::VariableNotBoundInPattern(binding_error) => {
let target_sp = binding_error.target.iter().map(|x| *x).collect::<Vec<_>>();
let msp = MultiSpan::from_spans(target_sp.clone());
let msg = format!("variable `{}` is not bound in all patterns", binding_error.name);
let mut err = resolver.session.struct_span_err_with_code(msp, &msg, "E0408");
for sp in target_sp {
err.span_label(sp, format!("pattern doesn't bind `{}`", binding_error.name));
}
let origin_sp = binding_error.origin.iter().map(|x| *x).collect::<Vec<_>>();
for sp in origin_sp {
err.span_label(sp, "variable not in all patterns");
}
err
}
ResolutionError::VariableBoundWithDifferentMode(variable_name,
first_binding_span) => {
let mut err = struct_span_err!(resolver.session,
span,
E0409,
"variable `{}` is bound in inconsistent \
ways within the same match arm",
variable_name);
err.span_label(span, "bound in different ways");
err.span_label(first_binding_span, "first binding");
err
}
ResolutionError::IdentifierBoundMoreThanOnceInParameterList(identifier) => {
let mut err = struct_span_err!(resolver.session,
span,
E0415,
"identifier `{}` is bound more than once in this parameter list",
identifier);
err.span_label(span, "used as parameter more than once");
err
}
ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(identifier) => {
let mut err = struct_span_err!(resolver.session,
span,
E0416,
"identifier `{}` is bound more than once in the same pattern",
identifier);
err.span_label(span, "used in a pattern more than once");
err
}
ResolutionError::UndeclaredLabel(name, lev_candidate) => {
let mut err = struct_span_err!(resolver.session,
span,
E0426,
"use of undeclared label `{}`",
name);
if let Some(lev_candidate) = lev_candidate {
err.span_label(span, format!("did you mean `{}`?", lev_candidate));
} else {
err.span_label(span, format!("undeclared label `{}`", name));
}
err
}
ResolutionError::SelfImportsOnlyAllowedWithin => {
struct_span_err!(resolver.session,
span,
E0429,
"{}",
"`self` imports are only allowed within a { } list")
}
ResolutionError::SelfImportCanOnlyAppearOnceInTheList => {
struct_span_err!(resolver.session,
span,
E0430,
"`self` import can only appear once in the list")
}
ResolutionError::SelfImportOnlyInImportListWithNonEmptyPrefix => {
struct_span_err!(resolver.session,
span,
E0431,
"`self` import can only appear in an import list with a \
non-empty prefix")
}
ResolutionError::UnresolvedImport(name) => {
let (span, msg) = match name {
Some((sp, n, _)) => (sp, format!("unresolved import `{}`", n)),
None => (span, "unresolved import".to_owned()),
};
let mut err = struct_span_err!(resolver.session, span, E0432, "{}", msg);
if let Some((_, _, p)) = name {
err.span_label(span, p);
}
err
}
ResolutionError::FailedToResolve(msg) => {
let mut err = struct_span_err!(resolver.session, span, E0433,
"failed to resolve. {}", msg);
err.span_label(span, msg);
err
}
ResolutionError::CannotCaptureDynamicEnvironmentInFnItem => {
struct_span_err!(resolver.session,
span,
E0434,
"{}",
"can't capture dynamic environment in a fn item; use the || { ... } \
closure form instead")
}
ResolutionError::AttemptToUseNonConstantValueInConstant => {
let mut err = struct_span_err!(resolver.session,
span,
E0435,
"attempt to use a non-constant value in a constant");
err.span_label(span, "non-constant value");
err
}
ResolutionError::BindingShadowsSomethingUnacceptable(what_binding, name, binding) => {
let shadows_what = PathResolution::new(binding.def()).kind_name();
let mut err = struct_span_err!(resolver.session,
span,
E0530,
"{}s cannot shadow {}s", what_binding, shadows_what);
err.span_label(span, format!("cannot be named the same as a {}", shadows_what));
let participle = if binding.is_import() { "imported" } else { "defined" };
let msg = format!("a {} `{}` is {} here", shadows_what, name, participle);
err.span_label(binding.span, msg);
err
}
ResolutionError::ForwardDeclaredTyParam => {
let mut err = struct_span_err!(resolver.session, span, E0128,
"type parameters with a default cannot use \
forward declared identifiers");
err.span_label(span, format!("defaulted type parameters \
cannot be forward declared"));
err
}
}
}
#[derive(Copy, Clone, Debug)]
struct BindingInfo {
span: Span,
binding_mode: BindingMode,
}
// Map from the name in a pattern to its binding mode.
type BindingMap = FxHashMap<Ident, BindingInfo>;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum PatternSource {
Match,
IfLet,
WhileLet,
Let,
For,
FnParam,
}
impl PatternSource {
fn is_refutable(self) -> bool {
match self {
PatternSource::Match | PatternSource::IfLet | PatternSource::WhileLet => true,
PatternSource::Let | PatternSource::For | PatternSource::FnParam => false,
}
}
fn descr(self) -> &'static str {
match self {
PatternSource::Match => "match binding",
PatternSource::IfLet => "if let binding",
PatternSource::WhileLet => "while let binding",
PatternSource::Let => "let binding",
PatternSource::For => "for binding",
PatternSource::FnParam => "function parameter",
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum PathSource<'a> {
// Type paths `Path`.
Type,
// Trait paths in bounds or impls.
Trait,
// Expression paths `path`, with optional parent context.
Expr(Option<&'a Expr>),
// Paths in path patterns `Path`.
Pat,
// Paths in struct expressions and patterns `Path { .. }`.
Struct,
// Paths in tuple struct patterns `Path(..)`.
TupleStruct,
// `m::A::B` in `<T as m::A>::B::C`.
TraitItem(Namespace),
// Path in `pub(path)`
Visibility,
// Path in `use a::b::{...};`
ImportPrefix,
}
impl<'a> PathSource<'a> {
fn namespace(self) -> Namespace {
match self {
PathSource::Type | PathSource::Trait | PathSource::Struct |
PathSource::Visibility | PathSource::ImportPrefix => TypeNS,
PathSource::Expr(..) | PathSource::Pat | PathSource::TupleStruct => ValueNS,
PathSource::TraitItem(ns) => ns,
}
}
fn global_by_default(self) -> bool {
match self {
PathSource::Visibility | PathSource::ImportPrefix => true,
PathSource::Type | PathSource::Expr(..) | PathSource::Pat |
PathSource::Struct | PathSource::TupleStruct |
PathSource::Trait | PathSource::TraitItem(..) => false,
}
}
fn defer_to_typeck(self) -> bool {
match self {
PathSource::Type | PathSource::Expr(..) | PathSource::Pat |
PathSource::Struct | PathSource::TupleStruct => true,
PathSource::Trait | PathSource::TraitItem(..) |
PathSource::Visibility | PathSource::ImportPrefix => false,
}
}
fn descr_expected(self) -> &'static str {
match self {
PathSource::Type => "type",
PathSource::Trait => "trait",
PathSource::Pat => "unit struct/variant or constant",
PathSource::Struct => "struct, variant or union type",
PathSource::TupleStruct => "tuple struct/variant",
PathSource::Visibility => "module",
PathSource::ImportPrefix => "module or enum",
PathSource::TraitItem(ns) => match ns {
TypeNS => "associated type",
ValueNS => "method or associated constant",
MacroNS => bug!("associated macro"),
},
PathSource::Expr(parent) => match parent.map(|p| &p.node) {
// "function" here means "anything callable" rather than `Def::Fn`,
// this is not precise but usually more helpful than just "value".
Some(&ExprKind::Call(..)) => "function",
_ => "value",
},
}
}
fn is_expected(self, def: Def) -> bool {
match self {
PathSource::Type => match def {
Def::Struct(..) | Def::Union(..) | Def::Enum(..) |
Def::Trait(..) | Def::TyAlias(..) | Def::AssociatedTy(..) |
Def::PrimTy(..) | Def::TyParam(..) | Def::SelfTy(..) => true,
_ => false,
},
PathSource::Trait => match def {
Def::Trait(..) => true,
_ => false,
},
PathSource::Expr(..) => match def {
Def::StructCtor(_, CtorKind::Const) | Def::StructCtor(_, CtorKind::Fn) |
Def::VariantCtor(_, CtorKind::Const) | Def::VariantCtor(_, CtorKind::Fn) |
Def::Const(..) | Def::Static(..) | Def::Local(..) | Def::Upvar(..) |
Def::Fn(..) | Def::Method(..) | Def::AssociatedConst(..) => true,
_ => false,
},
PathSource::Pat => match def {
Def::StructCtor(_, CtorKind::Const) |
Def::VariantCtor(_, CtorKind::Const) |
Def::Const(..) | Def::AssociatedConst(..) => true,
_ => false,
},
PathSource::TupleStruct => match def {
Def::StructCtor(_, CtorKind::Fn) | Def::VariantCtor(_, CtorKind::Fn) => true,
_ => false,
},
PathSource::Struct => match def {
Def::Struct(..) | Def::Union(..) | Def::Variant(..) |
Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => true,
_ => false,
},
PathSource::TraitItem(ns) => match def {
Def::AssociatedConst(..) | Def::Method(..) if ns == ValueNS => true,
Def::AssociatedTy(..) if ns == TypeNS => true,
_ => false,
},
PathSource::ImportPrefix => match def {
Def::Mod(..) | Def::Enum(..) => true,
_ => false,
},
PathSource::Visibility => match def {
Def::Mod(..) => true,
_ => false,
},
}
}
fn error_code(self, has_unexpected_resolution: bool) -> &'static str {
__diagnostic_used!(E0404);
__diagnostic_used!(E0405);
__diagnostic_used!(E0412);
__diagnostic_used!(E0422);
__diagnostic_used!(E0423);
__diagnostic_used!(E0425);
__diagnostic_used!(E0531);
__diagnostic_used!(E0532);
__diagnostic_used!(E0573);
__diagnostic_used!(E0574);
__diagnostic_used!(E0575);
__diagnostic_used!(E0576);
__diagnostic_used!(E0577);
__diagnostic_used!(E0578);
match (self, has_unexpected_resolution) {
(PathSource::Trait, true) => "E0404",
(PathSource::Trait, false) => "E0405",
(PathSource::Type, true) => "E0573",
(PathSource::Type, false) => "E0412",
(PathSource::Struct, true) => "E0574",
(PathSource::Struct, false) => "E0422",
(PathSource::Expr(..), true) => "E0423",
(PathSource::Expr(..), false) => "E0425",
(PathSource::Pat, true) | (PathSource::TupleStruct, true) => "E0532",
(PathSource::Pat, false) | (PathSource::TupleStruct, false) => "E0531",
(PathSource::TraitItem(..), true) => "E0575",
(PathSource::TraitItem(..), false) => "E0576",
(PathSource::Visibility, true) | (PathSource::ImportPrefix, true) => "E0577",
(PathSource::Visibility, false) | (PathSource::ImportPrefix, false) => "E0578",
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Namespace {
TypeNS,
ValueNS,
MacroNS,
}
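/// Per-namespace data for types, values, and macros. The macro namespace is
/// an `Option` because it is only populated when `use_extern_macros` is
/// enabled (see `Resolver::per_ns`).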
#[derive(Clone, Default, Debug)]
pub struct PerNS<T> {
value_ns: T,
type_ns: T,
macro_ns: Option<T>,
}
impl<T> ::std::ops::Index<Namespace> for PerNS<T> {
type Output = T;
fn index(&self, ns: Namespace) -> &T {
match ns {
ValueNS => &self.value_ns,
TypeNS => &self.type_ns,
MacroNS => self.macro_ns.as_ref().unwrap(),
}
}
}
impl<T> ::std::ops::IndexMut<Namespace> for PerNS<T> {
fn index_mut(&mut self, ns: Namespace) -> &mut T {
match ns {
ValueNS => &mut self.value_ns,
TypeNS => &mut self.type_ns,
MacroNS => self.macro_ns.as_mut().unwrap(),
}
}
}
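/// Finds the span at which a suggested `use` statement should be inserted in
/// the target module: immediately before the first existing `use` item if
/// there is one, otherwise before the earliest other item (skipping
/// `extern crate`s and staying above the item's attributes).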
struct UsePlacementFinder {
target_module: NodeId,
span: Option<Span>,
found_use: bool,
}
impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
fn visit_mod(
&mut self,
module: &'tcx ast::Mod,
_: Span,
_: &[ast::Attribute],
node_id: NodeId,
) {
if self.span.is_some() {
return;
}
if node_id != self.target_module {
visit::walk_mod(self, module);
return;
}
// find a use statement
for item in &module.items {
match item.node {
ItemKind::Use(..) => {
// don't suggest placing a use before the prelude
// import or other generated ones
if item.span.ctxt().outer().expn_info().is_none() {
self.span = Some(item.span.with_hi(item.span.lo()));
self.found_use = true;
return;
}
},
// don't place use before extern crate
ItemKind::ExternCrate(_) => {}
// but place them before the first other item
                _ => if self.span.map_or(true, |span| item.span < span) {
if item.span.ctxt().outer().expn_info().is_none() {
// don't insert between attributes and an item
if item.attrs.is_empty() {
self.span = Some(item.span.with_hi(item.span.lo()));
} else {
// find the first attribute on the item
for attr in &item.attrs {
if self.span.map_or(true, |span| attr.span < span) {
self.span = Some(attr.span.with_hi(attr.span.lo()));
}
}
}
}
},
}
}
}
}
impl<'a, 'tcx> Visitor<'tcx> for Resolver<'a> {
fn visit_item(&mut self, item: &'tcx Item) {
self.resolve_item(item);
}
fn visit_arm(&mut self, arm: &'tcx Arm) {
self.resolve_arm(arm);
}
fn visit_block(&mut self, block: &'tcx Block) {
self.resolve_block(block);
}
fn visit_expr(&mut self, expr: &'tcx Expr) {
self.resolve_expr(expr, None);
}
fn visit_local(&mut self, local: &'tcx Local) {
self.resolve_local(local);
}
fn visit_ty(&mut self, ty: &'tcx Ty) {
match ty.node {
TyKind::Path(ref qself, ref path) => {
self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type);
}
TyKind::ImplicitSelf => {
let self_ty = keywords::SelfType.ident();
let def = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, true, ty.span)
.map_or(Def::Err, |d| d.def());
self.record_def(ty.id, PathResolution::new(def));
}
TyKind::Array(ref element, ref length) => {
self.visit_ty(element);
self.with_constant_rib(|this| {
this.visit_expr(length);
});
return;
}
_ => (),
}
visit::walk_ty(self, ty);
}
fn visit_poly_trait_ref(&mut self,
tref: &'tcx ast::PolyTraitRef,
m: &'tcx ast::TraitBoundModifier) {
self.smart_resolve_path(tref.trait_ref.ref_id, None,
&tref.trait_ref.path, PathSource::Trait);
visit::walk_poly_trait_ref(self, tref, m);
}
fn visit_variant(&mut self,
variant: &'tcx ast::Variant,
generics: &'tcx Generics,
item_id: ast::NodeId) {
if let Some(ref dis_expr) = variant.node.disr_expr {
// resolve the discriminator expr as a constant
self.with_constant_rib(|this| {
this.visit_expr(dis_expr);
});
}
// `visit::walk_variant` without the discriminant expression.
self.visit_variant_data(&variant.node.data,
variant.node.name,
generics,
item_id,
variant.span);
}
fn visit_foreign_item(&mut self, foreign_item: &'tcx ForeignItem) {
let type_parameters = match foreign_item.node {
ForeignItemKind::Fn(_, ref generics) => {
HasTypeParameters(generics, ItemRibKind)
}
ForeignItemKind::Static(..) => NoTypeParameters,
};
self.with_type_parameter_rib(type_parameters, |this| {
visit::walk_foreign_item(this, foreign_item);
});
}
fn visit_fn(&mut self,
function_kind: FnKind<'tcx>,
declaration: &'tcx FnDecl,
_: Span,
node_id: NodeId) {
let rib_kind = match function_kind {
FnKind::ItemFn(_, generics, ..) => {
self.visit_generics(generics);
ItemRibKind
}
FnKind::Method(_, sig, _, _) => {
self.visit_generics(&sig.generics);
MethodRibKind(!sig.decl.has_self())
}
FnKind::Closure(_) => ClosureRibKind(node_id),
};
// Create a value rib for the function.
self.ribs[ValueNS].push(Rib::new(rib_kind));
// Create a label rib for the function.
self.label_ribs.push(Rib::new(rib_kind));
// Add each argument to the rib.
let mut bindings_list = FxHashMap();
for argument in &declaration.inputs {
self.resolve_pattern(&argument.pat, PatternSource::FnParam, &mut bindings_list);
self.visit_ty(&argument.ty);
debug!("(resolving function) recorded argument");
}
visit::walk_fn_ret_ty(self, &declaration.output);
// Resolve the function body.
match function_kind {
FnKind::ItemFn(.., body) |
FnKind::Method(.., body) => {
self.visit_block(body);
}
FnKind::Closure(body) => {
self.visit_expr(body);
}
};
debug!("(resolving function) leaving function");
self.label_ribs.pop();
self.ribs[ValueNS].pop();
}
fn visit_generics(&mut self, generics: &'tcx Generics) {
// For type parameter defaults, we have to ban access
// to following type parameters, as the Substs can only
// provide previous type parameters as they're built.
let mut default_ban_rib = Rib::new(ForwardTyParamBanRibKind);
default_ban_rib.bindings.extend(generics.ty_params.iter()
.skip_while(|p| p.default.is_none())
.map(|p| (Ident::with_empty_ctxt(p.ident.name), Def::Err)));
for param in &generics.ty_params {
for bound in ¶m.bounds {
self.visit_ty_param_bound(bound);
}
if let Some(ref ty) = param.default {
self.ribs[TypeNS].push(default_ban_rib);
self.visit_ty(ty);
default_ban_rib = self.ribs[TypeNS].pop().unwrap();
}
// Allow all following defaults to refer to this type parameter.
default_ban_rib.bindings.remove(&Ident::with_empty_ctxt(param.ident.name));
}
for lt in &generics.lifetimes { self.visit_lifetime_def(lt); }
for p in &generics.where_clause.predicates { self.visit_where_predicate(p); }
}
}
#[derive(Copy, Clone)]
enum TypeParameters<'a, 'b> {
NoTypeParameters,
HasTypeParameters(// Type parameters.
&'b Generics,
// The kind of the rib used for type parameters.
RibKind<'a>),
}
// The rib kind controls the translation of local
// definitions (`Def::Local`) to upvars (`Def::Upvar`).
#[derive(Copy, Clone, Debug)]
enum RibKind<'a> {
// No translation needs to be applied.
NormalRibKind,
// We passed through a closure scope at the given node ID.
// Translate upvars as appropriate.
ClosureRibKind(NodeId /* func id */),
// We passed through an impl or trait and are now in one of its
// methods. Allow references to ty params that impl or trait
// binds. Disallow any other upvars (including other ty params that are
// upvars).
//
    // The boolean value indicates whether this method is static.
MethodRibKind(bool),
// We passed through an item scope. Disallow upvars.
ItemRibKind,
// We're in a constant item. Can't refer to dynamic stuff.
ConstantItemRibKind,
// We passed through a module.
ModuleRibKind(Module<'a>),
// We passed through a `macro_rules!` statement
MacroDefinition(DefId),
// All bindings in this rib are type parameters that can't be used
// from the default of a type parameter because they're not declared
// before said type parameter. Also see the `visit_generics` override.
ForwardTyParamBanRibKind,
}
/// One local scope.
#[derive(Debug)]
struct Rib<'a> {
bindings: FxHashMap<Ident, Def>,
kind: RibKind<'a>,
}
impl<'a> Rib<'a> {
fn new(kind: RibKind<'a>) -> Rib<'a> {
Rib {
bindings: FxHashMap(),
kind,
}
}
}
enum LexicalScopeBinding<'a> {
Item(&'a NameBinding<'a>),
Def(Def),
}
impl<'a> LexicalScopeBinding<'a> {
fn item(self) -> Option<&'a NameBinding<'a>> {
match self {
LexicalScopeBinding::Item(binding) => Some(binding),
_ => None,
}
}
fn def(self) -> Def {
match self {
LexicalScopeBinding::Item(binding) => binding.def(),
LexicalScopeBinding::Def(def) => def,
}
}
}
#[derive(Clone)]
enum PathResult<'a> {
Module(Module<'a>),
NonModule(PathResolution),
Indeterminate,
Failed(Span, String, bool /* is the error from the last segment? */),
}
enum ModuleKind {
Block(NodeId),
Def(Def, Name),
}
/// One node in the tree of modules.
pub struct ModuleData<'a> {
parent: Option<Module<'a>>,
kind: ModuleKind,
// The def id of the closest normal module (`mod`) ancestor (including this module).
normal_ancestor_id: DefId,
resolutions: RefCell<FxHashMap<(Ident, Namespace), &'a RefCell<NameResolution<'a>>>>,
legacy_macro_resolutions: RefCell<Vec<(Mark, Ident, Span, MacroKind)>>,
macro_resolutions: RefCell<Vec<(Box<[Ident]>, Span)>>,
// Macro invocations that can expand into items in this module.
unresolved_invocations: RefCell<FxHashSet<Mark>>,
no_implicit_prelude: bool,
glob_importers: RefCell<Vec<&'a ImportDirective<'a>>>,
globs: RefCell<Vec<&'a ImportDirective<'a>>>,
// Used to memoize the traits in this module for faster searches through all traits in scope.
traits: RefCell<Option<Box<[(Ident, &'a NameBinding<'a>)]>>>,
// Whether this module is populated. If not populated, any attempt to
// access the children must be preceded with a
// `populate_module_if_necessary` call.
populated: Cell<bool>,
/// Span of the module itself. Used for error reporting.
span: Span,
expansion: Mark,
}
type Module<'a> = &'a ModuleData<'a>;
impl<'a> ModuleData<'a> {
fn new(parent: Option<Module<'a>>,
kind: ModuleKind,
normal_ancestor_id: DefId,
expansion: Mark,
span: Span) -> Self {
ModuleData {
parent,
kind,
normal_ancestor_id,
resolutions: RefCell::new(FxHashMap()),
legacy_macro_resolutions: RefCell::new(Vec::new()),
macro_resolutions: RefCell::new(Vec::new()),
unresolved_invocations: RefCell::new(FxHashSet()),
no_implicit_prelude: false,
glob_importers: RefCell::new(Vec::new()),
            globs: RefCell::new(Vec::new()),
traits: RefCell::new(None),
populated: Cell::new(normal_ancestor_id.is_local()),
span,
expansion,
}
}
fn for_each_child<F: FnMut(Ident, Namespace, &'a NameBinding<'a>)>(&self, mut f: F) {
for (&(ident, ns), name_resolution) in self.resolutions.borrow().iter() {
name_resolution.borrow().binding.map(|binding| f(ident, ns, binding));
}
}
fn for_each_child_stable<F: FnMut(Ident, Namespace, &'a NameBinding<'a>)>(&self, mut f: F) {
let resolutions = self.resolutions.borrow();
let mut resolutions = resolutions.iter().map(|(&(ident, ns), &resolution)| {
// Pre-compute keys for sorting
(ident.name.as_str(), ns, ident, resolution)
})
.collect::<Vec<_>>();
resolutions.sort_unstable_by_key(|&(str, ns, ..)| (str, ns));
for &(_, ns, ident, resolution) in resolutions.iter() {
resolution.borrow().binding.map(|binding| f(ident, ns, binding));
}
}
fn def(&self) -> Option<Def> {
match self.kind {
ModuleKind::Def(def, _) => Some(def),
_ => None,
}
}
fn def_id(&self) -> Option<DefId> {
self.def().as_ref().map(Def::def_id)
}
// `self` resolves to the first module ancestor that `is_normal`.
fn is_normal(&self) -> bool {
match self.kind {
ModuleKind::Def(Def::Mod(_), _) => true,
_ => false,
}
}
fn is_trait(&self) -> bool {
match self.kind {
ModuleKind::Def(Def::Trait(_), _) => true,
_ => false,
}
}
fn is_local(&self) -> bool {
self.normal_ancestor_id.is_local()
}
fn nearest_item_scope(&'a self) -> Module<'a> {
if self.is_trait() { self.parent.unwrap() } else { self }
}
}
impl<'a> fmt::Debug for ModuleData<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self.def())
}
}
// Records a possibly-private value, type, or module definition.
#[derive(Clone, Debug)]
pub struct NameBinding<'a> {
kind: NameBindingKind<'a>,
expansion: Mark,
span: Span,
vis: ty::Visibility,
}
pub trait ToNameBinding<'a> {
fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a>;
}
impl<'a> ToNameBinding<'a> for &'a NameBinding<'a> {
fn to_name_binding(self, _: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
self
}
}
#[derive(Clone, Debug)]
enum NameBindingKind<'a> {
Def(Def),
Module(Module<'a>),
Import {
binding: &'a NameBinding<'a>,
directive: &'a ImportDirective<'a>,
used: Cell<bool>,
legacy_self_import: bool,
},
Ambiguity {
b1: &'a NameBinding<'a>,
b2: &'a NameBinding<'a>,
legacy: bool,
}
}
struct PrivacyError<'a>(Span, Name, &'a NameBinding<'a>);
struct UseError<'a> {
err: DiagnosticBuilder<'a>,
/// Attach `use` statements for these candidates
candidates: Vec<ImportSuggestion>,
/// The node id of the module to place the use statements in
node_id: NodeId,
/// Whether the diagnostic should state that it's "better"
better: bool,
}
struct AmbiguityError<'a> {
span: Span,
name: Name,
lexical: bool,
b1: &'a NameBinding<'a>,
b2: &'a NameBinding<'a>,
legacy: bool,
}
impl<'a> NameBinding<'a> {
fn module(&self) -> Option<Module<'a>> {
match self.kind {
NameBindingKind::Module(module) => Some(module),
NameBindingKind::Import { binding, .. } => binding.module(),
NameBindingKind::Ambiguity { legacy: true, b1, .. } => b1.module(),
_ => None,
}
}
fn def(&self) -> Def {
match self.kind {
NameBindingKind::Def(def) => def,
NameBindingKind::Module(module) => module.def().unwrap(),
NameBindingKind::Import { binding, .. } => binding.def(),
NameBindingKind::Ambiguity { legacy: true, b1, .. } => b1.def(),
NameBindingKind::Ambiguity { .. } => Def::Err,
}
}
fn def_ignoring_ambiguity(&self) -> Def {
match self.kind {
NameBindingKind::Import { binding, .. } => binding.def_ignoring_ambiguity(),
NameBindingKind::Ambiguity { b1, .. } => b1.def_ignoring_ambiguity(),
_ => self.def(),
}
}
fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc<SyntaxExtension> {
resolver.get_macro(self.def_ignoring_ambiguity())
}
// We sometimes need to treat variants as `pub` for backwards compatibility
fn pseudo_vis(&self) -> ty::Visibility {
if self.is_variant() { ty::Visibility::Public } else { self.vis }
}
fn is_variant(&self) -> bool {
match self.kind {
NameBindingKind::Def(Def::Variant(..)) |
NameBindingKind::Def(Def::VariantCtor(..)) => true,
_ => false,
}
}
fn is_extern_crate(&self) -> bool {
match self.kind {
NameBindingKind::Import {
directive: &ImportDirective {
subclass: ImportDirectiveSubclass::ExternCrate, ..
}, ..
} => true,
_ => false,
}
}
fn is_import(&self) -> bool {
match self.kind {
NameBindingKind::Import { .. } => true,
_ => false,
}
}
fn is_glob_import(&self) -> bool {
match self.kind {
NameBindingKind::Import { directive, .. } => directive.is_glob(),
NameBindingKind::Ambiguity { b1, .. } => b1.is_glob_import(),
_ => false,
}
}
fn is_importable(&self) -> bool {
match self.def() {
Def::AssociatedConst(..) | Def::Method(..) | Def::AssociatedTy(..) => false,
_ => true,
}
}
fn is_macro_def(&self) -> bool {
match self.kind {
NameBindingKind::Def(Def::Macro(..)) => true,
_ => false,
}
}
fn descr(&self) -> &'static str {
if self.is_extern_crate() { "extern crate" } else { self.def().kind_name() }
}
}
/// Interns the names of the primitive types.
struct PrimitiveTypeTable {
primitive_types: FxHashMap<Name, PrimTy>,
}
impl PrimitiveTypeTable {
fn new() -> PrimitiveTypeTable {
let mut table = PrimitiveTypeTable { primitive_types: FxHashMap() };
table.intern("bool", TyBool);
table.intern("char", TyChar);
table.intern("f32", TyFloat(FloatTy::F32));
table.intern("f64", TyFloat(FloatTy::F64));
table.intern("isize", TyInt(IntTy::Is));
table.intern("i8", TyInt(IntTy::I8));
table.intern("i16", TyInt(IntTy::I16));
table.intern("i32", TyInt(IntTy::I32));
table.intern("i64", TyInt(IntTy::I64));
table.intern("i128", TyInt(IntTy::I128));
table.intern("str", TyStr);
table.intern("usize", TyUint(UintTy::Us));
table.intern("u8", TyUint(UintTy::U8));
table.intern("u16", TyUint(UintTy::U16));
table.intern("u32", TyUint(UintTy::U32));
table.intern("u64", TyUint(UintTy::U64));
table.intern("u128", TyUint(UintTy::U128));
table
}
fn intern(&mut self, string: &str, primitive_type: PrimTy) {
self.primitive_types.insert(Symbol::intern(string), primitive_type);
}
}
/// The main resolver class.
pub struct Resolver<'a> {
session: &'a Session,
cstore: &'a CrateStore,
pub definitions: Definitions,
graph_root: Module<'a>,
prelude: Option<Module<'a>>,
// n.b. This is used only for better diagnostics, not name resolution itself.
has_self: FxHashSet<DefId>,
// Names of fields of an item `DefId` accessible with dot syntax.
// Used for hints during error reporting.
field_names: FxHashMap<DefId, Vec<Name>>,
// All imports known to succeed or fail.
determined_imports: Vec<&'a ImportDirective<'a>>,
// All non-determined imports.
indeterminate_imports: Vec<&'a ImportDirective<'a>>,
// The module that represents the current item scope.
current_module: Module<'a>,
// The current set of local scopes for types and values.
// FIXME #4948: Reuse ribs to avoid allocation.
ribs: PerNS<Vec<Rib<'a>>>,
// The current set of local scopes, for labels.
label_ribs: Vec<Rib<'a>>,
// The trait that the current context can refer to.
current_trait_ref: Option<(Module<'a>, TraitRef)>,
// The current self type if inside an impl (used for better errors).
current_self_type: Option<Ty>,
// The idents for the primitive types.
primitive_type_table: PrimitiveTypeTable,
def_map: DefMap,
pub freevars: FreevarMap,
freevars_seen: NodeMap<NodeMap<usize>>,
pub export_map: ExportMap,
pub trait_map: TraitMap,
// A map from nodes to anonymous modules.
// Anonymous modules are pseudo-modules that are implicitly created around items
// contained within blocks.
//
// For example, if we have this:
//
// fn f() {
// fn g() {
// ...
// }
// }
//
// There will be an anonymous module created around `g` with the ID of the
// entry block for `f`.
block_map: NodeMap<Module<'a>>,
module_map: FxHashMap<DefId, Module<'a>>,
extern_module_map: FxHashMap<(DefId, bool /* MacrosOnly? */), Module<'a>>,
pub make_glob_map: bool,
    /// Maps imports to the names of items actually imported (this maps
    /// all imports, but only glob imports are actually interesting).
pub glob_map: GlobMap,
used_imports: FxHashSet<(NodeId, Namespace)>,
pub maybe_unused_trait_imports: NodeSet,
pub maybe_unused_extern_crates: Vec<(NodeId, Span)>,
/// privacy errors are delayed until the end in order to deduplicate them
privacy_errors: Vec<PrivacyError<'a>>,
/// ambiguity errors are delayed for deduplication
ambiguity_errors: Vec<AmbiguityError<'a>>,
/// `use` injections are delayed for better placement and deduplication
use_injections: Vec<UseError<'a>>,
gated_errors: FxHashSet<Span>,
disallowed_shadowing: Vec<&'a LegacyBinding<'a>>,
arenas: &'a ResolverArenas<'a>,
dummy_binding: &'a NameBinding<'a>,
use_extern_macros: bool, // true if `#![feature(use_extern_macros)]`
crate_loader: &'a mut CrateLoader,
macro_names: FxHashSet<Ident>,
global_macros: FxHashMap<Name, &'a NameBinding<'a>>,
lexical_macro_resolutions: Vec<(Ident, &'a Cell<LegacyScope<'a>>)>,
macro_map: FxHashMap<DefId, Rc<SyntaxExtension>>,
macro_defs: FxHashMap<Mark, DefId>,
local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
macro_exports: Vec<Export>,
pub whitelisted_legacy_custom_derives: Vec<Name>,
pub found_unresolved_macro: bool,
// List of crate local macros that we need to warn about as being unused.
// Right now this only includes macro_rules! macros, and macros 2.0.
unused_macros: FxHashSet<DefId>,
// Maps the `Mark` of an expansion to its containing module or block.
invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
// Avoid duplicated errors for "name already defined".
name_already_seen: FxHashMap<Name, Span>,
// If `#![feature(proc_macro)]` is set
proc_macro_enabled: bool,
// A set of procedural macros imported by `#[macro_use]` that have already been warned about
warned_proc_macros: FxHashSet<Name>,
potentially_unused_imports: Vec<&'a ImportDirective<'a>>,
// This table maps struct IDs into struct constructor IDs,
// it's not used during normal resolution, only for better error reporting.
struct_constructors: DefIdMap<(Def, ty::Visibility)>,
// Only used for better errors on `fn(): fn()`
current_type_ascription: Vec<Span>,
}
pub struct ResolverArenas<'a> {
modules: arena::TypedArena<ModuleData<'a>>,
local_modules: RefCell<Vec<Module<'a>>>,
name_bindings: arena::TypedArena<NameBinding<'a>>,
import_directives: arena::TypedArena<ImportDirective<'a>>,
name_resolutions: arena::TypedArena<RefCell<NameResolution<'a>>>,
invocation_data: arena::TypedArena<InvocationData<'a>>,
legacy_bindings: arena::TypedArena<LegacyBinding<'a>>,
}
impl<'a> ResolverArenas<'a> {
fn alloc_module(&'a self, module: ModuleData<'a>) -> Module<'a> {
let module = self.modules.alloc(module);
if module.def_id().map(|def_id| def_id.is_local()).unwrap_or(true) {
self.local_modules.borrow_mut().push(module);
}
module
}
fn local_modules(&'a self) -> ::std::cell::Ref<'a, Vec<Module<'a>>> {
self.local_modules.borrow()
}
fn alloc_name_binding(&'a self, name_binding: NameBinding<'a>) -> &'a NameBinding<'a> {
self.name_bindings.alloc(name_binding)
}
fn alloc_import_directive(&'a self, import_directive: ImportDirective<'a>)
-> &'a ImportDirective {
self.import_directives.alloc(import_directive)
}
fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> {
self.name_resolutions.alloc(Default::default())
}
fn alloc_invocation_data(&'a self, expansion_data: InvocationData<'a>)
-> &'a InvocationData<'a> {
self.invocation_data.alloc(expansion_data)
}
fn alloc_legacy_binding(&'a self, binding: LegacyBinding<'a>) -> &'a LegacyBinding<'a> {
self.legacy_bindings.alloc(binding)
}
}
impl<'a, 'b: 'a> ty::DefIdTree for &'a Resolver<'b> {
fn parent(self, id: DefId) -> Option<DefId> {
match id.krate {
LOCAL_CRATE => self.definitions.def_key(id.index).parent,
_ => self.cstore.def_key(id).parent,
        }.map(|index| DefId { index, ..id })
}
}
impl<'a> hir::lowering::Resolver for Resolver<'a> {
fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool) {
let namespace = if is_value { ValueNS } else { TypeNS };
let hir::Path { ref segments, span, ref mut def } = *path;
let path: Vec<SpannedIdent> = segments.iter()
.map(|seg| respan(span, Ident::with_empty_ctxt(seg.name)))
.collect();
match self.resolve_path(&path, Some(namespace), true, span) {
PathResult::Module(module) => *def = module.def().unwrap(),
PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 =>
*def = path_res.base_def(),
PathResult::NonModule(..) => match self.resolve_path(&path, None, true, span) {
PathResult::Failed(span, msg, _) => {
resolve_error(self, span, ResolutionError::FailedToResolve(&msg));
}
_ => {}
},
PathResult::Indeterminate => unreachable!(),
PathResult::Failed(span, msg, _) => {
resolve_error(self, span, ResolutionError::FailedToResolve(&msg));
}
}
}
fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution> {
self.def_map.get(&id).cloned()
}
fn definitions(&mut self) -> &mut Definitions {
&mut self.definitions
}
}
impl<'a> Resolver<'a> {
pub fn new(session: &'a Session,
cstore: &'a CrateStore,
krate: &Crate,
crate_name: &str,
make_glob_map: MakeGlobMap,
crate_loader: &'a mut CrateLoader,
arenas: &'a ResolverArenas<'a>)
-> Resolver<'a> {
let root_def_id = DefId::local(CRATE_DEF_INDEX);
let root_module_kind = ModuleKind::Def(Def::Mod(root_def_id), keywords::Invalid.name());
let graph_root = arenas.alloc_module(ModuleData {
no_implicit_prelude: attr::contains_name(&krate.attrs, "no_implicit_prelude"),
..ModuleData::new(None, root_module_kind, root_def_id, Mark::root(), krate.span)
});
let mut module_map = FxHashMap();
module_map.insert(DefId::local(CRATE_DEF_INDEX), graph_root);
let mut definitions = Definitions::new();
DefCollector::new(&mut definitions, Mark::root())
.collect_root(crate_name, &session.local_crate_disambiguator().as_str());
let mut invocations = FxHashMap();
invocations.insert(Mark::root(),
arenas.alloc_invocation_data(InvocationData::root(graph_root)));
let features = session.features.borrow();
let mut macro_defs = FxHashMap();
macro_defs.insert(Mark::root(), root_def_id);
Resolver {
session,
cstore,
definitions,
// The outermost module has def ID 0; this is not reflected in the
// AST.
graph_root,
prelude: None,
has_self: FxHashSet(),
field_names: FxHashMap(),
determined_imports: Vec::new(),
indeterminate_imports: Vec::new(),
current_module: graph_root,
ribs: PerNS {
value_ns: vec![Rib::new(ModuleRibKind(graph_root))],
type_ns: vec![Rib::new(ModuleRibKind(graph_root))],
macro_ns: Some(vec![Rib::new(ModuleRibKind(graph_root))]),
},
label_ribs: Vec::new(),
current_trait_ref: None,
current_self_type: None,
primitive_type_table: PrimitiveTypeTable::new(),
def_map: NodeMap(),
freevars: NodeMap(),
freevars_seen: NodeMap(),
export_map: FxHashMap(),
trait_map: NodeMap(),
module_map,
block_map: NodeMap(),
extern_module_map: FxHashMap(),
make_glob_map: make_glob_map == MakeGlobMap::Yes,
glob_map: NodeMap(),
used_imports: FxHashSet(),
maybe_unused_trait_imports: NodeSet(),
maybe_unused_extern_crates: Vec::new(),
privacy_errors: Vec::new(),
ambiguity_errors: Vec::new(),
use_injections: Vec::new(),
gated_errors: FxHashSet(),
disallowed_shadowing: Vec::new(),
arenas,
dummy_binding: arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Def(Def::Err),
expansion: Mark::root(),
span: DUMMY_SP,
vis: ty::Visibility::Public,
}),
// The `proc_macro` and `decl_macro` features imply `use_extern_macros`
use_extern_macros:
features.use_extern_macros || features.proc_macro || features.decl_macro,
crate_loader,
macro_names: FxHashSet(),
global_macros: FxHashMap(),
lexical_macro_resolutions: Vec::new(),
macro_map: FxHashMap(),
macro_exports: Vec::new(),
invocations,
macro_defs,
local_macro_def_scopes: FxHashMap(),
name_already_seen: FxHashMap(),
whitelisted_legacy_custom_derives: Vec::new(),
proc_macro_enabled: features.proc_macro,
warned_proc_macros: FxHashSet(),
potentially_unused_imports: Vec::new(),
struct_constructors: DefIdMap(),
found_unresolved_macro: false,
unused_macros: FxHashSet(),
current_type_ascription: Vec::new(),
}
}
pub fn arenas() -> ResolverArenas<'a> {
ResolverArenas {
modules: arena::TypedArena::new(),
local_modules: RefCell::new(Vec::new()),
name_bindings: arena::TypedArena::new(),
import_directives: arena::TypedArena::new(),
name_resolutions: arena::TypedArena::new(),
invocation_data: arena::TypedArena::new(),
legacy_bindings: arena::TypedArena::new(),
}
}
fn per_ns<T, F: FnMut(&mut Self, Namespace) -> T>(&mut self, mut f: F) -> PerNS<T> {
PerNS {
type_ns: f(self, TypeNS),
value_ns: f(self, ValueNS),
macro_ns: match self.use_extern_macros {
true => Some(f(self, MacroNS)),
false => None,
},
}
}
/// Entry point to crate resolution.
pub fn resolve_crate(&mut self, krate: &Crate) {
ImportResolver { resolver: self }.finalize_imports();
self.current_module = self.graph_root;
self.finalize_current_module_macro_resolutions();
visit::walk_crate(self, krate);
check_unused::check_crate(self, krate);
self.report_errors(krate);
self.crate_loader.postprocess(krate);
}
fn new_module(
&self,
parent: Module<'a>,
kind: ModuleKind,
normal_ancestor_id: DefId,
expansion: Mark,
span: Span,
) -> Module<'a> {
let module = ModuleData::new(Some(parent), kind, normal_ancestor_id, expansion, span);
self.arenas.alloc_module(module)
}
fn record_use(&mut self, ident: Ident, ns: Namespace, binding: &'a NameBinding<'a>, span: Span)
-> bool /* true if an error was reported */ {
match binding.kind {
NameBindingKind::Import { directive, binding, ref used, legacy_self_import }
if !used.get() => {
used.set(true);
directive.used.set(true);
if legacy_self_import {
self.warn_legacy_self_import(directive);
return false;
}
self.used_imports.insert((directive.id, ns));
self.add_to_glob_map(directive.id, ident);
self.record_use(ident, ns, binding, span)
}
NameBindingKind::Import { .. } => false,
NameBindingKind::Ambiguity { b1, b2, legacy } => {
self.ambiguity_errors.push(AmbiguityError {
                    span, name: ident.name, lexical: false, b1, b2, legacy,
});
if legacy {
self.record_use(ident, ns, b1, span);
}
!legacy
}
_ => false
}
}
fn add_to_glob_map(&mut self, id: NodeId, ident: Ident) {
if self.make_glob_map {
self.glob_map.entry(id).or_insert_with(FxHashSet).insert(ident.name);
}
}
/// This resolves the identifier `ident` in the namespace `ns` in the current lexical scope.
/// More specifically, we proceed up the hierarchy of scopes and return the binding for
/// `ident` in the first scope that defines it (or None if no scopes define it).
///
/// A block's items are above its local variables in the scope hierarchy, regardless of where
/// the items are defined in the block. For example,
/// ```rust
/// fn f() {
/// g(); // Since there are no local variables in scope yet, this resolves to the item.
/// let g = || {};
/// fn g() {}
/// g(); // This resolves to the local variable `g` since it shadows the item.
/// }
/// ```
///
/// Invariant: This must only be called during main resolution, not during
/// import resolution.
fn resolve_ident_in_lexical_scope(&mut self,
mut ident: Ident,
ns: Namespace,
record_used: bool,
path_span: Span)
-> Option<LexicalScopeBinding<'a>> {
if ns == TypeNS {
ident.ctxt = if ident.name == keywords::SelfType.name() {
SyntaxContext::empty() // FIXME(jseyfried) improve `Self` hygiene
} else {
ident.ctxt.modern()
}
}
// Walk backwards up the ribs in scope.
let mut module = self.graph_root;
for i in (0 .. self.ribs[ns].len()).rev() {
if let Some(def) = self.ribs[ns][i].bindings.get(&ident).cloned() {
// The ident resolves to a type parameter or local variable.
return Some(LexicalScopeBinding::Def(
self.adjust_local_def(ns, i, def, record_used, path_span)
));
}
module = match self.ribs[ns][i].kind {
ModuleRibKind(module) => module,
MacroDefinition(def) if def == self.macro_defs[&ident.ctxt.outer()] => {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
ident.ctxt.remove_mark();
continue
}
_ => continue,
};
let item = self.resolve_ident_in_module_unadjusted(
module, ident, ns, false, record_used, path_span,
);
if let Ok(binding) = item {
// The ident resolves to an item.
return Some(LexicalScopeBinding::Item(binding));
}
match module.kind {
ModuleKind::Block(..) => {}, // We can see through blocks
_ => break,
}
}
ident.ctxt = ident.ctxt.modern();
loop {
module = unwrap_or!(self.hygienic_lexical_parent(module, &mut ident.ctxt), break);
let orig_current_module = self.current_module;
self.current_module = module; // Lexical resolutions can never be a privacy error.
let result = self.resolve_ident_in_module_unadjusted(
module, ident, ns, false, record_used, path_span,
);
self.current_module = orig_current_module;
match result {
Ok(binding) => return Some(LexicalScopeBinding::Item(binding)),
Err(Undetermined) => return None,
Err(Determined) => {}
}
}
match self.prelude {
Some(prelude) if !module.no_implicit_prelude => {
self.resolve_ident_in_module_unadjusted(prelude, ident, ns, false, false, path_span)
.ok().map(LexicalScopeBinding::Item)
}
_ => None,
}
}
fn hygienic_lexical_parent(&mut self, mut module: Module<'a>, ctxt: &mut SyntaxContext)
-> Option<Module<'a>> {
if !module.expansion.is_descendant_of(ctxt.outer()) {
return Some(self.macro_def_scope(ctxt.remove_mark()));
}
if let ModuleKind::Block(..) = module.kind {
return Some(module.parent.unwrap());
}
let mut module_expansion = module.expansion.modern(); // for backward compatibility
while let Some(parent) = module.parent {
let parent_expansion = parent.expansion.modern();
if module_expansion.is_descendant_of(parent_expansion) &&
parent_expansion != module_expansion {
return if parent_expansion.is_descendant_of(ctxt.outer()) {
Some(parent)
} else {
None
};
}
module = parent;
module_expansion = parent_expansion;
}
None
}
fn resolve_ident_in_module(&mut self,
module: Module<'a>,
mut ident: Ident,
ns: Namespace,
ignore_unresolved_invocations: bool,
record_used: bool,
span: Span)
-> Result<&'a NameBinding<'a>, Determinacy> {
ident.ctxt = ident.ctxt.modern();
let orig_current_module = self.current_module;
if let Some(def) = ident.ctxt.adjust(module.expansion) {
self.current_module = self.macro_def_scope(def);
}
let result = self.resolve_ident_in_module_unadjusted(
module, ident, ns, ignore_unresolved_invocations, record_used, span,
);
self.current_module = orig_current_module;
result
}
fn resolve_crate_root(&mut self, mut ctxt: SyntaxContext) -> Module<'a> {
let module = match ctxt.adjust(Mark::root()) {
Some(def) => self.macro_def_scope(def),
None => return self.graph_root,
};
self.get_module(DefId { index: CRATE_DEF_INDEX, ..module.normal_ancestor_id })
}
fn resolve_self(&mut self, ctxt: &mut SyntaxContext, module: Module<'a>) -> Module<'a> {
let mut module = self.get_module(module.normal_ancestor_id);
while module.span.ctxt().modern() != *ctxt {
let parent = module.parent.unwrap_or_else(|| self.macro_def_scope(ctxt.remove_mark()));
module = self.get_module(parent.normal_ancestor_id);
}
module
}
// AST resolution
//
// We maintain a list of value ribs and type ribs.
//
// Simultaneously, we keep track of the current position in the module
// graph in the `current_module` pointer. When we go to resolve a name in
// the value or type namespaces, we first look through all the ribs and
// then query the module graph. When we resolve a name in the module
// namespace, we can skip all the ribs (since nested modules are not
// allowed within blocks in Rust) and jump straight to the current module
// graph node.
//
// Named implementations are handled separately. When we find a method
// call, we consult the module node to find all of the implementations in
// scope. This information is lazily cached in the module node. We then
// generate a fake "implementation scope" containing all the
// implementations thus found, for compatibility with old resolve pass.
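    //
    // For example, in
    //
    //     fn f<T>(x: T) { let y = x; }
    //
    // resolving `x` on the right-hand side of the `let` walks the value ribs
    // and finds the function parameter, while resolving `T` in the parameter
    // type walks the type ribs and finds the type parameter.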
fn with_scope<F>(&mut self, id: NodeId, f: F)
where F: FnOnce(&mut Resolver)
{
let id = self.definitions.local_def_id(id);
let module = self.module_map.get(&id).cloned(); // clones a reference
if let Some(module) = module {
// Move down in the graph.
let orig_module = replace(&mut self.current_module, module);
self.ribs[ValueNS].push(Rib::new(ModuleRibKind(module)));
self.ribs[TypeNS].push(Rib::new(ModuleRibKind(module)));
self.finalize_current_module_macro_resolutions();
f(self);
self.current_module = orig_module;
self.ribs[ValueNS].pop();
self.ribs[TypeNS].pop();
} else {
f(self);
}
}
/// Searches the current set of local scopes for labels. Returns the first non-None label that
    /// is returned by the given predicate function.
///
/// Stops after meeting a closure.
fn search_label<P, R>(&self, mut ident: Ident, pred: P) -> Option<R>
where P: Fn(&Rib, Ident) -> Option<R>
{
for rib in self.label_ribs.iter().rev() {
match rib.kind {
NormalRibKind => {}
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
MacroDefinition(def) => {
if def == self.macro_defs[&ident.ctxt.outer()] {
ident.ctxt.remove_mark();
}
}
_ => {
// Do not resolve labels across function boundary
return None;
}
}
let r = pred(rib, ident);
if r.is_some() {
return r;
}
}
None
}
fn resolve_item(&mut self, item: &Item) {
let name = item.ident.name;
debug!("(resolving item) resolving {}", name);
self.check_proc_macro_attrs(&item.attrs);
match item.node {
ItemKind::Enum(_, ref generics) |
ItemKind::Ty(_, ref generics) |
ItemKind::Struct(_, ref generics) |
ItemKind::Union(_, ref generics) |
ItemKind::Fn(.., ref generics, _) => {
self.with_type_parameter_rib(HasTypeParameters(generics, ItemRibKind),
|this| visit::walk_item(this, item));
}
ItemKind::DefaultImpl(_, ref trait_ref) => {
self.with_optional_trait_ref(Some(trait_ref), |this, _| {
// Resolve type arguments in trait path
visit::walk_trait_ref(this, trait_ref);
});
}
ItemKind::Impl(.., ref generics, ref opt_trait_ref, ref self_type, ref impl_items) =>
self.resolve_implementation(generics,
opt_trait_ref,
&self_type,
item.id,
impl_items),
ItemKind::Trait(_, ref generics, ref bounds, ref trait_items) => {
// Create a new rib for the trait-wide type parameters.
self.with_type_parameter_rib(HasTypeParameters(generics, ItemRibKind), |this| {
let local_def_id = this.definitions.local_def_id(item.id);
this.with_self_rib(Def::SelfTy(Some(local_def_id), None), |this| {
this.visit_generics(generics);
walk_list!(this, visit_ty_param_bound, bounds);
for trait_item in trait_items {
this.check_proc_macro_attrs(&trait_item.attrs);
match trait_item.node {
TraitItemKind::Const(ref ty, ref default) => {
this.visit_ty(ty);
// Only impose the restrictions of
// ConstRibKind for an actual constant
// expression in a provided default.
                                if let Some(ref expr) = *default {
this.with_constant_rib(|this| {
this.visit_expr(expr);
});
}
}
TraitItemKind::Method(ref sig, _) => {
let type_parameters =
HasTypeParameters(&sig.generics,
MethodRibKind(!sig.decl.has_self()));
this.with_type_parameter_rib(type_parameters, |this| {
visit::walk_trait_item(this, trait_item)
});
}
TraitItemKind::Type(..) => {
this.with_type_parameter_rib(NoTypeParameters, |this| {
visit::walk_trait_item(this, trait_item)
});
}
TraitItemKind::Macro(_) => panic!("unexpanded macro in resolve!"),
};
}
});
});
}
ItemKind::Mod(_) | ItemKind::ForeignMod(_) => {
self.with_scope(item.id, |this| {
visit::walk_item(this, item);
});
}
ItemKind::Static(ref ty, _, ref expr) |
ItemKind::Const(ref ty, ref expr) => {
self.with_item_rib(|this| {
this.visit_ty(ty);
this.with_constant_rib(|this| {
this.visit_expr(expr);
});
});
}
ItemKind::Use(ref view_path) => {
match view_path.node {
ast::ViewPathList(ref prefix, ref items) if items.is_empty() => {
// Resolve prefix of an import with empty braces (issue #28388).
self.smart_resolve_path(item.id, None, prefix, PathSource::ImportPrefix);
}
_ => {}
}
}
            ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) | ItemKind::GlobalAsm(_) => {
// do nothing, these are just around to be encoded
}
ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"),
}
}
fn with_type_parameter_rib<'b, F>(&'b mut self, type_parameters: TypeParameters<'a, 'b>, f: F)
where F: FnOnce(&mut Resolver)
{
match type_parameters {
HasTypeParameters(generics, rib_kind) => {
let mut function_type_rib = Rib::new(rib_kind);
let mut seen_bindings = FxHashMap();
for type_parameter in &generics.ty_params {
let ident = type_parameter.ident.modern();
debug!("with_type_parameter_rib: {}", type_parameter.id);
if seen_bindings.contains_key(&ident) {
let span = seen_bindings.get(&ident).unwrap();
let err =
ResolutionError::NameAlreadyUsedInTypeParameterList(ident.name, span);
resolve_error(self, type_parameter.span, err);
}
seen_bindings.entry(ident).or_insert(type_parameter.span);
// plain insert (no renaming)
let def_id = self.definitions.local_def_id(type_parameter.id);
let def = Def::TyParam(def_id);
function_type_rib.bindings.insert(ident, def);
self.record_def(type_parameter.id, PathResolution::new(def));
}
self.ribs[TypeNS].push(function_type_rib);
}
NoTypeParameters => {
// Nothing to do.
}
}
f(self);
if let HasTypeParameters(..) = type_parameters {
self.ribs[TypeNS].pop();
}
}
fn with_label_rib<F>(&mut self, f: F)
where F: FnOnce(&mut Resolver)
{
self.label_ribs.push(Rib::new(NormalRibKind));
f(self);
self.label_ribs.pop();
}
fn with_item_rib<F>(&mut self, f: F)
where F: FnOnce(&mut Resolver)
{
self.ribs[ValueNS].push(Rib::new(ItemRibKind));
self.ribs[TypeNS].push(Rib::new(ItemRibKind));
f(self);
self.ribs[TypeNS].pop();
self.ribs[ValueNS].pop();
}
fn with_constant_rib<F>(&mut self, f: F)
where F: FnOnce(&mut Resolver)
{
self.ribs[ValueNS].push(Rib::new(ConstantItemRibKind));
f(self);
self.ribs[ValueNS].pop();
}
fn with_current_self_type<T, F>(&mut self, self_type: &Ty, f: F) -> T
where F: FnOnce(&mut Resolver) -> T
{
// Handle nested impls (inside fn bodies)
let previous_value = replace(&mut self.current_self_type, Some(self_type.clone()));
let result = f(self);
self.current_self_type = previous_value;
result
}
fn with_optional_trait_ref<T, F>(&mut self, opt_trait_ref: Option<&TraitRef>, f: F) -> T
where F: FnOnce(&mut Resolver, Option<DefId>) -> T
{
let mut new_val = None;
let mut new_id = None;
if let Some(trait_ref) = opt_trait_ref {
let path: Vec<_> = trait_ref.path.segments.iter()
.map(|seg| respan(seg.span, seg.identifier))
.collect();
let def = self.smart_resolve_path_fragment(trait_ref.ref_id,
None,
&path,
trait_ref.path.span,
trait_ref.path.segments.last().unwrap().span,
PathSource::Trait)
.base_def();
if def != Def::Err {
new_id = Some(def.def_id());
let span = trait_ref.path.span;
if let PathResult::Module(module) = self.resolve_path(&path, None, false, span) {
new_val = Some((module, trait_ref.clone()));
}
}
}
let original_trait_ref = replace(&mut self.current_trait_ref, new_val);
let result = f(self, new_id);
self.current_trait_ref = original_trait_ref;
result
}
fn with_self_rib<F>(&mut self, self_def: Def, f: F)
where F: FnOnce(&mut Resolver)
{
let mut self_type_rib = Rib::new(NormalRibKind);
// plain insert (no renaming, types are not currently hygienic....)
self_type_rib.bindings.insert(keywords::SelfType.ident(), self_def);
self.ribs[TypeNS].push(self_type_rib);
f(self);
self.ribs[TypeNS].pop();
}
fn resolve_implementation(&mut self,
generics: &Generics,
opt_trait_reference: &Option<TraitRef>,
self_type: &Ty,
item_id: NodeId,
impl_items: &[ImplItem]) {
// If applicable, create a rib for the type parameters.
self.with_type_parameter_rib(HasTypeParameters(generics, ItemRibKind), |this| {
// Dummy self type for better errors if `Self` is used in the trait path.
this.with_self_rib(Def::SelfTy(None, None), |this| {
// Resolve the trait reference, if necessary.
this.with_optional_trait_ref(opt_trait_reference.as_ref(), |this, trait_id| {
let item_def_id = this.definitions.local_def_id(item_id);
this.with_self_rib(Def::SelfTy(trait_id, Some(item_def_id)), |this| {
if let Some(trait_ref) = opt_trait_reference.as_ref() {
// Resolve type arguments in trait path
visit::walk_trait_ref(this, trait_ref);
}
// Resolve the self type.
this.visit_ty(self_type);
// Resolve the type parameters.
this.visit_generics(generics);
this.with_current_self_type(self_type, |this| {
for impl_item in impl_items {
this.check_proc_macro_attrs(&impl_item.attrs);
this.resolve_visibility(&impl_item.vis);
match impl_item.node {
ImplItemKind::Const(..) => {
// If this is a trait impl, ensure the const
// exists in trait
this.check_trait_item(impl_item.ident,
ValueNS,
impl_item.span,
|n, s| ResolutionError::ConstNotMemberOfTrait(n, s));
visit::walk_impl_item(this, impl_item);
}
ImplItemKind::Method(ref sig, _) => {
// If this is a trait impl, ensure the method
// exists in trait
this.check_trait_item(impl_item.ident,
ValueNS,
impl_item.span,
|n, s| ResolutionError::MethodNotMemberOfTrait(n, s));
// We also need a new scope for the method-
// specific type parameters.
let type_parameters =
HasTypeParameters(&sig.generics,
MethodRibKind(!sig.decl.has_self()));
this.with_type_parameter_rib(type_parameters, |this| {
visit::walk_impl_item(this, impl_item);
});
}
ImplItemKind::Type(ref ty) => {
// If this is a trait impl, ensure the type
// exists in trait
this.check_trait_item(impl_item.ident,
TypeNS,
impl_item.span,
|n, s| ResolutionError::TypeNotMemberOfTrait(n, s));
this.visit_ty(ty);
}
ImplItemKind::Macro(_) =>
panic!("unexpanded macro in resolve!"),
}
}
});
});
});
});
});
}
fn check_trait_item<F>(&mut self, ident: Ident, ns: Namespace, span: Span, err: F)
where F: FnOnce(Name, &str) -> ResolutionError
{
// If there is a TraitRef in scope for an impl, then the method must be in the
// trait.
if let Some((module, _)) = self.current_trait_ref {
if self.resolve_ident_in_module(module, ident, ns, false, false, span).is_err() {
let path = &self.current_trait_ref.as_ref().unwrap().1.path;
resolve_error(self, span, err(ident.name, &path_names_to_string(path)));
}
}
}
fn resolve_local(&mut self, local: &Local) {
// Resolve the type.
walk_list!(self, visit_ty, &local.ty);
// Resolve the initializer.
walk_list!(self, visit_expr, &local.init);
// Resolve the pattern.
self.resolve_pattern(&local.pat, PatternSource::Let, &mut FxHashMap());
}
    // build a map from pattern identifiers to binding-info's.
    // this is done hygienically, keeping identifiers from different
    // syntax contexts apart. That matters e.g. for a macro that
    // expands into an or-pattern where one 'x' was from the
    // user and one 'x' came from the macro.
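    //
    // For example (a sketch): a macro
    //
    //     macro_rules! m { ($i:ident) => { match f() { A($i) | B(x) => {} } } }
    //
    // invoked as `m!(x)` yields an or-pattern whose two `x`s look identical
    // but carry different syntax contexts, and are therefore distinct keys.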
fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap {
let mut binding_map = FxHashMap();
pat.walk(&mut |pat| {
if let PatKind::Ident(binding_mode, ident, ref sub_pat) = pat.node {
if sub_pat.is_some() || match self.def_map.get(&pat.id).map(|res| res.base_def()) {
Some(Def::Local(..)) => true,
_ => false,
} {
let binding_info = BindingInfo { span: ident.span, binding_mode: binding_mode };
binding_map.insert(ident.node, binding_info);
}
}
true
});
binding_map
}
// check that all of the arms in an or-pattern have exactly the
// same set of bindings, with the same binding modes for each.
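    //
    // For example: `Ok(a) | Err(a)` is accepted, `Ok(a) | Err(b)` reports
    // variables that are not bound in all patterns, and `Ok(a) | Err(ref a)`
    // reports inconsistent binding modes.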
fn check_consistent_bindings(&mut self, arm: &Arm) {
if arm.pats.is_empty() {
return;
}
let mut missing_vars = FxHashMap();
let mut inconsistent_vars = FxHashMap();
for (i, p) in arm.pats.iter().enumerate() {
let map_i = self.binding_mode_map(&p);
for (j, q) in arm.pats.iter().enumerate() {
if i == j {
continue;
}
let map_j = self.binding_mode_map(&q);
for (&key, &binding_i) in &map_i {
                    // Account for missing bindings when map_j has none.
                    if map_j.is_empty() {
                        let binding_error = missing_vars
                            .entry(key.name)
                            .or_insert(BindingError {
                                name: key.name,
                                origin: BTreeSet::new(),
                                target: BTreeSet::new(),
                            });
                        binding_error.origin.insert(binding_i.span);
                        binding_error.target.insert(q.span);
                    }
for (&key_j, &binding_j) in &map_j {
match map_i.get(&key_j) {
None => { // missing binding
let binding_error = missing_vars
.entry(key_j.name)
.or_insert(BindingError {
name: key_j.name,
origin: BTreeSet::new(),
target: BTreeSet::new(),
});
binding_error.origin.insert(binding_j.span);
binding_error.target.insert(p.span);
}
Some(binding_i) => { // check consistent binding
if binding_i.binding_mode != binding_j.binding_mode {
                                    inconsistent_vars
                                        .entry(key_j.name)
                                        .or_insert((binding_j.span, binding_i.span));
}
}
}
}
}
}
}
let mut missing_vars = missing_vars.iter().collect::<Vec<_>>();
missing_vars.sort();
for (_, v) in missing_vars {
resolve_error(self,
*v.origin.iter().next().unwrap(),
ResolutionError::VariableNotBoundInPattern(v));
}
let mut inconsistent_vars = inconsistent_vars.iter().collect::<Vec<_>>();
inconsistent_vars.sort();
for (name, v) in inconsistent_vars {
resolve_error(self, v.0, ResolutionError::VariableBoundWithDifferentMode(*name, v.1));
}
}
fn resolve_arm(&mut self, arm: &Arm) {
self.ribs[ValueNS].push(Rib::new(NormalRibKind));
let mut bindings_list = FxHashMap();
for pattern in &arm.pats {
self.resolve_pattern(&pattern, PatternSource::Match, &mut bindings_list);
}
// This has to happen *after* we determine which
// pat_idents are variants
self.check_consistent_bindings(arm);
walk_list!(self, visit_expr, &arm.guard);
self.visit_expr(&arm.body);
self.ribs[ValueNS].pop();
}
fn resolve_block(&mut self, block: &Block) {
debug!("(resolving block) entering block");
// Move down in the graph, if there's an anonymous module rooted here.
let orig_module = self.current_module;
let anonymous_module = self.block_map.get(&block.id).cloned(); // clones a reference
let mut num_macro_definition_ribs = 0;
if let Some(anonymous_module) = anonymous_module {
debug!("(resolving block) found anonymous module, moving down");
self.ribs[ValueNS].push(Rib::new(ModuleRibKind(anonymous_module)));
self.ribs[TypeNS].push(Rib::new(ModuleRibKind(anonymous_module)));
self.current_module = anonymous_module;
self.finalize_current_module_macro_resolutions();
} else {
self.ribs[ValueNS].push(Rib::new(NormalRibKind));
}
// Descend into the block.
for stmt in &block.stmts {
if let ast::StmtKind::Item(ref item) = stmt.node {
if let ast::ItemKind::MacroDef(..) = item.node {
num_macro_definition_ribs += 1;
let def = self.definitions.local_def_id(item.id);
self.ribs[ValueNS].push(Rib::new(MacroDefinition(def)));
self.label_ribs.push(Rib::new(MacroDefinition(def)));
}
}
self.visit_stmt(stmt);
}
// Move back up.
self.current_module = orig_module;
for _ in 0 .. num_macro_definition_ribs {
self.ribs[ValueNS].pop();
self.label_ribs.pop();
}
self.ribs[ValueNS].pop();
        if anonymous_module.is_some() {
self.ribs[TypeNS].pop();
}
debug!("(resolving block) leaving block");
}
fn fresh_binding(&mut self,
ident: &SpannedIdent,
pat_id: NodeId,
outer_pat_id: NodeId,
pat_src: PatternSource,
bindings: &mut FxHashMap<Ident, NodeId>)
-> PathResolution {
// Add the binding to the local ribs, if it
// doesn't already exist in the bindings map. (We
// must not add it if it's in the bindings map
// because that breaks the assumptions later
// passes make about or-patterns.)
let mut def = Def::Local(pat_id);
match bindings.get(&ident.node).cloned() {
Some(id) if id == outer_pat_id => {
// `Variant(a, a)`, error
resolve_error(
self,
ident.span,
ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(
&ident.node.name.as_str())
);
}
Some(..) if pat_src == PatternSource::FnParam => {
// `fn f(a: u8, a: u8)`, error
resolve_error(
self,
ident.span,
ResolutionError::IdentifierBoundMoreThanOnceInParameterList(
&ident.node.name.as_str())
);
}
Some(..) if pat_src == PatternSource::Match => {
// `Variant1(a) | Variant2(a)`, ok
// Reuse definition from the first `a`.
def = self.ribs[ValueNS].last_mut().unwrap().bindings[&ident.node];
}
Some(..) => {
span_bug!(ident.span, "two bindings with the same name from \
unexpected pattern source {:?}", pat_src);
}
None => {
// A completely fresh binding, add to the lists if it's valid.
if ident.node.name != keywords::Invalid.name() {
bindings.insert(ident.node, outer_pat_id);
self.ribs[ValueNS].last_mut().unwrap().bindings.insert(ident.node, def);
}
}
}
PathResolution::new(def)
}
fn resolve_pattern(&mut self,
pat: &Pat,
pat_src: PatternSource,
// Maps idents to the node ID for the
// outermost pattern that binds them.
bindings: &mut FxHashMap<Ident, NodeId>) {
// Visit all direct subpatterns of this pattern.
let outer_pat_id = pat.id;
pat.walk(&mut |pat| {
match pat.node {
PatKind::Ident(bmode, ref ident, ref opt_pat) => {
// First try to resolve the identifier as some existing
// entity, then fall back to a fresh binding.
let binding = self.resolve_ident_in_lexical_scope(ident.node, ValueNS,
false, pat.span)
.and_then(LexicalScopeBinding::item);
let resolution = binding.map(NameBinding::def).and_then(|def| {
let ivmode = BindingMode::ByValue(Mutability::Immutable);
let always_binding = !pat_src.is_refutable() || opt_pat.is_some() ||
bmode != ivmode;
match def {
Def::StructCtor(_, CtorKind::Const) |
Def::VariantCtor(_, CtorKind::Const) |
Def::Const(..) if !always_binding => {
// A unit struct/variant or constant pattern.
self.record_use(ident.node, ValueNS, binding.unwrap(), ident.span);
Some(PathResolution::new(def))
}
Def::StructCtor(..) | Def::VariantCtor(..) |
Def::Const(..) | Def::Static(..) => {
// A fresh binding that shadows something unacceptable.
resolve_error(
self,
ident.span,
ResolutionError::BindingShadowsSomethingUnacceptable(
pat_src.descr(), ident.node.name, binding.unwrap())
);
None
}
Def::Local(..) | Def::Upvar(..) | Def::Fn(..) | Def::Err => {
// These entities are explicitly allowed
// to be shadowed by fresh bindings.
None
}
def => {
span_bug!(ident.span, "unexpected definition for an \
identifier in pattern: {:?}", def);
}
}
}).unwrap_or_else(|| {
self.fresh_binding(ident, pat.id, outer_pat_id, pat_src, bindings)
});
self.record_def(pat.id, resolution);
}
PatKind::TupleStruct(ref path, ..) => {
self.smart_resolve_path(pat.id, None, path, PathSource::TupleStruct);
}
PatKind::Path(ref qself, ref path) => {
self.smart_resolve_path(pat.id, qself.as_ref(), path, PathSource::Pat);
}
PatKind::Struct(ref path, ..) => {
self.smart_resolve_path(pat.id, None, path, PathSource::Struct);
}
_ => {}
}
true
});
visit::walk_pat(self, pat);
}
// High-level and context dependent path resolution routine.
// Resolves the path and records the resolution into definition map.
    // If resolution fails, tries several techniques to find likely
    // resolution candidates, suggest imports or other help, and report
    // errors in a user-friendly way.
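    //
    // For example (illustrative): a typo like `Strin` can produce the base
    // error "cannot find type `Strin` in this scope" together with a
    // "did you mean `String`?" label from the Levenshtein fallback below.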
fn smart_resolve_path(&mut self,
id: NodeId,
qself: Option<&QSelf>,
path: &Path,
source: PathSource)
-> PathResolution {
let segments = &path.segments.iter()
.map(|seg| respan(seg.span, seg.identifier))
.collect::<Vec<_>>();
let ident_span = path.segments.last().map_or(path.span, |seg| seg.span);
self.smart_resolve_path_fragment(id, qself, segments, path.span, ident_span, source)
}
fn smart_resolve_path_fragment(&mut self,
id: NodeId,
qself: Option<&QSelf>,
path: &[SpannedIdent],
span: Span,
ident_span: Span,
source: PathSource)
-> PathResolution {
let ns = source.namespace();
let is_expected = &|def| source.is_expected(def);
let is_enum_variant = &|def| if let Def::Variant(..) = def { true } else { false };
// Base error is amended with one short label and possibly some longer helps/notes.
let report_errors = |this: &mut Self, def: Option<Def>| {
// Make the base error.
let expected = source.descr_expected();
let path_str = names_to_string(path);
let code = source.error_code(def.is_some());
let (base_msg, fallback_label, base_span) = if let Some(def) = def {
(format!("expected {}, found {} `{}`", expected, def.kind_name(), path_str),
format!("not a {}", expected), span)
} else {
let item_str = path[path.len() - 1].node;
let item_span = path[path.len() - 1].span;
let (mod_prefix, mod_str) = if path.len() == 1 {
(format!(""), format!("this scope"))
} else if path.len() == 2 && path[0].node.name == keywords::CrateRoot.name() {
(format!(""), format!("the crate root"))
} else {
let mod_path = &path[..path.len() - 1];
let mod_prefix = match this.resolve_path(mod_path, Some(TypeNS), false, span) {
PathResult::Module(module) => module.def(),
_ => None,
}.map_or(format!(""), |def| format!("{} ", def.kind_name()));
(mod_prefix, format!("`{}`", names_to_string(mod_path)))
};
(format!("cannot find {} `{}` in {}{}", expected, item_str, mod_prefix, mod_str),
format!("not found in {}", mod_str), item_span)
};
let mut err = this.session.struct_span_err_with_code(base_span, &base_msg, code);
// Emit special messages for unresolved `Self` and `self`.
if is_self_type(path, ns) {
__diagnostic_used!(E0411);
err.code("E0411".into());
err.span_label(span, "`Self` is only available in traits and impls");
return (err, Vec::new());
}
if is_self_value(path, ns) {
__diagnostic_used!(E0424);
err.code("E0424".into());
err.span_label(span, format!("`self` value is only available in \
methods with `self` parameter"));
return (err, Vec::new());
}
// Try to lookup the name in more relaxed fashion for better error reporting.
let ident = *path.last().unwrap();
let candidates = this.lookup_import_candidates(ident.node.name, ns, is_expected);
if candidates.is_empty() && is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
let enum_candidates =
this.lookup_import_candidates(ident.node.name, ns, is_enum_variant);
let mut enum_candidates = enum_candidates.iter()
.map(|suggestion| import_candidate_to_paths(&suggestion)).collect::<Vec<_>>();
enum_candidates.sort();
for (sp, variant_path, enum_path) in enum_candidates {
if sp == DUMMY_SP {
let msg = format!("there is an enum variant `{}`, \
try using `{}`?",
variant_path,
enum_path);
err.help(&msg);
} else {
err.span_suggestion(span, "you can try using the variant's enum",
enum_path);
}
}
}
if path.len() == 1 && this.self_type_is_available(span) {
if let Some(candidate) = this.lookup_assoc_candidate(ident.node, ns, is_expected) {
let self_is_available = this.self_value_is_available(path[0].node.ctxt, span);
match candidate {
AssocSuggestion::Field => {
err.span_suggestion(span, "try",
format!("self.{}", path_str));
if !self_is_available {
err.span_label(span, format!("`self` value is only available in \
methods with `self` parameter"));
}
}
AssocSuggestion::MethodWithSelf if self_is_available => {
err.span_suggestion(span, "try",
format!("self.{}", path_str));
}
AssocSuggestion::MethodWithSelf | AssocSuggestion::AssocItem => {
err.span_suggestion(span, "try",
format!("Self::{}", path_str));
}
}
return (err, candidates);
}
}
let mut levenshtein_worked = false;
// Try Levenshtein.
if let Some(candidate) = this.lookup_typo_candidate(path, ns, is_expected, span) {
err.span_label(ident_span, format!("did you mean `{}`?", candidate));
levenshtein_worked = true;
}
// Try context dependent help if relaxed lookup didn't work.
if let Some(def) = def {
match (def, source) {
(Def::Macro(..), _) => {
err.span_label(span, format!("did you mean `{}!(...)`?", path_str));
return (err, candidates);
}
(Def::TyAlias(..), PathSource::Trait) => {
err.span_label(span, "type aliases cannot be used for traits");
return (err, candidates);
}
(Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
ExprKind::Field(_, ident) => {
err.span_label(parent.span, format!("did you mean `{}::{}`?",
path_str, ident.node));
return (err, candidates);
}
ExprKind::MethodCall(ref segment, ..) => {
err.span_label(parent.span, format!("did you mean `{}::{}(...)`?",
path_str, segment.identifier));
return (err, candidates);
}
_ => {}
},
_ if ns == ValueNS && is_struct_like(def) => {
if let Def::Struct(def_id) = def {
if let Some((ctor_def, ctor_vis))
= this.struct_constructors.get(&def_id).cloned() {
if is_expected(ctor_def) && !this.is_accessible(ctor_vis) {
err.span_label(span, format!("constructor is not visible \
here due to private fields"));
}
}
}
err.span_label(span, format!("did you mean `{} {{ /* fields */ }}`?",
path_str));
return (err, candidates);
}
_ => {}
}
}
// Fallback label.
if !levenshtein_worked {
err.span_label(base_span, fallback_label);
this.type_ascription_suggestion(&mut err, base_span);
}
(err, candidates)
};
let report_errors = |this: &mut Self, def: Option<Def>| {
let (err, candidates) = report_errors(this, def);
let def_id = this.current_module.normal_ancestor_id;
let node_id = this.definitions.as_local_node_id(def_id).unwrap();
let better = def.is_some();
this.use_injections.push(UseError { err, candidates, node_id, better });
err_path_resolution()
};
let resolution = match self.resolve_qpath_anywhere(id, qself, path, ns, span,
source.defer_to_typeck(),
source.global_by_default()) {
Some(resolution) if resolution.unresolved_segments() == 0 => {
if is_expected(resolution.base_def()) || resolution.base_def() == Def::Err {
resolution
} else {
// Add a temporary hack to smooth the transition to new struct ctor
// visibility rules. See #38932 for more details.
let mut res = None;
if let Def::Struct(def_id) = resolution.base_def() {
if let Some((ctor_def, ctor_vis))
= self.struct_constructors.get(&def_id).cloned() {
if is_expected(ctor_def) && self.is_accessible(ctor_vis) {
let lint = lint::builtin::LEGACY_CONSTRUCTOR_VISIBILITY;
self.session.buffer_lint(lint, id, span,
"private struct constructors are not usable through \
reexports in outer modules",
);
res = Some(PathResolution::new(ctor_def));
}
}
}
res.unwrap_or_else(|| report_errors(self, Some(resolution.base_def())))
}
}
Some(resolution) if source.defer_to_typeck() => {
// Not fully resolved associated item `T::A::B` or `<T as Tr>::A::B`
// or `<T>::A::B`. If `B` should be resolved in value namespace then
// it needs to be added to the trait map.
if ns == ValueNS {
let item_name = path.last().unwrap().node;
let traits = self.get_traits_containing_item(item_name, ns);
self.trait_map.insert(id, traits);
}
resolution
}
_ => report_errors(self, None)
};
if let PathSource::TraitItem(..) = source {} else {
// Avoid recording definition of `A::B` in `<T as A>::B::C`.
self.record_def(id, resolution);
}
resolution
}
fn type_ascription_suggestion(&self,
err: &mut DiagnosticBuilder,
base_span: Span) {
        debug!("type_ascription_suggestion {:?}", base_span);
let cm = self.session.codemap();
debug!("self.current_type_ascription {:?}", self.current_type_ascription);
if let Some(sp) = self.current_type_ascription.last() {
let mut sp = *sp;
loop { // try to find the `:`, bail on first non-':'/non-whitespace
sp = sp.next_point();
if let Ok(snippet) = cm.span_to_snippet(sp.to(sp.next_point())) {
debug!("snippet {:?}", snippet);
let line_sp = cm.lookup_char_pos(sp.hi()).line;
let line_base_sp = cm.lookup_char_pos(base_span.lo()).line;
debug!("{:?} {:?}", line_sp, line_base_sp);
if snippet == ":" {
err.span_label(base_span,
"expecting a type here because of type ascription");
if line_sp != line_base_sp {
err.span_suggestion_short(sp,
"did you mean to use `;` here instead?",
";".to_string());
}
break;
                } else if !snippet.trim().is_empty() {
debug!("tried to find type ascription `:` token, couldn't find it");
break;
}
} else {
break;
}
}
}
}
fn self_type_is_available(&mut self, span: Span) -> bool {
let binding = self.resolve_ident_in_lexical_scope(keywords::SelfType.ident(),
TypeNS, false, span);
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
fn self_value_is_available(&mut self, ctxt: SyntaxContext, span: Span) -> bool {
let ident = Ident { name: keywords::SelfValue.name(), ctxt: ctxt };
let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, false, span);
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
// Resolve in alternative namespaces if resolution in the primary namespace fails.
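    //
    // For example (illustrative): if `Foo` fails to resolve as a value, the
    // type namespace is consulted too, so the eventual error can say what
    // `Foo` actually is ("expected value, found struct `Foo`").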
fn resolve_qpath_anywhere(&mut self,
id: NodeId,
qself: Option<&QSelf>,
path: &[SpannedIdent],
primary_ns: Namespace,
span: Span,
defer_to_typeck: bool,
global_by_default: bool)
-> Option<PathResolution> {
let mut fin_res = None;
// FIXME: can't resolve paths in macro namespace yet, macros are
// processed by the little special hack below.
for (i, ns) in [primary_ns, TypeNS, ValueNS, /*MacroNS*/].iter().cloned().enumerate() {
if i == 0 || ns != primary_ns {
match self.resolve_qpath(id, qself, path, ns, span, global_by_default) {
// If defer_to_typeck, then resolution > no resolution,
// otherwise full resolution > partial resolution > no resolution.
Some(res) if res.unresolved_segments() == 0 || defer_to_typeck =>
return Some(res),
res => if fin_res.is_none() { fin_res = res },
};
}
}
let is_global = self.global_macros.get(&path[0].node.name).cloned()
.map(|binding| binding.get_macro(self).kind() == MacroKind::Bang).unwrap_or(false);
if primary_ns != MacroNS && (is_global ||
self.macro_names.contains(&path[0].node.modern())) {
// Return some dummy definition, it's enough for error reporting.
return Some(
PathResolution::new(Def::Macro(DefId::local(CRATE_DEF_INDEX), MacroKind::Bang))
);
}
fin_res
}
/// Handles paths that may refer to associated items.
fn resolve_qpath(&mut self,
id: NodeId,
qself: Option<&QSelf>,
path: &[SpannedIdent],
ns: Namespace,
span: Span,
global_by_default: bool)
-> Option<PathResolution> {
if let Some(qself) = qself {
if qself.position == 0 {
// FIXME: Create some fake resolution that can't possibly be a type.
return Some(PathResolution::with_unresolved_segments(
Def::Mod(DefId::local(CRATE_DEF_INDEX)), path.len()
));
}
// Make sure `A::B` in `<T as A>::B::C` is a trait item.
let ns = if qself.position + 1 == path.len() { ns } else { TypeNS };
let res = self.smart_resolve_path_fragment(id, None, &path[..qself.position + 1],
span, span, PathSource::TraitItem(ns));
return Some(PathResolution::with_unresolved_segments(
res.base_def(), res.unresolved_segments() + path.len() - qself.position - 1
));
}
let result = match self.resolve_path(&path, Some(ns), true, span) {
PathResult::NonModule(path_res) => path_res,
PathResult::Module(module) if !module.is_normal() => {
PathResolution::new(module.def().unwrap())
}
// In `a(::assoc_item)*` `a` cannot be a module. If `a` does resolve to a module we
// don't report an error right away, but try to fallback to a primitive type.
// So, we are still able to successfully resolve something like
//
// use std::u8; // bring module u8 in scope
// fn f() -> u8 { // OK, resolves to primitive u8, not to std::u8
// u8::max_value() // OK, resolves to associated function <u8>::max_value,
// // not to non-existent std::u8::max_value
// }
//
// Such behavior is required for backward compatibility.
// The same fallback is used when `a` resolves to nothing.
PathResult::Module(..) | PathResult::Failed(..)
if (ns == TypeNS || path.len() > 1) &&
self.primitive_type_table.primitive_types
.contains_key(&path[0].node.name) => {
let prim = self.primitive_type_table.primitive_types[&path[0].node.name];
match prim {
TyUint(UintTy::U128) | TyInt(IntTy::I128) => {
if !self.session.features.borrow().i128_type {
emit_feature_err(&self.session.parse_sess,
"i128_type", span, GateIssue::Language,
"128-bit type is unstable");
}
}
_ => {}
}
PathResolution::with_unresolved_segments(Def::PrimTy(prim), path.len() - 1)
}
PathResult::Module(module) => PathResolution::new(module.def().unwrap()),
PathResult::Failed(span, msg, false) => {
resolve_error(self, span, ResolutionError::FailedToResolve(&msg));
err_path_resolution()
}
PathResult::Failed(..) => return None,
PathResult::Indeterminate => bug!("indetermined path result in resolve_qpath"),
};
if path.len() > 1 && !global_by_default && result.base_def() != Def::Err &&
path[0].node.name != keywords::CrateRoot.name() &&
path[0].node.name != keywords::DollarCrate.name() {
let unqualified_result = {
match self.resolve_path(&[*path.last().unwrap()], Some(ns), false, span) {
PathResult::NonModule(path_res) => path_res.base_def(),
PathResult::Module(module) => module.def().unwrap(),
_ => return Some(result),
}
};
if result.base_def() == unqualified_result {
let lint = lint::builtin::UNUSED_QUALIFICATIONS;
self.session.buffer_lint(lint, id, span, "unnecessary qualification")
}
}
Some(result)
}
fn resolve_path(&mut self,
path: &[SpannedIdent],
opt_ns: Option<Namespace>, // `None` indicates a module path
record_used: bool,
path_span: Span)
-> PathResult<'a> {
let mut module = None;
let mut allow_super = true;
for (i, &ident) in path.iter().enumerate() {
debug!("resolve_path ident {} {:?}", i, ident);
let is_last = i == path.len() - 1;
let ns = if is_last { opt_ns.unwrap_or(TypeNS) } else { TypeNS };
if i == 0 && ns == TypeNS && ident.node.name == keywords::SelfValue.name() {
let mut ctxt = ident.node.ctxt.modern();
module = Some(self.resolve_self(&mut ctxt, self.current_module));
continue
} else if allow_super && ns == TypeNS && ident.node.name == keywords::Super.name() {
let mut ctxt = ident.node.ctxt.modern();
let self_module = match i {
0 => self.resolve_self(&mut ctxt, self.current_module),
_ => module.unwrap(),
};
if let Some(parent) = self_module.parent {
module = Some(self.resolve_self(&mut ctxt, parent));
continue
} else {
let msg = "There are too many initial `super`s.".to_string();
return PathResult::Failed(ident.span, msg, false);
}
}
allow_super = false;
if i == 0 && ns == TypeNS && ident.node.name == keywords::CrateRoot.name() {
module = Some(self.resolve_crate_root(ident.node.ctxt.modern()));
continue
} else if i == 0 && ns == TypeNS && ident.node.name == keywords::DollarCrate.name() {
module = Some(self.resolve_crate_root(ident.node.ctxt));
continue
}
let binding = if let Some(module) = module {
self.resolve_ident_in_module(module, ident.node, ns, false, record_used, path_span)
} else if opt_ns == Some(MacroNS) {
self.resolve_lexical_macro_path_segment(ident.node, ns, record_used, path_span)
.map(MacroBinding::binding)
} else {
match self.resolve_ident_in_lexical_scope(ident.node, ns, record_used, path_span) {
Some(LexicalScopeBinding::Item(binding)) => Ok(binding),
Some(LexicalScopeBinding::Def(def))
if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) => {
return PathResult::NonModule(PathResolution::with_unresolved_segments(
def, path.len() - 1
));
}
_ => Err(if record_used { Determined } else { Undetermined }),
}
};
match binding {
Ok(binding) => {
let def = binding.def();
let maybe_assoc = opt_ns != Some(MacroNS) && PathSource::Type.is_expected(def);
if let Some(next_module) = binding.module() {
module = Some(next_module);
} else if def == Def::Err {
return PathResult::NonModule(err_path_resolution());
} else if opt_ns.is_some() && (is_last || maybe_assoc) {
return PathResult::NonModule(PathResolution::with_unresolved_segments(
def, path.len() - i - 1
));
} else {
return PathResult::Failed(ident.span,
format!("Not a module `{}`", ident.node),
is_last);
}
}
Err(Undetermined) => return PathResult::Indeterminate,
Err(Determined) => {
if let Some(module) = module {
if opt_ns.is_some() && !module.is_normal() {
return PathResult::NonModule(PathResolution::with_unresolved_segments(
module.def().unwrap(), path.len() - i
));
}
}
let msg = if module.and_then(ModuleData::def) == self.graph_root.def() {
let is_mod = |def| match def { Def::Mod(..) => true, _ => false };
let mut candidates =
self.lookup_import_candidates(ident.node.name, TypeNS, is_mod);
candidates.sort_by_key(|c| (c.path.segments.len(), c.path.to_string()));
if let Some(candidate) = candidates.get(0) {
format!("Did you mean `{}`?", candidate.path)
} else {
format!("Maybe a missing `extern crate {};`?", ident.node)
}
} else if i == 0 {
format!("Use of undeclared type or module `{}`", ident.node)
} else {
format!("Could not find `{}` in `{}`", ident.node, path[i - 1].node)
};
return PathResult::Failed(ident.span, msg, is_last);
}
}
}
PathResult::Module(module.unwrap_or(self.graph_root))
}
// Resolve a local definition, potentially adjusting for closures.
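    // For example (illustrative): in `let x = 0; let f = || x + 1;` the use
    // of `x` inside the closure body is adjusted from `Def::Local` to
    // `Def::Upvar`, and `x` is recorded as a freevar of the closure.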
fn adjust_local_def(&mut self,
ns: Namespace,
rib_index: usize,
mut def: Def,
record_used: bool,
span: Span) -> Def {
let ribs = &self.ribs[ns][rib_index + 1..];
// An invalid forward use of a type parameter from a previous default.
if let ForwardTyParamBanRibKind = self.ribs[ns][rib_index].kind {
if record_used {
resolve_error(self, span, ResolutionError::ForwardDeclaredTyParam);
}
assert_eq!(def, Def::Err);
return Def::Err;
}
match def {
Def::Upvar(..) => {
span_bug!(span, "unexpected {:?} in bindings", def)
}
Def::Local(node_id) => {
for rib in ribs {
match rib.kind {
NormalRibKind | ModuleRibKind(..) | MacroDefinition(..) |
ForwardTyParamBanRibKind => {
// Nothing to do. Continue.
}
ClosureRibKind(function_id) => {
let prev_def = def;
let seen = self.freevars_seen
.entry(function_id)
.or_insert_with(|| NodeMap());
if let Some(&index) = seen.get(&node_id) {
def = Def::Upvar(node_id, index, function_id);
continue;
}
let vec = self.freevars
.entry(function_id)
.or_insert_with(|| vec![]);
let depth = vec.len();
def = Def::Upvar(node_id, depth, function_id);
if record_used {
vec.push(Freevar {
def: prev_def,
span,
});
seen.insert(node_id, depth);
}
}
ItemRibKind | MethodRibKind(_) => {
// This was an attempt to access an upvar inside a
// named function item. This is not allowed, so we
// report an error.
if record_used {
resolve_error(self, span,
ResolutionError::CannotCaptureDynamicEnvironmentInFnItem);
}
return Def::Err;
}
ConstantItemRibKind => {
// Still doesn't deal with upvars
if record_used {
resolve_error(self, span,
ResolutionError::AttemptToUseNonConstantValueInConstant);
}
return Def::Err;
}
}
}
}
Def::TyParam(..) | Def::SelfTy(..) => {
for rib in ribs {
match rib.kind {
NormalRibKind | MethodRibKind(_) | ClosureRibKind(..) |
ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind |
ConstantItemRibKind => {
// Nothing to do. Continue.
}
ItemRibKind => {
// This was an attempt to use a type parameter outside
// its scope.
if record_used {
resolve_error(self, span,
ResolutionError::TypeParametersFromOuterFunction);
}
return Def::Err;
}
}
}
}
_ => {}
}
return def;
}
fn lookup_assoc_candidate<FilterFn>(&mut self,
ident: Ident,
ns: Namespace,
filter_fn: FilterFn)
-> Option<AssocSuggestion>
where FilterFn: Fn(Def) -> bool
{
fn extract_node_id(t: &Ty) -> Option<NodeId> {
match t.node {
TyKind::Path(None, _) => Some(t.id),
TyKind::Rptr(_, ref mut_ty) => extract_node_id(&mut_ty.ty),
// This doesn't handle the remaining `Ty` variants as they are not
// that commonly the self_type, it might be interesting to provide
// support for those in future.
_ => None,
}
}
// Fields are generally expected in the same contexts as locals.
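        // For example (illustrative): a bare `field` in a method body can be
        // suggested as `self.field` when the self type has a matching field.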
if filter_fn(Def::Local(ast::DUMMY_NODE_ID)) {
if let Some(node_id) = self.current_self_type.as_ref().and_then(extract_node_id) {
// Look for a field with the same name in the current self_type.
if let Some(resolution) = self.def_map.get(&node_id) {
match resolution.base_def() {
Def::Struct(did) | Def::Union(did)
if resolution.unresolved_segments() == 0 => {
if let Some(field_names) = self.field_names.get(&did) {
if field_names.iter().any(|&field_name| ident.name == field_name) {
return Some(AssocSuggestion::Field);
}
}
}
_ => {}
}
}
}
}
// Look for associated items in the current trait.
if let Some((module, _)) = self.current_trait_ref {
if let Ok(binding) =
self.resolve_ident_in_module(module, ident, ns, false, false, module.span) {
let def = binding.def();
if filter_fn(def) {
return Some(if self.has_self.contains(&def.def_id()) {
AssocSuggestion::MethodWithSelf
} else {
AssocSuggestion::AssocItem
});
}
}
}
None
}
fn lookup_typo_candidate<FilterFn>(&mut self,
path: &[SpannedIdent],
ns: Namespace,
filter_fn: FilterFn,
span: Span)
-> Option<Symbol>
where FilterFn: Fn(Def) -> bool
{
let add_module_candidates = |module: Module, names: &mut Vec<Name>| {
for (&(ident, _), resolution) in module.resolutions.borrow().iter() {
if let Some(binding) = resolution.borrow().binding {
if filter_fn(binding.def()) {
names.push(ident.name);
}
}
}
};
let mut names = Vec::new();
if path.len() == 1 {
// Search in lexical scope.
// Walk backwards up the ribs in scope and collect candidates.
for rib in self.ribs[ns].iter().rev() {
// Locals and type parameters
for (ident, def) in &rib.bindings {
if filter_fn(*def) {
names.push(ident.name);
}
}
// Items in scope
if let ModuleRibKind(module) = rib.kind {
// Items from this module
add_module_candidates(module, &mut names);
if let ModuleKind::Block(..) = module.kind {
// We can see through blocks
} else {
// Items from the prelude
if let Some(prelude) = self.prelude {
if !module.no_implicit_prelude {
add_module_candidates(prelude, &mut names);
}
}
break;
}
}
}
// Add primitive types to the mix
if filter_fn(Def::PrimTy(TyBool)) {
for (name, _) in &self.primitive_type_table.primitive_types {
names.push(*name);
}
}
} else {
// Search in module.
let mod_path = &path[..path.len() - 1];
if let PathResult::Module(module) = self.resolve_path(mod_path, Some(TypeNS),
false, span) {
add_module_candidates(module, &mut names);
}
}
let name = path[path.len() - 1].node.name;
// Make sure error reporting is deterministic.
names.sort_by_key(|name| name.as_str());
match find_best_match_for_name(names.iter(), &name.as_str(), None) {
Some(found) if found != name => Some(found),
_ => None,
}
}
fn with_resolved_label<F>(&mut self, label: Option<SpannedIdent>, id: NodeId, f: F)
where F: FnOnce(&mut Resolver)
{
if let Some(label) = label {
let def = Def::Label(id);
self.with_label_rib(|this| {
this.label_ribs.last_mut().unwrap().bindings.insert(label.node, def);
f(this);
});
} else {
f(self);
}
}
fn resolve_labeled_block(&mut self, label: Option<SpannedIdent>, id: NodeId, block: &Block) {
self.with_resolved_label(label, id, |this| this.visit_block(block));
}
fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
// First, record candidate traits for this expression if it could
// result in the invocation of a method call.
self.record_candidate_traits_for_expr_if_necessary(expr);
// Next, resolve the node.
match expr.node {
ExprKind::Path(ref qself, ref path) => {
self.smart_resolve_path(expr.id, qself.as_ref(), path, PathSource::Expr(parent));
visit::walk_expr(self, expr);
}
ExprKind::Struct(ref path, ..) => {
self.smart_resolve_path(expr.id, None, path, PathSource::Struct);
visit::walk_expr(self, expr);
}
ExprKind::Break(Some(label), _) | ExprKind::Continue(Some(label)) => {
match self.search_label(label.node, |rib, id| rib.bindings.get(&id).cloned()) {
None => {
// Search again for close matches...
// Picks the first label that is "close enough", which is not necessarily
// the closest match
let close_match = self.search_label(label.node, |rib, ident| {
let names = rib.bindings.iter().map(|(id, _)| &id.name);
find_best_match_for_name(names, &*ident.name.as_str(), None)
});
self.record_def(expr.id, err_path_resolution());
resolve_error(self,
label.span,
ResolutionError::UndeclaredLabel(&label.node.name.as_str(),
close_match));
}
Some(def @ Def::Label(_)) => {
// Since this def is a label, it is never read.
self.record_def(expr.id, PathResolution::new(def));
}
Some(_) => {
span_bug!(expr.span, "label wasn't mapped to a label def!");
}
}
// visit `break` argument if any
visit::walk_expr(self, expr);
}
ExprKind::IfLet(ref pattern, ref subexpression, ref if_block, ref optional_else) => {
self.visit_expr(subexpression);
self.ribs[ValueNS].push(Rib::new(NormalRibKind));
self.resolve_pattern(pattern, PatternSource::IfLet, &mut FxHashMap());
self.visit_block(if_block);
self.ribs[ValueNS].pop();
optional_else.as_ref().map(|expr| self.visit_expr(expr));
}
ExprKind::Loop(ref block, label) => self.resolve_labeled_block(label, expr.id, &block),
ExprKind::While(ref subexpression, ref block, label) => {
self.with_resolved_label(label, expr.id, |this| {
this.visit_expr(subexpression);
this.visit_block(block);
});
}
ExprKind::WhileLet(ref pattern, ref subexpression, ref block, label) => {
self.with_resolved_label(label, expr.id, |this| {
this.visit_expr(subexpression);
this.ribs[ValueNS].push(Rib::new(NormalRibKind));
this.resolve_pattern(pattern, PatternSource::WhileLet, &mut FxHashMap());
this.visit_block(block);
this.ribs[ValueNS].pop();
});
}
ExprKind::ForLoop(ref pattern, ref subexpression, ref block, label) => {
self.visit_expr(subexpression);
self.ribs[ValueNS].push(Rib::new(NormalRibKind));
self.resolve_pattern(pattern, PatternSource::For, &mut FxHashMap());
self.resolve_labeled_block(label, expr.id, block);
self.ribs[ValueNS].pop();
}
// Equivalent to `visit::walk_expr` + passing some context to children.
ExprKind::Field(ref subexpression, _) => {
self.resolve_expr(subexpression, Some(expr));
}
ExprKind::MethodCall(ref segment, ref arguments) => {
let mut arguments = arguments.iter();
self.resolve_expr(arguments.next().unwrap(), Some(expr));
for argument in arguments {
self.resolve_expr(argument, None);
}
self.visit_path_segment(expr.span, segment);
}
ExprKind::Repeat(ref element, ref count) => {
self.visit_expr(element);
self.with_constant_rib(|this| {
this.visit_expr(count);
});
}
ExprKind::Call(ref callee, ref arguments) => {
self.resolve_expr(callee, Some(expr));
for argument in arguments {
self.resolve_expr(argument, None);
}
}
ExprKind::Type(ref type_expr, _) => {
self.current_type_ascription.push(type_expr.span);
visit::walk_expr(self, expr);
self.current_type_ascription.pop();
}
_ => {
visit::walk_expr(self, expr);
}
}
}
fn record_candidate_traits_for_expr_if_necessary(&mut self, expr: &Expr) {
match expr.node {
ExprKind::Field(_, name) => {
// FIXME(#6890): Even though you can't treat a method like a
// field, we need to add any trait methods we find that match
// the field name so that we can do some nice error reporting
// later on in typeck.
let traits = self.get_traits_containing_item(name.node, ValueNS);
self.trait_map.insert(expr.id, traits);
}
ExprKind::MethodCall(ref segment, ..) => {
debug!("(recording candidate traits for expr) recording traits for {}",
expr.id);
let traits = self.get_traits_containing_item(segment.identifier, ValueNS);
self.trait_map.insert(expr.id, traits);
}
_ => {
// Nothing to do.
}
}
}
fn get_traits_containing_item(&mut self, mut ident: Ident, ns: Namespace)
-> Vec<TraitCandidate> {
debug!("(getting traits containing item) looking for '{}'", ident.name);
let mut found_traits = Vec::new();
// Look for the current trait.
if let Some((module, _)) = self.current_trait_ref {
if self.resolve_ident_in_module(module, ident, ns, false, false, module.span).is_ok() {
let def_id = module.def_id().unwrap();
found_traits.push(TraitCandidate { def_id: def_id, import_id: None });
}
}
ident.ctxt = ident.ctxt.modern();
let mut search_module = self.current_module;
loop {
self.get_traits_in_module_containing_item(ident, ns, search_module, &mut found_traits);
search_module =
unwrap_or!(self.hygienic_lexical_parent(search_module, &mut ident.ctxt), break);
}
if let Some(prelude) = self.prelude {
if !search_module.no_implicit_prelude {
self.get_traits_in_module_containing_item(ident, ns, prelude, &mut found_traits);
}
}
found_traits
}
fn get_traits_in_module_containing_item(&mut self,
ident: Ident,
ns: Namespace,
module: Module<'a>,
found_traits: &mut Vec<TraitCandidate>) {
let mut traits = module.traits.borrow_mut();
if traits.is_none() {
let mut collected_traits = Vec::new();
module.for_each_child(|name, ns, binding| {
if ns != TypeNS { return }
if let Def::Trait(_) = binding.def() {
collected_traits.push((name, binding));
}
});
*traits = Some(collected_traits.into_boxed_slice());
}
for &(trait_name, binding) in traits.as_ref().unwrap().iter() {
let module = binding.module().unwrap();
let mut ident = ident;
if ident.ctxt.glob_adjust(module.expansion, binding.span.ctxt().modern()).is_none() {
continue
}
if self.resolve_ident_in_module_unadjusted(module, ident, ns, false, false, module.span)
.is_ok() {
let import_id = match binding.kind {
NameBindingKind::Import { directive, .. } => {
self.maybe_unused_trait_imports.insert(directive.id);
self.add_to_glob_map(directive.id, trait_name);
Some(directive.id)
}
_ => None,
};
let trait_def_id = module.def_id().unwrap();
found_traits.push(TraitCandidate { def_id: trait_def_id, import_id: import_id });
}
}
}
/// When name resolution fails, this method can be used to look up candidate
/// entities with the expected name. It allows filtering them using the
/// supplied predicate (which should be used to only accept the types of
    /// definitions expected, e.g. traits). The lookup spans across all crates.
///
/// NOTE: The method does not look into imports, but this is not a problem,
/// since we report the definitions (thus, the de-aliased imports).
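    ///
    /// For example (illustrative): a failed lookup of `HashMap` in the type
    /// namespace can yield the candidate `std::collections::HashMap`, which
    /// is later offered to the user as a `use` suggestion.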
fn lookup_import_candidates<FilterFn>(&mut self,
lookup_name: Name,
namespace: Namespace,
filter_fn: FilterFn)
-> Vec<ImportSuggestion>
where FilterFn: Fn(Def) -> bool
{
let mut candidates = Vec::new();
let mut worklist = Vec::new();
let mut seen_modules = FxHashSet();
worklist.push((self.graph_root, Vec::new(), false));
while let Some((in_module,
path_segments,
in_module_is_extern)) = worklist.pop() {
self.populate_module_if_necessary(in_module);
// We have to visit module children in deterministic order to avoid
// instabilities in reported imports (#43552).
in_module.for_each_child_stable(|ident, ns, name_binding| {
// avoid imports entirely
if name_binding.is_import() && !name_binding.is_extern_crate() { return; }
// avoid non-importable candidates as well
if !name_binding.is_importable() { return; }
// collect results based on the filter function
if ident.name == lookup_name && ns == namespace {
if filter_fn(name_binding.def()) {
// create the path
let mut segms = path_segments.clone();
segms.push(ast::PathSegment::from_ident(ident, name_binding.span));
let path = Path {
span: name_binding.span,
segments: segms,
};
// the entity is accessible in the following cases:
// 1. if it's defined in the same crate, it's always
// accessible (since private entities can be made public)
// 2. if it's defined in another crate, it's accessible
// only if both the module is public and the entity is
// declared as public (due to pruning, we don't explore
// outside crate private modules => no need to check this)
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
candidates.push(ImportSuggestion { path: path });
}
}
}
// collect submodules to explore
if let Some(module) = name_binding.module() {
// form the path
let mut path_segments = path_segments.clone();
path_segments.push(ast::PathSegment::from_ident(ident, name_binding.span));
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
// add the module to the lookup
let is_extern = in_module_is_extern || name_binding.is_extern_crate();
if seen_modules.insert(module.def_id().unwrap()) {
worklist.push((module, path_segments, is_extern));
}
}
}
})
}
candidates
}
fn record_def(&mut self, node_id: NodeId, resolution: PathResolution) {
debug!("(recording def) recording {:?} for {}", resolution, node_id);
if let Some(prev_res) = self.def_map.insert(node_id, resolution) {
panic!("path resolved multiple times ({:?} before, {:?} now)", prev_res, resolution);
}
}
fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility {
match *vis {
ast::Visibility::Public => ty::Visibility::Public,
ast::Visibility::Crate(..) => ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)),
ast::Visibility::Inherited => {
ty::Visibility::Restricted(self.current_module.normal_ancestor_id)
}
ast::Visibility::Restricted { ref path, id } => {
let def = self.smart_resolve_path(id, None, path,
PathSource::Visibility).base_def();
if def == Def::Err {
ty::Visibility::Public
} else {
let vis = ty::Visibility::Restricted(def.def_id());
if self.is_accessible(vis) {
vis
} else {
self.session.span_err(path.span, "visibilities can only be restricted \
to ancestor modules");
ty::Visibility::Public
}
}
}
}
}
fn is_accessible(&self, vis: ty::Visibility) -> bool {
vis.is_accessible_from(self.current_module.normal_ancestor_id, self)
}
fn is_accessible_from(&self, vis: ty::Visibility, module: Module<'a>) -> bool {
vis.is_accessible_from(module.normal_ancestor_id, self)
}
fn report_errors(&mut self, krate: &Crate) {
self.report_shadowing_errors();
self.report_with_use_injections(krate);
let mut reported_spans = FxHashSet();
for &AmbiguityError { span, name, b1, b2, lexical, legacy } in &self.ambiguity_errors {
if !reported_spans.insert(span) { continue }
let participle = |binding: &NameBinding| {
if binding.is_import() { "imported" } else { "defined" }
};
let msg1 = format!("`{}` could refer to the name {} here", name, participle(b1));
let msg2 = format!("`{}` could also refer to the name {} here", name, participle(b2));
let note = if b1.expansion == Mark::root() || !lexical && b1.is_glob_import() {
format!("consider adding an explicit import of `{}` to disambiguate", name)
} else if let Def::Macro(..) = b1.def() {
format!("macro-expanded {} do not shadow",
if b1.is_import() { "macro imports" } else { "macros" })
} else {
format!("macro-expanded {} do not shadow when used in a macro invocation path",
if b1.is_import() { "imports" } else { "items" })
};
if legacy {
let id = match b2.kind {
NameBindingKind::Import { directive, .. } => directive.id,
_ => unreachable!(),
};
let mut span = MultiSpan::from_span(span);
span.push_span_label(b1.span, msg1);
span.push_span_label(b2.span, msg2);
let msg = format!("`{}` is ambiguous", name);
self.session.buffer_lint(lint::builtin::LEGACY_IMPORTS, id, span, &msg);
} else {
let mut err =
self.session.struct_span_err(span, &format!("`{}` is ambiguous", name));
err.span_note(b1.span, &msg1);
match b2.def() {
Def::Macro(..) if b2.span == DUMMY_SP =>
err.note(&format!("`{}` is also a builtin macro", name)),
_ => err.span_note(b2.span, &msg2),
};
                err.note(&note).emit();
}
}
for &PrivacyError(span, name, binding) in &self.privacy_errors {
if !reported_spans.insert(span) { continue }
span_err!(self.session, span, E0603, "{} `{}` is private", binding.descr(), name);
}
}
fn report_with_use_injections(&mut self, krate: &Crate) {
for UseError { mut err, candidates, node_id, better } in self.use_injections.drain(..) {
let mut finder = UsePlacementFinder {
target_module: node_id,
span: None,
found_use: false,
};
visit::walk_crate(&mut finder, krate);
if !candidates.is_empty() {
show_candidates(&mut err, finder.span, &candidates, better, finder.found_use);
}
err.emit();
}
}
fn report_shadowing_errors(&mut self) {
for (ident, scope) in replace(&mut self.lexical_macro_resolutions, Vec::new()) {
self.resolve_legacy_scope(scope, ident, true);
}
let mut reported_errors = FxHashSet();
for binding in replace(&mut self.disallowed_shadowing, Vec::new()) {
if self.resolve_legacy_scope(&binding.parent, binding.ident, false).is_some() &&
reported_errors.insert((binding.ident, binding.span)) {
let msg = format!("`{}` is already in scope", binding.ident);
self.session.struct_span_err(binding.span, &msg)
.note("macro-expanded `macro_rules!`s may not shadow \
existing macros (see RFC 1560)")
.emit();
}
}
}
fn report_conflict(&mut self,
parent: Module,
ident: Ident,
ns: Namespace,
new_binding: &NameBinding,
old_binding: &NameBinding) {
// Error on the second of two conflicting names
if old_binding.span.lo() > new_binding.span.lo() {
return self.report_conflict(parent, ident, ns, old_binding, new_binding);
}
let container = match parent.kind {
ModuleKind::Def(Def::Mod(_), _) => "module",
ModuleKind::Def(Def::Trait(_), _) => "trait",
ModuleKind::Block(..) => "block",
_ => "enum",
};
let old_noun = match old_binding.is_import() {
true => "import",
false => "definition",
};
let new_participle = match new_binding.is_import() {
true => "imported",
false => "defined",
};
let (name, span) = (ident.name, new_binding.span);
if let Some(s) = self.name_already_seen.get(&name) {
if s == &span {
return;
}
}
let old_kind = match (ns, old_binding.module()) {
(ValueNS, _) => "value",
(MacroNS, _) => "macro",
(TypeNS, _) if old_binding.is_extern_crate() => "extern crate",
(TypeNS, Some(module)) if module.is_normal() => "module",
(TypeNS, Some(module)) if module.is_trait() => "trait",
(TypeNS, _) => "type",
};
let namespace = match ns {
ValueNS => "value",
MacroNS => "macro",
TypeNS => "type",
};
let msg = format!("the name `{}` is defined multiple times", name);
let mut err = match (old_binding.is_extern_crate(), new_binding.is_extern_crate()) {
(true, true) => struct_span_err!(self.session, span, E0259, "{}", msg),
(true, _) | (_, true) => match new_binding.is_import() && old_binding.is_import() {
true => struct_span_err!(self.session, span, E0254, "{}", msg),
false => struct_span_err!(self.session, span, E0260, "{}", msg),
},
_ => match (old_binding.is_import(), new_binding.is_import()) {
(false, false) => struct_span_err!(self.session, span, E0428, "{}", msg),
(true, true) => struct_span_err!(self.session, span, E0252, "{}", msg),
_ => struct_span_err!(self.session, span, E0255, "{}", msg),
},
};
err.note(&format!("`{}` must be defined only once in the {} namespace of this {}",
name,
namespace,
container));
err.span_label(span, format!("`{}` re{} here", name, new_participle));
if old_binding.span != syntax_pos::DUMMY_SP {
err.span_label(old_binding.span, format!("previous {} of the {} `{}` here",
old_noun, old_kind, name));
}
err.emit();
self.name_already_seen.insert(name, span);
}
fn warn_legacy_self_import(&self, directive: &'a ImportDirective<'a>) {
let (id, span) = (directive.id, directive.span);
let msg = "`self` no longer imports values";
self.session.buffer_lint(lint::builtin::LEGACY_IMPORTS, id, span, msg);
}
fn check_proc_macro_attrs(&mut self, attrs: &[ast::Attribute]) {
if self.proc_macro_enabled { return; }
for attr in attrs {
if attr.path.segments.len() > 1 {
continue
}
let ident = attr.path.segments[0].identifier;
let result = self.resolve_lexical_macro_path_segment(ident,
MacroNS,
false,
attr.path.span);
if let Ok(binding) = result {
if let SyntaxExtension::AttrProcMacro(..) = *binding.binding().get_macro(self) {
attr::mark_known(attr);
let msg = "attribute procedural macros are experimental";
let feature = "proc_macro";
feature_err(&self.session.parse_sess, feature,
attr.span, GateIssue::Language, msg)
.span_note(binding.span(), "procedural macro imported here")
.emit();
}
}
}
}
}
fn is_struct_like(def: Def) -> bool {
match def {
Def::VariantCtor(_, CtorKind::Fictive) => true,
_ => PathSource::Struct.is_expected(def),
}
}
fn is_self_type(path: &[SpannedIdent], namespace: Namespace) -> bool {
namespace == TypeNS && path.len() == 1 && path[0].node.name == keywords::SelfType.name()
}
fn is_self_value(path: &[SpannedIdent], namespace: Namespace) -> bool {
namespace == ValueNS && path.len() == 1 && path[0].node.name == keywords::SelfValue.name()
}
fn names_to_string(idents: &[SpannedIdent]) -> String {
let mut result = String::new();
for (i, ident) in idents.iter()
.filter(|i| i.node.name != keywords::CrateRoot.name())
.enumerate() {
if i > 0 {
result.push_str("::");
}
result.push_str(&ident.node.name.as_str());
}
result
}
fn path_names_to_string(path: &Path) -> String {
names_to_string(&path.segments.iter()
.map(|seg| respan(seg.span, seg.identifier))
.collect::<Vec<_>>())
}
/// Get the path for an enum and the variant from an `ImportSuggestion` for an enum variant.
fn import_candidate_to_paths(suggestion: &ImportSuggestion) -> (Span, String, String) {
let variant_path = &suggestion.path;
let variant_path_string = path_names_to_string(variant_path);
let path_len = suggestion.path.segments.len();
let enum_path = ast::Path {
span: suggestion.path.span,
segments: suggestion.path.segments[0..path_len - 1].to_vec(),
};
let enum_path_string = path_names_to_string(&enum_path);
(suggestion.path.span, variant_path_string, enum_path_string)
}
/// When an entity with a given name is not available in scope, we search for
/// entities with that name in all crates. This method allows outputting the
/// results of this search in a programmer-friendly way.
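///
/// For example (illustrative): a failed resolution of `Iterator` can emit
/// "possible candidate is found in another module, you can import it into
/// scope" together with a `use std::iter::Iterator;` suggestion.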
fn show_candidates(err: &mut DiagnosticBuilder,
// This is `None` if all placement locations are inside expansions
span: Option<Span>,
candidates: &[ImportSuggestion],
better: bool,
found_use: bool) {
// we want consistent results across executions, but candidates are produced
// by iterating through a hash map, so make sure they are ordered:
let mut path_strings: Vec<_> =
candidates.into_iter().map(|c| path_names_to_string(&c.path)).collect();
path_strings.sort();
let better = if better { "better " } else { "" };
let msg_diff = match path_strings.len() {
1 => " is found in another module, you can import it",
_ => "s are found in other modules, you can import them",
};
let msg = format!("possible {}candidate{} into scope", better, msg_diff);
if let Some(span) = span {
for candidate in &mut path_strings {
// produce an additional newline to separate the new use statement
// from the directly following item.
let additional_newline = if found_use {
""
} else {
"\n"
};
*candidate = format!("use {};\n{}", candidate, additional_newline);
}
err.span_suggestions(span, &msg, path_strings);
} else {
let mut msg = msg;
msg.push(':');
for candidate in path_strings {
msg.push('\n');
msg.push_str(&candidate);
        }
        // Attach the accumulated candidate list to the diagnostic; without
        // this, the message built above would never reach the user.
        err.note(&msg);
    }
}
/// A somewhat inefficient routine to obtain the name of a module.
fn module_to_string(module: Module) -> String {
let mut names = Vec::new();
fn collect_mod(names: &mut Vec<Ident>, module: Module) {
if let ModuleKind::Def(_, name) = module.kind {
if let Some(parent) = module.parent {
names.push(Ident::with_empty_ctxt(name));
collect_mod(names, parent);
}
} else {
// danger, shouldn't be ident?
names.push(Ident::from_str("<opaque>"));
collect_mod(names, module.parent.unwrap());
}
}
collect_mod(&mut names, module);
if names.is_empty() {
return "???".to_string();
}
names_to_string(&names.into_iter()
.rev()
.map(|n| dummy_spanned(n))
.collect::<Vec<_>>())
}
fn err_path_resolution() -> PathResolution {
PathResolution::new(Def::Err)
}
#[derive(PartialEq,Copy, Clone)]
pub enum MakeGlobMap {
Yes,
No,
}
__build_diagnostic_array! { librustc_resolve, DIAGNOSTICS }
| 41.71973 | 100 | 0.505702 |
e68702bfe3ae028ba0cec8ca948dd173c8741487 | 50,512 | //! Mono Item Collection
//! ===========================
//!
//! This module is responsible for discovering all items that will contribute
//! to code generation of the crate. The important part here is that it not only
//! needs to find syntax-level items (functions, structs, etc) but also all
//! their monomorphized instantiations. Every non-generic, non-const function
//! maps to one LLVM artifact. Every generic function can produce
//! from zero to N artifacts, depending on the sets of type arguments it
//! is instantiated with.
//! This also applies to generic items from other crates: A generic definition
//! in crate X might produce monomorphizations that are compiled into crate Y.
//! We also have to collect these here.
//!
//! The following kinds of "mono items" are handled here:
//!
//! - Functions
//! - Methods
//! - Closures
//! - Statics
//! - Drop glue
//!
//! The following things also result in LLVM artifacts, but are not collected
//! here, since we instantiate them locally on demand when needed in a given
//! codegen unit:
//!
//! - Constants
//! - Vtables
//! - Object Shims
//!
//!
//! General Algorithm
//! -----------------
//! Let's define some terms first:
//!
//! - A "mono item" is something that results in a function or global in
//! the LLVM IR of a codegen unit. Mono items do not stand on their
//! own, they can reference other mono items. For example, if function
//! `foo()` calls function `bar()` then the mono item for `foo()`
//! references the mono item for function `bar()`. In general, the
//! definition for mono item A referencing a mono item B is that
//! the LLVM artifact produced for A references the LLVM artifact produced
//! for B.
//!
//! - Mono items and the references between them form a directed graph,
//! where the mono items are the nodes and references form the edges.
//! Let's call this graph the "mono item graph".
//!
//! - The mono item graph for a program contains all mono items
//! that are needed in order to produce the complete LLVM IR of the program.
//!
//! The purpose of the algorithm implemented in this module is to build the
//! mono item graph for the current crate. It runs in two phases:
//!
//! 1. Discover the roots of the graph by traversing the HIR of the crate.
//! 2. Starting from the roots, find neighboring nodes by inspecting the MIR
//! representation of the item corresponding to a given node, until no more
//! new nodes are found.
//!
//! ### Discovering roots
//!
//! The roots of the mono item graph correspond to the non-generic
//! syntactic items in the source code. We find them by walking the HIR of the
//! crate, and whenever we hit upon a function, method, or static item, we
//! create a mono item consisting of the item's DefId and, since we only
//! consider non-generic items, an empty type-substitution set.
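//!
//! For example (an illustrative crate), `main` and `GLOBAL` below are roots,
//! while the generic `helper` is not; it only enters the graph in phase 2,
//! when the call in `main` instantiates `helper::<u32>`:
//!
//! ```rust
//! static GLOBAL: u32 = 0; // root: non-generic static
//!
//! fn helper<T>(x: T) -> T { x } // not a root: generic
//!
//! fn main() { // root: non-generic function
//!     let _ = helper(GLOBAL); // phase 2 discovers `helper::<u32>` here
//! }
//! ```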
//!
//! ### Finding neighbor nodes
//! Given a mono item node, we can discover neighbors by inspecting its
//! MIR. We walk the MIR and any time we hit upon something that signifies a
//! reference to another mono item, we have found a neighbor. Since the
//! mono item we are currently at is always monomorphic, we also know the
//! concrete type arguments of its neighbors, and so all neighbors again will be
//! monomorphic. The specific forms a reference to a neighboring node can take
//! in MIR are quite diverse. Here is an overview:
//!
//! #### Calling Functions/Methods
//! The most obvious form of one mono item referencing another is a
//! function or method call (represented by a CALL terminator in MIR). But
//! calls are not the only thing that might introduce a reference between two
//! function mono items, and as we will see below, they are just a
//! specialized of the form described next, and consequently will don't get any
//! special treatment in the algorithm.
//!
//! #### Taking a reference to a function or method
//! A function does not need to actually be called in order to be a neighbor of
//! another function. It suffices to just take a reference in order to introduce
//! an edge. Consider the following example:
//!
//! ```rust
//! fn print_val<T: Display>(x: T) {
//! println!("{}", x);
//! }
//!
//! fn call_fn(f: &Fn(i32), x: i32) {
//! f(x);
//! }
//!
//! fn main() {
//! let print_i32 = print_val::<i32>;
//! call_fn(&print_i32, 0);
//! }
//! ```
//! The MIR of none of these functions will contain an explicit call to
//! `print_val::<i32>`. Nonetheless, in order to monomorphize this program, we need
//! an instance of this function. Thus, whenever we encounter a function or
//! method in operand position, we treat it as a neighbor of the current
//! mono item. Calls are just a special case of that.
//!
//! #### Closures
//! In a way, closures are a simple case. Since every closure object needs to be
//! constructed somewhere, we can reliably discover them by observing
//! `Rvalue::Aggregate` expressions with `AggregateKind::Closure`. This is also
//! true for closures inlined from other crates.
//!
//! #### Drop glue
//! Drop glue mono items are introduced by MIR drop-statements. The
//! generated mono item will again have drop-glue item neighbors if the
//! type to be dropped contains nested values that also need to be dropped. It
//! might also have a function item neighbor for the explicit `Drop::drop`
//! implementation of its type.
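//!
//! A small illustration (made-up types): dropping a `Holder` needs the drop
//! glue for `Holder`, which references both the explicit `Drop::drop` impl
//! and, transitively, the drop glue for the contained `String`:
//!
//! ```rust
//! struct Holder { name: String }
//! impl Drop for Holder {
//!     fn drop(&mut self) { println!("dropping {}", self.name); }
//! }
//! fn main() { let _h = Holder { name: String::new() }; }
//! ```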
//!
//! #### Unsizing Casts
//! A subtle way of introducing neighbor edges is by casting to a trait object.
//! Since the resulting fat-pointer contains a reference to a vtable, we need to
//! instantiate all object-safe methods of the trait, as we need to store
//! pointers to these functions even if they never get called anywhere. This can
//! be seen as a special case of taking a function reference.
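//!
//! A minimal sketch of such a cast (names are made up):
//!
//! ```rust
//! trait Greet { fn hi(&self); }
//! struct Person;
//! impl Greet for Person { fn hi(&self) {} }
//!
//! fn main() {
//!     let p = Person;
//!     // This unsizing cast requires a vtable for the pair (Person, Greet),
//!     // and therefore an instantiation of `<Person as Greet>::hi`.
//!     let g: &Greet = &p;
//!     g.hi();
//! }
//! ```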
//!
//! #### Boxes
//! Since `Box` expressions have special compiler support, no explicit calls to
//! `exchange_malloc()` and `box_free()` may show up in MIR, even if the
//! compiler will generate them. We have to observe `Rvalue::Box` expressions
//! and Box-typed drop-statements for that purpose.
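//!
//! For instance (illustrative; the exact lowering is an implementation
//! detail), a single allocation pulls in both of those neighbors:
//!
//! ```rust
//! fn main() {
//!     // Allocating exercises `exchange_malloc`; dropping at scope end
//!     // requires the drop glue for `Box<i32>`.
//!     let _b = Box::new(0i32);
//! }
//! ```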
//!
//!
//! Interaction with Cross-Crate Inlining
//! -------------------------------------
//! The binary of a crate will not only contain machine code for the items
//! defined in the source code of that crate. It will also contain monomorphic
//! instantiations of any extern generic functions and of functions marked with
//! `#[inline]`.
//! The collection algorithm handles this more or less transparently. If it is
//! about to create a mono item for something with an external `DefId`,
//! it will check whether the MIR for that item is available, and if so just
//! proceed normally. If the MIR is not available, it assumes that the item is
//! just linked to and no node is created; which is exactly what we want, since
//! no machine code should be generated in the current crate for such an item.
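//!
//! For example (illustrative crate layout), if an upstream crate exports
//!
//! ```rust,ignore
//! #[inline]
//! pub fn tiny() {}
//! ```
//!
//! then a downstream crate calling `tiny()` will codegen its own local copy
//! of the function, because the MIR for `tiny` is available across the crate
//! boundary.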
//!
//! Eager and Lazy Collection Mode
//! ------------------------------
//! Mono item collection can be performed in one of two modes:
//!
//! - Lazy mode means that items will only be instantiated when actually
//! referenced. The goal is to produce the least amount of machine code
//! possible.
//!
//! - Eager mode is meant to be used in conjunction with incremental compilation
//! where a stable set of mono items is more important than a minimal
//! one. Thus, eager mode will instantiate drop-glue for every drop-able type
//! in the crate, even if no drop call for that type exists (yet). It will
//! also instantiate default implementations of trait methods, something that
//! otherwise is only done on demand.
//!
//!
//! Open Issues
//! -----------
//! Some things are not yet fully implemented in the current version of this
//! module.
//!
//! ### Const Fns
//! Ideally, no mono item should be generated for const fns unless there
//! is a call to them that cannot be evaluated at compile time. At the moment
//! this is not implemented however: a mono item will be produced
//! regardless of whether it is actually needed or not.
use rustc::hir::{self, CodegenFnAttrFlags};
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::mir::interpret::{AllocId, ConstValue};
use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem};
use rustc::ty::subst::Substs;
use rustc::ty::{self, TypeFoldable, Ty, TyCtxt, GenericParamDefKind};
use rustc::ty::adjustment::CustomCoerceUnsized;
use rustc::session::config::EntryFnType;
use rustc::mir::{self, Location, Promoted};
use rustc::mir::visit::Visitor as MirVisitor;
use rustc::mir::mono::MonoItem;
use rustc::mir::interpret::{Scalar, GlobalId, AllocKind, ErrorHandled};
use crate::monomorphize::{self, Instance};
use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
use rustc::util::common::time;
use crate::monomorphize::item::{MonoItemExt, DefPathBasedNames, InstantiationMode};
use rustc_data_structures::bit_set::GrowableBitSet;
use rustc_data_structures::sync::{MTRef, MTLock, ParallelIterator, par_iter};
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
pub enum MonoItemCollectionMode {
Eager,
Lazy
}
/// Maps every mono item to all mono items it references in its
/// body.
pub struct InliningMap<'tcx> {
// Maps a source mono item to the range of mono items
// accessed by it.
// The two numbers in the tuple are the start (inclusive) and
    // end index (exclusive) within the `targets` vec.
index: FxHashMap<MonoItem<'tcx>, (usize, usize)>,
targets: Vec<MonoItem<'tcx>>,
// Contains one bit per mono item in the `targets` field. That bit
// is true if that mono item needs to be inlined into every CGU.
inlines: GrowableBitSet<usize>,
}
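// An illustrative layout (names made up): after recording that mono item `A`
// accesses `[X, Y]` and mono item `B` accesses `[Z]`, we would have
// `targets == [X, Y, Z]`, `index[A] == (0, 2)` and `index[B] == (2, 3)`, with
// one bit in `inlines` per target that must be inlined into its user's CGU.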
impl<'tcx> InliningMap<'tcx> {
fn new() -> InliningMap<'tcx> {
InliningMap {
index: FxHashMap::default(),
targets: Vec::new(),
inlines: GrowableBitSet::with_capacity(1024),
}
}
fn record_accesses<I>(&mut self,
source: MonoItem<'tcx>,
new_targets: I)
where I: Iterator<Item=(MonoItem<'tcx>, bool)> + ExactSizeIterator
{
assert!(!self.index.contains_key(&source));
let start_index = self.targets.len();
let new_items_count = new_targets.len();
let new_items_count_total = new_items_count + self.targets.len();
self.targets.reserve(new_items_count);
self.inlines.ensure(new_items_count_total);
for (i, (target, inline)) in new_targets.enumerate() {
self.targets.push(target);
if inline {
self.inlines.insert(i + start_index);
}
}
let end_index = self.targets.len();
self.index.insert(source, (start_index, end_index));
}
// Internally iterate over all items referenced by `source` which will be
// made available for inlining.
pub fn with_inlining_candidates<F>(&self, source: MonoItem<'tcx>, mut f: F)
where F: FnMut(MonoItem<'tcx>)
{
if let Some(&(start_index, end_index)) = self.index.get(&source) {
for (i, candidate) in self.targets[start_index .. end_index]
.iter()
.enumerate() {
if self.inlines.contains(start_index + i) {
f(*candidate);
}
}
}
}
// Internally iterate over all items and the things each accesses.
pub fn iter_accesses<F>(&self, mut f: F)
where F: FnMut(MonoItem<'tcx>, &[MonoItem<'tcx>])
{
for (&accessor, &(start_index, end_index)) in &self.index {
f(accessor, &self.targets[start_index .. end_index])
}
}
}
pub fn collect_crate_mono_items<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: MonoItemCollectionMode)
-> (FxHashSet<MonoItem<'tcx>>,
InliningMap<'tcx>) {
let roots = time(tcx.sess, "collecting roots", || {
collect_roots(tcx, mode)
});
debug!("Building mono item graph, beginning at roots");
let mut visited = MTLock::new(FxHashSet::default());
let mut inlining_map = MTLock::new(InliningMap::new());
{
let visited: MTRef<'_, _> = &mut visited;
let inlining_map: MTRef<'_, _> = &mut inlining_map;
time(tcx.sess, "collecting mono items", || {
par_iter(roots).for_each(|root| {
let mut recursion_depths = DefIdMap::default();
collect_items_rec(tcx,
root,
visited,
&mut recursion_depths,
inlining_map);
});
});
}
(visited.into_inner(), inlining_map.into_inner())
}
// Find all non-generic items by walking the HIR. These items serve as roots to
// start monomorphizing from.
fn collect_roots<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: MonoItemCollectionMode)
-> Vec<MonoItem<'tcx>> {
debug!("Collecting roots");
let mut roots = Vec::new();
{
let entry_fn = tcx.entry_fn(LOCAL_CRATE);
debug!("collect_roots: entry_fn = {:?}", entry_fn);
let mut visitor = RootCollector {
tcx,
mode,
entry_fn,
output: &mut roots,
};
tcx.hir().krate().visit_all_item_likes(&mut visitor);
visitor.push_extra_entry_roots();
}
// We can only codegen items that are instantiable - items all of
// whose predicates hold. Luckily, items that aren't instantiable
// can't actually be used, so we can just skip codegenning them.
roots.retain(|root| root.is_instantiable(tcx));
roots
}
// Collect all monomorphized items reachable from `starting_point`
fn collect_items_rec<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
starting_point: MonoItem<'tcx>,
visited: MTRef<'_, MTLock<FxHashSet<MonoItem<'tcx>>>>,
recursion_depths: &mut DefIdMap<usize>,
inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>) {
if !visited.lock_mut().insert(starting_point.clone()) {
// We've been here already, no need to search again.
return;
}
debug!("BEGIN collect_items_rec({})", starting_point.to_string(tcx, true));
let mut neighbors = Vec::new();
let recursion_depth_reset;
match starting_point {
MonoItem::Static(def_id) => {
let instance = Instance::mono(tcx, def_id);
// Sanity check whether this ended up being collected accidentally
debug_assert!(should_monomorphize_locally(tcx, &instance));
let ty = instance.ty(tcx);
visit_drop_use(tcx, ty, true, &mut neighbors);
recursion_depth_reset = None;
let cid = GlobalId {
instance,
promoted: None,
};
let param_env = ty::ParamEnv::reveal_all();
if let Ok(val) = tcx.const_eval(param_env.and(cid)) {
collect_const(tcx, val, &mut neighbors);
}
}
MonoItem::Fn(instance) => {
// Sanity check whether this ended up being collected accidentally
debug_assert!(should_monomorphize_locally(tcx, &instance));
// Keep track of the monomorphization recursion depth
recursion_depth_reset = Some(check_recursion_limit(tcx,
instance,
recursion_depths));
check_type_length_limit(tcx, instance);
collect_neighbours(tcx, instance, &mut neighbors);
}
MonoItem::GlobalAsm(..) => {
recursion_depth_reset = None;
}
}
record_accesses(tcx, starting_point, &neighbors[..], inlining_map);
for neighbour in neighbors {
collect_items_rec(tcx, neighbour, visited, recursion_depths, inlining_map);
}
if let Some((def_id, depth)) = recursion_depth_reset {
recursion_depths.insert(def_id, depth);
}
debug!("END collect_items_rec({})", starting_point.to_string(tcx, true));
}
fn record_accesses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
caller: MonoItem<'tcx>,
callees: &[MonoItem<'tcx>],
inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>) {
let is_inlining_candidate = |mono_item: &MonoItem<'tcx>| {
mono_item.instantiation_mode(tcx) == InstantiationMode::LocalCopy
};
let accesses = callees.into_iter()
.map(|mono_item| {
(*mono_item, is_inlining_candidate(mono_item))
});
inlining_map.lock_mut().record_accesses(caller, accesses);
}
fn check_recursion_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: Instance<'tcx>,
recursion_depths: &mut DefIdMap<usize>)
-> (DefId, usize) {
let def_id = instance.def_id();
let recursion_depth = recursion_depths.get(&def_id).cloned().unwrap_or(0);
debug!(" => recursion depth={}", recursion_depth);
let recursion_depth = if Some(def_id) == tcx.lang_items().drop_in_place_fn() {
// HACK: drop_in_place creates tight monomorphization loops. Give
// it more margin.
recursion_depth / 4
} else {
recursion_depth
};
// Code that needs to instantiate the same function recursively
// more than the recursion limit is assumed to be causing an
// infinite expansion.
if recursion_depth > *tcx.sess.recursion_limit.get() {
let error = format!("reached the recursion limit while instantiating `{}`",
instance);
if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
tcx.sess.span_fatal(tcx.hir().span_by_hir_id(hir_id), &error);
} else {
tcx.sess.fatal(&error);
}
}
recursion_depths.insert(def_id, recursion_depth + 1);
(def_id, recursion_depth)
}
fn check_type_length_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: Instance<'tcx>)
{
let type_length = instance.substs.types().flat_map(|ty| ty.walk()).count();
debug!(" => type length={}", type_length);
// Rust code can easily create exponentially-long types using only a
// polynomial recursion depth. Even with the default recursion
// depth, you can easily get cases that take >2^60 steps to run,
// which means that rustc basically hangs.
//
// Bail out in these cases to avoid that bad user experience.
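    //
    // An illustrative sketch of how such types arise (this function is
    // rejected for exactly this reason): every recursive call doubles the
    // instantiated type, so `rec::<T>` leads to `rec::<(T, T)>`, then
    // `rec::<((T, T), (T, T))>`, and so on, exponentially:
    //
    //     fn rec<T>(x: T) { rec((x, x)) }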
let type_length_limit = *tcx.sess.type_length_limit.get();
if type_length > type_length_limit {
// The instance name is already known to be too long for rustc. Use
// `{:.64}` to avoid blasting the user's terminal with thousands of
// lines of type-name.
let instance_name = instance.to_string();
let msg = format!("reached the type-length limit while instantiating `{:.64}...`",
instance_name);
let mut diag = if let Some(hir_id) = tcx.hir().as_local_hir_id(instance.def_id()) {
tcx.sess.struct_span_fatal(tcx.hir().span_by_hir_id(hir_id), &msg)
} else {
tcx.sess.struct_fatal(&msg)
};
diag.note(&format!(
"consider adding a `#![type_length_limit=\"{}\"]` attribute to your crate",
type_length_limit*2));
diag.emit();
tcx.sess.abort_if_errors();
}
}
struct MirNeighborCollector<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a mir::Mir<'tcx>,
output: &'a mut Vec<MonoItem<'tcx>>,
param_substs: &'tcx Substs<'tcx>,
}
impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
debug!("visiting rvalue {:?}", *rvalue);
match *rvalue {
            // When doing a cast from a regular pointer to a fat pointer, we
// have to instantiate all methods of the trait being cast to, so we
// can build the appropriate vtable.
mir::Rvalue::Cast(mir::CastKind::Unsize, ref operand, target_ty) => {
let target_ty = self.tcx.subst_and_normalize_erasing_regions(
self.param_substs,
ty::ParamEnv::reveal_all(),
&target_ty,
);
let source_ty = operand.ty(self.mir, self.tcx);
let source_ty = self.tcx.subst_and_normalize_erasing_regions(
self.param_substs,
ty::ParamEnv::reveal_all(),
&source_ty,
);
let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.tcx,
source_ty,
target_ty);
// This could also be a different Unsize instruction, like
                // from a fixed-size array to a slice. But we are only
// interested in things that produce a vtable.
if target_ty.is_trait() && !source_ty.is_trait() {
create_mono_items_for_vtable_methods(self.tcx,
target_ty,
source_ty,
self.output);
}
}
mir::Rvalue::Cast(mir::CastKind::ReifyFnPointer, ref operand, _) => {
let fn_ty = operand.ty(self.mir, self.tcx);
let fn_ty = self.tcx.subst_and_normalize_erasing_regions(
self.param_substs,
ty::ParamEnv::reveal_all(),
&fn_ty,
);
visit_fn_use(self.tcx, fn_ty, false, &mut self.output);
}
mir::Rvalue::Cast(mir::CastKind::ClosureFnPointer, ref operand, _) => {
let source_ty = operand.ty(self.mir, self.tcx);
let source_ty = self.tcx.subst_and_normalize_erasing_regions(
self.param_substs,
ty::ParamEnv::reveal_all(),
&source_ty,
);
match source_ty.sty {
ty::Closure(def_id, substs) => {
let instance = monomorphize::resolve_closure(
self.tcx, def_id, substs, ty::ClosureKind::FnOnce);
if should_monomorphize_locally(self.tcx, &instance) {
self.output.push(create_fn_mono_item(instance));
}
}
_ => bug!(),
}
}
mir::Rvalue::NullaryOp(mir::NullOp::Box, _) => {
let tcx = self.tcx;
let exchange_malloc_fn_def_id = tcx
.lang_items()
.require(ExchangeMallocFnLangItem)
.unwrap_or_else(|e| tcx.sess.fatal(&e));
let instance = Instance::mono(tcx, exchange_malloc_fn_def_id);
if should_monomorphize_locally(tcx, &instance) {
self.output.push(create_fn_mono_item(instance));
}
}
_ => { /* not interesting */ }
}
self.super_rvalue(rvalue, location);
}
fn visit_const(&mut self, constant: &&'tcx ty::LazyConst<'tcx>, location: Location) {
debug!("visiting const {:?} @ {:?}", *constant, location);
collect_lazy_const(self.tcx, constant, self.param_substs, self.output);
self.super_const(constant);
}
fn visit_terminator_kind(&mut self,
block: mir::BasicBlock,
kind: &mir::TerminatorKind<'tcx>,
location: Location) {
debug!("visiting terminator {:?} @ {:?}", kind, location);
let tcx = self.tcx;
match *kind {
mir::TerminatorKind::Call { ref func, .. } => {
let callee_ty = func.ty(self.mir, tcx);
let callee_ty = tcx.subst_and_normalize_erasing_regions(
self.param_substs,
ty::ParamEnv::reveal_all(),
&callee_ty,
);
visit_fn_use(self.tcx, callee_ty, true, &mut self.output);
}
mir::TerminatorKind::Drop { ref location, .. } |
mir::TerminatorKind::DropAndReplace { ref location, .. } => {
let ty = location.ty(self.mir, self.tcx)
.to_ty(self.tcx);
let ty = tcx.subst_and_normalize_erasing_regions(
self.param_substs,
ty::ParamEnv::reveal_all(),
&ty,
);
visit_drop_use(self.tcx, ty, true, self.output);
}
mir::TerminatorKind::Goto { .. } |
mir::TerminatorKind::SwitchInt { .. } |
mir::TerminatorKind::Resume |
mir::TerminatorKind::Abort |
mir::TerminatorKind::Return |
mir::TerminatorKind::Unreachable |
mir::TerminatorKind::Assert { .. } => {}
mir::TerminatorKind::GeneratorDrop |
mir::TerminatorKind::Yield { .. } |
mir::TerminatorKind::FalseEdges { .. } |
mir::TerminatorKind::FalseUnwind { .. } => bug!(),
}
self.super_terminator_kind(block, kind, location);
}
fn visit_static(&mut self,
static_: &mir::Static<'tcx>,
context: mir::visit::PlaceContext<'tcx>,
location: Location) {
debug!("visiting static {:?} @ {:?}", static_.def_id, location);
let tcx = self.tcx;
let instance = Instance::mono(tcx, static_.def_id);
if should_monomorphize_locally(tcx, &instance) {
self.output.push(MonoItem::Static(static_.def_id));
}
self.super_static(static_, context, location);
}
}
fn visit_drop_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>,
is_direct_call: bool,
output: &mut Vec<MonoItem<'tcx>>)
{
let instance = monomorphize::resolve_drop_in_place(tcx, ty);
visit_instance_use(tcx, instance, is_direct_call, output);
}
fn visit_fn_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>,
is_direct_call: bool,
output: &mut Vec<MonoItem<'tcx>>)
{
if let ty::FnDef(def_id, substs) = ty.sty {
let instance = ty::Instance::resolve(tcx,
ty::ParamEnv::reveal_all(),
def_id,
substs).unwrap();
visit_instance_use(tcx, instance, is_direct_call, output);
}
}
fn visit_instance_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: ty::Instance<'tcx>,
is_direct_call: bool,
output: &mut Vec<MonoItem<'tcx>>)
{
debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call);
if !should_monomorphize_locally(tcx, &instance) {
return
}
match instance.def {
ty::InstanceDef::Intrinsic(def_id) => {
if !is_direct_call {
bug!("intrinsic {:?} being reified", def_id);
}
}
ty::InstanceDef::VtableShim(..) |
ty::InstanceDef::Virtual(..) |
ty::InstanceDef::DropGlue(_, None) => {
// don't need to emit shim if we are calling directly.
if !is_direct_call {
output.push(create_fn_mono_item(instance));
}
}
ty::InstanceDef::DropGlue(_, Some(_)) => {
output.push(create_fn_mono_item(instance));
}
ty::InstanceDef::ClosureOnceShim { .. } |
ty::InstanceDef::Item(..) |
ty::InstanceDef::FnPtrShim(..) |
ty::InstanceDef::CloneShim(..) => {
output.push(create_fn_mono_item(instance));
}
}
}
// Returns true if we should codegen an instance in the local crate.
// Returns false if we can just link to the upstream crate and therefore don't
// need a mono item.
fn should_monomorphize_locally<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: &Instance<'tcx>)
-> bool {
let def_id = match instance.def {
ty::InstanceDef::Item(def_id) => def_id,
ty::InstanceDef::VtableShim(..) |
ty::InstanceDef::ClosureOnceShim { .. } |
ty::InstanceDef::Virtual(..) |
ty::InstanceDef::FnPtrShim(..) |
ty::InstanceDef::DropGlue(..) |
ty::InstanceDef::Intrinsic(_) |
ty::InstanceDef::CloneShim(..) => return true
};
if tcx.is_foreign_item(def_id) {
// We can always link to foreign items
return false;
}
if def_id.is_local() {
// local items cannot be referred to locally without monomorphizing them locally
return true;
}
if tcx.is_reachable_non_generic(def_id) ||
is_available_upstream_generic(tcx, def_id, instance.substs) {
// We can link to the item in question, no instance needed
// in this crate
return false;
}
if !tcx.is_mir_available(def_id) {
bug!("Cannot create local mono-item for {:?}", def_id)
}
return true;
fn is_available_upstream_generic<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
substs: &'tcx Substs<'tcx>)
-> bool {
debug_assert!(!def_id.is_local());
// If we are not in share generics mode, we don't link to upstream
// monomorphizations but always instantiate our own internal versions
// instead.
if !tcx.sess.opts.share_generics() {
return false
}
// If this instance has no type parameters, it cannot be a shared
// monomorphization. Non-generic instances are already handled above
// by `is_reachable_non_generic()`
if substs.types().next().is_none() {
return false
}
// Take a look at the available monomorphizations listed in the metadata
// of upstream crates.
tcx.upstream_monomorphizations_for(def_id)
.map(|set| set.contains_key(substs))
.unwrap_or(false)
}
}
/// For a given pair of source and target types that occur in an unsizing coercion,
/// this function finds the pair of types that determines the vtable linking
/// them.
///
/// For example, the source type might be `&SomeStruct` and the target type
/// might be `&SomeTrait` in a cast like:
///
/// let src: &SomeStruct = ...;
/// let target = src as &SomeTrait;
///
/// Then the output of this function would be (SomeStruct, SomeTrait) since for
/// constructing the `target` fat-pointer we need the vtable for that pair.
///
/// Things can get more complicated though because there's also the case where
/// the unsized type occurs as a field:
///
/// ```rust
/// struct ComplexStruct<T: ?Sized> {
/// a: u32,
/// b: f64,
/// c: T
/// }
/// ```
///
/// In this case, if `T` is sized, `&ComplexStruct<T>` is a thin pointer. If `T`
/// is unsized, `&SomeStruct` is a fat pointer, and the vtable it points to is
/// for the pair of `T` (which is a trait) and the concrete type that `T` was
/// originally coerced from:
///
/// let src: &ComplexStruct<SomeStruct> = ...;
/// let target = src as &ComplexStruct<SomeTrait>;
///
/// Again, we want this `find_vtable_types_for_unsizing()` to provide the pair
/// `(SomeStruct, SomeTrait)`.
///
/// Finally, there is also the case of custom unsizing coercions, e.g., for
/// smart pointers such as `Rc` and `Arc`.
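///
/// A hedged sketch of that last case (assuming the standard library's
/// unsizing support for `Rc`):
///
/// let src: Rc<SomeStruct> = ...;
/// let target: Rc<SomeTrait> = src;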
fn find_vtable_types_for_unsizing<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
source_ty: Ty<'tcx>,
target_ty: Ty<'tcx>)
-> (Ty<'tcx>, Ty<'tcx>) {
let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
let type_has_metadata = |ty: Ty<'tcx>| -> bool {
use syntax_pos::DUMMY_SP;
if ty.is_sized(tcx.at(DUMMY_SP), ty::ParamEnv::reveal_all()) {
return false;
}
let tail = tcx.struct_tail(ty);
match tail.sty {
ty::Foreign(..) => false,
ty::Str | ty::Slice(..) | ty::Dynamic(..) => true,
_ => bug!("unexpected unsized tail: {:?}", tail.sty),
}
};
if type_has_metadata(inner_source) {
(inner_source, inner_target)
} else {
tcx.struct_lockstep_tails(inner_source, inner_target)
}
};
match (&source_ty.sty, &target_ty.sty) {
(&ty::Ref(_, a, _),
&ty::Ref(_, b, _)) |
(&ty::Ref(_, a, _),
&ty::RawPtr(ty::TypeAndMut { ty: b, .. })) |
(&ty::RawPtr(ty::TypeAndMut { ty: a, .. }),
&ty::RawPtr(ty::TypeAndMut { ty: b, .. })) => {
ptr_vtable(a, b)
}
(&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
ptr_vtable(source_ty.boxed_ty(), target_ty.boxed_ty())
}
(&ty::Adt(source_adt_def, source_substs),
&ty::Adt(target_adt_def, target_substs)) => {
assert_eq!(source_adt_def, target_adt_def);
let kind =
monomorphize::custom_coerce_unsize_info(tcx, source_ty, target_ty);
let coerce_index = match kind {
CustomCoerceUnsized::Struct(i) => i
};
let source_fields = &source_adt_def.non_enum_variant().fields;
let target_fields = &target_adt_def.non_enum_variant().fields;
assert!(coerce_index < source_fields.len() &&
source_fields.len() == target_fields.len());
find_vtable_types_for_unsizing(tcx,
source_fields[coerce_index].ty(tcx,
source_substs),
target_fields[coerce_index].ty(tcx,
target_substs))
}
_ => bug!("find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}",
source_ty,
target_ty)
}
}
fn create_fn_mono_item<'a, 'tcx>(instance: Instance<'tcx>) -> MonoItem<'tcx> {
debug!("create_fn_mono_item(instance={})", instance);
MonoItem::Fn(instance)
}
/// Creates a `MonoItem` for each method that is referenced by the vtable for
/// the given trait/impl pair.
fn create_mono_items_for_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_ty: Ty<'tcx>,
impl_ty: Ty<'tcx>,
output: &mut Vec<MonoItem<'tcx>>) {
assert!(!trait_ty.needs_subst() && !trait_ty.has_escaping_bound_vars() &&
!impl_ty.needs_subst() && !impl_ty.has_escaping_bound_vars());
if let ty::Dynamic(ref trait_ty, ..) = trait_ty.sty {
if let Some(principal) = trait_ty.principal() {
let poly_trait_ref = principal.with_self_ty(tcx, impl_ty);
assert!(!poly_trait_ref.has_escaping_bound_vars());
// Walk all methods of the trait, including those of its supertraits
let methods = tcx.vtable_methods(poly_trait_ref);
let methods = methods.iter().cloned().filter_map(|method| method)
.map(|(def_id, substs)| ty::Instance::resolve_for_vtable(
tcx,
ty::ParamEnv::reveal_all(),
def_id,
substs).unwrap())
.filter(|&instance| should_monomorphize_locally(tcx, &instance))
.map(|instance| create_fn_mono_item(instance));
output.extend(methods);
}
// Also add the destructor
visit_drop_use(tcx, impl_ty, false, output);
}
}
//=-----------------------------------------------------------------------------
// Root Collection
//=-----------------------------------------------------------------------------
struct RootCollector<'b, 'a: 'b, 'tcx: 'a + 'b> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: MonoItemCollectionMode,
output: &'b mut Vec<MonoItem<'tcx>>,
entry_fn: Option<(DefId, EntryFnType)>,
}
impl<'b, 'a, 'v> ItemLikeVisitor<'v> for RootCollector<'b, 'a, 'v> {
fn visit_item(&mut self, item: &'v hir::Item) {
match item.node {
hir::ItemKind::ExternCrate(..) |
hir::ItemKind::Use(..) |
hir::ItemKind::ForeignMod(..) |
hir::ItemKind::Ty(..) |
hir::ItemKind::Trait(..) |
hir::ItemKind::TraitAlias(..) |
hir::ItemKind::Existential(..) |
hir::ItemKind::Mod(..) => {
// Nothing to do, just keep recursing...
}
hir::ItemKind::Impl(..) => {
if self.mode == MonoItemCollectionMode::Eager {
create_mono_items_for_default_impls(self.tcx,
item,
self.output);
}
}
hir::ItemKind::Enum(_, ref generics) |
hir::ItemKind::Struct(_, ref generics) |
hir::ItemKind::Union(_, ref generics) => {
if generics.params.is_empty() {
if self.mode == MonoItemCollectionMode::Eager {
let def_id = self.tcx.hir().local_def_id(item.id);
debug!("RootCollector: ADT drop-glue for {}",
def_id_to_string(self.tcx, def_id));
let ty = Instance::new(def_id, Substs::empty()).ty(self.tcx);
visit_drop_use(self.tcx, ty, true, self.output);
}
}
}
hir::ItemKind::GlobalAsm(..) => {
debug!("RootCollector: ItemKind::GlobalAsm({})",
def_id_to_string(self.tcx,
self.tcx.hir().local_def_id(item.id)));
self.output.push(MonoItem::GlobalAsm(item.id));
}
hir::ItemKind::Static(..) => {
let def_id = self.tcx.hir().local_def_id(item.id);
debug!("RootCollector: ItemKind::Static({})",
def_id_to_string(self.tcx, def_id));
self.output.push(MonoItem::Static(def_id));
}
hir::ItemKind::Const(..) => {
// const items only generate mono items if they are
                // actually used somewhere. Just declaring them is insufficient.
                // However, even just declaring them requires collecting the
                // items they refer to.
let def_id = self.tcx.hir().local_def_id(item.id);
let instance = Instance::mono(self.tcx, def_id);
let cid = GlobalId {
instance,
promoted: None,
};
let param_env = ty::ParamEnv::reveal_all();
if let Ok(val) = self.tcx.const_eval(param_env.and(cid)) {
collect_const(self.tcx, val, &mut self.output);
}
}
hir::ItemKind::Fn(..) => {
let def_id = self.tcx.hir().local_def_id(item.id);
self.push_if_root(def_id);
}
}
}
fn visit_trait_item(&mut self, _: &'v hir::TraitItem) {
// Even if there's a default body with no explicit generics,
// it's still generic over some `Self: Trait`, so not a root.
}
fn visit_impl_item(&mut self, ii: &'v hir::ImplItem) {
match ii.node {
hir::ImplItemKind::Method(hir::MethodSig { .. }, _) => {
let def_id = self.tcx.hir().local_def_id(ii.id);
self.push_if_root(def_id);
}
_ => { /* Nothing to do here */ }
}
}
}
impl<'b, 'a, 'v> RootCollector<'b, 'a, 'v> {
fn is_root(&self, def_id: DefId) -> bool {
!item_has_type_parameters(self.tcx, def_id) && match self.mode {
MonoItemCollectionMode::Eager => {
true
}
MonoItemCollectionMode::Lazy => {
self.entry_fn.map(|(id, _)| id) == Some(def_id) ||
self.tcx.is_reachable_non_generic(def_id) ||
self.tcx.codegen_fn_attrs(def_id).flags.contains(
CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
}
}
}
/// If `def_id` represents a root, then push it onto the list of
/// outputs. (Note that all roots must be monomorphic.)
fn push_if_root(&mut self, def_id: DefId) {
if self.is_root(def_id) {
debug!("RootCollector::push_if_root: found root def_id={:?}", def_id);
let instance = Instance::mono(self.tcx, def_id);
self.output.push(create_fn_mono_item(instance));
}
}
/// As a special case, when/if we encounter the
/// `main()` function, we also have to generate a
/// monomorphized copy of the start lang item based on
/// the return type of `main`. This is not needed when
/// the user writes their own `start` manually.
fn push_extra_entry_roots(&mut self) {
let main_def_id = match self.entry_fn {
Some((def_id, EntryFnType::Main)) => def_id,
_ => return,
};
let start_def_id = match self.tcx.lang_items().require(StartFnLangItem) {
Ok(s) => s,
Err(err) => self.tcx.sess.fatal(&err),
};
let main_ret_ty = self.tcx.fn_sig(main_def_id).output();
// Given that `main()` has no arguments,
// then its return type cannot have
// late-bound regions, since late-bound
// regions must appear in the argument
// listing.
let main_ret_ty = self.tcx.erase_regions(
&main_ret_ty.no_bound_vars().unwrap(),
);
let start_instance = Instance::resolve(
self.tcx,
ty::ParamEnv::reveal_all(),
start_def_id,
self.tcx.intern_substs(&[main_ret_ty.into()])
).unwrap();
self.output.push(create_fn_mono_item(start_instance));
}
}
fn item_has_type_parameters<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
let generics = tcx.generics_of(def_id);
generics.requires_monomorphization(tcx)
}
fn create_mono_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
item: &'tcx hir::Item,
output: &mut Vec<MonoItem<'tcx>>) {
match item.node {
hir::ItemKind::Impl(_, _, _, ref generics, .., ref impl_item_refs) => {
for param in &generics.params {
match param.kind {
hir::GenericParamKind::Lifetime { .. } => {}
hir::GenericParamKind::Type { .. } |
hir::GenericParamKind::Const { .. } => {
return
}
}
}
let impl_def_id = tcx.hir().local_def_id(item.id);
debug!("create_mono_items_for_default_impls(item={})",
def_id_to_string(tcx, impl_def_id));
if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) {
let overridden_methods: FxHashSet<_> =
impl_item_refs.iter()
.map(|iiref| iiref.ident.modern())
.collect();
for method in tcx.provided_trait_methods(trait_ref.def_id) {
if overridden_methods.contains(&method.ident.modern()) {
continue;
}
if tcx.generics_of(method.def_id).own_counts().types != 0 {
continue;
}
let substs = Substs::for_item(tcx, method.def_id, |param, _| {
match param.kind {
GenericParamDefKind::Lifetime => tcx.types.re_erased.into(),
GenericParamDefKind::Type {..} => {
trait_ref.substs[param.index as usize]
}
}
});
let instance = ty::Instance::resolve(tcx,
ty::ParamEnv::reveal_all(),
method.def_id,
substs).unwrap();
let mono_item = create_fn_mono_item(instance);
if mono_item.is_instantiable(tcx)
&& should_monomorphize_locally(tcx, &instance) {
output.push(mono_item);
}
}
}
}
_ => {
bug!()
}
}
}
/// Scan the miri alloc in order to find function calls, closures, and drop-glue
fn collect_miri<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
alloc_id: AllocId,
output: &mut Vec<MonoItem<'tcx>>,
) {
let alloc_kind = tcx.alloc_map.lock().get(alloc_id);
match alloc_kind {
Some(AllocKind::Static(did)) => {
let instance = Instance::mono(tcx, did);
if should_monomorphize_locally(tcx, &instance) {
trace!("collecting static {:?}", did);
output.push(MonoItem::Static(did));
}
}
Some(AllocKind::Memory(alloc)) => {
trace!("collecting {:?} with {:#?}", alloc_id, alloc);
for &((), inner) in alloc.relocations.values() {
collect_miri(tcx, inner, output);
}
},
Some(AllocKind::Function(fn_instance)) => {
if should_monomorphize_locally(tcx, &fn_instance) {
trace!("collecting {:?} with {:#?}", alloc_id, fn_instance);
output.push(create_fn_mono_item(fn_instance));
}
}
None => bug!("alloc id without corresponding allocation: {}", alloc_id),
}
}
/// Scan the MIR in order to find function calls, closures, and drop-glue
fn collect_neighbours<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: Instance<'tcx>,
output: &mut Vec<MonoItem<'tcx>>)
{
let mir = tcx.instance_mir(instance.def);
MirNeighborCollector {
tcx,
mir: &mir,
output,
param_substs: instance.substs,
}.visit_mir(&mir);
let param_env = ty::ParamEnv::reveal_all();
for i in 0..mir.promoted.len() {
use rustc_data_structures::indexed_vec::Idx;
let i = Promoted::new(i);
let cid = GlobalId {
instance,
promoted: Some(i),
};
match tcx.const_eval(param_env.and(cid)) {
Ok(val) => collect_const(tcx, val, output),
Err(ErrorHandled::Reported) => {},
Err(ErrorHandled::TooGeneric) => span_bug!(
mir.promoted[i].span, "collection encountered polymorphic constant",
),
}
}
}
fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> String {
let mut output = String::new();
let printer = DefPathBasedNames::new(tcx, false, false);
printer.push_def_path(def_id, &mut output);
output
}
fn collect_lazy_const<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
constant: &ty::LazyConst<'tcx>,
param_substs: &'tcx Substs<'tcx>,
output: &mut Vec<MonoItem<'tcx>>,
) {
let (def_id, substs) = match *constant {
ty::LazyConst::Evaluated(c) => return collect_const(tcx, c, output),
ty::LazyConst::Unevaluated(did, substs) => (did, substs),
};
let param_env = ty::ParamEnv::reveal_all();
let substs = tcx.subst_and_normalize_erasing_regions(
param_substs,
param_env,
&substs,
);
let instance = ty::Instance::resolve(tcx,
param_env,
def_id,
substs).unwrap();
let cid = GlobalId {
instance,
promoted: None,
};
match tcx.const_eval(param_env.and(cid)) {
Ok(val) => collect_const(tcx, val, output),
Err(ErrorHandled::Reported) => {},
Err(ErrorHandled::TooGeneric) => span_bug!(
tcx.def_span(def_id), "collection encountered polymorphic constant",
),
}
}
fn collect_const<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
constant: ty::Const<'tcx>,
output: &mut Vec<MonoItem<'tcx>>,
) {
debug!("visiting const {:?}", constant);
match constant.val {
ConstValue::Slice(Scalar::Ptr(ptr), _) |
ConstValue::Scalar(Scalar::Ptr(ptr)) =>
collect_miri(tcx, ptr.alloc_id, output),
ConstValue::ByRef(_id, alloc, _offset) => {
for &((), id) in alloc.relocations.values() {
collect_miri(tcx, id, output);
}
}
_ => {},
}
}
| 39.741935 | 96 | 0.553987 |
9cf11e4924c8c4ac826671ee6bf0a9774561fd24 | 1,719 | use std::io;
use std::path::PathBuf;
use std::thread::sleep;
use std::time::Duration;
use pueue_lib::log::{get_log_file_handles, get_log_paths};
/// Follow the log output of a running task.
///
/// If no task is specified, this will check for the following cases:
///
/// - No running task: Print an error that there are no running tasks
/// - Single running task: Follow the output of that task
/// - Multiple running tasks: Print out the list of possible tasks to follow.
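///
/// A hedged usage sketch (the directory path is made up):
///
/// ```ignore
/// follow_local_task_logs(&PathBuf::from("/var/lib/pueue"), 0, false);
/// ```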
pub fn follow_local_task_logs(pueue_directory: &PathBuf, task_id: usize, stderr: bool) {
let (stdout_handle, stderr_handle) = match get_log_file_handles(task_id, &pueue_directory) {
Ok((stdout, stderr)) => (stdout, stderr),
Err(err) => {
println!("Failed to get log file handles: {}", err);
return;
}
};
let mut handle = if stderr { stderr_handle } else { stdout_handle };
let (out_path, err_path) = get_log_paths(task_id, &pueue_directory);
let handle_path = if stderr { err_path } else { out_path };
// Stdout handler to directly write log file output to io::stdout
// without having to load anything into memory.
let mut stdout = io::stdout();
loop {
// Check whether the file still exists. Exit if it doesn't.
if !handle_path.exists() {
println!("File has gone away. Did somebody remove the task?");
return;
}
// Read the next chunk of text from the last position.
if let Err(err) = io::copy(&mut handle, &mut stdout) {
println!("Error while reading file: {}", err);
return;
};
let timeout = Duration::from_millis(100);
sleep(timeout);
}
}
| 37.369565 | 96 | 0.634671 |
1eae4129ac1225ad88020134bac3215255b31d15 | 5,769 | /*!
`crypto_scalarmult_curve25519` specified in
[Cryptography in NaCl](http://nacl.cr.yp.to/valid.html), Sections 2, 3, and 4.
This function is conjectured to be strong. For background see Bernstein,
"Curve25519: new Diffie-Hellman speed records," Lecture Notes in Computer
Science 3958 (2006), 207–228, http://cr.yp.to/papers.html#curve25519.
*/
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
use ffi;
pub const BYTES: usize = ffi::crypto_scalarmult_curve25519_BYTES;
pub const SCALARBYTES: usize = ffi::crypto_scalarmult_curve25519_SCALARBYTES;
/**
* `Scalar` value (integer in byte representation)
*/
#[derive(Copy)]
pub struct Scalar(pub [u8; SCALARBYTES]);
newtype_clone!(Scalar);
newtype_impl!(Scalar, SCALARBYTES);
/**
* `GroupElement`
*/
#[derive(Copy)]
pub struct GroupElement(pub [u8; BYTES]);
newtype_clone!(GroupElement);
newtype_impl!(GroupElement, BYTES);
/**
* `scalarmult()` multiplies a group element `p`
* by an integer `n`. It returns the resulting group element
* `q`.
*/
pub fn scalarmult(&Scalar(ref n): &Scalar,
&GroupElement(ref p): &GroupElement) -> GroupElement {
let mut q = [0; BYTES];
unsafe {
ffi::crypto_scalarmult_curve25519(&mut q, n, p);
}
GroupElement(q)
}
/**
* `scalarmult_base()` computes the scalar product of a standard
* group element and an integer `n`. It returns the resulting
 * group element `q`.
*/
pub fn scalarmult_base(&Scalar(ref n): &Scalar) -> GroupElement {
let mut q = [0; BYTES];
unsafe {
ffi::crypto_scalarmult_curve25519_base(&mut q, n);
}
GroupElement(q)
}
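// A hedged sketch of Diffie-Hellman key agreement built from the two functions
// above; `sk_a` and `sk_b` stand for independently generated random scalars:
//
//     let pk_a = scalarmult_base(&sk_a);
//     let pk_b = scalarmult_base(&sk_b);
//     let shared_a = scalarmult(&sk_a, &pk_b);
//     let shared_b = scalarmult(&sk_b, &pk_a);
//     // shared_a and shared_b are the same group element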
#[test]
fn test_vector_1() {
// corresponding to tests/scalarmult.c and tests/scalarmult3.cpp from NaCl
let alicesk = Scalar([0x77,0x07,0x6d,0x0a,0x73,0x18,0xa5,0x7d
,0x3c,0x16,0xc1,0x72,0x51,0xb2,0x66,0x45
,0xdf,0x4c,0x2f,0x87,0xeb,0xc0,0x99,0x2a
,0xb1,0x77,0xfb,0xa5,0x1d,0xb9,0x2c,0x2a]);
let alicepk_expected = [0x85,0x20,0xf0,0x09,0x89,0x30,0xa7,0x54
,0x74,0x8b,0x7d,0xdc,0xb4,0x3e,0xf7,0x5a
,0x0d,0xbf,0x3a,0x0d,0x26,0x38,0x1a,0xf4
,0xeb,0xa4,0xa9,0x8e,0xaa,0x9b,0x4e,0x6a];
let GroupElement(alicepk) = scalarmult_base(&alicesk);
assert!(alicepk == alicepk_expected);
}
#[test]
fn test_vector_2() {
// corresponding to tests/scalarmult2.c and tests/scalarmult4.cpp from NaCl
let bobsk = Scalar([0x5d,0xab,0x08,0x7e,0x62,0x4a,0x8a,0x4b
,0x79,0xe1,0x7f,0x8b,0x83,0x80,0x0e,0xe6
,0x6f,0x3b,0xb1,0x29,0x26,0x18,0xb6,0xfd
,0x1c,0x2f,0x8b,0x27,0xff,0x88,0xe0,0xeb]);
let bobpk_expected = [0xde,0x9e,0xdb,0x7d,0x7b,0x7d,0xc1,0xb4
,0xd3,0x5b,0x61,0xc2,0xec,0xe4,0x35,0x37
,0x3f,0x83,0x43,0xc8,0x5b,0x78,0x67,0x4d
,0xad,0xfc,0x7e,0x14,0x6f,0x88,0x2b,0x4f];
let GroupElement(bobpk) = scalarmult_base(&bobsk);
assert!(bobpk == bobpk_expected);
}
#[test]
fn test_vector_3() {
// corresponding to tests/scalarmult5.c and tests/scalarmult7.cpp from NaCl
let alicesk = Scalar([0x77,0x07,0x6d,0x0a,0x73,0x18,0xa5,0x7d
,0x3c,0x16,0xc1,0x72,0x51,0xb2,0x66,0x45
,0xdf,0x4c,0x2f,0x87,0xeb,0xc0,0x99,0x2a
,0xb1,0x77,0xfb,0xa5,0x1d,0xb9,0x2c,0x2a]);
let bobpk = GroupElement([0xde,0x9e,0xdb,0x7d,0x7b,0x7d,0xc1,0xb4
,0xd3,0x5b,0x61,0xc2,0xec,0xe4,0x35,0x37
,0x3f,0x83,0x43,0xc8,0x5b,0x78,0x67,0x4d
,0xad,0xfc,0x7e,0x14,0x6f,0x88,0x2b,0x4f]);
let k_expected = [0x4a,0x5d,0x9d,0x5b,0xa4,0xce,0x2d,0xe1
,0x72,0x8e,0x3b,0xf4,0x80,0x35,0x0f,0x25
,0xe0,0x7e,0x21,0xc9,0x47,0xd1,0x9e,0x33
,0x76,0xf0,0x9b,0x3c,0x1e,0x16,0x17,0x42];
let GroupElement(k) = scalarmult(&alicesk, &bobpk);
assert!(k == k_expected);
}
#[test]
fn test_vector_4() {
// corresponding to tests/scalarmult6.c from NaCl
let bobsk = Scalar([0x5d,0xab,0x08,0x7e,0x62,0x4a,0x8a,0x4b
,0x79,0xe1,0x7f,0x8b,0x83,0x80,0x0e,0xe6
,0x6f,0x3b,0xb1,0x29,0x26,0x18,0xb6,0xfd
,0x1c,0x2f,0x8b,0x27,0xff,0x88,0xe0,0xeb]);
let alicepk = GroupElement([0x85,0x20,0xf0,0x09,0x89,0x30,0xa7,0x54
,0x74,0x8b,0x7d,0xdc,0xb4,0x3e,0xf7,0x5a
,0x0d,0xbf,0x3a,0x0d,0x26,0x38,0x1a,0xf4
,0xeb,0xa4,0xa9,0x8e,0xaa,0x9b,0x4e,0x6a]);
let k_expected = [0x4a,0x5d,0x9d,0x5b,0xa4,0xce,0x2d,0xe1
,0x72,0x8e,0x3b,0xf4,0x80,0x35,0x0f,0x25
,0xe0,0x7e,0x21,0xc9,0x47,0xd1,0x9e,0x33
,0x76,0xf0,0x9b,0x3c,0x1e,0x16,0x17,0x42];
let GroupElement(k) = scalarmult(&bobsk, &alicepk);
assert!(k == k_expected);
}
#[cfg(test)]
mod bench {
extern crate test;
use randombytes::randombytes_into;
use super::*;
#[bench]
fn bench_scalarmult(b: &mut test::Bencher) {
let mut gbs = [0u8; BYTES];
let mut sbs = [0u8; SCALARBYTES];
randombytes_into(&mut gbs);
randombytes_into(&mut sbs);
let g = GroupElement(gbs);
let s = Scalar(sbs);
b.iter(|| {
scalarmult(&s, &g);
});
}
#[bench]
fn bench_scalarmult_base(b: &mut test::Bencher) {
let mut sbs = [0u8; SCALARBYTES];
randombytes_into(&mut sbs);
let s = Scalar(sbs);
b.iter(|| {
scalarmult_base(&s);
});
}
}
| 36.745223 | 79 | 0.602357 |
1447bece52a273d874f185c90a770cc3343c9dc2 | 6,127 | // Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use std::fmt;
/// Constant to convert seconds to nanoseconds.
pub const NANOS_PER_SECOND: u64 = 1_000_000_000;
/// Wrapper over `libc::clockid_t` to specify Linux Kernel clock source.
pub enum ClockType {
/// Equivalent to `libc::CLOCK_MONOTONIC`.
Monotonic,
/// Equivalent to `libc::CLOCK_REALTIME`.
Real,
/// Equivalent to `libc::CLOCK_PROCESS_CPUTIME_ID`.
ProcessCpu,
/// Equivalent to `libc::CLOCK_THREAD_CPUTIME_ID`.
ThreadCpu,
}
impl Into<libc::clockid_t> for ClockType {
fn into(self) -> libc::clockid_t {
match self {
ClockType::Monotonic => libc::CLOCK_MONOTONIC,
ClockType::Real => libc::CLOCK_REALTIME,
ClockType::ProcessCpu => libc::CLOCK_PROCESS_CPUTIME_ID,
ClockType::ThreadCpu => libc::CLOCK_THREAD_CPUTIME_ID,
}
}
}
/// Structure representing the date in local time with nanosecond precision.
pub struct LocalTime {
/// Seconds in current minute.
sec: i32,
/// Minutes in current hour.
min: i32,
/// Hours in current day, 24H format.
hour: i32,
/// Days in current month.
mday: i32,
/// Months in current year.
mon: i32,
/// Years passed since 1900 BC.
year: i32,
/// Nanoseconds in current second.
nsec: i64,
}
impl LocalTime {
/// Returns the [LocalTime](struct.LocalTime.html) structure for the calling moment.
pub fn now() -> LocalTime {
let mut timespec = libc::timespec {
tv_sec: 0,
tv_nsec: 0,
};
let mut tm: libc::tm = libc::tm {
tm_sec: 0,
tm_min: 0,
tm_hour: 0,
tm_mday: 0,
tm_mon: 0,
tm_year: 0,
tm_wday: 0,
tm_yday: 0,
tm_isdst: 0,
tm_gmtoff: 0,
tm_zone: std::ptr::null(),
};
// Safe because the parameters are valid.
unsafe {
libc::clock_gettime(libc::CLOCK_REALTIME, &mut timespec);
libc::localtime_r(×pec.tv_sec, &mut tm);
}
LocalTime {
sec: tm.tm_sec,
min: tm.tm_min,
hour: tm.tm_hour,
mday: tm.tm_mday,
mon: tm.tm_mon,
year: tm.tm_year,
nsec: timespec.tv_nsec,
}
}
}
impl fmt::Display for LocalTime {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}-{:02}-{:02}T{:02}:{:02}:{:02}.{:09}",
self.year + 1900,
self.mon + 1,
self.mday,
self.hour,
self.min,
self.sec,
self.nsec
)
}
}
/// Holds a micro-second resolution timestamp with both the real time and cpu time.
#[derive(Clone)]
pub struct TimestampUs {
/// Real time in microseconds.
pub time_us: u64,
/// Cpu time in microseconds.
pub cputime_us: u64,
}
impl Default for TimestampUs {
fn default() -> TimestampUs {
TimestampUs {
time_us: get_time(ClockType::Monotonic) / 1000,
cputime_us: get_time(ClockType::ProcessCpu) / 1000,
}
}
}
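// An illustrative use (not from this codebase): measuring elapsed wall-clock
// time in microseconds against a captured timestamp.
//
//     let start = TimestampUs::default();
//     // ... do some work ...
//     let elapsed_us = get_time(ClockType::Monotonic) / 1000 - start.time_us;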
/// Returns a timestamp in nanoseconds from a monotonic clock.
///
/// Uses `_rdstc` on `x86_64` and [`get_time`](fn.get_time.html) on other architectures.
pub fn timestamp_cycles() -> u64 {
#[cfg(target_arch = "x86_64")]
// Safe because there's nothing that can go wrong with this call.
unsafe {
std::arch::x86_64::_rdtsc() as u64
}
#[cfg(not(target_arch = "x86_64"))]
{
get_time(ClockType::Monotonic)
}
}
/// Returns a timestamp in nanoseconds based on the provided clock type.
///
/// # Arguments
///
/// * `clock_type` - Identifier of the Linux Kernel clock on which to act.
pub fn get_time(clock_type: ClockType) -> u64 {
let mut time_struct = libc::timespec {
tv_sec: 0,
tv_nsec: 0,
};
// Safe because the parameters are valid.
unsafe { libc::clock_gettime(clock_type.into(), &mut time_struct) };
seconds_to_nanoseconds(time_struct.tv_sec).unwrap() as u64 + (time_struct.tv_nsec as u64)
}
/// Converts a timestamp in seconds to an equivalent one in nanoseconds.
/// Returns `None` if the conversion overflows.
///
/// # Arguments
///
/// * `value` - Timestamp in seconds.
pub fn seconds_to_nanoseconds(value: i64) -> Option<i64> {
value.checked_mul(NANOS_PER_SECOND as i64)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_get_time() {
for _ in 0..1000 {
assert!(get_time(ClockType::Monotonic) <= get_time(ClockType::Monotonic));
}
for _ in 0..1000 {
assert!(get_time(ClockType::ProcessCpu) <= get_time(ClockType::ProcessCpu));
}
for _ in 0..1000 {
assert!(get_time(ClockType::ThreadCpu) <= get_time(ClockType::ThreadCpu));
}
assert_ne!(get_time(ClockType::Real), 0);
}
#[test]
fn test_local_time_display() {
let local_time = LocalTime {
sec: 30,
min: 15,
hour: 10,
mday: 4,
mon: 6,
year: 119,
nsec: 123_456_789,
};
assert_eq!(
String::from("2019-07-04T10:15:30.123456789"),
local_time.to_string()
);
let local_time = LocalTime {
sec: 5,
min: 5,
hour: 5,
mday: 23,
mon: 7,
year: 44,
nsec: 123,
};
assert_eq!(
String::from("1944-08-23T05:05:05.000000123"),
local_time.to_string()
);
let local_time = LocalTime::now();
assert!(local_time.mon >= 0 && local_time.mon <= 11);
}
#[test]
fn test_seconds_to_nanoseconds() {
assert_eq!(
seconds_to_nanoseconds(100).unwrap() as u64,
100 * NANOS_PER_SECOND
);
assert!(seconds_to_nanoseconds(9_223_372_037).is_none());
}
}
| 26.872807 | 93 | 0.561776 |
f87b2f16660871251cc3653d7b4395f915abfcdc | 6,017 | // Generated from definition io.k8s.api.core.v1.ReplicationControllerList
/// ReplicationControllerList is a collection of replication controllers.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct ReplicationControllerList {
/// List of replication controllers. More info: https://kubernetes.io/docs/concepts/workloads/controllers/replicationcontroller
pub items: Vec<crate::v1_12::api::core::v1::ReplicationController>,
/// Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
pub metadata: Option<crate::v1_12::apimachinery::pkg::apis::meta::v1::ListMeta>,
}
impl crate::Resource for ReplicationControllerList {
fn api_version() -> &'static str {
"v1"
}
fn group() -> &'static str {
""
}
fn kind() -> &'static str {
"ReplicationControllerList"
}
fn version() -> &'static str {
"v1"
}
}
impl crate::Metadata for ReplicationControllerList {
type Ty = crate::v1_12::apimachinery::pkg::apis::meta::v1::ListMeta;
fn metadata(&self) -> Option<&<Self as crate::Metadata>::Ty> {
self.metadata.as_ref()
}
}
impl<'de> serde::Deserialize<'de> for ReplicationControllerList {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_api_version,
Key_kind,
Key_items,
Key_metadata,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"apiVersion" => Field::Key_api_version,
"kind" => Field::Key_kind,
"items" => Field::Key_items,
"metadata" => Field::Key_metadata,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = ReplicationControllerList;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "struct ReplicationControllerList")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_items: Option<Vec<crate::v1_12::api::core::v1::ReplicationController>> = None;
let mut value_metadata: Option<crate::v1_12::apimachinery::pkg::apis::meta::v1::ListMeta> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_api_version => {
let value_api_version: String = serde::de::MapAccess::next_value(&mut map)?;
if value_api_version != <Self::Value as crate::Resource>::api_version() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::api_version()));
}
},
Field::Key_kind => {
let value_kind: String = serde::de::MapAccess::next_value(&mut map)?;
if value_kind != <Self::Value as crate::Resource>::kind() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::kind()));
}
},
Field::Key_items => value_items = Some(serde::de::MapAccess::next_value(&mut map)?),
Field::Key_metadata => value_metadata = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(ReplicationControllerList {
items: value_items.ok_or_else(|| serde::de::Error::missing_field("items"))?,
metadata: value_metadata,
})
}
}
deserializer.deserialize_struct(
"ReplicationControllerList",
&[
"apiVersion",
"kind",
"items",
"metadata",
],
Visitor,
)
}
}
impl serde::Serialize for ReplicationControllerList {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"ReplicationControllerList",
3 +
self.metadata.as_ref().map_or(0, |_| 1),
)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::api_version())?;
serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::kind())?;
serde::ser::SerializeStruct::serialize_field(&mut state, "items", &self.items)?;
if let Some(value) = &self.metadata {
serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", value)?;
}
serde::ser::SerializeStruct::end(state)
}
}
| 41.496552 | 174 | 0.534153 |
7a0b911ae89ef27b93b8402520eb15c50523e521 | 1,789 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "256"]
use anyhow::{Context as _, Error};
use fidl_fuchsia_bluetooth_bredr::ProfileMarker;
use fuchsia_component::server::ServiceFs;
use fuchsia_inspect_derive::Inspect;
use futures::{self, channel::mpsc, future, pin_mut};
use tracing::warn;
mod fidl_service;
mod profile;
mod profile_registrar;
mod rfcomm;
mod types;
use crate::fidl_service::run_services;
use crate::profile_registrar::ProfileRegistrar;
#[fuchsia::main]
pub async fn main() -> Result<(), Error> {
let profile_svc = fuchsia_component::client::connect_to_protocol::<ProfileMarker>()
.context("Failed to connect to Bluetooth Profile service")?;
let (service_sender, service_receiver) = mpsc::channel(1);
let mut fs = ServiceFs::new();
let inspect = fuchsia_inspect::Inspector::new();
if let Err(e) = inspect_runtime::serve(&inspect, &mut fs) {
warn!("Could not serve inspect: {}", e);
}
let services = run_services(fs, service_sender)?;
pin_mut!(services);
let mut profile_registrar = ProfileRegistrar::new(profile_svc);
if let Err(e) = profile_registrar.iattach(inspect.root(), "rfcomm_server") {
warn!("Failed to attach to inspect: {}", e);
}
let profile_registrar_fut = profile_registrar.start(service_receiver);
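    // Exit when either the FIDL service directory closes or every profile connection ends.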
match future::select(services, profile_registrar_fut).await {
future::Either::Left(((), _)) => {
warn!("Service FS directory handle closed. Exiting.");
}
future::Either::Right(((), _)) => {
warn!("All Profile related connections have terminated. Exiting.");
}
}
Ok(())
}
| 31.946429 | 87 | 0.681945 |
f428d9734c1773f67e79c1148c6fc4f413f01bbc | 13,492 | #![type_length_limit = "2121396"]
use std::{
ascii, cmp,
ffi::OsStr,
fs,
net::{IpAddr, SocketAddr},
path::PathBuf,
pin::Pin,
str, sync,
task::{Context, Poll},
};
use anyhow::{anyhow, bail, Context as _, Result};
use bytes::Bytes;
use futures_util::{ready, Future, StreamExt, TryFutureExt};
use http::{Response, StatusCode};
use hyper::service::{make_service_fn, service_fn};
use structopt::{self, StructOpt};
use tokio::net::{TcpListener, TcpStream};
use tokio_rustls::{server::TlsStream, TlsAcceptor};
use tracing::{error, info, info_span};
use tracing_futures::Instrument as _;
use quinn::SendStream;
use sync::Arc;
#[derive(StructOpt, Debug, Clone)]
#[structopt(name = "h3_server")]
struct Opt {
/// TLS private key
#[structopt(
parse(from_os_str),
short = "k",
long = "key",
requires = "cert",
default_value = "key.der"
)]
key: PathBuf,
/// TLS certificate
#[structopt(
parse(from_os_str),
short = "c",
long = "cert",
requires = "key",
default_value = "cert.der"
)]
cert: PathBuf,
/// Address to listen on
#[structopt(long = "listen", short = "l", default_value = "::")]
listen: IpAddr,
}
#[tokio::main]
async fn main() -> Result<()> {
tracing::subscriber::set_global_default(
tracing_subscriber::FmtSubscriber::builder()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.finish(),
)
.unwrap();
let opt = Opt::from_args();
let key = fs::read(&opt.key).context("failed to read private key")?;
let key = match opt.key.as_path().extension().and_then(OsStr::to_str) {
Some("der") => quinn::PrivateKey::from_der(&key[..])?,
_ => quinn::PrivateKey::from_pem(&key[..])?,
};
let cert_chain = fs::read(&opt.cert).context("failed to read certificate chain")?;
let cert_chain = match opt.cert.as_path().extension().and_then(OsStr::to_str) {
Some("der") => {
quinn::CertificateChain::from_certs(vec![quinn::Certificate::from_der(&cert_chain)?])
}
_ => quinn::CertificateChain::from_pem(&cert_chain)?,
};
let mut server_config = quinn::ServerConfigBuilder::default();
server_config.certificate(cert_chain, key)?;
server_config.protocols(&[b"hq-29", b"siduck-00"]);
let main = server(server_config.clone(), SocketAddr::new(opt.listen, 4433));
let default = server(server_config.clone(), SocketAddr::new(opt.listen, 443));
server_config.use_stateless_retry(true);
let retry = server(server_config.clone(), SocketAddr::new(opt.listen, 4434));
tokio::try_join!(main, default, retry, h2_server(server_config.clone()))?;
Ok(())
}
async fn server(server_config: quinn::ServerConfigBuilder, addr: SocketAddr) -> Result<()> {
let mut transport = quinn::TransportConfig::default();
transport.send_window(1024 * 1024 * 3);
transport.receive_window(1024 * 1024).unwrap();
let mut server_config = server_config.build();
server_config.transport = Arc::new(transport);
let mut endpoint_builder = quinn::Endpoint::builder();
endpoint_builder.listen(server_config);
let (_, mut incoming) = endpoint_builder.bind(&addr)?;
println!("server listening on {}", addr);
while let Some(mut connecting) = incoming.next().await {
tokio::spawn(async move {
let proto = match connecting.handshake_data().await {
Err(_) => return,
Ok(x) => x.protocol.unwrap(),
};
println!("server received connection");
let result = match &proto[..] {
b"hq-29" => hq_handle_connection(connecting).await,
b"siduck-00" => siduck_handle_connection(connecting).await,
_ => unreachable!("unsupported protocol"),
};
if let Err(e) = result {
error!("handling connection failed: {:?}", e);
}
});
}
Ok(())
}
async fn hq_handle_connection(conn: quinn::Connecting) -> Result<()> {
let quinn::NewConnection {
connection,
mut bi_streams,
..
} = match conn.into_0rtt() {
Ok((c, _)) => c,
Err(c) => c.await?,
};
let span = info_span!(
"connection",
remote = %connection.remote_address(),
protocol = %connection
.handshake_data()
.unwrap()
.protocol
.map_or_else(|| "<none>".into(), |x| String::from_utf8_lossy(&x).into_owned())
);
async {
info!("established");
// Each stream initiated by the client constitutes a new request.
while let Some(stream) = bi_streams.next().await {
let stream = match stream {
Err(quinn::ConnectionError::ApplicationClosed { .. }) => {
info!("connection closed");
return Ok(());
}
Err(e) => {
return Err(e);
}
Ok(s) => s,
};
tokio::spawn(
hq_handle_request(stream)
.unwrap_or_else(move |e| error!("failed: {reason}", reason = e.to_string()))
.instrument(info_span!("request")),
);
}
Ok(())
}
.instrument(span)
.await?;
Ok(())
}
async fn hq_handle_request((send, recv): (quinn::SendStream, quinn::RecvStream)) -> Result<()> {
let req = recv
.read_to_end(64 * 1024)
.await
.map_err(|e| anyhow!("failed reading request: {}", e))?;
let mut escaped = String::new();
for &x in &req[..] {
let part = ascii::escape_default(x).collect::<Vec<_>>();
escaped.push_str(str::from_utf8(&part).unwrap());
}
info!(content = %escaped);
// Execute the request
hq_process_get(send, &req).await?;
Ok(())
}
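/// Handles an hq request line of the form `GET <path>\r\n`, streaming either
/// the requested number of bytes (capped at 1GB) or the home page.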
async fn hq_process_get(mut send: SendStream, x: &[u8]) -> Result<()> {
if x.len() < 4 || &x[0..4] != b"GET " {
bail!("missing GET");
}
if x[4..].len() < 2 || &x[x.len() - 2..] != b"\r\n" {
bail!("missing \\r\\n");
}
let x = &x[4..x.len() - 2];
let end = x.iter().position(|&c| c == b' ').unwrap_or_else(|| x.len());
let path = str::from_utf8(&x[..end]).context("path is malformed UTF-8")?;
// Write the response
match parse_size(path) {
Ok(n) if n <= 1_000_000_000 => {
let mut remaining = n;
while remaining > 0 {
let size = cmp::min(remaining, TEXT.len());
send.write_all(&TEXT[..size])
.await
.map_err(|e| anyhow!("failed to send response: {}", e))?;
remaining -= size;
}
}
Ok(_) | Err(_) => {
send.write_all(HOME.as_bytes())
.await
.map_err(|e| anyhow!("failed to send response: {}", e))?;
}
}
// Gracefully terminate the stream
send.finish()
.await
.map_err(|e| anyhow!("failed to shutdown stream: {}", e))?;
Ok(())
}
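/// Parses a size from a request path such as "/1000000" or "/5K"; the
/// suffixes K, M, and G scale the number by 1e3, 1e6, and 1e9 respectively.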
fn parse_size(literal: &str) -> Result<usize> {
if literal.is_empty() {
return Err(anyhow!("path empty"));
}
let pos = literal[1..]
.find(|c: char| !c.is_ascii_digit())
.map(|p| p + 1)
.unwrap_or_else(|| literal.len());
let num: usize = literal[1..pos]
.parse()
.map_err(|_| anyhow!("parse failed"))?;
let scale = match literal[pos..].to_uppercase().as_str() {
"K" => 1000,
"M" => 1_000_000,
"G" => 1_000_000_000,
_ => 1,
};
Ok(num * scale)
}
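// Alt-Svc header value advertising HTTP/3 (draft 29) on port 443.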
const ALT_SVC: &str = "h3-29=\":443\"";
fn h2_home() -> hyper::Response<hyper::Body> {
Response::builder()
.status(StatusCode::OK)
.header("Alt-Svc", ALT_SVC)
.body(HOME.into())
.expect("failed to build response")
}
fn h2_payload(len: usize) -> hyper::Response<hyper::Body> {
if len > 1_000_000_000 {
let response = Response::builder()
.status(StatusCode::BAD_REQUEST)
.header("Alt-Svc", ALT_SVC)
.body(Bytes::from(format!("requested {}: too large", len)).into())
.expect("failed to build response");
return response;
}
let mut buf = TEXT.repeat(len / TEXT.len() + 1);
buf.truncate(len);
Response::builder()
.status(StatusCode::OK)
.body(buf.into())
.expect("failed to build response")
}
async fn h2_handle(request: hyper::Request<hyper::Body>) -> Result<hyper::Response<hyper::Body>> {
Ok(match request.uri().path() {
"/" => h2_home(),
path => match parse_size(path) {
Ok(n) => h2_payload(n),
Err(_) => h2_home(),
},
})
}
async fn h2_server(server_config: quinn::ServerConfigBuilder) -> Result<()> {
let mut tls_cfg = (*server_config.build().crypto).clone();
tls_cfg.set_protocols(&[b"h2".to_vec(), b"http/1.1".to_vec()]);
let tls_acceptor = TlsAcceptor::from(sync::Arc::new(tls_cfg));
let tcp = TcpListener::bind(&SocketAddr::new([0, 0, 0, 0].into(), 443)).await?;
let service = make_service_fn(|_conn| async { Ok::<_, anyhow::Error>(service_fn(h2_handle)) });
let server = hyper::Server::builder(HyperAcceptor::new(tcp, tls_acceptor)).serve(service);
if let Err(e) = server.await {
error!("server error: {}", e);
}
Ok(())
}
struct HyperAcceptor {
tcp: TcpListener,
tls: TlsAcceptor,
handshake: Option<tokio_rustls::Accept<TcpStream>>,
}
impl HyperAcceptor {
pub fn new(tcp: TcpListener, tls: TlsAcceptor) -> Self {
Self {
tls,
tcp,
handshake: None,
}
}
}
impl hyper::server::accept::Accept for HyperAcceptor {
type Conn = TlsStream<TcpStream>;
type Error = anyhow::Error;
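    // Accepts TCP connections and drives each TLS handshake to completion,
    // yielding established TLS streams to hyper one at a time.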
fn poll_accept(
mut self: Pin<&mut Self>,
cx: &mut Context,
) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
loop {
match self.handshake {
Some(ref mut h) => {
let conn = ready!(Pin::new(h).poll(cx))?;
self.handshake = None;
return Poll::Ready(Some(Ok(conn)));
}
None => {
let (stream, _) = ready!(self.tcp.poll_accept(cx))?;
self.handshake = Some(self.tls.accept(stream));
}
}
}
}
}
const TEXT: &[u8] =
b"It would be different if we could not step back and reflect on the process,\n\
but were merely led from impulse to impulse without self-consciousness. But human\n\
beings do not act solely on impulse. They are prudent, they reflect, they weigh\n\
consequences, they ask whether what they are doing is worth while. Not only are their\n\
lives full of particular choices that hang together in larger activities with temporal\n\
structure: they also decide in the broadest terms what to pursue and what to avoid, what\n\
the priorities among their various aims should be, and what kind of people they want to\n\
be or become. Some men are faced with such choices by the large decisions they make from\n\
time to time; some merely by reflection on the course their lives are taking as the product\n\
of countless small decisions. They decide whom to marry, what profession to follow, whether\n\
to join the Country Club, or the Resistance; or they may just wonder why they go on being\n\
salesmen or academics or taxi drivers, and then stop thinking about it after a certain period\n\
of inconclusive reflection.";
const HOME: &str = r##"
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>Quinn H3 interop server</title>
</head>
<body>
<h1>Welcome to the quinn-h3 interop server.</h1>
<p>
<strong>Draft version:</strong> draft-24<br/>
<strong>Available tests:</strong> VHDCRZSBU3
</p>
<p>
Use '/{n}' to get <i>n</i> bytes of deep thoughts.<br/>
For example <a href="/1000000">/1000000</a>
to get 1MB. Limit: 1GB
</p>
<p>Check out our project's <a href="https://github.com/djc/quinn">repository</a>.</p>
<p>Say hi in the quickdev Slack workspace at `quinn`.</p>
</body>
</html>
"##;
/// https://tools.ietf.org/html/draft-pardue-quic-siduck-00
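/// Replies "quack-ack" to each "quack" datagram and closes the connection
/// with an error code on anything else.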
async fn siduck_handle_connection(conn: quinn::Connecting) -> Result<()> {
let quinn::NewConnection {
connection,
mut datagrams,
..
} = match conn.into_0rtt() {
Ok((c, _)) => c,
Err(c) => c.await?,
};
while let Some(datagram) = datagrams.next().await {
match datagram {
Err(quinn::ConnectionError::ApplicationClosed { .. }) => {
info!("connection closed");
return Ok(());
}
Err(e) => {
return Err(e.into());
}
Ok(data) => {
if &data[..] == b"quack" {
connection.send_datagram(b"quack-ack"[..].into())?;
} else {
const SIDUCK_ONLY_QUACKS_ECHO: quinn::VarInt = quinn::VarInt::from_u32(0x101);
connection.close(SIDUCK_ONLY_QUACKS_ECHO, b"quack quack quack");
bail!("got non-quack datagram");
}
}
}
}
Ok(())
}
| 33.068627 | 100 | 0.561814 |
5b9b1436039d09308cd53459877932740709e643 | 4,881 | mod path_lerping;
use crate::path_lerping::Lerp;
use bevy::prelude::*;
use bevy_prototype_lyon::entity::Path as PathComponent;
use bevy_prototype_lyon::prelude::*;
use std::ops::{Add, RangeBounds, RangeInclusive, Sub};
use tess::path::Path;
enum Direction {
Increasing,
Decreasing,
}
impl Direction {
fn inverted(&self) -> Self {
match self {
Direction::Increasing => Direction::Decreasing,
Direction::Decreasing => Direction::Increasing,
}
}
fn invert(&mut self) {
*self = self.inverted();
}
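    // Returns the arithmetic operation matching the direction:
    // addition while increasing, subtraction while decreasing.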
fn get_operation<T: Add<Rhs, Output = Output> + Sub<Rhs, Output = Output>, Rhs, Output>(
&self,
) -> &dyn Fn(T, Rhs) -> Output {
match self {
Direction::Increasing => &Add::add,
Direction::Decreasing => &Sub::sub,
}
}
}
#[derive(Component)]
struct SidesChangingShape<T: RangeBounds<u8>> {
sides: u8,
bounds: T,
direction: Direction,
}
impl<T: RangeBounds<u8>> SidesChangingShape<T> {
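    // Steps the side count one unit in the current direction, reversing
    // direction and retrying when the next value would fall outside `bounds`.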
fn increment_sides(&mut self) {
let op = self.direction.get_operation();
let new_sides = op(self.sides, 1);
if self.bounds.contains(&new_sides) {
self.sides = new_sides;
} else {
self.direction.invert();
self.increment_sides();
}
}
}
#[derive(Component)]
struct LerpingShape {
target: Path,
lerp_t: f32,
margin_of_error: f32,
}
// Event for when all points of a LerpingShape are within the margin-of-error of the target path
struct LerpFinished(Entity);
#[derive(Clone, Copy, Hash, PartialEq, Eq, Debug, SystemLabel)]
enum System {
ChangeSides,
UpdateLerpTarget,
LerpShape,
}
fn main() {
App::new()
.insert_resource(Msaa { samples: 8 })
.add_plugins(DefaultPlugins)
.add_plugin(ShapePlugin)
.add_startup_system(setup)
.add_event::<LerpFinished>()
.add_system(change_sides::<RangeInclusive<u8>>.label(System::ChangeSides))
.add_system(
update_lerp_target::<RangeInclusive<u8>>
.label(System::UpdateLerpTarget)
.after(System::ChangeSides),
)
.add_system(
lerp_shape
.label(System::LerpShape)
.after(System::UpdateLerpTarget),
)
.run();
}
fn setup(mut commands: Commands) {
const SIDES: u8 = 5;
let shape = shapes::RegularPolygon {
sides: SIDES as usize,
feature: shapes::RegularPolygonFeature::Radius(200.0),
..Default::default()
};
commands.spawn_bundle(OrthographicCameraBundle::new_2d());
commands
.spawn_bundle(GeometryBuilder::build_as(
&shape,
DrawMode::Outlined {
fill_mode: FillMode::color(Color::ORANGE),
outline_mode: StrokeMode::new(Color::ORANGE_RED, 8.0),
},
Transform::default(),
))
.insert(SidesChangingShape {
sides: SIDES,
bounds: 3..=8,
direction: Direction::Increasing,
})
.insert(LerpingShape {
target: ShapePath::build_as(&shape).0,
lerp_t: 0.025,
margin_of_error: 1.0,
});
}
fn change_sides<T: RangeBounds<u8> + 'static + Send + Sync>(
mut lerp_events: EventReader<LerpFinished>,
mut query: Query<&mut SidesChangingShape<T>>,
) {
for LerpFinished(entity) in lerp_events.iter() {
if let Ok(mut sides) = query.get_mut(*entity) {
sides.increment_sides();
}
}
}
fn update_lerp_target<T: RangeBounds<u8> + 'static + Send + Sync>(
mut query: Query<(&SidesChangingShape<T>, &mut LerpingShape), Changed<SidesChangingShape<T>>>,
) {
for (sides, mut shape) in query.iter_mut() {
if sides.sides % 2 == 0 {
shape.target = ShapePath::build_as(&shapes::Ellipse {
radii: Vec2::new(
(sides.sides as f32).sin() * 200.0,
(sides.sides as f32).cos() * 200.0,
),
..Default::default()
})
.0;
} else {
shape.target = ShapePath::build_as(&shapes::RegularPolygon {
sides: sides.sides as usize,
feature: shapes::RegularPolygonFeature::Radius(200.0),
..Default::default()
})
.0;
}
}
}
fn lerp_shape(
mut lerp_events: EventWriter<LerpFinished>,
mut query: Query<(Entity, &mut PathComponent, &LerpingShape)>,
) {
for (entity, mut from, to) in query.iter_mut() {
let (is_within_margin_of_error, new_path) =
from.0.lerped(&to.target, to.lerp_t, to.margin_of_error);
from.0 = new_path;
if is_within_margin_of_error {
lerp_events.send(LerpFinished(entity));
}
}
}
| 28.051724 | 98 | 0.572833 |
e6ea25e274268ccb99ac69bddf59c2509d965f28 | 1,519 | // enums3.rs
// Address all the TODOs to make the tests pass!
enum Message {
Move { x: i32, y: i32 },
Echo(String),
ChangeColor(u8, u8, u8),
Quit,
}
struct Point {
x: i32,
y: i32,
}
struct State {
color: (u8, u8, u8),
position: Point,
quit: bool,
}
impl State {
fn change_color(&mut self, color: (u8, u8, u8)) {
self.color = color;
}
fn quit(&mut self) {
self.quit = true;
}
fn echo(&self, s: String) {
println!("{}", s);
}
fn move_position(&mut self, p: Point) {
self.position = p;
}
    fn process(&mut self, message: Message) {
        match message {
            Message::Move { x, y } => self.move_position(Point { x, y }),
            Message::Echo(s) => self.echo(s),
            Message::ChangeColor(r, g, b) => self.change_color((r, g, b)),
            Message::Quit => self.quit(),
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_match_message_call() {
let mut state = State {
quit: false,
position: Point { x: 0, y: 0 },
color: (0, 0, 0),
};
state.process(Message::ChangeColor(255, 0, 255));
state.process(Message::Echo(String::from("hello world")));
state.process(Message::Move { x: 10, y: 15 });
state.process(Message::Quit);
assert_eq!(state.color, (255, 0, 255));
assert_eq!(state.position.x, 10);
assert_eq!(state.position.y, 15);
assert_eq!(state.quit, true);
}
}
| 21.394366 | 69 | 0.506254 |
ddcdb2031ea2c9113bbfef78631c6ef124bfd89a | 1,962 | use crate::steps;
use crossbeam_channel::{Receiver, Sender};
use raft::LogEntry;
use raft_modules::{ClusterConfiguration, MemoryRsm, RandomizedElectionTimer};
mod custom_operation_log;
use crate::create_node_worker;
pub fn run() {
let node_ids = vec![1, 2];
let new_node_id = node_ids.last().unwrap() + 1;
let peer_communicator = steps::get_generic_peer_communicator(vec![1, 2, 3]);
let mut cluster = steps::cluster::start_initial_cluster(
node_ids,
peer_communicator,
steps::create_generic_node_inproc,
);
steps::sleep(2);
    // find elected leader
    let leader = cluster.get_node1_leader_by_adding_data_sample();
    // add new data to the cluster
steps::data::add_data_sample(&leader).expect("add sample successful");
let (tx, rx): (Sender<LogEntry>, Receiver<LogEntry>) = crossbeam_channel::unbounded();
let operation_log = custom_operation_log::MemoryOperationLog::new(
ClusterConfiguration::new(cluster.initial_nodes.clone()),
tx,
);
// run new server
cluster.add_new_server(new_node_id, |node_id, all_nodes, peer_communicator| {
create_node_worker!(
node_id,
ClusterConfiguration::new(all_nodes),
peer_communicator,
RandomizedElectionTimer::new(2000, 4000),
MemoryRsm::new(),
operation_log.clone()
)
});
    // add new server to the cluster
steps::data::add_server(&leader, new_node_id);
steps::sleep(1);
    // add new data to the cluster
steps::data::add_data_sample(&leader).expect("add sample successful");
steps::sleep(5);
let mut entry_count = 0;
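    // Drain the log entries that the custom operation log forwarded on the channel.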
loop {
let res = rx.try_recv();
if let Ok(val) = res {
entry_count += 1;
trace!("{:?}", val);
} else {
// println!("{:?}", res);
break;
}
}
assert_eq!(4, entry_count);
cluster.terminate();
}
| 26.876712 | 90 | 0.625382 |
2917e573278d95cf0efee81c409a9c1226e347fe | 183,864 | #![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::models;
#[derive(Clone)]
pub struct Client {
endpoint: String,
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
scopes: Vec<String>,
pipeline: azure_core::Pipeline,
}
#[derive(Clone)]
pub struct ClientBuilder {
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
endpoint: Option<String>,
scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
pub(crate) fn endpoint(&self) -> &str {
self.endpoint.as_str()
}
pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
self.credential.as_ref()
}
pub(crate) fn scopes(&self) -> Vec<&str> {
self.scopes.iter().map(String::as_str).collect()
}
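    // Sends a request through the pipeline with a fresh per-call context.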
pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> azure_core::error::Result<azure_core::Response> {
let mut context = azure_core::Context::default();
let mut request = request.into();
self.pipeline.send(&mut context, &mut request).await
}
pub fn new(
endpoint: impl Into<String>,
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
scopes: Vec<String>,
) -> Self {
let endpoint = endpoint.into();
let pipeline = azure_core::Pipeline::new(
option_env!("CARGO_PKG_NAME"),
option_env!("CARGO_PKG_VERSION"),
azure_core::ClientOptions::default(),
Vec::new(),
Vec::new(),
);
Self {
endpoint,
credential,
scopes,
pipeline,
}
}
pub fn operations(&self) -> operations::Client {
operations::Client(self.clone())
}
pub fn registries(&self) -> registries::Client {
registries::Client(self.clone())
}
pub fn replications(&self) -> replications::Client {
replications::Client(self.clone())
}
pub fn runs(&self) -> runs::Client {
runs::Client(self.clone())
}
pub fn tasks(&self) -> tasks::Client {
tasks::Client(self.clone())
}
pub fn webhooks(&self) -> webhooks::Client {
webhooks::Client(self.clone())
}
}
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
Registries_ImportImage(#[from] registries::import_image::Error),
#[error(transparent)]
Registries_CheckNameAvailability(#[from] registries::check_name_availability::Error),
#[error(transparent)]
Operations_List(#[from] operations::list::Error),
#[error(transparent)]
Registries_Get(#[from] registries::get::Error),
#[error(transparent)]
Registries_Create(#[from] registries::create::Error),
#[error(transparent)]
Registries_Update(#[from] registries::update::Error),
#[error(transparent)]
Registries_Delete(#[from] registries::delete::Error),
#[error(transparent)]
Registries_ListByResourceGroup(#[from] registries::list_by_resource_group::Error),
#[error(transparent)]
Registries_List(#[from] registries::list::Error),
#[error(transparent)]
Registries_ListCredentials(#[from] registries::list_credentials::Error),
#[error(transparent)]
Registries_RegenerateCredential(#[from] registries::regenerate_credential::Error),
#[error(transparent)]
Registries_ListUsages(#[from] registries::list_usages::Error),
#[error(transparent)]
Replications_Get(#[from] replications::get::Error),
#[error(transparent)]
Replications_Create(#[from] replications::create::Error),
#[error(transparent)]
Replications_Update(#[from] replications::update::Error),
#[error(transparent)]
Replications_Delete(#[from] replications::delete::Error),
#[error(transparent)]
Replications_List(#[from] replications::list::Error),
#[error(transparent)]
Webhooks_Get(#[from] webhooks::get::Error),
#[error(transparent)]
Webhooks_Create(#[from] webhooks::create::Error),
#[error(transparent)]
Webhooks_Update(#[from] webhooks::update::Error),
#[error(transparent)]
Webhooks_Delete(#[from] webhooks::delete::Error),
#[error(transparent)]
Webhooks_List(#[from] webhooks::list::Error),
#[error(transparent)]
Webhooks_Ping(#[from] webhooks::ping::Error),
#[error(transparent)]
Webhooks_GetCallbackConfig(#[from] webhooks::get_callback_config::Error),
#[error(transparent)]
Webhooks_ListEvents(#[from] webhooks::list_events::Error),
#[error(transparent)]
Registries_ScheduleRun(#[from] registries::schedule_run::Error),
#[error(transparent)]
Registries_GetBuildSourceUploadUrl(#[from] registries::get_build_source_upload_url::Error),
#[error(transparent)]
Runs_List(#[from] runs::list::Error),
#[error(transparent)]
Runs_Get(#[from] runs::get::Error),
#[error(transparent)]
Runs_Update(#[from] runs::update::Error),
#[error(transparent)]
Runs_GetLogSasUrl(#[from] runs::get_log_sas_url::Error),
#[error(transparent)]
Runs_Cancel(#[from] runs::cancel::Error),
#[error(transparent)]
Tasks_List(#[from] tasks::list::Error),
#[error(transparent)]
Tasks_Get(#[from] tasks::get::Error),
#[error(transparent)]
Tasks_Create(#[from] tasks::create::Error),
#[error(transparent)]
Tasks_Update(#[from] tasks::update::Error),
#[error(transparent)]
Tasks_Delete(#[from] tasks::delete::Error),
#[error(transparent)]
Tasks_GetDetails(#[from] tasks::get_details::Error),
}
pub mod registries {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn import_image(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
parameters: impl Into<models::ImportImageParameters>,
) -> import_image::Builder {
import_image::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
parameters: parameters.into(),
}
}
pub fn check_name_availability(
&self,
subscription_id: impl Into<String>,
registry_name_check_request: impl Into<models::RegistryNameCheckRequest>,
) -> check_name_availability::Builder {
check_name_availability::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
registry_name_check_request: registry_name_check_request.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
pub fn create(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
registry: impl Into<models::Registry>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
registry: registry.into(),
}
}
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
registry_update_parameters: impl Into<models::RegistryUpdateParameters>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
registry_update_parameters: registry_update_parameters.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
pub fn list_by_resource_group(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_resource_group::Builder {
list_by_resource_group::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
pub fn list_credentials(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> list_credentials::Builder {
list_credentials::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
pub fn regenerate_credential(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
regenerate_credential_parameters: impl Into<models::RegenerateCredentialParameters>,
) -> regenerate_credential::Builder {
regenerate_credential::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
regenerate_credential_parameters: regenerate_credential_parameters.into(),
}
}
pub fn list_usages(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> list_usages::Builder {
list_usages::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
pub fn schedule_run(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
run_request: impl Into<models::RunRequest>,
) -> schedule_run::Builder {
schedule_run::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
run_request: run_request.into(),
}
}
pub fn get_build_source_upload_url(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> get_build_source_upload_url::Builder {
get_build_source_upload_url::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
}
pub mod import_image {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) parameters: models::ImportImageParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/importImage",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod check_name_availability {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) registry_name_check_request: models::RegistryNameCheckRequest,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RegistryNameStatus, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.ContainerRegistry/checkNameAvailability",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.registry_name_check_request).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RegistryNameStatus =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Registry, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Registry =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod create {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Registry),
Created201(models::Registry),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) registry: models::Registry,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.registry).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Registry =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Registry =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Registry),
Created201(models::Registry),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) registry_update_parameters: models::RegistryUpdateParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.registry_update_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Registry =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Registry =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list_by_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RegistryListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RegistryListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RegistryListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.ContainerRegistry/registries",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RegistryListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list_credentials {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::RegistryListCredentialsResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/listCredentials",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RegistryListCredentialsResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod regenerate_credential {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) regenerate_credential_parameters: models::RegenerateCredentialParameters,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::RegistryListCredentialsResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/regenerateCredential",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.regenerate_credential_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RegistryListCredentialsResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list_usages {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RegistryUsageListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/listUsages",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RegistryUsageListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod schedule_run {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Run),
Accepted202,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) run_request: models::RunRequest,
}
impl Builder {
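            /// Sends the scheduleRun request and returns a boxed future that
            /// resolves to the created [`Run`](models::Run) on 200, or to
            /// `Response::Accepted202` when the service queues the run.
            ///
            /// A minimal usage sketch (the `registries()` accessor and the
            /// variable names are hypothetical; error handling is elided):
            ///
            /// ```ignore
            /// let response = client
            ///     .registries()
            ///     .schedule_run(subscription_id, resource_group_name, registry_name, run_request)
            ///     .into_future()
            ///     .await?;
            /// ```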
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/scheduleRun",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.run_request).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Run =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod get_build_source_upload_url {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SourceUploadDefinition, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/listBuildSourceUploadUrl",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.registry_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SourceUploadDefinition =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
}
pub mod operations {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
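        /// Lists all of the available Azure Container Registry REST API operations.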
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> {
Box::pin(async move {
let url_str = &format!("{}/providers/Microsoft.ContainerRegistry/operations", self.client.endpoint(),);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OperationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
pub mod replications {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
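        /// Gets the properties of the specified replication.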
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
replication_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
replication_name: replication_name.into(),
}
}
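        /// Creates a replication for a container registry with the specified parameters.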
pub fn create(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
replication_name: impl Into<String>,
replication: impl Into<models::Replication>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
replication_name: replication_name.into(),
replication: replication.into(),
}
}
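        /// Updates a replication for a container registry with the specified parameters.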
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
replication_name: impl Into<String>,
replication_update_parameters: impl Into<models::ReplicationUpdateParameters>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
replication_name: replication_name.into(),
replication_update_parameters: replication_update_parameters.into(),
}
}
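        /// Deletes a replication from a container registry.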
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
replication_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
replication_name: replication_name.into(),
}
}
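        /// Lists all the replications for the specified container registry.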
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
}
pub mod get {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) replication_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Replication, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.replication_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Replication =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod create {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Replication),
Created201(models::Replication),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) replication_name: String,
pub(crate) replication: models::Replication,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.replication_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.replication).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Replication =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Replication =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Replication),
Created201(models::Replication),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) replication_name: String,
pub(crate) replication_update_parameters: models::ReplicationUpdateParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.replication_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.replication_update_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Replication =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Replication =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) replication_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.replication_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ReplicationListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ReplicationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
pub mod webhooks {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
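        /// Gets the properties of the specified webhook.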
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
webhook_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
webhook_name: webhook_name.into(),
}
}
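        /// Creates a webhook for a container registry with the specified parameters.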
pub fn create(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
webhook_name: impl Into<String>,
webhook_create_parameters: impl Into<models::WebhookCreateParameters>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
webhook_name: webhook_name.into(),
webhook_create_parameters: webhook_create_parameters.into(),
}
}
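        /// Updates a webhook with the specified parameters.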
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
webhook_name: impl Into<String>,
webhook_update_parameters: impl Into<models::WebhookUpdateParameters>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
webhook_name: webhook_name.into(),
webhook_update_parameters: webhook_update_parameters.into(),
}
}
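        /// Deletes a webhook from a container registry.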
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
webhook_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
webhook_name: webhook_name.into(),
}
}
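        /// Lists all the webhooks for the specified container registry.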
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
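        /// Triggers a ping event to be sent to the webhook.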
pub fn ping(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
webhook_name: impl Into<String>,
) -> ping::Builder {
ping::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
webhook_name: webhook_name.into(),
}
}
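        /// Gets the configuration of the service URI and custom headers for the webhook.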
pub fn get_callback_config(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
webhook_name: impl Into<String>,
) -> get_callback_config::Builder {
get_callback_config::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
webhook_name: webhook_name.into(),
}
}
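        /// Lists recent events for the specified webhook.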
pub fn list_events(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
webhook_name: impl Into<String>,
) -> list_events::Builder {
list_events::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
webhook_name: webhook_name.into(),
}
}
}
pub mod get {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) webhook_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Webhook, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.webhook_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Webhook =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod create {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Webhook),
Created201(models::Webhook),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) webhook_name: String,
pub(crate) webhook_create_parameters: models::WebhookCreateParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.webhook_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.webhook_create_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Webhook =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Webhook =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Webhook),
Created201(models::Webhook),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) webhook_name: String,
pub(crate) webhook_update_parameters: models::WebhookUpdateParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.webhook_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.webhook_update_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Webhook =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Webhook =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) webhook_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.webhook_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WebhookListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::WebhookListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod ping {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) webhook_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EventInfo, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}/ping",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.webhook_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EventInfo =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod get_callback_config {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) webhook_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CallbackConfig, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}/getCallbackConfig",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.registry_name,
                        &self.webhook_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CallbackConfig =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list_events {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) webhook_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EventListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}/listEvents",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.webhook_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-05-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EventListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
pub mod runs {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
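        /// Gets all the runs for the specified container registry.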
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
filter: None,
top: None,
}
}
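        /// Gets the detailed information for a given run.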
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
run_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
run_id: run_id.into(),
}
}
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
run_id: impl Into<String>,
run_update_parameters: impl Into<models::RunUpdateParameters>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
run_id: run_id.into(),
run_update_parameters: run_update_parameters.into(),
}
}
pub fn get_log_sas_url(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
run_id: impl Into<String>,
) -> get_log_sas_url::Builder {
get_log_sas_url::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
run_id: run_id.into(),
}
}
pub fn cancel(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
run_id: impl Into<String>,
) -> cancel::Builder {
cancel::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
run_id: run_id.into(),
}
}
}
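    // Usage sketch: the optional `$filter` and `$top` query parameters are set
    // through the chainable setters on `list::Builder` below. The client value
    // and the OData filter string are illustrative assumptions.
    //
    // async fn recent_failed_runs(client: &Client) -> Result<models::RunListResult, list::Error> {
    //     client
    //         .list("sub-id", "my-rg", "myregistry")
    //         .filter("status eq 'Failed'")
    //         .top(10)
    //         .into_future()
    //         .await
    // }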
pub mod list {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) filter: Option<String>,
pub(crate) top: Option<i32>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RunListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/runs",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RunListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod get {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) run_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Run, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/runs/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.run_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Run =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Run),
Created201(models::Run),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) run_id: String,
pub(crate) run_update_parameters: models::RunUpdateParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/runs/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.run_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.run_update_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Run =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Run =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod get_log_sas_url {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) run_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RunGetLogResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/runs/{}/listLogSasUrl",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.run_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RunGetLogResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod cancel {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) run_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/runs/{}/cancel",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.run_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
}
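// Usage sketch: operations with several success statuses (such as `runs::cancel`,
// which can answer 200 or 202) expose each status as a variant of a module-local
// `Response` enum instead of a single model, so callers match on it. Argument
// values here are illustrative.
//
// async fn cancel_run(client: &runs::Client) -> Result<(), runs::cancel::Error> {
//     match client
//         .cancel("sub-id", "my-rg", "myregistry", "run-id")
//         .into_future()
//         .await?
//     {
//         runs::cancel::Response::Ok200 => println!("run cancelled"),
//         runs::cancel::Response::Accepted202 => println!("cancellation accepted"),
//     }
//     Ok(())
// }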
pub mod tasks {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
task_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
task_name: task_name.into(),
}
}
pub fn create(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
task_name: impl Into<String>,
task_create_parameters: impl Into<models::Task>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
task_name: task_name.into(),
task_create_parameters: task_create_parameters.into(),
}
}
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
task_name: impl Into<String>,
task_update_parameters: impl Into<models::TaskUpdateParameters>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
task_name: task_name.into(),
task_update_parameters: task_update_parameters.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
task_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
task_name: task_name.into(),
}
}
pub fn get_details(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
registry_name: impl Into<String>,
task_name: impl Into<String>,
) -> get_details::Builder {
get_details::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
registry_name: registry_name.into(),
task_name: task_name.into(),
}
}
}
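    // Usage sketch: `create` sends the full `models::Task` payload over PUT,
    // while `update` sends the sparse `models::TaskUpdateParameters` over PATCH;
    // both deserialize 200 and 201 bodies into `models::Task`. Constructing the
    // task payload itself is elided here.
    //
    // async fn upsert_task(client: &Client, task: models::Task) -> Result<create::Response, create::Error> {
    //     client
    //         .create("sub-id", "my-rg", "myregistry", "my-task", task)
    //         .into_future()
    //         .await
    // }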
pub mod list {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::TaskListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/tasks",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::TaskListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod get {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) task_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Task, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/tasks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.task_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Task =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod create {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Task),
Created201(models::Task),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) task_name: String,
pub(crate) task_create_parameters: models::Task,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/tasks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.task_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.task_create_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Task =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Task =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Task),
Created201(models::Task),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) task_name: String,
pub(crate) task_update_parameters: models::TaskUpdateParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/tasks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.task_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.task_update_parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Task =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Task =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) task_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/tasks/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.task_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
pub mod get_details {
use super::models;
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) registry_name: String,
pub(crate) task_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Task, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/tasks/{}/listDetails",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.registry_name,
&self.task_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Task =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(Error::DefaultResponse { status_code }),
}
})
}
}
}
}
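// Usage sketch: `tasks::get` is a plain GET, while `tasks::get_details` POSTs to
// `.../listDetails` with an empty body (note the explicit zero `Content-Length`
// in its builder above). Argument values are illustrative.
//
// async fn task_with_details(client: &tasks::Client) -> Result<models::Task, tasks::get_details::Error> {
//     client
//         .get_details("sub-id", "my-rg", "myregistry", "my-task")
//         .into_future()
//         .await
// }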
// Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
use std::fmt::Display;
use expr::EvalError;
use serde::{Deserialize, Serialize};
#[derive(Ord, PartialOrd, Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub enum DecodeError {
Text(String),
}
impl Display for DecodeError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DecodeError::Text(e) => write!(f, "Text: {}", e),
}
}
}
#[derive(Ord, PartialOrd, Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub struct SourceError {
pub source_name: String,
pub error: SourceErrorDetails,
}
impl SourceError {
pub fn new(source_name: String, error: SourceErrorDetails) -> SourceError {
SourceError { source_name, error }
}
}
impl Display for SourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}: ", self.source_name)?;
self.error.fmt(f)
}
}
#[derive(Ord, PartialOrd, Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub enum SourceErrorDetails {
Initialization(String),
FileIO(String),
}
impl Display for SourceErrorDetails {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
SourceErrorDetails::Initialization(e) => {
write!(
f,
"failed during initialization, must be dropped and recreated: {}",
e
)
}
SourceErrorDetails::FileIO(e) => write!(f, "file IO: {}", e),
}
}
}
#[derive(Ord, PartialOrd, Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub enum DataflowError {
DecodeError(DecodeError),
EvalError(EvalError),
SourceError(SourceError),
}
impl Display for DataflowError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DataflowError::DecodeError(e) => write!(f, "Decode error: {}", e),
DataflowError::EvalError(e) => write!(f, "Evaluation error: {}", e),
DataflowError::SourceError(e) => write!(f, "Source error: {}", e),
}
}
}
impl From<DecodeError> for DataflowError {
fn from(e: DecodeError) -> Self {
Self::DecodeError(e)
}
}
impl From<EvalError> for DataflowError {
fn from(e: EvalError) -> Self {
Self::EvalError(e)
}
}
impl From<SourceError> for DataflowError {
fn from(e: SourceError) -> Self {
Self::SourceError(e)
}
}
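// Usage sketch: the `From` impls above are what allow `?` (or an explicit
// `map_err`) to lift a specific error into `DataflowError`. Hypothetical caller,
// not part of this crate:
//
// fn decode_utf8(bytes: &[u8]) -> Result<String, DataflowError> {
//     std::str::from_utf8(bytes)
//         .map(str::to_owned)
//         .map_err(|e| DataflowError::from(DecodeError::Text(e.to_string())))
// }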
use crate::{LaneID, Map, TurnID};
use geom::{Angle, Distance, PolyLine, Pt2D, Speed};
use serde::{Deserialize, Serialize};
use std::fmt;
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Position {
// Don't let callers construct a Position directly, so it's easy to find callers of new().
lane: LaneID,
dist_along: Distance,
}
impl fmt::Display for Position {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Position({}, {})", self.lane, self.dist_along)
}
}
impl Position {
pub fn new(lane: LaneID, dist_along: Distance) -> Position {
Position { lane, dist_along }
}
pub fn start(lane: LaneID) -> Position {
Position {
lane,
dist_along: Distance::ZERO,
}
}
pub fn end(lane: LaneID, map: &Map) -> Position {
Position {
lane,
dist_along: map.get_l(lane).length(),
}
}
pub fn lane(&self) -> LaneID {
self.lane
}
pub fn dist_along(&self) -> Distance {
self.dist_along
}
pub fn pt(&self, map: &Map) -> Pt2D {
match map
.get_l(self.lane)
.lane_center_pts
.dist_along(self.dist_along)
{
Ok((pt, _)) => pt,
Err(err) => panic!("{} invalid: {}", self, err),
}
}
pub fn pt_and_angle(&self, map: &Map) -> (Pt2D, Angle) {
match map
.get_l(self.lane)
.lane_center_pts
.dist_along(self.dist_along)
{
Ok(pair) => pair,
Err(err) => panic!("{} invalid: {}", self, err),
}
}
pub fn equiv_pos(&self, lane: LaneID, map: &Map) -> Position {
self.equiv_pos_for_long_object(lane, Distance::ZERO, map)
}
pub fn equiv_pos_for_long_object(
&self,
lane: LaneID,
our_len: Distance,
map: &Map,
) -> Position {
let r = map.get_parent(lane);
assert_eq!(map.get_l(self.lane).parent, r.id);
// TODO Project perpendicular
let len = map.get_l(lane).length();
// The two lanes may be on opposite sides of the road; this often happens on one-ways with
// sidewalks on both sides.
if r.is_forwards(lane) == r.is_forwards(self.lane) {
Position::new(lane, self.dist_along.min(len))
} else {
Position::new(
lane,
// TODO I don't understand what this is doing anymore in the one case, revisit
(len - self.dist_along + our_len)
.max(Distance::ZERO)
.min(len),
)
}
}
pub fn min_dist(mut self, dist_along: Distance, map: &Map) -> Option<Position> {
if self.dist_along >= dist_along {
return Some(self);
}
if map.get_l(self.lane).length() < dist_along {
return None;
}
self.dist_along = dist_along;
Some(self)
}
pub fn buffer_dist(mut self, buffer: Distance, map: &Map) -> Option<Position> {
let len = map.get_l(self.lane).length();
if len <= buffer * 2.0 {
return None;
}
self.dist_along = self.dist_along.max(buffer).min(len - buffer);
Some(self)
}
}
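// Usage sketch: both distance adjusters above return `None` when the lane is too
// short, so callers compose them with `?`. Hypothetical helper:
//
// fn safe_spawn_pos(lane: LaneID, map: &Map) -> Option<Position> {
//     Position::start(lane)
//         .min_dist(Distance::meters(5.0), map)?
//         .buffer_dist(Distance::meters(1.0), map)
// }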
// TODO also building paths?
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub enum Traversable {
Lane(LaneID),
Turn(TurnID),
}
impl fmt::Display for Traversable {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Traversable::Lane(id) => write!(f, "Traversable::Lane({})", id.0),
Traversable::Turn(id) => write!(
f,
"Traversable::Turn({}, {}, {})",
id.src, id.dst, id.parent
),
}
}
}
impl Traversable {
pub fn as_lane(&self) -> LaneID {
match *self {
Traversable::Lane(id) => id,
Traversable::Turn(_) => panic!("not a lane"),
}
}
pub fn as_turn(&self) -> TurnID {
match *self {
Traversable::Turn(id) => id,
Traversable::Lane(_) => panic!("not a turn"),
}
}
pub fn maybe_turn(&self) -> Option<TurnID> {
match *self {
Traversable::Turn(id) => Some(id),
Traversable::Lane(_) => None,
}
}
pub fn maybe_lane(&self) -> Option<LaneID> {
match *self {
Traversable::Turn(_) => None,
Traversable::Lane(id) => Some(id),
}
}
// TODO Just expose the PolyLine instead of all these layers of helpers
pub fn length(&self, map: &Map) -> Distance {
match *self {
Traversable::Lane(id) => map.get_l(id).length(),
Traversable::Turn(id) => map.get_t(id).geom.length(),
}
}
pub fn dist_along(&self, dist: Distance, map: &Map) -> Result<(Pt2D, Angle), String> {
match *self {
Traversable::Lane(id) => map.get_l(id).lane_center_pts.dist_along(dist),
Traversable::Turn(id) => map.get_t(id).geom.dist_along(dist),
}
}
pub fn slice(
&self,
start: Distance,
end: Distance,
map: &Map,
) -> Result<(PolyLine, Distance), String> {
match *self {
Traversable::Lane(id) => map.get_l(id).lane_center_pts.slice(start, end),
Traversable::Turn(id) => map.get_t(id).geom.slice(start, end),
}
}
pub fn exact_slice(&self, start: Distance, end: Distance, map: &Map) -> PolyLine {
match *self {
Traversable::Lane(id) => map.get_l(id).lane_center_pts.exact_slice(start, end),
Traversable::Turn(id) => map.get_t(id).geom.exact_slice(start, end),
}
}
pub fn speed_limit(&self, map: &Map) -> Speed {
match *self {
Traversable::Lane(id) => map.get_parent(id).speed_limit,
Traversable::Turn(id) => map.get_parent(id.dst).speed_limit,
}
}
pub fn get_zorder(&self, map: &Map) -> isize {
match *self {
Traversable::Lane(id) => map.get_parent(id).zorder,
Traversable::Turn(id) => map.get_i(id.parent).get_zorder(map),
}
}
}
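// Usage sketch: `Traversable` lets path-following code treat lanes and turns
// uniformly. A hypothetical helper that samples a point along either kind:
//
// fn point_at(on: Traversable, dist: Distance, map: &Map) -> Option<Pt2D> {
//     on.dist_along(dist, map).ok().map(|(pt, _angle)| pt)
// }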
use crate::{
util::{const_ptr_as_ref, mut_ptr_as_mut},
Error,
};
pub struct BatchMerkleProof(pub ergo_lib::ergo_merkle_tree::BatchMerkleProof);
pub type BatchMerkleProofPtr = *mut BatchMerkleProof;
pub type ConstBatchMerkleProofPtr = *const BatchMerkleProof;
pub unsafe fn batchmerkleproof_valid(
proof: ConstBatchMerkleProofPtr,
expected_root: &[u8],
) -> Result<bool, Error> {
let proof = const_ptr_as_ref(proof, "proof")?;
Ok(proof.0.valid(expected_root))
}
pub unsafe fn batchmerkleproof_from_json(
json: &str,
proof_out: *mut BatchMerkleProofPtr,
) -> Result<(), Error> {
let proof_out = mut_ptr_as_mut(proof_out, "proof_out")?;
*proof_out = Box::into_raw(Box::new(serde_json::from_str(json).map(BatchMerkleProof)?));
Ok(())
}
pub unsafe fn batchmerkleproof_to_json(proof: ConstBatchMerkleProofPtr) -> Result<String, Error> {
let proof = const_ptr_as_ref(proof, "proof")?;
serde_json::to_string(&proof.0).map_err(Error::from)
}
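// Usage sketch: these helpers follow the crate's C-FFI conventions -- the caller
// owns the pointer written by `batchmerkleproof_from_json` and must release it
// through the crate's corresponding delete function (assumed to live elsewhere
// in this crate; freeing is omitted below).
//
// unsafe fn round_trip(json_str: &str) -> Result<String, Error> {
//     let mut ptr: BatchMerkleProofPtr = std::ptr::null_mut();
//     batchmerkleproof_from_json(json_str, &mut ptr)?;
//     batchmerkleproof_to_json(ptr)
// }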
use std::cmp::Ordering;
use std::ops::Range;
use amethyst_core::specs::prelude::{Component, DenseVecStorage, Entities, Entity, Join, Read,
ReadStorage, Resources, System, Write, WriteStorage};
use amethyst_core::timing::Time;
use clipboard::{ClipboardContext, ClipboardProvider};
use hibitset::BitSet;
use rusttype::PositionedGlyph;
use shrev::{EventChannel, ReaderId};
use unicode_normalization::UnicodeNormalization;
use unicode_normalization::char::is_combining_mark;
use unicode_segmentation::UnicodeSegmentation;
use winit::{ElementState, Event, KeyboardInput, ModifiersState, MouseButton, VirtualKeyCode,
WindowEvent};
use super::*;
/// A component used to display text in this entity's UiTransform
#[derive(Clone, Derivative)]
#[derivative(Debug)]
pub struct UiText {
/// The string rendered by this.
pub text: String,
/// The height of a line of text in pixels.
pub font_size: f32,
/// The color of the rendered text, using a range of 0.0 to 1.0 per channel.
pub color: [f32; 4],
/// The font used for rendering.
pub font: FontHandle,
/// If true this will be rendered as dots instead of the text.
pub password: bool,
/// Cached FontHandle, used to detect changes to the font.
pub(crate) cached_font: FontHandle,
/// Cached glyph positions, used to process mouse highlighting
#[derivative(Debug = "ignore")]
pub(crate) cached_glyphs: Vec<PositionedGlyph<'static>>,
/// Cached id used to retrieve the `GlyphBrush` in the `UiPass`.
pub(crate) brush_id: Option<u32>,
}
impl UiText {
/// Initializes a new UiText
///
/// # Parameters
///
/// * `font`: A handle to a `Font` asset
/// * `text`: the glyphs to render
/// * `color`: RGBA color with a maximum of 1.0 and a minimum of 0.0 for each channel
/// * `font_size`: a uniform scale applied to the glyphs
pub fn new(font: FontHandle, text: String, color: [f32; 4], font_size: f32) -> UiText {
UiText {
text,
color,
font_size,
font: font.clone(),
password: false,
cached_font: font,
cached_glyphs: Vec::new(),
brush_id: None,
}
}
}
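// Usage sketch (names are illustrative): a white, 20px label attached to an
// entity. `font_handle` is a previously loaded `FontHandle` asset and
// `ui_transform` a `UiTransform`; neither is defined in this file.
//
// let label = UiText::new(
//     font_handle.clone(),
//     "Score: 0".to_string(),
//     [1.0, 1.0, 1.0, 1.0],
//     20.0,
// );
// world.create_entity().with(ui_transform).with(label).build();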
impl Component for UiText {
type Storage = DenseVecStorage<Self>;
}
/// If this component is attached to an entity with a UiText then that UiText is editable.
/// This component also controls how that editing works.
pub struct TextEditing {
/// The current editing cursor position, specified in terms of glyphs, not characters.
pub cursor_position: isize,
/// The maximum graphemes permitted in this string.
pub max_length: usize,
/// The amount and direction of glyphs highlighted relative to the cursor.
pub highlight_vector: isize,
/// The color of the text itself when highlighted.
pub selected_text_color: [f32; 4],
/// The text background color when highlighted.
pub selected_background_color: [f32; 4],
/// If this is true the text will use a block cursor for editing. Otherwise this uses a
/// standard line cursor. This is not recommended if your font is not monospace.
pub use_block_cursor: bool,
/// This value is used to control cursor blinking.
///
/// When it is greater than 0.5 / CURSOR_BLINK_RATE the cursor should not display, when it
/// is greater than or equal to 1.0 / CURSOR_BLINK_RATE it should be reset to 0. When the
/// player types it should be reset to 0.
pub(crate) cursor_blink_timer: f32,
}
impl TextEditing {
/// Create a new TextEditing Component
pub fn new(
max_length: usize,
selected_text_color: [f32; 4],
selected_background_color: [f32; 4],
use_block_cursor: bool,
) -> TextEditing {
TextEditing {
cursor_position: 0,
max_length,
highlight_vector: 0,
selected_text_color,
selected_background_color,
use_block_cursor,
cursor_blink_timer: 0.0,
}
}
}
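// Usage sketch: attaching `TextEditing` to the same entity as a `UiText` makes
// that text editable. The colors here are illustrative; `false` selects the
// line cursor recommended for non-monospace fonts.
//
// let editing = TextEditing::new(
//     40,                    // max_length in graphemes
//     [0.0, 0.0, 0.0, 1.0],  // selected text color
//     [1.0, 1.0, 1.0, 1.0],  // selected background color
//     false,                 // use_block_cursor
// );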
impl Component for TextEditing {
type Storage = DenseVecStorage<Self>;
}
struct CachedTabOrder {
pub cached: BitSet,
pub cache: Vec<(i32, Entity)>,
}
/// This system processes the underlying UI data as needed.
pub struct UiSystem {
/// A reader for winit events.
reader: Option<ReaderId<Event>>,
/// A cache sorted by tab order, and then by Entity.
tab_order_cache: CachedTabOrder,
/// This is set to true while the left mouse button is pressed.
left_mouse_button_pressed: bool,
/// The screen coordinates of the mouse
mouse_position: (f32, f32),
}
impl UiSystem {
    /// Initializes a new UiSystem; the winit event reader is registered later in `setup`.
pub fn new() -> Self {
Self {
reader: None,
tab_order_cache: CachedTabOrder {
cached: BitSet::new(),
cache: Vec::new(),
},
left_mouse_button_pressed: false,
mouse_position: (0., 0.),
}
}
}
impl<'a> System<'a> for UiSystem {
type SystemData = (
Entities<'a>,
WriteStorage<'a, UiText>,
WriteStorage<'a, TextEditing>,
ReadStorage<'a, UiTransform>,
Write<'a, UiFocused>,
Read<'a, EventChannel<Event>>,
Read<'a, Time>,
);
fn run(
&mut self,
(entities, mut text, mut editable, transform, mut focused, events, time): Self::SystemData,
) {
// Populate and update the tab order cache.
{
let bitset = &mut self.tab_order_cache.cached;
self.tab_order_cache.cache.retain(|&(_t, entity)| {
let keep = transform.contains(entity);
if !keep {
bitset.remove(entity.id());
}
keep
});
}
for &mut (ref mut t, entity) in &mut self.tab_order_cache.cache {
*t = transform.get(entity).unwrap().tab_order;
}
// Attempt to insert the new entities in sorted position. Should reduce work during
// the sorting step.
let transform_set = transform.mask().clone();
{
// Create a bitset containing only the new indices.
let new = (&transform_set ^ &self.tab_order_cache.cached) & &transform_set;
for (entity, transform, _new) in (&*entities, &transform, &new).join() {
let pos = self.tab_order_cache
.cache
.iter()
.position(|&(cached_t, _)| transform.tab_order < cached_t);
match pos {
Some(pos) => self.tab_order_cache
.cache
.insert(pos, (transform.tab_order, entity)),
None => self.tab_order_cache
.cache
.push((transform.tab_order, entity)),
}
}
}
self.tab_order_cache.cached = transform_set;
// Sort from smallest tab order to largest tab order, then by entity creation time.
// Most of the time this shouldn't do anything but you still need it for if the tab orders
// change.
self.tab_order_cache
.cache
.sort_unstable_by(|&(t1, ref e1), &(t2, ref e2)| {
let ret = t1.cmp(&t2);
if ret == Ordering::Equal {
return e1.cmp(e2);
}
ret
});
for text in (&mut text).join() {
            if text.text.chars().any(is_combining_mark) {
let normalized = text.text.nfd().collect::<String>();
text.text = normalized;
}
}
{
let mut focused_text_edit = focused.entity.and_then(|entity| {
text.get_mut(entity)
.into_iter()
.zip(editable.get_mut(entity).into_iter())
.next()
});
if let Some((ref mut _focused_text, ref mut focused_edit)) = focused_text_edit {
focused_edit.cursor_blink_timer += time.delta_real_seconds();
if focused_edit.cursor_blink_timer >= 1.0 / CURSOR_BLINK_RATE {
focused_edit.cursor_blink_timer = 0.0;
}
}
}
for event in events.read(self.reader.as_mut().unwrap()) {
// Process events for the whole UI.
match *event {
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
state: ElementState::Pressed,
virtual_keycode: Some(VirtualKeyCode::Tab),
modifiers,
..
},
..
},
..
} => if let Some(focused) = focused.entity.as_mut() {
if let Some((i, _)) = self.tab_order_cache
.cache
.iter()
.enumerate()
.find(|&(_i, &(_, entity))| entity == *focused)
{
                        // `i` was just found in the cache, so the cache is
                        // non-empty and the wrap-around arithmetic below is safe.
                        let len = self.tab_order_cache.cache.len();
                        let new_i = if modifiers.shift {
                            (i + len - 1) % len
                        } else {
                            (i + 1) % len
                        };
                        *focused = self.tab_order_cache.cache[new_i].1;
}
},
Event::WindowEvent {
event: WindowEvent::CursorMoved { position, .. },
..
} => {
self.mouse_position = (position.0 as f32, position.1 as f32);
if self.left_mouse_button_pressed {
let mut focused_text_edit = focused.entity.and_then(|entity| {
text.get_mut(entity)
.into_iter()
.zip(editable.get_mut(entity).into_iter())
.next()
});
if let Some((ref mut focused_text, ref mut focused_edit)) =
focused_text_edit
{
use std::f32::NAN;
let mouse_x = self.mouse_position.0;
let mouse_y = self.mouse_position.1;
// Find the glyph closest to the mouse position.
focused_edit.highlight_vector = focused_text
.cached_glyphs
.iter()
.enumerate()
.fold((0, (NAN, NAN)), |(index, (x, y)), (i, g)| {
let pos = g.position();
// Use Pythagorean theorem to compute distance
if ((x - mouse_x).powi(2) + (y - mouse_y).powi(2)).sqrt()
< ((pos.x - mouse_x).powi(2) + (pos.y - mouse_y).powi(2))
.sqrt()
{
(index, (x, y))
} else {
(i, (pos.x, pos.y))
}
})
.0
as isize
- focused_edit.cursor_position;
// The end of the text, while not a glyph, is still something
// you'll likely want to click your cursor to, so if the cursor is
// near the end of the text, check if we should put it at the end
// of the text.
if focused_edit.cursor_position + focused_edit.highlight_vector + 1
== focused_text.cached_glyphs.len() as isize
{
if let Some(last_glyph) = focused_text.cached_glyphs.iter().last() {
if (last_glyph.position().x - mouse_x).abs()
> ((last_glyph.position().x
+ last_glyph.unpositioned().h_metrics().advance_width)
- mouse_x)
.abs()
{
focused_edit.highlight_vector += 1;
}
}
}
}
}
}
Event::WindowEvent {
event:
WindowEvent::MouseInput {
button: MouseButton::Left,
state,
..
},
..
} => {
match state {
ElementState::Pressed => {
use std::f32::INFINITY;
self.left_mouse_button_pressed = true;
// Start searching for an element to focus.
// Find all eligible elements
let mut eligible = (&*entities, &transform)
.join()
.filter(|&(_, t)| {
t.global_x - t.width / 2.0 <= self.mouse_position.0
&& t.global_x + t.width / 2.0 >= self.mouse_position.0
&& t.global_y - t.height / 2.0 <= self.mouse_position.1
&& t.global_y + t.height / 2.0 >= self.mouse_position.1
})
.collect::<Vec<_>>();
// In instances of ambiguity we want to select the element with the
// lowest Z order, so we need to find the lowest Z order value among
// eligible elements
let lowest_z = eligible
.iter()
.fold(INFINITY, |lowest, &(_, t)| lowest.min(t.global_z));
// Then filter by it
eligible.retain(|&(_, t)| t.global_z == lowest_z);
// We may still have ambiguity as to what to select at this point,
// so we'll resolve that by selecting the most recently created
// element.
focused.entity = eligible.iter().fold(None, |most_recent, &(e, _)| {
Some(match most_recent {
Some(most_recent) => if most_recent > e {
most_recent
} else {
e
},
None => e,
})
});
// If we focused an editable text field be sure to position the cursor
// in it.
let mut focused_text_edit = focused.entity.and_then(|entity| {
text.get_mut(entity)
.into_iter()
.zip(editable.get_mut(entity).into_iter())
.next()
});
if let Some((ref mut focused_text, ref mut focused_edit)) =
focused_text_edit
{
use std::f32::NAN;
let mouse_x = self.mouse_position.0;
let mouse_y = self.mouse_position.1;
// Find the glyph closest to the click position.
focused_edit.highlight_vector = 0;
focused_edit.cursor_position = focused_text
.cached_glyphs
.iter()
.enumerate()
.fold((0, (NAN, NAN)), |(index, (x, y)), (i, g)| {
let pos = g.position();
// Use Pythagorean theorem to compute distance
if ((x - mouse_x).powi(2) + (y - mouse_y).powi(2)).sqrt()
< ((pos.x - mouse_x).powi(2)
+ (pos.y - mouse_y).powi(2))
.sqrt()
{
(index, (x, y))
} else {
(i, (pos.x, pos.y))
}
})
.0
as isize;
// The end of the text, while not a glyph, is still something
// you'll likely want to click your cursor to, so if the cursor is
// near the end of the text, check if we should put it at the end
// of the text.
if focused_edit.cursor_position + 1
== focused_text.cached_glyphs.len() as isize
{
if let Some(last_glyph) =
focused_text.cached_glyphs.iter().last()
{
if (last_glyph.position().x - mouse_x).abs()
> ((last_glyph.position().x
+ last_glyph
.unpositioned()
.h_metrics()
.advance_width)
- mouse_x)
.abs()
{
focused_edit.cursor_position += 1;
}
}
}
}
}
ElementState::Released => {
self.left_mouse_button_pressed = false;
}
}
}
_ => {}
}
let mut focused_text_edit = focused.entity.and_then(|entity| {
text.get_mut(entity)
.into_iter()
.zip(editable.get_mut(entity).into_iter())
.next()
});
// Process events for the focused text element
if let Some((ref mut focused_text, ref mut focused_edit)) = focused_text_edit {
match *event {
Event::WindowEvent {
event: WindowEvent::ReceivedCharacter(input),
..
} => {
// Ignore obsolete control characters, and tab characters we can't render
                        // properly anyway. Also ignore newline characters since we don't
// support multi-line text at the moment.
if input < '\u{8}' || (input > '\u{8}' && input < '\u{20}') {
continue;
}
                        // Since the delete character isn't emitted on Windows, ignore it too.
// We'll handle this with the KeyboardInput event instead.
if input == '\u{7F}' {
continue;
}
focused_edit.cursor_blink_timer = 0.0;
let deleted = delete_highlighted(focused_edit, focused_text);
let start_byte = focused_text
.text
.grapheme_indices(true)
.nth(focused_edit.cursor_position as usize)
.map(|i| i.0)
.unwrap_or_else(|| {
// We are either in a 0 length string, or at the end of a string
// This line returns the correct byte index for both.
focused_text.text.len()
});
match input {
'\u{8}' /*Backspace*/ => if !deleted {
if focused_edit.cursor_position > 0 {
if let Some((byte, len)) = focused_text
.text
.grapheme_indices(true)
.nth(focused_edit.cursor_position as usize - 1)
.map(|i| (i.0, i.1.len())) {
{
focused_text.text.drain(byte..(byte + len));
}
focused_edit.cursor_position -= 1;
}
}
},
_ => {
if focused_text.text.graphemes(true).count() < focused_edit.max_length {
focused_text.text.insert(start_byte, input);
focused_edit.cursor_position += 1;
}
}
}
}
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
state: ElementState::Pressed,
virtual_keycode: Some(v_keycode),
modifiers,
..
},
..
},
..
} => match v_keycode {
VirtualKeyCode::Home | VirtualKeyCode::Up => {
focused_edit.highlight_vector = if modifiers.shift {
focused_edit.cursor_position
} else {
0
};
focused_edit.cursor_position = 0;
focused_edit.cursor_blink_timer = 0.0;
}
VirtualKeyCode::End | VirtualKeyCode::Down => {
let glyph_len = focused_text.text.graphemes(true).count() as isize;
focused_edit.highlight_vector = if modifiers.shift {
focused_edit.cursor_position - glyph_len
} else {
0
};
focused_edit.cursor_position = glyph_len;
focused_edit.cursor_blink_timer = 0.0;
}
VirtualKeyCode::Delete => {
if !delete_highlighted(focused_edit, focused_text) {
if let Some((start_byte, start_glyph_len)) = focused_text
.text
.grapheme_indices(true)
.nth(focused_edit.cursor_position as usize)
.map(|i| (i.0, i.1.len()))
{
focused_edit.cursor_blink_timer = 0.0;
focused_text
.text
.drain(start_byte..(start_byte + start_glyph_len));
}
}
}
VirtualKeyCode::Left => if focused_edit.highlight_vector == 0
|| modifiers.shift
{
if focused_edit.cursor_position > 0 {
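                            // Ctrl/Cmd+Left jumps back to the previous word
                            // boundary; otherwise move left by one grapheme.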
let delta = if ctrl_or_cmd(&modifiers) {
let mut graphemes = 0;
for word in focused_text.text.split_word_bounds() {
let word_graphemes = word.graphemes(true).count() as isize;
if graphemes + word_graphemes
>= focused_edit.cursor_position
{
break;
}
graphemes += word_graphemes;
}
focused_edit.cursor_position - graphemes
} else {
1
};
focused_edit.cursor_position -= delta;
if modifiers.shift {
focused_edit.highlight_vector += delta;
}
focused_edit.cursor_blink_timer = 0.0;
}
} else {
focused_edit.cursor_position = focused_edit
.cursor_position
.min(focused_edit.cursor_position + focused_edit.highlight_vector);
focused_edit.highlight_vector = 0;
},
VirtualKeyCode::Right => {
if focused_edit.highlight_vector == 0 || modifiers.shift {
let glyph_len = focused_text.text.graphemes(true).count();
if (focused_edit.cursor_position as usize) < glyph_len {
let delta = if ctrl_or_cmd(&modifiers) {
let mut graphemes = 0;
for word in focused_text.text.split_word_bounds() {
graphemes += word.graphemes(true).count() as isize;
if graphemes > focused_edit.cursor_position {
break;
}
}
graphemes - focused_edit.cursor_position
} else {
1
};
focused_edit.cursor_position += delta;
if modifiers.shift {
focused_edit.highlight_vector -= delta;
}
focused_edit.cursor_blink_timer = 0.0;
}
} else {
focused_edit.cursor_position = focused_edit.cursor_position.max(
focused_edit.cursor_position + focused_edit.highlight_vector,
);
focused_edit.highlight_vector = 0;
}
}
VirtualKeyCode::A => if ctrl_or_cmd(&modifiers) {
let glyph_len = focused_text.text.graphemes(true).count() as isize;
focused_edit.cursor_position = glyph_len;
focused_edit.highlight_vector = -glyph_len;
},
VirtualKeyCode::X => if ctrl_or_cmd(&modifiers) {
let new_clip = extract_highlighted(focused_edit, focused_text);
if new_clip.len() > 0 {
let mut ctx: ClipboardContext = ClipboardProvider::new().unwrap();
ctx.set_contents(new_clip).unwrap();
}
},
VirtualKeyCode::C => if ctrl_or_cmd(&modifiers) {
let new_clip = read_highlighted(focused_edit, focused_text);
if new_clip.len() > 0 {
let mut ctx: ClipboardContext = ClipboardProvider::new().unwrap();
ctx.set_contents(new_clip.to_owned()).unwrap();
}
},
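                    // Paste: drop any highlighted text first, then insert at
                    // most as many clipboard graphemes as `max_length` allows.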
VirtualKeyCode::V => if ctrl_or_cmd(&modifiers) {
delete_highlighted(focused_edit, focused_text);
let mut ctx: ClipboardContext = ClipboardProvider::new().unwrap();
if let Ok(contents) = ctx.get_contents() {
let index = cursor_byte_index(focused_edit, focused_text);
let empty_space = focused_edit.max_length
- focused_text.text.graphemes(true).count();
let contents = contents.graphemes(true).take(empty_space).fold(
String::new(),
|mut init, new| {
init.push_str(new);
init
},
);
focused_text.text.insert_str(index, &contents);
focused_edit.cursor_position +=
contents.graphemes(true).count() as isize;
}
},
_ => {}
},
_ => {}
}
}
}
}
fn setup(&mut self, res: &mut Resources) {
use amethyst_core::specs::prelude::SystemData;
Self::SystemData::setup(res);
self.reader = Some(res.fetch_mut::<EventChannel<Event>>().register_reader());
}
}
/// Returns true if the command key is down on macOS, or the CTRL key on any other platform.
fn ctrl_or_cmd(modifiers: &ModifiersState) -> bool {
(cfg!(target_os = "macos") && modifiers.logo)
|| (cfg!(not(target_os = "macos")) && modifiers.ctrl)
}
fn read_highlighted<'a>(edit: &TextEditing, text: &'a UiText) -> &'a str {
let range = highlighted_bytes(edit, text);
&text.text[range]
}
/// Removes the highlighted text and returns it in a String.
fn extract_highlighted(edit: &mut TextEditing, text: &mut UiText) -> String {
let range = highlighted_bytes(edit, text);
edit.cursor_position = range.start as isize;
edit.highlight_vector = 0;
text.text.drain(range).collect::<String>()
}
/// Removes the highlighted text and returns true if anything was deleted.
fn delete_highlighted(edit: &mut TextEditing, text: &mut UiText) -> bool {
if edit.highlight_vector != 0 {
let range = highlighted_bytes(edit, text);
edit.cursor_position = range.start as isize;
edit.highlight_vector = 0;
text.text.drain(range);
return true;
}
false
}
/// Returns the byte index of the cursor.
fn cursor_byte_index(edit: &TextEditing, text: &UiText) -> usize {
text.text
.grapheme_indices(true)
.nth(edit.cursor_position as usize)
.map(|i| i.0)
.unwrap_or(text.text.len())
}
/// Returns the byte indices that are highlighted in the string.
fn highlighted_bytes(edit: &TextEditing, text: &UiText) -> Range<usize> {
let start = edit.cursor_position
.min(edit.cursor_position + edit.highlight_vector) as usize;
let end = edit.cursor_position
.max(edit.cursor_position + edit.highlight_vector) as usize;
let start_byte = text.text
.grapheme_indices(true)
.nth(start)
.map(|i| i.0)
.unwrap_or(text.text.len());
let end_byte = text.text
.grapheme_indices(true)
.nth(end)
.map(|i| i.0)
.unwrap_or(text.text.len());
start_byte..end_byte
}
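// Worked example (illustrative, not from the original source): for the text
// "héllo" with cursor_position = 1 and highlight_vector = 2, graphemes 1 and 2
// ("él") are highlighted, and the returned range is 1..4 because 'é' occupies
// two bytes in UTF-8.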
| 47.235294 | 104 | 0.413247 |
c15ac1217fcd4f76b3427c0bc71ed6c07e123d4e | 12,845 | use wagyu_model::{Amount, AmountError};
use ethereum_types::U256;
use serde::Serialize;
use std::fmt;
/// Represents the amount of Ethereum in wei
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct EthereumAmount(pub U256);
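/// The denominations of ether, ordered from wei (the base unit) up to ether
/// (10^18 wei).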
pub enum Denomination {
Wei,
Kwei,
Mwei,
Gwei,
Szabo,
Finney,
Ether,
}
impl Denomination {
    /// The number of decimal places of this denomination relative to wei (1 unit = 10^precision wei).
fn precision(self) -> u32 {
match self {
Denomination::Wei => 0,
Denomination::Kwei => 3,
Denomination::Mwei => 6,
Denomination::Gwei => 9,
Denomination::Szabo => 12,
Denomination::Finney => 15,
Denomination::Ether => 18,
}
}
}
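// A minimal sketch of the precision table in action (added for illustration,
// using only the constructors defined below): each denomination is
// 10^precision wei, so one ether is 10^18 wei.
#[cfg(test)]
mod precision_sketch {
    use super::*;

    #[test]
    fn one_ether_is_ten_pow_18_wei() {
        let one_eth = EthereumAmount::from_eth("1").unwrap();
        assert_eq!(one_eth.to_string(), "1000000000000000000");
    }
}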
impl fmt::Display for Denomination {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match self {
Denomination::Wei => "wei",
Denomination::Kwei => "kwei",
Denomination::Mwei => "mwei",
Denomination::Gwei => "gwei",
Denomination::Szabo => "szabo",
Denomination::Finney => "finney",
Denomination::Ether => "ETH",
}
)
}
}
impl Amount for EthereumAmount {}
impl EthereumAmount {
pub fn u256_from_str(val: &str) -> Result<U256, AmountError> {
match U256::from_dec_str(val) {
Ok(wei) => Ok(wei),
Err(error) => return Err(AmountError::Crate("uint", format!("{:?}", error))),
}
}
pub fn from_u256(wei: U256) -> Self {
Self(wei)
}
pub fn from_wei(wei_value: &str) -> Result<Self, AmountError> {
let wei = Self::u256_from_str(wei_value)?;
Ok(Self::from_u256(wei))
}
pub fn from_kwei(kwei_value: &str) -> Result<Self, AmountError> {
let wei = Self::u256_from_str(kwei_value)? * 10_i64.pow(Denomination::Kwei.precision());
Ok(Self::from_u256(wei))
}
pub fn from_mwei(mwei_value: &str) -> Result<Self, AmountError> {
let wei = Self::u256_from_str(mwei_value)? * 10_i64.pow(Denomination::Mwei.precision());
Ok(Self::from_u256(wei))
}
pub fn from_gwei(gwei_value: &str) -> Result<Self, AmountError> {
let wei = Self::u256_from_str(gwei_value)? * 10_i64.pow(Denomination::Gwei.precision());
Ok(Self::from_u256(wei))
}
pub fn from_szabo(szabo_value: &str) -> Result<Self, AmountError> {
let wei = Self::u256_from_str(szabo_value)? * 10_i64.pow(Denomination::Szabo.precision());
Ok(Self::from_u256(wei))
}
pub fn from_finney(finney_value: &str) -> Result<Self, AmountError> {
let wei = Self::u256_from_str(finney_value)? * 10_i64.pow(Denomination::Finney.precision());
Ok(Self::from_u256(wei))
}
pub fn from_eth(eth_value: &str) -> Result<Self, AmountError> {
let wei = Self::u256_from_str(eth_value)? * 10_i64.pow(Denomination::Ether.precision());
Ok(Self::from_u256(wei))
}
pub fn add(self, b: Self) -> Self {
Self::from_u256(self.0 + b.0)
}
pub fn sub(self, b: Self) -> Self {
Self::from_u256(self.0 - b.0)
}
}
impl fmt::Display for EthereumAmount {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0.to_string())
}
}
#[cfg(test)]
mod tests {
use super::*;
fn test_from_wei(wei_value: &str, expected_amount: &str) {
let amount = EthereumAmount::from_wei(wei_value).unwrap();
assert_eq!(expected_amount, amount.to_string())
}
fn test_from_finney(finney_value: &str, expected_amount: &str) {
let amount = EthereumAmount::from_finney(finney_value).unwrap();
assert_eq!(expected_amount, amount.to_string())
}
fn test_from_szabo(szabo_value: &str, expected_amount: &str) {
let amount = EthereumAmount::from_szabo(szabo_value).unwrap();
assert_eq!(expected_amount, amount.to_string())
}
fn test_from_gwei(gwei_value: &str, expected_amount: &str) {
let amount = EthereumAmount::from_gwei(gwei_value).unwrap();
assert_eq!(expected_amount, amount.to_string())
}
fn test_from_mwei(mwei_value: &str, expected_amount: &str) {
let amount = EthereumAmount::from_mwei(mwei_value).unwrap();
assert_eq!(expected_amount, amount.to_string())
}
fn test_from_kwei(kwei_value: &str, expected_amount: &str) {
let amount = EthereumAmount::from_kwei(kwei_value).unwrap();
assert_eq!(expected_amount, amount.to_string())
}
fn test_from_eth(eth_value: &str, expected_amount: &str) {
let amount = EthereumAmount::from_eth(eth_value).unwrap();
assert_eq!(expected_amount, amount.to_string())
}
fn test_addition(a: &str, b: &str, result: &str) {
let a = EthereumAmount::from_wei(a).unwrap();
let b = EthereumAmount::from_wei(b).unwrap();
let result = EthereumAmount::from_wei(result).unwrap();
assert_eq!(result, a.add(b));
}
fn test_subtraction(a: &str, b: &str, result: &str) {
let a = EthereumAmount::from_wei(a).unwrap();
let b = EthereumAmount::from_wei(b).unwrap();
let result = EthereumAmount::from_wei(result).unwrap();
assert_eq!(result, a.sub(b));
}
pub struct AmountDenominationTestCase {
wei: &'static str,
kwei: &'static str,
mwei: &'static str,
gwei: &'static str,
szabo: &'static str,
finney: &'static str,
ether: &'static str,
}
mod valid_conversions {
use super::*;
const TEST_AMOUNTS: [AmountDenominationTestCase; 5] = [
AmountDenominationTestCase {
wei: "0",
kwei: "0",
mwei: "0",
gwei: "0",
szabo: "0",
finney: "0",
ether: "0",
},
AmountDenominationTestCase {
wei: "1000000000000000000",
kwei: "1000000000000000",
mwei: "1000000000000",
gwei: "1000000000",
szabo: "1000000",
finney: "1000",
ether: "1",
},
AmountDenominationTestCase {
wei: "1000000000000000000000",
kwei: "1000000000000000000",
mwei: "1000000000000000",
gwei: "1000000000000",
szabo: "1000000000",
finney: "1000000",
ether: "1000",
},
AmountDenominationTestCase {
wei: "1234567000000000000000000",
kwei: "1234567000000000000000",
mwei: "1234567000000000000",
gwei: "1234567000000000",
szabo: "1234567000000",
finney: "1234567000",
ether: "1234567",
},
AmountDenominationTestCase {
wei: "100000000000000000000000000",
kwei: "100000000000000000000000",
mwei: "100000000000000000000",
gwei: "100000000000000000",
szabo: "100000000000000",
finney: "100000000000",
ether: "100000000",
},
];
#[test]
fn test_wei_conversion() {
TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_wei(amounts.wei, amounts.wei));
}
#[test]
fn test_finney_conversion() {
TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_finney(amounts.finney, amounts.wei));
}
#[test]
fn test_szabo_conversion() {
TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_szabo(amounts.szabo, amounts.wei));
}
#[test]
fn test_gwei_conversion() {
TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_gwei(amounts.gwei, amounts.wei));
}
#[test]
fn test_mwei_conversion() {
TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_mwei(amounts.mwei, amounts.wei));
}
#[test]
fn test_kwei_conversion() {
TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_kwei(amounts.kwei, amounts.wei));
}
#[test]
fn test_eth_conversion() {
TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_eth(amounts.ether, amounts.wei));
}
}
mod valid_arithmetic {
use super::*;
const TEST_VALUES: [(&str, &str, &str); 7] = [
("0", "0", "0"),
("1", "2", "3"),
("100000", "0", "100000"),
("123456789", "987654321", "1111111110"),
("1000000000000000", "2000000000000000", "3000000000000000"),
(
"10000000000000000000001",
"20000000000000000000002",
"30000000000000000000003",
),
(
"1000000000000000000000000",
"1000000000000000000000000",
"2000000000000000000000000",
),
];
#[test]
fn test_valid_addition() {
TEST_VALUES.iter().for_each(|(a, b, c)| test_addition(a, b, c));
}
#[test]
fn test_valid_subtraction() {
TEST_VALUES.iter().for_each(|(a, b, c)| test_subtraction(c, b, a));
}
}
mod test_invalid {
use super::*;
mod test_invalid_conversion {
use super::*;
const INVALID_TEST_AMOUNTS: [AmountDenominationTestCase; 4] = [
AmountDenominationTestCase {
wei: "1",
kwei: "1",
mwei: "1",
gwei: "1",
szabo: "1",
finney: "1",
ether: "1",
},
AmountDenominationTestCase {
wei: "1",
kwei: "1000",
mwei: "1000000",
gwei: "1000000000",
szabo: "1000000000000",
finney: "1000000000000000",
ether: "1000000000000000000",
},
AmountDenominationTestCase {
wei: "1234567891234567891",
kwei: "1234567891234567",
mwei: "1234567891234",
gwei: "1234567891",
szabo: "1234567",
finney: "1234",
ether: "1",
},
AmountDenominationTestCase {
wei: "1000000000000000000000000",
kwei: "1000000000000000000000",
mwei: "1000000000000000000",
gwei: "1000000000000000",
szabo: "1000000000000",
finney: "1000000000",
ether: "1000001",
},
];
#[should_panic]
#[test]
fn test_invalid_finney_conversion() {
INVALID_TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_finney(amounts.finney, amounts.wei));
}
#[should_panic]
#[test]
fn test_invalid_szabo_conversion() {
INVALID_TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_szabo(amounts.szabo, amounts.wei));
}
#[should_panic]
#[test]
fn test_invalid_gwei_conversion() {
INVALID_TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_gwei(amounts.gwei, amounts.wei));
}
#[should_panic]
#[test]
fn test_invalid_mwei_conversion() {
INVALID_TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_mwei(amounts.mwei, amounts.wei));
}
#[should_panic]
#[test]
fn test_invalid_kwei_conversion() {
INVALID_TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_kwei(amounts.kwei, amounts.wei));
}
#[should_panic]
#[test]
fn test_invalid_eth_conversion() {
INVALID_TEST_AMOUNTS
.iter()
.for_each(|amounts| test_from_eth(amounts.ether, amounts.wei));
}
}
}
}
| 30.729665 | 100 | 0.508914 |
7abe5a1d8d1ac2df8b11531bc8a1c1e6f0e3b83f | 2,177 | use async_lock::RwLock;
use azure_core::auth::{TokenCredential, TokenResponse};
use azure_core::error::{Error, ErrorKind, Result};
use chrono::{Duration, Utc};
use std::sync::Arc;
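// Treat a token as expired 20 seconds before its real expiry so callers
// refresh early rather than send a request with a token that lapses in flight.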
fn is_expired(token: &TokenResponse) -> bool {
token.expires_on < Utc::now() + Duration::seconds(20)
}
#[derive(Clone)]
/// Wraps a TokenCredential and handles token refresh on token expiry
pub struct AutoRefreshingTokenCredential {
credential: Arc<dyn TokenCredential>,
current_token: Arc<RwLock<Option<Result<TokenResponse>>>>,
}
impl std::fmt::Debug for AutoRefreshingTokenCredential {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
f.debug_struct("AutoRefreshingTokenCredential")
.field("credential", &"TokenCredential")
.finish()
}
}
impl AutoRefreshingTokenCredential {
/// Create a new `AutoRefreshingTokenCredential` around the provided base provider.
pub fn new(provider: Arc<dyn TokenCredential>) -> Self {
Self {
credential: provider,
current_token: Arc::new(RwLock::new(None)),
}
}
}
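// Usage sketch (illustrative; `SomeCredential` stands in for any concrete
// `TokenCredential` implementation):
//
//     let cred = AutoRefreshingTokenCredential::new(Arc::new(SomeCredential::new()));
//     let token = cred.get_token("https://management.azure.com/").await?;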
#[async_trait::async_trait]
impl TokenCredential for AutoRefreshingTokenCredential {
async fn get_token(&self, resource: &str) -> Result<TokenResponse> {
if let Some(Ok(token)) = self.current_token.read().await.as_ref() {
if !is_expired(token) {
return Ok(token.clone());
}
}
loop {
let mut guard = self.current_token.write().await;
match guard.as_ref() {
None => {
let res = self.credential.get_token(resource).await;
*guard = Some(res);
}
Some(Err(err)) => {
return Err(Error::with_message(ErrorKind::Credential, || {
err.to_string()
}));
}
Some(Ok(token)) => {
if is_expired(token) {
*guard = None;
} else {
return Ok(token.clone());
};
}
}
}
}
}
| 32.492537 | 87 | 0.550758 |
75cab5e550c76017adea51f6dba9397ab45ec830 | 9,508 | use swc::{
config::{Config, InputSourceMap, JscConfig, ModuleConfig, Options, SourceMapsConfig},
Compiler,
};
use swc_common::FileName;
use swc_ecma_ast::*;
use swc_ecma_parser::{EsConfig, Syntax, TsConfig};
use swc_ecma_transforms::{modules::common_js, pass::noop};
use swc_ecma_visit::{as_folder, noop_visit_mut_type, VisitMut};
struct PanicOnVisit;
impl VisitMut for PanicOnVisit {
noop_visit_mut_type!();
fn visit_mut_number(&mut self, n: &mut Number) {
panic!("Expected {:?}", n.value)
}
}
/// We ensure that typescript is stripped out before applying custom transforms.
#[test]
#[should_panic(expected = "Expected 5.0")]
fn test_visit_mut() {
testing::run_test2(false, |cm, handler| {
let c = Compiler::new(cm.clone());
let fm = cm.new_source_file(
FileName::Anon,
"
console.log(5 as const)
"
.into(),
);
let res = c.process_js_with_custom_pass(
fm,
&handler,
&Options {
config: Config {
jsc: JscConfig {
syntax: Some(Syntax::Typescript(Default::default())),
..Default::default()
},
..Default::default()
},
..Default::default()
},
as_folder(PanicOnVisit),
noop(),
);
assert_ne!(res.unwrap().code, "console.log(5 as const)");
Ok(())
})
.unwrap()
}
#[test]
fn shopify_1_check_filename() {
testing::run_test2(false, |cm, handler| {
let c = Compiler::new(cm.clone());
let fm = cm.new_source_file(
FileName::Anon,
"
import React from 'react';
import { useI18n } from '@shopify/react-i18n';
export function App() {
const [i18n] = useI18n();
return <h1>{i18n.translate('foo')}</h1>
}
"
.into(),
);
let res = c.process_js_with_custom_pass(
fm,
&handler,
&Options {
config: Config {
jsc: JscConfig {
syntax: Some(Syntax::Es(EsConfig {
jsx: true,
..Default::default()
})),
..Default::default()
},
module: Some(ModuleConfig::CommonJs(common_js::Config {
..Default::default()
})),
..Default::default()
},
is_module: true,
..Default::default()
},
noop(),
noop(),
);
if res.is_err() {
return Err(());
}
let res = res.unwrap();
eprintln!("{}", res.code);
assert!(res.code.contains("_react.default.createElement"));
Ok(())
})
.unwrap()
}
#[test]
fn shopify_2_same_opt() {
testing::run_test2(false, |cm, handler| {
let c = Compiler::new(cm.clone());
let opts = Options {
config: Config {
env: None,
test: None,
exclude: None,
jsc: JscConfig {
syntax: Some(Syntax::Typescript(TsConfig {
tsx: true,
decorators: false,
dynamic_import: true,
dts: false,
no_early_errors: false,
import_assertions: false,
})),
transform: None,
external_helpers: false,
target: Some(EsVersion::Es5),
loose: false,
keep_class_names: false,
base_url: Default::default(),
paths: Default::default(),
minify: None,
experimental: Default::default(),
},
module: None,
minify: false,
input_source_map: InputSourceMap::Bool(false),
source_maps: None,
inline_sources_content: false,
..Default::default()
},
skip_helper_injection: false,
disable_hygiene: false,
disable_fixer: false,
global_mark: None,
cwd: "/Users/kdy1/projects/example-swcify".into(),
filename: "/Users/kdy1/projects/example-swcify/src/App/App.tsx".into(),
env_name: "development".into(),
source_maps: Some(SourceMapsConfig::Bool(false)),
source_file_name: Some("/Users/kdy1/projects/example-swcify/src/App/App.tsx".into()),
is_module: true,
..Default::default()
};
let fm = cm.new_source_file(
FileName::Real("/Users/kdy1/projects/example-swcify/src/App/App.tsx".into()),
"
import React from 'react';
import { useI18n } from '@shopify/react-i18n';
export function App() {
const [i18n] = useI18n();
return <h1>{i18n.translate('foo')}</h1>
}
"
.into(),
);
let res = c.process_js_with_custom_pass(fm, &handler, &opts, noop(), noop());
if res.is_err() {
return Err(());
}
let res = res.unwrap();
eprintln!("{}", res.code);
assert!(res.code.contains("React.createElement"));
assert!(res.code.contains("import React"));
Ok(())
})
.unwrap()
}
#[test]
fn shopify_3_reduce_defaults() {
testing::run_test2(false, |cm, handler| {
let c = Compiler::new(cm.clone());
let opts = Options {
config: Config {
jsc: JscConfig {
syntax: Some(Syntax::Typescript(TsConfig {
tsx: true,
dynamic_import: true,
..Default::default()
})),
..Default::default()
},
module: None,
minify: false,
input_source_map: InputSourceMap::Bool(false),
source_maps: None,
inline_sources_content: false,
..Default::default()
},
cwd: "/Users/kdy1/projects/example-swcify".into(),
filename: "/Users/kdy1/projects/example-swcify/src/App/App.tsx".into(),
env_name: "development".into(),
source_maps: Some(SourceMapsConfig::Bool(false)),
source_file_name: Some("/Users/kdy1/projects/example-swcify/src/App/App.tsx".into()),
is_module: true,
..Default::default()
};
let fm = cm.new_source_file(
FileName::Real("/Users/kdy1/projects/example-swcify/src/App/App.tsx".into()),
"
import React from 'react';
import { useI18n } from '@shopify/react-i18n';
export function App() {
const [i18n] = useI18n();
return <h1>{i18n.translate('foo')}</h1>
}
"
.into(),
);
let res = c.process_js_with_custom_pass(fm, &handler, &opts, noop(), noop());
if res.is_err() {
return Err(());
}
let res = res.unwrap();
eprintln!("{}", res.code);
assert!(res.code.contains("React.createElement"));
assert!(res.code.contains("import React"));
Ok(())
})
.unwrap()
}
#[test]
fn shopify_4_reduce_more() {
testing::run_test2(false, |cm, handler| {
let c = Compiler::new(cm.clone());
let opts = Options {
config: Config {
jsc: JscConfig {
syntax: Some(Syntax::Es(EsConfig {
jsx: true,
..Default::default()
})),
..Default::default()
},
..Default::default()
},
cwd: "/Users/kdy1/projects/example-swcify".into(),
filename: "/Users/kdy1/projects/example-swcify/src/App/App.tsx".into(),
env_name: "development".into(),
source_maps: Some(SourceMapsConfig::Bool(false)),
source_file_name: Some("/Users/kdy1/projects/example-swcify/src/App/App.tsx".into()),
is_module: true,
..Default::default()
};
let fm = cm.new_source_file(
FileName::Real("/Users/kdy1/projects/example-swcify/src/App/App.tsx".into()),
"
import React from 'react';
import { useI18n } from '@shopify/react-i18n';
export function App() {
const [i18n] = useI18n();
return <h1>{i18n.translate('foo')}</h1>
}
"
.into(),
);
let res = c.process_js_with_custom_pass(fm, &handler, &opts, noop(), noop());
if res.is_err() {
return Err(());
}
let res = res.unwrap();
eprintln!("{}", res.code);
assert!(res.code.contains("React.createElement"));
assert!(res.code.contains("import React"));
Ok(())
})
.unwrap()
}
| 30.280255 | 97 | 0.465818 |
7a3e0b6baf5c1bdecb7480c134304b971496262e | 19,599 | use super::*;
use crate::{
compat::es2015::{block_scoping, destructuring, Classes},
modules::common_js::common_js,
};
use swc_common::chain;
use swc_ecma_parser::{EsConfig, Syntax};
fn tr() -> impl Fold<Module> {
chain!(resolver(), block_scoping())
}
fn syntax() -> Syntax {
Syntax::Es(EsConfig {
class_props: true,
..Default::default()
})
}
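// Test helpers: `identical!` asserts the transform leaves the source unchanged,
// `to!` asserts it rewrites the source into the expected output, and the
// `*_no_block` variant runs the resolver without `block_scoping`.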
macro_rules! identical {
($name:ident, $src:literal) => {
test!(syntax(), |_| tr(), $name, $src, $src);
};
}
macro_rules! to {
($name:ident, $src:literal, $to:literal) => {
test!(syntax(), |_| tr(), $name, $src, $to);
};
}
macro_rules! identical_no_block {
($name:ident, $src:literal) => {
test!(syntax(), |_| resolver(), $name, $src, $src);
};
}
#[test]
fn test_mark_for() {
::testing::run_test(false, |_, _| {
let mark1 = Mark::fresh(Mark::root());
let mark2 = Mark::fresh(mark1);
let mark3 = Mark::fresh(mark2);
let mark4 = Mark::fresh(mark3);
let folder1 = Resolver::new(mark1, Scope::new(ScopeKind::Block, None), None);
let mut folder2 = Resolver::new(
mark2,
Scope::new(ScopeKind::Block, Some(&folder1.current)),
None,
);
folder2.current.declared_symbols.insert("foo".into());
let mut folder3 = Resolver::new(
mark3,
Scope::new(ScopeKind::Block, Some(&folder2.current)),
None,
);
folder3.current.declared_symbols.insert("bar".into());
assert_eq!(folder3.mark_for_ref(&"bar".into()), Some(mark3));
let mut folder4 = Resolver::new(
mark4,
Scope::new(ScopeKind::Block, Some(&folder3.current)),
None,
);
folder4.current.declared_symbols.insert("foo".into());
assert_eq!(folder4.mark_for_ref(&"foo".into()), Some(mark4));
assert_eq!(folder4.mark_for_ref(&"bar".into()), Some(mark3));
Ok(())
})
.unwrap();
}
to!(
basic_no_usage,
"
let foo;
{
let foo;
}
",
"
var foo;
{
var foo1;
}
"
);
to!(
class_nested_var,
"
var ConstructorScoping = function ConstructorScoping() {
_classCallCheck(this, ConstructorScoping);
var bar;
{
let bar;
use(bar);
}
}
",
"
var ConstructorScoping = function ConstructorScoping() {
_classCallCheck(this, ConstructorScoping);
var bar;
{
var bar1;
use(bar1);
}
}
"
);
to!(
basic,
r#"
{
var foo = 1;
{
let foo = 2;
use(foo);
}
use(foo)
}
"#,
r#"
{
var foo = 1;
{
var foo1 = 2;
use(foo1);
}
use(foo);
}
"#
);
to!(
general_assignment_patterns,
r#"const foo = "foo";
function foobar() {
for (let item of [1, 2, 3]) {
let foo = "bar";
[bar, foo] = [1, 2];
}
}"#,
r#"var foo = "foo";
function foobar() {
for (var item of [1, 2, 3]) {
var foo1 = "bar";
[bar, foo1] = [1, 2];
}
}"#
);
to!(
general_function,
r#"function test() {
let foo = "bar";
}"#,
r#"function test() {
var foo = "bar";
}"#
);
test!(
ignore,
::swc_ecma_parser::Syntax::default(),
|_| tr(),
babel_issue_1051,
r#"foo.func1 = function() {
if (cond1) {
for (;;) {
if (cond2) {
function func2() {}
function func3() {}
func4(function() {
func2();
});
break;
}
}
}
};"#,
r#"foo.func1 = function () {
if (cond1) {
for (;;) {
if (cond2) {
var _ret = function () {
function func2() {}
function func3() {}
func4(function () {
func2();
});
return "break";
}();
if (_ret === "break") break;
}
}
}
};"#
);
test!(
ignore,
::swc_ecma_parser::Syntax::default(),
// TODO(kdy1): WTF is this (again)?
|_| tr(),
babel_issue_2174,
r#"if (true) {
function foo() {}
function bar() {
return foo;
}
for (var x in {}) {}
}"#,
r#"
if (true) {
var foo = function () {};
var bar = function () {
return foo;
};
for (var x in {}) {}
}"#
);
test!(
ignore,
::swc_ecma_parser::Syntax::default(),
|_| tr(),
babel_issue_4363,
r#"function WithoutCurlyBraces() {
if (true)
for (let k in kv) {
function foo() { return this }
function bar() { return foo.call(this) }
console.log(this, k) // => undefined
}
}
function WithCurlyBraces() {
if (true) {
for (let k in kv) {
function foo() { return this }
function bar() { return foo.call(this) }
console.log(this, k) // => 777
}
}
}"#,
r#"function WithoutCurlyBraces() {
var _this = this;
if (true) {
var _loop = function (k) {
function foo() {
return this;
}
function bar() {
return foo.call(this);
}
console.log(_this, k); // => undefined
};
for (var k in kv) {
_loop(k);
}
}
}
function WithCurlyBraces() {
var _this2 = this;
if (true) {
var _loop2 = function (k) {
function foo() {
return this;
}
function bar() {
return foo.call(this);
}
console.log(_this2, k); // => 777
};
for (var k in kv) {
_loop2(k);
}
}
}"#
);
test!(
// Cannot represent function expression without parens (in result code)
ignore,
::swc_ecma_parser::Syntax::default(),
|_| tr(),
babel_issue_4946,
r#"(function foo() {
let foo = true;
});"#,
r#"(function foo() {
var foo = true;
});"#
);
// TODO: try {} catch (a) { let a } should report error
test!(
Default::default(),
|_| resolver(),
babel_issue_973,
r#"let arr = [];
for(let i = 0; i < 10; i++) {
for (let i = 0; i < 10; i++) {
arr.push(() => i);
}
}
"#,
r#"let arr = [];
for(let i = 0; i < 10; i++){
for(let i1 = 0; i1 < 10; i1++){
arr.push(()=>i1);
}
}"#
);
test_exec!(
::swc_ecma_parser::Syntax::default(),
|_| tr(),
pass_assignment,
r#"let a = 1;
a = 2;
expect(a).toBe(2);"#
);
test_exec!(
::swc_ecma_parser::Syntax::default(),
|_| tr(),
pass_call,
r#"let a = 1;
function b() {
return a + 1;
}
expect(b()).toBe(2);"#
);
test_exec!(
::swc_ecma_parser::Syntax::default(),
|_| tr(),
pass_update,
r#"let a = 1;
a++;
expect(a).toBe(2);"#
);
to!(
fn_param,
r#"let a = 'foo';
function foo(a) {
use(a);
}"#,
r#"var a = 'foo';
function foo(a1) {
use(a1);
}"#
);
to!(
fn_body,
r#"let a = 'foo';
function foo() {
let a = 'bar';
use(a);
}"#,
r#"var a = 'foo';
function foo() {
var a1 = 'bar';
use(a1);
}"#
);
to!(
shorthand,
r#"let a = 'foo';
function foo() {
let a = 'bar';
use({a});
}"#,
r#"var a = 'foo';
function foo() {
var a1 = 'bar';
use({a: a1});
}"#
);
to!(
same_level,
r#"
var a = 'foo';
var a = 'bar';
"#,
r#"
var a = 'foo';
var a = 'bar';
"#
);
to!(
class_block,
r#"
var Foo = function(_Bar) {
_inherits(Foo, _Bar);
function Foo() {
}
return Foo;
}(Bar);
"#,
r#"
var Foo = function(_Bar) {
_inherits(Foo, _Bar);
function Foo() {
}
return Foo;
}(Bar);
"#
);
to!(
class_block_2,
r#"
var Foo = (function(_Bar) {
_inherits(Foo, _Bar);
function Foo() {
}
return Foo;
})(Bar);
"#,
r#"
var Foo = function(_Bar) {
_inherits(Foo, _Bar);
function Foo() {
}
return Foo;
}(Bar);
"#
);
to!(
class_nested,
r#"
var Outer = function(_Hello) {
_inherits(Outer, _Hello);
function Outer() {
_classCallCheck(this, Outer);
var _this = _possibleConstructorReturn(this, _getPrototypeOf(Outer).call(this));
var Inner = function() {
function Inner() {
_classCallCheck(this, Inner);
}
_createClass(Inner, [{
key: _get(_getPrototypeOf(Outer.prototype), 'toString', _assertThisInitialized(_this)).call(_this), value: function() {
return 'hello';
}
}]);
return Inner;
}();
return _possibleConstructorReturn(_this, new Inner());
}
return Outer;
}(Hello);
"#,
r#"
var Outer = function(_Hello) {
_inherits(Outer, _Hello);
function Outer() {
_classCallCheck(this, Outer);
var _this = _possibleConstructorReturn(this, _getPrototypeOf(Outer).call(this));
var Inner = function() {
function Inner() {
_classCallCheck(this, Inner);
}
_createClass(Inner, [{
key: _get(_getPrototypeOf(Outer.prototype), 'toString', _assertThisInitialized(_this)).call(_this), value: function() {
return 'hello';
}
}]);
return Inner;
}();
return _possibleConstructorReturn(_this, new Inner());
}
return Outer;
}(Hello);
"#
);
identical!(class_var_constructor_only, r#"var Foo = function Foo(){}"#);
identical!(
class_var,
r#"
var Foo = function(_Bar) {
_inherits(Foo, _Bar);
function Foo() {
var _this;
_classCallCheck(this, Foo);
Foo[_assertThisInitialized(_this)];
return _possibleConstructorReturn(_this);
}
return Foo;
}(Bar);
"#
);
identical!(
class_singleton,
r#"
var singleton;
var Sub = function(_Foo) {
_inherits(Sub, _Foo);
function Sub() {
var _this;
_classCallCheck(this, Sub);
if (singleton) {
return _possibleConstructorReturn(_this, singleton);
}
singleton = _this = _possibleConstructorReturn(this, _getPrototypeOf(Sub).call(this));
return _possibleConstructorReturn(_this);
}
return Sub;
}(Foo);
"#
);
identical!(
regression_001,
"var sym = Symbol();
class Foo {
[sym] () {
return 1;
}
}
class Bar extends Foo {
[sym] () {
return super[sym]() + 2;
}
}
var i = new Bar();
expect(i[sym]()).toBe(3);"
);
identical!(
regression_002,
"var sym = Symbol();
var Foo = function() {
function Foo() {
_classCallCheck(this, Foo);
}
_createClass(Foo, [{
key: sym, value: function() {
return 1;
}
}]);
return Foo;
}();
var Bar = function(_Foo) {
_inherits(Bar, _Foo);
function Bar() {
_classCallCheck(this, Bar);
return _possibleConstructorReturn(this, _getPrototypeOf(Bar).apply(this, arguments));
}
_createClass(Bar, [{
key: sym, value: function() {
return _get(_getPrototypeOf(Bar.prototype), sym, this)() + 2;
}
}]);
return Bar;
}(Foo);
var i = new Bar();
expect(i[sym]()).toBe(3);"
);
identical!(module_01, "import { foo as foo } from 'src';");
identical!(
issue_271,
"function foo(scope) {
var startOperation = function startOperation(operation) {
scope.agentOperation = operation;
};
scope.startOperation = startOperation;
}"
);
identical!(
issue_281_01,
"function foo(e) {
e: { break e; }
}"
);
identical!(
issue_281_02,
"function foo(e) {
e: {
try {
} catch (e1) {
o = null;
break e
}
}
}"
);
identical_no_block!(
issue_292_1,
"var __assign = function () {
__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};"
);
identical_no_block!(
issue_292_2,
"__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
};
__assign.apply(this, arguments);"
);
identical!(
issue_295,
"export var bar = {};
class Foo {
constructor() {
bar;
}
}"
);
identical!(
issue_308,
"function bar(props) {}
var Foo = function Foo() {
_classCallCheck(this, Foo);
super();
_defineProperty(this, 'onBar', ()=>{
bar();
});
bar();
};
"
);
identical!(
issue_308_2,
"
function wrapper(){
function bar(props) {}
var Foo = function Foo() {
_classCallCheck(this, Foo);
super();
_defineProperty(this, 'onBar', ()=>{
bar();
});
bar();
};
}
"
);
identical!(
issue_369_1,
"export function input(name) {
return `${name}.md?render`;
}
export default function({
name, input: inp,
}) {
inp = inp || input(name);
return {input: inp};
};
"
);
test_exec!(
::swc_ecma_parser::Syntax::default(),
|_| tr(),
issue_369_2,
"
function a() {}
function b() {}
function foo({a: b}){
expect(b).toBe('a')
}
foo({a: 'a'})"
);
identical!(
issue_396_1,
"
function foo() {
bar;
function bar() {}
}
"
);
identical!(
issue_396_2,
"
function foo() {
var bar = function bar() {
};
bar;
}
"
);
to!(
issue_404,
"function foo(bar) {
const { foo } = bar;
return foo;
}",
"function foo(bar) {
var { foo: foo1 } = bar;
return foo1;
}
"
);
to!(
issue_438,
"function _setPrototypeOf(o, p) {
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
o.__proto__ = p;
return o;
};
return _setPrototypeOf(o, p);
}",
"function _setPrototypeOf(o, p) {
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o1, p1) {
o1.__proto__ = p1;
return o1;
};
return _setPrototypeOf(o, p);
}"
);
to!(
issue_454_1,
"var a = 2;
function foo() {
try {
var a = 1;
a;
} catch (err) {
// ignored
}
a;
}",
"var a = 2;
function foo() {
try {
var a1 = 1;
a1;
} catch (err) {
}
a1;
}"
);
to!(
issue_454_2,
"function a() {}
function foo() {
function b() {
a();
}
function a() {}
}",
"function a1() {
}
function foo() {
function b() {
a2();
}
function a2() {
}
}"
);
to!(
issue_454_3,
"function a() {}
function foo() {
function b() {
a();
}
function a() {
b();
}
}",
"function a1() {
}
function foo() {
function b() {
a2();
}
function a2() {
b();
}
}"
);
identical!(
regression_of_454,
"function broken(x) {
var Foo = function(_Bar) {
_inherits(Foo, _Bar);
function Foo() {
_classCallCheck(this, Foo);
return _possibleConstructorReturn(this, _getPrototypeOf(Foo).apply(this, \
arguments));
}
return Foo;
}(Bar);
}
"
);
to!(
issue_461,
"
for (var x in ['']) {
(x => 0)(x);
}",
"
for(var x in ['']){
((x1)=>0)(x);
}
"
);
to!(
issue_483,
"
function se(t,e,n,r){
//...
try{return O.apply(n,f.querySelectorAll(c)),n}catch(e){S(t,!0)}finally{s===N&&e.
removeAttribute('id')}
}
",
"
function se(t, e, n, r) {
try {
return O.apply(n, f.querySelectorAll(c)), n;
} catch (e) {
S(t, !0);
} finally{
s === N && e.removeAttribute('id');
}
}
"
);
identical!(
in_constructor,
"
class C {
}
class A extends C {
constructor() {
super();
class B extends C {
constructor() {
super();
}
}
new B();
}
}
"
);
test!(
syntax(),
|_| tr(),
issue_578_1,
"
import { myFunction } from './dep.js'
class SomeClass {
constructor(properties) {
this.props = properties;
}
call () {
const {myFunction} = this.props
if (myFunction) {
myFunction()
} else {
console.log('DID NOT WORK!')
}
}
}
let instance = new SomeClass({
myFunction: () => {
console.log('CORRECT FUNCTION CALLED')
}
});
instance.call()",
"import { myFunction } from './dep.js';
class SomeClass{
constructor(properties){
this.props = properties;
}
call() {
var { myFunction: myFunction1 } = this.props;
if (myFunction1) {
myFunction1();
} else {
console.log('DID NOT WORK!');
}
}
}
var instance = new SomeClass({
myFunction: ()=>{
console.log('CORRECT FUNCTION CALLED');
}
});
instance.call()"
);
test!(
syntax(),
|_| chain!(
tr(),
Classes::default(),
destructuring(Default::default()),
common_js(Default::default())
),
issue_578_2,
"
import { myFunction } from './dep.js'
class SomeClass {
constructor(properties) {
this.props = properties;
}
call () {
const {myFunction} = this.props
if (myFunction) {
myFunction()
} else {
console.log('DID NOT WORK!')
}
}
}
let instance = new SomeClass({
myFunction: () => {
console.log('CORRECT FUNCTION CALLED')
}
});
instance.call()",
"'use strict';
var _depJs = require('./dep.js');
let SomeClass = function() {
'use strict';
function SomeClass(properties) {
_classCallCheck(this, SomeClass);
this.props = properties;
}
_createClass(SomeClass, [{
key: 'call',
value: function call() {
var _props = this.props, myFunction = _props.myFunction;
if (myFunction) {
myFunction();
} else {
console.log('DID NOT WORK!');
}
}
}]);
return SomeClass;
}();
var instance = new SomeClass({
myFunction: ()=>{
console.log('CORRECT FUNCTION CALLED');
}
});
instance.call();"
);
test!(
syntax(),
|_| tr(),
global_object,
"function foo(Object) {
Object.defineProperty()
}",
"function foo(Object1) {
Object1.defineProperty();
}"
);
identical!(
hoisting,
"function foo() {
return XXX
}
var XXX = 1;
"
);
identical!(
issue_678,
"({
foo() {
function bar() {
bar;
}
},
});"
);
identical!(
issue_688,
"function test() {
if (typeof Missing == typeof EXTENDS) {
console.log('missing');
}
var EXTENDS = 'test';
}"
);
identical!(
issue_688_2,
"function test() {
if (typeof Missing == typeof EXTENDS) {
console.log('missing');
}
{
var EXTENDS = 'test';
}
}"
);
| 17.997245 | 140 | 0.484157 |
fc2ca46e4144ff250194a5150033609b28ab037c | 2,527 | use crate::{Error, mock::*};
use crate::*;
use frame_support::{assert_ok, assert_noop};
#[test]
fn initial_balance_should_be_zero() {
new_test_ext().execute_with(|| {
assert_eq!(Pooler::balance(1), 0);
});
}
#[test]
fn minting_pooler_multiple_times() {
new_test_ext().execute_with(|| {
assert_ok!(Pooler::deposit(Origin::signed(1), 90));
assert_eq!(Pooler::balance(1), 90);
assert_ok!(Pooler::deposit(Origin::signed(2), 10));
assert_eq!(Pooler::balance(2), 10);
assert_eq!(Pooler::total_supply(), 100);
});
}
#[test]
fn querying_total_supply_should_work() {
new_test_ext().execute_with(|| {
assert_ok!(Pooler::deposit(Origin::signed(1), 100));
assert_eq!(Pooler::balance(1), 100);
assert_ok!(Pooler::transfer(Origin::signed(1), 2, 50));
assert_eq!(Pooler::balance(1), 50);
assert_eq!(Pooler::balance(2), 50);
assert_ok!(Pooler::transfer(Origin::signed(2), 3, 31));
assert_eq!(Pooler::balance(1), 50);
assert_eq!(Pooler::balance(2), 19);
assert_eq!(Pooler::balance(3), 31);
assert_eq!(Pooler::total_supply(), 100);
});
}
#[test]
fn transferring_amount_below_available_balance_should_work() {
new_test_ext().execute_with(|| {
assert_ok!(Pooler::deposit(Origin::signed(1), 100));
assert_eq!(Pooler::balance(1), 100);
assert_ok!(Pooler::transfer(Origin::signed(1), 2, 50));
assert_eq!(Pooler::balance(1), 50);
assert_eq!(Pooler::balance(2), 50);
});
}
#[test]
fn transferring_amount_more_than_available_balance_should_not_work() {
new_test_ext().execute_with(|| {
assert_ok!(Pooler::deposit(Origin::signed(1), 100));
assert_eq!(Pooler::balance(1), 100);
assert_ok!(Pooler::transfer(Origin::signed(1), 2, 50));
assert_eq!(Pooler::balance(1), 50);
assert_eq!(Pooler::balance(2), 50);
assert_ok!(Pooler::withdraw(Origin::signed(1), 50));
assert_eq!(Pooler::balance(1), 0);
assert_noop!(Pooler::transfer(Origin::signed(1), 1, 50), Error::<Test>::BalanceLow);
});
}
#[test]
fn transferring_less_than_one_unit_should_not_work() {
new_test_ext().execute_with(|| {
assert_ok!(Pooler::deposit(Origin::signed(1), 100));
assert_eq!(Pooler::balance(1), 100);
assert_noop!(Pooler::transfer(Origin::signed(1), 2, 0), Error::<Test>::AmountZero);
});
}
#[test]
fn transferring_more_units_than_total_supply_should_not_work() {
new_test_ext().execute_with(|| {
assert_ok!(Pooler::deposit(Origin::signed(1), 100));
assert_eq!(Pooler::balance(1), 100);
assert_noop!(Pooler::transfer(Origin::signed(1), 2, 101), Error::<Test>::BalanceLow);
});
}
| 29.729412 | 87 | 0.691334 |
5d19ffd1870579646127cf08c425e465945fa5d6 | 2,995 | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
#[test]
fn vhsubpd_1() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM1)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 241, 125, 212], OperandSize::Dword)
}
#[test]
fn vhsubpd_2() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM2)), operand3: Some(Indirect(EDX, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 125, 10], OperandSize::Dword)
}
#[test]
fn vhsubpd_3() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 125, 218], OperandSize::Qword)
}
#[test]
fn vhsubpd_4() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM7)), operand3: Some(Indirect(RSI, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 193, 125, 38], OperandSize::Qword)
}
#[test]
fn vhsubpd_5() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM4)), operand3: Some(Direct(YMM7)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 221, 125, 215], OperandSize::Dword)
}
#[test]
fn vhsubpd_6() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM1)), operand3: Some(IndirectScaledIndexedDisplaced(EAX, ECX, Four, 601255595, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 245, 125, 172, 136, 171, 110, 214, 35], OperandSize::Dword)
}
#[test]
fn vhsubpd_7() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(YMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 205, 125, 220], OperandSize::Qword)
}
#[test]
fn vhsubpd_8() {
run_test(&Instruction { mnemonic: Mnemonic::VHSUBPD, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM3)), operand3: Some(IndirectScaledIndexed(RCX, RSI, Two, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 229, 125, 44, 113], OperandSize::Qword)
}
| 74.875 | 403 | 0.712855 |
756c4741f69ff85cb797631aa9f1c8ae7461ee77 | 368 | use serde::Deserialize;
use serde::Serialize;
/// This struct represents a paginator
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub struct Paginator {
pub start_time: Option<u64>,
pub end_time: Option<u64>,
pub limit: Option<u64>,
pub before: Option<String>,
pub after: Option<String>,
} | 28.307692 | 56 | 0.711957 |
01f212f0aa66ffca1caf7b63be2cabbc70927f4d | 9,113 | mod integration_test;
#[cfg(test)]
mod account_tests {
use lockbook_core::repo::{account_repo, file_metadata_repo};
use lockbook_core::service::test_utils::{generate_account, random_username, test_config};
use lockbook_core::service::{account_service, sync_service};
use lockbook_core::{
create_account, export_account, import_account, CoreError, Error, ImportError,
};
use lockbook_models::account::Account;
#[test]
fn create_account_successfully() {
let db = test_config();
let generated_account = generate_account();
account_service::create_account(
&db,
&generated_account.username,
&generated_account.api_url,
)
.unwrap();
}
#[test]
fn username_taken_test() {
let db1 = test_config();
let db2 = test_config();
let generated_account = generate_account();
account_service::create_account(
&db1,
&generated_account.username,
&generated_account.api_url,
)
.unwrap();
let err = account_service::create_account(
&db2,
&generated_account.username,
&generated_account.api_url,
)
.unwrap_err();
assert!(
matches!(err, CoreError::UsernameTaken),
"Username \"{}\" should have caused a UsernameTaken error but instead was {:?}",
&generated_account.username,
err
)
}
#[test]
fn invalid_username_test() {
let db = test_config();
let invalid_unames = ["", "i/o", "@me", "###", "+1", "💩"];
for uname in &invalid_unames {
let err = account_service::create_account(&db, uname, &generate_account().api_url)
.unwrap_err();
assert!(
matches!(err, CoreError::UsernameInvalid),
"Username \"{}\" should have been InvalidUsername but instead was {:?}",
uname,
err
)
}
}
#[test]
fn import_sync() {
let db1 = test_config();
let generated_account = generate_account();
let account = account_service::create_account(
&db1,
&generated_account.username,
&generated_account.api_url,
)
.unwrap();
let account_string = account_service::export_account(&db1).unwrap();
let home_folders1 = file_metadata_repo::get_root(&db1).unwrap().unwrap();
let db2 = test_config();
assert!(account_service::export_account(&db2).is_err());
account_service::import_account(&db2, &account_string).unwrap();
assert_eq!(account_repo::get_account(&db2).unwrap(), account);
assert_eq!(file_metadata_repo::get_last_updated(&db2).unwrap(), 0);
let work = sync_service::calculate_work(&db2).unwrap();
assert_ne!(work.most_recent_update_from_server, 0);
assert_eq!(work.work_units.len(), 1);
assert!(file_metadata_repo::get_root(&db2).unwrap().is_none());
sync_service::sync(&db2, None).unwrap();
assert!(file_metadata_repo::get_root(&db2).unwrap().is_some());
let home_folders2 = file_metadata_repo::get_root(&db2).unwrap().unwrap();
assert_eq!(home_folders1, home_folders2);
assert_eq!(
file_metadata_repo::get_all(&db1).unwrap(),
file_metadata_repo::get_all(&db2).unwrap()
);
}
#[test]
fn test_new_account_when_one_exists() {
let db = test_config();
let generated_account = generate_account();
account_service::create_account(
&db,
&generated_account.username,
&generated_account.api_url,
)
.unwrap();
assert!(
matches!(
account_service::create_account(
&db,
&generated_account.username,
&generated_account.api_url,
),
Err(CoreError::AccountExists)
),
"This action should have failed with AccountAlreadyExists!",
);
}
#[test]
fn test_import_account_when_one_exists() {
let cfg1 = test_config();
let generated_account = generate_account();
create_account(
&cfg1,
&generated_account.username,
&generated_account.api_url,
)
.unwrap();
let account_string = export_account(&cfg1).unwrap();
match import_account(&cfg1, &account_string) {
Ok(_) => panic!(
"This should not have allowed this account to be imported as one exists already"
),
Err(err) => match err {
Error::UiError(ImportError::AccountExistsAlready) => {}
Error::UiError(ImportError::AccountStringCorrupted)
| Error::UiError(ImportError::AccountDoesNotExist)
| Error::UiError(ImportError::UsernamePKMismatch)
| Error::UiError(ImportError::ClientUpdateRequired)
| Error::UiError(ImportError::CouldNotReachServer)
| Error::Unexpected(_) => panic!("Wrong Error: {:#?}", err),
},
}
}
#[test]
fn test_account_string_corrupted() {
let cfg1 = test_config();
match import_account(&cfg1, "clearly a bad account string") {
Ok(_) => panic!("This should not be a valid account string"),
Err(err) => match err {
Error::UiError(ImportError::AccountStringCorrupted) => {}
Error::UiError(ImportError::AccountExistsAlready)
| Error::UiError(ImportError::AccountDoesNotExist)
| Error::UiError(ImportError::UsernamePKMismatch)
| Error::UiError(ImportError::ClientUpdateRequired)
| Error::UiError(ImportError::CouldNotReachServer)
| Error::Unexpected(_) => panic!("Wrong Error: {:#?}", err),
},
}
}
#[test]
fn test_importing_nonexistent_account() {
let cfg1 = test_config();
let generated_account = generate_account();
create_account(
&cfg1,
&generated_account.username,
&generated_account.api_url,
)
.unwrap();
let cfg2 = test_config();
{
let account = Account {
api_url: generated_account.api_url,
username: random_username(),
private_key: generated_account.private_key,
};
account_repo::insert_account(&cfg2, &account).unwrap();
} // release lock on db
let account_string = export_account(&cfg2).unwrap();
println!("Your thing\n{}", &account_string);
let cfg3 = test_config();
match import_account(&cfg3, &account_string) {
Ok(_) => panic!("Should not have passed"),
Err(err) => match err {
Error::UiError(ImportError::AccountDoesNotExist) => {}
Error::UiError(ImportError::AccountStringCorrupted)
| Error::UiError(ImportError::AccountExistsAlready)
| Error::UiError(ImportError::ClientUpdateRequired)
| Error::UiError(ImportError::UsernamePKMismatch)
| Error::UiError(ImportError::CouldNotReachServer)
| Error::Unexpected(_) => panic!("Wrong error: {:#?}", err),
},
}
}
#[test]
fn test_account_public_key_mismatch_import() {
let bad_account_string = {
let db1 = test_config();
let db2 = test_config();
let generated_account1 = generate_account();
let generated_account2 = generate_account();
let account1 = account_service::create_account(
&db1,
&generated_account1.username,
&generated_account1.api_url,
)
.unwrap();
let mut account2 = account_service::create_account(
&db2,
&generated_account2.username,
&generated_account2.api_url,
)
.unwrap();
account2.username = account1.username;
account_repo::insert_account(&db2, &account2).unwrap();
account_service::export_account(&db2).unwrap()
};
match import_account(&test_config(), &bad_account_string) {
Ok(_) => panic!("Should have failed"),
Err(err) => match err {
Error::UiError(ImportError::UsernamePKMismatch) => {}
Error::UiError(ImportError::AccountStringCorrupted)
| Error::UiError(ImportError::AccountExistsAlready)
| Error::UiError(ImportError::ClientUpdateRequired)
| Error::UiError(ImportError::AccountDoesNotExist)
| Error::UiError(ImportError::CouldNotReachServer)
| Error::Unexpected(_) => panic! {"Wrong error: {:#?}", err},
},
}
}
}
| 35.459144 | 96 | 0.565785 |
b9c9c5f0a537bd4b98437f311ecbb60dd70cf684 | 515 | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum e = int; //~ ERROR expected `{`, found `=`
| 42.916667 | 68 | 0.726214 |
cc4d2b2e923649d9c741594e4e6924e8d3b7ac15 | 5,378 | // Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE file at the
// root of this repository, or online at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Future and stream utilities.
//!
//! This module provides future and stream combinators that are missing from
//! the [`futures`](futures) crate.
use std::fmt::{self, Debug};
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};
use futures::sink::Sink;
/// Extension methods for futures.
pub trait OreFutureExt {
/// Wraps a future in a [`SpawnIfCanceled`] future, which will spawn a
/// task to poll the inner future to completion if it is dropped.
fn spawn_if_canceled(self) -> SpawnIfCanceled<Self::Output>
where
Self: Future + Send + 'static,
Self::Output: Send + 'static;
}
impl<T> OreFutureExt for T
where
T: Future,
{
fn spawn_if_canceled(self) -> SpawnIfCanceled<T::Output>
where
T: Send + 'static,
T::Output: Send + 'static,
{
SpawnIfCanceled {
inner: Some(Box::pin(self)),
}
}
}
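// Usage sketch (illustrative; `do_work` is a stand-in, and a tokio runtime is
// assumed since the Drop impl below hands the future to `tokio::spawn`):
//
//     let fut = async { do_work().await }.spawn_if_canceled();
//     drop(fut); // the work is still driven to completion on the runtime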
/// The future returned by [`OreFutureExt::spawn_if_canceled`].
pub struct SpawnIfCanceled<T>
where
T: Send + 'static,
{
inner: Option<Pin<Box<dyn Future<Output = T> + Send>>>,
}
impl<T> Future for SpawnIfCanceled<T>
where
T: Send + 'static,
{
type Output = T;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<T> {
match &mut self.inner {
None => panic!("SpawnIfCanceled polled after completion"),
Some(f) => match f.as_mut().poll(cx) {
Poll::Pending => Poll::Pending,
Poll::Ready(res) => {
self.inner = None;
Poll::Ready(res)
}
},
}
}
}
impl<T> Drop for SpawnIfCanceled<T>
where
T: Send + 'static,
{
fn drop(&mut self) {
if let Some(f) = self.inner.take() {
tokio::spawn(f);
}
}
}
impl<T> fmt::Debug for SpawnIfCanceled<T>
where
T: Send + 'static,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SpawnIfCanceled")
.field(
"inner",
match &self.inner {
None => &"None",
Some(_) => &"Some(<future>)",
},
)
.finish()
}
}
/// Extension methods for sinks.
pub trait OreSinkExt<T>: Sink<T> {
/// Boxes this sink.
fn boxed(self) -> Box<dyn Sink<T, Error = Self::Error> + Send>
where
Self: Sized + Send + 'static,
{
Box::new(self)
}
/// Like [`futures::sink::SinkExt::send`], but does not flush the sink after enqueuing
/// `item`.
fn enqueue(&mut self, item: T) -> Enqueue<Self, T> {
Enqueue {
sink: self,
item: Some(item),
}
}
}
impl<S, T> OreSinkExt<T> for S where S: Sink<T> {}
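// Usage sketch (illustrative; `sink`, `a`, and `b` are stand-ins): enqueue
// several items without intermediate flushes, then flush once via
// `futures::sink::SinkExt::flush`:
//
//     sink.enqueue(a).await?;
//     sink.enqueue(b).await?;
//     sink.flush().await?;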
/// Future for the [`enqueue`](OreSinkExt::enqueue) method.
#[derive(Debug)]
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Enqueue<'a, Si, Item>
where
Si: ?Sized,
{
sink: &'a mut Si,
item: Option<Item>,
}
impl<Si, Item> Future for Enqueue<'_, Si, Item>
where
Si: Sink<Item> + Unpin + ?Sized,
Item: Unpin,
{
type Output = Result<(), Si::Error>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
let this = &mut *self;
if let Some(item) = this.item.take() {
let mut sink = Pin::new(&mut this.sink);
match sink.as_mut().poll_ready(cx)? {
Poll::Ready(()) => sink.as_mut().start_send(item)?,
Poll::Pending => {
this.item = Some(item);
return Poll::Pending;
}
}
}
Poll::Ready(Ok(()))
}
}
/// Constructs a sink that consumes its input and sends it nowhere.
pub fn dev_null<T, E>() -> DevNull<T, E> {
DevNull(PhantomData, PhantomData)
}
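// Usage sketch (illustrative; `real_sink` is a stand-in): tee a sink with
// `fanout` while discarding the second branch.
//
//     use futures::sink::SinkExt;
//     let tee = real_sink.fanout(dev_null());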
/// A sink that consumes its input and sends it nowhere.
///
/// Primarily useful as a base sink when folding multiple sinks into one using
/// [`futures::sink::SinkExt::fanout`].
#[derive(Debug)]
pub struct DevNull<T, E>(PhantomData<T>, PhantomData<E>);
impl<T, E> Sink<T> for DevNull<T, E> {
type Error = E;
fn poll_ready(self: Pin<&mut Self>, _: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn start_send(self: Pin<&mut Self>, _: T) -> Result<(), Self::Error> {
Ok(())
}
fn poll_flush(self: Pin<&mut Self>, _: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn poll_close(self: Pin<&mut Self>, _: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
}
| 27.299492 | 91 | 0.57289 |
1c6e3a3f8d1dba880baecbc31495c7fc690dc9d1 | 17,272 | use super::result::{CqlValue, Row};
use crate::frame::value::Counter;
use bigdecimal::BigDecimal;
use chrono::{Duration, NaiveDate};
use num_bigint::BigInt;
use std::collections::HashMap;
use std::hash::Hash;
use std::net::IpAddr;
use thiserror::Error;
use uuid::Uuid;
#[derive(Error, Debug, Clone, PartialEq, Eq)]
pub enum FromRowError {
#[error("{err} in the column with index {column}")]
BadCqlVal { err: FromCqlValError, column: usize },
#[error("Wrong row size: expected {expected}, actual {actual}")]
WrongRowSize { expected: usize, actual: usize },
}
#[derive(Error, Debug, PartialEq, Eq)]
pub enum CqlTypeError {
#[error("Invalid number of set elements: {0}")]
InvalidNumberOfElements(i32),
}
/// This trait defines a way to convert CqlValue or Option<CqlValue> into some Rust type
// We can't use the From trait because `impl From<Option<CqlValue>> for String {...}`
// is forbidden by Rust's orphan rule, since neither From nor String is defined in this crate
pub trait FromCqlVal<T>: Sized {
fn from_cql(cql_val: T) -> Result<Self, FromCqlValError>;
}
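// Usage sketch (illustrative, assuming `CqlValue::Int` is the i32 variant,
// consistent with the `as_int` conversion used below):
//
//     let n = i32::from_cql(CqlValue::Int(7));    // Ok(7)
//     let s = String::from_cql(CqlValue::Int(7)); // Err(FromCqlValError::BadCqlType)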
#[derive(Error, Debug, Clone, PartialEq, Eq)]
pub enum FromCqlValError {
#[error("Bad CQL type")]
BadCqlType,
#[error("Value is null")]
ValIsNull,
}
/// This trait defines a way to convert CQL Row into some rust type
pub trait FromRow: Sized {
fn from_row(row: Row) -> Result<Self, FromRowError>;
}
// Implement from_cql<Option<CqlValue>> for every type that has from_cql<CqlValue>
// This tries to unwrap the option or fails with an error
impl<T: FromCqlVal<CqlValue>> FromCqlVal<Option<CqlValue>> for T {
fn from_cql(cql_val_opt: Option<CqlValue>) -> Result<Self, FromCqlValError> {
T::from_cql(cql_val_opt.ok_or(FromCqlValError::ValIsNull)?)
}
}
// Implement from_cql<Option<CqlValue>> for Option<T> for every type that has from_cql<CqlValue>
// Value inside Option gets mapped from CqlValue to T
impl<T: FromCqlVal<CqlValue>> FromCqlVal<Option<CqlValue>> for Option<T> {
fn from_cql(cql_val_opt: Option<CqlValue>) -> Result<Self, FromCqlValError> {
match cql_val_opt {
Some(CqlValue::Empty) => Ok(None),
Some(cql_val) => Ok(Some(T::from_cql(cql_val)?)),
None => Ok(None),
}
}
}
// This macro implements FromCqlVal given a type and method of CqlValue that returns this type
macro_rules! impl_from_cql_val {
($T:ty, $convert_func:ident) => {
impl FromCqlVal<CqlValue> for $T {
fn from_cql(cql_val: CqlValue) -> Result<$T, FromCqlValError> {
cql_val.$convert_func().ok_or(FromCqlValError::BadCqlType)
}
}
};
}
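// For illustration, `impl_from_cql_val!(i32, as_int)` below expands to roughly:
//
//     impl FromCqlVal<CqlValue> for i32 {
//         fn from_cql(cql_val: CqlValue) -> Result<i32, FromCqlValError> {
//             cql_val.as_int().ok_or(FromCqlValError::BadCqlType)
//         }
//     }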
impl_from_cql_val!(i32, as_int); // i32::from_cql<CqlValue>
impl_from_cql_val!(i64, as_bigint); // i64::from_cql<CqlValue>
impl_from_cql_val!(Counter, as_counter); // Counter::from_cql<CqlValue>
impl_from_cql_val!(i16, as_smallint); // i16::from_cql<CqlValue>
impl_from_cql_val!(BigInt, into_varint); // BigInt::from_cql<CqlValue>
impl_from_cql_val!(i8, as_tinyint); // i8::from_cql<CqlValue>
impl_from_cql_val!(NaiveDate, as_date); // NaiveDate::from_cql<CqlValue>
impl_from_cql_val!(f32, as_float); // f32::from_cql<CqlValue>
impl_from_cql_val!(f64, as_double); // f64::from_cql<CqlValue>
impl_from_cql_val!(bool, as_boolean); // bool::from_cql<CqlValue>
impl_from_cql_val!(String, into_string); // String::from_cql<CqlValue>
impl_from_cql_val!(Vec<u8>, into_blob); // Vec<u8>::from_cql<CqlValue>
impl_from_cql_val!(IpAddr, as_inet); // IpAddr::from_cql<CqlValue>
impl_from_cql_val!(Uuid, as_uuid); // Uuid::from_cql<CqlValue>
impl_from_cql_val!(BigDecimal, into_decimal); // BigDecimal::from_cql<CqlValue>
impl_from_cql_val!(Duration, as_duration); // Duration::from_cql<CqlValue>
// Vec<T>::from_cql<CqlValue>
impl<T: FromCqlVal<CqlValue>> FromCqlVal<CqlValue> for Vec<T> {
fn from_cql(cql_val: CqlValue) -> Result<Self, FromCqlValError> {
cql_val
.into_vec()
.ok_or(FromCqlValError::BadCqlType)?
.into_iter()
.map(T::from_cql)
.collect::<Result<Vec<T>, FromCqlValError>>()
}
}
impl<T1: FromCqlVal<CqlValue> + Eq + Hash, T2: FromCqlVal<CqlValue>> FromCqlVal<CqlValue>
for HashMap<T1, T2>
{
fn from_cql(cql_val: CqlValue) -> Result<Self, FromCqlValError> {
let vec = cql_val.into_pair_vec().ok_or(FromCqlValError::BadCqlType)?;
let mut res = HashMap::with_capacity(vec.len());
for (key, value) in vec {
res.insert(T1::from_cql(key)?, T2::from_cql(value)?);
}
Ok(res)
}
}
macro_rules! replace_expr {
($_t:tt $sub:expr) => {
$sub
};
}
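// `replace_expr` rewrites each captured metavariable into the given expression.
// Mapping every tuple element to `()` lets `<[()]>::len(&[(), (), ...])` below
// evaluate to the repetition count, e.g. 3 for a three-element tuple.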
// This macro implements FromRow for tuple of types that have FromCqlVal
macro_rules! impl_tuple_from_row {
( $($Ti:tt),+ ) => {
impl<$($Ti),+> FromRow for ($($Ti,)+)
where
$($Ti: FromCqlVal<Option<CqlValue>>),+
{
fn from_row(row: Row) -> Result<Self, FromRowError> {
// From what I know, it is not possible yet to get the number of metavariable
// repetitions (https://github.com/rust-lang/lang-team/issues/28#issue-644523674)
// This is a workaround
let expected_len = <[()]>::len(&[$(replace_expr!(($Ti) ())),*]);
if expected_len != row.columns.len() {
return Err(FromRowError::WrongRowSize {
expected: expected_len,
actual: row.columns.len(),
});
}
let mut vals_iter = row.columns.into_iter().enumerate();
Ok((
$(
{
let (col_ix, col_value) = vals_iter
.next()
.unwrap(); // vals_iter size is checked before this code is reached,
// so it is safe to unwrap
$Ti::from_cql(col_value)
.map_err(|e| FromRowError::BadCqlVal {
err: e,
column: col_ix,
})?
}
,)+
))
}
}
}
}
// Implement FromRow for tuples of size up to 16
impl_tuple_from_row!(T1);
impl_tuple_from_row!(T1, T2);
impl_tuple_from_row!(T1, T2, T3);
impl_tuple_from_row!(T1, T2, T3, T4);
impl_tuple_from_row!(T1, T2, T3, T4, T5);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15);
impl_tuple_from_row!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16);
macro_rules! impl_tuple_from_cql {
( $($Ti:tt),+ ) => {
impl<$($Ti),+> FromCqlVal<CqlValue> for ($($Ti,)+)
where
$($Ti: FromCqlVal<Option<CqlValue>>),+
{
fn from_cql(cql_val: CqlValue) -> Result<Self, FromCqlValError> {
let tuple_fields = match cql_val {
CqlValue::Tuple(fields) => fields,
_ => return Err(FromCqlValError::BadCqlType)
};
let mut tuple_fields_iter = tuple_fields.into_iter();
Ok((
$(
$Ti::from_cql(tuple_fields_iter.next().ok_or(FromCqlValError::BadCqlType) ?) ?
,)+
))
}
}
}
}
impl_tuple_from_cql!(T1);
impl_tuple_from_cql!(T1, T2);
impl_tuple_from_cql!(T1, T2, T3);
impl_tuple_from_cql!(T1, T2, T3, T4);
impl_tuple_from_cql!(T1, T2, T3, T4, T5);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15);
impl_tuple_from_cql!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16);
#[cfg(test)]
mod tests {
use super::{CqlValue, FromCqlVal, FromCqlValError, FromRow, FromRowError, Row};
use crate as scylla;
use crate::frame::value::Counter;
use crate::macros::FromRow;
use bigdecimal::BigDecimal;
use chrono::{Duration, NaiveDate};
use num_bigint::{BigInt, ToBigInt};
use std::net::{IpAddr, Ipv4Addr};
use std::str::FromStr;
use uuid::Uuid;
#[test]
fn i32_from_cql() {
assert_eq!(Ok(1234), i32::from_cql(CqlValue::Int(1234)));
}
#[test]
fn bool_from_cql() {
assert_eq!(Ok(true), bool::from_cql(CqlValue::Boolean(true)));
assert_eq!(Ok(false), bool::from_cql(CqlValue::Boolean(false)));
}
#[test]
fn floatingpoints_from_cql() {
let float: f32 = 2.13;
let double: f64 = 4.26;
assert_eq!(Ok(float), f32::from_cql(CqlValue::Float(float)));
assert_eq!(Ok(double), f64::from_cql(CqlValue::Double(double)));
}
#[test]
fn i64_from_cql() {
assert_eq!(Ok(1234), i64::from_cql(CqlValue::BigInt(1234)));
}
#[test]
fn i8_from_cql() {
assert_eq!(Ok(6), i8::from_cql(CqlValue::TinyInt(6)));
}
#[test]
fn i16_from_cql() {
assert_eq!(Ok(16), i16::from_cql(CqlValue::SmallInt(16)));
}
#[test]
fn string_from_cql() {
assert_eq!(
Ok("ascii_test".to_string()),
String::from_cql(CqlValue::Ascii("ascii_test".to_string()))
);
assert_eq!(
Ok("text_test".to_string()),
String::from_cql(CqlValue::Text("text_test".to_string()))
);
}
#[test]
fn ip_addr_from_cql() {
let ip_addr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
assert_eq!(Ok(ip_addr), IpAddr::from_cql(CqlValue::Inet(ip_addr)));
}
#[test]
fn varint_from_cql() {
let big_int = 0.to_bigint().unwrap();
assert_eq!(
Ok(big_int),
BigInt::from_cql(CqlValue::Varint(0.to_bigint().unwrap()))
);
}
#[test]
fn decimal_from_cql() {
let decimal = BigDecimal::from_str("123.4").unwrap();
assert_eq!(
Ok(decimal.clone()),
BigDecimal::from_cql(CqlValue::Decimal(decimal))
);
}
#[test]
fn counter_from_cql() {
let counter = Counter(1);
assert_eq!(Ok(counter), Counter::from_cql(CqlValue::Counter(counter)));
}
#[test]
fn naive_date_from_cql() {
let unix_epoch: CqlValue = CqlValue::Date(2_u32.pow(31));
assert_eq!(
Ok(NaiveDate::from_ymd(1970, 1, 1)),
NaiveDate::from_cql(unix_epoch)
);
let before_epoch: CqlValue = CqlValue::Date(2_u32.pow(31) - 30);
assert_eq!(
Ok(NaiveDate::from_ymd(1969, 12, 2)),
NaiveDate::from_cql(before_epoch)
);
let after_epoch: CqlValue = CqlValue::Date(2_u32.pow(31) + 30);
assert_eq!(
Ok(NaiveDate::from_ymd(1970, 1, 31)),
NaiveDate::from_cql(after_epoch)
);
let min_date: CqlValue = CqlValue::Date(0);
assert!(NaiveDate::from_cql(min_date).is_err());
let max_date: CqlValue = CqlValue::Date(u32::max_value());
assert!(NaiveDate::from_cql(max_date).is_err());
}
#[test]
fn duration_from_cql() {
let time_duration = Duration::nanoseconds(86399999999999);
assert_eq!(
time_duration,
Duration::from_cql(CqlValue::Time(time_duration)).unwrap(),
);
let timestamp_duration = Duration::milliseconds(i64::min_value());
assert_eq!(
timestamp_duration,
Duration::from_cql(CqlValue::Timestamp(timestamp_duration)).unwrap(),
);
}
#[test]
fn uuid_from_cql() {
let test_uuid: Uuid = Uuid::parse_str("8e14e760-7fa8-11eb-bc66-000000000001").unwrap();
assert_eq!(
test_uuid,
Uuid::from_cql(CqlValue::Uuid(test_uuid)).unwrap()
);
assert_eq!(
test_uuid,
Uuid::from_cql(CqlValue::Timeuuid(test_uuid)).unwrap()
);
}
#[test]
fn vec_from_cql() {
let cql_val = CqlValue::Set(vec![CqlValue::Int(1), CqlValue::Int(2), CqlValue::Int(3)]);
assert_eq!(Ok(vec![1, 2, 3]), Vec::<i32>::from_cql(cql_val));
}
#[test]
fn tuple_from_row() {
let row = Row {
columns: vec![
Some(CqlValue::Int(1)),
Some(CqlValue::Text("some_text".to_string())),
None,
],
};
let (a, b, c) = <(i32, Option<String>, Option<i64>)>::from_row(row).unwrap();
assert_eq!(a, 1);
assert_eq!(b, Some("some_text".to_string()));
assert_eq!(c, None);
}
#[test]
fn from_cql_null() {
assert_eq!(i32::from_cql(None), Err(FromCqlValError::ValIsNull));
}
#[test]
fn from_cql_wrong_type() {
assert_eq!(
i32::from_cql(CqlValue::BigInt(1234)),
Err(FromCqlValError::BadCqlType)
);
}
#[test]
fn from_cql_empty_value() {
assert_eq!(
i32::from_cql(CqlValue::Empty),
Err(FromCqlValError::BadCqlType)
);
assert_eq!(<Option<i32>>::from_cql(Some(CqlValue::Empty)), Ok(None));
}
#[test]
fn from_row_null() {
let row = Row {
columns: vec![None],
};
assert_eq!(
<(i32,)>::from_row(row),
Err(FromRowError::BadCqlVal {
err: FromCqlValError::ValIsNull,
column: 0
})
);
}
#[test]
fn from_row_wrong_type() {
let row = Row {
columns: vec![Some(CqlValue::Int(1234))],
};
assert_eq!(
<(String,)>::from_row(row),
Err(FromRowError::BadCqlVal {
err: FromCqlValError::BadCqlType,
column: 0
})
);
}
#[test]
fn from_row_too_large() {
let row = Row {
columns: vec![Some(CqlValue::Int(1234)), Some(CqlValue::Int(1234))],
};
assert_eq!(
<(i32,)>::from_row(row),
Err(FromRowError::WrongRowSize {
expected: 1,
actual: 2
})
);
}
#[test]
fn from_row_too_short() {
let row = Row {
columns: vec![Some(CqlValue::Int(1234)), Some(CqlValue::Int(1234))],
};
assert_eq!(
<(i32, i32, i32)>::from_row(row),
Err(FromRowError::WrongRowSize {
expected: 3,
actual: 2
})
);
}
#[test]
fn struct_from_row() {
#[derive(FromRow)]
struct MyRow {
a: i32,
b: Option<String>,
c: Option<Vec<i32>>,
}
let row = Row {
columns: vec![
Some(CqlValue::Int(16)),
None,
Some(CqlValue::Set(vec![CqlValue::Int(1), CqlValue::Int(2)])),
],
};
let my_row: MyRow = MyRow::from_row(row).unwrap();
assert_eq!(my_row.a, 16);
assert_eq!(my_row.b, None);
assert_eq!(my_row.c, Some(vec![1, 2]));
}
#[test]
fn struct_from_row_wrong_size() {
#[derive(FromRow, PartialEq, Eq, Debug)]
struct MyRow {
a: i32,
b: Option<String>,
c: Option<Vec<i32>>,
}
let too_short_row = Row {
columns: vec![Some(CqlValue::Int(16)), None],
};
let too_large_row = Row {
columns: vec![
Some(CqlValue::Int(16)),
None,
Some(CqlValue::Set(vec![CqlValue::Int(1), CqlValue::Int(2)])),
Some(CqlValue::Set(vec![CqlValue::Int(1), CqlValue::Int(2)])),
],
};
assert_eq!(
MyRow::from_row(too_short_row),
Err(FromRowError::WrongRowSize {
expected: 3,
actual: 2
})
);
assert_eq!(
MyRow::from_row(too_large_row),
Err(FromRowError::WrongRowSize {
expected: 3,
actual: 4
})
);
}
}
| 31.985185 | 102 | 0.55865 |
29a2454a5e7438266a75830d218be883251cb4f4 | 28 | pub mod cmd;
pub mod lexer;
| 9.333333 | 14 | 0.714286 |
e24d5cac31d4b924fb73591e80fab1973f4135b5 | 32,007 | // Copyright 2019 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Functions for building partial transactions to be passed
//! around during an interactive wallet exchange
use crate::blake2::blake2b::blake2b;
use crate::error::{Error, ErrorKind};
use crate::grin_core::core::amount_to_hr_string;
use crate::grin_core::core::committed::Committed;
use crate::grin_core::core::transaction::{
Input, KernelFeatures, Output, Transaction, TransactionBody, TxKernel, Weighting,
};
use crate::grin_core::core::verifier_cache::LruVerifierCache;
use crate::grin_core::libtx::{aggsig, build, proof::ProofBuild, secp_ser, tx_fee};
use crate::grin_core::map_vec;
use crate::grin_keychain::{BlindSum, BlindingFactor, Keychain};
use crate::grin_util::secp::key::{PublicKey, SecretKey};
use crate::grin_util::secp::pedersen::Commitment;
use crate::grin_util::secp::Signature;
use crate::grin_util::{self, secp, RwLock};
use crate::slate_versions::ser as dalek_ser;
use ed25519_dalek::PublicKey as DalekPublicKey;
use ed25519_dalek::Signature as DalekSignature;
use failure::ResultExt;
use rand::rngs::mock::StepRng;
use rand::thread_rng;
use serde::ser::{Serialize, Serializer};
use serde_json;
use std::fmt;
use std::sync::Arc;
use uuid::Uuid;
use crate::slate_versions::v2::SlateV2;
use crate::slate_versions::v3::{
CoinbaseV3, InputV3, OutputV3, ParticipantDataV3, PaymentInfoV3, SlateV3, TransactionBodyV3,
TransactionV3, TxKernelV3, VersionCompatInfoV3,
};
// use crate::slate_versions::{CURRENT_SLATE_VERSION, GRIN_BLOCK_HEADER_VERSION};
use crate::types::CbData;
use crate::SlateVersion;
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct PaymentInfo {
#[serde(with = "dalek_ser::dalek_pubkey_serde")]
pub sender_address: DalekPublicKey,
#[serde(with = "dalek_ser::dalek_pubkey_serde")]
pub receiver_address: DalekPublicKey,
#[serde(with = "dalek_ser::option_dalek_sig_serde")]
pub receiver_signature: Option<DalekSignature>,
}
/// Public data for each participant in the slate
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ParticipantData {
    /// Id of the participant in the transaction. (For now, 0 = sender, 1 = recipient)
#[serde(with = "secp_ser::string_or_u64")]
pub id: u64,
/// Public key corresponding to private blinding factor
#[serde(with = "secp_ser::pubkey_serde")]
pub public_blind_excess: PublicKey,
/// Public key corresponding to private nonce
#[serde(with = "secp_ser::pubkey_serde")]
pub public_nonce: PublicKey,
/// Public partial signature
#[serde(with = "secp_ser::option_sig_serde")]
pub part_sig: Option<Signature>,
/// A message for other participants
pub message: Option<String>,
/// Signature, created with private key corresponding to 'public_blind_excess'
#[serde(with = "secp_ser::option_sig_serde")]
pub message_sig: Option<Signature>,
}
impl ParticipantData {
    /// A helper to return whether this participant
    /// has completed round 1 and round 2;
    /// Round 1 has to be completed before instantiation of this struct
    /// anyhow, and for each participant consists of:
    /// - Inputs added to transaction
    /// - Outputs added to transaction
    /// - Public signature nonce chosen and added
    /// - Public contribution to blinding factor chosen and added
    /// Round 2 can only be completed after all participants have
    /// performed round 1, and adds:
    /// - Part sig is filled out
pub fn is_complete(&self) -> bool {
self.part_sig.is_some()
}
}
/// Public message data (for serialising and storage)
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ParticipantMessageData {
    /// id of the participant in the tx
#[serde(with = "secp_ser::string_or_u64")]
pub id: u64,
/// Public key
#[serde(with = "secp_ser::pubkey_serde")]
pub public_key: PublicKey,
/// Message,
pub message: Option<String>,
/// Signature
#[serde(with = "secp_ser::option_sig_serde")]
pub message_sig: Option<Signature>,
}
impl ParticipantMessageData {
/// extract relevant message data from participant data
pub fn from_participant_data(p: &ParticipantData) -> ParticipantMessageData {
ParticipantMessageData {
id: p.id,
public_key: p.public_blind_excess,
message: p.message.clone(),
message_sig: p.message_sig.clone(),
}
}
}
impl fmt::Display for ParticipantMessageData {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "")?;
write!(f, "Participant ID {} ", self.id)?;
if self.id == 0 {
writeln!(f, "(Sender)")?;
} else {
writeln!(f, "(Recipient)")?;
}
writeln!(f, "---------------------")?;
let static_secp = grin_util::static_secp_instance();
let static_secp = static_secp.lock();
writeln!(
f,
"Public Key: {}",
&grin_util::to_hex(self.public_key.serialize_vec(&static_secp, true).to_vec())
)?;
let message = match self.message.clone() {
None => "None".to_owned(),
Some(m) => m,
};
writeln!(f, "Message: {}", message)?;
let message_sig = match self.message_sig.clone() {
None => "None".to_owned(),
Some(m) => grin_util::to_hex(m.to_raw_data().to_vec()),
};
writeln!(f, "Message Signature: {}", message_sig)
}
}
/// A 'Slate' is passed around to all parties to build up all of the public
/// transaction data needed to create a finalized transaction. Callers can pass
/// the slate around by whatever means they choose, (but we can provide some
/// binary or JSON serialization helpers here).
#[derive(Deserialize, Debug, Clone)]
pub struct Slate {
/// Versioning info
pub version_info: VersionCompatInfo,
/// The number of participants intended to take part in this transaction
pub num_participants: usize,
/// Unique transaction ID, selected by sender
pub id: Uuid,
/// The core transaction data:
/// inputs, outputs, kernels, kernel offset
pub tx: Transaction,
/// base amount (excluding fee)
#[serde(with = "secp_ser::string_or_u64")]
pub amount: u64,
/// fee amount
#[serde(with = "secp_ser::string_or_u64")]
pub fee: u64,
/// Block height for the transaction
#[serde(with = "secp_ser::string_or_u64")]
pub height: u64,
/// Lock height
#[serde(with = "secp_ser::string_or_u64")]
pub lock_height: u64,
/// TTL, the block height at which wallets
/// should refuse to process the transaction and unlock all
/// associated outputs
#[serde(with = "secp_ser::opt_string_or_u64")]
pub ttl_cutoff_height: Option<u64>,
/// Participant data, each participant in the transaction will
/// insert their public data here. For now, 0 is sender and 1
/// is receiver, though this will change for multi-party
pub participant_data: Vec<ParticipantData>,
/// Payment Proof
#[serde(default = "default_payment_none")]
pub payment_proof: Option<PaymentInfo>,
}
fn default_payment_none() -> Option<PaymentInfo> {
None
}
/// Versioning and compatibility info about this slate
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct VersionCompatInfo {
/// The current version of the slate format
pub version: u16,
/// Original version this slate was converted from
pub orig_version: u16,
/// The grin block header version this slate is intended for
pub block_header_version: u16,
}
/// Helper just to facilitate serialization
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ParticipantMessages {
/// included messages
pub messages: Vec<ParticipantMessageData>,
}
impl Slate {
/// Attempt to find slate version
pub fn parse_slate_version(slate_json: &str) -> Result<u16, Error> {
let probe: SlateVersionProbe =
serde_json::from_str(slate_json).map_err(|_| ErrorKind::SlateVersionParse)?;
Ok(probe.version())
}
    /// Receive a slate, upgrade it to the latest version internally
pub fn deserialize_upgrade(slate_json: &str) -> Result<Slate, Error> {
let version = Slate::parse_slate_version(slate_json)?;
let v3: SlateV3 = match version {
3 => serde_json::from_str(slate_json).context(ErrorKind::SlateDeser)?,
2 => {
let v2: SlateV2 =
serde_json::from_str(slate_json).context(ErrorKind::SlateDeser)?;
SlateV3::from(v2)
}
_ => return Err(ErrorKind::SlateVersion(version).into()),
};
Ok(v3.into())
}
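    // Hedged usage sketch: a wallet receiving slate JSON of unknown version can
    // upgrade it in one call, e.g.
    //
    //     let slate = Slate::deserialize_upgrade(&slate_json)?;
    //
    // A version-2 payload is converted through SlateV3 before becoming a Slate.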
/// Create a new slate
pub fn blank(num_participants: usize) -> Slate {
Slate {
num_participants: num_participants,
id: Uuid::new_v4(),
tx: Transaction::empty(),
amount: 0,
fee: 0,
height: 0,
lock_height: 0,
ttl_cutoff_height: None,
participant_data: vec![],
version_info: VersionCompatInfo {
version: 2, // CURRENT_SLATE_VERSION,
orig_version: 2, // CURRENT_SLATE_VERSION,
block_header_version: 1, // GRIN_BLOCK_HEADER_VERSION,
},
payment_proof: None,
}
}
    /// Calculate the minimal Slate version. For exchange we want to keep the version as low as possible
    /// because there might be many non-upgraded wallets and we want to be friendly to them.
pub fn lowest_version(&self) -> SlateVersion {
if self.payment_proof.is_some() || self.ttl_cutoff_height.is_some() {
SlateVersion::V3
} else {
SlateVersion::V2
}
}
/// Adds selected inputs and outputs to the slate's transaction
/// Returns blinding factor
pub fn add_transaction_elements<K, B>(
&mut self,
keychain: &K,
builder: &B,
elems: Vec<Box<build::Append<K, B>>>,
) -> Result<BlindingFactor, Error>
where
K: Keychain,
B: ProofBuild,
{
self.update_kernel();
let (tx, blind) = build::partial_transaction(self.tx.clone(), elems, keychain, builder)?;
self.tx = tx;
Ok(blind)
}
/// Update the tx kernel based on kernel features derived from the current slate.
/// The fee may change as we build a transaction and we need to
/// update the tx kernel to reflect this during the tx building process.
pub fn update_kernel(&mut self) {
self.tx = self
.tx
.clone()
.replace_kernel(TxKernel::with_features(self.kernel_features()));
}
/// Completes callers part of round 1, adding public key info
/// to the slate
pub fn fill_round_1<K>(
&mut self,
keychain: &K,
sec_key: &mut SecretKey,
sec_nonce: &SecretKey,
participant_id: usize,
message: Option<String>,
use_test_rng: bool,
) -> Result<(), Error>
where
K: Keychain,
{
// Whoever does this first generates the offset
if self.tx.offset == BlindingFactor::zero() {
self.generate_offset(keychain, sec_key, use_test_rng)?;
}
self.add_participant_info(
keychain,
&sec_key,
&sec_nonce,
participant_id,
None,
message,
use_test_rng,
)?;
Ok(())
}
// Construct the appropriate kernel features based on our fee and lock_height.
// If lock_height is 0 then its a plain kernel, otherwise its a height locked kernel.
fn kernel_features(&self) -> KernelFeatures {
match self.lock_height {
0 => KernelFeatures::Plain { fee: self.fee },
_ => KernelFeatures::HeightLocked {
fee: self.fee,
lock_height: self.lock_height,
},
}
}
// This is the msg that we will sign as part of the tx kernel.
// If lock_height is 0 then build a plain kernel, otherwise build a height locked kernel.
fn msg_to_sign(&self) -> Result<secp::Message, Error> {
let msg = self.kernel_features().kernel_sig_msg()?;
Ok(msg)
}
/// Completes caller's part of round 2, completing signatures
pub fn fill_round_2<K>(
&mut self,
keychain: &K,
sec_key: &SecretKey,
sec_nonce: &SecretKey,
participant_id: usize,
) -> Result<(), Error>
where
K: Keychain,
{
self.check_fees()?;
self.verify_part_sigs(keychain.secp())?;
let sig_part = aggsig::calculate_partial_sig(
keychain.secp(),
sec_key,
sec_nonce,
&self.pub_nonce_sum(keychain.secp())?,
Some(&self.pub_blind_sum(keychain.secp())?),
&self.msg_to_sign()?,
)?;
for i in 0..self.num_participants {
if self.participant_data[i].id == participant_id as u64 {
self.participant_data[i].part_sig = Some(sig_part);
break;
}
}
Ok(())
}
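    // In aggsig terms, round 2 produces the Schnorr partial signature
    //
    //     s_i = k_i + e * x_i
    //
    // where k_i is this participant's secret nonce, x_i their secret blinding
    // key, and e the challenge derived from the summed nonces, summed public
    // excesses, and the kernel message. (Hedged: the exact challenge layout is
    // delegated to `aggsig::calculate_partial_sig`.)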
/// Creates the final signature, callable by either the sender or recipient
/// (after phase 3: sender confirmation)
pub fn finalize<K>(&mut self, keychain: &K) -> Result<(), Error>
where
K: Keychain,
{
let final_sig = self.finalize_signature(keychain)?;
self.finalize_transaction(keychain, &final_sig)
}
/// Return the participant with the given id
pub fn participant_with_id(&self, id: usize) -> Option<ParticipantData> {
for p in self.participant_data.iter() {
if p.id as usize == id {
return Some(p.clone());
}
}
None
}
/// Return the sum of public nonces
fn pub_nonce_sum(&self, secp: &secp::Secp256k1) -> Result<PublicKey, Error> {
let pub_nonces = self
.participant_data
.iter()
.map(|p| &p.public_nonce)
.collect();
match PublicKey::from_combination(secp, pub_nonces) {
Ok(k) => Ok(k),
Err(e) => Err(ErrorKind::Secp(e))?,
}
}
/// Return the sum of public blinding factors
fn pub_blind_sum(&self, secp: &secp::Secp256k1) -> Result<PublicKey, Error> {
let pub_blinds = self
.participant_data
.iter()
.map(|p| &p.public_blind_excess)
.collect();
match PublicKey::from_combination(secp, pub_blinds) {
Ok(k) => Ok(k),
Err(e) => Err(ErrorKind::Secp(e))?,
}
}
/// Return vector of all partial sigs
fn part_sigs(&self) -> Vec<&Signature> {
self.participant_data
.iter()
.map(|p| p.part_sig.as_ref().unwrap())
.collect()
}
/// Adds participants public keys to the slate data
/// and saves participant's transaction context
/// sec_key can be overridden to replace the blinding
/// factor (by whoever split the offset)
fn add_participant_info<K>(
&mut self,
keychain: &K,
sec_key: &SecretKey,
sec_nonce: &SecretKey,
id: usize,
part_sig: Option<Signature>,
message: Option<String>,
use_test_rng: bool,
) -> Result<(), Error>
where
K: Keychain,
{
// Add our public key and nonce to the slate
let pub_key = PublicKey::from_secret_key(keychain.secp(), &sec_key)?;
let pub_nonce = PublicKey::from_secret_key(keychain.secp(), &sec_nonce)?;
let test_message_nonce = SecretKey::from_slice(&keychain.secp(), &[1; 32]).unwrap();
let message_nonce = match use_test_rng {
false => None,
true => Some(&test_message_nonce),
};
// Sign the provided message
let message_sig = {
if let Some(m) = message.clone() {
let hashed = blake2b(secp::constants::MESSAGE_SIZE, &[], &m.as_bytes()[..]);
let m = secp::Message::from_slice(&hashed.as_bytes())?;
let res = aggsig::sign_single(
&keychain.secp(),
&m,
&sec_key,
message_nonce,
Some(&pub_key),
)?;
Some(res)
} else {
None
}
};
self.participant_data.push(ParticipantData {
id: id as u64,
public_blind_excess: pub_key,
public_nonce: pub_nonce,
part_sig: part_sig,
message: message,
message_sig: message_sig,
});
Ok(())
}
/// helper to return all participant messages
pub fn participant_messages(&self) -> ParticipantMessages {
let mut ret = ParticipantMessages { messages: vec![] };
for ref m in self.participant_data.iter() {
ret.messages
.push(ParticipantMessageData::from_participant_data(m));
}
ret
}
    /// Somebody involved needs to generate an offset with their private key.
    /// For now, we'll have the transaction initiator be responsible for it.
    /// The participant's secret key is adjusted in place to account for the
    /// offset, for use later in the transaction.
fn generate_offset<K>(
&mut self,
keychain: &K,
sec_key: &mut SecretKey,
use_test_rng: bool,
) -> Result<(), Error>
where
K: Keychain,
{
// Generate a random kernel offset here
// and subtract it from the blind_sum so we create
// the aggsig context with the "split" key
self.tx.offset = match use_test_rng {
false => {
BlindingFactor::from_secret_key(SecretKey::new(&keychain.secp(), &mut thread_rng()))
}
true => {
// allow for consistent test results
let mut test_rng = StepRng::new(1234567890u64, 1);
BlindingFactor::from_secret_key(SecretKey::new(&keychain.secp(), &mut test_rng))
}
};
let blind_offset = keychain.blind_sum(
&BlindSum::new()
.add_blinding_factor(BlindingFactor::from_secret_key(sec_key.clone()))
.sub_blinding_factor(self.tx.offset.clone()),
)?;
*sec_key = blind_offset.secret_key(&keychain.secp())?;
Ok(())
}
/// Checks the fees in the transaction in the given slate are valid
fn check_fees(&self) -> Result<(), Error> {
// double check the fee amount included in the partial tx
// we don't necessarily want to just trust the sender
// we could just overwrite the fee here (but we won't) due to the sig
let fee = tx_fee(
self.tx.inputs().len(),
self.tx.outputs().len(),
self.tx.kernels().len(),
None,
);
if fee > self.tx.fee() {
return Err(ErrorKind::Fee(
format!("Fee Dispute Error: {}, {}", self.tx.fee(), fee,).to_string(),
))?;
}
if fee > self.amount + self.fee {
let reason = format!(
"Rejected the transfer because transaction fee ({}) exceeds received amount ({}).",
amount_to_hr_string(fee, false),
amount_to_hr_string(self.amount + self.fee, false)
);
info!("{}", reason);
return Err(ErrorKind::Fee(reason.to_string()))?;
}
Ok(())
}
/// Verifies all of the partial signatures in the Slate are valid
fn verify_part_sigs(&self, secp: &secp::Secp256k1) -> Result<(), Error> {
// collect public nonces
for p in self.participant_data.iter() {
if p.is_complete() {
aggsig::verify_partial_sig(
secp,
p.part_sig.as_ref().unwrap(),
&self.pub_nonce_sum(secp)?,
&p.public_blind_excess,
Some(&self.pub_blind_sum(secp)?),
&self.msg_to_sign()?,
)?;
}
}
Ok(())
}
/// Verifies any messages in the slate's participant data match their signatures
pub fn verify_messages(&self) -> Result<(), Error> {
let secp = secp::Secp256k1::with_caps(secp::ContextFlag::VerifyOnly);
for p in self.participant_data.iter() {
if let Some(msg) = &p.message {
let hashed = blake2b(secp::constants::MESSAGE_SIZE, &[], &msg.as_bytes()[..]);
let m = secp::Message::from_slice(&hashed.as_bytes())?;
let signature = match p.message_sig {
None => {
error!("verify_messages - participant message doesn't have signature. Message: \"{}\"",
String::from_utf8_lossy(&msg.as_bytes()[..]));
return Err(ErrorKind::Signature(
"Optional participant messages doesn't have signature".to_owned(),
))?;
}
Some(s) => s,
};
if !aggsig::verify_single(
&secp,
&signature,
&m,
None,
&p.public_blind_excess,
Some(&p.public_blind_excess),
false,
) {
error!("verify_messages - participant message doesn't match signature. Message: \"{}\"",
String::from_utf8_lossy(&msg.as_bytes()[..]));
return Err(ErrorKind::Signature(
"Optional participant messages do not match signatures".to_owned(),
))?;
} else {
info!(
"verify_messages - signature verified ok. Participant message: \"{}\"",
String::from_utf8_lossy(&msg.as_bytes()[..])
);
}
}
}
Ok(())
}
/// This should be callable by either the sender or receiver
/// once phase 3 is done
///
    /// Receive Part 3 of the interactive transaction from the sender
    /// (sender confirmation). Returns Ok/Error.
    /// - Receiver receives sS
    /// - Receiver verifies the sender's sig by checking that
    ///   kS*G + e*xS*G = sS*G
    /// - Receiver calculates the final sig as s = (sS + sR, kS*G + kR*G)
    /// - Receiver puts into the TX kernel:
    ///
    ///   Signature S
    ///   pubkey xR*G + xS*G
    ///   fee (= M)
///
/// Returns completed transaction ready for posting to the chain
fn finalize_signature<K>(&mut self, keychain: &K) -> Result<Signature, Error>
where
K: Keychain,
{
self.verify_part_sigs(keychain.secp())?;
let part_sigs = self.part_sigs();
let pub_nonce_sum = self.pub_nonce_sum(keychain.secp())?;
let final_pubkey = self.pub_blind_sum(keychain.secp())?;
// get the final signature
let final_sig = aggsig::add_signatures(&keychain.secp(), part_sigs, &pub_nonce_sum)?;
// Calculate the final public key (for our own sanity check)
// Check our final sig verifies
aggsig::verify_completed_sig(
&keychain.secp(),
&final_sig,
&final_pubkey,
Some(&final_pubkey),
&self.msg_to_sign()?,
)?;
Ok(final_sig)
}
/// return the final excess
pub fn calc_excess<K>(&self, keychain: &K) -> Result<Commitment, Error>
where
K: Keychain,
{
let kernel_offset = &self.tx.offset;
let tx = self.tx.clone();
let overage = tx.fee() as i64;
let tx_excess = tx.sum_commitments(overage)?;
// subtract the kernel_excess (built from kernel_offset)
let offset_excess = keychain
.secp()
.commit(0, kernel_offset.secret_key(&keychain.secp())?)?;
Ok(keychain
.secp()
.commit_sum(vec![tx_excess], vec![offset_excess])?)
}
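    // In commitment terms, the value returned above is
    //
    //     excess = (sum(outputs) - sum(inputs) + fee*H) - offset*G
    //
    // i.e. the transaction's commitment sum with the kernel offset's
    // contribution to the blinding factor removed.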
/// builds a final transaction after the aggregated sig exchange
fn finalize_transaction<K>(
&mut self,
keychain: &K,
final_sig: &secp::Signature,
) -> Result<(), Error>
where
K: Keychain,
{
self.check_fees()?;
// build the final excess based on final tx and offset
let final_excess = self.calc_excess(keychain)?;
debug!("Final Tx excess: {:?}", final_excess);
let mut final_tx = self.tx.clone();
// update the tx kernel to reflect the offset excess and sig
assert_eq!(final_tx.kernels().len(), 1);
final_tx.kernels_mut()[0].excess = final_excess.clone();
final_tx.kernels_mut()[0].excess_sig = final_sig.clone();
// confirm the kernel verifies successfully before proceeding
debug!("Validating final transaction");
final_tx.kernels()[0].verify()?;
// confirm the overall transaction is valid (including the updated kernel)
// accounting for tx weight limits
let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
let _ = final_tx.validate(Weighting::AsTransaction, verifier_cache)?;
self.tx = final_tx;
Ok(())
}
}
impl Serialize for Slate {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
use serde::ser::Error;
let v3 = SlateV3::from(self);
match self.version_info.orig_version {
3 => v3.serialize(serializer),
// left as a reminder
2 => {
let v2 = SlateV2::from(&v3);
v2.serialize(serializer)
}
v => Err(S::Error::custom(format!("Unknown slate version {}", v))),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SlateVersionProbe {
#[serde(default)]
version: Option<u64>,
#[serde(default)]
version_info: Option<VersionCompatInfo>,
}
impl SlateVersionProbe {
pub fn version(&self) -> u16 {
match &self.version_info {
Some(v) => v.version,
None => match self.version {
Some(_) => 1,
None => 0,
},
}
}
}
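// Probing heuristic: V2+ slates carry a `version_info` struct, while earlier
// formats exposed at most a bare top-level `version` field. Its presence is
// therefore read as V1 and its absence as V0 (hedged: inferred from the
// historical slate formats this probe supports).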
// Coinbase data to versioned conversion.
impl From<CbData> for CoinbaseV3 {
fn from(cb: CbData) -> CoinbaseV3 {
CoinbaseV3 {
output: OutputV3::from(&cb.output),
kernel: TxKernelV3::from(&cb.kernel),
key_id: cb.key_id,
}
}
}
// Current slate version to versioned conversions
// Slate to versioned
impl From<Slate> for SlateV3 {
fn from(slate: Slate) -> SlateV3 {
let Slate {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
} = slate;
let participant_data = map_vec!(participant_data, |data| ParticipantDataV3::from(data));
let version_info = VersionCompatInfoV3::from(&version_info);
let payment_proof = match payment_proof {
Some(p) => Some(PaymentInfoV3::from(&p)),
None => None,
};
let tx = TransactionV3::from(tx);
SlateV3 {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
}
}
}
impl From<&Slate> for SlateV3 {
fn from(slate: &Slate) -> SlateV3 {
let Slate {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
} = slate;
let num_participants = *num_participants;
let id = *id;
let tx = TransactionV3::from(tx);
let amount = *amount;
let fee = *fee;
let height = *height;
let lock_height = *lock_height;
let ttl_cutoff_height = *ttl_cutoff_height;
let participant_data = map_vec!(participant_data, |data| ParticipantDataV3::from(data));
let version_info = VersionCompatInfoV3::from(version_info);
let payment_proof = match payment_proof {
Some(p) => Some(PaymentInfoV3::from(p)),
None => None,
};
SlateV3 {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
}
}
}
impl From<&ParticipantData> for ParticipantDataV3 {
fn from(data: &ParticipantData) -> ParticipantDataV3 {
let ParticipantData {
id,
public_blind_excess,
public_nonce,
part_sig,
message,
message_sig,
} = data;
let id = *id;
let public_blind_excess = *public_blind_excess;
let public_nonce = *public_nonce;
let part_sig = *part_sig;
let message: Option<String> = message.as_ref().map(|t| String::from(&**t));
let message_sig = *message_sig;
ParticipantDataV3 {
id,
public_blind_excess,
public_nonce,
part_sig,
message,
message_sig,
}
}
}
impl From<&VersionCompatInfo> for VersionCompatInfoV3 {
fn from(data: &VersionCompatInfo) -> VersionCompatInfoV3 {
let VersionCompatInfo {
version,
orig_version,
block_header_version,
} = data;
let version = *version;
let orig_version = *orig_version;
let block_header_version = *block_header_version;
VersionCompatInfoV3 {
version,
orig_version,
block_header_version,
}
}
}
impl From<&PaymentInfo> for PaymentInfoV3 {
fn from(data: &PaymentInfo) -> PaymentInfoV3 {
let PaymentInfo {
sender_address,
receiver_address,
receiver_signature,
} = data;
let sender_address = *sender_address;
let receiver_address = *receiver_address;
let receiver_signature = *receiver_signature;
PaymentInfoV3 {
sender_address,
receiver_address,
receiver_signature,
}
}
}
impl From<Transaction> for TransactionV3 {
fn from(tx: Transaction) -> TransactionV3 {
let Transaction { offset, body } = tx;
let body = TransactionBodyV3::from(&body);
TransactionV3 { offset, body }
}
}
impl From<&Transaction> for TransactionV3 {
fn from(tx: &Transaction) -> TransactionV3 {
let Transaction { offset, body } = tx;
let offset = offset.clone();
let body = TransactionBodyV3::from(body);
TransactionV3 { offset, body }
}
}
impl From<&TransactionBody> for TransactionBodyV3 {
fn from(body: &TransactionBody) -> TransactionBodyV3 {
let TransactionBody {
inputs,
outputs,
kernels,
} = body;
let inputs = map_vec!(inputs, |inp| InputV3::from(inp));
let outputs = map_vec!(outputs, |out| OutputV3::from(out));
let kernels = map_vec!(kernels, |kern| TxKernelV3::from(kern));
TransactionBodyV3 {
inputs,
outputs,
kernels,
}
}
}
impl From<&Input> for InputV3 {
fn from(input: &Input) -> InputV3 {
let Input { features, commit } = *input;
InputV3 { features, commit }
}
}
impl From<&Output> for OutputV3 {
fn from(output: &Output) -> OutputV3 {
let Output {
features,
commit,
proof,
} = *output;
OutputV3 {
features,
commit,
proof,
}
}
}
impl From<&TxKernel> for TxKernelV3 {
fn from(kernel: &TxKernel) -> TxKernelV3 {
let (features, fee, lock_height) = match kernel.features {
KernelFeatures::Plain { fee } => (CompatKernelFeatures::Plain, fee, 0),
KernelFeatures::Coinbase => (CompatKernelFeatures::Coinbase, 0, 0),
KernelFeatures::HeightLocked { fee, lock_height } => {
(CompatKernelFeatures::HeightLocked, fee, lock_height)
}
};
TxKernelV3 {
features,
fee,
lock_height,
excess: kernel.excess,
excess_sig: kernel.excess_sig,
}
}
}
// Versioned to current slate
impl From<SlateV3> for Slate {
fn from(slate: SlateV3) -> Slate {
let SlateV3 {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
} = slate;
let participant_data = map_vec!(participant_data, |data| ParticipantData::from(data));
let version_info = VersionCompatInfo::from(&version_info);
let payment_proof = match payment_proof {
Some(p) => Some(PaymentInfo::from(&p)),
None => None,
};
let tx = Transaction::from(tx);
Slate {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
}
}
}
impl From<&ParticipantDataV3> for ParticipantData {
fn from(data: &ParticipantDataV3) -> ParticipantData {
let ParticipantDataV3 {
id,
public_blind_excess,
public_nonce,
part_sig,
message,
message_sig,
} = data;
let id = *id;
let public_blind_excess = *public_blind_excess;
let public_nonce = *public_nonce;
let part_sig = *part_sig;
let message: Option<String> = message.as_ref().map(|t| String::from(&**t));
let message_sig = *message_sig;
ParticipantData {
id,
public_blind_excess,
public_nonce,
part_sig,
message,
message_sig,
}
}
}
impl From<&VersionCompatInfoV3> for VersionCompatInfo {
fn from(data: &VersionCompatInfoV3) -> VersionCompatInfo {
let VersionCompatInfoV3 {
version,
orig_version,
block_header_version,
} = data;
let version = *version;
let orig_version = *orig_version;
let block_header_version = *block_header_version;
VersionCompatInfo {
version,
orig_version,
block_header_version,
}
}
}
impl From<&PaymentInfoV3> for PaymentInfo {
fn from(data: &PaymentInfoV3) -> PaymentInfo {
let PaymentInfoV3 {
sender_address,
receiver_address,
receiver_signature,
} = data;
let sender_address = *sender_address;
let receiver_address = *receiver_address;
let receiver_signature = *receiver_signature;
PaymentInfo {
sender_address,
receiver_address,
receiver_signature,
}
}
}
impl From<TransactionV3> for Transaction {
fn from(tx: TransactionV3) -> Transaction {
let TransactionV3 { offset, body } = tx;
let body = TransactionBody::from(&body);
Transaction { offset, body }
}
}
impl From<&TransactionBodyV3> for TransactionBody {
fn from(body: &TransactionBodyV3) -> TransactionBody {
let TransactionBodyV3 {
inputs,
outputs,
kernels,
} = body;
let inputs = map_vec!(inputs, |inp| Input::from(inp));
let outputs = map_vec!(outputs, |out| Output::from(out));
let kernels = map_vec!(kernels, |kern| TxKernel::from(kern));
TransactionBody {
inputs,
outputs,
kernels,
}
}
}
impl From<&InputV3> for Input {
fn from(input: &InputV3) -> Input {
let InputV3 { features, commit } = *input;
Input { features, commit }
}
}
impl From<&OutputV3> for Output {
fn from(output: &OutputV3) -> Output {
let OutputV3 {
features,
commit,
proof,
} = *output;
Output {
features,
commit,
proof,
}
}
}
impl From<&TxKernelV3> for TxKernel {
fn from(kernel: &TxKernelV3) -> TxKernel {
let (fee, lock_height) = (kernel.fee, kernel.lock_height);
let features = match kernel.features {
CompatKernelFeatures::Plain => KernelFeatures::Plain { fee },
CompatKernelFeatures::Coinbase => KernelFeatures::Coinbase,
CompatKernelFeatures::HeightLocked => KernelFeatures::HeightLocked { fee, lock_height },
};
TxKernel {
features,
excess: kernel.excess,
excess_sig: kernel.excess_sig,
}
}
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum CompatKernelFeatures {
Plain,
Coinbase,
HeightLocked,
}
| 27.124576 | 97 | 0.686381 |
fe74ffb2210aede470a9cf8de10ed255d0a9f356 | 2,117 | #[doc = "Reader of register F2"]
pub type R = crate::R<u32, super::F2>;
#[doc = "Writer for register F2"]
pub type W = crate::W<u32, super::F2>;
#[doc = "Register F2 `reset()`'s with value 0"]
impl crate::ResetValue for super::F2 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `Reserved32`"]
pub type RESERVED32_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `Reserved32`"]
pub struct RESERVED32_W<'a> {
w: &'a mut W,
}
impl<'a> RESERVED32_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x00ff_ffff << 8)) | (((value as u32) & 0x00ff_ffff) << 8);
self.w
}
}
#[doc = "Reader of field `USBF2`"]
pub type USBF2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `USBF2`"]
pub struct USBF2_W<'a> {
w: &'a mut W,
}
impl<'a> USBF2_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
self.w
}
}
impl R {
#[doc = "Bits 8:31 - 31:8\\] Reserved"]
#[inline(always)]
pub fn reserved32(&self) -> RESERVED32_R {
RESERVED32_R::new(((self.bits >> 8) & 0x00ff_ffff) as u32)
}
#[doc = "Bits 0:7 - 7:0\\] Endpoint 2 FIFO register Reading this register unloads one byte from the EP2 OUT FIFO. Writing to this register loads one byte into the EP2 IN FIFO."]
#[inline(always)]
pub fn usbf2(&self) -> USBF2_R {
USBF2_R::new((self.bits & 0xff) as u8)
}
}
impl W {
#[doc = "Bits 8:31 - 31:8\\] Reserved"]
#[inline(always)]
pub fn reserved32(&mut self) -> RESERVED32_W {
RESERVED32_W { w: self }
}
#[doc = "Bits 0:7 - 7:0\\] Endpoint 2 FIFO register Reading this register unloads one byte from the EP2 OUT FIFO. Writing to this register loads one byte into the EP2 IN FIFO."]
#[inline(always)]
pub fn usbf2(&mut self) -> USBF2_W {
USBF2_W { w: self }
}
}
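// Hedged usage sketch (the `usb` peripheral handle is assumed; it would come
// from the generated PAC's `Peripherals::take()`):
//
//     let byte = usb.f2.read().usbf2().bits();            // unload one byte from the EP2 OUT FIFO
//     usb.f2.write(|w| unsafe { w.usbf2().bits(0xab) });  // load one byte into the EP2 IN FIFO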
| 32.569231 | 181 | 0.586207 |
5b3c719909ade2eda0ec09ed7d5f986415ff7bc5 | 7,992 | pub mod cli;
use borsh::{
BorshDeserialize,
BorshSerialize,
};
use clap::Clap;
use log::{
warn,
LevelFilter,
};
use solana_client::rpc_client::RpcClient;
use solana_program::{
hash::Hash,
instruction::{
AccountMeta,
Instruction,
},
pubkey::Pubkey,
system_program,
sysvar::{
clock,
rent,
},
};
use solana_sdk::{
commitment_config::CommitmentConfig,
signature::read_keypair_file,
transaction::Transaction,
};
use solana_transaction_status::UiTransactionEncoding;
use solitaire::{
processors::seeded::Seeded,
AccountState,
Derive,
Info,
};
use solitaire_client::{
AccEntry,
Keypair,
SolSigner,
ToInstruction,
};
use cli::{
Action,
Cli,
};
use bridge::{
accounts::{
Bridge,
FeeCollector,
Sequence,
SequenceDerivationData,
},
types::ConsistencyLevel,
CHAIN_ID_SOLANA,
};
use pyth2wormhole::{
attest::P2WEmitter,
config::P2WConfigAccount,
initialize::InitializeAccounts,
set_config::SetConfigAccounts,
types::PriceAttestation,
AttestData,
Pyth2WormholeConfig,
};
pub type ErrBox = Box<dyn std::error::Error>;
pub const SEQNO_PREFIX: &'static str = "Program log: Sequence: ";
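// The bridge program is expected to log the assigned message sequence as
// "Program log: Sequence: <n>"; after confirmation, `main` scrapes this prefix
// from the transaction logs to recover the sequence number.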
fn main() -> Result<(), ErrBox> {
let cli = Cli::parse();
init_logging(cli.log_level);
let payer = read_keypair_file(&*shellexpand::tilde(&cli.payer))?;
let rpc_client = RpcClient::new_with_commitment(cli.rpc_url, CommitmentConfig::finalized());
let p2w_addr = cli.p2w_addr;
let (recent_blockhash, _) = rpc_client.get_recent_blockhash()?;
let tx = match cli.action {
Action::Init {
owner_addr,
pyth_owner_addr,
wh_prog,
} => handle_init(
payer,
p2w_addr,
owner_addr,
wh_prog,
pyth_owner_addr,
recent_blockhash,
)?,
Action::SetConfig {
ref owner,
new_owner_addr,
new_wh_prog,
new_pyth_owner_addr,
} => handle_set_config(
payer,
p2w_addr,
read_keypair_file(&*shellexpand::tilde(&owner))?,
new_owner_addr,
new_wh_prog,
new_pyth_owner_addr,
recent_blockhash,
)?,
Action::Attest {
product_addr,
price_addr,
nonce,
} => handle_attest(
&rpc_client,
payer,
p2w_addr,
product_addr,
price_addr,
nonce,
recent_blockhash,
)?,
};
let sig = rpc_client.send_and_confirm_transaction_with_spinner(&tx)?;
// To complete attestation, retrieve sequence number from transaction logs
if let Action::Attest { .. } = cli.action {
let this_tx = rpc_client.get_transaction(&sig, UiTransactionEncoding::Json)?;
if let Some(logs) = this_tx.transaction.meta.and_then(|meta| meta.log_messages) {
for log in logs {
if log.starts_with(SEQNO_PREFIX) {
let seqno = log.replace(SEQNO_PREFIX, "");
println!("Sequence number: {}", seqno);
}
}
} else {
warn!("Could not get program logs for attestation");
}
}
Ok(())
}
fn handle_init(
payer: Keypair,
p2w_addr: Pubkey,
new_owner_addr: Pubkey,
wh_prog: Pubkey,
pyth_owner_addr: Pubkey,
recent_blockhash: Hash,
) -> Result<Transaction, ErrBox> {
use AccEntry::*;
let payer_pubkey = payer.pubkey();
let accs = InitializeAccounts {
payer: Signer(payer),
new_config: Derived(p2w_addr),
};
let config = Pyth2WormholeConfig {
owner: new_owner_addr,
wh_prog: wh_prog,
pyth_owner: pyth_owner_addr,
};
let ix_data = (pyth2wormhole::instruction::Instruction::Initialize, config);
let (ix, signers) = accs.to_ix(p2w_addr, ix_data.try_to_vec()?.as_slice())?;
let tx_signed = Transaction::new_signed_with_payer::<Vec<&Keypair>>(
&[ix],
Some(&payer_pubkey),
signers.iter().collect::<Vec<_>>().as_ref(),
recent_blockhash,
);
Ok(tx_signed)
}
fn handle_set_config(
payer: Keypair,
p2w_addr: Pubkey,
owner: Keypair,
new_owner_addr: Pubkey,
new_wh_prog: Pubkey,
new_pyth_owner_addr: Pubkey,
recent_blockhash: Hash,
) -> Result<Transaction, ErrBox> {
use AccEntry::*;
let payer_pubkey = payer.pubkey();
println!("Canary!");
let accs = SetConfigAccounts {
payer: Signer(payer),
current_owner: Signer(owner),
config: Derived(p2w_addr),
};
let config = Pyth2WormholeConfig {
owner: new_owner_addr,
wh_prog: new_wh_prog,
pyth_owner: new_pyth_owner_addr,
};
let ix_data = (pyth2wormhole::instruction::Instruction::SetConfig, config);
let (ix, signers) = accs.to_ix(p2w_addr, ix_data.try_to_vec()?.as_slice())?;
let tx_signed = Transaction::new_signed_with_payer::<Vec<&Keypair>>(
&[ix],
Some(&payer_pubkey),
signers.iter().collect::<Vec<_>>().as_ref(),
recent_blockhash,
);
Ok(tx_signed)
}
fn handle_attest(
rpc: &RpcClient, // Needed for reading Pyth account data
payer: Keypair,
p2w_addr: Pubkey,
product_addr: Pubkey,
price_addr: Pubkey,
nonce: u32,
recent_blockhash: Hash,
) -> Result<Transaction, ErrBox> {
let message_keypair = Keypair::new();
let emitter_addr = P2WEmitter::key(None, &p2w_addr);
let p2w_config_addr = P2WConfigAccount::<{ AccountState::Initialized }>::key(None, &p2w_addr);
let config =
Pyth2WormholeConfig::try_from_slice(rpc.get_account_data(&p2w_config_addr)?.as_slice())?;
// Derive dynamic seeded accounts
let seq_addr = Sequence::key(
&SequenceDerivationData {
emitter_key: &emitter_addr,
},
&config.wh_prog,
);
// Arrange Attest accounts
let acc_metas = vec![
// payer
AccountMeta::new(payer.pubkey(), true),
// system_program
AccountMeta::new_readonly(system_program::id(), false),
// config
AccountMeta::new_readonly(p2w_config_addr, false),
// pyth_product
AccountMeta::new_readonly(product_addr, false),
// pyth_price
AccountMeta::new_readonly(price_addr, false),
// clock
AccountMeta::new_readonly(clock::id(), false),
// wh_prog
AccountMeta::new_readonly(config.wh_prog, false),
// wh_bridge
AccountMeta::new(
Bridge::<{ AccountState::Initialized }>::key(None, &config.wh_prog),
false,
),
// wh_message
AccountMeta::new(message_keypair.pubkey(), true),
// wh_emitter
AccountMeta::new_readonly(emitter_addr, false),
// wh_sequence
AccountMeta::new(seq_addr, false),
// wh_fee_collector
AccountMeta::new(FeeCollector::<'_>::key(None, &config.wh_prog), false),
AccountMeta::new_readonly(rent::id(), false),
];
let ix_data = (
pyth2wormhole::instruction::Instruction::Attest,
AttestData {
nonce,
consistency_level: ConsistencyLevel::Finalized,
},
);
let ix = Instruction::new_with_bytes(p2w_addr, ix_data.try_to_vec()?.as_slice(), acc_metas);
// Signers that use off-chain keypairs
let signer_keypairs = vec![&payer, &message_keypair];
let tx_signed = Transaction::new_signed_with_payer::<Vec<&Keypair>>(
&[ix],
Some(&payer.pubkey()),
&signer_keypairs,
recent_blockhash,
);
Ok(tx_signed)
}
fn init_logging(verbosity: u32) {
use LevelFilter::*;
let filter = match verbosity {
0..=1 => Error,
2 => Warn,
3 => Info,
4 => Debug,
_other => Trace,
};
env_logger::builder().filter_level(filter).init();
}
| 25.371429 | 98 | 0.602603 |
090e0a1aebd464479db18e7304dbbbacb6e4030f | 2,520 | use super::test_repeated_packed_pb::*;
use protobuf_test_common::*;
// varint field number = 4
// unpacked tag = 20
// packed tag = 22
// sfixed32 field number = 5
// unpacked tag = 2d
// packed tag = 2a
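// In general, tag = (field_number << 3) | wire_type, with wire type 0 = varint,
// 2 = length-delimited (used for packed encoding), and 5 = 32-bit fixed.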
#[test]
fn test_write_unpacked() {
let mut test = TestUnpacked::new();
test.set_varints([17i32, 1000].to_vec());
test_serialize_deserialize("20 11 20 e8 07", &test);
let mut test = TestUnpacked::new();
test.set_sfixed32s([17i32, 1000].to_vec());
test_serialize_deserialize("2d 11 00 00 00 2d e8 03 00 00", &test);
}
#[test]
fn test_read_unpacked_to_unpacked() {
let mut test = TestUnpacked::new();
test.set_varints([17i32, 1000].to_vec());
test_deserialize("20 11 20 e8 07", &test);
let mut test = TestUnpacked::new();
test.set_sfixed32s([17i32, 1000].to_vec());
test_deserialize("2d 11 00 00 00 2d e8 03 00 00", &test);
}
#[test]
fn test_read_packed_to_unpacked() {
let mut test = TestUnpacked::new();
test.set_varints([17i32, 1000].to_vec());
test_deserialize("22 03 11 e8 07", &test);
let mut test = TestUnpacked::new();
test.set_sfixed32s([17i32, 1000].to_vec());
test_deserialize("2a 08 11 00 00 00 e8 03 00 00", &test);
}
#[test]
fn test_write_packed() {
let mut test = TestPacked::new();
test.set_varints([17i32, 1000].to_vec());
test_serialize_deserialize("22 03 11 e8 07", &test);
let mut test = TestPacked::new();
test.set_sfixed32s([17i32, 1000].to_vec());
test_serialize_deserialize("2a 08 11 00 00 00 e8 03 00 00", &test);
}
#[test]
fn test_read_unpacked_to_packed() {
let mut test = TestPacked::new();
test.set_varints([17i32, 1000].to_vec());
test_deserialize("20 11 20 e8 07", &test);
let mut test = TestPacked::new();
test.set_sfixed32s([17i32, 1000].to_vec());
test_deserialize("2d 11 00 00 00 2d e8 03 00 00", &test);
}
#[test]
fn test_read_packed_to_packed() {
let mut test = TestPacked::new();
test.set_varints([17i32, 1000].to_vec());
test_deserialize("22 03 11 e8 07", &test);
let mut test = TestPacked::new();
test.set_sfixed32s([17i32, 1000].to_vec());
test_deserialize("2a 08 11 00 00 00 e8 03 00 00", &test);
}
#[test]
fn test_issue_281() {
// Data len len was incorrectly computed.
// For 100 elements, bytes len is 400
// and varint len of 400 is 2,
// while varint len of 100 is 1.
let mut test = TestIssue281::new();
test.set_values((0..100).collect());
test_serialize_deserialize_no_hex(&test);
}
| 30 | 71 | 0.655556 |
e56128232b9410b1ed23012869cdc43926d6a061 | 67,845 | use analog_cli::{
cli::{process_command, request_and_confirm_airdrop, CliCommand, CliConfig},
spend_utils::SpendAmount,
stake::StakeAuthorizationIndexed,
test_utils::{check_ready, check_recent_balance},
};
use analog_cli_output::{parse_sign_only_reply_string, OutputFormat};
use analog_client::{
blockhash_query::{self, BlockhashQuery},
nonce_utils,
rpc_client::RpcClient,
};
use analog_core::test_validator::TestValidator;
use analog_faucet::faucet::run_local_faucet;
use analog_sdk::{
account_utils::StateMut,
commitment_config::CommitmentConfig,
nonce::State as NonceState,
pubkey::Pubkey,
signature::{keypair_from_seed, Keypair, Signer},
stake::{
self,
instruction::LockupArgs,
state::{Lockup, StakeAuthorize, StakeState},
},
};
use analog_streamer::socket::SocketAddrSpace;
#[test]
fn test_stake_delegation_force() {
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let authorized_withdrawer = Keypair::new().pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
.unwrap();
// Create vote account
let vote_keypair = Keypair::new();
config.signers = vec![&default_signer, &vote_keypair];
config.command = CliCommand::CreateVoteAccount {
vote_account: 1,
seed: None,
identity_account: 0,
authorized_voter: None,
authorized_withdrawer,
commission: 0,
memo: None,
};
process_command(&config).unwrap();
// Create stake account
let stake_keypair = Keypair::new();
config.signers = vec![&default_signer, &stake_keypair];
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
// Delegate stake fails (vote account had never voted)
config.signers = vec![&default_signer];
config.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_keypair.pubkey(),
vote_account_pubkey: vote_keypair.pubkey(),
stake_authority: 0,
force: false,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap_err();
// But if we force it, it works anyway!
config.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_keypair.pubkey(),
vote_account_pubkey: vote_keypair.pubkey(),
stake_authority: 0,
force: true,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
}
#[test]
fn test_seed_stake_delegation_and_deactivation() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let validator_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let mut config_validator = CliConfig::recent_for_tests();
config_validator.json_rpc_url = test_validator.rpc_url();
config_validator.signers = vec![&validator_keypair];
request_and_confirm_airdrop(
&rpc_client,
&config_validator,
&config_validator.signers[0].pubkey(),
100_000,
)
.unwrap();
check_recent_balance(100_000, &rpc_client, &config_validator.signers[0].pubkey());
let stake_address = Pubkey::create_with_seed(
&config_validator.signers[0].pubkey(),
"hi there",
&stake::program::id(),
)
.expect("bad seed");
    // Create stake account with a seed, using the validator identity as the
    // base address, which is nice ;)
config_validator.command = CliCommand::CreateStakeAccount {
stake_account: 0,
seed: Some("hi there".to_string()),
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config_validator).unwrap();
// Delegate stake
config_validator.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_address,
vote_account_pubkey: test_validator.vote_account_address(),
stake_authority: 0,
force: true,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_validator).unwrap();
// Deactivate stake
config_validator.command = CliCommand::DeactivateStake {
stake_account_pubkey: stake_address,
stake_authority: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
process_command(&config_validator).unwrap();
}
#[test]
fn test_stake_delegation_and_deactivation() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let validator_keypair = Keypair::new();
let mut config_validator = CliConfig::recent_for_tests();
config_validator.json_rpc_url = test_validator.rpc_url();
config_validator.signers = vec![&validator_keypair];
let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
request_and_confirm_airdrop(
&rpc_client,
&config_validator,
&config_validator.signers[0].pubkey(),
100_000,
)
.unwrap();
check_recent_balance(100_000, &rpc_client, &config_validator.signers[0].pubkey());
// Create stake account
config_validator.signers.push(&stake_keypair);
config_validator.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config_validator).unwrap();
// Delegate stake
config_validator.signers.pop();
config_validator.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_keypair.pubkey(),
vote_account_pubkey: test_validator.vote_account_address(),
stake_authority: 0,
force: true,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_validator).unwrap();
// Deactivate stake
config_validator.command = CliCommand::DeactivateStake {
stake_account_pubkey: stake_keypair.pubkey(),
stake_authority: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
process_command(&config_validator).unwrap();
}
#[test]
fn test_offline_stake_delegation_and_deactivation() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let mut config_validator = CliConfig::recent_for_tests();
config_validator.json_rpc_url = test_validator.rpc_url();
let validator_keypair = Keypair::new();
config_validator.signers = vec![&validator_keypair];
let mut config_payer = CliConfig::recent_for_tests();
config_payer.json_rpc_url = test_validator.rpc_url();
let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let mut config_offline = CliConfig::recent_for_tests();
config_offline.json_rpc_url = String::default();
config_offline.command = CliCommand::ClusterVersion;
let offline_keypair = Keypair::new();
config_offline.signers = vec![&offline_keypair];
// Verify that we cannot reach the cluster
process_command(&config_offline).unwrap_err();
request_and_confirm_airdrop(
&rpc_client,
&config_validator,
&config_validator.signers[0].pubkey(),
100_000,
)
.unwrap();
check_recent_balance(100_000, &rpc_client, &config_validator.signers[0].pubkey());
request_and_confirm_airdrop(
&rpc_client,
&config_offline,
&config_offline.signers[0].pubkey(),
100_000,
)
.unwrap();
check_recent_balance(100_000, &rpc_client, &config_offline.signers[0].pubkey());
// Create stake account
config_validator.signers.push(&stake_keypair);
config_validator.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: Some(config_offline.signers[0].pubkey()),
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config_validator).unwrap();
// Delegate stake offline
let blockhash = rpc_client.get_latest_blockhash().unwrap();
config_offline.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_keypair.pubkey(),
vote_account_pubkey: test_validator.vote_account_address(),
stake_authority: 0,
force: true,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
config_offline.output_format = OutputFormat::JsonCompact;
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only
.presigner_of(&config_offline.signers[0].pubkey())
.unwrap();
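    // A note on the flow here: the offline signer only emitted signatures
    // (sign_only: true above); the online payer now rebuilds the identical
    // transaction against the same blockhash and attaches those signatures
    // through the presigner, so no private key ever touches the online host.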
config_payer.signers = vec![&offline_presigner];
config_payer.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_keypair.pubkey(),
vote_account_pubkey: test_validator.vote_account_address(),
stake_authority: 0,
force: true,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_payer).unwrap();
// Deactivate stake offline
let blockhash = rpc_client.get_latest_blockhash().unwrap();
config_offline.command = CliCommand::DeactivateStake {
stake_account_pubkey: stake_keypair.pubkey(),
stake_authority: 0,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only
.presigner_of(&config_offline.signers[0].pubkey())
.unwrap();
config_payer.signers = vec![&offline_presigner];
config_payer.command = CliCommand::DeactivateStake {
stake_account_pubkey: stake_keypair.pubkey(),
stake_authority: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
process_command(&config_payer).unwrap();
}
#[test]
fn test_nonced_stake_delegation_and_deactivation() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let config_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let mut config = CliConfig::recent_for_tests();
config.signers = vec![&config_keypair];
config.json_rpc_url = test_validator.rpc_url();
let minimum_nonce_balance = rpc_client
.get_minimum_balance_for_rent_exemption(NonceState::size())
.unwrap();
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
.unwrap();
// Create stake account
let stake_keypair = Keypair::new();
config.signers.push(&stake_keypair);
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
// Create nonce account
let nonce_account = Keypair::new();
config.signers[1] = &nonce_account;
config.command = CliCommand::CreateNonceAccount {
nonce_account: 1,
seed: None,
nonce_authority: Some(config.signers[0].pubkey()),
memo: None,
amount: SpendAmount::Some(minimum_nonce_balance),
};
process_command(&config).unwrap();
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
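    // The hash pulled from the nonce account stands in for a recent blockhash,
    // which is what lets the follow-up transaction be signed now and submitted
    // later (the durable-nonce scheme). This fetch recurs throughout these
    // tests; a hypothetical helper could factor it out -- a sketch only,
    // assuming the same nonce_utils API used above:
    //
    //     fn fetch_nonce_hash(rpc_client: &RpcClient, nonce_pubkey: &Pubkey) -> Hash {
    //         nonce_utils::get_account_with_commitment(
    //             rpc_client,
    //             nonce_pubkey,
    //             CommitmentConfig::processed(),
    //         )
    //         .and_then(|ref a| nonce_utils::data_from_account(a))
    //         .unwrap()
    //         .blockhash
    //     }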
// Delegate stake
config.signers = vec![&config_keypair];
config.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_keypair.pubkey(),
vote_account_pubkey: test_validator.vote_account_address(),
stake_authority: 0,
force: true,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
nonce_hash,
),
nonce_account: Some(nonce_account.pubkey()),
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
// Deactivate stake
config.command = CliCommand::DeactivateStake {
stake_account_pubkey: stake_keypair.pubkey(),
stake_authority: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
nonce_hash,
),
nonce_account: Some(nonce_account.pubkey()),
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
process_command(&config).unwrap();
}
#[test]
fn test_stake_authorize() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
.unwrap();
let offline_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let mut config_offline = CliConfig::recent_for_tests();
config_offline.signers = vec![&offline_keypair];
config_offline.json_rpc_url = String::default();
let offline_authority_pubkey = config_offline.signers[0].pubkey();
config_offline.command = CliCommand::ClusterVersion;
// Verify that we cannot reach the cluster
process_command(&config_offline).unwrap_err();
request_and_confirm_airdrop(
&rpc_client,
&config_offline,
&config_offline.signers[0].pubkey(),
100_000,
)
.unwrap();
// Create stake account, identity is authority
let stake_keypair = Keypair::new();
let stake_account_pubkey = stake_keypair.pubkey();
config.signers.push(&stake_keypair);
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
// Assign new online stake authority
let online_authority = Keypair::new();
let online_authority_pubkey = online_authority.pubkey();
config.signers.pop();
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: online_authority_pubkey,
authority: 0,
new_authority_signer: None,
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.staker,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, online_authority_pubkey);
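    // The staker authority is read back out of the on-chain account to confirm
    // the authorize instruction actually landed; the same readback pattern
    // repeats after each authorization below.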
// Assign new online stake and withdraw authorities
let online_authority2 = Keypair::new();
let online_authority2_pubkey = online_authority2.pubkey();
let withdraw_authority = Keypair::new();
let withdraw_authority_pubkey = withdraw_authority.pubkey();
config.signers.push(&online_authority);
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![
StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: online_authority2_pubkey,
authority: 1,
new_authority_signer: None,
},
StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Withdrawer,
new_authority_pubkey: withdraw_authority_pubkey,
authority: 0,
new_authority_signer: None,
},
],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let (current_staker, current_withdrawer) = match stake_state {
StakeState::Initialized(meta) => (meta.authorized.staker, meta.authorized.withdrawer),
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_staker, online_authority2_pubkey);
assert_eq!(current_withdrawer, withdraw_authority_pubkey);
// Assign new offline stake authority
config.signers.pop();
config.signers.push(&online_authority2);
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: offline_authority_pubkey,
authority: 1,
new_authority_signer: None,
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.staker,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, offline_authority_pubkey);
// Offline assignment of new nonced stake authority
let nonced_authority = Keypair::new();
let nonced_authority_pubkey = nonced_authority.pubkey();
let blockhash = rpc_client.get_latest_blockhash().unwrap();
config_offline.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: nonced_authority_pubkey,
authority: 0,
new_authority_signer: None,
}],
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
config_offline.output_format = OutputFormat::JsonCompact;
let sign_reply = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sign_reply);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only.presigner_of(&offline_authority_pubkey).unwrap();
config.signers = vec![&offline_presigner];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: nonced_authority_pubkey,
authority: 0,
new_authority_signer: None,
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.staker,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, nonced_authority_pubkey);
// Create nonce account
let minimum_nonce_balance = rpc_client
.get_minimum_balance_for_rent_exemption(NonceState::size())
.unwrap();
let nonce_account = Keypair::new();
config.signers = vec![&default_signer, &nonce_account];
config.command = CliCommand::CreateNonceAccount {
nonce_account: 1,
seed: None,
nonce_authority: Some(offline_authority_pubkey),
memo: None,
amount: SpendAmount::Some(minimum_nonce_balance),
};
process_command(&config).unwrap();
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
// Nonced assignment of new online stake authority
let online_authority = Keypair::new();
let online_authority_pubkey = online_authority.pubkey();
config_offline.signers.push(&nonced_authority);
config_offline.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: online_authority_pubkey,
authority: 1,
new_authority_signer: None,
}],
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(nonce_hash),
nonce_account: Some(nonce_account.pubkey()),
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
let sign_reply = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sign_reply);
assert!(sign_only.has_all_signers());
assert_eq!(sign_only.blockhash, nonce_hash);
let offline_presigner = sign_only.presigner_of(&offline_authority_pubkey).unwrap();
let nonced_authority_presigner = sign_only.presigner_of(&nonced_authority_pubkey).unwrap();
config.signers = vec![&offline_presigner, &nonced_authority_presigner];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: online_authority_pubkey,
authority: 1,
new_authority_signer: None,
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
sign_only.blockhash,
),
nonce_account: Some(nonce_account.pubkey()),
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.staker,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, online_authority_pubkey);
let new_nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
assert_ne!(nonce_hash, new_nonce_hash);
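    // The stored nonce hash advances every time it is consumed, which is what
    // prevents the signed transaction from being replayed.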
}
#[test]
fn test_stake_authorize_with_fee_payer() {
analog_logger::setup();
const SIG_FEE: u64 = 42;
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator = TestValidator::with_custom_fees(
mint_pubkey,
SIG_FEE,
Some(faucet_addr),
SocketAddrSpace::Unspecified,
);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let default_pubkey = default_signer.pubkey();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
let payer_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let mut config_payer = CliConfig::recent_for_tests();
config_payer.signers = vec![&payer_keypair];
config_payer.json_rpc_url = test_validator.rpc_url();
let payer_pubkey = config_payer.signers[0].pubkey();
let mut config_offline = CliConfig::recent_for_tests();
let offline_signer = Keypair::new();
config_offline.signers = vec![&offline_signer];
config_offline.json_rpc_url = String::new();
let offline_pubkey = config_offline.signers[0].pubkey();
// Verify we're offline
config_offline.command = CliCommand::ClusterVersion;
process_command(&config_offline).unwrap_err();
request_and_confirm_airdrop(&rpc_client, &config, &default_pubkey, 100_000).unwrap();
check_recent_balance(100_000, &rpc_client, &config.signers[0].pubkey());
request_and_confirm_airdrop(&rpc_client, &config_payer, &payer_pubkey, 100_000).unwrap();
check_recent_balance(100_000, &rpc_client, &payer_pubkey);
request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
check_recent_balance(100_000, &rpc_client, &offline_pubkey);
check_ready(&rpc_client);
// Create stake account, identity is authority
let stake_keypair = Keypair::new();
let stake_account_pubkey = stake_keypair.pubkey();
config.signers.push(&stake_keypair);
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
    // `config` balance should be 50,000 less two signature fees: one for the
    // new stake account keypair and one for the fee payer
check_recent_balance(50_000 - SIG_FEE - SIG_FEE, &rpc_client, &default_pubkey);
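    // Worked out: 100_000 airdropped - 50_000 moved into the stake account
    // - 2 * SIG_FEE, since the transaction carried two signatures.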
// Assign authority with separate fee payer
config.signers = vec![&default_signer, &payer_keypair];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: offline_pubkey,
authority: 0,
new_authority_signer: None,
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 1,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
// `config` balance has not changed, despite submitting the TX
check_recent_balance(50_000 - SIG_FEE - SIG_FEE, &rpc_client, &default_pubkey);
// `config_payer` however has paid `config`'s authority sig
// and `config_payer`'s fee sig
check_recent_balance(100_000 - SIG_FEE - SIG_FEE, &rpc_client, &payer_pubkey);
// Assign authority with offline fee payer
let blockhash = rpc_client.get_latest_blockhash().unwrap();
config_offline.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: payer_pubkey,
authority: 0,
new_authority_signer: None,
}],
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
config_offline.output_format = OutputFormat::JsonCompact;
let sign_reply = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sign_reply);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
config.signers = vec![&offline_presigner];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: payer_pubkey,
authority: 0,
new_authority_signer: None,
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
// `config`'s balance again has not changed
check_recent_balance(50_000 - SIG_FEE - SIG_FEE, &rpc_client, &default_pubkey);
// `config_offline` however has paid 1 sig due to being both authority
// and fee payer
check_recent_balance(100_000 - SIG_FEE, &rpc_client, &offline_pubkey);
}
#[test]
fn test_stake_split() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator = TestValidator::with_custom_fees(
mint_pubkey,
1,
Some(faucet_addr),
SocketAddrSpace::Unspecified,
);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let offline_signer = Keypair::new();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
let mut config_offline = CliConfig::recent_for_tests();
config_offline.json_rpc_url = String::default();
config_offline.signers = vec![&offline_signer];
let offline_pubkey = config_offline.signers[0].pubkey();
// Verify we're offline
config_offline.command = CliCommand::ClusterVersion;
process_command(&config_offline).unwrap_err();
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 500_000)
.unwrap();
check_recent_balance(500_000, &rpc_client, &config.signers[0].pubkey());
request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
check_recent_balance(100_000, &rpc_client, &offline_pubkey);
// Create stake account, identity is authority
let minimum_stake_balance = rpc_client
.get_minimum_balance_for_rent_exemption(std::mem::size_of::<StakeState>())
.unwrap();
let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let stake_account_pubkey = stake_keypair.pubkey();
config.signers.push(&stake_keypair);
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: Some(offline_pubkey),
withdrawer: Some(offline_pubkey),
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(10 * minimum_stake_balance),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
check_recent_balance(
10 * minimum_stake_balance,
&rpc_client,
&stake_account_pubkey,
);
// Create nonce account
let minimum_nonce_balance = rpc_client
.get_minimum_balance_for_rent_exemption(NonceState::size())
.unwrap();
let nonce_account = keypair_from_seed(&[1u8; 32]).unwrap();
config.signers = vec![&default_signer, &nonce_account];
config.command = CliCommand::CreateNonceAccount {
nonce_account: 1,
seed: None,
nonce_authority: Some(offline_pubkey),
memo: None,
amount: SpendAmount::Some(minimum_nonce_balance),
};
process_command(&config).unwrap();
check_recent_balance(minimum_nonce_balance, &rpc_client, &nonce_account.pubkey());
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
// Nonced offline split
let split_account = keypair_from_seed(&[2u8; 32]).unwrap();
check_recent_balance(0, &rpc_client, &split_account.pubkey());
config_offline.signers.push(&split_account);
config_offline.command = CliCommand::SplitStake {
stake_account_pubkey,
stake_authority: 0,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(nonce_hash),
nonce_account: Some(nonce_account.pubkey()),
nonce_authority: 0,
memo: None,
split_stake_account: 1,
seed: None,
tocks: 2 * minimum_stake_balance,
fee_payer: 0,
};
config_offline.output_format = OutputFormat::JsonCompact;
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
config.signers = vec![&offline_presigner, &split_account];
config.command = CliCommand::SplitStake {
stake_account_pubkey,
stake_authority: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
sign_only.blockhash,
),
nonce_account: Some(nonce_account.pubkey()),
nonce_authority: 0,
memo: None,
split_stake_account: 1,
seed: None,
tocks: 2 * minimum_stake_balance,
fee_payer: 0,
};
process_command(&config).unwrap();
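    // The split moved 2 * minimum_stake_balance out of the 10x-funded account,
    // so 8x should remain behind; both sides are checked below.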
check_recent_balance(
8 * minimum_stake_balance,
&rpc_client,
&stake_account_pubkey,
);
check_recent_balance(
2 * minimum_stake_balance,
&rpc_client,
&split_account.pubkey(),
);
}
#[test]
fn test_stake_set_lockup() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator = TestValidator::with_custom_fees(
mint_pubkey,
1,
Some(faucet_addr),
SocketAddrSpace::Unspecified,
);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let offline_signer = Keypair::new();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
let mut config_offline = CliConfig::recent_for_tests();
config_offline.json_rpc_url = String::default();
config_offline.signers = vec![&offline_signer];
let offline_pubkey = config_offline.signers[0].pubkey();
// Verify we're offline
config_offline.command = CliCommand::ClusterVersion;
process_command(&config_offline).unwrap_err();
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 500_000)
.unwrap();
check_recent_balance(500_000, &rpc_client, &config.signers[0].pubkey());
request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
check_recent_balance(100_000, &rpc_client, &offline_pubkey);
// Create stake account, identity is authority
let minimum_stake_balance = rpc_client
.get_minimum_balance_for_rent_exemption(std::mem::size_of::<StakeState>())
.unwrap();
let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let stake_account_pubkey = stake_keypair.pubkey();
let lockup = Lockup {
custodian: config.signers[0].pubkey(),
..Lockup::default()
};
config.signers.push(&stake_keypair);
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: Some(offline_pubkey),
withdrawer: Some(config.signers[0].pubkey()),
withdrawer_signer: None,
lockup,
amount: SpendAmount::Some(10 * minimum_stake_balance),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
check_recent_balance(
10 * minimum_stake_balance,
&rpc_client,
&stake_account_pubkey,
);
// Online set lockup
let lockup = LockupArgs {
unix_timestamp: Some(1_581_534_570),
epoch: Some(200),
custodian: None,
};
config.signers.pop();
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: None,
custodian: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_lockup = match stake_state {
StakeState::Initialized(meta) => meta.lockup,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(
current_lockup.unix_timestamp,
lockup.unix_timestamp.unwrap()
);
assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
assert_eq!(current_lockup.custodian, config.signers[0].pubkey());
// Set custodian to another pubkey
let online_custodian = Keypair::new();
let online_custodian_pubkey = online_custodian.pubkey();
let lockup = LockupArgs {
unix_timestamp: Some(1_581_534_571),
epoch: Some(201),
custodian: Some(online_custodian_pubkey),
};
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: None,
custodian: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let lockup = LockupArgs {
unix_timestamp: Some(1_581_534_572),
epoch: Some(202),
custodian: None,
};
config.signers = vec![&default_signer, &online_custodian];
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: None,
custodian: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_lockup = match stake_state {
StakeState::Initialized(meta) => meta.lockup,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(
current_lockup.unix_timestamp,
lockup.unix_timestamp.unwrap()
);
assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
assert_eq!(current_lockup.custodian, online_custodian_pubkey);
// Set custodian to offline pubkey
let lockup = LockupArgs {
unix_timestamp: Some(1_581_534_573),
epoch: Some(203),
custodian: Some(offline_pubkey),
};
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: None,
custodian: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
// Create nonce account
let minimum_nonce_balance = rpc_client
.get_minimum_balance_for_rent_exemption(NonceState::size())
.unwrap();
let nonce_account = keypair_from_seed(&[1u8; 32]).unwrap();
let nonce_account_pubkey = nonce_account.pubkey();
config.signers = vec![&default_signer, &nonce_account];
config.command = CliCommand::CreateNonceAccount {
nonce_account: 1,
seed: None,
nonce_authority: Some(offline_pubkey),
memo: None,
amount: SpendAmount::Some(minimum_nonce_balance),
};
process_command(&config).unwrap();
check_recent_balance(minimum_nonce_balance, &rpc_client, &nonce_account_pubkey);
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
// Nonced offline set lockup
let lockup = LockupArgs {
unix_timestamp: Some(1_581_534_576),
epoch: Some(222),
custodian: None,
};
config_offline.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: None,
custodian: 0,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(nonce_hash),
nonce_account: Some(nonce_account_pubkey),
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
config_offline.output_format = OutputFormat::JsonCompact;
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
config.signers = vec![&offline_presigner];
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: None,
custodian: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_account_pubkey),
sign_only.blockhash,
),
nonce_account: Some(nonce_account_pubkey),
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_lockup = match stake_state {
StakeState::Initialized(meta) => meta.lockup,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(
current_lockup.unix_timestamp,
lockup.unix_timestamp.unwrap()
);
assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
assert_eq!(current_lockup.custodian, offline_pubkey);
}
#[test]
fn test_offline_nonced_create_stake_account_and_withdraw() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let mut config = CliConfig::recent_for_tests();
let default_signer = keypair_from_seed(&[1u8; 32]).unwrap();
config.signers = vec![&default_signer];
config.json_rpc_url = test_validator.rpc_url();
let mut config_offline = CliConfig::recent_for_tests();
let offline_signer = keypair_from_seed(&[2u8; 32]).unwrap();
config_offline.signers = vec![&offline_signer];
let offline_pubkey = config_offline.signers[0].pubkey();
config_offline.json_rpc_url = String::default();
config_offline.command = CliCommand::ClusterVersion;
// Verify that we cannot reach the cluster
process_command(&config_offline).unwrap_err();
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 200_000)
.unwrap();
check_recent_balance(200_000, &rpc_client, &config.signers[0].pubkey());
request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
check_recent_balance(100_000, &rpc_client, &offline_pubkey);
// Create nonce account
let minimum_nonce_balance = rpc_client
.get_minimum_balance_for_rent_exemption(NonceState::size())
.unwrap();
let nonce_account = keypair_from_seed(&[3u8; 32]).unwrap();
let nonce_pubkey = nonce_account.pubkey();
config.signers.push(&nonce_account);
config.command = CliCommand::CreateNonceAccount {
nonce_account: 1,
seed: None,
nonce_authority: Some(offline_pubkey),
memo: None,
amount: SpendAmount::Some(minimum_nonce_balance),
};
process_command(&config).unwrap();
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
// Create stake account offline
let stake_keypair = keypair_from_seed(&[4u8; 32]).unwrap();
let stake_pubkey = stake_keypair.pubkey();
config_offline.signers.push(&stake_keypair);
config_offline.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(nonce_hash),
nonce_account: Some(nonce_pubkey),
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
config_offline.output_format = OutputFormat::JsonCompact;
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
let stake_presigner = sign_only.presigner_of(&stake_pubkey).unwrap();
config.signers = vec![&offline_presigner, &stake_presigner];
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: Some(offline_pubkey),
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_pubkey),
sign_only.blockhash,
),
nonce_account: Some(nonce_pubkey),
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
check_recent_balance(50_000, &rpc_client, &stake_pubkey);
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
// Offline, nonced stake-withdraw
let recipient = keypair_from_seed(&[5u8; 32]).unwrap();
let recipient_pubkey = recipient.pubkey();
config_offline.signers.pop();
config_offline.command = CliCommand::WithdrawStake {
stake_account_pubkey: stake_pubkey,
destination_account_pubkey: recipient_pubkey,
amount: SpendAmount::Some(42),
withdraw_authority: 0,
custodian: None,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(nonce_hash),
nonce_account: Some(nonce_pubkey),
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
config.signers = vec![&offline_presigner];
config.command = CliCommand::WithdrawStake {
stake_account_pubkey: stake_pubkey,
destination_account_pubkey: recipient_pubkey,
amount: SpendAmount::Some(42),
withdraw_authority: 0,
custodian: None,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_pubkey),
sign_only.blockhash,
),
nonce_account: Some(nonce_pubkey),
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
process_command(&config).unwrap();
check_recent_balance(42, &rpc_client, &recipient_pubkey);
// Fetch nonce hash
let nonce_hash = nonce_utils::get_account_with_commitment(
&rpc_client,
&nonce_account.pubkey(),
CommitmentConfig::processed(),
)
.and_then(|ref a| nonce_utils::data_from_account(a))
.unwrap()
.blockhash;
// Create another stake account. This time with seed
let seed = "seedy";
config_offline.signers = vec![&offline_signer, &stake_keypair];
config_offline.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: Some(seed.to_string()),
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(nonce_hash),
nonce_account: Some(nonce_pubkey),
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
let stake_presigner = sign_only.presigner_of(&stake_pubkey).unwrap();
config.signers = vec![&offline_presigner, &stake_presigner];
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: Some(seed.to_string()),
staker: Some(offline_pubkey),
withdrawer: Some(offline_pubkey),
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(
blockhash_query::Source::NonceAccount(nonce_pubkey),
sign_only.blockhash,
),
nonce_account: Some(nonce_pubkey),
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
let seed_address =
Pubkey::create_with_seed(&stake_pubkey, seed, &stake::program::id()).unwrap();
check_recent_balance(50_000, &rpc_client, &seed_address);
}
#[test]
fn test_stake_checked_instructions() {
analog_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
.unwrap();
// Create stake account with withdrawer
let stake_keypair = Keypair::new();
let stake_account_pubkey = stake_keypair.pubkey();
let withdrawer_keypair = Keypair::new();
let withdrawer_pubkey = withdrawer_keypair.pubkey();
config.signers.push(&stake_keypair);
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: Some(withdrawer_pubkey),
withdrawer_signer: Some(1),
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
    process_command(&config).unwrap_err(); // unsigned withdrawer should fail
config.signers = vec![&default_signer, &stake_keypair, &withdrawer_keypair];
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: Some(withdrawer_pubkey),
withdrawer_signer: Some(1),
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
// Re-authorize account, checking new authority
let staker_keypair = Keypair::new();
let staker_pubkey = staker_keypair.pubkey();
config.signers = vec![&default_signer];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: staker_pubkey,
authority: 0,
new_authority_signer: Some(0),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap_err(); // unsigned authority should fail
config.signers = vec![&default_signer, &staker_keypair];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: staker_pubkey,
authority: 0,
new_authority_signer: Some(1),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.staker,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, staker_pubkey);
let new_withdrawer_keypair = Keypair::new();
let new_withdrawer_pubkey = new_withdrawer_keypair.pubkey();
config.signers = vec![&default_signer, &withdrawer_keypair];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Withdrawer,
new_authority_pubkey: new_withdrawer_pubkey,
authority: 1,
new_authority_signer: Some(1),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap_err(); // unsigned authority should fail
config.signers = vec![
&default_signer,
&withdrawer_keypair,
&new_withdrawer_keypair,
];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Withdrawer,
new_authority_pubkey: new_withdrawer_pubkey,
authority: 1,
new_authority_signer: Some(2),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.withdrawer,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, new_withdrawer_pubkey);
// Set lockup, checking new custodian
let custodian = Keypair::new();
let custodian_pubkey = custodian.pubkey();
let lockup = LockupArgs {
unix_timestamp: Some(1_581_534_570),
epoch: Some(200),
custodian: Some(custodian_pubkey),
};
config.signers = vec![&default_signer, &new_withdrawer_keypair];
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: Some(1),
custodian: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap_err(); // unsigned new custodian should fail
config.signers = vec![&default_signer, &new_withdrawer_keypair, &custodian];
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: Some(2),
custodian: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_lockup = match stake_state {
StakeState::Initialized(meta) => meta.lockup,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(
current_lockup.unix_timestamp,
lockup.unix_timestamp.unwrap()
);
assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
assert_eq!(current_lockup.custodian, custodian_pubkey);
}
| 35.915828 | 100 | 0.666151 |
d9d5ee3e790e6713b32f1da8c2f0b31087ed61f4 | 3,075 | use crate::edwards_bls12::{fq::Fq, fr::Fr};
use algebra_core::{
biginteger::BigInteger256,
curves::{
models::{ModelParameters, MontgomeryModelParameters, TEModelParameters},
twisted_edwards_extended::{GroupAffine, GroupProjective},
},
field_new,
};
#[cfg(test)]
mod tests;
pub type EdwardsAffine = GroupAffine<EdwardsParameters>;
pub type EdwardsProjective = GroupProjective<EdwardsParameters>;
#[derive(Clone, Default, PartialEq, Eq)]
pub struct EdwardsParameters;
impl ModelParameters for EdwardsParameters {
type BaseField = Fq;
type ScalarField = Fr;
}
impl TEModelParameters for EdwardsParameters {
/// COEFF_A = -1
#[rustfmt::skip]
const COEFF_A: Fq = field_new!(Fq, BigInteger256([
0x8cf500000000000e,
0xe75281ef6000000e,
0x49dc37a90b0ba012,
0x55f8b2c6e710ab9,
]));
/// COEFF_D = 3021
#[rustfmt::skip]
const COEFF_D: Fq = field_new!(Fq, BigInteger256([
0xd047ffffffff5e30,
0xf0a91026ffff57d2,
0x9013f560d102582,
0x9fd242ca7be5700,
]));
/// COFACTOR = 4
const COFACTOR: &'static [u64] = &[4];
/// COFACTOR_INV =
/// 527778859339273151515551558673846658209717731602102048798421311598680340096
#[rustfmt::skip]
const COFACTOR_INV: Fr = field_new!(Fr, BigInteger256([
10836190823041854989,
14880086764632731920,
5023208332782666747,
239524813690824359,
]));
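    // COFACTOR_INV is expected to satisfy COFACTOR * COFACTOR_INV == 1 in the
    // scalar field Fr; nothing checks that at compile time. A hypothetical
    // sanity test (a sketch, assuming `Fr: From<u64>` and `One` are in scope):
    //
    //     assert_eq!(Fr::from(4u64) * EdwardsParameters::COFACTOR_INV, Fr::one());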
/// Generated randomly
const AFFINE_GENERATOR_COEFFS: (Self::BaseField, Self::BaseField) = (GENERATOR_X, GENERATOR_Y);
type MontgomeryModelParameters = EdwardsParameters;
    /// Multiplication by `a` is just negation, since `COEFF_A == -1`
    /// (see the constant above).
#[inline(always)]
fn mul_by_a(elem: &Self::BaseField) -> Self::BaseField {
-*elem
}
}
impl MontgomeryModelParameters for EdwardsParameters {
/// COEFF_A = 0x8D26E3FADA9010A26949031ECE3971B93952AD84D4753DDEDB748DA37E8F552
#[rustfmt::skip]
const COEFF_A: Fq = field_new!(Fq, BigInteger256([
13800168384327121454u64,
6841573379969807446u64,
12529593083398462246u64,
853978956621483129u64,
]));
/// COEFF_B = 0x9D8F71EEC83A44C3A1FBCEC6F5418E5C6154C2682B8AC231C5A3725C8170AAD
#[rustfmt::skip]
const COEFF_B: Fq = field_new!(Fq, BigInteger256([
7239382437352637935u64,
14509846070439283655u64,
5083066350480839936u64,
1265663645916442191u64,
]));
type TEModelParameters = EdwardsParameters;
}
/// GENERATOR_X =
/// 7810607721416582242904415504650443951498042435501746664987470571546413371306
#[rustfmt::skip]
const GENERATOR_X: Fq = field_new!(Fq, BigInteger256([
0x5bbc9878d817221d,
0xd2b03489424e720,
0x6b66f128c16bb3c9,
0xdd3bff78733576d,
]));
/// GENERATOR_Y =
/// 1867362672570137759132108893390349941423731440336755218616442213142473202417
#[rustfmt::skip]
const GENERATOR_Y: Fq = field_new!(Fq, BigInteger256([
0x471517ae5e5e979e,
0xd9c97f6a73a7ff83,
0x85a95b45a5494402,
0xfad27c9b545b1f0,
]));
| 27.954545 | 99 | 0.699187 |
16a44cb7850cfa537c5f0a2a25a0ebe4868e4b9d | 1,866 | use protoc_rust::Customize;
use glob::glob;
use std::fs;
use std::path::Path;
use std::env;
use walkdir::WalkDir;
use std::io::Write;
fn main() {
let proto_root = "./proto";
let generated_root = "./src/generated";
let generated_root_absolute = Path::new(generated_root).canonicalize().unwrap();
println!("cargo:rerun-if-changed={}", proto_root);
let current_dir = env::current_dir().unwrap();
env::set_current_dir(proto_root).unwrap();
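    // Working from inside the proto root lets `includes: &["./"]` below resolve
    // imports between .proto files; the original cwd is restored after the loop.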
    // Compile every .proto under the proto root, mirroring its directory
    // layout under the generated root.
for path in glob("./**/*.proto").unwrap().filter_map(Result::ok) {
let dir = generated_root_absolute.join(path.parent().unwrap());
fs::create_dir_all(&dir).unwrap();
let path = path.to_str().expect("").to_owned();
protoc_rust::run(protoc_rust::Args {
out_dir: dir.to_str().unwrap(),
input: &[path.as_str()],
includes: &["./"],
customize: Customize {
..Default::default()
},
}).expect("Failed to compile proto.");
}
env::set_current_dir(current_dir).unwrap();
    // Generate a mod.rs in each output directory re-exporting the generated modules.
for dir in WalkDir::new(generated_root)
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_dir()) {
let mods: Vec<String> = WalkDir::new(dir.path())
.min_depth(1)
.max_depth(1)
.into_iter()
.filter_map(Result::ok)
.map(|e| e.path().file_stem().unwrap().to_str().unwrap().to_owned())
.filter(|e| !e.starts_with("."))
.filter(|e| e != "mod") // skip mod.rs
.collect();
let mut file = fs::File::create(dir.path().join("mod.rs")).unwrap();
write!(file, "// generated by build.rs\n").unwrap();
for name in mods.iter() {
write!(file, "pub mod {};\n", name).unwrap();
}
}
}
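// Illustration (hypothetical layout, not part of this script's logic): given
// `proto/foo/a.proto` and `proto/foo/b.proto`, the loops above would produce
// `src/generated/foo/{a.rs,b.rs}` plus a `src/generated/foo/mod.rs` containing:
//
//     // generated by build.rs
//     pub mod a;
//     pub mod b;
//
// and `src/generated/mod.rs` itself would gain a `pub mod foo;` line, since the
// directory walk treats subdirectories as modules too.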
| 31.1 | 84 | 0.551447 |
ddaf8551f6ed1a6d6ff58707dc858165ecb26e27 | 3,092 | // Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_exception::Result;
use super::now::NowFunction;
use super::RoundFunction;
use super::ToStartOfISOYearFunction;
use super::ToStartOfYearFunction;
use super::ToYYYYMMDDFunction;
use super::ToYYYYMMDDhhmmssFunction;
use super::ToYYYYMMFunction;
use super::TodayFunction;
use super::TomorrowFunction;
use super::YesterdayFunction;
use crate::scalars::FactoryFuncRef;
#[derive(Clone)]
pub struct DateFunction {}
impl DateFunction {
pub fn register(map: FactoryFuncRef) -> Result<()> {
let mut map = map.write();
map.insert("today".into(), TodayFunction::try_create);
map.insert("yesterday".into(), YesterdayFunction::try_create);
map.insert("tomorrow".into(), TomorrowFunction::try_create);
map.insert("now".into(), NowFunction::try_create);
map.insert("toYYYYMM".into(), ToYYYYMMFunction::try_create);
map.insert("toYYYYMMDD".into(), ToYYYYMMDDFunction::try_create);
map.insert(
"toYYYYMMDDhhmmss".into(),
ToYYYYMMDDhhmmssFunction::try_create,
);
map.insert("toStartOfYear".into(), ToStartOfYearFunction::try_create);
map.insert(
"toStartOfISOYear".into(),
ToStartOfISOYearFunction::try_create,
);
        // Rounders: each entry maps a function name to a RoundFunction with a
        // fixed rounding window, expressed in seconds.
{
map.insert("toStartOfSecond".into(), |display_name| {
RoundFunction::try_create(display_name, 1)
});
map.insert("toStartOfMinute".into(), |display_name| {
RoundFunction::try_create(display_name, 60)
});
map.insert("toStartOfFiveMinutes".into(), |display_name| {
RoundFunction::try_create(display_name, 5 * 60)
});
map.insert("toStartOfTenMinutes".into(), |display_name| {
RoundFunction::try_create(display_name, 10 * 60)
});
map.insert("toStartOfFifteenMinutes".into(), |display_name| {
RoundFunction::try_create(display_name, 15 * 60)
});
map.insert("timeSlot".into(), |display_name| {
RoundFunction::try_create(display_name, 30 * 60)
});
map.insert("toStartOfHour".into(), |display_name| {
RoundFunction::try_create(display_name, 60 * 60)
});
map.insert("toStartOfDay".into(), |display_name| {
RoundFunction::try_create(display_name, 60 * 60 * 24)
});
}
Ok(())
}
}
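// Sketch of what registration yields (illustrative; the Function/FactoryFuncRef
// plumbing is assumed from the surrounding crate): looking up a rounder by name
// and invoking the stored factory returns a RoundFunction with a fixed window,
// e.g. "toStartOfFiveMinutes" -> RoundFunction::try_create(name, 300), which
// presumably truncates a timestamp down to the nearest 300-second boundary.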
| 35.54023 | 78 | 0.63066 |
c1d438335dbd330bd90f7e9878be307e4dd8b8bb | 178,994 | // This file is generated by rust-protobuf 2.17.0. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![rustfmt::skip]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `src/config/internal/config.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_17_0;
#[derive(PartialEq,Clone,Default)]
pub struct DNS {
// message fields
pub servers: ::protobuf::RepeatedField<::std::string::String>,
pub bind: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a DNS {
fn default() -> &'a DNS {
<DNS as ::protobuf::Message>::default_instance()
}
}
impl DNS {
pub fn new() -> DNS {
::std::default::Default::default()
}
// repeated string servers = 1;
pub fn get_servers(&self) -> &[::std::string::String] {
&self.servers
}
pub fn clear_servers(&mut self) {
self.servers.clear();
}
// Param is passed by value, moved
pub fn set_servers(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.servers = v;
}
// Mutable pointer to the field.
pub fn mut_servers(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.servers
}
// Take field
pub fn take_servers(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.servers, ::protobuf::RepeatedField::new())
}
// string bind = 2;
pub fn get_bind(&self) -> &str {
&self.bind
}
pub fn clear_bind(&mut self) {
self.bind.clear();
}
// Param is passed by value, moved
pub fn set_bind(&mut self, v: ::std::string::String) {
self.bind = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_bind(&mut self) -> &mut ::std::string::String {
&mut self.bind
}
// Take field
pub fn take_bind(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.bind, ::std::string::String::new())
}
}
impl ::protobuf::Message for DNS {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.servers)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.bind)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.servers {
my_size += ::protobuf::rt::string_size(1, &value);
};
if !self.bind.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.bind);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.servers {
os.write_string(1, &v)?;
};
if !self.bind.is_empty() {
os.write_string(2, &self.bind)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> DNS {
DNS::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"servers",
|m: &DNS| { &m.servers },
|m: &mut DNS| { &mut m.servers },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"bind",
|m: &DNS| { &m.bind },
|m: &mut DNS| { &mut m.bind },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<DNS>(
"DNS",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static DNS {
static instance: ::protobuf::rt::LazyV2<DNS> = ::protobuf::rt::LazyV2::INIT;
instance.get(DNS::new)
}
}
impl ::protobuf::Clear for DNS {
fn clear(&mut self) {
self.servers.clear();
self.bind.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for DNS {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for DNS {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
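// Usage sketch (added for illustration; this file is generated and normally
// left untouched): building a DNS message and round-tripping it through the
// rust-protobuf 2.x wire format via the `Message` trait.
#[cfg(test)]
#[test]
fn dns_roundtrip_sketch() {
    use ::protobuf::Message;
    let mut dns = DNS::new();
    dns.mut_servers().push("1.1.1.1".to_string());
    dns.set_bind("0.0.0.0:53".to_string());
    // Serialize to bytes, parse back, and check the messages are identical.
    let bytes = dns.write_to_bytes().expect("serialize");
    let parsed = DNS::parse_from_bytes(&bytes).expect("parse");
    assert_eq!(dns, parsed);
}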
#[derive(PartialEq,Clone,Default)]
pub struct Log {
// message fields
pub level: Log_Level,
pub output: Log_Output,
pub output_file: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Log {
fn default() -> &'a Log {
<Log as ::protobuf::Message>::default_instance()
}
}
impl Log {
pub fn new() -> Log {
::std::default::Default::default()
}
// .Log.Level level = 1;
pub fn get_level(&self) -> Log_Level {
self.level
}
pub fn clear_level(&mut self) {
self.level = Log_Level::TRACE;
}
// Param is passed by value, moved
pub fn set_level(&mut self, v: Log_Level) {
self.level = v;
}
// .Log.Output output = 2;
pub fn get_output(&self) -> Log_Output {
self.output
}
pub fn clear_output(&mut self) {
self.output = Log_Output::CONSOLE;
}
// Param is passed by value, moved
pub fn set_output(&mut self, v: Log_Output) {
self.output = v;
}
// string output_file = 3;
pub fn get_output_file(&self) -> &str {
&self.output_file
}
pub fn clear_output_file(&mut self) {
self.output_file.clear();
}
// Param is passed by value, moved
pub fn set_output_file(&mut self, v: ::std::string::String) {
self.output_file = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_output_file(&mut self) -> &mut ::std::string::String {
&mut self.output_file
}
// Take field
pub fn take_output_file(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.output_file, ::std::string::String::new())
}
}
impl ::protobuf::Message for Log {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.level, 1, &mut self.unknown_fields)?
},
2 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.output, 2, &mut self.unknown_fields)?
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.output_file)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.level != Log_Level::TRACE {
my_size += ::protobuf::rt::enum_size(1, self.level);
}
if self.output != Log_Output::CONSOLE {
my_size += ::protobuf::rt::enum_size(2, self.output);
}
if !self.output_file.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.output_file);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.level != Log_Level::TRACE {
os.write_enum(1, ::protobuf::ProtobufEnum::value(&self.level))?;
}
if self.output != Log_Output::CONSOLE {
os.write_enum(2, ::protobuf::ProtobufEnum::value(&self.output))?;
}
if !self.output_file.is_empty() {
os.write_string(3, &self.output_file)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Log {
Log::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<Log_Level>>(
"level",
|m: &Log| { &m.level },
|m: &mut Log| { &mut m.level },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<Log_Output>>(
"output",
|m: &Log| { &m.output },
|m: &mut Log| { &mut m.output },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"output_file",
|m: &Log| { &m.output_file },
|m: &mut Log| { &mut m.output_file },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Log>(
"Log",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Log {
static instance: ::protobuf::rt::LazyV2<Log> = ::protobuf::rt::LazyV2::INIT;
instance.get(Log::new)
}
}
impl ::protobuf::Clear for Log {
fn clear(&mut self) {
self.level = Log_Level::TRACE;
self.output = Log_Output::CONSOLE;
self.output_file.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Log {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Log {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum Log_Level {
TRACE = 0,
DEBUG = 1,
INFO = 2,
WARN = 3,
ERROR = 4,
}
impl ::protobuf::ProtobufEnum for Log_Level {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<Log_Level> {
match value {
0 => ::std::option::Option::Some(Log_Level::TRACE),
1 => ::std::option::Option::Some(Log_Level::DEBUG),
2 => ::std::option::Option::Some(Log_Level::INFO),
3 => ::std::option::Option::Some(Log_Level::WARN),
4 => ::std::option::Option::Some(Log_Level::ERROR),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [Log_Level] = &[
Log_Level::TRACE,
Log_Level::DEBUG,
Log_Level::INFO,
Log_Level::WARN,
Log_Level::ERROR,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<Log_Level>("Log.Level", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for Log_Level {
}
impl ::std::default::Default for Log_Level {
fn default() -> Self {
Log_Level::TRACE
}
}
impl ::protobuf::reflect::ProtobufValue for Log_Level {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum Log_Output {
CONSOLE = 0,
FILE = 1,
}
impl ::protobuf::ProtobufEnum for Log_Output {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<Log_Output> {
match value {
0 => ::std::option::Option::Some(Log_Output::CONSOLE),
1 => ::std::option::Option::Some(Log_Output::FILE),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [Log_Output] = &[
Log_Output::CONSOLE,
Log_Output::FILE,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<Log_Output>("Log.Output", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for Log_Output {
}
impl ::std::default::Default for Log_Output {
fn default() -> Self {
Log_Output::CONSOLE
}
}
impl ::protobuf::reflect::ProtobufValue for Log_Output {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
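// Sketch (illustrative, not generated): `ProtobufEnum` converts between enum
// variants and their i32 wire values; values outside the defined range decode
// to `None` rather than panicking.
#[cfg(test)]
#[test]
fn log_enum_i32_sketch() {
    use ::protobuf::ProtobufEnum;
    assert_eq!(Log_Level::from_i32(4), Some(Log_Level::ERROR));
    assert_eq!(Log_Output::from_i32(9), None);
}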
#[derive(PartialEq,Clone,Default)]
pub struct TUNInboundSettings {
// message fields
pub fd: i32,
pub name: ::std::string::String,
pub address: ::std::string::String,
pub gateway: ::std::string::String,
pub netmask: ::std::string::String,
pub mtu: i32,
pub fake_dns_exclude: ::protobuf::RepeatedField<::std::string::String>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a TUNInboundSettings {
fn default() -> &'a TUNInboundSettings {
<TUNInboundSettings as ::protobuf::Message>::default_instance()
}
}
impl TUNInboundSettings {
pub fn new() -> TUNInboundSettings {
::std::default::Default::default()
}
// int32 fd = 1;
pub fn get_fd(&self) -> i32 {
self.fd
}
pub fn clear_fd(&mut self) {
self.fd = 0;
}
// Param is passed by value, moved
pub fn set_fd(&mut self, v: i32) {
self.fd = v;
}
// string name = 2;
pub fn get_name(&self) -> &str {
&self.name
}
pub fn clear_name(&mut self) {
self.name.clear();
}
// Param is passed by value, moved
pub fn set_name(&mut self, v: ::std::string::String) {
self.name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_name(&mut self) -> &mut ::std::string::String {
&mut self.name
}
// Take field
pub fn take_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.name, ::std::string::String::new())
}
// string address = 3;
pub fn get_address(&self) -> &str {
&self.address
}
pub fn clear_address(&mut self) {
self.address.clear();
}
// Param is passed by value, moved
pub fn set_address(&mut self, v: ::std::string::String) {
self.address = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_address(&mut self) -> &mut ::std::string::String {
&mut self.address
}
// Take field
pub fn take_address(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.address, ::std::string::String::new())
}
// string gateway = 4;
pub fn get_gateway(&self) -> &str {
&self.gateway
}
pub fn clear_gateway(&mut self) {
self.gateway.clear();
}
// Param is passed by value, moved
pub fn set_gateway(&mut self, v: ::std::string::String) {
self.gateway = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_gateway(&mut self) -> &mut ::std::string::String {
&mut self.gateway
}
// Take field
pub fn take_gateway(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.gateway, ::std::string::String::new())
}
// string netmask = 5;
pub fn get_netmask(&self) -> &str {
&self.netmask
}
pub fn clear_netmask(&mut self) {
self.netmask.clear();
}
// Param is passed by value, moved
pub fn set_netmask(&mut self, v: ::std::string::String) {
self.netmask = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_netmask(&mut self) -> &mut ::std::string::String {
&mut self.netmask
}
// Take field
pub fn take_netmask(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.netmask, ::std::string::String::new())
}
// int32 mtu = 6;
pub fn get_mtu(&self) -> i32 {
self.mtu
}
pub fn clear_mtu(&mut self) {
self.mtu = 0;
}
// Param is passed by value, moved
pub fn set_mtu(&mut self, v: i32) {
self.mtu = v;
}
// repeated string fake_dns_exclude = 7;
pub fn get_fake_dns_exclude(&self) -> &[::std::string::String] {
&self.fake_dns_exclude
}
pub fn clear_fake_dns_exclude(&mut self) {
self.fake_dns_exclude.clear();
}
// Param is passed by value, moved
pub fn set_fake_dns_exclude(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.fake_dns_exclude = v;
}
// Mutable pointer to the field.
pub fn mut_fake_dns_exclude(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.fake_dns_exclude
}
// Take field
pub fn take_fake_dns_exclude(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.fake_dns_exclude, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for TUNInboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int32()?;
self.fd = tmp;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.address)?;
},
4 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.gateway)?;
},
5 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.netmask)?;
},
6 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int32()?;
self.mtu = tmp;
},
7 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.fake_dns_exclude)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.fd != 0 {
my_size += ::protobuf::rt::value_size(1, self.fd, ::protobuf::wire_format::WireTypeVarint);
}
if !self.name.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.name);
}
if !self.address.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.address);
}
if !self.gateway.is_empty() {
my_size += ::protobuf::rt::string_size(4, &self.gateway);
}
if !self.netmask.is_empty() {
my_size += ::protobuf::rt::string_size(5, &self.netmask);
}
if self.mtu != 0 {
my_size += ::protobuf::rt::value_size(6, self.mtu, ::protobuf::wire_format::WireTypeVarint);
}
for value in &self.fake_dns_exclude {
my_size += ::protobuf::rt::string_size(7, &value);
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.fd != 0 {
os.write_int32(1, self.fd)?;
}
if !self.name.is_empty() {
os.write_string(2, &self.name)?;
}
if !self.address.is_empty() {
os.write_string(3, &self.address)?;
}
if !self.gateway.is_empty() {
os.write_string(4, &self.gateway)?;
}
if !self.netmask.is_empty() {
os.write_string(5, &self.netmask)?;
}
if self.mtu != 0 {
os.write_int32(6, self.mtu)?;
}
for v in &self.fake_dns_exclude {
os.write_string(7, &v)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> TUNInboundSettings {
TUNInboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"fd",
|m: &TUNInboundSettings| { &m.fd },
|m: &mut TUNInboundSettings| { &mut m.fd },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"name",
|m: &TUNInboundSettings| { &m.name },
|m: &mut TUNInboundSettings| { &mut m.name },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"address",
|m: &TUNInboundSettings| { &m.address },
|m: &mut TUNInboundSettings| { &mut m.address },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"gateway",
|m: &TUNInboundSettings| { &m.gateway },
|m: &mut TUNInboundSettings| { &mut m.gateway },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"netmask",
|m: &TUNInboundSettings| { &m.netmask },
|m: &mut TUNInboundSettings| { &mut m.netmask },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"mtu",
|m: &TUNInboundSettings| { &m.mtu },
|m: &mut TUNInboundSettings| { &mut m.mtu },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"fake_dns_exclude",
|m: &TUNInboundSettings| { &m.fake_dns_exclude },
|m: &mut TUNInboundSettings| { &mut m.fake_dns_exclude },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<TUNInboundSettings>(
"TUNInboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static TUNInboundSettings {
static instance: ::protobuf::rt::LazyV2<TUNInboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(TUNInboundSettings::new)
}
}
impl ::protobuf::Clear for TUNInboundSettings {
fn clear(&mut self) {
self.fd = 0;
self.name.clear();
self.address.clear();
self.gateway.clear();
self.netmask.clear();
self.mtu = 0;
self.fake_dns_exclude.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for TUNInboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for TUNInboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct SocksInboundSettings {
// message fields
pub bind: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a SocksInboundSettings {
fn default() -> &'a SocksInboundSettings {
<SocksInboundSettings as ::protobuf::Message>::default_instance()
}
}
impl SocksInboundSettings {
pub fn new() -> SocksInboundSettings {
::std::default::Default::default()
}
// string bind = 1;
pub fn get_bind(&self) -> &str {
&self.bind
}
pub fn clear_bind(&mut self) {
self.bind.clear();
}
// Param is passed by value, moved
pub fn set_bind(&mut self, v: ::std::string::String) {
self.bind = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_bind(&mut self) -> &mut ::std::string::String {
&mut self.bind
}
// Take field
pub fn take_bind(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.bind, ::std::string::String::new())
}
}
impl ::protobuf::Message for SocksInboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.bind)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.bind.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.bind);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.bind.is_empty() {
os.write_string(1, &self.bind)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SocksInboundSettings {
SocksInboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"bind",
|m: &SocksInboundSettings| { &m.bind },
|m: &mut SocksInboundSettings| { &mut m.bind },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SocksInboundSettings>(
"SocksInboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SocksInboundSettings {
static instance: ::protobuf::rt::LazyV2<SocksInboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(SocksInboundSettings::new)
}
}
impl ::protobuf::Clear for SocksInboundSettings {
fn clear(&mut self) {
self.bind.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for SocksInboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SocksInboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct Inbound {
// message fields
pub tag: ::std::string::String,
pub protocol: ::std::string::String,
pub listen: ::std::string::String,
pub port: u32,
pub settings: ::std::vec::Vec<u8>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Inbound {
fn default() -> &'a Inbound {
<Inbound as ::protobuf::Message>::default_instance()
}
}
impl Inbound {
pub fn new() -> Inbound {
::std::default::Default::default()
}
// string tag = 1;
pub fn get_tag(&self) -> &str {
&self.tag
}
pub fn clear_tag(&mut self) {
self.tag.clear();
}
// Param is passed by value, moved
pub fn set_tag(&mut self, v: ::std::string::String) {
self.tag = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_tag(&mut self) -> &mut ::std::string::String {
&mut self.tag
}
// Take field
pub fn take_tag(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.tag, ::std::string::String::new())
}
// string protocol = 2;
pub fn get_protocol(&self) -> &str {
&self.protocol
}
pub fn clear_protocol(&mut self) {
self.protocol.clear();
}
// Param is passed by value, moved
pub fn set_protocol(&mut self, v: ::std::string::String) {
self.protocol = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_protocol(&mut self) -> &mut ::std::string::String {
&mut self.protocol
}
// Take field
pub fn take_protocol(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.protocol, ::std::string::String::new())
}
// string listen = 3;
pub fn get_listen(&self) -> &str {
&self.listen
}
pub fn clear_listen(&mut self) {
self.listen.clear();
}
// Param is passed by value, moved
pub fn set_listen(&mut self, v: ::std::string::String) {
self.listen = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_listen(&mut self) -> &mut ::std::string::String {
&mut self.listen
}
// Take field
pub fn take_listen(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.listen, ::std::string::String::new())
}
// uint32 port = 4;
pub fn get_port(&self) -> u32 {
self.port
}
pub fn clear_port(&mut self) {
self.port = 0;
}
// Param is passed by value, moved
pub fn set_port(&mut self, v: u32) {
self.port = v;
}
// bytes settings = 5;
pub fn get_settings(&self) -> &[u8] {
&self.settings
}
pub fn clear_settings(&mut self) {
self.settings.clear();
}
// Param is passed by value, moved
pub fn set_settings(&mut self, v: ::std::vec::Vec<u8>) {
self.settings = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_settings(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.settings
}
// Take field
pub fn take_settings(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.settings, ::std::vec::Vec::new())
}
}
impl ::protobuf::Message for Inbound {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.tag)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.protocol)?;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.listen)?;
},
4 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.port = tmp;
},
5 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.settings)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.tag.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.tag);
}
if !self.protocol.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.protocol);
}
if !self.listen.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.listen);
}
if self.port != 0 {
my_size += ::protobuf::rt::value_size(4, self.port, ::protobuf::wire_format::WireTypeVarint);
}
if !self.settings.is_empty() {
my_size += ::protobuf::rt::bytes_size(5, &self.settings);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.tag.is_empty() {
os.write_string(1, &self.tag)?;
}
if !self.protocol.is_empty() {
os.write_string(2, &self.protocol)?;
}
if !self.listen.is_empty() {
os.write_string(3, &self.listen)?;
}
if self.port != 0 {
os.write_uint32(4, self.port)?;
}
if !self.settings.is_empty() {
os.write_bytes(5, &self.settings)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Inbound {
Inbound::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"tag",
|m: &Inbound| { &m.tag },
|m: &mut Inbound| { &mut m.tag },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"protocol",
|m: &Inbound| { &m.protocol },
|m: &mut Inbound| { &mut m.protocol },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"listen",
|m: &Inbound| { &m.listen },
|m: &mut Inbound| { &mut m.listen },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"port",
|m: &Inbound| { &m.port },
|m: &mut Inbound| { &mut m.port },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"settings",
|m: &Inbound| { &m.settings },
|m: &mut Inbound| { &mut m.settings },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Inbound>(
"Inbound",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Inbound {
static instance: ::protobuf::rt::LazyV2<Inbound> = ::protobuf::rt::LazyV2::INIT;
instance.get(Inbound::new)
}
}
impl ::protobuf::Clear for Inbound {
fn clear(&mut self) {
self.tag.clear();
self.protocol.clear();
self.listen.clear();
self.port = 0;
self.settings.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Inbound {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Inbound {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
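// Sketch (illustrative; the field semantics are inferred from the message
// layout, not stated in this file): `Inbound.settings` appears to carry a
// serialized protocol-specific settings message, e.g. TUNInboundSettings for
// a "tun" inbound.
#[cfg(test)]
#[test]
fn inbound_embeds_settings_sketch() {
    use ::protobuf::Message;
    let mut tun = TUNInboundSettings::new();
    tun.set_mtu(1500);
    let mut inbound = Inbound::new();
    inbound.set_protocol("tun".to_string());
    // Embed the serialized sub-message, then recover it from the raw bytes.
    inbound.set_settings(tun.write_to_bytes().expect("serialize"));
    let parsed = TUNInboundSettings::parse_from_bytes(inbound.get_settings())
        .expect("parse");
    assert_eq!(parsed.get_mtu(), 1500);
}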
#[derive(PartialEq,Clone,Default)]
pub struct RedirectOutboundSettings {
// message fields
pub address: ::std::string::String,
pub port: u32,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RedirectOutboundSettings {
fn default() -> &'a RedirectOutboundSettings {
<RedirectOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl RedirectOutboundSettings {
pub fn new() -> RedirectOutboundSettings {
::std::default::Default::default()
}
// string address = 1;
pub fn get_address(&self) -> &str {
&self.address
}
pub fn clear_address(&mut self) {
self.address.clear();
}
// Param is passed by value, moved
pub fn set_address(&mut self, v: ::std::string::String) {
self.address = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_address(&mut self) -> &mut ::std::string::String {
&mut self.address
}
// Take field
pub fn take_address(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.address, ::std::string::String::new())
}
// uint32 port = 2;
pub fn get_port(&self) -> u32 {
self.port
}
pub fn clear_port(&mut self) {
self.port = 0;
}
// Param is passed by value, moved
pub fn set_port(&mut self, v: u32) {
self.port = v;
}
}
impl ::protobuf::Message for RedirectOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.address)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.port = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.address.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.address);
}
if self.port != 0 {
my_size += ::protobuf::rt::value_size(2, self.port, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.address.is_empty() {
os.write_string(1, &self.address)?;
}
if self.port != 0 {
os.write_uint32(2, self.port)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RedirectOutboundSettings {
RedirectOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"address",
|m: &RedirectOutboundSettings| { &m.address },
|m: &mut RedirectOutboundSettings| { &mut m.address },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"port",
|m: &RedirectOutboundSettings| { &m.port },
|m: &mut RedirectOutboundSettings| { &mut m.port },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RedirectOutboundSettings>(
"RedirectOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RedirectOutboundSettings {
static instance: ::protobuf::rt::LazyV2<RedirectOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(RedirectOutboundSettings::new)
}
}
impl ::protobuf::Clear for RedirectOutboundSettings {
fn clear(&mut self) {
self.address.clear();
self.port = 0;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RedirectOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RedirectOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct SocksOutboundSettings {
// message fields
pub address: ::std::string::String,
pub port: u32,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a SocksOutboundSettings {
fn default() -> &'a SocksOutboundSettings {
<SocksOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl SocksOutboundSettings {
pub fn new() -> SocksOutboundSettings {
::std::default::Default::default()
}
// string address = 1;
pub fn get_address(&self) -> &str {
&self.address
}
pub fn clear_address(&mut self) {
self.address.clear();
}
// Param is passed by value, moved
pub fn set_address(&mut self, v: ::std::string::String) {
self.address = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_address(&mut self) -> &mut ::std::string::String {
&mut self.address
}
// Take field
pub fn take_address(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.address, ::std::string::String::new())
}
// uint32 port = 2;
pub fn get_port(&self) -> u32 {
self.port
}
pub fn clear_port(&mut self) {
self.port = 0;
}
// Param is passed by value, moved
pub fn set_port(&mut self, v: u32) {
self.port = v;
}
}
impl ::protobuf::Message for SocksOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.address)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.port = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.address.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.address);
}
if self.port != 0 {
my_size += ::protobuf::rt::value_size(2, self.port, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.address.is_empty() {
os.write_string(1, &self.address)?;
}
if self.port != 0 {
os.write_uint32(2, self.port)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> SocksOutboundSettings {
SocksOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"address",
|m: &SocksOutboundSettings| { &m.address },
|m: &mut SocksOutboundSettings| { &mut m.address },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"port",
|m: &SocksOutboundSettings| { &m.port },
|m: &mut SocksOutboundSettings| { &mut m.port },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<SocksOutboundSettings>(
"SocksOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static SocksOutboundSettings {
static instance: ::protobuf::rt::LazyV2<SocksOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(SocksOutboundSettings::new)
}
}
impl ::protobuf::Clear for SocksOutboundSettings {
fn clear(&mut self) {
self.address.clear();
self.port = 0;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for SocksOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for SocksOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct ShadowsocksOutboundSettings {
// message fields
pub address: ::std::string::String,
pub port: u32,
pub method: ::std::string::String,
pub password: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ShadowsocksOutboundSettings {
fn default() -> &'a ShadowsocksOutboundSettings {
<ShadowsocksOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl ShadowsocksOutboundSettings {
pub fn new() -> ShadowsocksOutboundSettings {
::std::default::Default::default()
}
// string address = 1;
pub fn get_address(&self) -> &str {
&self.address
}
pub fn clear_address(&mut self) {
self.address.clear();
}
// Param is passed by value, moved
pub fn set_address(&mut self, v: ::std::string::String) {
self.address = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_address(&mut self) -> &mut ::std::string::String {
&mut self.address
}
// Take field
pub fn take_address(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.address, ::std::string::String::new())
}
// uint32 port = 2;
pub fn get_port(&self) -> u32 {
self.port
}
pub fn clear_port(&mut self) {
self.port = 0;
}
// Param is passed by value, moved
pub fn set_port(&mut self, v: u32) {
self.port = v;
}
// string method = 3;
pub fn get_method(&self) -> &str {
&self.method
}
pub fn clear_method(&mut self) {
self.method.clear();
}
// Param is passed by value, moved
pub fn set_method(&mut self, v: ::std::string::String) {
self.method = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_method(&mut self) -> &mut ::std::string::String {
&mut self.method
}
// Take field
pub fn take_method(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.method, ::std::string::String::new())
}
// string password = 4;
pub fn get_password(&self) -> &str {
&self.password
}
pub fn clear_password(&mut self) {
self.password.clear();
}
// Param is passed by value, moved
pub fn set_password(&mut self, v: ::std::string::String) {
self.password = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_password(&mut self) -> &mut ::std::string::String {
&mut self.password
}
// Take field
pub fn take_password(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.password, ::std::string::String::new())
}
}
impl ::protobuf::Message for ShadowsocksOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.address)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.port = tmp;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.method)?;
},
4 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.password)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.address.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.address);
}
if self.port != 0 {
my_size += ::protobuf::rt::value_size(2, self.port, ::protobuf::wire_format::WireTypeVarint);
}
if !self.method.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.method);
}
if !self.password.is_empty() {
my_size += ::protobuf::rt::string_size(4, &self.password);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.address.is_empty() {
os.write_string(1, &self.address)?;
}
if self.port != 0 {
os.write_uint32(2, self.port)?;
}
if !self.method.is_empty() {
os.write_string(3, &self.method)?;
}
if !self.password.is_empty() {
os.write_string(4, &self.password)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ShadowsocksOutboundSettings {
ShadowsocksOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"address",
|m: &ShadowsocksOutboundSettings| { &m.address },
|m: &mut ShadowsocksOutboundSettings| { &mut m.address },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"port",
|m: &ShadowsocksOutboundSettings| { &m.port },
|m: &mut ShadowsocksOutboundSettings| { &mut m.port },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"method",
|m: &ShadowsocksOutboundSettings| { &m.method },
|m: &mut ShadowsocksOutboundSettings| { &mut m.method },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"password",
|m: &ShadowsocksOutboundSettings| { &m.password },
|m: &mut ShadowsocksOutboundSettings| { &mut m.password },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ShadowsocksOutboundSettings>(
"ShadowsocksOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ShadowsocksOutboundSettings {
static instance: ::protobuf::rt::LazyV2<ShadowsocksOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(ShadowsocksOutboundSettings::new)
}
}
impl ::protobuf::Clear for ShadowsocksOutboundSettings {
fn clear(&mut self) {
self.address.clear();
self.port = 0;
self.method.clear();
self.password.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ShadowsocksOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ShadowsocksOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
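/// Settings for a Trojan outbound (generated from the protobuf schema):
/// a server address, port, and password. Below is a minimal construction
/// sketch using the accessors defined on this type; the concrete values
/// ("example.com", 443, "secret") are illustrative placeholders, not
/// defaults from the schema.
///
/// ```ignore
/// let mut s = TrojanOutboundSettings::new();
/// s.set_address("example.com".to_string());
/// s.set_port(443);
/// s.set_password("secret".to_string());
/// ```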
#[derive(PartialEq,Clone,Default)]
pub struct TrojanOutboundSettings {
// message fields
pub address: ::std::string::String,
pub port: u32,
pub password: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a TrojanOutboundSettings {
fn default() -> &'a TrojanOutboundSettings {
<TrojanOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl TrojanOutboundSettings {
pub fn new() -> TrojanOutboundSettings {
::std::default::Default::default()
}
// string address = 1;
pub fn get_address(&self) -> &str {
&self.address
}
pub fn clear_address(&mut self) {
self.address.clear();
}
// Param is passed by value, moved
pub fn set_address(&mut self, v: ::std::string::String) {
self.address = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_address(&mut self) -> &mut ::std::string::String {
&mut self.address
}
// Take field
pub fn take_address(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.address, ::std::string::String::new())
}
// uint32 port = 2;
pub fn get_port(&self) -> u32 {
self.port
}
pub fn clear_port(&mut self) {
self.port = 0;
}
// Param is passed by value, moved
pub fn set_port(&mut self, v: u32) {
self.port = v;
}
// string password = 3;
pub fn get_password(&self) -> &str {
&self.password
}
pub fn clear_password(&mut self) {
self.password.clear();
}
// Param is passed by value, moved
pub fn set_password(&mut self, v: ::std::string::String) {
self.password = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_password(&mut self) -> &mut ::std::string::String {
&mut self.password
}
// Take field
pub fn take_password(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.password, ::std::string::String::new())
}
}
impl ::protobuf::Message for TrojanOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.address)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.port = tmp;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.password)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.address.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.address);
}
if self.port != 0 {
my_size += ::protobuf::rt::value_size(2, self.port, ::protobuf::wire_format::WireTypeVarint);
}
if !self.password.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.password);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.address.is_empty() {
os.write_string(1, &self.address)?;
}
if self.port != 0 {
os.write_uint32(2, self.port)?;
}
if !self.password.is_empty() {
os.write_string(3, &self.password)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> TrojanOutboundSettings {
TrojanOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"address",
|m: &TrojanOutboundSettings| { &m.address },
|m: &mut TrojanOutboundSettings| { &mut m.address },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"port",
|m: &TrojanOutboundSettings| { &m.port },
|m: &mut TrojanOutboundSettings| { &mut m.port },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"password",
|m: &TrojanOutboundSettings| { &m.password },
|m: &mut TrojanOutboundSettings| { &mut m.password },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<TrojanOutboundSettings>(
"TrojanOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static TrojanOutboundSettings {
static instance: ::protobuf::rt::LazyV2<TrojanOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(TrojanOutboundSettings::new)
}
}
impl ::protobuf::Clear for TrojanOutboundSettings {
fn clear(&mut self) {
self.address.clear();
self.port = 0;
self.password.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for TrojanOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for TrojanOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
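/// Settings for a VMess outbound: server address and port, the user
/// `uuid`, and the `security` (cipher) name. A minimal construction
/// sketch; all concrete values below are illustrative only.
///
/// ```ignore
/// let mut s = VMessOutboundSettings::new();
/// s.set_address("example.com".to_string());
/// s.set_port(10086);
/// s.set_uuid("a3482e88-686a-4a58-8126-99c9df64b7bf".to_string());
/// s.set_security("chacha20-poly1305".to_string());
/// ```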
#[derive(PartialEq,Clone,Default)]
pub struct VMessOutboundSettings {
// message fields
pub address: ::std::string::String,
pub port: u32,
pub uuid: ::std::string::String,
pub security: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a VMessOutboundSettings {
fn default() -> &'a VMessOutboundSettings {
<VMessOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl VMessOutboundSettings {
pub fn new() -> VMessOutboundSettings {
::std::default::Default::default()
}
// string address = 1;
pub fn get_address(&self) -> &str {
&self.address
}
pub fn clear_address(&mut self) {
self.address.clear();
}
// Param is passed by value, moved
pub fn set_address(&mut self, v: ::std::string::String) {
self.address = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_address(&mut self) -> &mut ::std::string::String {
&mut self.address
}
// Take field
pub fn take_address(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.address, ::std::string::String::new())
}
// uint32 port = 2;
pub fn get_port(&self) -> u32 {
self.port
}
pub fn clear_port(&mut self) {
self.port = 0;
}
// Param is passed by value, moved
pub fn set_port(&mut self, v: u32) {
self.port = v;
}
// string uuid = 3;
pub fn get_uuid(&self) -> &str {
&self.uuid
}
pub fn clear_uuid(&mut self) {
self.uuid.clear();
}
// Param is passed by value, moved
pub fn set_uuid(&mut self, v: ::std::string::String) {
self.uuid = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_uuid(&mut self) -> &mut ::std::string::String {
&mut self.uuid
}
// Take field
pub fn take_uuid(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.uuid, ::std::string::String::new())
}
// string security = 4;
pub fn get_security(&self) -> &str {
&self.security
}
pub fn clear_security(&mut self) {
self.security.clear();
}
// Param is passed by value, moved
pub fn set_security(&mut self, v: ::std::string::String) {
self.security = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_security(&mut self) -> &mut ::std::string::String {
&mut self.security
}
// Take field
pub fn take_security(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.security, ::std::string::String::new())
}
}
impl ::protobuf::Message for VMessOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.address)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.port = tmp;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.uuid)?;
},
4 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.security)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.address.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.address);
}
if self.port != 0 {
my_size += ::protobuf::rt::value_size(2, self.port, ::protobuf::wire_format::WireTypeVarint);
}
if !self.uuid.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.uuid);
}
if !self.security.is_empty() {
my_size += ::protobuf::rt::string_size(4, &self.security);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.address.is_empty() {
os.write_string(1, &self.address)?;
}
if self.port != 0 {
os.write_uint32(2, self.port)?;
}
if !self.uuid.is_empty() {
os.write_string(3, &self.uuid)?;
}
if !self.security.is_empty() {
os.write_string(4, &self.security)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> VMessOutboundSettings {
VMessOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"address",
|m: &VMessOutboundSettings| { &m.address },
|m: &mut VMessOutboundSettings| { &mut m.address },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"port",
|m: &VMessOutboundSettings| { &m.port },
|m: &mut VMessOutboundSettings| { &mut m.port },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"uuid",
|m: &VMessOutboundSettings| { &m.uuid },
|m: &mut VMessOutboundSettings| { &mut m.uuid },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"security",
|m: &VMessOutboundSettings| { &m.security },
|m: &mut VMessOutboundSettings| { &mut m.security },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<VMessOutboundSettings>(
"VMessOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static VMessOutboundSettings {
static instance: ::protobuf::rt::LazyV2<VMessOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(VMessOutboundSettings::new)
}
}
impl ::protobuf::Clear for VMessOutboundSettings {
fn clear(&mut self) {
self.address.clear();
self.port = 0;
self.uuid.clear();
self.security.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for VMessOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for VMessOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
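/// Settings for a VLESS outbound: server address, port, and user `uuid`.
/// A minimal construction sketch with illustrative values:
///
/// ```ignore
/// let mut s = VLessOutboundSettings::new();
/// s.set_address("example.com".to_string());
/// s.set_port(443);
/// s.set_uuid("a3482e88-686a-4a58-8126-99c9df64b7bf".to_string());
/// ```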
#[derive(PartialEq,Clone,Default)]
pub struct VLessOutboundSettings {
// message fields
pub address: ::std::string::String,
pub port: u32,
pub uuid: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a VLessOutboundSettings {
fn default() -> &'a VLessOutboundSettings {
<VLessOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl VLessOutboundSettings {
pub fn new() -> VLessOutboundSettings {
::std::default::Default::default()
}
// string address = 1;
pub fn get_address(&self) -> &str {
&self.address
}
pub fn clear_address(&mut self) {
self.address.clear();
}
// Param is passed by value, moved
pub fn set_address(&mut self, v: ::std::string::String) {
self.address = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_address(&mut self) -> &mut ::std::string::String {
&mut self.address
}
// Take field
pub fn take_address(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.address, ::std::string::String::new())
}
// uint32 port = 2;
pub fn get_port(&self) -> u32 {
self.port
}
pub fn clear_port(&mut self) {
self.port = 0;
}
// Param is passed by value, moved
pub fn set_port(&mut self, v: u32) {
self.port = v;
}
// string uuid = 3;
pub fn get_uuid(&self) -> &str {
&self.uuid
}
pub fn clear_uuid(&mut self) {
self.uuid.clear();
}
// Param is passed by value, moved
pub fn set_uuid(&mut self, v: ::std::string::String) {
self.uuid = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_uuid(&mut self) -> &mut ::std::string::String {
&mut self.uuid
}
// Take field
pub fn take_uuid(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.uuid, ::std::string::String::new())
}
}
impl ::protobuf::Message for VLessOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.address)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.port = tmp;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.uuid)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.address.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.address);
}
if self.port != 0 {
my_size += ::protobuf::rt::value_size(2, self.port, ::protobuf::wire_format::WireTypeVarint);
}
if !self.uuid.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.uuid);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.address.is_empty() {
os.write_string(1, &self.address)?;
}
if self.port != 0 {
os.write_uint32(2, self.port)?;
}
if !self.uuid.is_empty() {
os.write_string(3, &self.uuid)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> VLessOutboundSettings {
VLessOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"address",
|m: &VLessOutboundSettings| { &m.address },
|m: &mut VLessOutboundSettings| { &mut m.address },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"port",
|m: &VLessOutboundSettings| { &m.port },
|m: &mut VLessOutboundSettings| { &mut m.port },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"uuid",
|m: &VLessOutboundSettings| { &m.uuid },
|m: &mut VLessOutboundSettings| { &mut m.uuid },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<VLessOutboundSettings>(
"VLessOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static VLessOutboundSettings {
static instance: ::protobuf::rt::LazyV2<VLessOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(VLessOutboundSettings::new)
}
}
impl ::protobuf::Clear for VLessOutboundSettings {
fn clear(&mut self) {
self.address.clear();
self.port = 0;
self.uuid.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for VLessOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for VLessOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
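/// TLS transport settings for an outbound; `server_name` is the server
/// name (SNI) presented during the handshake. Sketch with an illustrative
/// host name:
///
/// ```ignore
/// let mut s = TlsOutboundSettings::new();
/// s.set_server_name("example.com".to_string());
/// ```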
#[derive(PartialEq,Clone,Default)]
pub struct TlsOutboundSettings {
// message fields
pub server_name: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a TlsOutboundSettings {
fn default() -> &'a TlsOutboundSettings {
<TlsOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl TlsOutboundSettings {
pub fn new() -> TlsOutboundSettings {
::std::default::Default::default()
}
// string server_name = 1;
pub fn get_server_name(&self) -> &str {
&self.server_name
}
pub fn clear_server_name(&mut self) {
self.server_name.clear();
}
// Param is passed by value, moved
pub fn set_server_name(&mut self, v: ::std::string::String) {
self.server_name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_server_name(&mut self) -> &mut ::std::string::String {
&mut self.server_name
}
// Take field
pub fn take_server_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.server_name, ::std::string::String::new())
}
}
impl ::protobuf::Message for TlsOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.server_name)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.server_name.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.server_name);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.server_name.is_empty() {
os.write_string(1, &self.server_name)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> TlsOutboundSettings {
TlsOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"server_name",
|m: &TlsOutboundSettings| { &m.server_name },
|m: &mut TlsOutboundSettings| { &mut m.server_name },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<TlsOutboundSettings>(
"TlsOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static TlsOutboundSettings {
static instance: ::protobuf::rt::LazyV2<TlsOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(TlsOutboundSettings::new)
}
}
impl ::protobuf::Clear for TlsOutboundSettings {
fn clear(&mut self) {
self.server_name.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for TlsOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for TlsOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
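/// WebSocket transport settings; `path` is the HTTP path used for the
/// upgrade request. Sketch with an illustrative path:
///
/// ```ignore
/// let mut s = WebSocketOutboundSettings::new();
/// s.set_path("/ws".to_string());
/// ```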
#[derive(PartialEq,Clone,Default)]
pub struct WebSocketOutboundSettings {
// message fields
pub path: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a WebSocketOutboundSettings {
fn default() -> &'a WebSocketOutboundSettings {
<WebSocketOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl WebSocketOutboundSettings {
pub fn new() -> WebSocketOutboundSettings {
::std::default::Default::default()
}
// string path = 1;
pub fn get_path(&self) -> &str {
&self.path
}
pub fn clear_path(&mut self) {
self.path.clear();
}
// Param is passed by value, moved
pub fn set_path(&mut self, v: ::std::string::String) {
self.path = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_path(&mut self) -> &mut ::std::string::String {
&mut self.path
}
// Take field
pub fn take_path(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.path, ::std::string::String::new())
}
}
impl ::protobuf::Message for WebSocketOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.path)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.path.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.path);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.path.is_empty() {
os.write_string(1, &self.path)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> WebSocketOutboundSettings {
WebSocketOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"path",
|m: &WebSocketOutboundSettings| { &m.path },
|m: &mut WebSocketOutboundSettings| { &mut m.path },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<WebSocketOutboundSettings>(
"WebSocketOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static WebSocketOutboundSettings {
static instance: ::protobuf::rt::LazyV2<WebSocketOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(WebSocketOutboundSettings::new)
}
}
impl ::protobuf::Clear for WebSocketOutboundSettings {
fn clear(&mut self) {
self.path.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for WebSocketOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for WebSocketOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
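/// Settings for a "try-all" composite outbound: `actors` lists the tags of
/// the sub-outbounds to try, and `delay_base` is a per-attempt delay knob
/// (units are not stated in this file; presumably milliseconds). Sketch
/// with illustrative tag names:
///
/// ```ignore
/// let mut s = TryAllOutboundSettings::new();
/// s.mut_actors().push("proxy-a".to_string());
/// s.mut_actors().push("proxy-b".to_string());
/// s.set_delay_base(100);
/// ```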
#[derive(PartialEq,Clone,Default)]
pub struct TryAllOutboundSettings {
// message fields
pub actors: ::protobuf::RepeatedField<::std::string::String>,
pub delay_base: u32,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a TryAllOutboundSettings {
fn default() -> &'a TryAllOutboundSettings {
<TryAllOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl TryAllOutboundSettings {
pub fn new() -> TryAllOutboundSettings {
::std::default::Default::default()
}
// repeated string actors = 1;
pub fn get_actors(&self) -> &[::std::string::String] {
&self.actors
}
pub fn clear_actors(&mut self) {
self.actors.clear();
}
// Param is passed by value, moved
pub fn set_actors(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.actors = v;
}
// Mutable pointer to the field.
pub fn mut_actors(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.actors
}
// Take field
pub fn take_actors(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.actors, ::protobuf::RepeatedField::new())
}
// uint32 delay_base = 2;
pub fn get_delay_base(&self) -> u32 {
self.delay_base
}
pub fn clear_delay_base(&mut self) {
self.delay_base = 0;
}
// Param is passed by value, moved
pub fn set_delay_base(&mut self, v: u32) {
self.delay_base = v;
}
}
impl ::protobuf::Message for TryAllOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.actors)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.delay_base = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.actors {
my_size += ::protobuf::rt::string_size(1, &value);
};
if self.delay_base != 0 {
my_size += ::protobuf::rt::value_size(2, self.delay_base, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.actors {
os.write_string(1, &v)?;
};
if self.delay_base != 0 {
os.write_uint32(2, self.delay_base)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> TryAllOutboundSettings {
TryAllOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"actors",
|m: &TryAllOutboundSettings| { &m.actors },
|m: &mut TryAllOutboundSettings| { &mut m.actors },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"delay_base",
|m: &TryAllOutboundSettings| { &m.delay_base },
|m: &mut TryAllOutboundSettings| { &mut m.delay_base },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<TryAllOutboundSettings>(
"TryAllOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static TryAllOutboundSettings {
static instance: ::protobuf::rt::LazyV2<TryAllOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(TryAllOutboundSettings::new)
}
}
impl ::protobuf::Clear for TryAllOutboundSettings {
fn clear(&mut self) {
self.actors.clear();
self.delay_base = 0;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for TryAllOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for TryAllOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
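/// Settings for a "random" composite outbound that selects one of the
/// listed `actors` (sub-outbound tags); the selection behavior is inferred
/// from the name. Sketch:
///
/// ```ignore
/// let mut s = RandomOutboundSettings::new();
/// s.mut_actors().push("proxy-a".to_string());
/// s.mut_actors().push("proxy-b".to_string());
/// ```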
#[derive(PartialEq,Clone,Default)]
pub struct RandomOutboundSettings {
// message fields
pub actors: ::protobuf::RepeatedField<::std::string::String>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RandomOutboundSettings {
fn default() -> &'a RandomOutboundSettings {
<RandomOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl RandomOutboundSettings {
pub fn new() -> RandomOutboundSettings {
::std::default::Default::default()
}
// repeated string actors = 1;
pub fn get_actors(&self) -> &[::std::string::String] {
&self.actors
}
pub fn clear_actors(&mut self) {
self.actors.clear();
}
// Param is passed by value, moved
pub fn set_actors(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.actors = v;
}
// Mutable pointer to the field.
pub fn mut_actors(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.actors
}
// Take field
pub fn take_actors(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.actors, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for RandomOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.actors)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.actors {
my_size += ::protobuf::rt::string_size(1, &value);
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.actors {
os.write_string(1, &v)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RandomOutboundSettings {
RandomOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"actors",
|m: &RandomOutboundSettings| { &m.actors },
|m: &mut RandomOutboundSettings| { &mut m.actors },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RandomOutboundSettings>(
"RandomOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RandomOutboundSettings {
static instance: ::protobuf::rt::LazyV2<RandomOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(RandomOutboundSettings::new)
}
}
impl ::protobuf::Clear for RandomOutboundSettings {
fn clear(&mut self) {
self.actors.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RandomOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RandomOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
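/// Settings for a "chain" composite outbound that presumably relays
/// traffic through the listed `actors` in order (inferred from the name).
/// Sketch:
///
/// ```ignore
/// let mut s = ChainOutboundSettings::new();
/// s.mut_actors().push("relay-1".to_string());
/// s.mut_actors().push("relay-2".to_string());
/// ```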
#[derive(PartialEq,Clone,Default)]
pub struct ChainOutboundSettings {
// message fields
pub actors: ::protobuf::RepeatedField<::std::string::String>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ChainOutboundSettings {
fn default() -> &'a ChainOutboundSettings {
<ChainOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl ChainOutboundSettings {
pub fn new() -> ChainOutboundSettings {
::std::default::Default::default()
}
// repeated string actors = 1;
pub fn get_actors(&self) -> &[::std::string::String] {
&self.actors
}
pub fn clear_actors(&mut self) {
self.actors.clear();
}
// Param is passed by value, moved
pub fn set_actors(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.actors = v;
}
// Mutable pointer to the field.
pub fn mut_actors(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.actors
}
// Take field
pub fn take_actors(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.actors, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for ChainOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.actors)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.actors {
my_size += ::protobuf::rt::string_size(1, &value);
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.actors {
os.write_string(1, &v)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ChainOutboundSettings {
ChainOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"actors",
|m: &ChainOutboundSettings| { &m.actors },
|m: &mut ChainOutboundSettings| { &mut m.actors },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ChainOutboundSettings>(
"ChainOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ChainOutboundSettings {
static instance: ::protobuf::rt::LazyV2<ChainOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(ChainOutboundSettings::new)
}
}
impl ::protobuf::Clear for ChainOutboundSettings {
fn clear(&mut self) {
self.actors.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ChainOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ChainOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
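/// Settings for a failover composite outbound: `actors` are candidate
/// sub-outbounds, `fail_timeout` bounds each attempt, and when
/// `health_check` is enabled the actors are probed every `check_interval`;
/// `failover` toggles falling back to the next actor. Field semantics
/// beyond the names are inferred, and time units are not stated here
/// (presumably seconds). Sketch:
///
/// ```ignore
/// let mut s = FailOverOutboundSettings::new();
/// s.mut_actors().push("proxy-a".to_string());
/// s.mut_actors().push("proxy-b".to_string());
/// s.set_fail_timeout(4);
/// s.set_health_check(true);
/// s.set_check_interval(300);
/// s.set_failover(true);
/// ```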
#[derive(PartialEq,Clone,Default)]
pub struct FailOverOutboundSettings {
// message fields
pub actors: ::protobuf::RepeatedField<::std::string::String>,
pub fail_timeout: u32,
pub health_check: bool,
pub check_interval: u32,
pub failover: bool,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a FailOverOutboundSettings {
fn default() -> &'a FailOverOutboundSettings {
<FailOverOutboundSettings as ::protobuf::Message>::default_instance()
}
}
impl FailOverOutboundSettings {
pub fn new() -> FailOverOutboundSettings {
::std::default::Default::default()
}
// repeated string actors = 1;
pub fn get_actors(&self) -> &[::std::string::String] {
&self.actors
}
pub fn clear_actors(&mut self) {
self.actors.clear();
}
// Param is passed by value, moved
pub fn set_actors(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.actors = v;
}
// Mutable pointer to the field.
pub fn mut_actors(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.actors
}
// Take field
pub fn take_actors(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.actors, ::protobuf::RepeatedField::new())
}
// uint32 fail_timeout = 2;
pub fn get_fail_timeout(&self) -> u32 {
self.fail_timeout
}
pub fn clear_fail_timeout(&mut self) {
self.fail_timeout = 0;
}
// Param is passed by value, moved
pub fn set_fail_timeout(&mut self, v: u32) {
self.fail_timeout = v;
}
// bool health_check = 3;
pub fn get_health_check(&self) -> bool {
self.health_check
}
pub fn clear_health_check(&mut self) {
self.health_check = false;
}
// Param is passed by value, moved
pub fn set_health_check(&mut self, v: bool) {
self.health_check = v;
}
// uint32 check_interval = 4;
pub fn get_check_interval(&self) -> u32 {
self.check_interval
}
pub fn clear_check_interval(&mut self) {
self.check_interval = 0;
}
// Param is passed by value, moved
pub fn set_check_interval(&mut self, v: u32) {
self.check_interval = v;
}
// bool failover = 5;
pub fn get_failover(&self) -> bool {
self.failover
}
pub fn clear_failover(&mut self) {
self.failover = false;
}
// Param is passed by value, moved
pub fn set_failover(&mut self, v: bool) {
self.failover = v;
}
}
impl ::protobuf::Message for FailOverOutboundSettings {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.actors)?;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.fail_timeout = tmp;
},
3 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_bool()?;
self.health_check = tmp;
},
4 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_uint32()?;
self.check_interval = tmp;
},
5 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_bool()?;
self.failover = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.actors {
my_size += ::protobuf::rt::string_size(1, &value);
};
if self.fail_timeout != 0 {
my_size += ::protobuf::rt::value_size(2, self.fail_timeout, ::protobuf::wire_format::WireTypeVarint);
}
if self.health_check {
my_size += 2;
}
if self.check_interval != 0 {
my_size += ::protobuf::rt::value_size(4, self.check_interval, ::protobuf::wire_format::WireTypeVarint);
}
if self.failover {
my_size += 2;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.actors {
os.write_string(1, &v)?;
};
if self.fail_timeout != 0 {
os.write_uint32(2, self.fail_timeout)?;
}
if self.health_check {
os.write_bool(3, self.health_check)?;
}
if self.check_interval != 0 {
os.write_uint32(4, self.check_interval)?;
}
if self.failover {
os.write_bool(5, self.failover)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> FailOverOutboundSettings {
FailOverOutboundSettings::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"actors",
|m: &FailOverOutboundSettings| { &m.actors },
|m: &mut FailOverOutboundSettings| { &mut m.actors },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"fail_timeout",
|m: &FailOverOutboundSettings| { &m.fail_timeout },
|m: &mut FailOverOutboundSettings| { &mut m.fail_timeout },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBool>(
"health_check",
|m: &FailOverOutboundSettings| { &m.health_check },
|m: &mut FailOverOutboundSettings| { &mut m.health_check },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>(
"check_interval",
|m: &FailOverOutboundSettings| { &m.check_interval },
|m: &mut FailOverOutboundSettings| { &mut m.check_interval },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBool>(
"failover",
|m: &FailOverOutboundSettings| { &m.failover },
|m: &mut FailOverOutboundSettings| { &mut m.failover },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<FailOverOutboundSettings>(
"FailOverOutboundSettings",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static FailOverOutboundSettings {
static instance: ::protobuf::rt::LazyV2<FailOverOutboundSettings> = ::protobuf::rt::LazyV2::INIT;
instance.get(FailOverOutboundSettings::new)
}
}
impl ::protobuf::Clear for FailOverOutboundSettings {
fn clear(&mut self) {
self.actors.clear();
self.fail_timeout = 0;
self.health_check = false;
self.check_interval = 0;
self.failover = false;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for FailOverOutboundSettings {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for FailOverOutboundSettings {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
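/// A generic outbound definition: a unique `tag`, a `protocol` name, an
/// optional `bind` address, and the protocol-specific `settings` carried
/// as serialized protobuf bytes. A sketch of embedding one of the settings
/// messages defined in this file; the tag and protocol strings are
/// illustrative, and `write_to_bytes` is the standard
/// `::protobuf::Message` serializer:
///
/// ```ignore
/// use protobuf::Message;
/// let mut ss = ShadowsocksOutboundSettings::new();
/// ss.set_address("example.com".to_string());
/// ss.set_port(8388);
/// let mut ob = Outbound::new();
/// ob.set_tag("my-ss".to_string());
/// ob.set_protocol("shadowsocks".to_string());
/// ob.set_settings(ss.write_to_bytes().unwrap());
/// ```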
#[derive(PartialEq,Clone,Default)]
pub struct Outbound {
// message fields
pub tag: ::std::string::String,
pub protocol: ::std::string::String,
pub bind: ::std::string::String,
pub settings: ::std::vec::Vec<u8>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Outbound {
fn default() -> &'a Outbound {
<Outbound as ::protobuf::Message>::default_instance()
}
}
impl Outbound {
pub fn new() -> Outbound {
::std::default::Default::default()
}
// string tag = 1;
pub fn get_tag(&self) -> &str {
&self.tag
}
pub fn clear_tag(&mut self) {
self.tag.clear();
}
// Param is passed by value, moved
pub fn set_tag(&mut self, v: ::std::string::String) {
self.tag = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_tag(&mut self) -> &mut ::std::string::String {
&mut self.tag
}
// Take field
pub fn take_tag(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.tag, ::std::string::String::new())
}
// string protocol = 2;
pub fn get_protocol(&self) -> &str {
&self.protocol
}
pub fn clear_protocol(&mut self) {
self.protocol.clear();
}
// Param is passed by value, moved
pub fn set_protocol(&mut self, v: ::std::string::String) {
self.protocol = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_protocol(&mut self) -> &mut ::std::string::String {
&mut self.protocol
}
// Take field
pub fn take_protocol(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.protocol, ::std::string::String::new())
}
// string bind = 3;
pub fn get_bind(&self) -> &str {
&self.bind
}
pub fn clear_bind(&mut self) {
self.bind.clear();
}
// Param is passed by value, moved
pub fn set_bind(&mut self, v: ::std::string::String) {
self.bind = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_bind(&mut self) -> &mut ::std::string::String {
&mut self.bind
}
// Take field
pub fn take_bind(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.bind, ::std::string::String::new())
}
// bytes settings = 4;
pub fn get_settings(&self) -> &[u8] {
&self.settings
}
pub fn clear_settings(&mut self) {
self.settings.clear();
}
// Param is passed by value, moved
pub fn set_settings(&mut self, v: ::std::vec::Vec<u8>) {
self.settings = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_settings(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.settings
}
// Take field
pub fn take_settings(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.settings, ::std::vec::Vec::new())
}
}
impl ::protobuf::Message for Outbound {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.tag)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.protocol)?;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.bind)?;
},
4 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.settings)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.tag.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.tag);
}
if !self.protocol.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.protocol);
}
if !self.bind.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.bind);
}
if !self.settings.is_empty() {
my_size += ::protobuf::rt::bytes_size(4, &self.settings);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.tag.is_empty() {
os.write_string(1, &self.tag)?;
}
if !self.protocol.is_empty() {
os.write_string(2, &self.protocol)?;
}
if !self.bind.is_empty() {
os.write_string(3, &self.bind)?;
}
if !self.settings.is_empty() {
os.write_bytes(4, &self.settings)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Outbound {
Outbound::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"tag",
|m: &Outbound| { &m.tag },
|m: &mut Outbound| { &mut m.tag },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"protocol",
|m: &Outbound| { &m.protocol },
|m: &mut Outbound| { &mut m.protocol },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"bind",
|m: &Outbound| { &m.bind },
|m: &mut Outbound| { &mut m.bind },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"settings",
|m: &Outbound| { &m.settings },
|m: &mut Outbound| { &mut m.settings },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Outbound>(
"Outbound",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Outbound {
static instance: ::protobuf::rt::LazyV2<Outbound> = ::protobuf::rt::LazyV2::INIT;
instance.get(Outbound::new)
}
}
impl ::protobuf::Clear for Outbound {
fn clear(&mut self) {
self.tag.clear();
self.protocol.clear();
self.bind.clear();
self.settings.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Outbound {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Outbound {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
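/// A routing rule: traffic matching any of the listed `domains`,
/// `ip_cidrs`, or `mmdbs` (MaxMind-format databases, per the nested
/// `RoutingRule_Mmdb` type; presumably GeoIP lookups) is directed to the
/// outbound named by `target_tag`. A sketch using the string CIDR list;
/// the tag and CIDR shown are illustrative values:
///
/// ```ignore
/// let mut r = RoutingRule::new();
/// r.set_target_tag("direct".to_string());
/// r.mut_ip_cidrs().push("192.168.0.0/16".to_string());
/// ```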
#[derive(PartialEq,Clone,Default)]
pub struct RoutingRule {
// message fields
pub target_tag: ::std::string::String,
pub domains: ::protobuf::RepeatedField<RoutingRule_Domain>,
pub ip_cidrs: ::protobuf::RepeatedField<::std::string::String>,
pub mmdbs: ::protobuf::RepeatedField<RoutingRule_Mmdb>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RoutingRule {
fn default() -> &'a RoutingRule {
<RoutingRule as ::protobuf::Message>::default_instance()
}
}
impl RoutingRule {
pub fn new() -> RoutingRule {
::std::default::Default::default()
}
// string target_tag = 1;
pub fn get_target_tag(&self) -> &str {
&self.target_tag
}
pub fn clear_target_tag(&mut self) {
self.target_tag.clear();
}
// Param is passed by value, moved
pub fn set_target_tag(&mut self, v: ::std::string::String) {
self.target_tag = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_target_tag(&mut self) -> &mut ::std::string::String {
&mut self.target_tag
}
// Take field
pub fn take_target_tag(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.target_tag, ::std::string::String::new())
}
// repeated .RoutingRule.Domain domains = 2;
pub fn get_domains(&self) -> &[RoutingRule_Domain] {
&self.domains
}
pub fn clear_domains(&mut self) {
self.domains.clear();
}
// Param is passed by value, moved
pub fn set_domains(&mut self, v: ::protobuf::RepeatedField<RoutingRule_Domain>) {
self.domains = v;
}
// Mutable pointer to the field.
pub fn mut_domains(&mut self) -> &mut ::protobuf::RepeatedField<RoutingRule_Domain> {
&mut self.domains
}
// Take field
pub fn take_domains(&mut self) -> ::protobuf::RepeatedField<RoutingRule_Domain> {
::std::mem::replace(&mut self.domains, ::protobuf::RepeatedField::new())
}
// repeated string ip_cidrs = 3;
pub fn get_ip_cidrs(&self) -> &[::std::string::String] {
&self.ip_cidrs
}
pub fn clear_ip_cidrs(&mut self) {
self.ip_cidrs.clear();
}
// Param is passed by value, moved
pub fn set_ip_cidrs(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.ip_cidrs = v;
}
// Mutable pointer to the field.
pub fn mut_ip_cidrs(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.ip_cidrs
}
// Take field
pub fn take_ip_cidrs(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.ip_cidrs, ::protobuf::RepeatedField::new())
}
// repeated .RoutingRule.Mmdb mmdbs = 4;
pub fn get_mmdbs(&self) -> &[RoutingRule_Mmdb] {
&self.mmdbs
}
pub fn clear_mmdbs(&mut self) {
self.mmdbs.clear();
}
// Param is passed by value, moved
pub fn set_mmdbs(&mut self, v: ::protobuf::RepeatedField<RoutingRule_Mmdb>) {
self.mmdbs = v;
}
// Mutable pointer to the field.
pub fn mut_mmdbs(&mut self) -> &mut ::protobuf::RepeatedField<RoutingRule_Mmdb> {
&mut self.mmdbs
}
// Take field
pub fn take_mmdbs(&mut self) -> ::protobuf::RepeatedField<RoutingRule_Mmdb> {
::std::mem::replace(&mut self.mmdbs, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for RoutingRule {
fn is_initialized(&self) -> bool {
for v in &self.domains {
if !v.is_initialized() {
return false;
}
};
for v in &self.mmdbs {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.target_tag)?;
},
2 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.domains)?;
},
3 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.ip_cidrs)?;
},
4 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.mmdbs)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.target_tag.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.target_tag);
}
for value in &self.domains {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
for value in &self.ip_cidrs {
my_size += ::protobuf::rt::string_size(3, &value);
};
for value in &self.mmdbs {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.target_tag.is_empty() {
os.write_string(1, &self.target_tag)?;
}
for v in &self.domains {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
for v in &self.ip_cidrs {
os.write_string(3, &v)?;
};
for v in &self.mmdbs {
os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RoutingRule {
RoutingRule::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"target_tag",
|m: &RoutingRule| { &m.target_tag },
|m: &mut RoutingRule| { &mut m.target_tag },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<RoutingRule_Domain>>(
"domains",
|m: &RoutingRule| { &m.domains },
|m: &mut RoutingRule| { &mut m.domains },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"ip_cidrs",
|m: &RoutingRule| { &m.ip_cidrs },
|m: &mut RoutingRule| { &mut m.ip_cidrs },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<RoutingRule_Mmdb>>(
"mmdbs",
|m: &RoutingRule| { &m.mmdbs },
|m: &mut RoutingRule| { &mut m.mmdbs },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RoutingRule>(
"RoutingRule",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RoutingRule {
static instance: ::protobuf::rt::LazyV2<RoutingRule> = ::protobuf::rt::LazyV2::INIT;
instance.get(RoutingRule::new)
}
}
impl ::protobuf::Clear for RoutingRule {
fn clear(&mut self) {
self.target_tag.clear();
self.domains.clear();
self.ip_cidrs.clear();
self.mmdbs.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RoutingRule {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RoutingRule {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RoutingRule_Domain {
// message fields
pub field_type: RoutingRule_Domain_Type,
pub value: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RoutingRule_Domain {
fn default() -> &'a RoutingRule_Domain {
<RoutingRule_Domain as ::protobuf::Message>::default_instance()
}
}
impl RoutingRule_Domain {
pub fn new() -> RoutingRule_Domain {
::std::default::Default::default()
}
// .RoutingRule.Domain.Type type = 1;
pub fn get_field_type(&self) -> RoutingRule_Domain_Type {
self.field_type
}
pub fn clear_field_type(&mut self) {
self.field_type = RoutingRule_Domain_Type::PLAIN;
}
// Param is passed by value, moved
pub fn set_field_type(&mut self, v: RoutingRule_Domain_Type) {
self.field_type = v;
}
// string value = 2;
pub fn get_value(&self) -> &str {
&self.value
}
pub fn clear_value(&mut self) {
self.value.clear();
}
// Param is passed by value, moved
pub fn set_value(&mut self, v: ::std::string::String) {
self.value = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_value(&mut self) -> &mut ::std::string::String {
&mut self.value
}
// Take field
pub fn take_value(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.value, ::std::string::String::new())
}
}
impl ::protobuf::Message for RoutingRule_Domain {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.field_type, 1, &mut self.unknown_fields)?
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.value)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.field_type != RoutingRule_Domain_Type::PLAIN {
my_size += ::protobuf::rt::enum_size(1, self.field_type);
}
if !self.value.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.value);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.field_type != RoutingRule_Domain_Type::PLAIN {
os.write_enum(1, ::protobuf::ProtobufEnum::value(&self.field_type))?;
}
if !self.value.is_empty() {
os.write_string(2, &self.value)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RoutingRule_Domain {
RoutingRule_Domain::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<RoutingRule_Domain_Type>>(
"type",
|m: &RoutingRule_Domain| { &m.field_type },
|m: &mut RoutingRule_Domain| { &mut m.field_type },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"value",
|m: &RoutingRule_Domain| { &m.value },
|m: &mut RoutingRule_Domain| { &mut m.value },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RoutingRule_Domain>(
"RoutingRule.Domain",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RoutingRule_Domain {
static instance: ::protobuf::rt::LazyV2<RoutingRule_Domain> = ::protobuf::rt::LazyV2::INIT;
instance.get(RoutingRule_Domain::new)
}
}
impl ::protobuf::Clear for RoutingRule_Domain {
fn clear(&mut self) {
self.field_type = RoutingRule_Domain_Type::PLAIN;
self.value.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RoutingRule_Domain {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RoutingRule_Domain {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum RoutingRule_Domain_Type {
PLAIN = 0,
DOMAIN = 1,
FULL = 2,
}
impl ::protobuf::ProtobufEnum for RoutingRule_Domain_Type {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<RoutingRule_Domain_Type> {
match value {
0 => ::std::option::Option::Some(RoutingRule_Domain_Type::PLAIN),
1 => ::std::option::Option::Some(RoutingRule_Domain_Type::DOMAIN),
2 => ::std::option::Option::Some(RoutingRule_Domain_Type::FULL),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [RoutingRule_Domain_Type] = &[
RoutingRule_Domain_Type::PLAIN,
RoutingRule_Domain_Type::DOMAIN,
RoutingRule_Domain_Type::FULL,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<RoutingRule_Domain_Type>("RoutingRule.Domain.Type", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for RoutingRule_Domain_Type {
}
impl ::std::default::Default for RoutingRule_Domain_Type {
fn default() -> Self {
RoutingRule_Domain_Type::PLAIN
}
}
impl ::protobuf::reflect::ProtobufValue for RoutingRule_Domain_Type {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RoutingRule_Mmdb {
// message fields
pub file: ::std::string::String,
pub country_code: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RoutingRule_Mmdb {
fn default() -> &'a RoutingRule_Mmdb {
<RoutingRule_Mmdb as ::protobuf::Message>::default_instance()
}
}
impl RoutingRule_Mmdb {
pub fn new() -> RoutingRule_Mmdb {
::std::default::Default::default()
}
// string file = 1;
pub fn get_file(&self) -> &str {
&self.file
}
pub fn clear_file(&mut self) {
self.file.clear();
}
// Param is passed by value, moved
pub fn set_file(&mut self, v: ::std::string::String) {
self.file = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_file(&mut self) -> &mut ::std::string::String {
&mut self.file
}
// Take field
pub fn take_file(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.file, ::std::string::String::new())
}
// string country_code = 2;
pub fn get_country_code(&self) -> &str {
&self.country_code
}
pub fn clear_country_code(&mut self) {
self.country_code.clear();
}
// Param is passed by value, moved
pub fn set_country_code(&mut self, v: ::std::string::String) {
self.country_code = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_country_code(&mut self) -> &mut ::std::string::String {
&mut self.country_code
}
// Take field
pub fn take_country_code(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.country_code, ::std::string::String::new())
}
}
impl ::protobuf::Message for RoutingRule_Mmdb {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.file)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.country_code)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.file.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.file);
}
if !self.country_code.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.country_code);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.file.is_empty() {
os.write_string(1, &self.file)?;
}
if !self.country_code.is_empty() {
os.write_string(2, &self.country_code)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RoutingRule_Mmdb {
RoutingRule_Mmdb::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"file",
|m: &RoutingRule_Mmdb| { &m.file },
|m: &mut RoutingRule_Mmdb| { &mut m.file },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"country_code",
|m: &RoutingRule_Mmdb| { &m.country_code },
|m: &mut RoutingRule_Mmdb| { &mut m.country_code },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RoutingRule_Mmdb>(
"RoutingRule.Mmdb",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RoutingRule_Mmdb {
static instance: ::protobuf::rt::LazyV2<RoutingRule_Mmdb> = ::protobuf::rt::LazyV2::INIT;
instance.get(RoutingRule_Mmdb::new)
}
}
impl ::protobuf::Clear for RoutingRule_Mmdb {
fn clear(&mut self) {
self.file.clear();
self.country_code.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RoutingRule_Mmdb {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RoutingRule_Mmdb {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct Config {
// message fields
pub log: ::protobuf::SingularPtrField<Log>,
pub inbounds: ::protobuf::RepeatedField<Inbound>,
pub outbounds: ::protobuf::RepeatedField<Outbound>,
pub routing_rules: ::protobuf::RepeatedField<RoutingRule>,
pub dns: ::protobuf::SingularPtrField<DNS>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Config {
fn default() -> &'a Config {
<Config as ::protobuf::Message>::default_instance()
}
}
impl Config {
pub fn new() -> Config {
::std::default::Default::default()
}
// .Log log = 1;
pub fn get_log(&self) -> &Log {
self.log.as_ref().unwrap_or_else(|| <Log as ::protobuf::Message>::default_instance())
}
pub fn clear_log(&mut self) {
self.log.clear();
}
pub fn has_log(&self) -> bool {
self.log.is_some()
}
// Param is passed by value, moved
pub fn set_log(&mut self, v: Log) {
self.log = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_log(&mut self) -> &mut Log {
if self.log.is_none() {
self.log.set_default();
}
self.log.as_mut().unwrap()
}
// Take field
pub fn take_log(&mut self) -> Log {
self.log.take().unwrap_or_else(|| Log::new())
}
// repeated .Inbound inbounds = 2;
pub fn get_inbounds(&self) -> &[Inbound] {
&self.inbounds
}
pub fn clear_inbounds(&mut self) {
self.inbounds.clear();
}
// Param is passed by value, moved
pub fn set_inbounds(&mut self, v: ::protobuf::RepeatedField<Inbound>) {
self.inbounds = v;
}
// Mutable pointer to the field.
pub fn mut_inbounds(&mut self) -> &mut ::protobuf::RepeatedField<Inbound> {
&mut self.inbounds
}
// Take field
pub fn take_inbounds(&mut self) -> ::protobuf::RepeatedField<Inbound> {
::std::mem::replace(&mut self.inbounds, ::protobuf::RepeatedField::new())
}
// repeated .Outbound outbounds = 3;
pub fn get_outbounds(&self) -> &[Outbound] {
&self.outbounds
}
pub fn clear_outbounds(&mut self) {
self.outbounds.clear();
}
// Param is passed by value, moved
pub fn set_outbounds(&mut self, v: ::protobuf::RepeatedField<Outbound>) {
self.outbounds = v;
}
// Mutable pointer to the field.
pub fn mut_outbounds(&mut self) -> &mut ::protobuf::RepeatedField<Outbound> {
&mut self.outbounds
}
// Take field
pub fn take_outbounds(&mut self) -> ::protobuf::RepeatedField<Outbound> {
::std::mem::replace(&mut self.outbounds, ::protobuf::RepeatedField::new())
}
// repeated .RoutingRule routing_rules = 4;
pub fn get_routing_rules(&self) -> &[RoutingRule] {
&self.routing_rules
}
pub fn clear_routing_rules(&mut self) {
self.routing_rules.clear();
}
// Param is passed by value, moved
pub fn set_routing_rules(&mut self, v: ::protobuf::RepeatedField<RoutingRule>) {
self.routing_rules = v;
}
// Mutable pointer to the field.
pub fn mut_routing_rules(&mut self) -> &mut ::protobuf::RepeatedField<RoutingRule> {
&mut self.routing_rules
}
// Take field
pub fn take_routing_rules(&mut self) -> ::protobuf::RepeatedField<RoutingRule> {
::std::mem::replace(&mut self.routing_rules, ::protobuf::RepeatedField::new())
}
// .DNS dns = 5;
pub fn get_dns(&self) -> &DNS {
self.dns.as_ref().unwrap_or_else(|| <DNS as ::protobuf::Message>::default_instance())
}
pub fn clear_dns(&mut self) {
self.dns.clear();
}
pub fn has_dns(&self) -> bool {
self.dns.is_some()
}
// Param is passed by value, moved
pub fn set_dns(&mut self, v: DNS) {
self.dns = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_dns(&mut self) -> &mut DNS {
if self.dns.is_none() {
self.dns.set_default();
}
self.dns.as_mut().unwrap()
}
// Take field
pub fn take_dns(&mut self) -> DNS {
self.dns.take().unwrap_or_else(|| DNS::new())
}
}
impl ::protobuf::Message for Config {
fn is_initialized(&self) -> bool {
for v in &self.log {
if !v.is_initialized() {
return false;
}
};
for v in &self.inbounds {
if !v.is_initialized() {
return false;
}
};
for v in &self.outbounds {
if !v.is_initialized() {
return false;
}
};
for v in &self.routing_rules {
if !v.is_initialized() {
return false;
}
};
for v in &self.dns {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
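        // Dispatch on each field's tag number; unrecognized fields are kept in
        // `unknown_fields` so unknown data survives a decode/encode round trip.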
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.log)?;
},
2 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.inbounds)?;
},
3 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.outbounds)?;
},
4 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.routing_rules)?;
},
5 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.dns)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(ref v) = self.log.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
for value in &self.inbounds {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
for value in &self.outbounds {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
for value in &self.routing_rules {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
if let Some(ref v) = self.dns.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if let Some(ref v) = self.log.as_ref() {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
for v in &self.inbounds {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
for v in &self.outbounds {
os.write_tag(3, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
for v in &self.routing_rules {
os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
if let Some(ref v) = self.dns.as_ref() {
os.write_tag(5, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Config {
Config::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<Log>>(
"log",
|m: &Config| { &m.log },
|m: &mut Config| { &mut m.log },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<Inbound>>(
"inbounds",
|m: &Config| { &m.inbounds },
|m: &mut Config| { &mut m.inbounds },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<Outbound>>(
"outbounds",
|m: &Config| { &m.outbounds },
|m: &mut Config| { &mut m.outbounds },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<RoutingRule>>(
"routing_rules",
|m: &Config| { &m.routing_rules },
|m: &mut Config| { &mut m.routing_rules },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<DNS>>(
"dns",
|m: &Config| { &m.dns },
|m: &mut Config| { &mut m.dns },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Config>(
"Config",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Config {
static instance: ::protobuf::rt::LazyV2<Config> = ::protobuf::rt::LazyV2::INIT;
instance.get(Config::new)
}
}
impl ::protobuf::Clear for Config {
fn clear(&mut self) {
self.log.clear();
self.inbounds.clear();
self.outbounds.clear();
self.routing_rules.clear();
self.dns.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Config {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Config {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
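// Serialized `FileDescriptorProto` for `src/config/internal/config.proto`,
// embedded as raw bytes and parsed lazily by `file_descriptor_proto()` below.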
static file_descriptor_proto_data: &'static [u8] = b"\
\n\x20src/config/internal/config.proto\"3\n\x03DNS\x12\x18\n\x07servers\
\x18\x01\x20\x03(\tR\x07servers\x12\x12\n\x04bind\x18\x02\x20\x01(\tR\
\x04bind\"\xcc\x01\n\x03Log\x12\x20\n\x05level\x18\x01\x20\x01(\x0e2\n.L\
og.LevelR\x05level\x12#\n\x06output\x18\x02\x20\x01(\x0e2\x0b.Log.Output\
R\x06output\x12\x1f\n\x0boutput_file\x18\x03\x20\x01(\tR\noutputFile\"<\
\n\x05Level\x12\t\n\x05TRACE\x10\0\x12\t\n\x05DEBUG\x10\x01\x12\x08\n\
\x04INFO\x10\x02\x12\x08\n\x04WARN\x10\x03\x12\t\n\x05ERROR\x10\x04\"\
\x1f\n\x06Output\x12\x0b\n\x07CONSOLE\x10\0\x12\x08\n\x04FILE\x10\x01\"\
\xc2\x01\n\x12TUNInboundSettings\x12\x0e\n\x02fd\x18\x01\x20\x01(\x05R\
\x02fd\x12\x12\n\x04name\x18\x02\x20\x01(\tR\x04name\x12\x18\n\x07addres\
s\x18\x03\x20\x01(\tR\x07address\x12\x18\n\x07gateway\x18\x04\x20\x01(\t\
R\x07gateway\x12\x18\n\x07netmask\x18\x05\x20\x01(\tR\x07netmask\x12\x10\
\n\x03mtu\x18\x06\x20\x01(\x05R\x03mtu\x12(\n\x10fake_dns_exclude\x18\
\x07\x20\x03(\tR\x0efakeDnsExclude\"*\n\x14SocksInboundSettings\x12\x12\
\n\x04bind\x18\x01\x20\x01(\tR\x04bind\"\x7f\n\x07Inbound\x12\x10\n\x03t\
ag\x18\x01\x20\x01(\tR\x03tag\x12\x1a\n\x08protocol\x18\x02\x20\x01(\tR\
\x08protocol\x12\x16\n\x06listen\x18\x03\x20\x01(\tR\x06listen\x12\x12\n\
\x04port\x18\x04\x20\x01(\rR\x04port\x12\x1a\n\x08settings\x18\x05\x20\
\x01(\x0cR\x08settings\"H\n\x18RedirectOutboundSettings\x12\x18\n\x07add\
ress\x18\x01\x20\x01(\tR\x07address\x12\x12\n\x04port\x18\x02\x20\x01(\r\
R\x04port\"E\n\x15SocksOutboundSettings\x12\x18\n\x07address\x18\x01\x20\
\x01(\tR\x07address\x12\x12\n\x04port\x18\x02\x20\x01(\rR\x04port\"\x7f\
\n\x1bShadowsocksOutboundSettings\x12\x18\n\x07address\x18\x01\x20\x01(\
\tR\x07address\x12\x12\n\x04port\x18\x02\x20\x01(\rR\x04port\x12\x16\n\
\x06method\x18\x03\x20\x01(\tR\x06method\x12\x1a\n\x08password\x18\x04\
\x20\x01(\tR\x08password\"b\n\x16TrojanOutboundSettings\x12\x18\n\x07add\
ress\x18\x01\x20\x01(\tR\x07address\x12\x12\n\x04port\x18\x02\x20\x01(\r\
R\x04port\x12\x1a\n\x08password\x18\x03\x20\x01(\tR\x08password\"u\n\x15\
VMessOutboundSettings\x12\x18\n\x07address\x18\x01\x20\x01(\tR\x07addres\
s\x12\x12\n\x04port\x18\x02\x20\x01(\rR\x04port\x12\x12\n\x04uuid\x18\
\x03\x20\x01(\tR\x04uuid\x12\x1a\n\x08security\x18\x04\x20\x01(\tR\x08se\
curity\"Y\n\x15VLessOutboundSettings\x12\x18\n\x07address\x18\x01\x20\
\x01(\tR\x07address\x12\x12\n\x04port\x18\x02\x20\x01(\rR\x04port\x12\
\x12\n\x04uuid\x18\x03\x20\x01(\tR\x04uuid\"6\n\x13TlsOutboundSettings\
\x12\x1f\n\x0bserver_name\x18\x01\x20\x01(\tR\nserverName\"/\n\x19WebSoc\
ketOutboundSettings\x12\x12\n\x04path\x18\x01\x20\x01(\tR\x04path\"O\n\
\x16TryAllOutboundSettings\x12\x16\n\x06actors\x18\x01\x20\x03(\tR\x06ac\
tors\x12\x1d\n\ndelay_base\x18\x02\x20\x01(\rR\tdelayBase\"0\n\x16Random\
OutboundSettings\x12\x16\n\x06actors\x18\x01\x20\x03(\tR\x06actors\"/\n\
\x15ChainOutboundSettings\x12\x16\n\x06actors\x18\x01\x20\x03(\tR\x06act\
ors\"\xbb\x01\n\x18FailOverOutboundSettings\x12\x16\n\x06actors\x18\x01\
\x20\x03(\tR\x06actors\x12!\n\x0cfail_timeout\x18\x02\x20\x01(\rR\x0bfai\
lTimeout\x12!\n\x0chealth_check\x18\x03\x20\x01(\x08R\x0bhealthCheck\x12\
%\n\x0echeck_interval\x18\x04\x20\x01(\rR\rcheckInterval\x12\x1a\n\x08fa\
ilover\x18\x05\x20\x01(\x08R\x08failover\"h\n\x08Outbound\x12\x10\n\x03t\
ag\x18\x01\x20\x01(\tR\x03tag\x12\x1a\n\x08protocol\x18\x02\x20\x01(\tR\
\x08protocol\x12\x12\n\x04bind\x18\x03\x20\x01(\tR\x04bind\x12\x1a\n\x08\
settings\x18\x04\x20\x01(\x0cR\x08settings\"\xd5\x02\n\x0bRoutingRule\
\x12\x1d\n\ntarget_tag\x18\x01\x20\x01(\tR\ttargetTag\x12-\n\x07domains\
\x18\x02\x20\x03(\x0b2\x13.RoutingRule.DomainR\x07domains\x12\x19\n\x08i\
p_cidrs\x18\x03\x20\x03(\tR\x07ipCidrs\x12'\n\x05mmdbs\x18\x04\x20\x03(\
\x0b2\x11.RoutingRule.MmdbR\x05mmdbs\x1au\n\x06Domain\x12,\n\x04type\x18\
\x01\x20\x01(\x0e2\x18.RoutingRule.Domain.TypeR\x04type\x12\x14\n\x05val\
ue\x18\x02\x20\x01(\tR\x05value\"'\n\x04Type\x12\t\n\x05PLAIN\x10\0\x12\
\n\n\x06DOMAIN\x10\x01\x12\x08\n\x04FULL\x10\x02\x1a=\n\x04Mmdb\x12\x12\
\n\x04file\x18\x01\x20\x01(\tR\x04file\x12!\n\x0ccountry_code\x18\x02\
\x20\x01(\tR\x0bcountryCode\"\xba\x01\n\x06Config\x12\x16\n\x03log\x18\
\x01\x20\x01(\x0b2\x04.LogR\x03log\x12$\n\x08inbounds\x18\x02\x20\x03(\
\x0b2\x08.InboundR\x08inbounds\x12'\n\toutbounds\x18\x03\x20\x03(\x0b2\t\
.OutboundR\toutbounds\x121\n\rrouting_rules\x18\x04\x20\x03(\x0b2\x0c.Ro\
utingRuleR\x0croutingRules\x12\x16\n\x03dns\x18\x05\x20\x01(\x0b2\x04.DN\
SR\x03dnsb\x06proto3\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
| 32.824867 | 149 | 0.57112 |
1ce75deeb12082bde4ae332c52fae25415103f10 | 4,733 | use kekbit::api::ReadError::*;
use kekbit::api::{EncoderHandler, Reader, Writer};
use kekbit::core::*;
use log::{error, info};
use nix::sys::wait::waitpid;
use nix::unistd::{fork, getpid, ForkResult};
use simple_logger::SimpleLogger;
use std::path::Path;
use std::process::exit;
use std::result::Result;
const ITERATIONS: u32 = 10_000_000;
const Q_PATH: &str = "/dev/shm";
//const Q_PATH: &str = "./shm/keki";
pub fn run_writer() -> Result<(), ()> {
info!("Creating writer process ...{}", getpid());
let chunk_size = 100;
let metadata = Metadata::new(100, 1000, chunk_size * (ITERATIONS + 100), 1000, 99999999999, TickUnit::Nanos);
let mut writer = shm_writer(&Path::new(Q_PATH), &metadata, EncoderHandler::default()).unwrap();
let msg_bytes = "There are 10 kinds of people: those who know binary and those who don't".as_bytes();
// let msgs: Vec<&str> = "There are 10 kinds of people: those who know binary and those who don't"
// .split_whitespace()
// .collect();
let mut total = 16;
for _i in 0..ITERATIONS {
// for m in &msgs {
// let to_wr = m.as_bytes();
// let len = to_wr.len() as u32;
// let res = writer.write(&to_wr, len);
// match res {
// WriteResult::Success(_) => (),
// err => {
// error!("Write failed {:?}", err);
// panic!("Write failed");
// }
// }
// }
let res = writer.write(&msg_bytes);
match res {
Ok(b) => {
total += b;
}
Err(err) => {
error!("Write failed {:?}", err);
panic!("Write failed");
}
};
}
info!("We wrote {} bytes ", total);
Ok(())
}
pub fn run_reader() -> Result<(), ()> {
info!("Creating reader porcess ...{}", getpid());
let mut reader = try_shm_reader(&Path::new(Q_PATH), 1000, 2000, 200).unwrap();
let mut stop = false;
let mut msg_count = 0;
while !stop {
match reader.try_read() {
Ok(Some(_)) => msg_count += 1,
Ok(None) => (),
Err(read_err) => match read_err {
Timeout(_) => {
info!("Timeout detected by reader");
stop = true;
}
Closed => {
info!("Closed channel detected by reader");
stop = true;
}
ChannelFull | Failed => {
error!("Read failed. Will stop. So far we read {} messages", msg_count);
panic!("Read failed!!!!");
}
},
}
}
info!(
"We read {} bytes in {} messages. Channel state is {:?}",
reader.position(),
msg_count,
reader.exhausted()
);
Ok(())
}
fn main() {
SimpleLogger::new().init().unwrap();
info!("Kekbit Driver PID is {}.", getpid());
let w_pid = match fork() {
Ok(ForkResult::Child) => {
exit(match run_writer() {
Ok(_) => 0,
Err(err) => {
error!("error: {:?}", err);
1
}
});
}
Ok(ForkResult::Parent { child, .. }) => child,
Err(err) => {
panic!("[main] writer fork() failed: {}", err);
}
};
let mut rpids = Vec::new();
for _i in 0..1 {
let r_pid = match fork() {
Ok(ForkResult::Child) => {
exit(match run_reader() {
Ok(_) => 0,
Err(err) => {
error!("error: {:?}", err);
1
}
});
}
Ok(ForkResult::Parent { child, .. }) => child,
Err(err) => {
panic!("[main] reader fork() failed: {}", err);
}
};
rpids.push(r_pid);
}
for r_pid in rpids {
match waitpid(r_pid, None) {
Ok(status) => info!("[main] Reader {} completed with status {:?}", r_pid, status),
Err(err) => panic!("[main] waitpid() on reader {} failed: {}", r_pid, err),
}
}
match waitpid(w_pid, None) {
Ok(status) => info!("[main] Writer completed with status {:?}", status),
Err(err) => panic!("[main] waitpid() on writer failed: {}", err),
}
let shm_file_path = storage_path(&Path::new(Q_PATH), 1000);
if shm_file_path.exists() {
std::fs::remove_file(&shm_file_path).unwrap();
info!("Channel data file {:?} removed", &shm_file_path);
}
info!("Kekbit Driver Done!");
}
| 32.417808 | 113 | 0.462286 |
fbc3faa7caabc2fe54b997569f91a791e48da358 | 9,045 | use rand::{Rand, Rng};
use std::{
cmp::{Ord, Ordering, PartialOrd},
io::{Read, Result as IoResult, Write},
marker::PhantomData,
ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign},
};
use crate::{
bytes::{FromBytes, ToBytes},
fields::{Field, LegendreSymbol, PrimeField, SquareRootField},
};
pub trait Fp2Parameters: 'static + Send + Sync {
type Fp: PrimeField + SquareRootField;
const NONRESIDUE: Self::Fp;
const QUADRATIC_NONRESIDUE: (Self::Fp, Self::Fp);
/// Coefficients for the Frobenius automorphism.
const FROBENIUS_COEFF_FP2_C1: [Self::Fp; 2];
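    /// Multiplies an element of `Fp` by `NONRESIDUE`. Implementations may
    /// override this default with a cheaper specialized routine.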
#[inline(always)]
fn mul_fp_by_nonresidue(fe: &Self::Fp) -> Self::Fp {
Self::NONRESIDUE * fe
}
}
#[derive(Derivative)]
#[derivative(
Default(bound = "P: Fp2Parameters"),
Hash(bound = "P: Fp2Parameters"),
Clone(bound = "P: Fp2Parameters"),
Copy(bound = "P: Fp2Parameters"),
Debug(bound = "P: Fp2Parameters"),
PartialEq(bound = "P: Fp2Parameters"),
Eq(bound = "P: Fp2Parameters")
)]
pub struct Fp2<P: Fp2Parameters> {
pub c0: P::Fp,
pub c1: P::Fp,
#[derivative(Debug = "ignore")]
_parameters: PhantomData<P>,
}
impl<P: Fp2Parameters> Fp2<P> {
pub const fn new(c0: P::Fp, c1: P::Fp) -> Self {
Fp2 {
c0,
c1,
_parameters: PhantomData,
}
}
    /// Norm of Fp2 over Fp: Norm(a) = a.c0^2 - beta * a.c1^2
pub fn norm(&self) -> P::Fp {
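        // Norm(a) = a * conj(a), where conj(c0 + c1*u) = c0 - c1*u and u^2 = beta,
        // which expands to c0^2 - beta*c1^2.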
let t0 = self.c0.square();
let mut t1 = self.c1.square();
t1 = -P::mul_fp_by_nonresidue(&t1);
t1.add_assign(&t0);
t1
}
pub fn mul_by_fp(&mut self, element: &P::Fp) {
self.c0.mul_assign(&element);
self.c1.mul_assign(&element);
}
}
impl<P: Fp2Parameters> Field for Fp2<P> {
fn zero() -> Self {
Fp2::new(P::Fp::zero(), P::Fp::zero())
}
fn is_zero(&self) -> bool {
self.c0.is_zero() && self.c1.is_zero()
}
fn one() -> Self {
Fp2::new(P::Fp::one(), P::Fp::zero())
}
fn is_one(&self) -> bool {
self.c0.is_one() && self.c1.is_zero()
}
#[inline]
fn characteristic<'a>() -> &'a [u64] {
P::Fp::characteristic()
}
fn double(&self) -> Self {
let mut result = self.clone();
result.double_in_place();
result
}
fn double_in_place(&mut self) -> &mut Self {
self.c0.double_in_place();
self.c1.double_in_place();
self
}
fn square(&self) -> Self {
let mut result = *self;
result.square_in_place();
result
}
fn square_in_place(&mut self) -> &mut Self {
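        // Complex squaring with three base-field multiplications:
        // (c0 + c1*u)^2 = (c0^2 + beta*c1^2) + (2*c0*c1)*u, with u^2 = beta.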
// v0 = c0 - c1
let mut v0 = self.c0 - &self.c1;
// v3 = c0 - beta * c1
let v3 = self.c0 - &P::mul_fp_by_nonresidue(&self.c1);
// v2 = c0 * c1
let v2 = self.c0 * &self.c1;
// v0 = (v0 * v3) + v2
v0 *= &v3;
v0 += &v2;
self.c1 = v2.double();
self.c0 = v0 + &P::mul_fp_by_nonresidue(&v2);
self
}
fn inverse(&self) -> Option<Self> {
if self.is_zero() {
None
} else {
// Guide to Pairing-based Cryptography, Algorithm 5.19.
// v0 = c0.square()
let mut v0 = self.c0.square();
// v1 = c1.square()
let v1 = self.c1.square();
// v0 = v0 - beta * v1
v0 -= &P::mul_fp_by_nonresidue(&v1);
v0.inverse().map(|v1| {
let c0 = self.c0 * &v1;
let c1 = -(self.c1 * &v1);
Self::new(c0, c1)
})
}
}
fn inverse_in_place(&mut self) -> Option<&mut Self> {
if let Some(inverse) = self.inverse() {
*self = inverse;
Some(self)
} else {
None
}
}
fn frobenius_map(&mut self, power: usize) {
self.c1.mul_assign(&P::FROBENIUS_COEFF_FP2_C1[power % 2]);
}
}
impl<'a, P: Fp2Parameters> SquareRootField for Fp2<P> {
fn legendre(&self) -> LegendreSymbol {
self.norm().legendre()
}
fn sqrt(&self) -> Option<Self> {
use crate::LegendreSymbol::*;
if self.c1.is_zero() {
return self.c0.sqrt().map(|c0| Self::new(c0, P::Fp::zero()));
}
match self.legendre() {
// Square root based on the complex method. See
// https://eprint.iacr.org/2012/685.pdf (page 15, algorithm 8)
Zero => Some(*self),
QuadraticNonResidue => None,
QuadraticResidue => {
let two_inv = P::Fp::one()
.double()
.inverse()
.expect("Two should always have an inverse");
let alpha = self
.norm()
.sqrt()
.expect("We are in the QR case, the norm should have a square root");
let mut delta = (alpha + &self.c0) * &two_inv;
if delta.legendre().is_qnr() {
                    delta -= &alpha;
}
let c0 = delta.sqrt().expect("Delta must have a square root");
let c0_inv = c0.inverse().expect("c0 must have an inverse");
Some(Self::new(c0, self.c1 * &two_inv * &c0_inv))
},
}
}
fn sqrt_in_place(&mut self) -> Option<&mut Self> {
(*self).sqrt().map(|sqrt| {
*self = sqrt;
self
})
}
}
/// `Fp2` elements are ordered lexicographically.
impl<P: Fp2Parameters> Ord for Fp2<P> {
#[inline(always)]
fn cmp(&self, other: &Self) -> Ordering {
match self.c1.cmp(&other.c1) {
Ordering::Greater => Ordering::Greater,
Ordering::Less => Ordering::Less,
Ordering::Equal => self.c0.cmp(&other.c0),
}
}
}
impl<P: Fp2Parameters> PartialOrd for Fp2<P> {
#[inline(always)]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<P: Fp2Parameters> ToBytes for Fp2<P> {
#[inline]
fn write<W: Write>(&self, mut writer: W) -> IoResult<()> {
self.c0.write(&mut writer)?;
self.c1.write(writer)
}
}
impl<P: Fp2Parameters> FromBytes for Fp2<P> {
#[inline]
fn read<R: Read>(mut reader: R) -> IoResult<Self> {
let c0 = P::Fp::read(&mut reader)?;
let c1 = P::Fp::read(reader)?;
Ok(Fp2::new(c0, c1))
}
}
impl<P: Fp2Parameters> Neg for Fp2<P> {
type Output = Self;
#[inline]
#[must_use]
fn neg(self) -> Self {
let mut res = self.clone();
res.c0 = res.c0.neg();
res.c1 = res.c1.neg();
res
}
}
impl<P: Fp2Parameters> Rand for Fp2<P> {
fn rand<R: Rng>(rng: &mut R) -> Self {
Fp2::new(rng.gen(), rng.gen())
}
}
impl<'a, P: Fp2Parameters> Add<&'a Fp2<P>> for Fp2<P> {
type Output = Self;
#[inline]
fn add(self, other: &Self) -> Self {
let mut result = self;
result.add_assign(&other);
result
}
}
impl<'a, P: Fp2Parameters> Sub<&'a Fp2<P>> for Fp2<P> {
type Output = Self;
#[inline]
fn sub(self, other: &Self) -> Self {
let mut result = self;
result.sub_assign(&other);
result
}
}
impl<'a, P: Fp2Parameters> Mul<&'a Fp2<P>> for Fp2<P> {
type Output = Self;
#[inline]
fn mul(self, other: &Self) -> Self {
let mut result = self;
result.mul_assign(&other);
result
}
}
impl<'a, P: Fp2Parameters> Div<&'a Fp2<P>> for Fp2<P> {
type Output = Self;
#[inline]
fn div(self, other: &Self) -> Self {
let mut result = self;
result.mul_assign(&other.inverse().unwrap());
result
}
}
impl<'a, P: Fp2Parameters> AddAssign<&'a Self> for Fp2<P> {
#[inline]
fn add_assign(&mut self, other: &Self) {
self.c0.add_assign(&other.c0);
self.c1.add_assign(&other.c1);
}
}
impl<'a, P: Fp2Parameters> SubAssign<&'a Self> for Fp2<P> {
#[inline]
fn sub_assign(&mut self, other: &Self) {
self.c0.sub_assign(&other.c0);
self.c1.sub_assign(&other.c1);
}
}
impl<'a, P: Fp2Parameters> MulAssign<&'a Self> for Fp2<P> {
#[inline]
fn mul_assign(&mut self, other: &Self) {
        // Karatsuba multiplication;
        // Guide to Pairing-based Cryptography, Algorithm 5.16.
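        // (a0 + a1*u)(b0 + b1*u) = (a0*b0 + beta*a1*b1)
        //     + ((a0 + a1)*(b0 + b1) - a0*b0 - a1*b1)*u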
let v0 = self.c0 * &other.c0;
let v1 = self.c1 * &other.c1;
self.c1 += &self.c0;
self.c1 *= &(other.c0 + &other.c1);
self.c1 -= &v0;
self.c1 -= &v1;
self.c0 = v0 + &P::mul_fp_by_nonresidue(&v1);
}
}
impl<'a, P: Fp2Parameters> DivAssign<&'a Self> for Fp2<P> {
#[inline]
fn div_assign(&mut self, other: &Self) {
self.mul_assign(&other.inverse().unwrap());
}
}
impl<P: Fp2Parameters> std::fmt::Display for Fp2<P> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Fp2({} + {} * u)", self.c0, self.c1)
}
}
| 26.066282 | 89 | 0.516418 |
677c027f17b54c65480db41e57d50c5132730931 | 28,718 | //! Macro support for format strings
//!
//! These structures are used when parsing format strings for the compiler.
//! Parsing does not happen at runtime: structures of `std::fmt::rt` are
//! generated instead.
#![doc(
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/",
test(attr(deny(warnings)))
)]
#![feature(nll)]
#![feature(or_patterns)]
#![feature(rustc_private)]
#![feature(unicode_internals)]
#![feature(bool_to_option)]
pub use Alignment::*;
pub use Count::*;
pub use Flag::*;
pub use Piece::*;
pub use Position::*;
use std::iter;
use std::str;
use std::string;
use rustc_span::{InnerSpan, Symbol};
/// The type of format string that we are parsing.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum ParseMode {
/// A normal format string as per `format_args!`.
Format,
/// An inline assembly template string for `asm!`.
InlineAsm,
}
#[derive(Copy, Clone)]
struct InnerOffset(usize);
impl InnerOffset {
fn to(self, end: InnerOffset) -> InnerSpan {
InnerSpan::new(self.0, end.0)
}
}
/// A piece is a portion of the format string which represents the next part
/// to emit. These are emitted as a stream by the `Parser` class.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Piece<'a> {
/// A literal string which should directly be emitted
String(&'a str),
/// This describes that formatting should process the next argument (as
/// specified inside) for emission.
NextArgument(Argument<'a>),
}
/// Representation of an argument specification.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct Argument<'a> {
/// Where to find this argument
pub position: Position,
/// How to format the argument
pub format: FormatSpec<'a>,
}
/// Specification for the formatting of an argument in the format string.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct FormatSpec<'a> {
/// Optionally specified character to fill alignment with.
pub fill: Option<char>,
/// Optionally specified alignment.
pub align: Alignment,
/// Packed version of various flags provided.
pub flags: u32,
/// The integer precision to use.
pub precision: Count,
/// The span of the precision formatting flag (for diagnostics).
pub precision_span: Option<InnerSpan>,
/// The string width requested for the resulting format.
pub width: Count,
/// The span of the width formatting flag (for diagnostics).
pub width_span: Option<InnerSpan>,
/// The descriptor string representing the name of the format desired for
/// this argument, this can be empty or any number of characters, although
/// it is required to be one word.
pub ty: &'a str,
/// The span of the descriptor string (for diagnostics).
pub ty_span: Option<InnerSpan>,
}
/// Enum describing where an argument for a format can be located.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Position {
/// The argument is implied to be located at an index
ArgumentImplicitlyIs(usize),
/// The argument is located at a specific index given in the format
ArgumentIs(usize),
/// The argument has a name.
ArgumentNamed(Symbol),
}
impl Position {
pub fn index(&self) -> Option<usize> {
match self {
ArgumentIs(i) | ArgumentImplicitlyIs(i) => Some(*i),
_ => None,
}
}
}
/// Enum of alignments which are supported.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Alignment {
/// The value will be aligned to the left.
AlignLeft,
/// The value will be aligned to the right.
AlignRight,
/// The value will be aligned in the center.
AlignCenter,
/// The value will take on a default alignment.
AlignUnknown,
}
/// Various flags which can be applied to format strings. The meaning of these
/// flags is defined by the formatters themselves.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Flag {
/// A `+` will be used to denote positive numbers.
FlagSignPlus,
/// A `-` will be used to denote negative numbers. This is the default.
FlagSignMinus,
/// An alternate form will be used for the value. In the case of numbers,
/// this means that the number will be prefixed with the supplied string.
FlagAlternate,
/// For numbers, this means that the number will be padded with zeroes,
/// and the sign (`+` or `-`) will precede them.
FlagSignAwareZeroPad,
/// For Debug / `?`, format integers in lower-case hexadecimal.
FlagDebugLowerHex,
/// For Debug / `?`, format integers in upper-case hexadecimal.
FlagDebugUpperHex,
}
/// A count is used for the precision and width parameters of an integer, and
/// can reference either an argument or a literal integer.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Count {
/// The count is specified explicitly.
CountIs(usize),
/// The count is specified by the argument with the given name.
CountIsName(Symbol),
/// The count is specified by the argument at the given index.
CountIsParam(usize),
/// The count is implied and cannot be explicitly specified.
CountImplied,
}
pub struct ParseError {
pub description: string::String,
pub note: Option<string::String>,
pub label: string::String,
pub span: InnerSpan,
pub secondary_label: Option<(string::String, InnerSpan)>,
}
/// The parser structure for interpreting the input format string. This is
/// modeled as an iterator over `Piece` structures to form a stream of tokens
/// being output.
///
/// This is a recursive-descent parser for the sake of simplicity; if it ever
/// becomes a bottleneck, there is likely room for performance improvement.
pub struct Parser<'a> {
mode: ParseMode,
input: &'a str,
cur: iter::Peekable<str::CharIndices<'a>>,
/// Error messages accumulated during parsing
pub errors: Vec<ParseError>,
/// Current position of implicit positional argument pointer
curarg: usize,
/// `Some(raw count)` when the string is "raw", used to position spans correctly
style: Option<usize>,
/// Start and end byte offset of every successfully parsed argument
pub arg_places: Vec<InnerSpan>,
    /// Positions of source characters (e.g. within escape sequences) that must
    /// be skipped over when mapping string offsets back to source spans
skips: Vec<usize>,
/// Span of the last opening brace seen, used for error reporting
last_opening_brace: Option<InnerSpan>,
    /// Whether the source string comes from `println!` as opposed to `format!` or `print!`
append_newline: bool,
/// Whether this formatting string is a literal or it comes from a macro.
is_literal: bool,
}
impl<'a> Iterator for Parser<'a> {
type Item = Piece<'a>;
fn next(&mut self) -> Option<Piece<'a>> {
if let Some(&(pos, c)) = self.cur.peek() {
match c {
'{' => {
let curr_last_brace = self.last_opening_brace;
let byte_pos = self.to_span_index(pos);
self.last_opening_brace = Some(byte_pos.to(InnerOffset(byte_pos.0 + 1)));
self.cur.next();
if self.consume('{') {
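                        // `{{` is an escaped literal `{`: emit the text after it
                        // as a plain string piece.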
self.last_opening_brace = curr_last_brace;
Some(String(self.string(pos + 1)))
} else {
let arg = self.argument();
if let Some(end) = self.must_consume('}') {
let start = self.to_span_index(pos);
let end = self.to_span_index(end + 1);
if self.is_literal {
self.arg_places.push(start.to(end));
}
}
Some(NextArgument(arg))
}
}
'}' => {
self.cur.next();
if self.consume('}') {
Some(String(self.string(pos + 1)))
} else {
let err_pos = self.to_span_index(pos);
self.err_with_note(
"unmatched `}` found",
"unmatched `}`",
"if you intended to print `}`, you can escape it using `}}`",
err_pos.to(err_pos),
);
None
}
}
                _ => Some(String(self.string(pos))),
}
} else {
None
}
}
}
impl<'a> Parser<'a> {
/// Creates a new parser for the given format string
pub fn new(
s: &'a str,
style: Option<usize>,
snippet: Option<string::String>,
append_newline: bool,
mode: ParseMode,
) -> Parser<'a> {
let (skips, is_literal) = find_skips_from_snippet(snippet, style);
Parser {
mode,
input: s,
cur: s.char_indices().peekable(),
errors: vec![],
curarg: 0,
style,
arg_places: vec![],
skips,
last_opening_brace: None,
append_newline,
is_literal,
}
}
    /// Notifies of an error. The message doesn't strictly need to be an owned
    /// `String` yet, but taking `Into<string::String>` keeps call sites simple
    /// and matches how the message is eventually stored.
fn err<S1: Into<string::String>, S2: Into<string::String>>(
&mut self,
description: S1,
label: S2,
span: InnerSpan,
) {
self.errors.push(ParseError {
description: description.into(),
note: None,
label: label.into(),
span,
secondary_label: None,
});
}
    /// Notifies of an error. The message doesn't strictly need to be an owned
    /// `String` yet, but taking `Into<string::String>` keeps call sites simple
    /// and matches how the message is eventually stored.
fn err_with_note<
S1: Into<string::String>,
S2: Into<string::String>,
S3: Into<string::String>,
>(
&mut self,
description: S1,
label: S2,
note: S3,
span: InnerSpan,
) {
self.errors.push(ParseError {
description: description.into(),
note: Some(note.into()),
label: label.into(),
span,
secondary_label: None,
});
}
/// Optionally consumes the specified character. If the character is not at
/// the current position, then the current iterator isn't moved and `false` is
/// returned, otherwise the character is consumed and `true` is returned.
fn consume(&mut self, c: char) -> bool {
self.consume_pos(c).is_some()
}
/// Optionally consumes the specified character. If the character is not at
/// the current position, then the current iterator isn't moved and `None` is
/// returned, otherwise the character is consumed and the current position is
/// returned.
fn consume_pos(&mut self, c: char) -> Option<usize> {
if let Some(&(pos, maybe)) = self.cur.peek() {
if c == maybe {
self.cur.next();
return Some(pos);
}
}
None
}
fn to_span_index(&self, pos: usize) -> InnerOffset {
let mut pos = pos;
// This handles the raw string case, the raw argument is the number of #
// in r###"..."### (we need to add one because of the `r`).
let raw = self.style.map(|raw| raw + 1).unwrap_or(0);
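        // Each entry in `skips` is a source-snippet byte (e.g. part of an escape
        // sequence) with no counterpart in the unescaped string; skip past them.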
for skip in &self.skips {
if pos > *skip {
pos += 1;
} else if pos == *skip && raw == 0 {
pos += 1;
} else {
break;
}
}
InnerOffset(raw + pos + 1)
}
/// Forces consumption of the specified character. If the character is not
/// found, an error is emitted.
fn must_consume(&mut self, c: char) -> Option<usize> {
self.ws();
if let Some(&(pos, maybe)) = self.cur.peek() {
if c == maybe {
self.cur.next();
Some(pos)
} else {
let pos = self.to_span_index(pos);
let description = format!("expected `'}}'`, found `{:?}`", maybe);
let label = "expected `}`".to_owned();
let (note, secondary_label) = if c == '}' {
(
Some(
"if you intended to print `{`, you can escape it using `{{`".to_owned(),
),
self.last_opening_brace
.map(|sp| ("because of this opening brace".to_owned(), sp)),
)
} else {
(None, None)
};
self.errors.push(ParseError {
description,
note,
label,
span: pos.to(pos),
secondary_label,
});
None
}
} else {
let description = format!("expected `{:?}` but string was terminated", c);
// point at closing `"`
let pos = self.input.len() - if self.append_newline { 1 } else { 0 };
let pos = self.to_span_index(pos);
if c == '}' {
let label = format!("expected `{:?}`", c);
let (note, secondary_label) = if c == '}' {
(
Some(
"if you intended to print `{`, you can escape it using `{{`".to_owned(),
),
self.last_opening_brace
.map(|sp| ("because of this opening brace".to_owned(), sp)),
)
} else {
(None, None)
};
self.errors.push(ParseError {
description,
note,
label,
span: pos.to(pos),
secondary_label,
});
} else {
self.err(description, format!("expected `{:?}`", c), pos.to(pos));
}
None
}
}
/// Consumes all whitespace characters until the first non-whitespace character
fn ws(&mut self) {
while let Some(&(_, c)) = self.cur.peek() {
if c.is_whitespace() {
self.cur.next();
} else {
break;
}
}
}
/// Parses all of a string which is to be considered a "raw literal" in a
/// format string. This is everything outside of the braces.
fn string(&mut self, start: usize) -> &'a str {
// we may not consume the character, peek the iterator
while let Some(&(pos, c)) = self.cur.peek() {
match c {
'{' | '}' => {
return &self.input[start..pos];
}
_ => {
self.cur.next();
}
}
}
&self.input[start..self.input.len()]
}
/// Parses an `Argument` structure, or what's contained within braces inside the format string.
fn argument(&mut self) -> Argument<'a> {
let pos = self.position();
let format = match self.mode {
ParseMode::Format => self.format(),
ParseMode::InlineAsm => self.inline_asm(),
};
// Resolve position after parsing format spec.
let pos = match pos {
Some(position) => position,
None => {
let i = self.curarg;
self.curarg += 1;
ArgumentImplicitlyIs(i)
}
};
Argument { position: pos, format }
}
/// Parses a positional argument for a format. This could either be an
/// integer index of an argument, a named argument, or a blank string.
/// Returns `Some(parsed_position)` if the position is not implicitly
/// consuming a macro argument, `None` if it's the case.
fn position(&mut self) -> Option<Position> {
if let Some(i) = self.integer() {
Some(ArgumentIs(i))
} else {
match self.cur.peek() {
Some(&(_, c)) if rustc_lexer::is_id_start(c) => {
Some(ArgumentNamed(Symbol::intern(self.word())))
}
// This is an `ArgumentNext`.
// Record the fact and do the resolution after parsing the
// format spec, to make things like `{:.*}` work.
_ => None,
}
}
}
/// Parses a format specifier at the current position, returning all of the
/// relevant information in the `FormatSpec` struct.
fn format(&mut self) -> FormatSpec<'a> {
let mut spec = FormatSpec {
fill: None,
align: AlignUnknown,
flags: 0,
precision: CountImplied,
precision_span: None,
width: CountImplied,
width_span: None,
ty: &self.input[..0],
ty_span: None,
};
if !self.consume(':') {
return spec;
}
// fill character
if let Some(&(_, c)) = self.cur.peek() {
match self.cur.clone().nth(1) {
Some((_, '>' | '<' | '^')) => {
spec.fill = Some(c);
self.cur.next();
}
_ => {}
}
}
// Alignment
if self.consume('<') {
spec.align = AlignLeft;
} else if self.consume('>') {
spec.align = AlignRight;
} else if self.consume('^') {
spec.align = AlignCenter;
}
// Sign flags
if self.consume('+') {
spec.flags |= 1 << (FlagSignPlus as u32);
} else if self.consume('-') {
spec.flags |= 1 << (FlagSignMinus as u32);
}
// Alternate marker
if self.consume('#') {
spec.flags |= 1 << (FlagAlternate as u32);
}
// Width and precision
let mut havewidth = false;
if self.consume('0') {
            // Small ambiguity with '0$' as a format string. In theory this is a
            // '0' flag and then an ill-formatted format string with just a '$'
            // and no count, but it is better to instead interpret it as no '0'
            // flag and '0$' as the width.
if self.consume('$') {
spec.width = CountIsParam(0);
havewidth = true;
} else {
spec.flags |= 1 << (FlagSignAwareZeroPad as u32);
}
}
if !havewidth {
let width_span_start = if let Some((pos, _)) = self.cur.peek() { *pos } else { 0 };
let (w, sp) = self.count(width_span_start);
spec.width = w;
spec.width_span = sp;
}
if let Some(start) = self.consume_pos('.') {
if let Some(end) = self.consume_pos('*') {
// Resolve `CountIsNextParam`.
// We can do this immediately as `position` is resolved later.
let i = self.curarg;
self.curarg += 1;
spec.precision = CountIsParam(i);
spec.precision_span =
Some(self.to_span_index(start).to(self.to_span_index(end + 1)));
} else {
let (p, sp) = self.count(start);
spec.precision = p;
spec.precision_span = sp;
}
}
let ty_span_start = self.cur.peek().map(|(pos, _)| *pos);
// Optional radix followed by the actual format specifier
if self.consume('x') {
if self.consume('?') {
spec.flags |= 1 << (FlagDebugLowerHex as u32);
spec.ty = "?";
} else {
spec.ty = "x";
}
} else if self.consume('X') {
if self.consume('?') {
spec.flags |= 1 << (FlagDebugUpperHex as u32);
spec.ty = "?";
} else {
spec.ty = "X";
}
} else if self.consume('?') {
spec.ty = "?";
} else {
spec.ty = self.word();
let ty_span_end = self.cur.peek().map(|(pos, _)| *pos);
if !spec.ty.is_empty() {
spec.ty_span = ty_span_start
.and_then(|s| ty_span_end.map(|e| (s, e)))
.map(|(start, end)| self.to_span_index(start).to(self.to_span_index(end)));
}
}
spec
}
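    // Example walk-through (illustrative): for the spec `:>08.3x?` this method
    // yields `align: AlignRight`, the sign-aware zero-pad flag,
    // `width: CountIs(8)`, `precision: CountIs(3)`, and the debug-lower-hex
    // flag with `ty: "?"`. Note the fill character is only consumed when the
    // *next* character is an alignment marker, so `>` here is alignment, not fill.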
/// Parses an inline assembly template modifier at the current position, returning the modifier
/// in the `ty` field of the `FormatSpec` struct.
fn inline_asm(&mut self) -> FormatSpec<'a> {
let mut spec = FormatSpec {
fill: None,
align: AlignUnknown,
flags: 0,
precision: CountImplied,
precision_span: None,
width: CountImplied,
width_span: None,
ty: &self.input[..0],
ty_span: None,
};
if !self.consume(':') {
return spec;
}
let ty_span_start = self.cur.peek().map(|(pos, _)| *pos);
spec.ty = self.word();
let ty_span_end = self.cur.peek().map(|(pos, _)| *pos);
if !spec.ty.is_empty() {
spec.ty_span = ty_span_start
.and_then(|s| ty_span_end.map(|e| (s, e)))
.map(|(start, end)| self.to_span_index(start).to(self.to_span_index(end)));
}
spec
}
/// Parses a `Count` parameter at the current position. This does not check
/// for 'CountIsNextParam' because that is only used in precision, not
/// width.
fn count(&mut self, start: usize) -> (Count, Option<InnerSpan>) {
if let Some(i) = self.integer() {
if let Some(end) = self.consume_pos('$') {
let span = self.to_span_index(start).to(self.to_span_index(end + 1));
(CountIsParam(i), Some(span))
} else {
(CountIs(i), None)
}
} else {
let tmp = self.cur.clone();
let word = self.word();
if word.is_empty() {
self.cur = tmp;
(CountImplied, None)
} else if self.consume('$') {
(CountIsName(Symbol::intern(word)), None)
} else {
self.cur = tmp;
(CountImplied, None)
}
}
}
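    // Examples (illustrative): `8$` parses as `CountIsParam(8)` with a span
    // covering the digits and the `$`; `width$` parses as
    // `CountIsName("width")`; a bare identifier without a trailing `$` is
    // rolled back and reported as `CountImplied`.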
/// Parses a word starting at the current position. A word is the same as
/// Rust identifier, except that it can't start with `_` character.
fn word(&mut self) -> &'a str {
let start = match self.cur.peek() {
Some(&(pos, c)) if rustc_lexer::is_id_start(c) => {
self.cur.next();
pos
}
_ => {
return "";
}
};
let mut end = None;
while let Some(&(pos, c)) = self.cur.peek() {
if rustc_lexer::is_id_continue(c) {
self.cur.next();
} else {
end = Some(pos);
break;
}
}
let end = end.unwrap_or(self.input.len());
let word = &self.input[start..end];
if word == "_" {
self.err_with_note(
"invalid argument name `_`",
"invalid argument name",
"argument name cannot be a single underscore",
self.to_span_index(start).to(self.to_span_index(end)),
);
}
word
}
/// Optionally parses an integer at the current position. This doesn't deal
/// with overflow at all, it's just accumulating digits.
fn integer(&mut self) -> Option<usize> {
let mut cur = 0;
let mut found = false;
while let Some(&(_, c)) = self.cur.peek() {
if let Some(i) = c.to_digit(10) {
cur = cur * 10 + i as usize;
found = true;
self.cur.next();
} else {
break;
}
}
found.then_some(cur)
}
}
/// Finds the indices of all characters that have been processed and differ between the actual
/// written code (code snippet) and the `InternedString` that gets processed in the `Parser`
/// in order to properly synthesize the intra-string `Span`s for error diagnostics.
fn find_skips_from_snippet(
snippet: Option<string::String>,
str_style: Option<usize>,
) -> (Vec<usize>, bool) {
let snippet = match snippet {
Some(ref s) if s.starts_with('"') || s.starts_with("r#") => s,
_ => return (vec![], false),
};
fn find_skips(snippet: &str, is_raw: bool) -> Vec<usize> {
let mut eat_ws = false;
let mut s = snippet.chars().enumerate().peekable();
let mut skips = vec![];
while let Some((pos, c)) = s.next() {
match (c, s.peek()) {
// skip whitespace and empty lines ending in '\\'
('\\', Some((next_pos, '\n'))) if !is_raw => {
eat_ws = true;
skips.push(pos);
skips.push(*next_pos);
let _ = s.next();
}
('\\', Some((next_pos, '\n' | 'n' | 't'))) if eat_ws => {
skips.push(pos);
skips.push(*next_pos);
let _ = s.next();
}
(' ' | '\n' | '\t', _) if eat_ws => {
skips.push(pos);
}
('\\', Some((next_pos, 'n' | 't' | '0' | '\\' | '\'' | '\"'))) => {
skips.push(*next_pos);
let _ = s.next();
}
('\\', Some((_, 'x'))) if !is_raw => {
for _ in 0..3 {
// consume `\xAB` literal
if let Some((pos, _)) = s.next() {
skips.push(pos);
} else {
break;
}
}
}
('\\', Some((_, 'u'))) if !is_raw => {
if let Some((pos, _)) = s.next() {
skips.push(pos);
}
if let Some((next_pos, next_c)) = s.next() {
if next_c == '{' {
skips.push(next_pos);
                                    let mut i = 0; // consume up to 6 hexadecimal chars + closing `}`
while let (Some((next_pos, c)), true) = (s.next(), i < 7) {
if c.is_digit(16) {
skips.push(next_pos);
} else if c == '}' {
skips.push(next_pos);
break;
} else {
break;
}
i += 1;
}
} else if next_c.is_digit(16) {
skips.push(next_pos);
// We suggest adding `{` and `}` when appropriate, accept it here as if
// it were correct
                        let mut i = 0; // consume up to 6 hexadecimal chars
while let (Some((next_pos, c)), _) = (s.next(), i < 6) {
if c.is_digit(16) {
skips.push(next_pos);
} else {
break;
}
i += 1;
}
}
}
}
                _ if eat_ws => {
                    // first non-whitespace character ends the skipped run
                    eat_ws = false;
                }
_ => {}
}
}
skips
}
    let r_start = str_style.map(|r| r + 1).unwrap_or(0);
    let r_end = str_style.unwrap_or(0);
    let s = &snippet[r_start + 1..snippet.len() - r_end - 1];
(find_skips(s, str_style.is_some()), true)
}
#[cfg(test)]
mod tests;
| 35.630273 | 100 | 0.492096 |
0ee6cb64c028f3ae9e9edfef65b827f0d7e68284 | 178 | pub enum En {
A(Vec<u8>)
}
fn f() -> Result<(), impl core::fmt::Debug> {
    let x: En = loop {};
    // `vec![]` is not valid in pattern position; match the variant and test
    // emptiness with a guard instead.
    assert!(matches!(x, En::A(v) if v.is_empty()));
    Ok::<(), &'static str>(())
}
| 16.181818 | 45 | 0.455056 |
e8263eb04aaaef54f5c50e98be298cf85192ce6b | 852 | use serde::{Deserialize, Serialize};
use super::*;
/// Application specific value.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Body<V> {
inner: Val<V>,
}
impl<V> Body<V> {
pub fn new(val: V) -> Self {
Self {
inner: Val::new(val),
}
}
}
impl<M, V> BasicValue<M, V> for Body<V> {
fn ty(&self) -> BasicType {
BasicType::Val
}
fn as_val(&self) -> &V {
self.inner.as_inner()
}
fn into_val(self) -> V {
self.inner.into_inner()
}
impl_invalid_basic_types!(<M, V> U8, U64, Str, Map);
}
impl<M, V> FromBasicValuePart<M, V> for Body<V> {
type Error = UnexpectedType;
fn expected_types() -> &'static [BasicType] {
&[BasicType::Val]
}
fn from_basic_val(v: V) -> Result<Self, Self::Error> {
Ok(Body::new(v))
}
}
| 18.521739 | 58 | 0.546948 |
899f22b6aaa4ade4943afedc7491e1c957799d98 | 2,287 | //! All errors that can be _generated_ by the compiler.
use std::fmt;
/// Any error that occurs as a result of compiling the source code.
#[derive(Debug)]
pub struct CompilationError {
reason: Reason,
location: Option<Location>,
}
#[derive(Debug)]
pub struct Location {
filename: String,
line_no: u32,
}
#[derive(Debug)]
pub enum Reason {
TooManyCloseBrackets,
TooManyOpenBrackets,
}
impl CompilationError {
pub fn new(reason: Reason, location: Location) -> Self {
CompilationError {
reason,
location: Some(location),
}
}
pub fn without_location(reason: Reason) -> Self {
CompilationError {
reason,
location: None,
}
}
pub fn location(&self) -> Option<&Location> {
self.location.as_ref()
}
pub fn message(&self) -> &'static str {
self.reason.message()
}
pub fn message_identifier(&self) -> u32 {
self.reason.message_identifier()
}
}
impl Reason {
pub fn message_identifier(&self) -> u32 {
use Reason::*;
match self {
TooManyCloseBrackets => 0x001,
TooManyOpenBrackets => 0x002,
}
}
pub fn message(&self) -> &'static str {
use Reason::*;
match self {
TooManyCloseBrackets => "too many ']' brackets. Check that each '[' has a matching ']'",
TooManyOpenBrackets => "too many '[' brackets. Check that each '[' has a matching ']'",
}
}
}
impl Location {
pub fn new(filename: String, line_no: u32) -> Self {
Location { filename, line_no }
}
}
impl std::error::Error for CompilationError {}
impl fmt::Display for CompilationError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let location = self
            .location
            .as_ref()
            .map(|l| format!("{}:", l))
            .unwrap_or_default();
write!(
f,
"error[BF{:04x}]:{} {}",
self.message_identifier(),
location,
self.message()
)
}
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}:{}", self.filename, self.line_no)
}
}
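// Example rendering (illustrative): a `TooManyCloseBrackets` error located at
// `main.bf:3` formats as
// `error[BF0001]:main.bf:3: too many ']' brackets. Check that each '[' has a matching ']'`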
| 22.87 | 100 | 0.551377 |
3aad748f0c471788e4ea9eb7fb95ce21ab3fb695 | 1,647 |
pub struct Icon8kPlus {
props: crate::Props,
}
impl yew::Component for Icon8kPlus {
type Properties = crate::Props;
type Message = ();
fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
{
Self { props }
}
fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
{
true
}
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
{
false
}
fn view(&self) -> yew::prelude::Html
{
yew::prelude::html! {
<svg
class=self.props.class.unwrap_or("")
width=self.props.size.unwrap_or(24).to_string()
height=self.props.size.unwrap_or(24).to_string()
viewBox="0 0 24 24"
fill=self.props.fill.unwrap_or("none")
stroke=self.props.color.unwrap_or("currentColor")
stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
>
<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M6.5 12.5H8V14H6.5zm0-2.5H8v1.5H6.5zM19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM9.5 14c0 .55-.45 1-1 1H6c-.55 0-1-.45-1-1v-4c0-.55.45-1 1-1h2.5c.55 0 1 .45 1 1v4zm6.5 1h-1.75l-1.75-2.25V15H11V9h1.5v2.25L14.25 9H16l-2.25 3L16 15zm4-2.5h-1.5V14h-1v-1.5H16v-1h1.5V10h1v1.5H20v1z"/></svg>
</svg>
}
}
}
| 35.804348 | 460 | 0.585914 |
bfa0f03ee0ef76a11c5596617790539e80ff54bd | 428 | extern crate autocfg;
/// Tests that autocfg uses the RUSTFLAGS environment variable when running
/// rustc.
#[test]
fn test_with_sysroot() {
std::env::set_var("RUSTFLAGS", "-L target/debug/deps -L target/debug");
std::env::set_var("OUT_DIR", "target");
// Ensure HOST != TARGET.
std::env::set_var("HOST", "lol");
let ac = autocfg::AutoCfg::new().unwrap();
assert!(ac.probe_sysroot_crate("autocfg"));
}
| 30.571429 | 75 | 0.658879 |
56883a4f017e3bd444815976c8d9a392c1e1d402 | 536 | // option1.rs
// Make me compile! Execute `rustlings hint option1` for hints
// you can modify anything EXCEPT for this function's sig
fn print_number(maybe_number: Option<u16>) {
println!("printing: {}", maybe_number.unwrap());
}
fn main() {
print_number(Some(13));
print_number(Some(99));
let mut numbers: [Option<u16>; 5] = [Some(0); 5];
for iter in 0..5 {
        let number_to_add: u16 = ((iter * 1235) + 2) / (4 * 16);
numbers[iter as usize] = Some(number_to_add);
}
}
| 24.363636 | 62 | 0.593284 |
e53f03ea22477881282cc43a0f8cbdacba836e42 | 705 | //! Test that the derive macros properly name all the types and traits used
// Ensure no prelude is available
#![no_implicit_prelude]
#![allow(dead_code, unused_imports)]
use ::s_with::{DeserializeFromStr, SerializeDisplay};
// Needed for 1.46, unused in 1.50
use ::std::panic;
#[derive(DeserializeFromStr, SerializeDisplay)]
#[serde_with(crate = "::s_with")]
struct A;
impl ::std::str::FromStr for A {
type Err = ::std::string::String;
fn from_str(_: &str) -> ::std::result::Result<Self, Self::Err> {
::std::unimplemented!()
}
}
impl ::std::fmt::Display for A {
fn fmt(&self, _: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::std::unimplemented!()
}
}
| 26.111111 | 76 | 0.643972 |
18df83706646123284cdd72170c3138a0795da98 | 1,320 | // Copyright (C) 2018 Thiago Santos <[email protected]>
// Sebastian Dröge <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use DiscovererVideoInfo;
use glib::translate::*;
use gst;
use gst_pbutils_sys;
impl DiscovererVideoInfo {
pub fn get_framerate(&self) -> gst::Fraction {
unsafe {
gst::Fraction::new(
gst_pbutils_sys::gst_discoverer_video_info_get_framerate_num(self.to_glib_none().0)
as i32,
gst_pbutils_sys::gst_discoverer_video_info_get_framerate_denom(
self.to_glib_none().0,
) as i32,
)
}
}
pub fn get_par(&self) -> gst::Fraction {
unsafe {
gst::Fraction::new(
gst_pbutils_sys::gst_discoverer_video_info_get_par_num(self.to_glib_none().0)
as i32,
gst_pbutils_sys::gst_discoverer_video_info_get_par_denom(self.to_glib_none().0)
as i32,
)
}
}
}
| 33 | 99 | 0.609091 |
0a1de045b12902c1a3df70a5cb38f39a79fc706b | 13,153 | pub struct MapperFactory;
use rom::Mirrorings;
use rom::RomHeader;
use register::Register;
impl MapperFactory {
pub fn create(header: &RomHeader) -> Box<Mapper> {
match header.mapper_num() {
0 => Box::new(NRomMapper::new(header)),
1 => Box::new(MMC1Mapper::new(header)),
2 => Box::new(UNRomMapper::new(header)),
3 => Box::new(CNRomMapper::new()),
4 => Box::new(MMC3Mapper::new(header)),
_ => panic!("Unsupported mapper {}", header.mapper_num())
}
}
}
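// Typical call site (sketch; the `RomHeader` value comes from the rom module
// and is an assumption here, not constructed in this file):
//
//     let mapper = MapperFactory::create(&header);
//     let prg_addr = mapper.map(0x8000);             // CPU address -> PRG-ROM offset
//     let chr_addr = mapper.map_for_chr_rom(0x0000); // PPU address -> CHR-ROM offset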
pub trait Mapper {
// Maps 0x8000 - 0xFFFF to the program rom address
fn map(&self, address: u32) -> u32;
// Maps 0x0000 - 0x1FFF to the character rom address
fn map_for_chr_rom(&self, address: u32) -> u32;
// Writes control register inside in general
fn store(&mut self, address: u32, value: u8);
fn has_mirroring_type(&self) -> bool;
fn mirroring_type(&self) -> Mirrorings;
// @TODO: MMC3Mapper specific. Should this method be here?
fn drive_irq_counter(&mut self) -> bool;
}
pub struct NRomMapper {
program_bank_num: u8
}
impl NRomMapper {
fn new(header: &RomHeader) -> Self {
NRomMapper {
program_bank_num: header.prg_rom_bank_num()
}
}
}
impl Mapper for NRomMapper {
/**
* if program_bank_num == 1:
* 0x8000 - 0xBFFF: 0x0000 - 0x3FFF
* 0xC000 - 0xFFFF: 0x0000 - 0x3FFF
* else:
* 0x8000 - 0xFFFF: 0x0000 - 0x7FFF
*/
fn map(&self, mut address: u32) -> u32 {
if self.program_bank_num == 1 && address >= 0xC000 {
address -= 0x4000;
}
address - 0x8000
}
/**
* 0x0000 - 0x1FFF: 0x0000 - 0x1FFF
*/
fn map_for_chr_rom(&self, address: u32) -> u32 {
address
}
    /**
     * Nothing to do: NROM has no mapper registers, so writes are ignored.
     */
    fn store(&mut self, _address: u32, _value: u8) {}
fn has_mirroring_type(&self) -> bool {
false
}
fn mirroring_type(&self) -> Mirrorings {
Mirrorings::SingleScreen // dummy
}
fn drive_irq_counter(&mut self) -> bool {
false
}
}
pub struct MMC1Mapper {
program_bank_num: u8,
control_register: Register<u8>,
chr_bank0_register: Register<u8>,
chr_bank1_register: Register<u8>,
prg_bank_register: Register<u8>,
latch: Register<u8>,
register_write_count: u32
}
impl MMC1Mapper {
fn new(header: &RomHeader) -> Self {
let mut control_register = Register::<u8>::new();
control_register.store(0x0C);
MMC1Mapper {
program_bank_num: header.prg_rom_bank_num(),
control_register: control_register,
chr_bank0_register: Register::<u8>::new(),
chr_bank1_register: Register::<u8>::new(),
prg_bank_register: Register::<u8>::new(),
latch: Register::<u8>::new(),
register_write_count: 0
}
}
}
impl Mapper for MMC1Mapper {
fn map(&self, address: u32) -> u32 {
        let bank: u32;
let mut offset = address & 0x3FFF;
let bank_num = self.prg_bank_register.load() as u32 & 0x0F;
match self.control_register.load_bits(2, 2) {
0 | 1 => {
// switch 32KB at 0x8000, ignoring low bit of bank number
// TODO: Fix me
offset = offset | (address & 0x4000);
bank = bank_num & 0x0E;
},
2 => {
// fix first bank at 0x8000 and switch 16KB bank at 0xC000
bank = match address < 0xC000 {
true => 0,
false => bank_num
};
},
_ /*3*/ => {
// fix last bank at 0xC000 and switch 16KB bank at 0x8000
bank = match address >= 0xC000 {
true => self.program_bank_num as u32 - 1,
false => bank_num
};
}
};
bank * 0x4000 + offset
}
fn map_for_chr_rom(&self, address: u32) -> u32 {
        let bank: u32;
let mut offset = address & 0x0FFF;
if self.control_register.load_bit(4) == 0 {
// switch 8KB at a time
bank = self.chr_bank0_register.load() as u32 & 0x1E;
offset = offset | (address & 0x1000);
} else {
// switch two separate 4KB banks
bank = match address < 0x1000 {
true => self.chr_bank0_register.load(),
false => self.chr_bank1_register.load()
} as u32 & 0x1f;
}
bank * 0x1000 + offset
}
fn store(&mut self, address: u32, value: u8) {
if (value & 0x80) != 0 {
self.register_write_count = 0;
self.latch.clear();
if (address & 0x6000) == 0 {
self.control_register.store_bits(2, 2, 3);
}
} else {
self.latch.store(((value & 1) << 4) | (self.latch.load() >> 1));
self.register_write_count += 1;
if self.register_write_count >= 5 {
let val = self.latch.load();
match address & 0x6000 {
0x0000 => self.control_register.store(val),
0x2000 => self.chr_bank0_register.store(val),
0x4000 => self.chr_bank1_register.store(val),
_ /*0x6000*/ => self.prg_bank_register.store(val)
};
self.register_write_count = 0;
self.latch.clear();
}
}
}
fn has_mirroring_type(&self) -> bool {
true
}
fn mirroring_type(&self) -> Mirrorings {
match self.control_register.load_bits(0, 2) {
0 | 1 => Mirrorings::SingleScreen,
2 => Mirrorings::Vertical,
_ /*3*/ => Mirrorings::Horizontal
}
}
fn drive_irq_counter(&mut self) -> bool {
false
}
}
struct UNRomMapper {
program_bank_num: u8,
register: Register<u8>
}
impl UNRomMapper {
fn new(header: &RomHeader) -> Self {
UNRomMapper {
program_bank_num: header.prg_rom_bank_num(),
register: Register::<u8>::new()
}
}
}
impl Mapper for UNRomMapper {
fn map(&self, address: u32) -> u32 {
let bank = match address < 0xC000 {
true => self.register.load(),
false => self.program_bank_num - 1
} as u32;
let offset = address & 0x3FFF;
0x4000 * bank + offset
}
fn map_for_chr_rom(&self, address: u32) -> u32 {
address
}
    fn store(&mut self, _address: u32, value: u8) {
self.register.store(value & 0xF);
}
fn has_mirroring_type(&self) -> bool {
false
}
fn mirroring_type(&self) -> Mirrorings {
Mirrorings::SingleScreen // dummy
}
fn drive_irq_counter(&mut self) -> bool {
false
}
}
struct CNRomMapper {
register: Register<u8>
}
impl CNRomMapper {
fn new() -> Self {
CNRomMapper {
register: Register::<u8>::new()
}
}
}
impl Mapper for CNRomMapper {
fn map(&self, address: u32) -> u32 {
address - 0x8000
}
fn map_for_chr_rom(&self, address: u32) -> u32 {
self.register.load() as u32 * 0x2000 + (address & 0x1FFF)
}
    fn store(&mut self, _address: u32, value: u8) {
self.register.store(value & 0xF);
}
fn has_mirroring_type(&self) -> bool {
false
}
fn mirroring_type(&self) -> Mirrorings {
Mirrorings::SingleScreen // dummy
}
fn drive_irq_counter(&mut self) -> bool {
false
}
}
struct MMC3Mapper {
program_bank_num: u8,
character_bank_num: u8,
register0: Register<u8>,
register1: Register<u8>,
register2: Register<u8>,
register3: Register<u8>,
register4: Register<u8>,
register5: Register<u8>,
register6: Register<u8>,
register7: Register<u8>,
program_register0: Register<u8>,
program_register1: Register<u8>,
character_register0: Register<u8>,
character_register1: Register<u8>,
character_register2: Register<u8>,
character_register3: Register<u8>,
character_register4: Register<u8>,
character_register5: Register<u8>,
irq_counter: u8,
irq_counter_reload: bool,
irq_enabled: bool
}
impl MMC3Mapper {
fn new(header: &RomHeader) -> Self {
MMC3Mapper {
program_bank_num: header.prg_rom_bank_num(),
character_bank_num: header.chr_rom_bank_num(),
register0: Register::<u8>::new(),
register1: Register::<u8>::new(),
register2: Register::<u8>::new(),
register3: Register::<u8>::new(),
register4: Register::<u8>::new(),
register5: Register::<u8>::new(),
register6: Register::<u8>::new(),
register7: Register::<u8>::new(),
program_register0: Register::<u8>::new(),
program_register1: Register::<u8>::new(),
character_register0: Register::<u8>::new(),
character_register1: Register::<u8>::new(),
character_register2: Register::<u8>::new(),
character_register3: Register::<u8>::new(),
character_register4: Register::<u8>::new(),
character_register5: Register::<u8>::new(),
irq_counter: 0,
irq_counter_reload: false,
irq_enabled: true
}
}
}
impl Mapper for MMC3Mapper {
fn map(&self, address: u32) -> u32 {
let bank = match address {
0x8000..=0x9FFF => match self.register0.is_bit_set(6) {
true => self.program_bank_num * 2 - 2,
false => self.program_register0.load()
},
0xA000..=0xBFFF => self.program_register1.load(),
0xC000..=0xDFFF => match self.register0.is_bit_set(6) {
true => self.program_register0.load(),
false => self.program_bank_num * 2 - 2
},
_ => self.program_bank_num * 2 - 1
};
        // I couldn't find this in the spec, but it seems we need to wrap the
        // 8K bank index by the total number of 8K program banks (program_bank_num * 2).
((bank as u32) % ((self.program_bank_num as u32) * 2)) * 0x2000 + (address & 0x1FFF)
}
fn map_for_chr_rom(&self, address: u32) -> u32 {
let bank = match self.register0.is_bit_set(7) {
true => match address & 0x1FFF {
0x0000..=0x03FF => self.character_register2.load(),
0x0400..=0x07FF => self.character_register3.load(),
0x0800..=0x0BFF => self.character_register4.load(),
0x0C00..=0x0FFF => self.character_register5.load(),
0x1000..=0x13FF => self.character_register0.load() & 0xFE,
0x1400..=0x17FF => self.character_register0.load() | 1,
0x1800..=0x1BFF => self.character_register1.load() & 0xFE,
_ => self.character_register1.load() | 1
},
false => match address & 0x1FFF {
0x0000..=0x03FF => self.character_register0.load() & 0xFE,
0x0400..=0x07FF => self.character_register0.load() | 1,
0x0800..=0x0BFF => self.character_register1.load() & 0xFE,
0x0C00..=0x0FFF => self.character_register1.load() | 1,
0x1000..=0x13FF => self.character_register2.load(),
0x1400..=0x17FF => self.character_register3.load(),
0x1800..=0x1BFF => self.character_register4.load(),
_ => self.character_register5.load()
}
};
        // I couldn't find this in the spec, but it seems we need to wrap the
        // 1K bank index by the total number of 1K character banks (character_bank_num * 8).
((bank as u32) % ((self.character_bank_num as u32) * 8)) * 0x400 + (address & 0x3FF)
}
fn store(&mut self, address: u32, value: u8) {
match address {
0x8000..=0x9FFF => match (address & 1) == 0 {
true => self.register0.store(value),
false => {
self.register1.store(value);
match self.register0.load_bits(0, 3) {
0 => self.character_register0.store(value & 0xFE),
1 => self.character_register1.store(value & 0xFE),
2 => self.character_register2.store(value),
3 => self.character_register3.store(value),
4 => self.character_register4.store(value),
5 => self.character_register5.store(value),
6 => self.program_register0.store(value & 0x3F),
_ => self.program_register1.store(value & 0x3F)
};
}
},
0xA000..=0xBFFF => match (address & 1) == 0 {
true => self.register2.store(value),
false => self.register3.store(value)
},
0xC000..=0xDFFF => {
match (address & 1) == 0 {
true => self.register4.store(value),
false => self.register5.store(value)
};
self.irq_counter_reload = true;
},
_ => match (address & 1) == 0 {
true => {
self.register6.store(value);
self.irq_enabled = false;
},
false => {
self.register7.store(value);
self.irq_enabled = true;
}
}
};
}
fn has_mirroring_type(&self) -> bool {
true
}
fn mirroring_type(&self) -> Mirrorings {
match self.register2.is_bit_set(0) {
true => Mirrorings::Horizontal,
false => Mirrorings::Vertical
}
}
fn drive_irq_counter(&mut self) -> bool {
match self.irq_counter_reload {
true => {
self.irq_counter = self.register4.load();
self.irq_counter_reload = false;
false
},
false => match self.irq_enabled {
true => match self.irq_counter > 0 {
true => {
self.irq_counter -= 1;
match self.irq_counter == 0 {
true => {
self.irq_counter_reload = true;
true
}
false => false
}
},
false => false
},
false => false
}
}
}
}
#[cfg(test)]
mod tests_nrom_mapper {
use super::*;
#[test]
fn initialize() {
NRomMapper{program_bank_num: 1};
}
#[test]
fn map_with_program_bank_num_1() {
let m = NRomMapper{program_bank_num: 1};
assert_eq!(0x0000, m.map(0x8000));
assert_eq!(0x3FFF, m.map(0xBFFF));
assert_eq!(0x0000, m.map(0xC000));
assert_eq!(0x3FFF, m.map(0xFFFF));
}
#[test]
fn map_with_program_bank_num_2() {
let m = NRomMapper{program_bank_num: 2};
assert_eq!(0x0000, m.map(0x8000));
assert_eq!(0x3FFF, m.map(0xBFFF));
assert_eq!(0x4000, m.map(0xC000));
assert_eq!(0x7FFF, m.map(0xFFFF));
}
#[test]
fn map_for_chr_rom() {
let m = NRomMapper{program_bank_num: 1};
assert_eq!(0x0000, m.map_for_chr_rom(0x0000));
assert_eq!(0x1FFF, m.map_for_chr_rom(0x1FFF));
}
}
| 26.149105 | 87 | 0.622748 |
b94630550a94a54856453e72b3698949c3cba5c1 | 16,354 | #![cfg_attr(not(feature = "std"), no_std)]
use ink_lang as ink;
#[ink::contract]
mod clinical_trial_data {
// import ink data structures
use ink_prelude::string::String;
use ink_prelude::vec::Vec;
use ink_prelude::vec;
use ink_storage::Mapping;
use ink_storage::traits::SpreadAllocate;
#[ink(storage)]
#[derive(SpreadAllocate)]
pub struct ClinicalTrialData {
// data
raw_records: Vec<(String, String, String)>, // [("1", "Treatment", "Positive"), ("2", "Placebo", "Negative"), ...]
preprocessed_records: Vec<(String, String, String)>, // [("1", "Treatment", "Positive"), ("2", "Placebo", "Negative"), ...]
data_summary: Mapping<String, u128>, // {'Treatment Positive': 3, 'Treatment Negative': 385, 'Placebo Positive': 28, 'Placebo Negative': 358}
// study characteristics
p_thresh: u128, // p-value threshold
stat_test: String, // e.g. fishers_exact_test
        p_value: Vec<u128>, // [numerator, denominator]; compute the resulting p-value in the frontend as numerator/denominator
        result: bool, // true for a significant result, i.e. p_value < p_thresh, otherwise false
}
impl ClinicalTrialData {
        // creates a new ClinicalTrialData contract initialized to the given values (done on polkadot/substrate UI)
#[ink(constructor)]
pub fn new(custom_p_thresh: u128, custom_stat_test: String) -> Self {
// to initialize ink_storage::Mapping
ink_lang::utils::initialize_contract(|contract: &mut Self| {
contract.p_thresh = custom_p_thresh;
contract.stat_test = custom_stat_test;
})
}
        // creates a new ClinicalTrialData contract initialized to default values (done on polkadot/substrate UI)
#[ink(constructor)]
pub fn default() -> Self {
// to initialize ink_storage::Mapping
ink_lang::utils::initialize_contract(|contract: &mut Self| {
contract.result = false;
contract.p_thresh = 5;
contract.stat_test = String::from("fishers_exact_test");
})
}
        // sets the p-value threshold of the ClinicalTrialData contract
#[ink(message)]
pub fn set_p_thresh(&mut self, p: u128) {
self.p_thresh = p;
}
        // sets the stat test of the ClinicalTrialData contract
#[ink(message)]
pub fn set_stat_test(&mut self, stat_test: String) {
self.stat_test = stat_test;
}
// gets the p-threshold of the ClinicalTrialData contract
#[ink(message)]
pub fn get_p_thresh(&self) -> u128 {
self.p_thresh
}
// gets the stat test of the ClinicalTrialData contract
#[ink(message)]
pub fn get_stat_test(&self) -> String {
self.stat_test.clone()
}
// uploads raw records to contract storage from frontend with a whole array, use createApiType in frontend
#[ink(message)]
pub fn upload_all_raw(&mut self, records: Vec<(String, String, String)>) {
for record in records {
self.raw_records.push(record);
}
}
// uploads raw records to contract storage from frontend one-by-one
#[ink(message)]
pub fn upload_one_raw(&mut self, patient_id: String, group: String, outcome: String) {
let record: (String, String, String) = (patient_id, group, outcome);
self.raw_records.push(record);
}
// returns raw records from contract storage to frontend
#[ink(message)]
pub fn download_raw(&self) -> Vec<(String, String, String)>{
self.raw_records.clone()
}
// returns preprocessed records from contract storage to frontend
#[ink(message)]
pub fn download_preprocessed(&self) -> Vec<(String, String, String)>{
self.preprocessed_records.clone()
}
// clears raw records in contract storage
#[ink(message)]
pub fn clear_raw(&mut self) {
self.raw_records.clear();
}
// clears preprocessed records in contract storage
#[ink(message)]
pub fn clear_preprocessed(&mut self) {
self.preprocessed_records.clear();
}
// uploads preprocessed record to contract storage from frontend and returns stat test results
#[ink(message)]
pub fn upload_all_preprocessed(&mut self, records: Vec<(String, String, String)>) {
for record in records {
self.preprocessed_records.push(record);
}
self.aggregate_data();
self.run_stat_test();
}
        // uploads one preprocessed record to contract storage from frontend (call run_on_preprocessed afterwards to run the stat test)
#[ink(message)]
pub fn upload_one_preprocessed(&mut self, patient_id: String, group: String, outcome: String) {
let record: (String, String, String) = (patient_id, group, outcome);
self.preprocessed_records.push(record);
}
// runs test after upload_one_proprocessed is done
#[ink(message)]
pub fn run_on_preprocessed(&mut self) {
self.aggregate_data();
self.run_stat_test();
}
// gets result
#[ink(message)]
pub fn get_result(&self) -> bool {
self.result
}
// gets p-value
#[ink(message)]
pub fn get_p_value(&self) -> Vec<u128> {
self.p_value.clone()
}
// aggregates preprocessed records to data summary (access: owner)
pub fn aggregate_data(&mut self) {
            // 1. initialize variables
let mut treatment_pos: u128 = 0;
let mut treatment_neg: u128 = 0;
let mut placebo_pos: u128 = 0;
let mut placebo_neg: u128 = 0;
// 2. iterate through preprocessed records
for patient in self.preprocessed_records.iter() {
if patient.1 == "Treatment" {
if patient.2 == "Yes" {
treatment_pos += 1;
} else {
treatment_neg += 1;
}
} else {
if patient.2 == "Yes" {
placebo_pos += 1;
} else {
placebo_neg += 1;
}
}
}
// 3. insert into Mapping into contract storage
self.data_summary.insert(String::from("Treatment Positive"), &treatment_pos);
self.data_summary.insert(String::from("Treatment Negative"), &treatment_neg);
self.data_summary.insert(String::from("Placebo Positive"), &placebo_pos);
self.data_summary.insert(String::from("Placebo Negative"), &placebo_neg);
}
// calculates factorial iteratively
pub fn factorial(&self, num: u128) -> u128 {
            if num == 0 {
                1
            } else {
                (1..=num).fold(1, |acc, v| acc * v)
            }
}
        // calculates the binomial coefficient C(val1, val2) via factorials (assumes val2 <= val1)
        pub fn binomial(&self, val1: u128, val2: u128) -> u128 {
            self.factorial(val1) / (self.factorial(val2) * self.factorial(val1 - val2))
        }
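        // Illustrative alternative (an addition for exposition; the contract
        // keeps using the factorial form above): a multiplicative binomial
        // avoids the u128 overflow that factorial(n) hits once n > 34. Each
        // division is exact because the running product after step i is
        // itself the binomial coefficient C(n, i + 1). Assumes k <= n.
        pub fn binomial_multiplicative(&self, n: u128, k: u128) -> u128 {
            let k = k.min(n - k); // symmetry: C(n, k) == C(n, n - k)
            let mut result: u128 = 1;
            for i in 0..k {
                // result == C(n, i); C(n, i) * (n - i) is divisible by (i + 1)
                result = result * (n - i) / (i + 1);
            }
            result
        }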
        // calculates the (unnormalised) right-tail sum of the hypergeometric distribution used in fisher's exact test
        pub fn hypergeom_cdf(&self, population: u128, cured: u128, treatment: u128, mut observed: u128) -> u128 {
let mut hypergeom_sum: u128 = 0;
            while observed <= cured && observed <= treatment {
hypergeom_sum += self.binomial(cured, observed) * self.binomial(population-cured, treatment-observed);
observed += 1;
}
hypergeom_sum
}
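        // Worked example (illustrative): with population = 4, cured = 2,
        // treatment = 2 and observed = 1 the loop sums
        // C(2,1)*C(2,1) + C(2,2)*C(2,0) = 4 + 1 = 5; dividing by the caller's
        // total C(4,2) = 6 gives a right-tail p-value of 5/6.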
// runs statistical test on data summary
pub fn run_stat_test(&mut self) {
// 1. read self.data_summary
let treatment_pos = self.data_summary.get(String::from("Treatment Positive")).unwrap();
let treatment_neg = self.data_summary.get(String::from("Treatment Negative")).unwrap();
let placebo_pos = self.data_summary.get(String::from("Treatment Positive")).unwrap();
let placebo_neg = self.data_summary.get(String::from("Treatment Negative")).unwrap();
// 2. get hypergeomtric parameters
let population = treatment_pos + treatment_neg + placebo_pos + placebo_neg;
let cured = treatment_pos + placebo_pos;
let treatment = treatment_pos + treatment_neg;
let observed = treatment_pos;
// 3. significant figure multiplier
let scalar: u128 = 100; // significant figure multiplier
let scaled_p: u128 = self.p_thresh * self.binomial(population, treatment);
let mut scaled_right_cdf: u128 = self.hypergeom_cdf(population, cured, treatment, observed);
if scaled_right_cdf > self.binomial(population, treatment)/2 {
scaled_right_cdf = self.binomial(population, treatment) - scaled_right_cdf;
}
// 4. compare p-value with p-thresh
self.p_value = vec![scaled_right_cdf, self.binomial(population, treatment)];
if scaled_right_cdf*scalar < scaled_p {
self.result = true;
}
}
}
// off-chain unit tests for ClinicalTrialData contract
#[cfg(test)]
mod tests {
use super::*; // imports all definitions from the outer scope
use ink_lang as ink; // imports `ink_lang` so we can use `#[ink::test]`
use ink_prelude::string::ToString;
#[ink::test]
fn init_works() {
let research = ClinicalTrialData::default();
assert!(research.get_p_thresh() == 5 && research.get_stat_test() == String::from("fishers_exact_test"));
let research = ClinicalTrialData::new(2, String::from("t_test"));
assert!(research.get_p_thresh() == 2 && research.get_stat_test() == String::from("t_test"));
}
#[ink::test]
fn upload_all_works() {
let sample: Vec<(String, String, String)> = vec![
("1", "Treatment", "Yes"), ("2", "Treatment", "Yes"), ("3", "Treatment", "Yes"),
("4", "Treatment", "Yes"), ("5", "Treatment", "No"), ("6", "Treatment", "No"),
("7", "Treatment", "No"), ("8", "Placebo", "Yes"), ("9", "Placebo", "Yes"),
("10", "Placebo", "Yes"),("111", "Placebo", "Yes"), ("112", "Placebo", "Yes"),
("113", "Placebo", "Yes"), ("114", "Placebo", "Yes"), ("115", "Placebo", "Yes"),
("431", "Placebo", "Yes"), ("432", "Placebo", "Yes"), ("433", "Placebo", "No"),
("434", "Placebo", "No"), ("435", "Placebo", "No"), ("436", "Placebo", "No"),
("440", "Placebo", "No"), ("23", "Treatment", "Yes"), ("24", "Treatment", "Yes"),
("25", "Treatment", "Yes"), ("26", "Treatment", "Yes"), ("27", "Treatment", "Yes"),
("28", "Treatment", "Yes"), ("29", "Treatment", "Yes"), ("30", "Treatment", "Yes"),
("31", "Treatment", "Yes"), ("45", "Treatment", "Yes"),("85", "Treatment", "Yes")]
.iter()
.map(|x| (x.0.to_string(), x.1.to_string(), x.2.to_string()))
.collect::<Vec<(String, String, String)>>();
// initialize default contract with p = 0.05 and fisher's exact test
let mut research = ClinicalTrialData::default();
// test raw records upload
research.upload_all_raw(sample.clone());
assert!(research.raw_records == sample);
// test raw records download
let download = research.download_raw();
assert!(download == research.raw_records);
// clear raw records
research.clear_raw();
assert!(research.raw_records.len() == 0);
// test preprocessed records upload
research.upload_all_preprocessed(sample.clone());
assert!(research.preprocessed_records == sample);
// test data aggregation
assert!(research.data_summary.get(&String::from("Treatment Positive")).unwrap() == 15);
assert!(research.data_summary.get(&String::from("Treatment Negative")).unwrap() == 3);
assert!(research.data_summary.get(&String::from("Placebo Positive")).unwrap() == 10);
assert!(research.data_summary.get(&String::from("Placebo Negative")).unwrap() == 5);
// test statistical test
            ink_env::debug_println!("p-value: {:?}", research.p_value);
            // with the placebo counts now read from the correct keys,
            // p ≈ 0.24 > 0.05 for this sample, so the result is not significant
            assert!(research.result == false);
}
#[ink::test]
fn upload_one_by_one_works() {
let sample: Vec<(String, String, String)> = vec![
("1", "Treatment", "Yes"), ("2", "Treatment", "Yes"), ("3", "Treatment", "Yes"),
("4", "Treatment", "Yes"), ("5", "Treatment", "No"), ("6", "Treatment", "No"),
("7", "Treatment", "No"), ("8", "Placebo", "Yes"), ("9", "Placebo", "Yes"),
("10", "Placebo", "Yes"),("111", "Placebo", "Yes"), ("112", "Placebo", "Yes"),
("113", "Placebo", "Yes"), ("114", "Placebo", "Yes"), ("115", "Placebo", "Yes"),
("431", "Placebo", "Yes"), ("432", "Placebo", "Yes"), ("433", "Placebo", "No"),
("434", "Placebo", "No"), ("435", "Placebo", "No"), ("436", "Placebo", "No"),
("440", "Placebo", "No"), ("23", "Treatment", "Yes"), ("24", "Treatment", "Yes"),
("25", "Treatment", "Yes"), ("26", "Treatment", "Yes"), ("27", "Treatment", "Yes"),
("28", "Treatment", "Yes"), ("29", "Treatment", "Yes"), ("30", "Treatment", "Yes"),
("31", "Treatment", "Yes"), ("45", "Treatment", "Yes"),("85", "Treatment", "Yes")]
.iter()
.map(|x| (x.0.to_string(), x.1.to_string(), x.2.to_string()))
.collect::<Vec<(String, String, String)>>();
// initialize default contract with p = 0.05 and fisher's exact test
let mut research = ClinicalTrialData::default();
// set p-value and stat test
research.set_p_thresh(6);
research.set_stat_test(String::from("difference_of_means_test"));
assert!(research.p_thresh == 6);
assert!(research.stat_test == "difference_of_means_test");
// revert back to default since contract doesn't have difference of means yet
research.set_p_thresh(5);
research.set_stat_test(String::from("fishers_exact_test"));
// test raw records upload one by one
for patient in sample.clone().iter() {
research.upload_one_raw(patient.0.clone(), patient.1.clone(), patient.2.clone())
}
assert!(research.raw_records == sample);
// test raw records download
let download = research.download_raw();
assert!(download == research.raw_records);
// test preprocessed records upload
for patient in sample.clone().iter() {
research.upload_one_preprocessed(patient.0.clone(), patient.1.clone(), patient.2.clone())
};
research.run_on_preprocessed();
assert!(research.preprocessed_records == sample);
// test data aggregation
assert!(research.data_summary.get(&String::from("Treatment Positive")).unwrap() == 15);
assert!(research.data_summary.get(&String::from("Treatment Negative")).unwrap() == 3);
assert!(research.data_summary.get(&String::from("Placebo Positive")).unwrap() == 10);
assert!(research.data_summary.get(&String::from("Placebo Negative")).unwrap() == 5);
            // test statistical test
            ink_env::debug_println!("p-value: {:?}", research.p_value);
            // same sample as above: p ≈ 0.24 > 0.05, so the result is not significant
            assert!(research.result == false);
}
}
}
| 43.844504 | 149 | 0.548184 |
8710f032d44f9fb6b8f520c287c7ac8490fe5e11 | 1,830 | /*
* Copyright (c) 2012-2020 MIRACL UK Ltd.
*
* This file is part of MIRACL Core
* (see https://github.com/miracl/core).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::arch::Chunk;
use crate::nums256w::big::NLEN;
// Base Bits= 56
// nums256 Modulus
pub const MODULUS: [Chunk; NLEN] = [
0xFFFFFFFFFFFF43,
0xFFFFFFFFFFFFFF,
0xFFFFFFFFFFFFFF,
0xFFFFFFFFFFFFFF,
0xFFFFFFFF,
];
pub const ROI: [Chunk; NLEN] = [
0xFFFFFFFFFFFF42,
0xFFFFFFFFFFFFFF,
0xFFFFFFFFFFFFFF,
0xFFFFFFFFFFFFFF,
0xFFFFFFFF,
];
pub const R2MODP: [Chunk; NLEN] = [0x89000000000000, 0x8B, 0x0, 0x0, 0x0];
pub const MCONST: Chunk = 0xBD;
// nums256w Curve
pub const CURVE_COF_I: isize = 1;
pub const CURVE_B_I: isize = 152961;
pub const CURVE_COF: [Chunk; NLEN] = [0x1, 0x0, 0x0, 0x0, 0x0];
pub const CURVE_B: [Chunk; NLEN] = [0x25581, 0x0, 0x0, 0x0, 0x0];
pub const CURVE_ORDER: [Chunk; NLEN] = [
0xAB20294751A825,
0x8275EA265C6020,
0xFFFFFFFFFFE43C,
0xFFFFFFFFFFFFFF,
0xFFFFFFFF,
];
pub const CURVE_GX: [Chunk; NLEN] = [
0x52EE1EB21AACB1,
0x9B0903D4C73ABC,
0xA04F42CB098357,
0x5AAADB61297A95,
0xBC9ED6B6,
];
pub const CURVE_GY: [Chunk; NLEN] = [
0xB5B9CB2184DE9F,
0xC3D115310FBB80,
0xF77E04E035C955,
0x3399B6A673448B,
0xD08FC0F1,
];
| 26.911765 | 75 | 0.698907 |
f4b1a89fa17dc0a3e3d7c18639e6764133578b8f | 2,531 | // Copyright (c) 2021 Alibaba Cloud
//
// SPDX-License-Identifier: Apache-2.0
//
use anyhow::*;
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
use crate::kbc_modules::{KbcCheckInfo, KbcInstance, KBC_MODULE_LIST};
lazy_static! {
pub static ref KBC_RUNTIME: Arc<Mutex<KbcRuntime>> = Arc::new(Mutex::new(KbcRuntime::new()));
}
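// Typical access pattern (sketch; error handling elided and the call site is
// an assumption, not shown in this file): callers lock the shared runtime and
// dispatch through it, e.g.
//
//     let plaintext = KBC_RUNTIME
//         .lock()
//         .map_err(|e| anyhow!("lock poisoned: {}", e))?
//         .decrypt(kbc_name, kbs_uri, annotation)?;
//
// The runtime lazily instantiates the named KBC module on first use.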
pub struct KbcRuntime {
kbc_instance_map: HashMap<String, KbcInstance>,
}
impl KbcRuntime {
fn new() -> KbcRuntime {
KbcRuntime {
kbc_instance_map: HashMap::new(),
}
}
fn register_instance(&mut self, kbc_name: String, kbc_instance: KbcInstance) {
self.kbc_instance_map.insert(kbc_name, kbc_instance);
}
fn instantiate_kbc(&mut self, kbc_name: String, kbs_uri: String) -> Result<()> {
let kbc_module_list = KBC_MODULE_LIST.clone();
let instantiate_func = kbc_module_list.get_func(&kbc_name)?;
let kbc_instance = (instantiate_func)(kbs_uri);
self.register_instance(kbc_name, kbc_instance);
Ok(())
}
pub fn decrypt(
&mut self,
kbc_name: String,
kbs_uri: String,
annotation: String,
) -> Result<Vec<u8>> {
        if !self.kbc_instance_map.contains_key(&kbc_name) {
self.instantiate_kbc(kbc_name.clone(), kbs_uri)?;
}
let kbc_instance = self
.kbc_instance_map
.get_mut(&kbc_name)
            .ok_or_else(|| anyhow!("KBC runtime: The KBC instance does not exist!"))?;
let plain_payload = kbc_instance.decrypt_payload(&annotation)?;
Ok(plain_payload)
}
pub fn get_resource(
&mut self,
kbc_name: String,
kbs_uri: String,
resource_description: String,
) -> Result<Vec<u8>> {
        if !self.kbc_instance_map.contains_key(&kbc_name) {
self.instantiate_kbc(kbc_name.clone(), kbs_uri)?;
}
let kbc_instance = self
.kbc_instance_map
.get_mut(&kbc_name)
            .ok_or_else(|| anyhow!("KBC runtime: The KBC instance does not exist!"))?;
let resource = kbc_instance.get_resource(resource_description)?;
Ok(resource)
}
pub fn check(&self, kbc_name: String) -> Result<KbcCheckInfo> {
let kbc_instance = self
.kbc_instance_map
.get(&kbc_name)
            .ok_or_else(|| anyhow!("KBC runtime: The KBC instance does not exist!"))?;
let check_info: KbcCheckInfo = kbc_instance.check()?;
Ok(check_info)
}
}
| 30.493976 | 97 | 0.619123 |
8aff8f73eeea09b32d54cf60ac5b0ca70580def5 | 1,160 | use std::time::Instant;
use anyhow::Result;
use bytes::Bytes;
use futures::prelude::*;
use srt::{ConnInitMethod, SrtSocketBuilder};
const PACKET_SIZE: usize = 1 << 19;
#[tokio::test]
async fn message_splitting() -> Result<()> {
env_logger::init();
let sender = SrtSocketBuilder::new(ConnInitMethod::Connect("127.0.0.1:11124".parse().unwrap()))
.connect();
let recvr = SrtSocketBuilder::new(ConnInitMethod::Listen)
.local_port(11124)
.connect();
// send a really really long packet
let long_message = Bytes::from(&[b'8'; PACKET_SIZE][..]);
tokio::spawn(async move {
let mut sender = sender.await?;
sender.send((Instant::now(), long_message)).await?;
sender.close().await?;
Ok(()) as Result<_>
});
tokio::spawn(async move {
let data_vec = recvr.await.unwrap().collect::<Vec<_>>().await;
assert_eq!(
&data_vec
.iter()
.map(|r| r.as_ref().unwrap())
.map(|(_, b)| b)
.collect::<Vec<_>>(),
&[&Bytes::from(&[b'8'; PACKET_SIZE][..])]
);
});
Ok(())
}
| 25.217391 | 99 | 0.54569 |
56cea759cffcd50db2e81953fff3d6b53e427fc0 | 16,352 | //! Install tools and build the `esp-idf` using native tooling.
use std::convert::TryFrom;
use std::ffi::OsString;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::{env, fs};
use anyhow::*;
use embuild::cmake::file_api::codemodel::Language;
use embuild::cmake::file_api::ObjKind;
use embuild::espidf::InstallOpts;
use embuild::fs::copy_file_if_different;
use embuild::utils::{OsStrExt, PathExt};
use embuild::{bindgen, build, cargo, cmake, espidf, git, kconfig, path_buf};
use strum::{Display, EnumString};
use super::common::{
self, get_install_dir, list_specific_sdkconfigs, workspace_dir, EspIdfBuildOutput,
EspIdfComponents, ESP_IDF_GLOB_VAR_PREFIX, ESP_IDF_SDKCONFIG_DEFAULTS_VAR,
ESP_IDF_SDKCONFIG_VAR, ESP_IDF_TOOLS_INSTALL_DIR_VAR, MASTER_PATCHES, MCU_VAR, STABLE_PATCHES,
};
use crate::common::{SDKCONFIG_DEFAULTS_FILE, SDKCONFIG_FILE};
const ESP_IDF_VERSION_VAR: &str = "ESP_IDF_VERSION";
const ESP_IDF_REPOSITORY_VAR: &str = "ESP_IDF_REPOSITORY";
const DEFAULT_ESP_IDF_VERSION: &str = "v4.3.1";
const CARGO_CMAKE_BUILD_ACTIVE_VAR: &str = "CARGO_CMAKE_BUILD_ACTIVE";
const CARGO_CMAKE_BUILD_INCLUDES_VAR: &str = "CARGO_CMAKE_BUILD_INCLUDES";
const CARGO_CMAKE_BUILD_LINK_LIBRARIES_VAR: &str = "CARGO_CMAKE_BUILD_LINK_LIBRARIES";
const CARGO_CMAKE_BUILD_COMPILER_VAR: &str = "CARGO_CMAKE_BUILD_COMPILER";
const CARGO_CMAKE_BUILD_SDKCONFIG_VAR: &str = "CARGO_CMAKE_BUILD_SDKCONFIG";
const CARGO_CMAKE_BUILD_ESP_IDF_VAR: &str = "CARGO_CMAKE_BUILD_ESP_IDF";
pub fn build() -> Result<EspIdfBuildOutput> {
if env::var(CARGO_CMAKE_BUILD_ACTIVE_VAR).is_ok()
|| env::var(CARGO_CMAKE_BUILD_INCLUDES_VAR).is_ok()
{
build_cmake_first()
} else {
build_cargo_first()
}
}
fn build_cmake_first() -> Result<EspIdfBuildOutput> {
let components = EspIdfComponents::from(
env::var(CARGO_CMAKE_BUILD_LINK_LIBRARIES_VAR)?
.split(';')
.filter_map(|s| {
s.strip_prefix("__idf_").map(|comp| {
// All ESP-IDF components are prefixed with `__idf_`
// Check this comment for more info:
// https://github.com/esp-rs/esp-idf-sys/pull/17#discussion_r723133416
format!("comp_{}_enabled", comp)
})
}),
);
let sdkconfig = PathBuf::from(env::var(CARGO_CMAKE_BUILD_SDKCONFIG_VAR)?);
let build_output = EspIdfBuildOutput {
cincl_args: build::CInclArgs {
args: env::var(CARGO_CMAKE_BUILD_INCLUDES_VAR)?,
},
link_args: None,
kconfig_args: Box::new(
kconfig::try_from_config_file(sdkconfig.clone())
.with_context(|| anyhow!("Failed to read '{:?}'", sdkconfig))?
.map(|(key, value)| {
(
key.strip_prefix("CONFIG_")
.map(str::to_string)
.unwrap_or(key),
value,
)
}),
),
components,
bindgen: bindgen::Factory::new()
.with_linker(env::var(CARGO_CMAKE_BUILD_COMPILER_VAR)?)
.with_clang_args(
env::var(CARGO_CMAKE_BUILD_INCLUDES_VAR)?
.split(';')
.map(|dir| format!("-I{}", dir))
.collect::<Vec<_>>(),
),
env_path: None,
esp_idf: PathBuf::from(env::var(CARGO_CMAKE_BUILD_ESP_IDF_VAR)?),
};
Ok(build_output)
}
fn build_cargo_first() -> Result<EspIdfBuildOutput> {
let out_dir = cargo::out_dir();
let target = env::var("TARGET")?;
let workspace_dir = workspace_dir()?;
let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?);
let chip = if let Some(mcu) = env::var_os(MCU_VAR) {
Chip::from_str(&mcu.to_string_lossy())?
} else {
Chip::detect(&target)?
};
let chip_name = chip.to_string();
let profile = common::build_profile();
cargo::track_env_var(ESP_IDF_TOOLS_INSTALL_DIR_VAR);
cargo::track_env_var(ESP_IDF_VERSION_VAR);
cargo::track_env_var(ESP_IDF_REPOSITORY_VAR);
cargo::track_env_var(ESP_IDF_SDKCONFIG_DEFAULTS_VAR);
cargo::track_env_var(ESP_IDF_SDKCONFIG_VAR);
cargo::track_env_var(MCU_VAR);
let cmake_tool = espidf::Tools::cmake()?;
let tools = espidf::Tools::new(
vec!["ninja", chip.gcc_toolchain()]
.into_iter()
.chain(chip.ulp_gcc_toolchain()),
);
let install_dir = get_install_dir("espressif")?;
let idf = espidf::Installer::new(esp_idf_version()?)
.opts(InstallOpts::empty())
.local_install_dir(install_dir)
.git_url(match env::var(ESP_IDF_REPOSITORY_VAR) {
Err(env::VarError::NotPresent) => None,
git_url => Some(git_url?),
})
.with_tools(tools)
.with_tools(cmake_tool)
.install()
.context("Could not install esp-idf")?;
// Apply patches, only if the patches were not previously applied.
let patch_set = match &idf.esp_idf_version {
git::Ref::Branch(b) if idf.esp_idf.get_default_branch()?.as_ref() == Some(b) => {
MASTER_PATCHES
}
git::Ref::Tag(t) if t == DEFAULT_ESP_IDF_VERSION => STABLE_PATCHES,
_ => {
cargo::print_warning(format_args!(
"`esp-idf` version ({:?}) not officially supported by `esp-idf-sys`. \
Supported versions are 'master', '{}'.",
&idf.esp_idf_version, DEFAULT_ESP_IDF_VERSION
));
&[]
}
};
if !patch_set.is_empty() {
idf.esp_idf
.apply_once(patch_set.iter().map(|p| manifest_dir.join(p)))?;
}
env::set_var("PATH", &idf.exported_path);
// Create cmake project.
copy_file_if_different(
manifest_dir.join(path_buf!("resources", "cmake_project", "CMakeLists.txt")),
&out_dir,
)?;
copy_file_if_different(
manifest_dir.join(path_buf!("resources", "cmake_project", "main.c")),
&out_dir,
)?;
// Copy additional globbed files specified by user env variables
for file in build::tracked_env_globs_iter(ESP_IDF_GLOB_VAR_PREFIX)? {
let dest_path = out_dir.join(file.1);
fs::create_dir_all(dest_path.parent().unwrap())?;
// TODO: Maybe warn if this overwrites a critical file (e.g. CMakeLists.txt).
// It could be useful for the user to explicitly overwrite our files.
copy_file_if_different(&file.0, &out_dir)?;
}
// The `kconfig.cmake` script looks at this variable if it should compile `mconf` on windows.
// But this variable is also present when using git-bash which doesn't have gcc.
env::remove_var("MSYSTEM");
// Resolve `ESP_IDF_SDKCONFIG` and `ESP_IDF_SDKCONFIG_DEFAULTS` to an absolute path
// relative to the workspace directory if not empty.
let sdkconfig = {
let file = env::var_os(ESP_IDF_SDKCONFIG_VAR).unwrap_or_else(|| SDKCONFIG_FILE.into());
let path = Path::new(&file).abspath_relative_to(&workspace_dir);
let cfg = list_specific_sdkconfigs(path, &profile, &chip_name).next();
if let Some(ref file) = cfg {
cargo::track_file(file);
}
cfg
};
let sdkconfig_defaults = {
let gen_defaults_path = out_dir.join("gen-sdkconfig.defaults");
fs::write(&gen_defaults_path, generate_sdkconfig_defaults()?)?;
let mut result = vec![gen_defaults_path];
result.extend(
env::var_os(ESP_IDF_SDKCONFIG_DEFAULTS_VAR)
.unwrap_or_else(|| SDKCONFIG_DEFAULTS_FILE.into())
.try_to_str()?
.split(';')
.filter_map(|v| {
if !v.is_empty() {
let path = Path::new(v).abspath_relative_to(&workspace_dir);
Some(
list_specific_sdkconfigs(path, &profile, &chip_name)
// We need to reverse the order here so that the more
// specific defaults come last.
.rev()
.inspect(|p| cargo::track_file(p)),
)
} else {
None
}
})
.flatten(),
);
result
};
let defaults_files = sdkconfig_defaults
.iter()
// Use the `sdkconfig` as a defaults file to prevent it from being changed by the
// build. It must be the last defaults file so that its options have precendence
// over any actual defaults from files before it.
.chain(sdkconfig.as_ref())
.try_fold(OsString::new(), |mut accu, p| -> Result<OsString> {
if !accu.is_empty() {
accu.push(";");
}
// Windows uses `\` as directory separators which cmake can't deal with, so we
// convert all back-slashes to forward-slashes here. This would be tedious to
// do with an `OsString` so we have to convert it to `str` first.
if cfg!(windows) {
accu.push(p.try_to_str()?.replace('\\', "/"));
} else {
accu.push(p);
}
Ok(accu)
})?;
let cmake_toolchain_file = path_buf![
&idf.esp_idf.worktree(),
"tools",
"cmake",
chip.cmake_toolchain_file()
];
// Get the asm, C and C++ flags from the toolchain file, these would otherwise get
// overwritten because `cmake::Config` also sets these (see
// https://github.com/espressif/esp-idf/issues/7507).
let (asm_flags, c_flags, cxx_flags) = {
let mut vars = cmake::get_script_variables(&cmake_toolchain_file)?;
(
vars.remove("CMAKE_ASM_FLAGS").unwrap_or_default(),
vars.remove("CMAKE_C_FLAGS").unwrap_or_default(),
vars.remove("CMAKE_CXX_FLAGS").unwrap_or_default(),
)
};
// `cmake::Config` automatically uses `<out_dir>/build` and there is no way to query
// what build directory it sets, so we hard-code it.
let cmake_build_dir = out_dir.join("build");
let query = cmake::Query::new(
&cmake_build_dir,
"cargo",
&[ObjKind::Codemodel, ObjKind::Toolchains],
)?;
// Build the esp-idf.
cmake::Config::new(&out_dir)
.generator("Ninja")
.out_dir(&out_dir)
.no_build_target(true)
.define("CMAKE_TOOLCHAIN_FILE", &cmake_toolchain_file)
.define("CMAKE_BUILD_TYPE", "")
.always_configure(true)
.pic(false)
.asmflag(asm_flags)
.cflag(c_flags)
.cxxflag(cxx_flags)
.env("IDF_PATH", &idf.esp_idf.worktree())
.env("PATH", &idf.exported_path)
.env("SDKCONFIG_DEFAULTS", defaults_files)
.env("IDF_TARGET", &chip_name)
.build();
let replies = query.get_replies()?;
let target = replies
.get_codemodel()?
.into_first_conf()
.get_target("libespidf.elf")
.unwrap_or_else(|| {
bail!("Could not read build information from cmake: Target 'libespidf.elf' not found")
})?;
let compiler = replies
.get_toolchains()
.and_then(|mut t| {
t.take(Language::C)
.ok_or_else(|| Error::msg("No C toolchain"))
})
.and_then(|t| {
t.compiler
.path
.ok_or_else(|| Error::msg("No compiler path set"))
})
.context("Could not determine the compiler from cmake")?;
// Save information about the esp-idf build to the out dir so that it can be
// easily retrieved by tools that need it.
espidf::EspIdfBuildInfo {
esp_idf_dir: idf.esp_idf.worktree().to_owned(),
exported_path_var: idf.exported_path.try_to_str()?.to_owned(),
venv_python: idf.venv_python,
build_dir: cmake_build_dir.clone(),
project_dir: out_dir.clone(),
compiler: compiler.clone(),
mcu: chip_name,
sdkconfig,
sdkconfig_defaults: Some(sdkconfig_defaults),
}
.save_json(out_dir.join(espidf::BUILD_INFO_FILENAME))?;
let sdkconfig_json = path_buf![&cmake_build_dir, "config", "sdkconfig.json"];
let build_output = EspIdfBuildOutput {
cincl_args: build::CInclArgs::try_from(&target.compile_groups[0])?,
link_args: Some(
build::LinkArgsBuilder::try_from(&target.link.unwrap())?
.linker(&compiler)
.working_directory(&cmake_build_dir)
.force_ldproxy(true)
.build()?,
),
bindgen: bindgen::Factory::from_cmake(&target.compile_groups[0])?.with_linker(&compiler),
components: EspIdfComponents::new(),
kconfig_args: Box::new(
kconfig::try_from_json_file(sdkconfig_json.clone())
.with_context(|| anyhow!("Failed to read '{:?}'", sdkconfig_json))?,
),
env_path: Some(idf.exported_path.try_to_str()?.to_owned()),
esp_idf: idf.esp_idf.worktree().to_owned(),
};
Ok(build_output)
}
fn esp_idf_version() -> Result<git::Ref> {
let version = match env::var(ESP_IDF_VERSION_VAR) {
Err(env::VarError::NotPresent) => DEFAULT_ESP_IDF_VERSION.to_owned(),
v => v?,
};
Ok(espidf::decode_esp_idf_version_ref(&version))
}
// Generate `sdkconfig.defaults` content based on the crate manifest (`Cargo.toml`).
//
// This is currently only used to forward the optimization options to the esp-idf.
fn generate_sdkconfig_defaults() -> Result<String> {
const OPT_VARS: [&str; 4] = [
"CONFIG_COMPILER_OPTIMIZATION_NONE",
"CONFIG_COMPILER_OPTIMIZATION_DEFAULT",
"CONFIG_COMPILER_OPTIMIZATION_PERF",
"CONFIG_COMPILER_OPTIMIZATION_SIZE",
];
let opt_level = env::var("OPT_LEVEL")?;
let debug = env::var("DEBUG")?;
let opt_index = match (opt_level.as_str(), debug.as_str()) {
("s" | "z", _) => 3, // -Os
("1", _) | (_, "2" | "true") => 1, // -Og
("0", _) => 0, // -O0
("2" | "3", _) => 2, // -O2
_ => unreachable!("Invalid DEBUG or OPT_LEVEL"),
};
Ok(OPT_VARS
.iter()
.enumerate()
.map(|(i, s)| format!("{}={}\n", s, if i == opt_index { 'y' } else { 'n' }))
.collect::<String>())
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, EnumString)]
#[repr(u32)]
enum Chip {
    /// Xtensa LX6 based dual core
#[strum(serialize = "esp32")]
ESP32 = 0,
/// Xtensa LX7 based single core
#[strum(serialize = "esp32s2")]
ESP32S2,
    /// Xtensa LX7 based dual core
#[strum(serialize = "esp32s3")]
ESP32S3,
/// RISC-V based single core
#[strum(serialize = "esp32c3")]
ESP32C3,
}
impl Chip {
fn detect(rust_target_triple: &str) -> Result<Chip> {
if rust_target_triple.starts_with("xtensa-esp") {
if rust_target_triple.contains("esp32s3") {
return Ok(Chip::ESP32S3);
} else if rust_target_triple.contains("esp32s2") {
return Ok(Chip::ESP32S2);
} else {
return Ok(Chip::ESP32);
}
} else if rust_target_triple.starts_with("riscv32imc-esp") {
return Ok(Chip::ESP32C3);
}
bail!("Unsupported target '{}'", rust_target_triple)
}
/// The name of the gcc toolchain (to compile the `esp-idf`) for `idf_tools.py`.
fn gcc_toolchain(self) -> &'static str {
match self {
Self::ESP32 => "xtensa-esp32-elf",
Self::ESP32S2 => "xtensa-esp32s2-elf",
Self::ESP32S3 => "xtensa-esp32s3-elf",
Self::ESP32C3 => "riscv32-esp-elf",
}
}
/// The name of the gcc toolchain for the ultra low-power co-processor for
/// `idf_tools.py`.
fn ulp_gcc_toolchain(self) -> Option<&'static str> {
match self {
Self::ESP32 => Some("esp32ulp-elf"),
Self::ESP32S2 => Some("esp32s2ulp-elf"),
_ => None,
}
}
fn cmake_toolchain_file(self) -> String {
format!("toolchain-{}.cmake", self)
}
}
| 36.828829 | 98 | 0.588919 |
03525a3a034dc1b1f0c397a31538f310f3c70232 | 1,178 | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// compile-flags:-g
// gdb-command:rbreak zzz
// gdb-command:run
// gdb-command:finish
// gdb-command:print constant
// gdb-check:$1 = 1
// gdb-command:print a_struct
// gdb-check:$2 = {a = -2, b = 3.5, c = 4}
// gdb-command:print *owned
// gdb-check:$3 = 5
#![allow(unused_variable)]
struct Struct {
a: int,
b: f64,
c: uint
}
fn main() {
let constant = 1;
let a_struct = Struct {
a: -2,
b: 3.5,
c: 4
};
let owned = box 5;
let closure: proc() = proc() {
zzz();
do_something(&constant, &a_struct.a, owned);
};
closure();
}
fn do_something(_: &int, _:&int, _:&int) {
}
fn zzz() {()}
| 20.666667 | 69 | 0.619694 |
3881ac0d452e543a6faadbb9ae650b7f43351ad7 | 17,162 | //! Models relating to channels and types within channels.
mod group;
mod attachment;
mod channel_id;
mod embed;
mod guild_channel;
mod message;
mod private_channel;
mod reaction;
mod channel_category;
pub use self::attachment::*;
pub use self::channel_id::*;
pub use self::embed::*;
pub use self::group::*;
pub use self::guild_channel::*;
pub use self::message::*;
pub use self::private_channel::*;
pub use self::reaction::*;
pub use self::channel_category::*;
use crate::model::prelude::*;
use serde::de::Error as DeError;
use serde::ser::{SerializeStruct, Serialize, Serializer};
use super::utils::deserialize_u64;
#[cfg(feature = "model")]
use std::fmt::{Display, Formatter, Result as FmtResult};
#[cfg(all(feature = "cache", feature = "model", feature = "utils"))]
use crate::cache::FromStrAndCache;
#[cfg(all(feature = "cache", feature = "model", feature = "utils"))]
use crate::model::misc::ChannelParseError;
#[cfg(all(feature = "cache", feature = "model", feature = "utils"))]
use crate::utils::parse_channel;
#[cfg(all(feature = "cache", feature = "model"))]
use crate::cache::Cache;
#[cfg(all(feature = "cache", feature = "model", feature = "utils"))]
use async_trait::async_trait;
#[cfg(feature = "model")]
use crate::http::CacheHttp;
/// A container for any channel.
#[derive(Clone, Debug)]
#[non_exhaustive]
pub enum Channel {
    /// A group. A group consists of only one channel.
Group(Group),
/// A [text] or [voice] channel within a [`Guild`].
///
/// [`Guild`]: ../guild/struct.Guild.html
/// [text]: enum.ChannelType.html#variant.Text
/// [voice]: enum.ChannelType.html#variant.Voice
Guild(GuildChannel),
/// A private channel to another [`User`]. No other users may access the
/// channel. For multi-user "private channels", use a group.
///
/// [`User`]: ../user/struct.User.html
Private(PrivateChannel),
/// A category of [`GuildChannel`]s
///
/// [`GuildChannel`]: struct.GuildChannel.html
Category(ChannelCategory),
}
#[cfg(feature = "model")]
impl Channel {
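    /// Converts from `Channel` to `Option<Group>`.
    ///
    /// Converts `self` into an `Option<Group>`, consuming `self`, and
    /// discarding a `GuildChannel`, `PrivateChannel`, or `ChannelCategory`,
    /// if any.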
pub fn group(self) -> Option<Group> {
match self {
Channel::Group(lock) => Some(lock),
_ => None,
}
}
/// Converts from `Channel` to `Option<GuildChannel>`.
///
    /// Converts `self` into an `Option<GuildChannel>`, consuming
    /// `self`, and discarding a `Group`, `PrivateChannel`, or
    /// `ChannelCategory`, if any.
///
/// # Examples
///
/// Basic usage:
///
/// ```rust,no_run
/// # #[cfg(all(feature = "model", feature = "cache"))]
/// # async fn run() {
/// # use serenity::{cache::Cache, model::id::ChannelId};
/// # use tokio::sync::RwLock;
/// # use std::sync::Arc;
/// #
/// # let cache = Cache::default();
/// # let channel = ChannelId(0).to_channel_cached(&cache).await.unwrap();
/// #
/// match channel.guild() {
/// Some(guild) => {
/// println!("It's a guild named {}!", guild.name);
/// },
/// None => { println!("It's not a guild!"); },
/// }
/// # }
/// ```
pub fn guild(self) -> Option<GuildChannel> {
match self {
Channel::Guild(lock) => Some(lock),
_ => None,
}
}
/// Converts from `Channel` to `Option<PrivateChannel>`.
///
    /// Converts `self` into an `Option<PrivateChannel>`, consuming
    /// `self`, and discarding a `Group`, `GuildChannel`, or
    /// `ChannelCategory`, if any.
///
/// # Examples
///
/// Basic usage:
///
/// ```rust,no_run
/// # #[cfg(all(feature = "model", feature = "cache"))]
/// # async fn run() {
/// # use serenity::{cache::Cache, model::id::ChannelId};
/// # use tokio::sync::RwLock;
/// # use std::sync::Arc;
/// #
/// # let cache = Cache::default();
/// # let channel = ChannelId(0).to_channel_cached(&cache).await.unwrap();
/// #
/// match channel.private() {
/// Some(private) => {
/// println!("It's a private channel with {}!", &private.recipient);
/// },
/// None => { println!("It's not a private channel!"); },
/// }
/// # }
/// ```
pub fn private(self) -> Option<PrivateChannel> {
match self {
Channel::Private(lock) => Some(lock),
_ => None,
}
}
/// Converts from `Channel` to `Option<ChannelCategory>`.
///
    /// Converts `self` into an `Option<ChannelCategory>`,
    /// consuming `self`, and discarding a `Group`, `GuildChannel`,
    /// or `PrivateChannel`, if any.
///
/// # Examples
///
/// Basic usage:
///
/// ```rust,no_run
/// # #[cfg(all(feature = "model", feature = "cache"))]
/// # async fn run() {
/// # use serenity::{cache::Cache, model::id::ChannelId};
/// # use tokio::sync::RwLock;
/// # use std::sync::Arc;
/// #
/// # let cache = Cache::default();
/// # let channel = ChannelId(0).to_channel_cached(&cache).await.unwrap();
/// #
/// match channel.category() {
/// Some(category) => {
/// println!("It's a category named {}!", category.name);
/// },
/// None => { println!("It's not a category!"); },
/// }
/// #
/// # }
/// ```
pub fn category(self) -> Option<ChannelCategory> {
match self {
Channel::Category(lock) => Some(lock),
_ => None,
}
}
/// Deletes the inner channel.
///
/// **Note**: If the `cache`-feature is enabled permissions will be checked and upon
/// owning the required permissions the HTTP-request will be issued.
pub async fn delete(&self, cache_http: impl CacheHttp) -> Result<()> {
match self {
Channel::Guild(public_channel) => {
public_channel.delete(cache_http).await?;
},
Channel::Private(private_channel) => {
private_channel.delete(cache_http.http()).await?;
},
Channel::Category(category) => {
category.delete(cache_http).await?;
},
_ => {}
}
Ok(())
}
/// Determines if the channel is NSFW.
#[inline]
pub fn is_nsfw(&self) -> bool {
match self {
Channel::Guild(channel) => channel.is_nsfw(),
Channel::Category(category) => category.is_nsfw(),
Channel::Private(_) => false,
_ => false
}
}
/// Retrieves the Id of the inner [`GuildChannel`], or
/// [`PrivateChannel`].
///
/// [`GuildChannel`]: struct.GuildChannel.html
/// [`PrivateChannel`]: struct.PrivateChannel.html
#[inline]
pub fn id(&self) -> ChannelId {
match self {
Channel::Guild(ch) => ch.id,
Channel::Private(ch) => ch.id,
Channel::Category(ch) => ch.id,
Channel::Group(ch) => ch.channel_id,
}
}
    /// Retrieves the position of the inner [`GuildChannel`] or
    /// [`ChannelCategory`].
    ///
    /// For any other channel type, `None` is returned.
    ///
    /// [`GuildChannel`]: struct.GuildChannel.html
    /// [`ChannelCategory`]: struct.ChannelCategory.html
#[inline]
pub fn position(&self) -> Option<i64> {
match self {
Channel::Guild(channel) => Some(channel.position),
            Channel::Category(category) => Some(category.position),
_ => None
}
}
}
impl<'de> Deserialize<'de> for Channel {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> StdResult<Self, D::Error> {
let v = JsonMap::deserialize(deserializer)?;
let kind = {
let kind = v.get("type").ok_or_else(|| DeError::missing_field("type"))?;
kind.as_u64().unwrap()
};
match kind {
0 | 2 | 5 | 6 => serde_json::from_value::<GuildChannel>(Value::Object(v))
.map(Channel::Guild)
.map_err(DeError::custom),
1 => serde_json::from_value::<PrivateChannel>(Value::Object(v))
.map(Channel::Private)
.map_err(DeError::custom),
3 => serde_json::from_value::<Group>(Value::Object(v))
.map(Channel::Group)
.map_err(DeError::custom),
4 => serde_json::from_value::<ChannelCategory>(Value::Object(v))
.map(Channel::Category)
.map_err(DeError::custom),
_ => Err(DeError::custom("Unknown channel type")),
}
}
}
impl Serialize for Channel {
fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error>
where S: Serializer {
match self {
Channel::Category(c) => ChannelCategory::serialize(c, serializer),
Channel::Guild(c) => GuildChannel::serialize(c, serializer),
Channel::Private(c) => PrivateChannel::serialize(c, serializer),
Channel::Group(c) => Group::serialize(c, serializer),
}
}
}
impl Display for Channel {
/// Formats the channel into a "mentioned" string.
///
/// This will return a different format for each type of channel:
///
/// - [`PrivateChannel`]s: the recipient's name;
/// - [`GuildChannel`]s: a string mentioning the channel that users who can
/// see the channel can click on.
///
/// [`GuildChannel`]: struct.GuildChannel.html
/// [`PrivateChannel`]: struct.PrivateChannel.html
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
match self {
Channel::Group(group) => Display::fmt(&group.name(), f),
Channel::Guild(ch) => Display::fmt(&ch.id.mention(), f),
Channel::Private(ch) => Display::fmt(&ch.recipient.name, f),
Channel::Category(ch) => Display::fmt(&ch.name, f),
}
}
}
/// A representation of a type of channel.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[non_exhaustive]
pub enum ChannelType {
/// An indicator that the channel is a text [`GuildChannel`].
///
/// [`GuildChannel`]: struct.GuildChannel.html
Text = 0,
/// An indicator that the channel is a [`PrivateChannel`].
///
/// [`PrivateChannel`]: struct.PrivateChannel.html
Private = 1,
/// An indicator that the channel is a voice [`GuildChannel`].
///
/// [`GuildChannel`]: struct.GuildChannel.html
Voice = 2,
Group = 3,
/// An indicator that the channel is the channel of a [`ChannelCategory`].
///
/// [`ChannelCategory`]: struct.ChannelCategory.html
Category = 4,
/// An indicator that the channel is a `NewsChannel`.
///
/// Note: `NewsChannel` is serialized into a [`GuildChannel`]
///
/// [`GuildChannel`]: struct.GuildChannel.html
News = 5,
/// An indicator that the channel is a `StoreChannel`
///
/// Note: `StoreChannel` is serialized into a [`GuildChannel`]
///
/// [`GuildChannel`]: struct.GuildChannel.html
Store = 6,
}
enum_number!(
ChannelType {
Text,
Private,
Voice,
Group,
Category,
News,
Store,
}
);
impl ChannelType {
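    /// Returns the lowercase name of this channel type, e.g. `"text"` or `"voice"`.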
#[inline]
pub fn name(&self) -> &str {
match *self {
ChannelType::Group => "group",
ChannelType::Private => "private",
ChannelType::Text => "text",
ChannelType::Voice => "voice",
ChannelType::Category => "category",
ChannelType::News => "news",
ChannelType::Store => "store",
}
}
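    /// Returns the numeric value used by the Discord API to represent this channel type.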
#[inline]
pub fn num(self) -> u64 {
match self {
ChannelType::Text => 0,
ChannelType::Private => 1,
ChannelType::Voice => 2,
ChannelType::Group => 3,
ChannelType::Category => 4,
ChannelType::News => 5,
ChannelType::Store => 6,
}
}
}
#[derive(Deserialize, Serialize)]
struct PermissionOverwriteData {
allow: Permissions,
deny: Permissions,
#[serde(serialize_with = "serialize_u64", deserialize_with = "deserialize_u64")] id: u64,
#[serde(rename = "type")] kind: String,
}
/// A channel-specific permission overwrite for a member or role.
#[derive(Clone, Debug)]
pub struct PermissionOverwrite {
pub allow: Permissions,
pub deny: Permissions,
pub kind: PermissionOverwriteType,
}
impl<'de> Deserialize<'de> for PermissionOverwrite {
fn deserialize<D: Deserializer<'de>>(deserializer: D)
-> StdResult<PermissionOverwrite, D::Error> {
let data = PermissionOverwriteData::deserialize(deserializer)?;
let kind = match &data.kind[..] {
"member" => PermissionOverwriteType::Member(UserId(data.id)),
"role" => PermissionOverwriteType::Role(RoleId(data.id)),
_ => return Err(DeError::custom("Unknown PermissionOverwriteType")),
};
Ok(PermissionOverwrite {
allow: data.allow,
deny: data.deny,
kind,
})
}
}
impl Serialize for PermissionOverwrite {
fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error>
where S: Serializer {
let (id, kind) = match self.kind {
PermissionOverwriteType::Member(id) => (id.0, "member"),
PermissionOverwriteType::Role(id) => (id.0, "role"),
};
let mut state = serializer.serialize_struct("PermissionOverwrite", 4)?;
state.serialize_field("allow", &self.allow.bits())?;
state.serialize_field("deny", &self.deny.bits())?;
state.serialize_field("id", &id)?;
state.serialize_field("type", kind)?;
state.end()
}
}
/// The type of edit being made to a Channel's permissions.
///
/// This is for use with methods such as `GuildChannel::create_permission`.
///
/// [`GuildChannel::create_permission`]: struct.GuildChannel.html#method.create_permission
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[non_exhaustive]
pub enum PermissionOverwriteType {
/// A member which is having its permission overwrites edited.
Member(UserId),
/// A role which is having its permission overwrites edited.
Role(RoleId),
}
#[cfg(test)]
mod test {
#[cfg(all(feature = "model", feature = "utils"))]
mod model_utils {
use crate::model::prelude::*;
fn guild_channel() -> GuildChannel {
GuildChannel {
id: ChannelId(1),
bitrate: None,
category_id: None,
guild_id: GuildId(2),
kind: ChannelType::Text,
last_message_id: None,
last_pin_timestamp: None,
name: "nsfw-stuff".to_string(),
permission_overwrites: vec![],
position: 0,
topic: None,
user_limit: None,
nsfw: false,
slow_mode_rate: Some(0),
_nonexhaustive: (),
}
}
fn private_channel() -> PrivateChannel {
PrivateChannel {
id: ChannelId(1),
last_message_id: None,
last_pin_timestamp: None,
kind: ChannelType::Private,
recipient: User {
id: UserId(2),
avatar: None,
bot: false,
discriminator: 1,
name: "ab".to_string(),
_nonexhaustive: (),
},
_nonexhaustive: (),
}
}
#[test]
fn nsfw_checks() {
let mut channel = guild_channel();
assert!(!channel.is_nsfw());
channel.kind = ChannelType::Voice;
assert!(!channel.is_nsfw());
channel.kind = ChannelType::Text;
channel.name = "nsfw-".to_string();
assert!(!channel.is_nsfw());
channel.name = "nsfw".to_string();
assert!(!channel.is_nsfw());
channel.kind = ChannelType::Voice;
assert!(!channel.is_nsfw());
channel.kind = ChannelType::Text;
channel.name = "nsf".to_string();
channel.nsfw = true;
assert!(channel.is_nsfw());
channel.nsfw = false;
assert!(!channel.is_nsfw());
let channel = Channel::Guild(channel);
assert!(!channel.is_nsfw());
let private_channel = private_channel();
assert!(!private_channel.is_nsfw());
}
}
}
#[cfg(all(feature = "cache", feature = "model", feature = "utils"))]
#[async_trait]
impl FromStrAndCache for Channel {
type Err = ChannelParseError;
async fn from_str<C>(cache: C, s: &str) -> StdResult<Self, Self::Err>
where C: AsRef<Cache> + Send + Sync
{
match parse_channel(s) {
Some(x) => match ChannelId(x).to_channel_cached(&cache).await {
Some(channel) => Ok(channel),
_ => Err(ChannelParseError::NotPresentInCache),
},
_ => Err(ChannelParseError::InvalidChannel),
}
}
}
| 31.781481 | 93 | 0.548887 |
4a05e867e25dc62adc12b9400d2c326ae2d2d692 | 2,670 | //! Json codec.
#![deny(missing_docs)]
#![deny(warnings)]
use core::convert::TryFrom;
use libipld_core::cid::Cid;
use libipld_core::codec::{Codec, Decode, Encode, References};
use libipld_core::error::{Result, UnsupportedCodec};
use libipld_core::ipld::Ipld;
// TODO vmx 2020-05-28: Don't expose the `serde_json` error directly, but wrap it in a custom one
pub use serde_json::Error;
use std::io::{Read, Seek, Write};
mod codec;
/// Json codec.
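///
/// A minimal usage sketch (mirroring the test at the bottom of this file; `encode`
/// and `decode` are provided by the `Codec` trait):
///
/// ```ignore
/// use libipld_core::{codec::Codec, ipld::Ipld};
///
/// let encoded = DagJsonCodec.encode(&Ipld::String("hello".to_string())).unwrap();
/// assert_eq!(std::str::from_utf8(&encoded).unwrap(), r#""hello""#);
/// let decoded: Ipld = DagJsonCodec.decode(&encoded).unwrap();
/// assert_eq!(decoded, Ipld::String("hello".to_string()));
/// ```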
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct DagJsonCodec;
impl Codec for DagJsonCodec {}
impl From<DagJsonCodec> for u64 {
fn from(_: DagJsonCodec) -> Self {
0x0129
}
}
impl TryFrom<u64> for DagJsonCodec {
type Error = UnsupportedCodec;
fn try_from(_: u64) -> core::result::Result<Self, Self::Error> {
Ok(Self)
}
}
impl Encode<DagJsonCodec> for Ipld {
fn encode<W: Write>(&self, _: DagJsonCodec, w: &mut W) -> Result<()> {
Ok(codec::encode(self, w)?)
}
}
impl Decode<DagJsonCodec> for Ipld {
fn decode<R: Read + Seek>(_: DagJsonCodec, r: &mut R) -> Result<Self> {
Ok(codec::decode(r)?)
}
}
impl References<DagJsonCodec> for Ipld {
fn references<R: Read + Seek, E: Extend<Cid>>(
c: DagJsonCodec,
r: &mut R,
set: &mut E,
) -> Result<()> {
Ipld::decode(c, r)?.references(set);
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use libipld_core::cid::Cid;
use libipld_core::multihash::{Code, MultihashDigest};
use std::collections::BTreeMap;
#[test]
fn encode_struct() {
let digest = Code::Blake3_256.digest(&b"block"[..]);
let cid = Cid::new_v1(0x55, digest);
// Create a contact object that looks like:
// Contact { name: "Hello World", details: CID }
let mut map = BTreeMap::new();
map.insert("name".to_string(), Ipld::String("Hello World!".to_string()));
map.insert("details".to_string(), Ipld::Link(cid));
let contact = Ipld::StringMap(map);
let contact_encoded = DagJsonCodec.encode(&contact).unwrap();
println!("encoded: {:02x?}", contact_encoded);
println!(
"encoded string {}",
std::str::from_utf8(&contact_encoded).unwrap()
);
assert_eq!(
std::str::from_utf8(&contact_encoded).unwrap(),
format!(
r#"{{"details":{{"/":"{}"}},"name":"Hello World!"}}"#,
base64::encode(cid.to_bytes()),
)
);
let contact_decoded: Ipld = DagJsonCodec.decode(&contact_encoded).unwrap();
assert_eq!(contact_decoded, contact);
}
}
| 27.525773 | 97 | 0.592135 |
8ff0f464dda3711add75dcb1541f2c2439767ba0 | 1,915 | //! Types for the *m.room.third_party_invite* event.
use ruma_events_macros::StateEventContent;
use serde::{Deserialize, Serialize};
use crate::StateEvent;
/// An invitation to a room issued to a third-party identifier, rather than a Matrix user ID.
///
/// Acts as an *m.room.member* invite event, where there isn't a target user_id to invite. This
/// event contains a token and a public key whose private key must be used to sign the token.
/// Any user who can present that signature may use this invitation to join the target room.
pub type ThirdPartyInviteEvent = StateEvent<ThirdPartyInviteEventContent>;
/// The payload for `ThirdPartyInviteEvent`.
#[derive(Clone, Debug, Deserialize, Serialize, StateEventContent)]
#[ruma_event(type = "m.room.third_party_invite")]
pub struct ThirdPartyInviteEventContent {
/// A user-readable string which represents the user who has been invited.
#[serde(default)]
pub display_name: String,
/// A URL which can be fetched to validate whether the key has been revoked.
pub key_validity_url: String,
/// A Base64-encoded Ed25519 key with which the token must be signed.
pub public_key: String,
/// Keys with which the token may be signed.
#[serde(skip_serializing_if = "Option::is_none")]
pub public_keys: Option<Vec<PublicKey>>,
}
/// A public key for signing a third party invite token.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PublicKey {
/// An optional URL which can be fetched to validate whether the key has been revoked.
///
/// The URL must return a JSON object containing a boolean property named 'valid'.
/// If this URL is absent, the key must be considered valid indefinitely.
#[serde(skip_serializing_if = "Option::is_none")]
pub key_validity_url: Option<String>,
/// A Base64-encoded Ed25519 key with which the token must be signed.
pub public_key: String,
}
| 40.744681 | 95 | 0.732637 |
ef85ad059b7067673db2f3510611caa4ba8db311 | 100 | #[macro_use]
mod text;
#[macro_use]
mod tag;
#[macro_use]
mod comment;
#[macro_use]
mod doctype;
| 8.333333 | 12 | 0.69 |
fc8ac2311e9787213efc0e0d88280168fc7ce677 | 307 | //! Low-level LED pixel driver API.
pub mod color;
#[cfg(target_vendor = "espressif")]
mod esp32_rmt;
#[cfg(not(target_vendor = "espressif"))]
mod mock;
#[cfg(not(target_vendor = "espressif"))]
use mock as esp32_rmt;
pub use esp32_rmt::Ws2812Esp32RmtDriver;
pub use esp32_rmt::Ws2812Esp32RmtDriverError;
| 21.928571 | 45 | 0.745928 |
39030ba845afabcd5dc4a0179f72ad14d001393d | 165,483 | // Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use super::{
pool::{
standard::{StandardCommandPoolAlloc, StandardCommandPoolBuilder},
CommandPool, CommandPoolAlloc, CommandPoolBuilderAlloc,
},
synced::{
CommandBufferState, SyncCommandBuffer, SyncCommandBufferBuilder,
SyncCommandBufferBuilderError,
},
sys::{
CommandBufferBeginInfo, RenderPassBeginInfo, UnsafeCommandBuffer,
UnsafeCommandBufferBuilderBufferImageCopy, UnsafeCommandBufferBuilderColorImageClear,
UnsafeCommandBufferBuilderDepthStencilImageClear, UnsafeCommandBufferBuilderImageBlit,
UnsafeCommandBufferBuilderImageCopy,
},
validity::{
check_begin_query, check_blit_image, check_clear_color_image,
check_clear_depth_stencil_image, check_copy_buffer, check_copy_buffer_image,
check_copy_image, check_copy_query_pool_results, check_debug_marker_color,
check_descriptor_sets_validity, check_dispatch, check_dynamic_state_validity,
check_end_query, check_fill_buffer, check_index_buffer, check_indirect_buffer,
check_pipeline_compute, check_pipeline_graphics, check_push_constants_validity,
check_reset_query_pool, check_update_buffer, check_vertex_buffers, check_write_timestamp,
CheckBeginQueryError, CheckBlitImageError, CheckClearColorImageError,
CheckClearDepthStencilImageError, CheckColorError, CheckCopyBufferError,
CheckCopyBufferImageError, CheckCopyBufferImageTy, CheckCopyImageError,
CheckCopyQueryPoolResultsError, CheckDescriptorSetsValidityError, CheckDispatchError,
CheckDynamicStateValidityError, CheckEndQueryError, CheckFillBufferError,
CheckIndexBufferError, CheckIndirectBufferError, CheckPipelineError,
CheckPushConstantsValidityError, CheckResetQueryPoolError, CheckUpdateBufferError,
CheckVertexBufferError, CheckWriteTimestampError,
},
CommandBufferExecError, CommandBufferInheritanceInfo, CommandBufferInheritanceRenderPassInfo,
CommandBufferLevel, CommandBufferUsage, DispatchIndirectCommand, DrawIndexedIndirectCommand,
DrawIndirectCommand, PrimaryCommandBuffer, SecondaryCommandBuffer, SubpassContents,
};
use crate::{
buffer::{sys::UnsafeBuffer, BufferAccess, BufferContents, TypedBufferAccess},
descriptor_set::{check_descriptor_write, DescriptorSetsCollection, WriteDescriptorSet},
device::{physical::QueueFamily, Device, DeviceOwned, Queue},
format::{ClearValue, NumericType},
image::{
attachment::{ClearAttachment, ClearRect},
sys::UnsafeImage,
ImageAccess, ImageAspect, ImageAspects, ImageLayout,
},
pipeline::{
graphics::{
color_blend::LogicOp,
depth_stencil::{CompareOp, StencilFaces, StencilOp},
input_assembly::{Index, IndexType, PrimitiveTopology},
rasterization::{CullMode, FrontFace},
vertex_input::VertexBuffersCollection,
viewport::{Scissor, Viewport},
},
ComputePipeline, DynamicState, GraphicsPipeline, Pipeline, PipelineBindPoint,
PipelineLayout,
},
query::{
QueryControlFlags, QueryPipelineStatisticFlags, QueryPool, QueryResultElement,
QueryResultFlags, QueryType,
},
render_pass::{Framebuffer, LoadOp, Subpass},
sampler::Filter,
sync::{
AccessCheckError, AccessFlags, GpuFuture, PipelineMemoryAccess, PipelineStage,
PipelineStages,
},
DeviceSize, OomError, SafeDeref, Version, VulkanObject,
};
use smallvec::SmallVec;
use std::{
cmp,
collections::HashMap,
error,
ffi::CStr,
fmt, iter,
marker::PhantomData,
mem::{size_of, size_of_val},
ops::Range,
slice,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
};
/// A command buffer builder, used to record commands and then build a command buffer.
///
/// Note that command buffers allocated from the default command pool (`Arc<StandardCommandPool>`)
/// don't implement the `Send` and `Sync` traits. If you use this pool, then the
/// `AutoCommandBufferBuilder` will not implement `Send` and `Sync` either. Once a command buffer
/// is built, however, it *does* implement `Send` and `Sync`.
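///
/// # Examples
///
/// A minimal recording sketch (`device` and `queue_family` are assumed to be an
/// existing `Arc<Device>` and `QueueFamily`):
///
/// ```ignore
/// let mut builder = AutoCommandBufferBuilder::primary(
///     device.clone(),
///     queue_family,
///     CommandBufferUsage::OneTimeSubmit,
/// )?;
/// // Record commands here, e.g. `builder.copy_buffer(source, destination)?;`.
/// let command_buffer = builder.build()?;
/// ```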
pub struct AutoCommandBufferBuilder<L, P = StandardCommandPoolBuilder> {
inner: SyncCommandBufferBuilder,
pool_builder_alloc: P, // Safety: must be dropped after `inner`
// The queue family that this command buffer is being created for.
queue_family_id: u32,
// The inheritance for secondary command buffers.
// Must be `None` in a primary command buffer and `Some` in a secondary command buffer.
inheritance_info: Option<CommandBufferInheritanceInfo>,
// Usage flags passed when creating the command buffer.
usage: CommandBufferUsage,
// If we're inside a render pass, contains the render pass state.
render_pass_state: Option<RenderPassState>,
// If any queries are active, this hashmap contains their state.
query_state: HashMap<ash::vk::QueryType, QueryState>,
_data: PhantomData<L>,
}
// The state of the current render pass, specifying the pass, subpass index and its intended contents.
struct RenderPassState {
subpass: Subpass,
contents: SubpassContents,
attached_layers_ranges: SmallVec<[Range<u32>; 4]>,
extent: [u32; 2],
framebuffer: ash::vk::Framebuffer, // Always null for secondary command buffers
}
// The state of an active query.
struct QueryState {
query_pool: ash::vk::QueryPool,
query: u32,
ty: QueryType,
flags: QueryControlFlags,
in_subpass: bool,
}
impl AutoCommandBufferBuilder<PrimaryAutoCommandBuffer, StandardCommandPoolBuilder> {
/// Starts building a primary command buffer.
#[inline]
pub fn primary(
device: Arc<Device>,
queue_family: QueueFamily,
usage: CommandBufferUsage,
) -> Result<
AutoCommandBufferBuilder<PrimaryAutoCommandBuffer, StandardCommandPoolBuilder>,
OomError,
> {
unsafe {
AutoCommandBufferBuilder::with_level(
device,
queue_family,
CommandBufferLevel::Primary,
CommandBufferBeginInfo {
usage,
..Default::default()
},
)
}
}
}
impl AutoCommandBufferBuilder<SecondaryAutoCommandBuffer, StandardCommandPoolBuilder> {
/// Starts building a secondary compute command buffer.
#[inline]
pub fn secondary_compute(
device: Arc<Device>,
queue_family: QueueFamily,
usage: CommandBufferUsage,
) -> Result<
AutoCommandBufferBuilder<SecondaryAutoCommandBuffer, StandardCommandPoolBuilder>,
OomError,
> {
unsafe {
Ok(AutoCommandBufferBuilder::with_level(
device,
queue_family,
CommandBufferLevel::Secondary,
CommandBufferBeginInfo {
usage,
inheritance_info: Some(CommandBufferInheritanceInfo::default()),
..Default::default()
},
)?)
}
}
/// Same as `secondary_compute`, but allows specifying how queries are being inherited.
#[inline]
pub fn secondary_compute_inherit_queries(
device: Arc<Device>,
queue_family: QueueFamily,
usage: CommandBufferUsage,
occlusion_query: Option<QueryControlFlags>,
query_statistics_flags: QueryPipelineStatisticFlags,
) -> Result<
AutoCommandBufferBuilder<SecondaryAutoCommandBuffer, StandardCommandPoolBuilder>,
BeginError,
> {
if occlusion_query.is_some() && !device.enabled_features().inherited_queries {
return Err(BeginError::InheritedQueriesFeatureNotEnabled);
}
if query_statistics_flags.count() > 0
&& !device.enabled_features().pipeline_statistics_query
{
return Err(BeginError::PipelineStatisticsQueryFeatureNotEnabled);
}
unsafe {
Ok(AutoCommandBufferBuilder::with_level(
device,
queue_family,
CommandBufferLevel::Secondary,
CommandBufferBeginInfo {
usage,
inheritance_info: Some(CommandBufferInheritanceInfo {
occlusion_query,
query_statistics_flags,
..Default::default()
}),
..Default::default()
},
)?)
}
}
/// Starts building a secondary graphics command buffer.
#[inline]
pub fn secondary_graphics(
device: Arc<Device>,
queue_family: QueueFamily,
usage: CommandBufferUsage,
subpass: Subpass,
) -> Result<
AutoCommandBufferBuilder<SecondaryAutoCommandBuffer, StandardCommandPoolBuilder>,
OomError,
> {
unsafe {
Ok(AutoCommandBufferBuilder::with_level(
device,
queue_family,
CommandBufferLevel::Secondary,
CommandBufferBeginInfo {
usage,
inheritance_info: Some(CommandBufferInheritanceInfo {
render_pass: Some(CommandBufferInheritanceRenderPassInfo {
subpass,
framebuffer: None,
}),
..Default::default()
}),
..Default::default()
},
)?)
}
}
/// Same as `secondary_graphics`, but allows specifying how queries are being inherited.
#[inline]
pub fn secondary_graphics_inherit_queries(
device: Arc<Device>,
queue_family: QueueFamily,
usage: CommandBufferUsage,
subpass: Subpass,
occlusion_query: Option<QueryControlFlags>,
query_statistics_flags: QueryPipelineStatisticFlags,
) -> Result<
AutoCommandBufferBuilder<SecondaryAutoCommandBuffer, StandardCommandPoolBuilder>,
BeginError,
> {
if occlusion_query.is_some() && !device.enabled_features().inherited_queries {
return Err(BeginError::InheritedQueriesFeatureNotEnabled);
}
if query_statistics_flags.count() > 0
&& !device.enabled_features().pipeline_statistics_query
{
return Err(BeginError::PipelineStatisticsQueryFeatureNotEnabled);
}
unsafe {
Ok(AutoCommandBufferBuilder::with_level(
device,
queue_family,
CommandBufferLevel::Secondary,
CommandBufferBeginInfo {
usage,
inheritance_info: Some(CommandBufferInheritanceInfo {
render_pass: Some(CommandBufferInheritanceRenderPassInfo {
subpass,
framebuffer: None,
}),
occlusion_query,
query_statistics_flags,
..Default::default()
}),
..Default::default()
},
)?)
}
}
}
impl<L> AutoCommandBufferBuilder<L, StandardCommandPoolBuilder> {
// Actual constructor. Private.
//
// `begin_info.inheritance_info` must match `level`.
unsafe fn with_level(
device: Arc<Device>,
queue_family: QueueFamily,
level: CommandBufferLevel,
begin_info: CommandBufferBeginInfo,
) -> Result<AutoCommandBufferBuilder<L, StandardCommandPoolBuilder>, OomError> {
let usage = begin_info.usage;
let inheritance_info = begin_info.inheritance_info.clone();
let render_pass_state = begin_info
.inheritance_info
.as_ref()
.and_then(|inheritance_info| inheritance_info.render_pass.as_ref())
.map(
|CommandBufferInheritanceRenderPassInfo {
subpass,
framebuffer,
}| RenderPassState {
subpass: subpass.clone(),
contents: SubpassContents::Inline,
extent: framebuffer.as_ref().map(|f| f.extent()).unwrap_or_default(),
attached_layers_ranges: framebuffer
.as_ref()
.map(|f| f.attached_layers_ranges())
.unwrap_or_default(),
framebuffer: ash::vk::Framebuffer::null(), // Only needed for primary command buffers
},
);
let pool = Device::standard_command_pool(&device, queue_family);
let pool_builder_alloc = pool
.allocate(level, 1)?
.next()
.expect("Requested one command buffer from the command pool, but got zero.");
let inner = SyncCommandBufferBuilder::new(pool_builder_alloc.inner(), begin_info)?;
Ok(AutoCommandBufferBuilder {
inner,
pool_builder_alloc,
queue_family_id: queue_family.id(),
render_pass_state,
query_state: HashMap::default(),
inheritance_info,
usage,
_data: PhantomData,
})
}
}
#[derive(Clone, Copy, Debug)]
pub enum BeginError {
/// Occlusion query inheritance was requested, but the `inherited_queries` feature was not enabled.
InheritedQueriesFeatureNotEnabled,
/// Not enough memory.
OomError(OomError),
/// Pipeline statistics query inheritance was requested, but the `pipeline_statistics_query` feature was not enabled.
PipelineStatisticsQueryFeatureNotEnabled,
}
impl error::Error for BeginError {
#[inline]
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match *self {
Self::OomError(ref err) => Some(err),
_ => None,
}
}
}
impl fmt::Display for BeginError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(
fmt,
"{}",
match *self {
Self::InheritedQueriesFeatureNotEnabled => {
"occlusion query inheritance was requested but the corresponding feature \
wasn't enabled"
}
Self::OomError(_) => "not enough memory available",
Self::PipelineStatisticsQueryFeatureNotEnabled => {
"pipeline statistics query inheritance was requested but the corresponding \
feature wasn't enabled"
}
}
)
}
}
impl From<OomError> for BeginError {
#[inline]
fn from(err: OomError) -> Self {
Self::OomError(err)
}
}
impl<P> AutoCommandBufferBuilder<PrimaryAutoCommandBuffer<P::Alloc>, P>
where
P: CommandPoolBuilderAlloc,
{
/// Builds the command buffer.
#[inline]
pub fn build(self) -> Result<PrimaryAutoCommandBuffer<P::Alloc>, BuildError> {
if self.render_pass_state.is_some() {
return Err(AutoCommandBufferBuilderContextError::ForbiddenInsideRenderPass.into());
}
if !self.query_state.is_empty() {
return Err(AutoCommandBufferBuilderContextError::QueryIsActive.into());
}
let submit_state = match self.usage {
CommandBufferUsage::MultipleSubmit => SubmitState::ExclusiveUse {
in_use: AtomicBool::new(false),
},
CommandBufferUsage::SimultaneousUse => SubmitState::Concurrent,
CommandBufferUsage::OneTimeSubmit => SubmitState::OneTime {
already_submitted: AtomicBool::new(false),
},
};
Ok(PrimaryAutoCommandBuffer {
inner: self.inner.build()?,
pool_alloc: self.pool_builder_alloc.into_alloc(),
submit_state,
})
}
}
impl<P> AutoCommandBufferBuilder<SecondaryAutoCommandBuffer<P::Alloc>, P>
where
P: CommandPoolBuilderAlloc,
{
/// Builds the command buffer.
#[inline]
pub fn build(self) -> Result<SecondaryAutoCommandBuffer<P::Alloc>, BuildError> {
if !self.query_state.is_empty() {
return Err(AutoCommandBufferBuilderContextError::QueryIsActive.into());
}
let submit_state = match self.usage {
CommandBufferUsage::MultipleSubmit => SubmitState::ExclusiveUse {
in_use: AtomicBool::new(false),
},
CommandBufferUsage::SimultaneousUse => SubmitState::Concurrent,
CommandBufferUsage::OneTimeSubmit => SubmitState::OneTime {
already_submitted: AtomicBool::new(false),
},
};
Ok(SecondaryAutoCommandBuffer {
inner: self.inner.build()?,
pool_alloc: self.pool_builder_alloc.into_alloc(),
inheritance_info: self.inheritance_info.unwrap(),
submit_state,
})
}
}
impl<L, P> AutoCommandBufferBuilder<L, P> {
#[inline]
fn ensure_outside_render_pass(&self) -> Result<(), AutoCommandBufferBuilderContextError> {
if self.render_pass_state.is_some() {
return Err(AutoCommandBufferBuilderContextError::ForbiddenInsideRenderPass);
}
Ok(())
}
#[inline]
fn ensure_inside_render_pass_inline(
&self,
pipeline: &GraphicsPipeline,
) -> Result<(), AutoCommandBufferBuilderContextError> {
let render_pass_state = self
.render_pass_state
.as_ref()
.ok_or(AutoCommandBufferBuilderContextError::ForbiddenOutsideRenderPass)?;
// Subpass must be for inline commands
if render_pass_state.contents != SubpassContents::Inline {
return Err(AutoCommandBufferBuilderContextError::WrongSubpassType);
}
// Subpasses must be the same.
if pipeline.subpass().index() != render_pass_state.subpass.index() {
return Err(AutoCommandBufferBuilderContextError::WrongSubpassIndex);
}
// Render passes must be compatible.
if !pipeline
.subpass()
.render_pass()
.is_compatible_with(&render_pass_state.subpass.render_pass())
{
return Err(AutoCommandBufferBuilderContextError::IncompatibleRenderPass);
}
Ok(())
}
#[inline]
fn queue_family(&self) -> QueueFamily {
self.device()
.physical_device()
.queue_family_by_id(self.queue_family_id)
.unwrap()
}
/// Returns the binding/setting state.
#[inline]
pub fn state(&self) -> CommandBufferState {
self.inner.state()
}
/// Binds descriptor sets for future dispatch or draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support `pipeline_bind_point`.
/// - Panics if the highest descriptor set slot being bound is not less than the number of sets
/// in `pipeline_layout`.
/// - Panics if `self` and any element of `descriptor_sets` do not belong to the same device.
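    ///
    /// A sketch for the compute case (assuming `pipeline` is an `Arc<ComputePipeline>`
    /// that was just bound, and `set` is a descriptor set matching set slot 0 of its
    /// layout):
    ///
    /// ```ignore
    /// builder.bind_descriptor_sets(
    ///     PipelineBindPoint::Compute,
    ///     pipeline.layout().clone(),
    ///     0,
    ///     set,
    /// );
    /// ```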
pub fn bind_descriptor_sets<S>(
&mut self,
pipeline_bind_point: PipelineBindPoint,
pipeline_layout: Arc<PipelineLayout>,
first_set: u32,
descriptor_sets: S,
) -> &mut Self
where
S: DescriptorSetsCollection,
{
match pipeline_bind_point {
PipelineBindPoint::Compute => assert!(
self.queue_family().supports_compute(),
"the queue family of the command buffer must support compute operations"
),
PipelineBindPoint::Graphics => assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
),
}
let descriptor_sets = descriptor_sets.into_vec();
assert!(
first_set as usize + descriptor_sets.len()
<= pipeline_layout.set_layouts().len(),
"the highest descriptor set slot being bound must be less than the number of sets in pipeline_layout"
);
for (num, set) in descriptor_sets.iter().enumerate() {
assert_eq!(
set.as_ref().0.device().internal_object(),
self.device().internal_object()
);
let pipeline_set = &pipeline_layout.set_layouts()[first_set as usize + num];
assert!(
pipeline_set.is_compatible_with(set.as_ref().0.layout()),
"the element of descriptor_sets being bound to slot {} is not compatible with the corresponding slot in pipeline_layout",
first_set as usize + num,
);
// TODO: see https://github.com/vulkano-rs/vulkano/issues/1643
// For each dynamic uniform or storage buffer binding in pDescriptorSets, the sum of the
// effective offset, as defined above, and the range of the binding must be less than or
// equal to the size of the buffer
// TODO:
// Each element of pDescriptorSets must not have been allocated from a VkDescriptorPool
// with the VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE flag set
}
unsafe {
let mut sets_binder = self.inner.bind_descriptor_sets();
for set in descriptor_sets.into_iter() {
sets_binder.add(set);
}
sets_binder.submit(pipeline_bind_point, pipeline_layout, first_set);
}
self
}
/// Binds an index buffer for future indexed draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if `self` and `index_buffer` do not belong to the same device.
/// - Panics if `index_buffer` does not have the
/// [`index_buffer`](crate::buffer::BufferUsage::index_buffer) usage enabled.
/// - If the index buffer contains `u8` indices, panics if the
/// [`index_type_uint8`](crate::device::Features::index_type_uint8) feature is not
/// enabled on the device.
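    ///
    /// For example (a sketch; `index_buffer` is assumed to be an existing
    /// `Arc<CpuAccessibleBuffer<[u32]>>` created with the `index_buffer` usage):
    ///
    /// ```ignore
    /// builder.bind_index_buffer(index_buffer.clone());
    /// ```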
pub fn bind_index_buffer<Ib, I>(&mut self, index_buffer: Arc<Ib>) -> &mut Self
where
Ib: TypedBufferAccess<Content = [I]> + 'static,
I: Index + 'static,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert_eq!(
index_buffer.device().internal_object(),
self.device().internal_object()
);
// TODO:
// The sum of offset and the address of the range of VkDeviceMemory object that is backing
// buffer, must be a multiple of the type indicated by indexType
assert!(
index_buffer.inner().buffer.usage().index_buffer,
"index_buffer must have the index_buffer usage enabled"
);
// TODO:
// If buffer is non-sparse then it must be bound completely and contiguously to a single
// VkDeviceMemory object
if !self.device().enabled_features().index_type_uint8 {
assert!(I::ty() != IndexType::U8, "if the index buffer contains u8 indices, the index_type_uint8 feature must be enabled on the device");
}
unsafe {
self.inner.bind_index_buffer(index_buffer, I::ty());
}
self
}
/// Binds a compute pipeline for future dispatch calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support compute operations.
/// - Panics if `self` and `pipeline` do not belong to the same device.
pub fn bind_pipeline_compute(&mut self, pipeline: Arc<ComputePipeline>) -> &mut Self {
assert!(
self.queue_family().supports_compute(),
"the queue family of the command buffer must support compute operations"
);
assert_eq!(
pipeline.device().internal_object(),
self.device().internal_object()
);
// TODO:
// This command must not be recorded when transform feedback is active
// TODO:
// pipeline must not have been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR set
unsafe {
self.inner.bind_pipeline_compute(pipeline);
}
self
}
/// Binds a graphics pipeline for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if `self` and `pipeline` do not belong to the same device.
pub fn bind_pipeline_graphics(&mut self, pipeline: Arc<GraphicsPipeline>) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert_eq!(
pipeline.device().internal_object(),
self.device().internal_object()
);
// TODO:
// If the variable multisample rate feature is not supported, pipeline is a graphics
// pipeline, the current subpass uses no attachments, and this is not the first call to
// this function with a graphics pipeline after transitioning to the current subpass, then
// the sample count specified by this pipeline must match that set in the previous pipeline
// TODO:
// If VkPhysicalDeviceSampleLocationsPropertiesEXT::variableSampleLocations is VK_FALSE, and
// pipeline is a graphics pipeline created with a
// VkPipelineSampleLocationsStateCreateInfoEXT structure having its sampleLocationsEnable
// member set to VK_TRUE but without VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT enabled then the
// current render pass instance must have been begun by specifying a
// VkRenderPassSampleLocationsBeginInfoEXT structure whose pPostSubpassSampleLocations
// member contains an element with a subpassIndex matching the current subpass index and the
// sampleLocationsInfo member of that element must match the sampleLocationsInfo specified
// in VkPipelineSampleLocationsStateCreateInfoEXT when the pipeline was created
// TODO:
// This command must not be recorded when transform feedback is active
// TODO:
// pipeline must not have been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR set
// TODO:
// If commandBuffer is a secondary command buffer with
// VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled and
// pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS, then the pipeline must have been
// created with VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT or VK_DYNAMIC_STATE_VIEWPORT, and
// VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT or VK_DYNAMIC_STATE_SCISSOR enabled
// TODO:
// If pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS and the
// provokingVertexModePerPipeline limit is VK_FALSE, then pipeline’s
// VkPipelineRasterizationProvokingVertexStateCreateInfoEXT::provokingVertexMode must be the
// same as that of any other pipelines previously bound to this bind point within the
// current renderpass instance, including any pipeline already bound when beginning the
// renderpass instance
unsafe {
self.inner.bind_pipeline_graphics(pipeline);
}
self
}
/// Binds vertex buffers for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the highest vertex buffer binding being bound is greater than the
/// [`max_vertex_input_bindings`](crate::device::Properties::max_vertex_input_bindings)
    ///   device property.
/// - Panics if `self` and any element of `vertex_buffers` do not belong to the same device.
/// - Panics if any element of `vertex_buffers` does not have the
/// [`vertex_buffer`](crate::buffer::BufferUsage::vertex_buffer) usage enabled.
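    ///
    /// For example (a sketch; `vertex_buffer` is assumed to be an existing buffer
    /// created with the `vertex_buffer` usage):
    ///
    /// ```ignore
    /// builder.bind_vertex_buffers(0, vertex_buffer.clone());
    /// ```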
pub fn bind_vertex_buffers<V>(&mut self, first_binding: u32, vertex_buffers: V) -> &mut Self
where
V: VertexBuffersCollection,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
let vertex_buffers = vertex_buffers.into_vec();
assert!(
first_binding + vertex_buffers.len() as u32
<= self
.device()
.physical_device()
.properties()
.max_vertex_input_bindings,
"the highest vertex buffer binding being bound must not be higher than the max_vertex_input_bindings device property"
);
for (num, buf) in vertex_buffers.iter().enumerate() {
assert_eq!(
buf.device().internal_object(),
self.device().internal_object()
);
assert!(
buf.inner().buffer.usage().vertex_buffer,
"vertex_buffers element {} must have the vertex_buffer usage",
num
);
// TODO:
// Each element of pBuffers that is non-sparse must be bound completely and contiguously
// to a single VkDeviceMemory object
// TODO:
// If the nullDescriptor feature is not enabled, all elements of pBuffers must not be
// VK_NULL_HANDLE
// TODO:
// If an element of pBuffers is VK_NULL_HANDLE, then the corresponding element of
// pOffsets must be zero
}
unsafe {
let mut binder = self.inner.bind_vertex_buffers();
for vb in vertex_buffers.into_iter() {
binder.add(vb);
}
binder.submit(first_binding);
}
self
}
/// Adds a command that blits an image to another.
///
/// A *blit* is similar to an image copy operation, except that the portion of the image that
/// is transferred can be resized. You choose an area of the source and an area of the
/// destination, and the implementation will resize the area of the source so that it matches
/// the size of the area of the destination before writing it.
///
/// Blit operations have several restrictions:
///
/// - Blit operations are only allowed on queue families that support graphics operations.
/// - The format of the source and destination images must support blit operations, which
/// depends on the Vulkan implementation. Vulkan guarantees that some specific formats must
/// always be supported. See tables 52 to 61 of the specifications.
/// - Only single-sampled images are allowed.
/// - You can only blit between two images whose formats belong to the same type. The types
/// are: floating-point, signed integers, unsigned integers, depth-stencil.
/// - If you blit between depth, stencil or depth-stencil images, the format of both images
/// must match exactly.
/// - If you blit between depth, stencil or depth-stencil images, only the `Nearest` filter is
/// allowed.
/// - For two-dimensional images, the Z coordinate must be 0 for the top-left offset and 1 for
/// the bottom-right offset. Same for the Y coordinate for one-dimensional images.
/// - For non-array images, the base array layer must be 0 and the number of layers must be 1.
///
/// If `layer_count` is greater than 1, the blit will happen between each individual layer as
/// if they were separate images.
///
/// # Panic
///
/// - Panics if the source or the destination was not created with `device`.
///
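    /// For instance, a sketch that scales the whole of `src` onto the whole of `dst`
    /// (both assumed to be single-layer, single-mip-level 2D color images):
    ///
    /// ```ignore
    /// builder.blit_image(
    ///     src.clone(),
    ///     [0, 0, 0],
    ///     [src_width as i32, src_height as i32, 1],
    ///     0,
    ///     0,
    ///     dst.clone(),
    ///     [0, 0, 0],
    ///     [dst_width as i32, dst_height as i32, 1],
    ///     0,
    ///     0,
    ///     1,
    ///     Filter::Linear,
    /// )?;
    /// ```
    ///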
pub fn blit_image(
&mut self,
source: Arc<dyn ImageAccess>,
source_top_left: [i32; 3],
source_bottom_right: [i32; 3],
source_base_array_layer: u32,
source_mip_level: u32,
destination: Arc<dyn ImageAccess>,
destination_top_left: [i32; 3],
destination_bottom_right: [i32; 3],
destination_base_array_layer: u32,
destination_mip_level: u32,
layer_count: u32,
filter: Filter,
) -> Result<&mut Self, BlitImageError> {
unsafe {
if !self.queue_family().supports_graphics() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
self.ensure_outside_render_pass()?;
check_blit_image(
self.device(),
source.as_ref(),
source_top_left,
source_bottom_right,
source_base_array_layer,
source_mip_level,
destination.as_ref(),
destination_top_left,
destination_bottom_right,
destination_base_array_layer,
destination_mip_level,
layer_count,
filter,
)?;
let blit = UnsafeCommandBufferBuilderImageBlit {
// TODO:
aspects: if source.format().aspects().color {
ImageAspects {
color: true,
..ImageAspects::none()
}
} else {
unimplemented!()
},
source_mip_level,
destination_mip_level,
source_base_array_layer,
destination_base_array_layer,
layer_count,
source_top_left,
source_bottom_right,
destination_top_left,
destination_bottom_right,
};
// TODO: Allow choosing layouts, but note that only Transfer*Optimal and General are
// valid.
if source.conflict_key() == destination.conflict_key() {
// since we are blitting from the same image, we must use the same layout
self.inner.blit_image(
source,
ImageLayout::General,
destination,
ImageLayout::General,
iter::once(blit),
filter,
)?;
} else {
self.inner.blit_image(
source,
ImageLayout::TransferSrcOptimal,
destination,
ImageLayout::TransferDstOptimal,
iter::once(blit),
filter,
)?;
}
Ok(self)
}
}
/// Adds a command that clears specific regions of specific attachments of the framebuffer.
///
/// `attachments` specify the types of attachments and their clear values.
/// `rects` specify the regions to clear.
///
    /// A graphics pipeline must have been bound using
    /// [`bind_pipeline_graphics`](Self::bind_pipeline_graphics), and the command must be
    /// recorded inside a render pass.
///
/// If the render pass instance this is recorded in uses multiview,
/// then `ClearRect.base_array_layer` must be zero and `ClearRect.layer_count` must be one.
///
/// The rectangle area must be inside the render area ranges.
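    ///
    /// A sketch (assuming the color variant of [`ClearAttachment`] carries a
    /// [`ClearValue`] plus the color attachment index, as matched in the body below):
    ///
    /// ```ignore
    /// builder.clear_attachments(
    ///     [ClearAttachment::Color(ClearValue::Float([0.0, 0.0, 0.0, 1.0]), 0)],
    ///     [ClearRect {
    ///         rect_offset: [0, 0],
    ///         rect_extent: [width, height],
    ///         base_array_layer: 0,
    ///         layer_count: 1,
    ///     }],
    /// )?;
    /// ```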
pub fn clear_attachments<A, R>(
&mut self,
attachments: A,
rects: R,
) -> Result<&mut Self, ClearAttachmentsError>
where
A: IntoIterator<Item = ClearAttachment>,
R: IntoIterator<Item = ClearRect>,
{
let pipeline = check_pipeline_graphics(self.state())?;
self.ensure_inside_render_pass_inline(pipeline)?;
let render_pass_state = self.render_pass_state.as_ref().unwrap();
let subpass = &render_pass_state.subpass;
let has_depth_stencil_attachment = subpass.has_depth_stencil_attachment();
let num_color_attachments = subpass.num_color_attachments();
let attached_layers_ranges = &render_pass_state.attached_layers_ranges;
let attachments: SmallVec<[ClearAttachment; 3]> = attachments.into_iter().collect();
let rects: SmallVec<[ClearRect; 4]> = rects.into_iter().collect();
for attachment in &attachments {
match attachment {
ClearAttachment::Color(_, color_attachment) => {
if *color_attachment >= num_color_attachments as u32 {
return Err(ClearAttachmentsError::InvalidColorAttachmentIndex(
*color_attachment,
));
}
}
ClearAttachment::Depth(_)
| ClearAttachment::Stencil(_)
| ClearAttachment::DepthStencil(_) => {
if !has_depth_stencil_attachment {
return Err(ClearAttachmentsError::DepthStencilAttachmentNotPresent);
}
}
}
}
for rect in &rects {
if rect.rect_extent[0] == 0 || rect.rect_extent[1] == 0 {
return Err(ClearAttachmentsError::ZeroRectExtent);
}
if rect.rect_offset[0] + rect.rect_extent[0] > render_pass_state.extent[0]
|| rect.rect_offset[1] + rect.rect_extent[1] > render_pass_state.extent[1]
{
return Err(ClearAttachmentsError::RectOutOfBounds);
}
if rect.layer_count == 0 {
return Err(ClearAttachmentsError::ZeroLayerCount);
}
if subpass.render_pass().views_used() != 0
&& (rect.base_array_layer != 0 || rect.layer_count != 1)
{
return Err(ClearAttachmentsError::InvalidMultiviewLayerRange);
}
            // Make sure the rect's layers are inside the attached layers ranges.
for range in attached_layers_ranges {
if rect.base_array_layer < range.start
|| rect.base_array_layer + rect.layer_count > range.end
{
return Err(ClearAttachmentsError::LayersOutOfBounds);
}
}
}
unsafe {
self.inner.clear_attachments(attachments, rects);
}
Ok(self)
}
/// Adds a command that clears all the layers and mipmap levels of a color image with a
/// specific value.
///
/// # Panic
///
/// Panics if `color` is not a color value.
///
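    /// For example, clearing to opaque black (a sketch; `image` is assumed to be an
    /// existing `Arc<dyn ImageAccess>` with a floating-point color format):
    ///
    /// ```ignore
    /// builder.clear_color_image(image.clone(), ClearValue::Float([0.0, 0.0, 0.0, 1.0]))?;
    /// ```
    ///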
pub fn clear_color_image(
&mut self,
image: Arc<dyn ImageAccess>,
color: ClearValue,
) -> Result<&mut Self, ClearColorImageError> {
let array_layers = image.dimensions().array_layers();
let mip_levels = image.mip_levels();
self.clear_color_image_dimensions(image, 0, array_layers, 0, mip_levels, color)
}
/// Adds a command that clears a color image with a specific value.
///
/// # Panic
///
/// - Panics if `color` is not a color value.
///
pub fn clear_color_image_dimensions(
&mut self,
image: Arc<dyn ImageAccess>,
base_array_layer: u32,
layer_count: u32,
base_mip_level: u32,
level_count: u32,
color: ClearValue,
) -> Result<&mut Self, ClearColorImageError> {
unsafe {
if !self.queue_family().supports_graphics() && !self.queue_family().supports_compute() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
self.ensure_outside_render_pass()?;
check_clear_color_image(
self.device(),
image.as_ref(),
base_array_layer,
layer_count,
base_mip_level,
level_count,
)?;
match color {
ClearValue::Float(_) | ClearValue::Int(_) | ClearValue::Uint(_) => {}
_ => panic!("The clear color is not a color value"),
};
let region = UnsafeCommandBufferBuilderColorImageClear {
base_mip_level,
level_count,
base_array_layer,
layer_count,
};
// TODO: let choose layout
self.inner.clear_color_image(
image,
ImageLayout::TransferDstOptimal,
color,
iter::once(region),
)?;
Ok(self)
}
}
/// Adds a command that clears all the layers of a depth / stencil image with a
/// specific value.
///
/// # Panic
///
/// Panics if `clear_value` is not a depth / stencil value.
///
pub fn clear_depth_stencil_image(
&mut self,
image: Arc<dyn ImageAccess>,
clear_value: ClearValue,
) -> Result<&mut Self, ClearDepthStencilImageError> {
let layers = image.dimensions().array_layers();
self.clear_depth_stencil_image_dimensions(image, 0, layers, clear_value)
}
/// Adds a command that clears a depth / stencil image with a specific value.
///
/// # Panic
///
/// - Panics if `clear_value` is not a depth / stencil value.
///
pub fn clear_depth_stencil_image_dimensions(
&mut self,
image: Arc<dyn ImageAccess>,
base_array_layer: u32,
layer_count: u32,
clear_value: ClearValue,
) -> Result<&mut Self, ClearDepthStencilImageError> {
unsafe {
if !self.queue_family().supports_graphics() && !self.queue_family().supports_compute() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
self.ensure_outside_render_pass()?;
check_clear_depth_stencil_image(
self.device(),
image.as_ref(),
base_array_layer,
layer_count,
)?;
let (clear_depth, clear_stencil) = match clear_value {
ClearValue::Depth(_) => (true, false),
ClearValue::Stencil(_) => (false, true),
ClearValue::DepthStencil(_) => (true, true),
_ => panic!("The clear value is not a depth / stencil value"),
};
let region = UnsafeCommandBufferBuilderDepthStencilImageClear {
base_array_layer,
layer_count,
clear_depth,
clear_stencil,
};
// TODO: let choose layout
self.inner.clear_depth_stencil_image(
image,
ImageLayout::TransferDstOptimal,
clear_value,
iter::once(region),
)?;
Ok(self)
}
}
/// Adds a command that copies from a buffer to another.
///
/// This command will copy from the source to the destination. If their size is not equal, then
/// the amount of data copied is equal to the smallest of the two.
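    ///
    /// For example (a sketch; both buffers are assumed to share the same element type
    /// and to have the transfer usages enabled):
    ///
    /// ```ignore
    /// builder.copy_buffer(source.clone(), destination.clone())?;
    /// ```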
#[inline]
pub fn copy_buffer<S, D, T>(
&mut self,
source: Arc<S>,
destination: Arc<D>,
) -> Result<&mut Self, CopyBufferError>
where
S: TypedBufferAccess<Content = T> + 'static,
D: TypedBufferAccess<Content = T> + 'static,
T: ?Sized,
{
unsafe {
self.ensure_outside_render_pass()?;
let copy_size = cmp::min(source.size(), destination.size());
check_copy_buffer(
self.device(),
source.as_ref(),
destination.as_ref(),
0,
0,
copy_size,
)?;
self.inner
.copy_buffer(source, destination, [(0, 0, copy_size)])?;
Ok(self)
}
}
    /// Adds a command that copies a range from the source to the destination buffer.
    ///
    /// Returns an error if the range is out of bounds of either buffer.
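    ///
    /// `source_offset`, `destination_offset` and `count` are expressed in *elements*
    /// of `T`, not in bytes. For example, this sketch copies elements `2..5` of
    /// `source` over elements `0..3` of `destination`:
    ///
    /// ```ignore
    /// builder.copy_buffer_dimensions(source.clone(), 2, destination.clone(), 0, 3)?;
    /// ```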
#[inline]
pub fn copy_buffer_dimensions<S, D, T>(
&mut self,
source: Arc<S>,
source_offset: DeviceSize,
destination: Arc<D>,
destination_offset: DeviceSize,
count: DeviceSize,
) -> Result<&mut Self, CopyBufferError>
where
S: TypedBufferAccess<Content = [T]> + 'static,
D: TypedBufferAccess<Content = [T]> + 'static,
{
self.ensure_outside_render_pass()?;
let size = std::mem::size_of::<T>() as DeviceSize;
let source_offset = source_offset * size;
let destination_offset = destination_offset * size;
let copy_size = count * size;
check_copy_buffer(
self.device(),
source.as_ref(),
destination.as_ref(),
source_offset,
destination_offset,
copy_size,
)?;
unsafe {
self.inner.copy_buffer(
source,
destination,
iter::once((source_offset, destination_offset, copy_size)),
)?;
}
Ok(self)
}
/// Adds a command that copies from a buffer to an image.
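    ///
    /// This variant copies into the whole of mip level 0, array layer 0. A minimal sketch,
    /// assuming `staging` is a buffer holding pixel data that matches the image's format and
    /// extent:
    ///
    /// ```ignore
    /// builder.copy_buffer_to_image(staging.clone(), image.clone())?;
    /// ```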
pub fn copy_buffer_to_image(
&mut self,
source: Arc<dyn BufferAccess>,
destination: Arc<dyn ImageAccess>,
) -> Result<&mut Self, CopyBufferImageError> {
self.ensure_outside_render_pass()?;
let dims = destination.dimensions().width_height_depth();
self.copy_buffer_to_image_dimensions(source, destination, [0, 0, 0], dims, 0, 1, 0)
}
/// Adds a command that copies from a buffer to an image.
pub fn copy_buffer_to_image_dimensions(
&mut self,
source: Arc<dyn BufferAccess>,
destination: Arc<dyn ImageAccess>,
offset: [u32; 3],
size: [u32; 3],
base_array_layer: u32,
layer_count: u32,
mip_level: u32,
) -> Result<&mut Self, CopyBufferImageError> {
unsafe {
self.ensure_outside_render_pass()?;
check_copy_buffer_image(
self.device(),
source.as_ref(),
destination.as_ref(),
CheckCopyBufferImageTy::BufferToImage,
offset,
size,
base_array_layer,
layer_count,
mip_level,
)?;
let copy = UnsafeCommandBufferBuilderBufferImageCopy {
buffer_offset: 0,
buffer_row_length: 0,
buffer_image_height: 0,
image_aspect: if destination.format().aspects().color {
ImageAspect::Color
} else {
unimplemented!()
},
image_mip_level: mip_level,
image_base_array_layer: base_array_layer,
image_layer_count: layer_count,
image_offset: [offset[0] as i32, offset[1] as i32, offset[2] as i32],
image_extent: size,
};
self.inner.copy_buffer_to_image(
source,
destination,
ImageLayout::TransferDstOptimal, // TODO: let choose layout
iter::once(copy),
)?;
Ok(self)
}
}
/// Adds a command that copies an image to another.
///
/// Copy operations have several restrictions:
///
/// - Copy operations are only allowed on queue families that support transfer, graphics, or
/// compute operations.
/// - The number of samples in the source and destination images must be equal.
    /// - The size of the uncompressed element format of the source image must be equal to the
    ///   size of the compressed element format of the destination.
/// - If you copy between depth, stencil or depth-stencil images, the format of both images
/// must match exactly.
    /// - For two-dimensional images, the Z coordinate must be 0 for the image offsets and 1 for
    ///   the extent. The same applies to the Y coordinate for one-dimensional images.
/// - For non-array images, the base array layer must be 0 and the number of layers must be 1.
///
/// If `layer_count` is greater than 1, the copy will happen between each individual layer as
/// if they were separate images.
///
/// # Panic
///
/// - Panics if the source or the destination was not created with `device`.
///
pub fn copy_image(
&mut self,
source: Arc<dyn ImageAccess>,
source_offset: [i32; 3],
source_base_array_layer: u32,
source_mip_level: u32,
destination: Arc<dyn ImageAccess>,
destination_offset: [i32; 3],
destination_base_array_layer: u32,
destination_mip_level: u32,
extent: [u32; 3],
layer_count: u32,
) -> Result<&mut Self, CopyImageError> {
unsafe {
self.ensure_outside_render_pass()?;
check_copy_image(
self.device(),
source.as_ref(),
source_offset,
source_base_array_layer,
source_mip_level,
destination.as_ref(),
destination_offset,
destination_base_array_layer,
destination_mip_level,
extent,
layer_count,
)?;
let source_aspects = source.format().aspects();
let destination_aspects = destination.format().aspects();
let copy = UnsafeCommandBufferBuilderImageCopy {
                // TODO: Allow choosing a subset of the image aspects, but note that if color
                // is included, neither depth nor stencil may be.
aspects: ImageAspects {
color: source_aspects.color,
depth: !source_aspects.color
&& source_aspects.depth
&& destination_aspects.depth,
stencil: !source_aspects.color
&& source_aspects.stencil
&& destination_aspects.stencil,
..ImageAspects::none()
},
source_mip_level,
destination_mip_level,
source_base_array_layer,
destination_base_array_layer,
layer_count,
source_offset,
destination_offset,
extent,
};
// TODO: Allow choosing layouts, but note that only Transfer*Optimal and General are
// valid.
if source.conflict_key() == destination.conflict_key() {
// since we are copying from the same image, we must use the same layout
self.inner.copy_image(
source,
ImageLayout::General,
destination,
ImageLayout::General,
iter::once(copy),
)?;
} else {
self.inner.copy_image(
source,
ImageLayout::TransferSrcOptimal,
destination,
ImageLayout::TransferDstOptimal,
iter::once(copy),
)?;
}
Ok(self)
}
}
/// Adds a command that copies from an image to a buffer.
    // The data layout of the image on the GPU is opaque; it is none of our business how the GPU
    // stores the image. This does not matter, since the act of copying the image into a buffer
    // converts it to linear form.
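    ///
    /// A minimal sketch for reading an image back to the host, assuming `readback` is a
    /// host-visible buffer large enough to hold the image contents:
    ///
    /// ```ignore
    /// builder.copy_image_to_buffer(image.clone(), readback.clone())?;
    /// ```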
pub fn copy_image_to_buffer(
&mut self,
source: Arc<dyn ImageAccess>,
destination: Arc<dyn BufferAccess>,
) -> Result<&mut Self, CopyBufferImageError> {
self.ensure_outside_render_pass()?;
let dims = source.dimensions().width_height_depth();
self.copy_image_to_buffer_dimensions(source, destination, [0, 0, 0], dims, 0, 1, 0)
}
/// Adds a command that copies from an image to a buffer.
pub fn copy_image_to_buffer_dimensions(
&mut self,
source: Arc<dyn ImageAccess>,
destination: Arc<dyn BufferAccess>,
offset: [u32; 3],
size: [u32; 3],
base_array_layer: u32,
layer_count: u32,
mip_level: u32,
) -> Result<&mut Self, CopyBufferImageError> {
unsafe {
self.ensure_outside_render_pass()?;
check_copy_buffer_image(
self.device(),
destination.as_ref(),
source.as_ref(),
CheckCopyBufferImageTy::ImageToBuffer,
offset,
size,
base_array_layer,
layer_count,
mip_level,
)?;
let source_aspects = source.format().aspects();
let copy = UnsafeCommandBufferBuilderBufferImageCopy {
buffer_offset: 0,
buffer_row_length: 0,
buffer_image_height: 0,
// TODO: Allow the user to choose aspect
image_aspect: if source_aspects.color {
ImageAspect::Color
} else if source_aspects.depth {
ImageAspect::Depth
} else if source_aspects.stencil {
ImageAspect::Stencil
} else {
unimplemented!()
},
image_mip_level: mip_level,
image_base_array_layer: base_array_layer,
image_layer_count: layer_count,
image_offset: [offset[0] as i32, offset[1] as i32, offset[2] as i32],
image_extent: size,
};
self.inner.copy_image_to_buffer(
source,
ImageLayout::TransferSrcOptimal,
destination, // TODO: let choose layout
iter::once(copy),
)?;
Ok(self)
}
}
/// Open a command buffer debug label region.
///
/// Note: you need to enable `VK_EXT_debug_utils` extension when creating an instance.
#[inline]
pub fn debug_marker_begin(
&mut self,
name: &'static CStr,
color: [f32; 4],
) -> Result<&mut Self, DebugMarkerError> {
        if !self.queue_family().supports_graphics() && !self.queue_family().supports_compute() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
check_debug_marker_color(color)?;
unsafe {
self.inner.debug_marker_begin(name.into(), color);
}
Ok(self)
}
/// Close a command buffer label region.
///
/// Note: you need to open a command buffer label region first with `debug_marker_begin`.
/// Note: you need to enable `VK_EXT_debug_utils` extension when creating an instance.
#[inline]
pub fn debug_marker_end(&mut self) -> Result<&mut Self, DebugMarkerError> {
        if !self.queue_family().supports_graphics() && !self.queue_family().supports_compute() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
// TODO: validate that debug_marker_begin with same name was sent earlier
unsafe {
self.inner.debug_marker_end();
}
Ok(self)
}
/// Insert a label into a command buffer.
///
/// Note: you need to enable `VK_EXT_debug_utils` extension when creating an instance.
#[inline]
pub fn debug_marker_insert(
&mut self,
name: &'static CStr,
color: [f32; 4],
) -> Result<&mut Self, DebugMarkerError> {
        if !self.queue_family().supports_graphics() && !self.queue_family().supports_compute() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
check_debug_marker_color(color)?;
unsafe {
self.inner.debug_marker_insert(name.into(), color);
}
Ok(self)
}
/// Perform a single compute operation using a compute pipeline.
///
/// A compute pipeline must have been bound using
/// [`bind_pipeline_compute`](Self::bind_pipeline_compute). Any resources used by the compute
/// pipeline, such as descriptor sets, must have been set beforehand.
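    ///
    /// A minimal sketch that dispatches one workgroup per 64 elements of a one-dimensional
    /// workload; `element_count` and the local size of 64 are assumptions about the shader:
    ///
    /// ```ignore
    /// let group_counts = [(element_count + 63) / 64, 1, 1];
    /// builder.dispatch(group_counts)?;
    /// ```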
#[inline]
pub fn dispatch(&mut self, group_counts: [u32; 3]) -> Result<&mut Self, DispatchError> {
if !self.queue_family().supports_compute() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
let pipeline = check_pipeline_compute(self.state())?;
self.ensure_outside_render_pass()?;
check_descriptor_sets_validity(self.state(), pipeline, pipeline.descriptor_requirements())?;
check_push_constants_validity(self.state(), pipeline.layout())?;
check_dispatch(self.device(), group_counts)?;
unsafe {
self.inner.dispatch(group_counts);
}
Ok(self)
}
/// Perform multiple compute operations using a compute pipeline. One dispatch is performed for
/// each [`DispatchIndirectCommand`] struct in `indirect_buffer`.
///
/// A compute pipeline must have been bound using
/// [`bind_pipeline_compute`](Self::bind_pipeline_compute). Any resources used by the compute
/// pipeline, such as descriptor sets, must have been set beforehand.
#[inline]
pub fn dispatch_indirect<Inb>(
&mut self,
indirect_buffer: Arc<Inb>,
) -> Result<&mut Self, DispatchIndirectError>
where
Inb: TypedBufferAccess<Content = [DispatchIndirectCommand]> + 'static,
{
if !self.queue_family().supports_compute() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
let pipeline = check_pipeline_compute(self.state())?;
self.ensure_outside_render_pass()?;
check_descriptor_sets_validity(self.state(), pipeline, pipeline.descriptor_requirements())?;
check_push_constants_validity(self.state(), pipeline.layout())?;
check_indirect_buffer(self.device(), indirect_buffer.as_ref())?;
unsafe {
self.inner.dispatch_indirect(indirect_buffer)?;
}
Ok(self)
}
/// Perform a single draw operation using a graphics pipeline.
///
/// The parameters specify the first vertex and the number of vertices to draw, and the first
/// instance and number of instances. For non-instanced drawing, specify `instance_count` as 1
/// and `first_instance` as 0.
///
/// A graphics pipeline must have been bound using
/// [`bind_pipeline_graphics`](Self::bind_pipeline_graphics). Any resources used by the graphics
/// pipeline, such as descriptor sets, vertex buffers and dynamic state, must have been set
/// beforehand. If the bound graphics pipeline uses vertex buffers, then the provided vertex and
/// instance ranges must be in range of the bound vertex buffers.
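    ///
    /// A minimal sketch of a non-instanced draw over a bound vertex buffer holding
    /// `vertex_count` vertices:
    ///
    /// ```ignore
    /// builder.draw(vertex_count, 1, 0, 0)?;
    /// ```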
#[inline]
pub fn draw(
&mut self,
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
) -> Result<&mut Self, DrawError> {
let pipeline = check_pipeline_graphics(self.state())?;
self.ensure_inside_render_pass_inline(pipeline)?;
check_dynamic_state_validity(self.state(), pipeline)?;
check_descriptor_sets_validity(self.state(), pipeline, pipeline.descriptor_requirements())?;
check_push_constants_validity(self.state(), pipeline.layout())?;
check_vertex_buffers(
self.state(),
pipeline,
Some((first_vertex, vertex_count)),
Some((first_instance, instance_count)),
)?;
unsafe {
self.inner
.draw(vertex_count, instance_count, first_vertex, first_instance);
}
Ok(self)
}
/// Perform multiple draw operations using a graphics pipeline.
///
/// One draw is performed for each [`DrawIndirectCommand`] struct in `indirect_buffer`.
/// The maximum number of draw commands in the buffer is limited by the
/// [`max_draw_indirect_count`](crate::device::Properties::max_draw_indirect_count) limit.
/// This limit is 1 unless the
/// [`multi_draw_indirect`](crate::device::Features::multi_draw_indirect) feature has been
/// enabled.
///
/// A graphics pipeline must have been bound using
/// [`bind_pipeline_graphics`](Self::bind_pipeline_graphics). Any resources used by the graphics
/// pipeline, such as descriptor sets, vertex buffers and dynamic state, must have been set
/// beforehand. If the bound graphics pipeline uses vertex buffers, then the vertex and instance
/// ranges of each `DrawIndirectCommand` in the indirect buffer must be in range of the bound
/// vertex buffers.
#[inline]
pub fn draw_indirect<Inb>(
&mut self,
indirect_buffer: Arc<Inb>,
) -> Result<&mut Self, DrawIndirectError>
where
Inb: TypedBufferAccess<Content = [DrawIndirectCommand]> + Send + Sync + 'static,
{
let pipeline = check_pipeline_graphics(self.state())?;
self.ensure_inside_render_pass_inline(pipeline)?;
check_dynamic_state_validity(self.state(), pipeline)?;
check_descriptor_sets_validity(self.state(), pipeline, pipeline.descriptor_requirements())?;
check_push_constants_validity(self.state(), pipeline.layout())?;
check_vertex_buffers(self.state(), pipeline, None, None)?;
check_indirect_buffer(self.device(), indirect_buffer.as_ref())?;
let draw_count = indirect_buffer.len() as u32;
let limit = self
.device()
.physical_device()
.properties()
.max_draw_indirect_count;
if draw_count > limit {
return Err(
CheckIndirectBufferError::MaxDrawIndirectCountLimitExceeded {
limit,
requested: draw_count,
}
.into(),
);
}
unsafe {
self.inner.draw_indirect(
indirect_buffer,
draw_count,
size_of::<DrawIndirectCommand>() as u32,
)?;
}
Ok(self)
}
/// Perform a single draw operation using a graphics pipeline, using an index buffer.
///
/// The parameters specify the first index and the number of indices in the index buffer that
/// should be used, and the first instance and number of instances. For non-instanced drawing,
/// specify `instance_count` as 1 and `first_instance` as 0. The `vertex_offset` is a constant
/// value that should be added to each index in the index buffer to produce the final vertex
/// number to be used.
///
/// An index buffer must have been bound using
/// [`bind_index_buffer`](Self::bind_index_buffer), and the provided index range must be in
/// range of the bound index buffer.
///
/// A graphics pipeline must have been bound using
/// [`bind_pipeline_graphics`](Self::bind_pipeline_graphics). Any resources used by the graphics
/// pipeline, such as descriptor sets, vertex buffers and dynamic state, must have been set
/// beforehand. If the bound graphics pipeline uses vertex buffers, then the provided instance
/// range must be in range of the bound vertex buffers. The vertex indices in the index buffer
/// must be in range of the bound vertex buffers.
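    ///
    /// A minimal sketch of a non-instanced indexed draw over a bound index buffer holding
    /// `index_count` indices:
    ///
    /// ```ignore
    /// builder.draw_indexed(index_count, 1, 0, 0, 0)?;
    /// ```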
#[inline]
pub fn draw_indexed(
&mut self,
index_count: u32,
instance_count: u32,
first_index: u32,
vertex_offset: i32,
first_instance: u32,
) -> Result<&mut Self, DrawIndexedError> {
// TODO: how to handle an index out of range of the vertex buffers?
let pipeline = check_pipeline_graphics(self.state())?;
self.ensure_inside_render_pass_inline(pipeline)?;
check_dynamic_state_validity(self.state(), pipeline)?;
check_descriptor_sets_validity(self.state(), pipeline, pipeline.descriptor_requirements())?;
check_push_constants_validity(self.state(), pipeline.layout())?;
check_vertex_buffers(
self.state(),
pipeline,
None,
Some((first_instance, instance_count)),
)?;
check_index_buffer(self.state(), Some((first_index, index_count)))?;
unsafe {
self.inner.draw_indexed(
index_count,
instance_count,
first_index,
vertex_offset,
first_instance,
);
}
Ok(self)
}
/// Perform multiple draw operations using a graphics pipeline, using an index buffer.
///
/// One draw is performed for each [`DrawIndexedIndirectCommand`] struct in `indirect_buffer`.
/// The maximum number of draw commands in the buffer is limited by the
/// [`max_draw_indirect_count`](crate::device::Properties::max_draw_indirect_count) limit.
/// This limit is 1 unless the
/// [`multi_draw_indirect`](crate::device::Features::multi_draw_indirect) feature has been
/// enabled.
///
/// An index buffer must have been bound using
/// [`bind_index_buffer`](Self::bind_index_buffer), and the index ranges of each
/// `DrawIndexedIndirectCommand` in the indirect buffer must be in range of the bound index
/// buffer.
///
/// A graphics pipeline must have been bound using
/// [`bind_pipeline_graphics`](Self::bind_pipeline_graphics). Any resources used by the graphics
/// pipeline, such as descriptor sets, vertex buffers and dynamic state, must have been set
/// beforehand. If the bound graphics pipeline uses vertex buffers, then the instance ranges of
/// each `DrawIndexedIndirectCommand` in the indirect buffer must be in range of the bound
/// vertex buffers.
#[inline]
pub fn draw_indexed_indirect<Inb>(
&mut self,
indirect_buffer: Arc<Inb>,
) -> Result<&mut Self, DrawIndexedIndirectError>
where
Inb: TypedBufferAccess<Content = [DrawIndexedIndirectCommand]> + 'static,
{
let pipeline = check_pipeline_graphics(self.state())?;
self.ensure_inside_render_pass_inline(pipeline)?;
check_dynamic_state_validity(self.state(), pipeline)?;
check_descriptor_sets_validity(self.state(), pipeline, pipeline.descriptor_requirements())?;
check_push_constants_validity(self.state(), pipeline.layout())?;
check_vertex_buffers(self.state(), pipeline, None, None)?;
check_index_buffer(self.state(), None)?;
check_indirect_buffer(self.device(), indirect_buffer.as_ref())?;
let draw_count = indirect_buffer.len() as u32;
let limit = self
.device()
.physical_device()
.properties()
.max_draw_indirect_count;
if draw_count > limit {
return Err(
CheckIndirectBufferError::MaxDrawIndirectCountLimitExceeded {
limit,
requested: draw_count,
}
.into(),
);
}
unsafe {
self.inner.draw_indexed_indirect(
indirect_buffer,
draw_count,
size_of::<DrawIndexedIndirectCommand>() as u32,
)?;
}
Ok(self)
}
/// Adds a command that writes the content of a buffer.
///
/// This function is similar to the `memset` function in C. The `data` parameter is a number
/// that will be repeatedly written through the entire buffer.
///
/// > **Note**: This function is technically safe because buffers can only contain integers or
/// > floating point numbers, which are always valid whatever their memory representation is.
    /// > But unless your buffer actually contains only 32-bit integers, you are encouraged to use
/// > this function only for zeroing the content of a buffer by passing `0` for the data.
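    ///
    /// A minimal sketch that zeroes an entire buffer:
    ///
    /// ```ignore
    /// builder.fill_buffer(buffer.clone(), 0)?;
    /// ```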
// TODO: not safe because of signalling NaNs
#[inline]
pub fn fill_buffer(
&mut self,
buffer: Arc<dyn BufferAccess>,
data: u32,
) -> Result<&mut Self, FillBufferError> {
unsafe {
self.ensure_outside_render_pass()?;
check_fill_buffer(self.device(), buffer.as_ref())?;
self.inner.fill_buffer(buffer, data);
Ok(self)
}
}
/// Sets push constants for future dispatch or draw calls.
///
/// # Panics
///
/// - Panics if `offset` is not a multiple of 4.
/// - Panics if the size of `push_constants` is not a multiple of 4.
/// - Panics if any of the bytes in `push_constants` do not fall within any of the pipeline
/// layout's push constant ranges.
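    ///
    /// A minimal sketch, assuming `PushData` is a `#[repr(C)]` struct matching the shader's
    /// push constant block and `pipeline` is the currently bound pipeline:
    ///
    /// ```ignore
    /// #[repr(C)]
    /// struct PushData { scale: f32 }
    ///
    /// builder.push_constants(pipeline.layout().clone(), 0, PushData { scale: 2.0 });
    /// ```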
pub fn push_constants<Pc>(
&mut self,
pipeline_layout: Arc<PipelineLayout>,
offset: u32,
push_constants: Pc,
) -> &mut Self {
let size = size_of::<Pc>() as u32;
if size == 0 {
return self;
}
assert!(offset % 4 == 0, "the offset must be a multiple of 4");
assert!(
size % 4 == 0,
"the size of push_constants must be a multiple of 4"
);
        // SAFETY: `&push_constants` is a valid pointer, and the size of the struct is `size`;
        // thus, getting a slice of the whole struct is safe if it's not modified.
let whole_data = unsafe {
slice::from_raw_parts(&push_constants as *const Pc as *const u8, size as usize)
};
let mut current_offset = offset;
let mut remaining_size = size;
for range in pipeline_layout
.push_constant_ranges_disjoint()
.iter()
.skip_while(|range| range.offset + range.size <= offset)
{
            // If there is a gap between ranges but the passed push_constants contains some
            // bytes in this gap, exit the loop and report an error.
if range.offset > current_offset {
break;
}
// push the minimum of the whole remaining data, and the part until the end of this range
let push_size = remaining_size.min(range.offset + range.size - current_offset);
let data_offset = (current_offset - offset) as usize;
unsafe {
self.inner.push_constants::<[u8]>(
pipeline_layout.clone(),
range.stages,
current_offset,
push_size,
&whole_data[data_offset..(data_offset + push_size as usize)],
);
}
current_offset += push_size;
remaining_size -= push_size;
if remaining_size == 0 {
break;
}
}
assert!(
remaining_size == 0,
"There exists data at offset {} that is not included in any range",
current_offset,
);
self
}
/// Pushes descriptor data directly into the command buffer for future dispatch or draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support `pipeline_bind_point`.
/// - Panics if the
/// [`khr_push_descriptor`](crate::device::DeviceExtensions::khr_push_descriptor)
/// extension is not enabled on the device.
/// - Panics if `set_num` is not less than the number of sets in `pipeline_layout`.
/// - Panics if an element of `descriptor_writes` is not compatible with `pipeline_layout`.
pub fn push_descriptor_set(
&mut self,
pipeline_bind_point: PipelineBindPoint,
pipeline_layout: Arc<PipelineLayout>,
set_num: u32,
descriptor_writes: impl IntoIterator<Item = WriteDescriptorSet>,
) -> &mut Self {
match pipeline_bind_point {
PipelineBindPoint::Compute => assert!(
self.queue_family().supports_compute(),
"the queue family of the command buffer must support compute operations"
),
PipelineBindPoint::Graphics => assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
),
}
assert!(
self.device().enabled_extensions().khr_push_descriptor,
"the khr_push_descriptor extension must be enabled on the device"
);
assert!(
            (set_num as usize) < pipeline_layout.set_layouts().len(),
"the descriptor set slot being bound must be less than the number of sets in pipeline_layout"
);
let descriptor_writes: SmallVec<[_; 8]> = descriptor_writes.into_iter().collect();
let descriptor_set_layout = &pipeline_layout.set_layouts()[set_num as usize];
for write in &descriptor_writes {
check_descriptor_write(write, descriptor_set_layout, 0).unwrap();
}
unsafe {
self.inner.push_descriptor_set(
pipeline_bind_point,
pipeline_layout,
set_num,
descriptor_writes,
);
}
self
}
// Helper function for dynamic state setting.
fn has_fixed_state(&self, state: DynamicState) -> bool {
self.state()
.pipeline_graphics()
.map(|pipeline| matches!(pipeline.dynamic_state(state), Some(false)))
.unwrap_or(false)
}
/// Sets the dynamic blend constants for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
pub fn set_blend_constants(&mut self, constants: [f32; 4]) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::BlendConstants),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_blend_constants(constants);
}
self
}
/// Sets whether dynamic color writes should be enabled for each attachment in the
/// framebuffer.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the [`color_write_enable`](crate::device::Features::color_write_enable)
/// feature is not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
    /// - If there is a graphics pipeline with color blend state bound, `enables.len()` must equal
    ///   [`attachments.len()`](crate::pipeline::graphics::color_blend::ColorBlendState::attachments).
#[inline]
pub fn set_color_write_enable<I>(&mut self, enables: I) -> &mut Self
where
I: IntoIterator<Item = bool>,
I::IntoIter: ExactSizeIterator,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().enabled_features().color_write_enable,
"the color_write_enable feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::ColorWriteEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
let enables = enables.into_iter();
if let Some(color_blend_state) = self
.state()
.pipeline_graphics()
.and_then(|pipeline| pipeline.color_blend_state())
{
assert!(
enables.len() == color_blend_state.attachments.len(),
"if there is a graphics pipeline with color blend state bound, enables.len() must equal attachments.len()"
);
}
unsafe {
self.inner.set_color_write_enable(enables);
}
self
}
/// Sets the dynamic cull mode for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_cull_mode(&mut self, cull_mode: CullMode) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::CullMode),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_cull_mode(cull_mode);
}
self
}
/// Sets the dynamic depth bias values for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - If the [`depth_bias_clamp`](crate::device::Features::depth_bias_clamp)
/// feature is not enabled on the device, panics if `clamp` is not 0.0.
#[inline]
pub fn set_depth_bias(
&mut self,
constant_factor: f32,
clamp: f32,
slope_factor: f32,
) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::DepthBias),
"the currently bound graphics pipeline must not contain this state internally"
);
assert!(
clamp == 0.0 || self.device().enabled_features().depth_bias_clamp,
"if the depth_bias_clamp feature is not enabled, clamp must be 0.0"
);
unsafe {
self.inner
.set_depth_bias(constant_factor, clamp, slope_factor);
}
self
}
/// Sets whether dynamic depth bias is enabled for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state2`](crate::device::Features::extended_dynamic_state2) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_depth_bias_enable(&mut self, enable: bool) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state2,
"the extended_dynamic_state2 feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::DepthBiasEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_depth_bias_enable(enable);
}
self
}
/// Sets the dynamic depth bounds for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - If the
/// [`ext_depth_range_unrestricted`](crate::device::DeviceExtensions::ext_depth_range_unrestricted)
/// device extension is not enabled, panics if `min` or `max` is not between 0.0 and 1.0 inclusive.
pub fn set_depth_bounds(&mut self, min: f32, max: f32) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::DepthBounds),
"the currently bound graphics pipeline must not contain this state internally"
);
if !self
.device()
.enabled_extensions()
.ext_depth_range_unrestricted
{
assert!(
min >= 0.0 && min <= 1.0 && max >= 0.0 && max <= 1.0,
"if the ext_depth_range_unrestricted device extension is not enabled, depth bounds values must be between 0.0 and 1.0"
);
}
unsafe {
self.inner.set_depth_bounds(min, max);
}
self
}
/// Sets whether dynamic depth bounds testing is enabled for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_depth_bounds_test_enable(&mut self, enable: bool) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::DepthBoundsTestEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_depth_bounds_test_enable(enable);
}
self
}
/// Sets the dynamic depth compare op for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_depth_compare_op(&mut self, compare_op: CompareOp) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::DepthCompareOp),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_depth_compare_op(compare_op);
}
self
}
/// Sets whether dynamic depth testing is enabled for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_depth_test_enable(&mut self, enable: bool) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::DepthTestEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_depth_test_enable(enable);
}
self
}
/// Sets whether dynamic depth write is enabled for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_depth_write_enable(&mut self, enable: bool) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::DepthWriteEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_depth_write_enable(enable);
}
self
}
/// Sets the dynamic discard rectangles for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the
/// [`ext_discard_rectangles`](crate::device::DeviceExtensions::ext_discard_rectangles)
/// extension is not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - Panics if the highest discard rectangle slot being set is greater than the
/// [`max_discard_rectangles`](crate::device::Properties::max_discard_rectangles) device
/// property.
pub fn set_discard_rectangle<I>(&mut self, first_rectangle: u32, rectangles: I) -> &mut Self
where
I: IntoIterator<Item = Scissor>,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().enabled_extensions().ext_discard_rectangles,
"the ext_discard_rectangles extension must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::DiscardRectangle),
"the currently bound graphics pipeline must not contain this state internally"
);
let rectangles: SmallVec<[Scissor; 2]> = rectangles.into_iter().collect();
assert!(
first_rectangle + rectangles.len() as u32 <= self.device().physical_device().properties().max_discard_rectangles.unwrap(),
"the highest discard rectangle slot being set must not be higher than the max_discard_rectangles device property"
);
// TODO: VUID-vkCmdSetDiscardRectangleEXT-viewportScissor2D-04788
// If this command is recorded in a secondary command buffer with
// VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled, then this
// function must not be called
unsafe {
self.inner
.set_discard_rectangle(first_rectangle, rectangles);
}
self
}
/// Sets the dynamic front face for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_front_face(&mut self, face: FrontFace) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::FrontFace),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_front_face(face);
}
self
}
/// Sets the dynamic line stipple values for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the [`ext_line_rasterization`](crate::device::DeviceExtensions::ext_line_rasterization)
/// extension is not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - Panics if `factor` is not between 1 and 256 inclusive.
#[inline]
pub fn set_line_stipple(&mut self, factor: u32, pattern: u16) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().enabled_extensions().ext_line_rasterization,
"the ext_line_rasterization extension must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::LineStipple),
"the currently bound graphics pipeline must not contain this state internally"
);
assert!(
factor >= 1 && factor <= 256,
"factor must be between 1 and 256 inclusive"
);
unsafe {
self.inner.set_line_stipple(factor, pattern);
}
self
}
/// Sets the dynamic line width for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - If the [`wide_lines`](crate::device::Features::wide_lines) feature is not enabled, panics
/// if `line_width` is not 1.0.
pub fn set_line_width(&mut self, line_width: f32) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::LineWidth),
"the currently bound graphics pipeline must not contain this state internally"
);
if !self.device().enabled_features().wide_lines {
assert!(
line_width == 1.0,
"if the wide_line features is not enabled, line width must be 1.0"
);
}
unsafe {
self.inner.set_line_width(line_width);
}
self
}
/// Sets the dynamic logic op for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the
/// [`extended_dynamic_state2_logic_op`](crate::device::Features::extended_dynamic_state2_logic_op)
/// feature is not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_logic_op(&mut self, logic_op: LogicOp) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device()
.enabled_features()
.extended_dynamic_state2_logic_op,
"the extended_dynamic_state2_logic_op feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::LogicOp),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_logic_op(logic_op);
}
self
}
/// Sets the dynamic number of patch control points for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the
/// [`extended_dynamic_state2_patch_control_points`](crate::device::Features::extended_dynamic_state2_patch_control_points)
/// feature is not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - Panics if `num` is 0.
/// - Panics if `num` is greater than the
/// [`max_tessellation_patch_size`](crate::device::Properties::max_tessellation_patch_size)
/// property of the device.
#[inline]
pub fn set_patch_control_points(&mut self, num: u32) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().enabled_features().extended_dynamic_state2_patch_control_points,
"the extended_dynamic_state2_patch_control_points feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::PatchControlPoints),
"the currently bound graphics pipeline must not contain this state internally"
);
assert!(num > 0, "num must be greater than 0");
assert!(
num <= self
.device()
.physical_device()
.properties()
.max_tessellation_patch_size,
"num must be less than or equal to max_tessellation_patch_size"
);
unsafe {
self.inner.set_patch_control_points(num);
}
self
}
/// Sets whether dynamic primitive restart is enabled for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state2`](crate::device::Features::extended_dynamic_state2) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_primitive_restart_enable(&mut self, enable: bool) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state2,
"the extended_dynamic_state2 feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::PrimitiveRestartEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_primitive_restart_enable(enable);
}
self
}
/// Sets the dynamic primitive topology for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - If the [`geometry_shader`](crate::device::Features::geometry_shader) feature is not
/// enabled, panics if `topology` is a `WithAdjacency` topology.
/// - If the [`tessellation_shader`](crate::device::Features::tessellation_shader) feature is
/// not enabled, panics if `topology` is `PatchList`.
#[inline]
pub fn set_primitive_topology(&mut self, topology: PrimitiveTopology) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::PrimitiveTopology),
"the currently bound graphics pipeline must not contain this state internally"
);
        if !self.device().enabled_features().geometry_shader {
            assert!(
                !matches!(
                    topology,
                    PrimitiveTopology::LineListWithAdjacency
                        | PrimitiveTopology::LineStripWithAdjacency
                        | PrimitiveTopology::TriangleListWithAdjacency
                        | PrimitiveTopology::TriangleStripWithAdjacency
                ),
                "if the geometry_shader feature is not enabled, topology must not be a WithAdjacency topology"
            );
        }
if !self.device().enabled_features().tessellation_shader {
assert!(
!matches!(topology, PrimitiveTopology::PatchList),
"if the tessellation_shader feature is not enabled, topology must not be PatchList"
);
}
unsafe {
self.inner.set_primitive_topology(topology);
}
self
}
/// Sets whether dynamic rasterizer discard is enabled for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state2`](crate::device::Features::extended_dynamic_state2) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_rasterizer_discard_enable(&mut self, enable: bool) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state2,
"the extended_dynamic_state2 feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::RasterizerDiscardEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_rasterizer_discard_enable(enable);
}
self
}
/// Sets the dynamic scissors for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - Panics if the highest scissor slot being set is greater than the
/// [`max_viewports`](crate::device::Properties::max_viewports) device property.
/// - If the [`multi_viewport`](crate::device::Features::multi_viewport) feature is not enabled,
/// panics if `first_scissor` is not 0, or if more than 1 scissor is provided.
pub fn set_scissor<I>(&mut self, first_scissor: u32, scissors: I) -> &mut Self
where
I: IntoIterator<Item = Scissor>,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::Scissor),
"the currently bound graphics pipeline must not contain this state internally"
);
let scissors: SmallVec<[Scissor; 2]> = scissors.into_iter().collect();
assert!(
first_scissor + scissors.len() as u32 <= self.device().physical_device().properties().max_viewports,
"the highest scissor slot being set must not be higher than the max_viewports device property"
);
if !self.device().enabled_features().multi_viewport {
assert!(
first_scissor == 0,
"if the multi_viewport feature is not enabled, first_scissor must be 0"
);
assert!(
scissors.len() <= 1,
"if the multi_viewport feature is not enabled, no more than 1 scissor must be provided"
);
}
// TODO:
// If this command is recorded in a secondary command buffer with
// VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled, then this
// function must not be called
unsafe {
self.inner.set_scissor(first_scissor, scissors);
}
self
}
/// Sets the dynamic scissors with count for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - Panics if the highest scissor slot being set is greater than the
/// [`max_viewports`](crate::device::Properties::max_viewports) device property.
/// - If the [`multi_viewport`](crate::device::Features::multi_viewport) feature is not enabled,
/// panics if more than 1 scissor is provided.
#[inline]
pub fn set_scissor_with_count<I>(&mut self, scissors: I) -> &mut Self
where
I: IntoIterator<Item = Scissor>,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::ScissorWithCount),
"the currently bound graphics pipeline must not contain this state internally"
);
let scissors: SmallVec<[Scissor; 2]> = scissors.into_iter().collect();
assert!(
scissors.len() as u32 <= self.device().physical_device().properties().max_viewports,
"the highest scissor slot being set must not be higher than the max_viewports device property"
);
if !self.device().enabled_features().multi_viewport {
assert!(
scissors.len() <= 1,
"if the multi_viewport feature is not enabled, no more than 1 scissor must be provided"
);
}
// TODO: VUID-vkCmdSetScissorWithCountEXT-commandBuffer-04820
// commandBuffer must not have
// VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled
unsafe {
self.inner.set_scissor_with_count(scissors);
}
self
}
/// Sets the dynamic stencil compare mask on one or both faces for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
pub fn set_stencil_compare_mask(
&mut self,
faces: StencilFaces,
compare_mask: u32,
) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::StencilCompareMask),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_stencil_compare_mask(faces, compare_mask);
}
self
}
/// Sets the dynamic stencil ops on one or both faces for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_stencil_op(
&mut self,
faces: StencilFaces,
fail_op: StencilOp,
pass_op: StencilOp,
depth_fail_op: StencilOp,
compare_op: CompareOp,
) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::StencilOp),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner
.set_stencil_op(faces, fail_op, pass_op, depth_fail_op, compare_op);
}
self
}
/// Sets the dynamic stencil reference on one or both faces for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
pub fn set_stencil_reference(&mut self, faces: StencilFaces, reference: u32) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::StencilReference),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_stencil_reference(faces, reference);
}
self
}
/// Sets whether dynamic stencil testing is enabled for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
#[inline]
pub fn set_stencil_test_enable(&mut self, enable: bool) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::StencilTestEnable),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_stencil_test_enable(enable);
}
self
}
/// Sets the dynamic stencil write mask on one or both faces for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
pub fn set_stencil_write_mask(&mut self, faces: StencilFaces, write_mask: u32) -> &mut Self {
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::StencilWriteMask),
"the currently bound graphics pipeline must not contain this state internally"
);
unsafe {
self.inner.set_stencil_write_mask(faces, write_mask);
}
self
}
/// Sets the dynamic viewports for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - Panics if the highest viewport slot being set is greater than the
/// [`max_viewports`](crate::device::Properties::max_viewports) device property.
/// - If the [`multi_viewport`](crate::device::Features::multi_viewport) feature is not enabled,
/// panics if `first_viewport` is not 0, or if more than 1 viewport is provided.
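    ///
    /// A minimal sketch that covers a 1280x720 framebuffer with a single full-size viewport;
    /// the `Viewport` field layout shown here is an assumption of this sketch:
    ///
    /// ```ignore
    /// builder.set_viewport(0, [Viewport {
    ///     origin: [0.0, 0.0],
    ///     dimensions: [1280.0, 720.0],
    ///     depth_range: 0.0..1.0,
    /// }]);
    /// ```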
pub fn set_viewport<I>(&mut self, first_viewport: u32, viewports: I) -> &mut Self
where
I: IntoIterator<Item = Viewport>,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
!self.has_fixed_state(DynamicState::Viewport),
"the currently bound graphics pipeline must not contain this state internally"
);
let viewports: SmallVec<[Viewport; 2]> = viewports.into_iter().collect();
assert!(
first_viewport + viewports.len() as u32 <= self.device().physical_device().properties().max_viewports,
"the highest viewport slot being set must not be higher than the max_viewports device property"
);
if !self.device().enabled_features().multi_viewport {
assert!(
first_viewport == 0,
"if the multi_viewport feature is not enabled, first_viewport must be 0"
);
assert!(
viewports.len() <= 1,
"if the multi_viewport feature is not enabled, no more than 1 viewport must be provided"
);
}
// TODO:
// commandBuffer must not have
// VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled
unsafe {
self.inner.set_viewport(first_viewport, viewports);
}
self
}
/// Sets the dynamic viewports with count for future draw calls.
///
/// # Panics
///
/// - Panics if the queue family of the command buffer does not support graphics operations.
/// - Panics if the device API version is less than 1.3 and the
/// [`extended_dynamic_state`](crate::device::Features::extended_dynamic_state) feature is
/// not enabled on the device.
/// - Panics if the currently bound graphics pipeline already contains this state internally.
/// - Panics if the highest viewport slot being set is greater than the
/// [`max_viewports`](crate::device::Properties::max_viewports) device property.
/// - If the [`multi_viewport`](crate::device::Features::multi_viewport) feature is not enabled,
/// panics if more than 1 viewport is provided.
#[inline]
pub fn set_viewport_with_count<I>(&mut self, viewports: I) -> &mut Self
where
I: IntoIterator<Item = Viewport>,
{
assert!(
self.queue_family().supports_graphics(),
"the queue family of the command buffer must support graphics operations"
);
assert!(
self.device().api_version() >= Version::V1_3
|| self.device().enabled_features().extended_dynamic_state,
"the extended_dynamic_state feature must be enabled on the device"
);
assert!(
!self.has_fixed_state(DynamicState::ViewportWithCount),
"the currently bound graphics pipeline must not contain this state internally"
);
let viewports: SmallVec<[Viewport; 2]> = viewports.into_iter().collect();
assert!(
viewports.len() as u32 <= self.device().physical_device().properties().max_viewports,
"the highest viewport slot being set must not be higher than the max_viewports device property"
);
if !self.device().enabled_features().multi_viewport {
assert!(
viewports.len() <= 1,
"if the multi_viewport feature is not enabled, no more than 1 viewport must be provided"
);
}
// TODO: VUID-vkCmdSetViewportWithCountEXT-commandBuffer-04819
// commandBuffer must not have
// VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled
unsafe {
self.inner.set_viewport_with_count(viewports);
}
self
}
/// Adds a command that writes data to a buffer.
///
/// If `data` is larger than the buffer, only the part of `data` that fits is written. If the
/// buffer is larger than `data`, only the start of the buffer is written.
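///
/// # Examples
///
/// A hedged sketch (not compiled here); `buffer` is assumed to be an
/// `Arc` of a `TypedBufferAccess<Content = u32>` with transfer-destination
/// usage:
///
/// ```ignore
/// // Arc<u32> satisfies the SafeDeref + Send + Sync bound on `data`.
/// builder.update_buffer(buffer.clone(), Arc::new(42u32))?;
/// ```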
#[inline]
pub fn update_buffer<B, D, Dd>(
&mut self,
buffer: Arc<B>,
data: Dd,
) -> Result<&mut Self, UpdateBufferError>
where
B: TypedBufferAccess<Content = D> + 'static,
D: BufferContents + ?Sized,
Dd: SafeDeref<Target = D> + Send + Sync + 'static,
{
unsafe {
self.ensure_outside_render_pass()?;
check_update_buffer(self.device(), buffer.as_ref(), data.deref())?;
let size_of_data = size_of_val(data.deref()) as DeviceSize;
if buffer.size() >= size_of_data {
self.inner.update_buffer(buffer, data);
} else {
unimplemented!() // TODO:
//self.inner.update_buffer(buffer.slice(0 .. size_of_data), data);
}
Ok(self)
}
}
/// Adds a command that begins a query.
///
/// The query will be active until [`end_query`](Self::end_query) is called for the same query.
///
/// # Safety
/// The query must be unavailable, ensured by calling [`reset_query_pool`](Self::reset_query_pool).
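///
/// # Examples
///
/// A sketch of the expected reset/begin/end pattern for an occlusion query
/// pool (`query_pool` is a hypothetical name; not compiled here):
///
/// ```ignore
/// unsafe {
///     builder.reset_query_pool(query_pool.clone(), 0..1)?;
///     builder.begin_query(query_pool.clone(), 0, QueryControlFlags { precise: false })?;
/// }
/// // ... record the draws whose samples should be counted ...
/// builder.end_query(query_pool.clone(), 0)?;
/// ```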
pub unsafe fn begin_query(
&mut self,
query_pool: Arc<QueryPool>,
query: u32,
flags: QueryControlFlags,
) -> Result<&mut Self, BeginQueryError> {
check_begin_query(self.device(), &query_pool, query, flags)?;
match query_pool.query_type() {
QueryType::Occlusion => {
if !self.queue_family().supports_graphics() {
return Err(
AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into(),
);
}
}
QueryType::PipelineStatistics(flags) => {
if flags.is_compute() && !self.queue_family().supports_compute()
|| flags.is_graphics() && !self.queue_family().supports_graphics()
{
return Err(
AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into(),
);
}
}
QueryType::Timestamp => unreachable!(),
}
let ty = query_pool.query_type();
let raw_ty = ty.into();
let raw_query_pool = query_pool.internal_object();
if self.query_state.contains_key(&raw_ty) {
return Err(AutoCommandBufferBuilderContextError::QueryIsActive.into());
}
// TODO: validity checks
self.inner.begin_query(query_pool, query, flags);
self.query_state.insert(
raw_ty,
QueryState {
query_pool: raw_query_pool,
query,
ty,
flags,
in_subpass: self.render_pass_state.is_some(),
},
);
Ok(self)
}
/// Adds a command that ends an active query.
pub fn end_query(
&mut self,
query_pool: Arc<QueryPool>,
query: u32,
) -> Result<&mut Self, EndQueryError> {
unsafe {
check_end_query(self.device(), &query_pool, query)?;
let raw_ty = query_pool.query_type().into();
let raw_query_pool = query_pool.internal_object();
if !self.query_state.get(&raw_ty).map_or(false, |state| {
state.query_pool == raw_query_pool && state.query == query
}) {
return Err(AutoCommandBufferBuilderContextError::QueryNotActive.into());
}
self.inner.end_query(query_pool, query);
self.query_state.remove(&raw_ty);
}
Ok(self)
}
/// Adds a command that writes a timestamp to a timestamp query.
///
/// # Safety
/// The query must be unavailable, ensured by calling [`reset_query_pool`](Self::reset_query_pool).
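///
/// # Examples
///
/// A sketch (`ts_pool` is a hypothetical timestamp query pool; not compiled
/// here):
///
/// ```ignore
/// unsafe {
///     builder.reset_query_pool(ts_pool.clone(), 0..1)?;
///     builder.write_timestamp(ts_pool.clone(), 0, PipelineStage::BottomOfPipe)?;
/// }
/// ```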
pub unsafe fn write_timestamp(
&mut self,
query_pool: Arc<QueryPool>,
query: u32,
stage: PipelineStage,
) -> Result<&mut Self, WriteTimestampError> {
check_write_timestamp(
self.device(),
self.queue_family(),
&query_pool,
query,
stage,
)?;
if !(self.queue_family().supports_graphics()
|| self.queue_family().supports_compute()
|| self.queue_family().explicitly_supports_transfers())
{
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
// TODO: validity checks
self.inner.write_timestamp(query_pool, query, stage);
Ok(self)
}
/// Adds a command that copies the results of a range of queries to a buffer on the GPU.
///
/// [`query_pool.ty().result_size()`](crate::query::QueryType::result_size) elements
/// will be written for each query in the range, plus 1 extra element per query if
/// [`QueryResultFlags::with_availability`] is enabled.
/// The provided buffer must be large enough to hold the data.
///
/// See also [`get_results`](crate::query::QueriesRange::get_results).
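///
/// # Examples
///
/// A sizing sketch (hypothetical names, field list assumed; not compiled
/// here). With 3 queries and availability enabled, the destination must hold
/// `3 * (result_size + 1)` elements of `T`:
///
/// ```ignore
/// builder.copy_query_pool_results(
///     query_pool.clone(),
///     0..3,
///     results_buffer.clone(),
///     QueryResultFlags { wait: true, with_availability: true, partial: false },
/// )?;
/// ```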
pub fn copy_query_pool_results<D, T>(
&mut self,
query_pool: Arc<QueryPool>,
queries: Range<u32>,
destination: Arc<D>,
flags: QueryResultFlags,
) -> Result<&mut Self, CopyQueryPoolResultsError>
where
D: TypedBufferAccess<Content = [T]> + 'static,
T: QueryResultElement,
{
unsafe {
self.ensure_outside_render_pass()?;
let stride = check_copy_query_pool_results(
self.device(),
&query_pool,
queries.clone(),
destination.as_ref(),
flags,
)?;
self.inner
.copy_query_pool_results(query_pool, queries, destination, stride, flags)?;
}
Ok(self)
}
/// Adds a command to reset a range of queries on a query pool.
///
/// The affected queries will be marked as "unavailable" after this command runs, and will no
/// longer return any results. They will be ready to have new results recorded for them.
///
/// # Safety
/// The queries in the specified range must not be active in another command buffer.
pub unsafe fn reset_query_pool(
&mut self,
query_pool: Arc<QueryPool>,
queries: Range<u32>,
) -> Result<&mut Self, ResetQueryPoolError> {
self.ensure_outside_render_pass()?;
check_reset_query_pool(self.device(), &query_pool, queries.clone())?;
let raw_query_pool = query_pool.internal_object();
if self
.query_state
.values()
.any(|state| state.query_pool == raw_query_pool && queries.contains(&state.query))
{
return Err(AutoCommandBufferBuilderContextError::QueryIsActive.into());
}
// TODO: validity checks
// Do other command buffers actually matter here? Not sure from the Vulkan spec.
self.inner.reset_query_pool(query_pool, queries);
Ok(self)
}
}
/// Commands that can only be executed on primary command buffers
impl<P> AutoCommandBufferBuilder<PrimaryAutoCommandBuffer<P::Alloc>, P>
where
P: CommandPoolBuilderAlloc,
{
/// Adds a command that enters a render pass.
///
/// If `contents` is `SubpassContents::SecondaryCommandBuffers`, then you will only be able to
/// add secondary command buffers while you're inside the first subpass of the render pass.
/// If it is `SubpassContents::Inline`, you will only be able to add inline draw commands and
/// not secondary command buffers.
///
/// `clear_values` must contain exactly one clear value for each attachment in the framebuffer.
///
/// You must call this before you can add draw commands.
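///
/// # Examples
///
/// A hedged sketch for a framebuffer with a single color attachment whose
/// load op is `Clear` (not compiled here):
///
/// ```ignore
/// builder
///     .begin_render_pass(
///         framebuffer.clone(),
///         SubpassContents::Inline,
///         vec![[0.0, 0.0, 0.0, 1.0].into()], // one ClearValue per attachment
///     )?
///     // ... inline draw calls ...
///     .end_render_pass()?;
/// ```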
#[inline]
pub fn begin_render_pass<I>(
&mut self,
framebuffer: Arc<Framebuffer>,
contents: SubpassContents,
clear_values: I,
) -> Result<&mut Self, BeginRenderPassError>
where
I: IntoIterator<Item = ClearValue>,
{
unsafe {
if !self.queue_family().supports_graphics() {
return Err(AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily.into());
}
self.ensure_outside_render_pass()?;
let clear_values: Vec<_> = framebuffer
.render_pass()
.convert_clear_values(clear_values)
.collect();
let mut clear_values_copy = clear_values.iter().enumerate(); // TODO: return proper errors for clear value mismatches instead of panicking
for (atch_i, atch_desc) in framebuffer
.render_pass()
.attachments()
.into_iter()
.enumerate()
{
match clear_values_copy.next() {
Some((clear_i, clear_value)) => {
if atch_desc.load_op == LoadOp::Clear {
let aspects = atch_desc
.format
.map_or(ImageAspects::none(), |f| f.aspects());
if aspects.depth && aspects.stencil {
assert!(
matches!(clear_value, ClearValue::DepthStencil(_)),
"Bad ClearValue! index: {}, attachment index: {}, expected: DepthStencil, got: {:?}",
clear_i,
atch_i,
clear_value,
);
} else if aspects.depth {
assert!(
matches!(clear_value, ClearValue::Depth(_)),
"Bad ClearValue! index: {}, attachment index: {}, expected: Depth, got: {:?}",
clear_i,
atch_i,
clear_value,
);
} else if aspects.stencil {
assert!(
matches!(clear_value, ClearValue::Stencil(_)),
"Bad ClearValue! index: {}, attachment index: {}, expected: Stencil, got: {:?}",
clear_i,
atch_i,
clear_value,
);
} else if let Some(numeric_type) =
atch_desc.format.and_then(|f| f.type_color())
{
match numeric_type {
NumericType::SFLOAT
| NumericType::UFLOAT
| NumericType::SNORM
| NumericType::UNORM
| NumericType::SSCALED
| NumericType::USCALED
| NumericType::SRGB => {
assert!(
matches!(clear_value, ClearValue::Float(_)),
"Bad ClearValue! index: {}, attachment index: {}, expected: Float, got: {:?}",
clear_i,
atch_i,
clear_value,
);
}
NumericType::SINT => {
assert!(
matches!(clear_value, ClearValue::Int(_)),
"Bad ClearValue! index: {}, attachment index: {}, expected: Int, got: {:?}",
clear_i,
atch_i,
clear_value,
);
}
NumericType::UINT => {
assert!(
matches!(clear_value, ClearValue::Uint(_)),
"Bad ClearValue! index: {}, attachment index: {}, expected: Uint, got: {:?}",
clear_i,
atch_i,
clear_value,
);
}
}
} else {
panic!("Shouldn't happen!");
}
} else {
assert!(
matches!(clear_value, ClearValue::None),
"Bad ClearValue! index: {}, attachment index: {}, expected: None, got: {:?}",
clear_i,
atch_i,
clear_value,
);
}
}
None => panic!("Not enough clear values"),
}
}
if clear_values_copy.count() != 0 {
panic!("Too many clear values")
}
let render_pass_state = RenderPassState {
subpass: framebuffer.render_pass().clone().first_subpass(),
extent: framebuffer.extent(),
attached_layers_ranges: framebuffer.attached_layers_ranges(),
contents,
framebuffer: framebuffer.internal_object(),
};
self.inner.begin_render_pass(
RenderPassBeginInfo {
clear_values,
..RenderPassBeginInfo::framebuffer(framebuffer)
},
contents,
)?;
self.render_pass_state = Some(render_pass_state);
Ok(self)
}
}
/// Adds a command that ends the current render pass.
///
/// This must be called after you went through all the subpasses and before you can build
/// the command buffer or add further commands.
#[inline]
pub fn end_render_pass(&mut self) -> Result<&mut Self, AutoCommandBufferBuilderContextError> {
unsafe {
if let Some(render_pass_state) = self.render_pass_state.as_ref() {
if !render_pass_state.subpass.is_last_subpass() {
return Err(AutoCommandBufferBuilderContextError::NumSubpassesMismatch {
actual: render_pass_state.subpass.render_pass().subpasses().len() as u32,
current: render_pass_state.subpass.index(),
});
}
} else {
return Err(AutoCommandBufferBuilderContextError::ForbiddenOutsideRenderPass);
}
if self.query_state.values().any(|state| state.in_subpass) {
return Err(AutoCommandBufferBuilderContextError::QueryIsActive);
}
debug_assert!(self.queue_family().supports_graphics());
self.inner.end_render_pass();
self.render_pass_state = None;
Ok(self)
}
}
/// Adds a command that executes a secondary command buffer.
///
/// If the `flags` that `command_buffer` was created with are more restrictive than those of
/// `self`, then `self` will be restricted to match. E.g. executing a secondary command buffer
/// with `Flags::OneTimeSubmit` will set `self`'s flags to `Flags::OneTimeSubmit` also.
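///
/// # Examples
///
/// A sketch (not compiled here) of the restriction described above:
///
/// ```ignore
/// // `secondary` was built with CommandBufferUsage::OneTimeSubmit.
/// builder.execute_commands(secondary)?;
/// // `builder` now also behaves as OneTimeSubmit.
/// ```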
pub fn execute_commands<C>(
&mut self,
command_buffer: C,
) -> Result<&mut Self, ExecuteCommandsError>
where
C: SecondaryCommandBuffer + 'static,
{
self.check_command_buffer(&command_buffer)?;
let secondary_usage = command_buffer.inner().usage();
unsafe {
let mut builder = self.inner.execute_commands();
builder.add(command_buffer);
builder.submit()?;
}
// Secondary command buffer could leave the primary in any state.
self.inner.reset_state();
// If the secondary is non-concurrent or one-time use, that restricts the primary as well.
self.usage = std::cmp::min(self.usage, secondary_usage);
Ok(self)
}
/// Adds a command that executes multiple secondary command buffers in a vector.
///
/// This requires that the secondary command buffers do not have resource conflicts; an error
/// will be returned if there are any. Use `execute_commands` if you want to ensure that
/// resource conflicts are automatically resolved.
// TODO ^ would be nice if this just worked without errors
pub fn execute_commands_from_vec<C>(
&mut self,
command_buffers: Vec<C>,
) -> Result<&mut Self, ExecuteCommandsError>
where
C: SecondaryCommandBuffer + 'static,
{
for command_buffer in &command_buffers {
self.check_command_buffer(command_buffer)?;
}
let mut secondary_usage = CommandBufferUsage::SimultaneousUse; // Most permissive usage
unsafe {
let mut builder = self.inner.execute_commands();
for command_buffer in command_buffers {
secondary_usage = std::cmp::min(secondary_usage, command_buffer.inner().usage());
builder.add(command_buffer);
}
builder.submit()?;
}
// Secondary command buffer could leave the primary in any state.
self.inner.reset_state();
// If the secondary is non-concurrent or one-time use, that restricts the primary as well.
self.usage = std::cmp::min(self.usage, secondary_usage);
Ok(self)
}
// Helper function for execute_commands
fn check_command_buffer<C>(
&self,
command_buffer: &C,
) -> Result<(), AutoCommandBufferBuilderContextError>
where
C: SecondaryCommandBuffer + 'static,
{
if let Some(render_pass) = &command_buffer.inheritance_info().render_pass {
self.ensure_inside_render_pass_secondary(render_pass)?;
} else {
self.ensure_outside_render_pass()?;
}
for state in self.query_state.values() {
match state.ty {
QueryType::Occlusion => match command_buffer.inheritance_info().occlusion_query {
Some(inherited_flags) => {
let inherited_flags = ash::vk::QueryControlFlags::from(inherited_flags);
let state_flags = ash::vk::QueryControlFlags::from(state.flags);
if inherited_flags & state_flags != state_flags {
return Err(AutoCommandBufferBuilderContextError::QueryNotInherited);
}
}
None => return Err(AutoCommandBufferBuilderContextError::QueryNotInherited),
},
QueryType::PipelineStatistics(state_flags) => {
let inherited_flags = command_buffer.inheritance_info().query_statistics_flags;
let inherited_flags =
ash::vk::QueryPipelineStatisticFlags::from(inherited_flags);
let state_flags = ash::vk::QueryPipelineStatisticFlags::from(state_flags);
if inherited_flags & state_flags != state_flags {
return Err(AutoCommandBufferBuilderContextError::QueryNotInherited);
}
}
_ => (),
}
}
Ok(())
}
#[inline]
fn ensure_inside_render_pass_secondary(
&self,
render_pass: &CommandBufferInheritanceRenderPassInfo,
) -> Result<(), AutoCommandBufferBuilderContextError> {
let render_pass_state = self
.render_pass_state
.as_ref()
.ok_or(AutoCommandBufferBuilderContextError::ForbiddenOutsideRenderPass)?;
if render_pass_state.contents != SubpassContents::SecondaryCommandBuffers {
return Err(AutoCommandBufferBuilderContextError::WrongSubpassType);
}
// Subpasses must be the same.
if render_pass.subpass.index() != render_pass_state.subpass.index() {
return Err(AutoCommandBufferBuilderContextError::WrongSubpassIndex);
}
// Render passes must be compatible.
if !render_pass
.subpass
.render_pass()
.is_compatible_with(render_pass_state.subpass.render_pass())
{
return Err(AutoCommandBufferBuilderContextError::IncompatibleRenderPass);
}
// Framebuffer, if present on the secondary command buffer, must be the
// same as the one in the current render pass.
if let Some(framebuffer) = &render_pass.framebuffer {
if framebuffer.internal_object() != render_pass_state.framebuffer {
return Err(AutoCommandBufferBuilderContextError::IncompatibleFramebuffer);
}
}
Ok(())
}
/// Adds a command that jumps to the next subpass of the current render pass.
#[inline]
pub fn next_subpass(
&mut self,
contents: SubpassContents,
) -> Result<&mut Self, AutoCommandBufferBuilderContextError> {
unsafe {
if let Some(render_pass_state) = self.render_pass_state.as_mut() {
if render_pass_state.subpass.try_next_subpass() {
render_pass_state.contents = contents;
} else {
return Err(AutoCommandBufferBuilderContextError::NumSubpassesMismatch {
actual: render_pass_state.subpass.render_pass().subpasses().len() as u32,
current: render_pass_state.subpass.index(),
});
}
if render_pass_state.subpass.render_pass().views_used() != 0 {
// When multiview is enabled, at the beginning of each subpass all non-render pass state is undefined
self.inner.reset_state();
}
} else {
return Err(AutoCommandBufferBuilderContextError::ForbiddenOutsideRenderPass);
}
if self.query_state.values().any(|state| state.in_subpass) {
return Err(AutoCommandBufferBuilderContextError::QueryIsActive);
}
debug_assert!(self.queue_family().supports_graphics());
self.inner.next_subpass(contents);
Ok(self)
}
}
}
impl<P> AutoCommandBufferBuilder<SecondaryAutoCommandBuffer<P::Alloc>, P> where
P: CommandPoolBuilderAlloc
{
}
unsafe impl<L, P> DeviceOwned for AutoCommandBufferBuilder<L, P> {
#[inline]
fn device(&self) -> &Arc<Device> {
self.inner.device()
}
}
pub struct PrimaryAutoCommandBuffer<P = StandardCommandPoolAlloc> {
inner: SyncCommandBuffer,
pool_alloc: P, // Safety: must be dropped after `inner`
// Tracks usage of the command buffer on the GPU.
submit_state: SubmitState,
}
unsafe impl<P> DeviceOwned for PrimaryAutoCommandBuffer<P> {
#[inline]
fn device(&self) -> &Arc<Device> {
self.inner.device()
}
}
unsafe impl<P> PrimaryCommandBuffer for PrimaryAutoCommandBuffer<P>
where
P: CommandPoolAlloc,
{
#[inline]
fn inner(&self) -> &UnsafeCommandBuffer {
self.inner.as_ref()
}
#[inline]
fn lock_submit(
&self,
future: &dyn GpuFuture,
queue: &Queue,
) -> Result<(), CommandBufferExecError> {
match self.submit_state {
SubmitState::OneTime {
ref already_submitted,
} => {
let was_already_submitted = already_submitted.swap(true, Ordering::SeqCst);
if was_already_submitted {
return Err(CommandBufferExecError::OneTimeSubmitAlreadySubmitted);
}
}
SubmitState::ExclusiveUse { ref in_use } => {
let already_in_use = in_use.swap(true, Ordering::SeqCst);
if already_in_use {
return Err(CommandBufferExecError::ExclusiveAlreadyInUse);
}
}
SubmitState::Concurrent => (),
};
let err = match self.inner.lock_submit(future, queue) {
Ok(()) => return Ok(()),
Err(err) => err,
};
// If `self.inner.lock_submit()` failed, we revert action.
match self.submit_state {
SubmitState::OneTime {
ref already_submitted,
} => {
already_submitted.store(false, Ordering::SeqCst);
}
SubmitState::ExclusiveUse { ref in_use } => {
in_use.store(false, Ordering::SeqCst);
}
SubmitState::Concurrent => (),
};
Err(err)
}
#[inline]
unsafe fn unlock(&self) {
// Because of panic safety, we unlock the inner command buffer first.
self.inner.unlock();
match self.submit_state {
SubmitState::OneTime {
ref already_submitted,
} => {
debug_assert!(already_submitted.load(Ordering::SeqCst));
}
SubmitState::ExclusiveUse { ref in_use } => {
let old_val = in_use.swap(false, Ordering::SeqCst);
debug_assert!(old_val);
}
SubmitState::Concurrent => (),
};
}
#[inline]
fn check_buffer_access(
&self,
buffer: &UnsafeBuffer,
range: Range<DeviceSize>,
exclusive: bool,
queue: &Queue,
) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> {
self.inner
.check_buffer_access(buffer, range, exclusive, queue)
}
#[inline]
fn check_image_access(
&self,
image: &UnsafeImage,
range: Range<DeviceSize>,
exclusive: bool,
expected_layout: ImageLayout,
queue: &Queue,
) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> {
self.inner
.check_image_access(image, range, exclusive, expected_layout, queue)
}
}
pub struct SecondaryAutoCommandBuffer<P = StandardCommandPoolAlloc> {
inner: SyncCommandBuffer,
pool_alloc: P, // Safety: must be dropped after `inner`
inheritance_info: CommandBufferInheritanceInfo,
// Tracks usage of the command buffer on the GPU.
submit_state: SubmitState,
}
unsafe impl<P> DeviceOwned for SecondaryAutoCommandBuffer<P> {
#[inline]
fn device(&self) -> &Arc<Device> {
self.inner.device()
}
}
unsafe impl<P> SecondaryCommandBuffer for SecondaryAutoCommandBuffer<P>
where
P: CommandPoolAlloc,
{
#[inline]
fn inner(&self) -> &UnsafeCommandBuffer {
self.inner.as_ref()
}
#[inline]
fn lock_record(&self) -> Result<(), CommandBufferExecError> {
match self.submit_state {
SubmitState::OneTime {
ref already_submitted,
} => {
let was_already_submitted = already_submitted.swap(true, Ordering::SeqCst);
if was_already_submitted {
return Err(CommandBufferExecError::OneTimeSubmitAlreadySubmitted);
}
}
SubmitState::ExclusiveUse { ref in_use } => {
let already_in_use = in_use.swap(true, Ordering::SeqCst);
if already_in_use {
return Err(CommandBufferExecError::ExclusiveAlreadyInUse);
}
}
SubmitState::Concurrent => (),
};
Ok(())
}
#[inline]
unsafe fn unlock(&self) {
match self.submit_state {
SubmitState::OneTime {
ref already_submitted,
} => {
debug_assert!(already_submitted.load(Ordering::SeqCst));
}
SubmitState::ExclusiveUse { ref in_use } => {
let old_val = in_use.swap(false, Ordering::SeqCst);
debug_assert!(old_val);
}
SubmitState::Concurrent => (),
};
}
#[inline]
fn inheritance_info(&self) -> &CommandBufferInheritanceInfo {
&self.inheritance_info
}
#[inline]
fn num_buffers(&self) -> usize {
self.inner.num_buffers()
}
#[inline]
fn buffer(&self, index: usize) -> Option<(&Arc<dyn BufferAccess>, PipelineMemoryAccess)> {
self.inner.buffer(index)
}
#[inline]
fn num_images(&self) -> usize {
self.inner.num_images()
}
#[inline]
fn image(
&self,
index: usize,
) -> Option<(
&Arc<dyn ImageAccess>,
PipelineMemoryAccess,
ImageLayout,
ImageLayout,
)> {
self.inner.image(index)
}
}
// Whether the command buffer can be submitted.
#[derive(Debug)]
enum SubmitState {
// The command buffer was created with the "SimultaneousUse" flag. Can always be submitted at
// any time.
Concurrent,
// The command buffer can only be submitted once simultaneously.
ExclusiveUse {
// True if the command buffer is currently in use by the GPU.
in_use: AtomicBool,
},
// The command buffer can only ever be submitted once.
OneTime {
// True if the command buffer has already been submitted once and can no
// longer be submitted.
already_submitted: AtomicBool,
},
}
macro_rules! err_gen {
($name:ident { $($err:ident,)+ }) => (
#[derive(Debug, Clone)]
pub enum $name {
$(
$err($err),
)+
}
impl error::Error for $name {
#[inline]
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match *self {
$(
$name::$err(ref err) => Some(err),
)+
}
}
}
impl fmt::Display for $name {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "{}", match *self {
$(
$name::$err(_) => {
concat!("a ", stringify!($err))
}
)+
})
}
}
$(
impl From<$err> for $name {
#[inline]
fn from(err: $err) -> $name {
$name::$err(err)
}
}
)+
);
}
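// For reference, an invocation such as
// `err_gen!(BuildError { AutoCommandBufferBuilderContextError, OomError, })`
// expands (roughly) to:
//
//     #[derive(Debug, Clone)]
//     pub enum BuildError {
//         AutoCommandBufferBuilderContextError(AutoCommandBufferBuilderContextError),
//         OomError(OomError),
//     }
//
// together with `Error`, `Display` and per-variant `From` impls.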
err_gen!(BuildError {
AutoCommandBufferBuilderContextError,
OomError,
});
err_gen!(BeginRenderPassError {
AutoCommandBufferBuilderContextError,
SyncCommandBufferBuilderError,
});
err_gen!(CopyImageError {
AutoCommandBufferBuilderContextError,
CheckCopyImageError,
SyncCommandBufferBuilderError,
});
err_gen!(BlitImageError {
AutoCommandBufferBuilderContextError,
CheckBlitImageError,
SyncCommandBufferBuilderError,
});
err_gen!(ClearColorImageError {
AutoCommandBufferBuilderContextError,
CheckClearColorImageError,
SyncCommandBufferBuilderError,
});
err_gen!(ClearDepthStencilImageError {
AutoCommandBufferBuilderContextError,
CheckClearDepthStencilImageError,
SyncCommandBufferBuilderError,
});
err_gen!(CopyBufferError {
AutoCommandBufferBuilderContextError,
CheckCopyBufferError,
SyncCommandBufferBuilderError,
});
err_gen!(CopyBufferImageError {
AutoCommandBufferBuilderContextError,
CheckCopyBufferImageError,
SyncCommandBufferBuilderError,
});
err_gen!(CopyQueryPoolResultsError {
AutoCommandBufferBuilderContextError,
CheckCopyQueryPoolResultsError,
SyncCommandBufferBuilderError,
});
err_gen!(FillBufferError {
AutoCommandBufferBuilderContextError,
CheckFillBufferError,
});
err_gen!(DebugMarkerError {
AutoCommandBufferBuilderContextError,
CheckColorError,
});
err_gen!(DispatchError {
AutoCommandBufferBuilderContextError,
CheckPipelineError,
CheckPushConstantsValidityError,
CheckDescriptorSetsValidityError,
CheckDispatchError,
SyncCommandBufferBuilderError,
});
err_gen!(DispatchIndirectError {
AutoCommandBufferBuilderContextError,
CheckPipelineError,
CheckPushConstantsValidityError,
CheckDescriptorSetsValidityError,
CheckIndirectBufferError,
CheckDispatchError,
SyncCommandBufferBuilderError,
});
err_gen!(DrawError {
AutoCommandBufferBuilderContextError,
CheckPipelineError,
CheckDynamicStateValidityError,
CheckPushConstantsValidityError,
CheckDescriptorSetsValidityError,
CheckVertexBufferError,
SyncCommandBufferBuilderError,
});
err_gen!(DrawIndexedError {
AutoCommandBufferBuilderContextError,
CheckPipelineError,
CheckDynamicStateValidityError,
CheckPushConstantsValidityError,
CheckDescriptorSetsValidityError,
CheckVertexBufferError,
CheckIndexBufferError,
SyncCommandBufferBuilderError,
});
err_gen!(DrawIndirectError {
AutoCommandBufferBuilderContextError,
CheckPipelineError,
CheckDynamicStateValidityError,
CheckPushConstantsValidityError,
CheckDescriptorSetsValidityError,
CheckVertexBufferError,
CheckIndirectBufferError,
SyncCommandBufferBuilderError,
});
err_gen!(DrawIndexedIndirectError {
AutoCommandBufferBuilderContextError,
CheckPipelineError,
CheckDynamicStateValidityError,
CheckPushConstantsValidityError,
CheckDescriptorSetsValidityError,
CheckVertexBufferError,
CheckIndexBufferError,
CheckIndirectBufferError,
SyncCommandBufferBuilderError,
});
err_gen!(ExecuteCommandsError {
AutoCommandBufferBuilderContextError,
SyncCommandBufferBuilderError,
});
err_gen!(BeginQueryError {
AutoCommandBufferBuilderContextError,
CheckBeginQueryError,
});
err_gen!(EndQueryError {
AutoCommandBufferBuilderContextError,
CheckEndQueryError,
});
err_gen!(WriteTimestampError {
AutoCommandBufferBuilderContextError,
CheckWriteTimestampError,
});
err_gen!(ResetQueryPoolError {
AutoCommandBufferBuilderContextError,
CheckResetQueryPoolError,
});
err_gen!(UpdateBufferError {
AutoCommandBufferBuilderContextError,
CheckUpdateBufferError,
});
/// Errors that can happen when calling [`clear_attachments`](AutoCommandBufferBuilder::clear_attachments)
#[derive(Debug, Copy, Clone)]
pub enum ClearAttachmentsError {
/// A command buffer builder context error occurred.
AutoCommandBufferBuilderContextError(AutoCommandBufferBuilderContextError),
/// A pipeline validity check failed.
CheckPipelineError(CheckPipelineError),
/// The index of the color attachment is not present
InvalidColorAttachmentIndex(u32),
/// There is no depth/stencil attachment present
DepthStencilAttachmentNotPresent,
/// The clear rect cannot have extent of `0`
ZeroRectExtent,
/// The layer count cannot be `0`
ZeroLayerCount,
/// The clear rect region must be inside the render area of the render pass
RectOutOfBounds,
/// The clear rect's layers must be inside the layers ranges for all the attachments
LayersOutOfBounds,
/// If the render pass instance this is recorded in uses multiview,
/// then `ClearRect.base_array_layer` must be zero and `ClearRect.layer_count` must be one
InvalidMultiviewLayerRange,
}
impl error::Error for ClearAttachmentsError {}
impl fmt::Display for ClearAttachmentsError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
ClearAttachmentsError::AutoCommandBufferBuilderContextError(e) => write!(fmt, "{}", e)?,
ClearAttachmentsError::CheckPipelineError(e) => write!(fmt, "{}", e)?,
ClearAttachmentsError::InvalidColorAttachmentIndex(index) => {
write!(fmt, "Color attachment {} is not present", index)?
}
ClearAttachmentsError::DepthStencilAttachmentNotPresent => {
write!(fmt, "There is no depth/stencil attachment present")?
}
ClearAttachmentsError::ZeroRectExtent => {
write!(fmt, "The clear rect cannot have extent of 0")?
}
ClearAttachmentsError::ZeroLayerCount => write!(fmt, "The layer count cannot be 0")?,
ClearAttachmentsError::RectOutOfBounds => write!(
fmt,
"The clear rect region must be inside the render area of the render pass"
)?,
ClearAttachmentsError::LayersOutOfBounds => write!(
fmt,
"The clear rect's layers must be inside the layers ranges for all the attachments"
)?,
ClearAttachmentsError::InvalidMultiviewLayerRange => write!(
fmt,
"If the render pass instance this is recorded in uses multiview, then `ClearRect.base_array_layer` must be zero and `ClearRect.layer_count` must be one"
)?,
}
Ok(())
}
}
impl From<AutoCommandBufferBuilderContextError> for ClearAttachmentsError {
#[inline]
fn from(err: AutoCommandBufferBuilderContextError) -> ClearAttachmentsError {
ClearAttachmentsError::AutoCommandBufferBuilderContextError(err)
}
}
impl From<CheckPipelineError> for ClearAttachmentsError {
#[inline]
fn from(err: CheckPipelineError) -> ClearAttachmentsError {
ClearAttachmentsError::CheckPipelineError(err)
}
}
#[derive(Debug, Copy, Clone)]
pub enum AutoCommandBufferBuilderContextError {
/// Operation forbidden inside of a render pass.
ForbiddenInsideRenderPass,
/// Operation forbidden outside of a render pass.
ForbiddenOutsideRenderPass,
/// Tried to use a secondary command buffer with a specified framebuffer that is
/// incompatible with the current framebuffer.
IncompatibleFramebuffer,
/// Tried to use a graphics pipeline or secondary command buffer whose render pass
/// is incompatible with the current render pass.
IncompatibleRenderPass,
/// The queue family doesn't allow this operation.
NotSupportedByQueueFamily,
/// Tried to end a render pass with subpasses remaining, or tried to go to next subpass with no
/// subpass remaining.
NumSubpassesMismatch {
/// Actual number of subpasses in the current render pass.
actual: u32,
/// Current subpass index before the failing command.
current: u32,
},
/// A query is active that conflicts with the current operation.
QueryIsActive,
/// This query was not active.
QueryNotActive,
/// A query is active that is not included in the `inheritance` of the secondary command buffer.
QueryNotInherited,
/// Tried to use a graphics pipeline or secondary command buffer whose subpass index
/// didn't match the current subpass index.
WrongSubpassIndex,
/// Tried to execute a secondary command buffer inside a subpass that only allows inline
/// commands, or a draw command in a subpass that only allows secondary command buffers.
WrongSubpassType,
}
impl error::Error for AutoCommandBufferBuilderContextError {}
impl fmt::Display for AutoCommandBufferBuilderContextError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(
fmt,
"{}",
match *self {
AutoCommandBufferBuilderContextError::ForbiddenInsideRenderPass => {
"operation forbidden inside of a render pass"
}
AutoCommandBufferBuilderContextError::ForbiddenOutsideRenderPass => {
"operation forbidden outside of a render pass"
}
AutoCommandBufferBuilderContextError::IncompatibleFramebuffer => {
"tried to use a secondary command buffer with a specified framebuffer that is \
incompatible with the current framebuffer"
}
AutoCommandBufferBuilderContextError::IncompatibleRenderPass => {
"tried to use a graphics pipeline or secondary command buffer whose render pass \
is incompatible with the current render pass"
}
AutoCommandBufferBuilderContextError::NotSupportedByQueueFamily => {
"the queue family doesn't allow this operation"
}
AutoCommandBufferBuilderContextError::NumSubpassesMismatch { .. } => {
"tried to end a render pass with subpasses remaining, or tried to go to next \
subpass with no subpass remaining"
}
AutoCommandBufferBuilderContextError::QueryIsActive => {
"a query is active that conflicts with the current operation"
}
AutoCommandBufferBuilderContextError::QueryNotActive => {
"this query was not active"
}
AutoCommandBufferBuilderContextError::QueryNotInherited => {
"a query is active that is not included in the inheritance of the secondary command buffer"
}
AutoCommandBufferBuilderContextError::WrongSubpassIndex => {
"tried to use a graphics pipeline whose subpass index didn't match the current \
subpass index"
}
AutoCommandBufferBuilderContextError::WrongSubpassType => {
"tried to execute a secondary command buffer inside a subpass that only allows \
inline commands, or a draw command in a subpass that only allows secondary \
command buffers"
}
}
)
}
}
#[cfg(test)]
mod tests {
use super::CopyBufferError;
use crate::buffer::BufferUsage;
use crate::buffer::CpuAccessibleBuffer;
use crate::command_buffer::synced::SyncCommandBufferBuilderError;
use crate::command_buffer::validity::CheckCopyBufferError;
use crate::command_buffer::AutoCommandBufferBuilder;
use crate::command_buffer::CommandBufferExecError;
use crate::command_buffer::CommandBufferUsage;
use crate::command_buffer::ExecuteCommandsError;
use crate::command_buffer::PrimaryCommandBuffer;
use crate::device::physical::PhysicalDevice;
use crate::device::{Device, DeviceCreateInfo, QueueCreateInfo};
use crate::sync::GpuFuture;
use std::sync::Arc;
#[test]
fn copy_buffer_dimensions() {
let instance = instance!();
let phys = match PhysicalDevice::enumerate(&instance).next() {
Some(p) => p,
None => return,
};
let queue_family = match phys.queue_families().next() {
Some(q) => q,
None => return,
};
let (device, mut queues) = Device::new(
phys,
DeviceCreateInfo {
queue_create_infos: vec![QueueCreateInfo::family(queue_family)],
..Default::default()
},
)
.unwrap();
let queue = queues.next().unwrap();
let source = CpuAccessibleBuffer::from_iter(
device.clone(),
BufferUsage::all(),
true,
[1_u32, 2].iter().copied(),
)
.unwrap();
let destination = CpuAccessibleBuffer::from_iter(
device.clone(),
BufferUsage::all(),
true,
[0_u32, 10, 20, 3, 4].iter().copied(),
)
.unwrap();
let mut cbb = AutoCommandBufferBuilder::primary(
device.clone(),
queue.family(),
CommandBufferUsage::OneTimeSubmit,
)
.unwrap();
cbb.copy_buffer_dimensions(source.clone(), 0, destination.clone(), 1, 2)
.unwrap();
let cb = cbb.build().unwrap();
let future = cb
.execute(queue.clone())
.unwrap()
.then_signal_fence_and_flush()
.unwrap();
future.wait(None).unwrap();
let result = destination.read().unwrap();
assert_eq!(*result, [0_u32, 1, 2, 3, 4]);
}
#[test]
fn secondary_nonconcurrent_conflict() {
let (device, queue) = gfx_dev_and_queue!();
// Make a secondary CB that doesn't support simultaneous use.
let builder = AutoCommandBufferBuilder::secondary_compute(
device.clone(),
queue.family(),
CommandBufferUsage::MultipleSubmit,
)
.unwrap();
let secondary = Arc::new(builder.build().unwrap());
{
let mut builder = AutoCommandBufferBuilder::primary(
device.clone(),
queue.family(),
CommandBufferUsage::SimultaneousUse,
)
.unwrap();
// Add the secondary a first time
builder.execute_commands(secondary.clone()).unwrap();
// Recording the same non-concurrent secondary command buffer twice into the same
// primary is an error.
assert!(matches!(
builder.execute_commands(secondary.clone()),
Err(ExecuteCommandsError::SyncCommandBufferBuilderError(
SyncCommandBufferBuilderError::ExecError(
CommandBufferExecError::ExclusiveAlreadyInUse
)
))
));
}
{
let mut builder = AutoCommandBufferBuilder::primary(
device.clone(),
queue.family(),
CommandBufferUsage::SimultaneousUse,
)
.unwrap();
builder.execute_commands(secondary.clone()).unwrap();
let cb1 = builder.build().unwrap();
let mut builder = AutoCommandBufferBuilder::primary(
device.clone(),
queue.family(),
CommandBufferUsage::SimultaneousUse,
)
.unwrap();
// Recording the same non-concurrent secondary command buffer into multiple
// primaries is an error.
assert!(matches!(
builder.execute_commands(secondary.clone()),
Err(ExecuteCommandsError::SyncCommandBufferBuilderError(
SyncCommandBufferBuilderError::ExecError(
CommandBufferExecError::ExclusiveAlreadyInUse
)
))
));
std::mem::drop(cb1);
// Now that the first cb is dropped, we should be able to record.
builder.execute_commands(secondary.clone()).unwrap();
}
}
#[test]
fn buffer_self_copy_overlapping() {
let (device, queue) = gfx_dev_and_queue!();
let source = CpuAccessibleBuffer::from_iter(
device.clone(),
BufferUsage::all(),
true,
[0_u32, 1, 2, 3].iter().copied(),
)
.unwrap();
let mut builder = AutoCommandBufferBuilder::primary(
device.clone(),
queue.family(),
CommandBufferUsage::OneTimeSubmit,
)
.unwrap();
builder
.copy_buffer_dimensions(source.clone(), 0, source.clone(), 2, 2)
.unwrap();
let cb = builder.build().unwrap();
let future = cb
.execute(queue.clone())
.unwrap()
.then_signal_fence_and_flush()
.unwrap();
future.wait(None).unwrap();
let result = source.read().unwrap();
assert_eq!(*result, [0_u32, 1, 0, 1]);
}
#[test]
fn buffer_self_copy_not_overlapping() {
let (device, queue) = gfx_dev_and_queue!();
let source = CpuAccessibleBuffer::from_iter(
device.clone(),
BufferUsage::all(),
true,
[0_u32, 1, 2, 3].iter().copied(),
)
.unwrap();
let mut builder = AutoCommandBufferBuilder::primary(
device.clone(),
queue.family(),
CommandBufferUsage::OneTimeSubmit,
)
.unwrap();
assert!(matches!(
builder.copy_buffer_dimensions(source.clone(), 0, source.clone(), 1, 2),
Err(CopyBufferError::CheckCopyBufferError(
CheckCopyBufferError::OverlappingRanges
))
));
}
}
| 37.541515 | 169 | 0.597185 |
1c0ac721777b424b251f9ff8235f7fcb31fd43b6 | 2,197 | #![feature(futures_api, await_macro, pin, arbitrary_self_types)]
#![doc(html_root_url = "https://docs.rs/tokio-async-await/0.1.2")]
#![deny(missing_docs, missing_debug_implementations)]
#![cfg_attr(test, deny(warnings))]
//! A preview of Tokio w/ `async` / `await` support.
extern crate futures;
extern crate futures_core;
extern crate futures_util;
// Re-export all of Tokio
pub use tokio::{
// Modules
clock,
codec,
executor,
fs,
io,
net,
reactor,
runtime,
timer,
util,
// Functions
run,
spawn,
};
pub mod sync {
//! Asynchronous aware synchronization
pub use tokio_channel::{
mpsc,
oneshot,
};
}
pub mod async_await;
pub mod prelude {
//! A "prelude" for users of the `tokio` crate.
//!
//! This prelude is similar to the standard library's prelude in that you'll
//! almost always want to import its entire contents, but unlike the standard
//! library's prelude you'll have to do so manually:
//!
//! ```
//! use tokio::prelude::*;
//! ```
//!
//! The prelude may grow over time as additional items see ubiquitous use.
pub use tokio::prelude::*;
#[doc(inline)]
pub use crate::async_await::{
io::{
AsyncReadExt,
AsyncWriteExt,
},
sink::{
SinkExt,
},
stream::{
StreamExt,
},
};
}
use futures_core::{
Future as Future03,
};
// Rename the `await` macro in `std`
#[doc(hidden)]
#[macro_export]
pub use std::await as std_await;
/// Like `tokio::run`, but takes an `async` block
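///
/// # Examples
///
/// A minimal sketch (nightly-only at the time of this preview crate):
///
/// ```ignore
/// run_async(async {
///     println!("hello from the Tokio runtime");
/// });
/// ```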
pub fn run_async<F>(future: F)
where F: Future03<Output = ()> + Send + 'static,
{
use futures_util::future::FutureExt;
use crate::async_await::compat::backward;
let future = future.map(|_| Ok(()));
run(backward::Compat::new(future))
}
/// Like `tokio::spawn`, but takes an `async` block
pub fn spawn_async<F>(future: F)
where F: Future03<Output = ()> + Send + 'static,
{
use futures_util::future::FutureExt;
use crate::async_await::compat::backward;
let future = future.map(|_| Ok(()));
spawn(backward::Compat::new(future));
}
| 21.330097 | 81 | 0.603095 |
2115d1275b141020f62537bbff2c9d8104269035 | 7,213 | use std::io;
use winapi::shared::minwindef::{WORD};
use winapi::um::wincon::{
self,
FOREGROUND_BLUE as FG_BLUE,
FOREGROUND_GREEN as FG_GREEN,
FOREGROUND_RED as FG_RED,
FOREGROUND_INTENSITY as FG_INTENSITY,
};
use winapi_util as winutil;
const FG_CYAN: WORD = FG_BLUE | FG_GREEN;
const FG_MAGENTA: WORD = FG_BLUE | FG_RED;
const FG_YELLOW: WORD = FG_GREEN | FG_RED;
const FG_WHITE: WORD = FG_BLUE | FG_GREEN | FG_RED;
/// A Windows console.
///
/// This represents a very limited set of functionality available to a Windows
/// console. In particular, it can only change text attributes such as color
/// and intensity.
///
/// There is no way to "write" to this console. Simply write to
/// stdout or stderr instead, while interleaving instructions to the console
/// to change text attributes.
///
/// A common pitfall when using a console is to forget to flush writes to
/// stdout before setting new text attributes.
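///
/// # Examples
///
/// A sketch of the flush-then-recolor pattern (not compiled here; assumes
/// this module's `Console`, `Intense` and `Color` are in scope):
///
/// ```ignore
/// use std::io::Write;
///
/// let mut con = Console::stdout()?;
/// let mut out = std::io::stdout();
/// write!(out, "plain ")?;
/// out.flush()?; // flush *before* changing attributes
/// con.fg(Intense::Yes, Color::Green)?;
/// write!(out, "green")?;
/// out.flush()?;
/// con.reset()?;
/// ```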
#[derive(Debug)]
pub struct Console {
kind: HandleKind,
start_attr: TextAttributes,
cur_attr: TextAttributes,
}
#[derive(Clone, Copy, Debug)]
enum HandleKind {
Stdout,
Stderr,
}
impl HandleKind {
fn handle(&self) -> winutil::HandleRef {
match *self {
HandleKind::Stdout => winutil::HandleRef::stdout(),
HandleKind::Stderr => winutil::HandleRef::stderr(),
}
}
}
impl Console {
/// Get a console for a standard I/O stream.
fn create_for_stream(kind: HandleKind) -> io::Result<Console> {
let h = kind.handle();
let info = winutil::console::screen_buffer_info(&h)?;
let attr = TextAttributes::from_word(info.attributes());
Ok(Console {
kind: kind,
start_attr: attr,
cur_attr: attr,
})
}
/// Create a new Console to stdout.
///
/// If there was a problem creating the console, then an error is returned.
pub fn stdout() -> io::Result<Console> {
Self::create_for_stream(HandleKind::Stdout)
}
/// Create a new Console to stderr.
///
/// If there was a problem creating the console, then an error is returned.
pub fn stderr() -> io::Result<Console> {
Self::create_for_stream(HandleKind::Stderr)
}
/// Applies the current text attributes.
fn set(&mut self) -> io::Result<()> {
winutil::console::set_text_attributes(
self.kind.handle(),
self.cur_attr.to_word(),
)
}
/// Apply the given intensity and color attributes to the console
/// foreground.
///
/// If there was a problem setting attributes on the console, then an error
/// is returned.
pub fn fg(&mut self, intense: Intense, color: Color) -> io::Result<()> {
self.cur_attr.fg_color = color;
self.cur_attr.fg_intense = intense;
self.set()
}
/// Apply the given intensity and color attributes to the console
/// background.
///
/// If there was a problem setting attributes on the console, then an error
/// is returned.
pub fn bg(&mut self, intense: Intense, color: Color) -> io::Result<()> {
self.cur_attr.bg_color = color;
self.cur_attr.bg_intense = intense;
self.set()
}
/// Reset the console text attributes to their original settings.
///
/// The original settings correspond to the text attributes on the console
/// when this `Console` value was created.
///
/// If there was a problem setting attributes on the console, then an error
/// is returned.
pub fn reset(&mut self) -> io::Result<()> {
self.cur_attr = self.start_attr;
self.set()
}
/// Toggle virtual terminal processing.
///
/// This method attempts to toggle virtual terminal processing for this
/// console. If there was a problem toggling it, then an error returned.
/// On success, the caller may assume that toggling it was successful.
///
/// When virtual terminal processing is enabled, characters emitted to the
/// console are parsed for VT100 and similar control character sequences
/// that control color and other similar operations.
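///
/// # Examples
///
/// A sketch (not compiled here): with VT processing enabled, ANSI escape
/// sequences are interpreted instead of printed literally.
///
/// ```ignore
/// let mut con = Console::stdout()?;
/// con.set_virtual_terminal_processing(true)?;
/// println!("\x1b[31mred via an ANSI escape\x1b[0m");
/// con.set_virtual_terminal_processing(false)?;
/// ```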
pub fn set_virtual_terminal_processing(
&mut self,
yes: bool,
) -> io::Result<()> {
let vt = wincon::ENABLE_VIRTUAL_TERMINAL_PROCESSING;
let handle = self.kind.handle();
let old_mode = winutil::console::mode(&handle)?;
let new_mode =
if yes {
old_mode | vt
} else {
old_mode & !vt
};
if old_mode == new_mode {
return Ok(());
}
winutil::console::set_mode(&handle, new_mode)
}
}
/// A representation of text attributes for the Windows console.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct TextAttributes {
fg_color: Color,
fg_intense: Intense,
bg_color: Color,
bg_intense: Intense,
}
impl TextAttributes {
fn to_word(&self) -> WORD {
let mut w = 0;
w |= self.fg_color.to_fg();
w |= self.fg_intense.to_fg();
w |= self.bg_color.to_bg();
w |= self.bg_intense.to_bg();
w
}
fn from_word(word: WORD) -> TextAttributes {
TextAttributes {
fg_color: Color::from_fg(word),
fg_intense: Intense::from_fg(word),
bg_color: Color::from_bg(word),
bg_intense: Intense::from_bg(word),
}
}
}
/// Whether to use intense colors or not.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Intense {
Yes,
No,
}
impl Intense {
fn to_bg(&self) -> WORD {
self.to_fg() << 4
}
fn from_bg(word: WORD) -> Intense {
Intense::from_fg(word >> 4)
}
fn to_fg(&self) -> WORD {
match *self {
Intense::No => 0,
Intense::Yes => FG_INTENSITY,
}
}
fn from_fg(word: WORD) -> Intense {
if word & FG_INTENSITY > 0 {
Intense::Yes
} else {
Intense::No
}
}
}
/// The set of available colors for use with a Windows console.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Color {
Black,
Blue,
Green,
Red,
Cyan,
Magenta,
Yellow,
White,
}
impl Color {
fn to_bg(&self) -> WORD {
self.to_fg() << 4
}
fn from_bg(word: WORD) -> Color {
Color::from_fg(word >> 4)
}
fn to_fg(&self) -> WORD {
match *self {
Color::Black => 0,
Color::Blue => FG_BLUE,
Color::Green => FG_GREEN,
Color::Red => FG_RED,
Color::Cyan => FG_CYAN,
Color::Magenta => FG_MAGENTA,
Color::Yellow => FG_YELLOW,
Color::White => FG_WHITE,
}
}
fn from_fg(word: WORD) -> Color {
match word & 0b111 {
FG_BLUE => Color::Blue,
FG_GREEN => Color::Green,
FG_RED => Color::Red,
FG_CYAN => Color::Cyan,
FG_MAGENTA => Color::Magenta,
FG_YELLOW => Color::Yellow,
FG_WHITE => Color::White,
_ => Color::Black,
}
}
}
| 27.530534 | 79 | 0.586441 |
0e8a037d56af6989cdfac7fa49d51dd87e7249d0 | 1,612 | use crate::*;
#[derive(Default)]
pub struct RoundedCorners<'a> {
static_css_props: StaticCSSProps<'a>,
}
impl<'a> RoundedCorners<'a> {
pub fn all(radius: u32) -> Self {
Self::default()
.top_left(radius)
.top_right(radius)
.bottom_left(radius)
.bottom_right(radius)
}
pub fn top(self, radius: u32) -> Self {
self.top_left(radius).top_right(radius)
}
pub fn bottom(self, radius: u32) -> Self {
self.bottom_left(radius).bottom_right(radius)
}
pub fn left(self, radius: u32) -> Self {
self.top_left(radius).bottom_left(radius)
}
pub fn right(self, radius: u32) -> Self {
self.top_right(radius).bottom_right(radius)
}
pub fn top_left(mut self, radius: u32) -> Self {
self.static_css_props
.insert("border-top-left-radius", px(radius));
self
}
pub fn top_right(mut self, radius: u32) -> Self {
self.static_css_props
.insert("border-top-right-radius", px(radius));
self
}
pub fn bottom_left(mut self, radius: u32) -> Self {
self.static_css_props
.insert("border-bottom-left-radius", px(radius));
self
}
pub fn bottom_right(mut self, radius: u32) -> Self {
self.static_css_props
.insert("border-bottom-right-radius", px(radius));
self
}
}
impl<'a> Style<'a> for RoundedCorners<'a> {
fn into_css_props_container(self) -> CssPropsContainer<'a> {
CssPropsContainer::default().static_css_props(self.static_css_props)
}
}
| 25.587302 | 76 | 0.592432 |
cc833252453b363af4347ce07de6fd97857068ad | 980 | use error::*;
use silk_proto::message::MessageType;
use silk_proto::*;
pub trait Handler: Sync + Send + 'static {
fn handler(&self, message: Message) -> Result<()>;
}
#[derive(Clone)]
pub struct EventHandler {
support: Option<i32>,
}
impl EventHandler {
pub fn new() -> Self {
EventHandler { support: None }
}
pub fn set_support(&mut self, support: i32) {
self.support = Some(support);
}
}
impl Handler for EventHandler {
fn handler(&self, msg: Message) -> Result<()> {
debug!("received consensus message: {:?}", msg);
match msg.message_type {
t if t == MessageType::ConsensusNotifyBlockCommit as i32 => {
let _block = utils::proto::unmarshal::<Block>(&msg.content)?;
Ok(())
}
_ => {
let describe = format!("unhandled massage type {:?}", msg.message_type);
Err(from_str(&describe))
}
}
}
}
| 24.5 | 88 | 0.54898 |
f8d95a797efc28a607dea27d3be0b48f6f390744 | 10,317 | use crate::{common::make_plaintext_response, HttpError};
use crate::{MAX_REQUEST_RECEIVE_DURATION, MAX_REQUEST_SIZE_BYTES};
use futures_util::StreamExt;
use hyper::{body::HttpBody, Body, Response, StatusCode};
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::Duration;
use tokio::time::timeout;
use tower::{Layer, Service};
pub(crate) struct BodyReceiverLayer {
max_request_receive_duration: Duration,
max_request_body_size_bytes: usize,
}
impl BodyReceiverLayer {
pub(crate) fn new(
max_request_receive_duration: Duration,
max_request_body_size_bytes: usize,
) -> Self {
Self {
max_request_receive_duration,
max_request_body_size_bytes,
}
}
}
impl Default for BodyReceiverLayer {
fn default() -> Self {
BodyReceiverLayer::new(MAX_REQUEST_RECEIVE_DURATION, MAX_REQUEST_SIZE_BYTES)
}
}
impl<S> Layer<S> for BodyReceiverLayer {
type Service = BodyReceiverService<S>;
fn layer(&self, inner: S) -> Self::Service {
BodyReceiverService {
max_request_receive_duration: self.max_request_receive_duration,
max_request_body_size_bytes: self.max_request_body_size_bytes,
inner,
}
}
}
#[derive(Clone)]
pub(crate) struct BodyReceiverService<S> {
max_request_receive_duration: Duration,
max_request_body_size_bytes: usize,
inner: S,
}
async fn receive_body_without_timeout(
mut body: Body,
max_request_body_size_bytes: usize,
) -> Result<Vec<u8>, HttpError> {
let body_size_hint = body.size_hint().lower() as usize;
if body_size_hint > max_request_body_size_bytes {
return Err(HttpError {
status: StatusCode::PAYLOAD_TOO_LARGE,
message: format!(
"The request body is bigger than {} bytes.",
max_request_body_size_bytes
),
});
}
let mut received_body = Vec::<u8>::with_capacity(body_size_hint);
while let Some(chunk) = body.next().await {
match chunk {
Err(err) => {
return Err(HttpError {
status: StatusCode::INTERNAL_SERVER_ERROR,
message: format!("Unexpected error while reading request: {}", err),
});
}
Ok(bytes) => {
if received_body.len() + bytes.len() > max_request_body_size_bytes {
return Err(HttpError {
status: StatusCode::PAYLOAD_TOO_LARGE,
message: format!(
"The request body is bigger than {} bytes.",
max_request_body_size_bytes
),
});
}
received_body.append(&mut bytes.to_vec());
}
}
}
Ok(received_body)
}
async fn receive_body(
body: Body,
max_request_receive_duration: Duration,
max_request_body_size_bytes: usize,
) -> Result<Vec<u8>, HttpError> {
match timeout(
max_request_receive_duration,
receive_body_without_timeout(body, max_request_body_size_bytes),
)
.await
{
Ok(res) => res,
Err(_err) => Err(HttpError {
status: StatusCode::REQUEST_TIMEOUT,
message: format!(
"The request body was not received within {:?} seconds.",
max_request_receive_duration
),
}),
}
}
impl<S, E> Service<Body> for BodyReceiverService<S>
where
S: Service<
Vec<u8>,
Response = Response<Body>,
Error = E,
Future = Pin<Box<dyn Future<Output = Result<Response<Body>, E>> + Send>>,
> + Clone
+ Send
+ 'static,
{
type Response = S::Response;
type Error = S::Error;
type Future = S::Future;
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.inner.poll_ready(cx)
}
fn call(&mut self, body: Body) -> Self::Future {
let inner = self.inner.clone();
// In case the inner service has state that's driven to readiness and
// not tracked by clones (such as `Buffer`), pass the version we have
// already called `poll_ready` on into the future, and leave its clone
// behind.
//
// Types implementing the Service trait are not necessarily thread-safe.
// So unless the caller is sure that the service implementation is
// thread-safe, we must make sure 'poll_ready' is always called before
// 'call' on the same object. Hence, if 'poll_ready' is called but not
// tracked by the 'Clone' implementation, the following sequence of events
// may panic.
//
// s1.poll_ready()
// s2 = s1.clone()
// s2.call()
let mut inner = std::mem::replace(&mut self.inner, inner);
let max_request_receive_duration = self.max_request_receive_duration;
let max_request_body_size_bytes = self.max_request_body_size_bytes;
Box::pin(async move {
match receive_body(
body,
max_request_receive_duration,
max_request_body_size_bytes,
)
.await
{
Err(HttpError { status, message }) => Ok(make_plaintext_response(status, message)),
Ok(body) => inner.call(body).await,
}
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::HttpError;
use rand::{distributions::Alphanumeric, thread_rng, Rng};
#[tokio::test]
async fn test_succesfully_parse_small_body() {
let (mut sender, body) = Body::channel();
assert!(sender
.send_data(bytes::Bytes::from("hello world"))
.await
.is_ok());
// We need to drop the channel so the service will know there aren't any new
// chunks. If we removed this line, the test would run forever.
std::mem::drop(sender);
assert_eq!(
receive_body(body, MAX_REQUEST_RECEIVE_DURATION, MAX_REQUEST_SIZE_BYTES)
.await
.ok(),
Some(Vec::<u8>::from("hello world"))
);
}
#[tokio::test]
async fn test_stop_and_return_error_when_parsing_big_body() {
let (mut sender, body) = Body::channel();
let chunk_size: usize = 1024;
let rand_string: String = thread_rng()
.sample_iter(&Alphanumeric)
.take(chunk_size)
.map(char::from)
.collect();
let jh = tokio::task::spawn(async move {
receive_body(body, MAX_REQUEST_RECEIVE_DURATION, MAX_REQUEST_SIZE_BYTES).await
});
for _i in 0..(MAX_REQUEST_SIZE_BYTES / chunk_size) {
assert!(sender
.send_data(bytes::Bytes::from(rand_string.clone()))
.await
.is_ok());
}
// We are at the limit, so sending one extra byte will succeed and cause the
// service to bail out with a payload-too-large error.
assert!(sender.send_data(bytes::Bytes::from("a")).await.is_ok());
let response = jh
.await
.unwrap()
.expect_err("The service must have returned an Err.");
assert_eq!(
response,
HttpError {
status: StatusCode::PAYLOAD_TOO_LARGE,
message: format!(
"The request body is bigger than {} bytes.",
MAX_REQUEST_SIZE_BYTES
)
}
);
// Check we can't send more data. The other end of the channel - the body - is
// dropped.
assert!(sender
.send_data(bytes::Bytes::from(rand_string.clone()))
.await
.is_err());
}
#[tokio::test]
async fn test_time_out_while_waiting_for_a_single_chunk() {
let (mut sender, body) = Body::channel();
let time_to_wait = Duration::from_secs(5);
let jh = tokio::task::spawn(async move {
receive_body(body, time_to_wait, MAX_REQUEST_SIZE_BYTES).await
});
assert!(sender
.send_data(bytes::Bytes::from("hello world"))
.await
.is_ok());
// If we dropped the sender here the test would fail, because parse_body
// would already have all the chunks and so would not time out.
tokio::time::sleep(time_to_wait + Duration::from_secs(1)).await;
let response = jh
.await
.unwrap()
.expect_err("parse_body must have returned an Err.");
assert_eq!(
response,
HttpError {
status: StatusCode::REQUEST_TIMEOUT,
message: format!(
"The request body was not received within {:?} seconds.",
time_to_wait
)
}
);
}
#[tokio::test]
async fn test_time_out_while_waiting_for_many_chunks() {
let (mut sender, body) = Body::channel();
let time_to_wait = Duration::from_secs(5);
let jh = tokio::task::spawn(async move {
receive_body(body, time_to_wait, MAX_REQUEST_SIZE_BYTES).await
});
let num_chunks = 10;
let mut chunks_sent = 0;
for _i in 0..num_chunks {
if sender
.send_data(bytes::Bytes::from("hello world"))
.await
.is_ok()
{
chunks_sent += 1;
}
tokio::time::sleep(Duration::from_secs(1)).await;
}
std::mem::drop(sender);
// We expect the sender to fail because the receiver will be closed once we
// are past the timeout.
assert!(chunks_sent < num_chunks);
assert!(chunks_sent > 1);
let response = jh
.await
.unwrap()
.expect_err("parse_body must have returned an Err.");
assert_eq!(
response,
HttpError {
status: StatusCode::REQUEST_TIMEOUT,
message: format!(
"The request body was not received within {:?} seconds.",
time_to_wait
)
}
);
}
}
| 32.961661 | 99 | 0.560725 |
fbe8cb85f0709e695436326290bb5e2fdbeeb7fa | 55,986 | //! The `pubsub` module implements a threaded subscription service in response to client RPC requests.
use crate::commitment::BlockCommitmentCache;
use core::hash::Hash;
use jsonrpc_core::futures::Future;
use jsonrpc_pubsub::{
typed::{Sink, Subscriber},
SubscriptionId,
};
use serde::Serialize;
use solana_account_decoder::{UiAccount, UiAccountEncoding};
use solana_client::rpc_response::{
Response, RpcKeyedAccount, RpcResponseContext, RpcSignatureResult,
};
use solana_ledger::{bank_forks::BankForks, blockstore::Blockstore};
use solana_runtime::bank::Bank;
use solana_sdk::{
account::Account,
clock::{Slot, UnixTimestamp},
commitment_config::{CommitmentConfig, CommitmentLevel},
pubkey::Pubkey,
signature::Signature,
transaction,
};
use solana_vote_program::vote_state::Vote;
use std::sync::{
atomic::{AtomicBool, Ordering},
mpsc::{Receiver, RecvTimeoutError, SendError, Sender},
};
use std::thread::{Builder, JoinHandle};
use std::time::Duration;
use std::{
collections::{HashMap, HashSet},
iter,
sync::{Arc, Mutex, RwLock},
};
use tokio::runtime::{Builder as RuntimeBuilder, Runtime, TaskExecutor};
const RECEIVE_DELAY_MILLIS: u64 = 100;
#[derive(Serialize, Deserialize, Clone, Copy, Debug)]
pub struct SlotInfo {
pub slot: Slot,
pub parent: Slot,
pub root: Slot,
}
#[derive(Default)]
pub struct CacheSlotInfo {
pub current_slot: Slot,
pub node_root: Slot,
pub largest_confirmed_root: Slot,
pub highest_confirmed_slot: Slot,
}
// A more human-friendly version of Vote, with the bank state signature base58 encoded.
#[derive(Serialize, Deserialize, Debug)]
pub struct RpcVote {
pub slots: Vec<Slot>,
pub hash: String,
pub timestamp: Option<UnixTimestamp>,
}
enum NotificationEntry {
Slot(SlotInfo),
Vote(Vote),
Root(Slot),
Frozen(Slot),
Bank(CacheSlotInfo),
Gossip(Slot),
}
impl std::fmt::Debug for NotificationEntry {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
NotificationEntry::Root(root) => write!(f, "Root({})", root),
NotificationEntry::Frozen(slot) => write!(f, "Frozen({})", slot),
NotificationEntry::Vote(vote) => write!(f, "Vote({:?})", vote),
NotificationEntry::Slot(slot_info) => write!(f, "Slot({:?})", slot_info),
NotificationEntry::Bank(cache_slot_info) => write!(
f,
"Bank({{current_slot: {:?}}})",
cache_slot_info.current_slot
),
NotificationEntry::Gossip(slot) => write!(f, "Gossip({:?})", slot),
}
}
}
struct SubscriptionData<S> {
sink: Sink<S>,
commitment: CommitmentConfig,
last_notified_slot: RwLock<Slot>,
}
type RpcAccountSubscriptions =
RwLock<HashMap<Pubkey, HashMap<SubscriptionId, SubscriptionData<Response<UiAccount>>>>>;
type RpcProgramSubscriptions =
RwLock<HashMap<Pubkey, HashMap<SubscriptionId, SubscriptionData<Response<RpcKeyedAccount>>>>>;
type RpcSignatureSubscriptions = RwLock<
HashMap<Signature, HashMap<SubscriptionId, SubscriptionData<Response<RpcSignatureResult>>>>,
>;
type RpcSlotSubscriptions = RwLock<HashMap<SubscriptionId, Sink<SlotInfo>>>;
type RpcVoteSubscriptions = RwLock<HashMap<SubscriptionId, Sink<RpcVote>>>;
type RpcRootSubscriptions = RwLock<HashMap<SubscriptionId, Sink<Slot>>>;
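/// Registers `subscriber` under `hashmap_key`, assigning it `sub_id` and
/// defaulting the commitment level to `single` when none is supplied.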
fn add_subscription<K, S>(
subscriptions: &mut HashMap<K, HashMap<SubscriptionId, SubscriptionData<S>>>,
hashmap_key: K,
commitment: Option<CommitmentConfig>,
sub_id: SubscriptionId,
subscriber: Subscriber<S>,
last_notified_slot: Slot,
) where
K: Eq + Hash,
S: Clone,
{
let sink = subscriber.assign_id(sub_id.clone()).unwrap();
let commitment = commitment.unwrap_or_else(CommitmentConfig::single);
let subscription_data = SubscriptionData {
sink,
commitment,
last_notified_slot: RwLock::new(last_notified_slot),
};
if let Some(current_hashmap) = subscriptions.get_mut(&hashmap_key) {
current_hashmap.insert(sub_id, subscription_data);
return;
}
let mut hashmap = HashMap::new();
hashmap.insert(sub_id, subscription_data);
subscriptions.insert(hashmap_key, hashmap);
}
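/// Removes the subscription identified by `sub_id` from whichever key it is
/// registered under, dropping the key once its last subscription is gone.
/// Returns whether a matching subscription was found.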
fn remove_subscription<K, S>(
subscriptions: &mut HashMap<K, HashMap<SubscriptionId, SubscriptionData<S>>>,
sub_id: &SubscriptionId,
) -> bool
where
K: Eq + Hash,
S: Clone,
{
let mut found = false;
subscriptions.retain(|_, v| {
v.retain(|k, _| {
let retain = k != sub_id;
if !retain {
found = true;
}
retain
});
!v.is_empty()
});
found
}
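/// Shared notification driver for account, program and signature subscriptions.
/// For each subscriber registered under `hashmap_key` it resolves the slot that
/// matches the subscriber's commitment level from `cache_slot_info`, queries
/// that bank via `bank_method`, filters the result with `filter_results`, and
/// pushes any surviving items through `notifier`. The returned set of notified
/// subscription ids lets callers auto-remove one-shot (signature) subscriptions.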
#[allow(clippy::type_complexity)]
fn check_commitment_and_notify<K, S, B, F, X>(
subscriptions: &HashMap<K, HashMap<SubscriptionId, SubscriptionData<Response<S>>>>,
hashmap_key: &K,
bank_forks: &Arc<RwLock<BankForks>>,
cache_slot_info: &CacheSlotInfo,
bank_method: B,
filter_results: F,
notifier: &RpcNotifier,
) -> HashSet<SubscriptionId>
where
K: Eq + Hash + Clone + Copy,
S: Clone + Serialize,
B: Fn(&Bank, &K) -> X,
F: Fn(X, Slot) -> (Box<dyn Iterator<Item = S>>, Slot),
X: Clone + Serialize + Default,
{
let mut notified_set: HashSet<SubscriptionId> = HashSet::new();
if let Some(hashmap) = subscriptions.get(hashmap_key) {
for (
sub_id,
SubscriptionData {
sink,
commitment,
last_notified_slot,
},
) in hashmap.iter()
{
let slot = match commitment.commitment {
CommitmentLevel::Max => cache_slot_info.largest_confirmed_root,
CommitmentLevel::Recent => cache_slot_info.current_slot,
CommitmentLevel::Root => cache_slot_info.node_root,
CommitmentLevel::Single | CommitmentLevel::SingleGossip => {
cache_slot_info.highest_confirmed_slot
}
};
let results = {
let bank_forks = bank_forks.read().unwrap();
bank_forks
.get(slot)
.map(|desired_bank| bank_method(&desired_bank, hashmap_key))
.unwrap_or_default()
};
let mut w_last_notified_slot = last_notified_slot.write().unwrap();
let (filter_results, result_slot) = filter_results(results, *w_last_notified_slot);
for result in filter_results {
notifier.notify(
Response {
context: RpcResponseContext { slot },
value: result,
},
sink,
);
notified_set.insert(sub_id.clone());
*w_last_notified_slot = result_slot;
}
}
}
notified_set
}
struct RpcNotifier(TaskExecutor);
impl RpcNotifier {
fn notify<T>(&self, value: T, sink: &Sink<T>)
where
T: serde::Serialize,
{
self.0
.spawn(sink.notify(Ok(value)).map(|_| ()).map_err(|_| ()));
}
}
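/// Maps the raw `(Account, slot)` lookup result onto the account notifications
/// to emit: yields a single encoded account when the modification slot differs
/// from `last_notified_slot` (covering both fresh updates and fork reversions),
/// and nothing otherwise.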
fn filter_account_result(
result: Option<(Account, Slot)>,
last_notified_slot: Slot,
) -> (Box<dyn Iterator<Item = UiAccount>>, Slot) {
if let Some((account, fork)) = result {
        // The account was modified in a slot other than the one we last notified
        // for. In particular, fork < last_notified_slot means we previously
        // notified for a fork and must now report that the account state has
        // been reverted.
if fork != last_notified_slot {
return (
Box::new(iter::once(UiAccount::encode(
account,
UiAccountEncoding::Binary,
))),
fork,
);
}
}
(Box::new(iter::empty()), last_notified_slot)
}
fn filter_signature_result(
result: Option<transaction::Result<()>>,
last_notified_slot: Slot,
) -> (Box<dyn Iterator<Item = RpcSignatureResult>>, Slot) {
(
Box::new(
result
.into_iter()
.map(|result| RpcSignatureResult { err: result.err() }),
),
last_notified_slot,
)
}
fn filter_program_results(
accounts: Vec<(Pubkey, Account)>,
last_notified_slot: Slot,
) -> (Box<dyn Iterator<Item = RpcKeyedAccount>>, Slot) {
(
Box::new(
accounts
.into_iter()
.map(|(pubkey, account)| RpcKeyedAccount {
pubkey: pubkey.to_string(),
account: UiAccount::encode(account, UiAccountEncoding::Binary),
}),
),
last_notified_slot,
)
}
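/// Shared handles to every live subscription map. The `gossip_*` maps hold
/// subscriptions registered at `SingleGossip` commitment, which are notified
/// from gossip-confirmed slots rather than from regular bank notifications.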
#[derive(Clone)]
struct Subscriptions {
account_subscriptions: Arc<RpcAccountSubscriptions>,
program_subscriptions: Arc<RpcProgramSubscriptions>,
signature_subscriptions: Arc<RpcSignatureSubscriptions>,
gossip_account_subscriptions: Arc<RpcAccountSubscriptions>,
gossip_program_subscriptions: Arc<RpcProgramSubscriptions>,
gossip_signature_subscriptions: Arc<RpcSignatureSubscriptions>,
slot_subscriptions: Arc<RpcSlotSubscriptions>,
vote_subscriptions: Arc<RpcVoteSubscriptions>,
root_subscriptions: Arc<RpcRootSubscriptions>,
}
pub struct RpcSubscriptions {
subscriptions: Subscriptions,
notification_sender: Arc<Mutex<Sender<NotificationEntry>>>,
t_cleanup: Option<JoinHandle<()>>,
notifier_runtime: Option<Runtime>,
bank_forks: Arc<RwLock<BankForks>>,
block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
last_checked_slots: Arc<RwLock<HashMap<CommitmentLevel, Slot>>>,
exit: Arc<AtomicBool>,
}
impl Drop for RpcSubscriptions {
fn drop(&mut self) {
self.shutdown().unwrap_or_else(|err| {
warn!("RPC Notification - shutdown error: {:?}", err);
});
}
}
impl RpcSubscriptions {
pub fn new(
exit: &Arc<AtomicBool>,
bank_forks: Arc<RwLock<BankForks>>,
block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
) -> Self {
let (notification_sender, notification_receiver): (
Sender<NotificationEntry>,
Receiver<NotificationEntry>,
) = std::sync::mpsc::channel();
let account_subscriptions = Arc::new(RpcAccountSubscriptions::default());
let program_subscriptions = Arc::new(RpcProgramSubscriptions::default());
let signature_subscriptions = Arc::new(RpcSignatureSubscriptions::default());
let gossip_account_subscriptions = Arc::new(RpcAccountSubscriptions::default());
let gossip_program_subscriptions = Arc::new(RpcProgramSubscriptions::default());
let gossip_signature_subscriptions = Arc::new(RpcSignatureSubscriptions::default());
let slot_subscriptions = Arc::new(RpcSlotSubscriptions::default());
let vote_subscriptions = Arc::new(RpcVoteSubscriptions::default());
let root_subscriptions = Arc::new(RpcRootSubscriptions::default());
let notification_sender = Arc::new(Mutex::new(notification_sender));
let _bank_forks = bank_forks.clone();
let _block_commitment_cache = block_commitment_cache.clone();
let exit_clone = exit.clone();
let subscriptions = Subscriptions {
account_subscriptions,
program_subscriptions,
signature_subscriptions,
gossip_account_subscriptions,
gossip_program_subscriptions,
gossip_signature_subscriptions,
slot_subscriptions,
vote_subscriptions,
root_subscriptions,
};
let _subscriptions = subscriptions.clone();
let last_checked_slots = Arc::new(RwLock::new(HashMap::new()));
let _last_checked_slots = last_checked_slots.clone();
let notifier_runtime = RuntimeBuilder::new()
.core_threads(1)
.name_prefix("solana-rpc-notifier-")
.build()
.unwrap();
let notifier = RpcNotifier(notifier_runtime.executor());
let t_cleanup = Builder::new()
.name("solana-rpc-notifications".to_string())
.spawn(move || {
Self::process_notifications(
exit_clone,
notifier,
notification_receiver,
_subscriptions,
_bank_forks,
_last_checked_slots,
);
})
.unwrap();
Self {
subscriptions,
notification_sender,
notifier_runtime: Some(notifier_runtime),
t_cleanup: Some(t_cleanup),
bank_forks,
block_commitment_cache,
last_checked_slots,
exit: exit.clone(),
}
}
pub fn default_with_blockstore_bank_forks(
blockstore: Arc<Blockstore>,
bank_forks: Arc<RwLock<BankForks>>,
) -> Self {
Self::new(
&Arc::new(AtomicBool::new(false)),
bank_forks,
Arc::new(RwLock::new(BlockCommitmentCache::default_with_blockstore(
blockstore,
))),
)
}
fn check_account(
pubkey: &Pubkey,
bank_forks: &Arc<RwLock<BankForks>>,
account_subscriptions: Arc<RpcAccountSubscriptions>,
notifier: &RpcNotifier,
cache_slot_info: &CacheSlotInfo,
) {
let subscriptions = account_subscriptions.read().unwrap();
check_commitment_and_notify(
&subscriptions,
pubkey,
bank_forks,
cache_slot_info,
Bank::get_account_modified_slot,
filter_account_result,
notifier,
);
}
fn check_program(
program_id: &Pubkey,
bank_forks: &Arc<RwLock<BankForks>>,
program_subscriptions: Arc<RpcProgramSubscriptions>,
notifier: &RpcNotifier,
cache_slot_info: &CacheSlotInfo,
) {
let subscriptions = program_subscriptions.read().unwrap();
check_commitment_and_notify(
&subscriptions,
program_id,
bank_forks,
cache_slot_info,
Bank::get_program_accounts_modified_since_parent,
filter_program_results,
notifier,
);
}
fn check_signature(
signature: &Signature,
bank_forks: &Arc<RwLock<BankForks>>,
signature_subscriptions: Arc<RpcSignatureSubscriptions>,
notifier: &RpcNotifier,
cache_slot_info: &CacheSlotInfo,
) {
let mut subscriptions = signature_subscriptions.write().unwrap();
let notified_ids = check_commitment_and_notify(
&subscriptions,
signature,
bank_forks,
cache_slot_info,
Bank::get_signature_status_processed_since_parent,
filter_signature_result,
notifier,
);
if let Some(subscription_ids) = subscriptions.get_mut(signature) {
subscription_ids.retain(|k, _| !notified_ids.contains(k));
if subscription_ids.is_empty() {
subscriptions.remove(&signature);
}
}
}
pub fn add_account_subscription(
&self,
pubkey: Pubkey,
commitment: Option<CommitmentConfig>,
sub_id: SubscriptionId,
subscriber: Subscriber<Response<UiAccount>>,
) {
let commitment_level = commitment
.unwrap_or_else(CommitmentConfig::single)
.commitment;
let slot = match commitment_level {
CommitmentLevel::Max => self
.block_commitment_cache
.read()
.unwrap()
.largest_confirmed_root(),
CommitmentLevel::Recent => self.block_commitment_cache.read().unwrap().slot(),
CommitmentLevel::Root => self.block_commitment_cache.read().unwrap().root(),
CommitmentLevel::Single => self
.block_commitment_cache
.read()
.unwrap()
.highest_confirmed_slot(),
CommitmentLevel::SingleGossip => *self
.last_checked_slots
.read()
.unwrap()
.get(&CommitmentLevel::SingleGossip)
.unwrap_or(&0),
};
let last_notified_slot = if let Some((_account, slot)) = self
.bank_forks
.read()
.unwrap()
.get(slot)
.and_then(|bank| bank.get_account_modified_slot(&pubkey))
{
slot
} else {
0
};
let mut subscriptions = if commitment_level == CommitmentLevel::SingleGossip {
self.subscriptions
.gossip_account_subscriptions
.write()
.unwrap()
} else {
self.subscriptions.account_subscriptions.write().unwrap()
};
add_subscription(
&mut subscriptions,
pubkey,
commitment,
sub_id,
subscriber,
last_notified_slot,
);
}
pub fn remove_account_subscription(&self, id: &SubscriptionId) -> bool {
let mut subscriptions = self.subscriptions.account_subscriptions.write().unwrap();
if remove_subscription(&mut subscriptions, id) {
true
} else {
let mut subscriptions = self
.subscriptions
.gossip_account_subscriptions
.write()
.unwrap();
remove_subscription(&mut subscriptions, id)
}
}
pub fn add_program_subscription(
&self,
program_id: Pubkey,
commitment: Option<CommitmentConfig>,
sub_id: SubscriptionId,
subscriber: Subscriber<Response<RpcKeyedAccount>>,
) {
let commitment_level = commitment
.unwrap_or_else(CommitmentConfig::recent)
.commitment;
let mut subscriptions = if commitment_level == CommitmentLevel::SingleGossip {
self.subscriptions
.gossip_program_subscriptions
.write()
.unwrap()
} else {
self.subscriptions.program_subscriptions.write().unwrap()
};
add_subscription(
&mut subscriptions,
program_id,
commitment,
sub_id,
subscriber,
0, // last_notified_slot is not utilized for program subscriptions
);
}
pub fn remove_program_subscription(&self, id: &SubscriptionId) -> bool {
let mut subscriptions = self.subscriptions.program_subscriptions.write().unwrap();
if remove_subscription(&mut subscriptions, id) {
true
} else {
let mut subscriptions = self
.subscriptions
.gossip_program_subscriptions
.write()
.unwrap();
remove_subscription(&mut subscriptions, id)
}
}
pub fn add_signature_subscription(
&self,
signature: Signature,
commitment: Option<CommitmentConfig>,
sub_id: SubscriptionId,
subscriber: Subscriber<Response<RpcSignatureResult>>,
) {
let commitment_level = commitment
.unwrap_or_else(CommitmentConfig::recent)
.commitment;
let mut subscriptions = if commitment_level == CommitmentLevel::SingleGossip {
self.subscriptions
.gossip_signature_subscriptions
.write()
.unwrap()
} else {
self.subscriptions.signature_subscriptions.write().unwrap()
};
add_subscription(
&mut subscriptions,
signature,
commitment,
sub_id,
subscriber,
0, // last_notified_slot is not utilized for signature subscriptions
);
}
pub fn remove_signature_subscription(&self, id: &SubscriptionId) -> bool {
let mut subscriptions = self.subscriptions.signature_subscriptions.write().unwrap();
if remove_subscription(&mut subscriptions, id) {
true
} else {
let mut subscriptions = self
.subscriptions
.gossip_signature_subscriptions
.write()
.unwrap();
remove_subscription(&mut subscriptions, id)
}
}
/// Notify subscribers of changes to any accounts or new signatures since
/// the bank's last checkpoint.
pub fn notify_subscribers(&self, cache_slot_info: CacheSlotInfo) {
self.enqueue_notification(NotificationEntry::Bank(cache_slot_info));
}
/// Notify SingleGossip commitment-level subscribers of changes to any accounts or new
/// signatures.
pub fn notify_gossip_subscribers(&self, slot: Slot) {
self.enqueue_notification(NotificationEntry::Gossip(slot));
}
pub fn add_slot_subscription(&self, sub_id: SubscriptionId, subscriber: Subscriber<SlotInfo>) {
let sink = subscriber.assign_id(sub_id.clone()).unwrap();
let mut subscriptions = self.subscriptions.slot_subscriptions.write().unwrap();
subscriptions.insert(sub_id, sink);
}
pub fn remove_slot_subscription(&self, id: &SubscriptionId) -> bool {
let mut subscriptions = self.subscriptions.slot_subscriptions.write().unwrap();
subscriptions.remove(id).is_some()
}
pub fn notify_slot(&self, slot: Slot, parent: Slot, root: Slot) {
self.enqueue_notification(NotificationEntry::Slot(SlotInfo { slot, parent, root }));
}
pub fn add_vote_subscription(&self, sub_id: SubscriptionId, subscriber: Subscriber<RpcVote>) {
let sink = subscriber.assign_id(sub_id.clone()).unwrap();
let mut subscriptions = self.subscriptions.vote_subscriptions.write().unwrap();
subscriptions.insert(sub_id, sink);
}
pub fn remove_vote_subscription(&self, id: &SubscriptionId) -> bool {
let mut subscriptions = self.subscriptions.vote_subscriptions.write().unwrap();
subscriptions.remove(id).is_some()
}
pub fn notify_vote(&self, vote: &Vote) {
self.enqueue_notification(NotificationEntry::Vote(vote.clone()));
}
pub fn notify_frozen(&self, frozen_slot: Slot) {
self.enqueue_notification(NotificationEntry::Frozen(frozen_slot));
}
pub fn add_root_subscription(&self, sub_id: SubscriptionId, subscriber: Subscriber<Slot>) {
let sink = subscriber.assign_id(sub_id.clone()).unwrap();
let mut subscriptions = self.subscriptions.root_subscriptions.write().unwrap();
subscriptions.insert(sub_id, sink);
}
pub fn remove_root_subscription(&self, id: &SubscriptionId) -> bool {
let mut subscriptions = self.subscriptions.root_subscriptions.write().unwrap();
subscriptions.remove(id).is_some()
}
pub fn notify_roots(&self, mut rooted_slots: Vec<Slot>) {
rooted_slots.sort();
rooted_slots.into_iter().for_each(|root| {
self.enqueue_notification(NotificationEntry::Root(root));
});
}
fn enqueue_notification(&self, notification_entry: NotificationEntry) {
match self
.notification_sender
.lock()
.unwrap()
.send(notification_entry)
{
Ok(()) => (),
Err(SendError(notification)) => {
warn!(
"Dropped RPC Notification - receiver disconnected : {:?}",
notification
);
}
}
}
fn process_notifications(
exit: Arc<AtomicBool>,
notifier: RpcNotifier,
notification_receiver: Receiver<NotificationEntry>,
subscriptions: Subscriptions,
bank_forks: Arc<RwLock<BankForks>>,
last_checked_slots: Arc<RwLock<HashMap<CommitmentLevel, Slot>>>,
) {
let mut pending_gossip_notifications = HashSet::new();
loop {
if exit.load(Ordering::Relaxed) {
break;
}
match notification_receiver.recv_timeout(Duration::from_millis(RECEIVE_DELAY_MILLIS)) {
Ok(notification_entry) => match notification_entry {
NotificationEntry::Slot(slot_info) => {
let subscriptions = subscriptions.slot_subscriptions.read().unwrap();
for (_, sink) in subscriptions.iter() {
notifier.notify(slot_info, sink);
}
}
NotificationEntry::Vote(ref vote_info) => {
let subscriptions = subscriptions.vote_subscriptions.read().unwrap();
for (_, sink) in subscriptions.iter() {
notifier.notify(
RpcVote {
slots: vote_info.slots.clone(),
hash: bs58::encode(vote_info.hash).into_string(),
timestamp: vote_info.timestamp,
},
sink,
);
}
}
NotificationEntry::Root(root) => {
let subscriptions = subscriptions.root_subscriptions.read().unwrap();
for (_, sink) in subscriptions.iter() {
notifier.notify(root, sink);
}
// Prune old pending notifications
pending_gossip_notifications = pending_gossip_notifications
.into_iter()
.filter(|&s| s > root)
.collect();
}
NotificationEntry::Bank(cache_slot_info) => {
RpcSubscriptions::notify_accounts_programs_signatures(
&subscriptions.account_subscriptions,
&subscriptions.program_subscriptions,
&subscriptions.signature_subscriptions,
&bank_forks,
&cache_slot_info,
¬ifier,
)
}
NotificationEntry::Frozen(slot) => {
if pending_gossip_notifications.remove(&slot) {
Self::process_gossip_notification(
slot,
¬ifier,
&subscriptions,
&bank_forks,
&last_checked_slots,
);
}
}
NotificationEntry::Gossip(slot) => {
let bank_frozen = bank_forks
.read()
.unwrap()
.get(slot)
.filter(|b| b.is_frozen())
.is_some();
if !bank_frozen {
pending_gossip_notifications.insert(slot);
} else {
Self::process_gossip_notification(
slot,
¬ifier,
&subscriptions,
&bank_forks,
&last_checked_slots,
);
}
}
},
Err(RecvTimeoutError::Timeout) => {
// not a problem - try reading again
}
Err(RecvTimeoutError::Disconnected) => {
warn!("RPC Notification thread - sender disconnected");
break;
}
}
}
}
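    /// Fires SingleGossip-commitment notifications for `slot`, skipping any slot
    /// at or below the last gossip slot already handled in order to avoid stale
    /// or duplicate notifications.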
fn process_gossip_notification(
slot: Slot,
notifier: &RpcNotifier,
subscriptions: &Subscriptions,
bank_forks: &Arc<RwLock<BankForks>>,
last_checked_slots: &Arc<RwLock<HashMap<CommitmentLevel, Slot>>>,
) {
let mut last_checked_slots_lock = last_checked_slots.write().unwrap();
let last_checked_slot = last_checked_slots_lock
.get(&CommitmentLevel::SingleGossip)
.cloned()
.unwrap_or_default();
if slot > last_checked_slot {
last_checked_slots_lock.insert(CommitmentLevel::SingleGossip, slot);
} else {
// Avoid sending stale or duplicate notifications
return;
}
drop(last_checked_slots_lock);
let cache_slot_info = CacheSlotInfo {
highest_confirmed_slot: slot,
..CacheSlotInfo::default()
};
RpcSubscriptions::notify_accounts_programs_signatures(
&subscriptions.gossip_account_subscriptions,
&subscriptions.gossip_program_subscriptions,
&subscriptions.gossip_signature_subscriptions,
&bank_forks,
&cache_slot_info,
¬ifier,
);
}
fn notify_accounts_programs_signatures(
account_subscriptions: &Arc<RpcAccountSubscriptions>,
program_subscriptions: &Arc<RpcProgramSubscriptions>,
signature_subscriptions: &Arc<RpcSignatureSubscriptions>,
bank_forks: &Arc<RwLock<BankForks>>,
cache_slot_info: &CacheSlotInfo,
notifier: &RpcNotifier,
) {
let pubkeys: Vec<_> = {
let subs = account_subscriptions.read().unwrap();
subs.keys().cloned().collect()
};
for pubkey in &pubkeys {
Self::check_account(
pubkey,
&bank_forks,
account_subscriptions.clone(),
¬ifier,
&cache_slot_info,
);
}
let programs: Vec<_> = {
let subs = program_subscriptions.read().unwrap();
subs.keys().cloned().collect()
};
for program_id in &programs {
Self::check_program(
program_id,
&bank_forks,
program_subscriptions.clone(),
¬ifier,
&cache_slot_info,
);
}
let signatures: Vec<_> = {
let subs = signature_subscriptions.read().unwrap();
subs.keys().cloned().collect()
};
for signature in &signatures {
Self::check_signature(
signature,
&bank_forks,
signature_subscriptions.clone(),
¬ifier,
&cache_slot_info,
);
}
}
fn shutdown(&mut self) -> std::thread::Result<()> {
if let Some(runtime) = self.notifier_runtime.take() {
info!("RPC Notifier runtime - shutting down");
let _ = runtime.shutdown_now().wait();
info!("RPC Notifier runtime - shut down");
}
if self.t_cleanup.is_some() {
info!("RPC Notification thread - shutting down");
self.exit.store(true, Ordering::Relaxed);
let x = self.t_cleanup.take().unwrap().join();
info!("RPC Notification thread - shut down.");
x
} else {
warn!("RPC Notification thread - already shut down.");
Ok(())
}
}
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use crate::commitment::BlockCommitment;
use jsonrpc_core::futures::{self, stream::Stream};
use jsonrpc_pubsub::typed::Subscriber;
use serial_test_derive::serial;
use solana_ledger::{
blockstore::Blockstore,
genesis_utils::{create_genesis_config, GenesisConfigInfo},
get_tmp_ledger_path,
};
use solana_sdk::{
signature::{Keypair, Signer},
system_transaction,
};
use std::{fmt::Debug, sync::mpsc::channel, time::Instant};
use tokio::{prelude::FutureExt, runtime::Runtime, timer::Delay};
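    /// Polls `receiver` for a single item on a dedicated runtime, panicking if
    /// the stream errors, ends, or stays silent past the receive delay. The item
    /// is returned together with the receiver so callers can keep polling.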
pub(crate) fn robust_poll_or_panic<T: Debug + Send + 'static>(
receiver: futures::sync::mpsc::Receiver<T>,
) -> (T, futures::sync::mpsc::Receiver<T>) {
let (inner_sender, inner_receiver) = channel();
let mut rt = Runtime::new().unwrap();
rt.spawn(futures::lazy(|| {
let recv_timeout = receiver
.into_future()
.timeout(Duration::from_millis(RECEIVE_DELAY_MILLIS))
.map(move |result| match result {
(Some(value), receiver) => {
inner_sender.send((value, receiver)).expect("send error")
}
(None, _) => panic!("unexpected end of stream"),
})
.map_err(|err| panic!("stream error {:?}", err));
const INITIAL_DELAY_MS: u64 = RECEIVE_DELAY_MILLIS * 2;
Delay::new(Instant::now() + Duration::from_millis(INITIAL_DELAY_MS))
.and_then(|_| recv_timeout)
.map_err(|err| panic!("timer error {:?}", err))
}));
inner_receiver.recv().expect("recv error")
}
#[test]
#[serial]
fn test_check_account_subscribe() {
let GenesisConfigInfo {
genesis_config,
mint_keypair,
..
} = create_genesis_config(100);
let ledger_path = get_tmp_ledger_path!();
let blockstore = Arc::new(Blockstore::open(&ledger_path).unwrap());
let bank = Bank::new(&genesis_config);
let blockhash = bank.last_blockhash();
let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
let bank0 = bank_forks.read().unwrap().get(0).unwrap().clone();
let bank1 = Bank::new_from_parent(&bank0, &Pubkey::default(), 1);
bank_forks.write().unwrap().insert(bank1);
let alice = Keypair::new();
let (subscriber, _id_receiver, transport_receiver) =
Subscriber::new_test("accountNotification");
let sub_id = SubscriptionId::Number(0 as u64);
let exit = Arc::new(AtomicBool::new(false));
let subscriptions = RpcSubscriptions::new(
&exit,
bank_forks.clone(),
Arc::new(RwLock::new(
BlockCommitmentCache::new_for_tests_with_blockstore_bank(
blockstore,
bank_forks.read().unwrap().get(1).unwrap().clone(),
1,
),
)),
);
subscriptions.add_account_subscription(
alice.pubkey(),
Some(CommitmentConfig::recent()),
sub_id.clone(),
subscriber,
);
assert!(subscriptions
.subscriptions
.account_subscriptions
.read()
.unwrap()
.contains_key(&alice.pubkey()));
let tx = system_transaction::create_account(
&mint_keypair,
&alice,
blockhash,
1,
16,
&solana_budget_program::id(),
);
bank_forks
.write()
.unwrap()
.get(1)
.unwrap()
.process_transaction(&tx)
.unwrap();
let mut cache_slot_info = CacheSlotInfo::default();
cache_slot_info.current_slot = 1;
subscriptions.notify_subscribers(cache_slot_info);
let (response, _) = robust_poll_or_panic(transport_receiver);
let expected = json!({
"jsonrpc": "2.0",
"method": "accountNotification",
"params": {
"result": {
"context": { "slot": 1 },
"value": {
"data": "1111111111111111",
"executable": false,
"lamports": 1,
"owner": "Budget1111111111111111111111111111111111111",
"rentEpoch": 1,
},
},
"subscription": 0,
}
});
assert_eq!(serde_json::to_string(&expected).unwrap(), response);
subscriptions.remove_account_subscription(&sub_id);
assert!(!subscriptions
.subscriptions
.account_subscriptions
.read()
.unwrap()
.contains_key(&alice.pubkey()));
}
#[test]
#[serial]
fn test_check_program_subscribe() {
let GenesisConfigInfo {
genesis_config,
mint_keypair,
..
} = create_genesis_config(100);
let ledger_path = get_tmp_ledger_path!();
let blockstore = Arc::new(Blockstore::open(&ledger_path).unwrap());
let bank = Bank::new(&genesis_config);
let blockhash = bank.last_blockhash();
let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
let alice = Keypair::new();
let tx = system_transaction::create_account(
&mint_keypair,
&alice,
blockhash,
1,
16,
&solana_budget_program::id(),
);
bank_forks
.write()
.unwrap()
.get(0)
.unwrap()
.process_transaction(&tx)
.unwrap();
let (subscriber, _id_receiver, transport_receiver) =
Subscriber::new_test("programNotification");
let sub_id = SubscriptionId::Number(0 as u64);
let exit = Arc::new(AtomicBool::new(false));
let subscriptions = RpcSubscriptions::new(
&exit,
bank_forks,
Arc::new(RwLock::new(
BlockCommitmentCache::new_for_tests_with_blockstore(blockstore),
)),
);
subscriptions.add_program_subscription(
solana_budget_program::id(),
None,
sub_id.clone(),
subscriber,
);
assert!(subscriptions
.subscriptions
.program_subscriptions
.read()
.unwrap()
.contains_key(&solana_budget_program::id()));
subscriptions.notify_subscribers(CacheSlotInfo::default());
let (response, _) = robust_poll_or_panic(transport_receiver);
let expected = json!({
"jsonrpc": "2.0",
"method": "programNotification",
"params": {
"result": {
"context": { "slot": 0 },
"value": {
"account": {
"data": "1111111111111111",
"executable": false,
"lamports": 1,
"owner": "Budget1111111111111111111111111111111111111",
"rentEpoch": 1,
},
"pubkey": alice.pubkey().to_string(),
},
},
"subscription": 0,
}
});
assert_eq!(serde_json::to_string(&expected).unwrap(), response);
subscriptions.remove_program_subscription(&sub_id);
assert!(!subscriptions
.subscriptions
.program_subscriptions
.read()
.unwrap()
.contains_key(&solana_budget_program::id()));
}
#[test]
#[serial]
fn test_check_signature_subscribe() {
let GenesisConfigInfo {
genesis_config,
mint_keypair,
..
} = create_genesis_config(100);
let ledger_path = get_tmp_ledger_path!();
let blockstore = Arc::new(Blockstore::open(&ledger_path).unwrap());
let bank = Bank::new(&genesis_config);
let blockhash = bank.last_blockhash();
let mut bank_forks = BankForks::new(bank);
let alice = Keypair::new();
let past_bank_tx =
system_transaction::transfer(&mint_keypair, &alice.pubkey(), 1, blockhash);
let unprocessed_tx =
system_transaction::transfer(&mint_keypair, &alice.pubkey(), 2, blockhash);
let processed_tx =
system_transaction::transfer(&mint_keypair, &alice.pubkey(), 3, blockhash);
bank_forks
.get(0)
.unwrap()
.process_transaction(&past_bank_tx)
.unwrap();
let next_bank =
Bank::new_from_parent(&bank_forks.banks[&0].clone(), &Pubkey::new_rand(), 1);
bank_forks.insert(next_bank);
bank_forks
.get(1)
.unwrap()
.process_transaction(&processed_tx)
.unwrap();
let bank1 = bank_forks[1].clone();
let bank_forks = Arc::new(RwLock::new(bank_forks));
let mut cache0 = BlockCommitment::default();
cache0.increase_confirmation_stake(1, 10);
let cache1 = BlockCommitment::default();
let mut block_commitment = HashMap::new();
block_commitment.entry(0).or_insert(cache0);
block_commitment.entry(1).or_insert(cache1);
let block_commitment_cache =
BlockCommitmentCache::new(block_commitment, 0, 10, bank1, blockstore, 0, 0);
let exit = Arc::new(AtomicBool::new(false));
let subscriptions = RpcSubscriptions::new(
&exit,
bank_forks,
Arc::new(RwLock::new(block_commitment_cache)),
);
let (past_bank_sub1, _id_receiver, past_bank_recv1) =
Subscriber::new_test("signatureNotification");
let (past_bank_sub2, _id_receiver, past_bank_recv2) =
Subscriber::new_test("signatureNotification");
let (processed_sub, _id_receiver, processed_recv) =
Subscriber::new_test("signatureNotification");
subscriptions.add_signature_subscription(
past_bank_tx.signatures[0],
Some(CommitmentConfig::recent()),
SubscriptionId::Number(1 as u64),
past_bank_sub1,
);
subscriptions.add_signature_subscription(
past_bank_tx.signatures[0],
Some(CommitmentConfig::root()),
SubscriptionId::Number(2 as u64),
past_bank_sub2,
);
subscriptions.add_signature_subscription(
processed_tx.signatures[0],
Some(CommitmentConfig::recent()),
SubscriptionId::Number(3 as u64),
processed_sub,
);
subscriptions.add_signature_subscription(
unprocessed_tx.signatures[0],
Some(CommitmentConfig::recent()),
SubscriptionId::Number(4 as u64),
Subscriber::new_test("signatureNotification").0,
);
{
let sig_subs = subscriptions
.subscriptions
.signature_subscriptions
.read()
.unwrap();
assert_eq!(sig_subs.get(&past_bank_tx.signatures[0]).unwrap().len(), 2);
assert!(sig_subs.contains_key(&unprocessed_tx.signatures[0]));
assert!(sig_subs.contains_key(&processed_tx.signatures[0]));
}
let mut cache_slot_info = CacheSlotInfo::default();
cache_slot_info.current_slot = 1;
subscriptions.notify_subscribers(cache_slot_info);
let expected_res = RpcSignatureResult { err: None };
struct Notification {
slot: Slot,
id: u64,
}
let expected_notification = |exp: Notification| -> String {
let json = json!({
"jsonrpc": "2.0",
"method": "signatureNotification",
"params": {
"result": {
"context": { "slot": exp.slot },
"value": &expected_res,
},
"subscription": exp.id,
}
});
serde_json::to_string(&json).unwrap()
};
// Expect to receive a notification from bank 1 because this subscription is
// looking for 0 confirmations and so checks the current bank
let expected = expected_notification(Notification { slot: 1, id: 1 });
let (response, _) = robust_poll_or_panic(past_bank_recv1);
assert_eq!(expected, response);
// Expect to receive a notification from bank 0 because this subscription is
// looking for 1 confirmation and so checks the past bank
let expected = expected_notification(Notification { slot: 0, id: 2 });
let (response, _) = robust_poll_or_panic(past_bank_recv2);
assert_eq!(expected, response);
let expected = expected_notification(Notification { slot: 1, id: 3 });
let (response, _) = robust_poll_or_panic(processed_recv);
assert_eq!(expected, response);
// Subscription should be automatically removed after notification
let sig_subs = subscriptions
.subscriptions
.signature_subscriptions
.read()
.unwrap();
assert!(!sig_subs.contains_key(&processed_tx.signatures[0]));
assert!(!sig_subs.contains_key(&past_bank_tx.signatures[0]));
// Unprocessed signature subscription should not be removed
assert_eq!(
sig_subs.get(&unprocessed_tx.signatures[0]).unwrap().len(),
1
);
}
#[test]
#[serial]
fn test_check_slot_subscribe() {
let (subscriber, _id_receiver, transport_receiver) =
Subscriber::new_test("slotNotification");
let sub_id = SubscriptionId::Number(0 as u64);
let exit = Arc::new(AtomicBool::new(false));
let ledger_path = get_tmp_ledger_path!();
let blockstore = Arc::new(Blockstore::open(&ledger_path).unwrap());
let GenesisConfigInfo { genesis_config, .. } = create_genesis_config(10_000);
let bank = Bank::new(&genesis_config);
let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
let subscriptions = RpcSubscriptions::new(
&exit,
bank_forks,
Arc::new(RwLock::new(
BlockCommitmentCache::new_for_tests_with_blockstore(blockstore),
)),
);
subscriptions.add_slot_subscription(sub_id.clone(), subscriber);
assert!(subscriptions
.subscriptions
.slot_subscriptions
.read()
.unwrap()
.contains_key(&sub_id));
subscriptions.notify_slot(0, 0, 0);
let (response, _) = robust_poll_or_panic(transport_receiver);
let expected_res = SlotInfo {
parent: 0,
slot: 0,
root: 0,
};
let expected_res_str =
serde_json::to_string(&serde_json::to_value(expected_res).unwrap()).unwrap();
let expected = format!(
r#"{{"jsonrpc":"2.0","method":"slotNotification","params":{{"result":{},"subscription":0}}}}"#,
expected_res_str
);
assert_eq!(expected, response);
subscriptions.remove_slot_subscription(&sub_id);
assert!(!subscriptions
.subscriptions
.slot_subscriptions
.read()
.unwrap()
.contains_key(&sub_id));
}
#[test]
#[serial]
fn test_check_root_subscribe() {
let (subscriber, _id_receiver, mut transport_receiver) =
Subscriber::new_test("rootNotification");
let sub_id = SubscriptionId::Number(0 as u64);
let exit = Arc::new(AtomicBool::new(false));
let ledger_path = get_tmp_ledger_path!();
let blockstore = Arc::new(Blockstore::open(&ledger_path).unwrap());
let GenesisConfigInfo { genesis_config, .. } = create_genesis_config(10_000);
let bank = Bank::new(&genesis_config);
let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
let subscriptions = RpcSubscriptions::new(
&exit,
bank_forks,
Arc::new(RwLock::new(
BlockCommitmentCache::new_for_tests_with_blockstore(blockstore),
)),
);
subscriptions.add_root_subscription(sub_id.clone(), subscriber);
assert!(subscriptions
.subscriptions
.root_subscriptions
.read()
.unwrap()
.contains_key(&sub_id));
subscriptions.notify_roots(vec![2, 1, 3]);
for expected_root in 1..=3 {
let (response, receiver) = robust_poll_or_panic(transport_receiver);
transport_receiver = receiver;
let expected_res_str =
serde_json::to_string(&serde_json::to_value(expected_root).unwrap()).unwrap();
let expected = format!(
r#"{{"jsonrpc":"2.0","method":"rootNotification","params":{{"result":{},"subscription":0}}}}"#,
expected_res_str
);
assert_eq!(expected, response);
}
subscriptions.remove_root_subscription(&sub_id);
assert!(!subscriptions
.subscriptions
.root_subscriptions
.read()
.unwrap()
.contains_key(&sub_id));
}
#[test]
#[serial]
fn test_add_and_remove_subscription() {
let mut subscriptions: HashMap<u64, HashMap<SubscriptionId, SubscriptionData<()>>> =
HashMap::new();
let num_keys = 5;
for key in 0..num_keys {
let (subscriber, _id_receiver, _transport_receiver) =
Subscriber::new_test("notification");
let sub_id = SubscriptionId::Number(key);
add_subscription(&mut subscriptions, key, None, sub_id, subscriber, 0);
}
// Add another subscription to the "0" key
let (subscriber, _id_receiver, _transport_receiver) = Subscriber::new_test("notification");
let extra_sub_id = SubscriptionId::Number(num_keys);
add_subscription(
&mut subscriptions,
0,
None,
extra_sub_id.clone(),
subscriber,
0,
);
assert_eq!(subscriptions.len(), num_keys as usize);
assert_eq!(subscriptions.get(&0).unwrap().len(), 2);
assert_eq!(subscriptions.get(&1).unwrap().len(), 1);
assert_eq!(
remove_subscription(&mut subscriptions, &SubscriptionId::Number(0)),
true
);
assert_eq!(subscriptions.len(), num_keys as usize);
assert_eq!(subscriptions.get(&0).unwrap().len(), 1);
assert_eq!(
remove_subscription(&mut subscriptions, &SubscriptionId::Number(0)),
false
);
assert_eq!(remove_subscription(&mut subscriptions, &extra_sub_id), true);
assert_eq!(subscriptions.len(), (num_keys - 1) as usize);
assert!(subscriptions.get(&0).is_none());
}
#[test]
#[serial]
fn test_gossip_separate_account_notifications() {
let GenesisConfigInfo {
genesis_config,
mint_keypair,
..
} = create_genesis_config(100);
let ledger_path = get_tmp_ledger_path!();
let blockstore = Arc::new(Blockstore::open(&ledger_path).unwrap());
let bank = Bank::new(&genesis_config);
let blockhash = bank.last_blockhash();
let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
let bank0 = bank_forks.read().unwrap().get(0).unwrap().clone();
let bank1 = Bank::new_from_parent(&bank0, &Pubkey::default(), 1);
bank_forks.write().unwrap().insert(bank1);
let bank2 = Bank::new_from_parent(&bank0, &Pubkey::default(), 2);
bank_forks.write().unwrap().insert(bank2);
let alice = Keypair::new();
let (subscriber0, _id_receiver, transport_receiver0) =
Subscriber::new_test("accountNotification");
let (subscriber1, _id_receiver, transport_receiver1) =
Subscriber::new_test("accountNotification");
let exit = Arc::new(AtomicBool::new(false));
let subscriptions = RpcSubscriptions::new(
&exit,
bank_forks.clone(),
Arc::new(RwLock::new(
BlockCommitmentCache::new_for_tests_with_blockstore_bank(
blockstore,
bank_forks.read().unwrap().get(1).unwrap().clone(),
1,
),
)),
);
let sub_id0 = SubscriptionId::Number(0 as u64);
subscriptions.add_account_subscription(
alice.pubkey(),
Some(CommitmentConfig::single_gossip()),
sub_id0.clone(),
subscriber0,
);
assert!(subscriptions
.subscriptions
.gossip_account_subscriptions
.read()
.unwrap()
.contains_key(&alice.pubkey()));
let tx = system_transaction::create_account(
&mint_keypair,
&alice,
blockhash,
1,
16,
&solana_budget_program::id(),
);
// Add the transaction to the 1st bank and then freeze the bank
let bank1 = bank_forks.write().unwrap().get(1).cloned().unwrap();
bank1.process_transaction(&tx).unwrap();
bank1.freeze();
// Add the same transaction to the unfrozen 2nd bank
bank_forks
.write()
.unwrap()
.get(2)
.unwrap()
.process_transaction(&tx)
.unwrap();
        // Notify the unfrozen bank first, to queue a pending notification
subscriptions.notify_gossip_subscribers(2);
// Now, notify the frozen bank and ensure its notifications are processed
subscriptions.notify_gossip_subscribers(1);
let (response, _) = robust_poll_or_panic(transport_receiver0);
let expected = json!({
"jsonrpc": "2.0",
"method": "accountNotification",
"params": {
"result": {
"context": { "slot": 1 },
"value": {
"data": "1111111111111111",
"executable": false,
"lamports": 1,
"owner": "Budget1111111111111111111111111111111111111",
"rentEpoch": 1,
},
},
"subscription": 0,
}
});
assert_eq!(serde_json::to_string(&expected).unwrap(), response);
subscriptions.remove_account_subscription(&sub_id0);
let sub_id1 = SubscriptionId::Number(1 as u64);
subscriptions.add_account_subscription(
alice.pubkey(),
Some(CommitmentConfig::single_gossip()),
sub_id1.clone(),
subscriber1,
);
subscriptions.notify_frozen(2);
let (response, _) = robust_poll_or_panic(transport_receiver1);
let expected = json!({
"jsonrpc": "2.0",
"method": "accountNotification",
"params": {
"result": {
"context": { "slot": 2 },
"value": {
"data": "1111111111111111",
"executable": false,
"lamports": 1,
"owner": "Budget1111111111111111111111111111111111111",
"rentEpoch": 1,
},
},
"subscription": 1,
}
});
assert_eq!(serde_json::to_string(&expected).unwrap(), response);
subscriptions.remove_account_subscription(&sub_id1);
assert!(!subscriptions
.subscriptions
.gossip_account_subscriptions
.read()
.unwrap()
.contains_key(&alice.pubkey()));
}
}
| 35.501585 | 111 | 0.559586 |
09b53051bd8c8e143487be76d02bf3e1d1fbd8c0 | 3,996 | use crate::{dict::*, variation::Variation};
/// Indexes all words and variants in toki pona.
/// This uses multiple dictionaries to categorise each word.
pub struct DictionarySet<'a> {
/// A list of dictionaries whose words will be directly encoded as a single byte.
base_dictionaries: Vec<&'a Dictionary<'a>>,
}
/// Uniquely identifies a word, and possibly a variant.
/// Linked to a single DictionarySet.
#[derive(Debug, Clone, Copy)]
pub struct WordIdentifier {
/// Which dictionary is this word stored in?
pub dict: usize,
/// What index is this word at in the dictionary above?
pub word: usize,
}
impl<'a> Default for DictionarySet<'a> {
fn default() -> Self {
Self {
base_dictionaries: vec![&*PU],
}
}
}
lazy_static::lazy_static! {
pub static ref DICT_SET: DictionarySet<'static> = DictionarySet::default();
}
impl<'a> DictionarySet<'a> {
/// Looks up a toki pona word, written in the given variation.
/// If this lookup fails, the lookup will be retried in the default orthography.
pub fn get_identifier_variation(
&self,
word: &str,
variation: Variation,
) -> Option<WordIdentifier> {
if variation == Variation::Default {
return self.get_identifier(word);
}
for (dict_idx, dict) in self.base_dictionaries.iter().enumerate() {
if let Some(variation_dict) = dict.variations.get(&variation) {
if let Some(result) = variation_dict.lookup.get(word) {
return Some(WordIdentifier {
dict: dict_idx,
word: *result,
});
}
}
}
self.get_identifier(word)
}
/// Looks up a toki pona word, written in the default orthography.
pub fn get_identifier(&self, word: &str) -> Option<WordIdentifier> {
for (dict_idx, dict) in self.base_dictionaries.iter().enumerate() {
if let Some(result) = dict.default.lookup.get(word) {
return Some(WordIdentifier {
dict: dict_idx,
word: *result,
});
}
}
None
}
/// Looks up a word identifier and returns the toki pona word in the given orthography.
pub fn get_word_variation(&self, identifier: WordIdentifier, variation: Variation) -> &'a str {
let dict = self.base_dictionaries[identifier.dict];
if variation == Variation::Default {
dict.default.words[identifier.word]
} else if let Some(word) = dict.variations[&variation].words[identifier.word] {
word
} else {
dict.default.words[identifier.word]
}
}
/// Returns a list of bytes representing this word.
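    /// Single-byte words are encoded as `0x22 + index`, where `index` is the
    /// word's position within the concatenation of all base dictionaries.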
pub fn word_to_bytes(&self, word: WordIdentifier) -> Vec<u8> {
if word.dict < self.base_dictionaries.len() {
let index = self
.base_dictionaries
.iter()
.take(word.dict)
.map(|dict| dict.default.words.len())
.sum::<usize>()
+ word.word;
vec![0x22 + u8::try_from(index).expect("index too large")]
} else {
todo!()
}
}
/// Returns the word identifier representing this word.
pub fn word_from_bytes(&self, bytes: &[u8]) -> WordIdentifier {
if bytes.len() == 1 {
// This is a single-byte word, which must be in the base dictionaries.
            // Undo the `0x22` offset applied in `word_to_bytes` before walking
            // the base dictionaries.
            let mut index = bytes[0] as usize - 0x22;
let mut dict_index = 0;
while index >= self.base_dictionaries[dict_index].default.words.len() {
index -= self.base_dictionaries[dict_index].default.words.len();
dict_index += 1;
}
WordIdentifier {
dict: dict_index,
word: index,
}
} else {
todo!()
}
}
}
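// A minimal round-trip sketch (hedged): it assumes the bundled `PU` dictionary
// contains the word "toki"; adjust the probe word if the dictionary differs.
#[cfg(test)]
mod byte_encoding_tests {
    use super::*;

    #[test]
    fn single_byte_round_trip() {
        // Encode a known word to its single-byte form and decode it back.
        let id = DICT_SET
            .get_identifier("toki")
            .expect("probe word should exist in the base dictionary");
        let bytes = DICT_SET.word_to_bytes(id);
        let decoded = DICT_SET.word_from_bytes(&bytes);
        assert_eq!(
            DICT_SET.get_word_variation(decoded, Variation::Default),
            "toki"
        );
    }
}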
| 34.153846 | 99 | 0.563063 |
f5c04f20e34c9b3a400decb8046ea527d5c4c1a4 | 6,605 | //! `candid::Result<T> = Result<T, candid::Error>>`
use serde::{de, ser};
use crate::parser::token;
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::files::{Error as ReportError, SimpleFile};
use codespan_reporting::term::{self, termcolor::StandardStream};
use std::io;
use thiserror::Error;
pub type Result<T = ()> = std::result::Result<T, Error>;
#[derive(Debug, Error)]
pub enum Error {
#[error("Candid parser error: {0}")]
Parse(#[from] token::ParserError),
#[error("binary parser error: {}", .0.get(0).map(|f| format!("{} at byte offset {}", f.message, f.range.start/2)).unwrap_or_else(|| "io error".to_string()))]
Binread(Vec<Label<()>>),
#[error(transparent)]
Custom(#[from] anyhow::Error),
}
impl Error {
pub fn msg<T: ToString>(msg: T) -> Self {
Error::Custom(anyhow::anyhow!(msg.to_string()))
}
pub fn report(&self) -> Diagnostic<()> {
match self {
Error::Parse(e) => {
use lalrpop_util::ParseError::*;
let mut diag = Diagnostic::error().with_message("parser error");
let label = match e {
User { error } => {
Label::primary((), error.span.clone()).with_message(&error.err)
}
InvalidToken { location } => {
Label::primary((), *location..location + 1).with_message("Invalid token")
}
UnrecognizedEOF { location, expected } => {
diag = diag.with_notes(report_expected(&expected));
Label::primary((), *location..location + 1).with_message("Unexpected EOF")
}
UnrecognizedToken { token, expected } => {
diag = diag.with_notes(report_expected(&expected));
Label::primary((), token.0..token.2).with_message("Unexpected token")
}
ExtraToken { token } => {
Label::primary((), token.0..token.2).with_message("Extra token")
}
};
diag.with_labels(vec![label])
}
Error::Binread(labels) => {
let diag = Diagnostic::error().with_message("decoding error");
diag.with_labels(labels.to_vec())
}
Error::Custom(e) => Diagnostic::error().with_message(e.to_string()),
}
}
}
fn get_binread_labels(e: &binread::Error) -> Vec<Label<()>> {
use binread::Error::*;
match e {
BadMagic { pos, .. } => {
let pos = (pos * 2) as usize;
vec![Label::primary((), pos..pos + 2).with_message("Unexpected bytes")]
}
Custom { pos, err } => {
let pos = (pos * 2) as usize;
let err = err
.downcast_ref::<&str>()
.unwrap_or(&"unknown error (there's a bug in error reporting)");
vec![Label::primary((), pos..pos + 2).with_message(err.to_string())]
}
EnumErrors {
pos,
variant_errors,
} => {
let pos = (pos * 2) as usize;
let variant = variant_errors
.iter()
.find(|(_, e)| !matches!(e, BadMagic { .. }));
// Should have at most one non-magic error
match variant {
None => vec![Label::primary((), pos..pos + 2).with_message("Unknown opcode")],
Some((id, e)) => {
let mut labels = get_binread_labels(e);
labels.push(Label::secondary((), pos..pos + 2).with_message(id.to_string()));
labels
}
}
}
NoVariantMatch { pos } => {
let pos = (pos * 2) as usize;
vec![Label::primary((), pos..pos + 2).with_message("No variant match")]
}
AssertFail { pos, message } => {
let pos = (pos * 2) as usize;
vec![Label::primary((), pos..pos + 2).with_message(message)]
}
Io(_) => vec![],
_ => unreachable!(),
}
}
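/// Turns the parser's expected-token list into human-readable diagnostic notes,
/// e.g. `["\"(\"", "id"]` becomes the single note `Expects one of "(", id`,
/// wrapped at 70 columns. Returns no notes when the list is empty.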
fn report_expected(expected: &[String]) -> Vec<String> {
if expected.is_empty() {
return Vec::new();
}
use pretty::RcDoc;
let doc: RcDoc<()> = RcDoc::intersperse(
expected.iter().map(RcDoc::text),
RcDoc::text(",").append(RcDoc::softline()),
);
let header = if expected.len() == 1 {
"Expects"
} else {
"Expects one of"
};
let doc = RcDoc::text(header).append(RcDoc::softline().append(doc));
vec![doc.pretty(70).to_string()]
}
impl ser::Error for Error {
fn custom<T: std::fmt::Display>(msg: T) -> Self {
Error::msg(format!("Serialize error: {}", msg))
}
}
impl de::Error for Error {
fn custom<T: std::fmt::Display>(msg: T) -> Self {
Error::msg(format!("Deserialize error: {}", msg))
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Error {
Error::msg(format!("io error: {}", e))
}
}
impl From<binread::Error> for Error {
fn from(e: binread::Error) -> Error {
Error::Binread(get_binread_labels(&e))
}
}
impl From<ReportError> for Error {
fn from(e: ReportError) -> Error {
Error::msg(e)
}
}
#[cfg(feature = "random")]
impl From<arbitrary::Error> for Error {
fn from(e: arbitrary::Error) -> Error {
Error::msg(format!("arbitrary error: {}", e))
}
}
#[cfg(feature = "configs")]
impl From<serde_dhall::Error> for Error {
fn from(e: serde_dhall::Error) -> Error {
Error::msg(format!("dhall error: {}", e))
}
}
pub fn pretty_parse<T>(name: &str, str: &str) -> Result<T>
where
T: std::str::FromStr<Err = Error>,
{
str.parse::<T>().or_else(|e| {
let writer = StandardStream::stderr(term::termcolor::ColorChoice::Auto);
let config = term::Config::default();
let file = SimpleFile::new(name, str);
term::emit(&mut writer.lock(), &config, &file, &e.report())?;
Err(e)
})
}
pub fn pretty_read<T>(reader: &mut std::io::Cursor<&[u8]>) -> Result<T>
where
T: binread::BinRead,
{
T::read(reader).or_else(|e| {
let e = Error::from(e);
let writer = StandardStream::stderr(term::termcolor::ColorChoice::Auto);
let config = term::Config::default();
let str = hex::encode(&reader.get_ref());
let file = SimpleFile::new("binary", &str);
term::emit(&mut writer.lock(), &config, &file, &e.report())?;
Err(e)
})
}
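// Illustrative usage (a sketch; `MyType` stands in for any caller-provided type
// implementing `FromStr<Err = Error>` or `binread::BinRead` respectively):
//
//     let parsed: MyType = pretty_parse("input.did", &source)?;
//     let decoded: MyType = pretty_read(&mut std::io::Cursor::new(&bytes[..]))?;
//
// On failure both helpers render a span-annotated diagnostic to stderr before
// propagating the error to the caller.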
| 33.358586 | 161 | 0.518092 |
14597bdcee9fd3a60f7f846d806e5d94cd8977b5 | 1,482 | use std::io;
fn main() {
println!("Input \"exit\" to break the loop!");
loop {
let mut f_letter = FirstLetter {
letter: ' ',
read: false,
vowel: false,
};
let mut string = String::new();
io::stdin()
.read_line(&mut string)
.expect("Error while reading stdin");
let string = string.trim();
        if string == "exit" { break; }
        for c in string.chars() {
            if !f_letter.read {
f_letter.read = true;
f_letter.letter = c;
f_letter.vowel_check();
                if f_letter.vowel {
print!("{}", f_letter.letter);
}
continue;
}
print!("{}", c);
}
        if f_letter.vowel {
println!("-hay");
}
else {
println!("-{}ay", f_letter.letter);
}
}
}
struct FirstLetter {
vowel: bool,
letter: char,
read: bool,
}
impl FirstLetter {
    fn vowel_check(&mut self) {
        // Note: 'y' is deliberately treated as a vowel here.
        self.vowel = matches!(self.letter, 'a' | 'e' | 'i' | 'o' | 'u' | 'y');
    }
}
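// Example session (expected behaviour of the loop above):
//
//     first  ->  irst-fay    (a leading consonant moves into the "-<letter>ay" suffix)
//     apple  ->  apple-hay   (vowel-initial words are kept intact and gain "-hay")
//     exit                   (breaks out of the loop)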
| 21.171429 | 58 | 0.423077 |
d93ea0a06a4d5128b070ef5d787afa736f217d05 | 2,876 | #[doc = "Register `PSG` reader"]
pub struct R(crate::R<PSG_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<PSG_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<PSG_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<PSG_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `PSG` writer"]
pub struct W(crate::W<PSG_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<PSG_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<PSG_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<PSG_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `PSG` reader - "]
pub struct PSG_R(crate::FieldReader<u16, u16>);
impl PSG_R {
pub(crate) fn new(bits: u16) -> Self {
PSG_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for PSG_R {
type Target = crate::FieldReader<u16, u16>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `PSG` writer - "]
pub struct PSG_W<'a> {
w: &'a mut W,
}
impl<'a> PSG_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff) | (value as u32 & 0xffff);
self.w
}
}
impl R {
#[doc = "Bits 0:15"]
#[inline(always)]
pub fn psg(&self) -> PSG_R {
PSG_R::new((self.bits & 0xffff) as u16)
}
}
impl W {
#[doc = "Bits 0:15"]
#[inline(always)]
pub fn psg(&mut self) -> PSG_W {
PSG_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Timer Clock Prescaler Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [psg](index.html) module"]
pub struct PSG_SPEC;
impl crate::RegisterSpec for PSG_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [psg::R](R) reader structure"]
impl crate::Readable for PSG_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [psg::W](W) writer structure"]
impl crate::Writable for PSG_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets PSG to value 0"]
impl crate::Resettable for PSG_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 27.92233 | 414 | 0.581711 |
26572e774749227779c15269204f52439362519b | 915 | /******************************************************************************
 * DEBUG IMPLEMENTATIONS
*****************************************************************************/
#[cfg(feature = "debug")]
pub type Debug = String;
#[cfg(feature = "debug")]
macro_rules! debug_format {
( $( $args:tt )+ ) => {
        // No trailing semicolon: the expansion must be an expression so the
        // macro yields the formatted `String`.
        format!($($args)*)
}
}
#[cfg(feature = "debug")]
macro_rules! debug_println {
( $( $args:tt )+ ) => {
        println!($($args)*)
}
}
/******************************************************************************
* NORMAL FUNCTIONS
*****************************************************************************/
#[cfg(not(feature = "debug"))]
pub type Debug = ();
#[cfg(not(feature = "debug"))]
macro_rules! debug_format {
( $( $args:tt )+ ) => {
()
}
}
#[cfg(not(feature = "debug"))]
macro_rules! debug_println {
( $( $args:tt )+ ) => {
()
}
}
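// Illustrative usage (a sketch): with the `debug` feature enabled,
// `debug_format!("x = {}", x)` evaluates to a `String` and `debug_println!`
// prints to stdout; without the feature both expand to `()`, so their
// arguments are never even evaluated.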
| 22.875 | 79 | 0.32459 |
482d61276ef86645f11cc34cd7aa3ae44b21973a | 577 | #[cfg(all(feature = "napi-4", feature = "event-queue-api"))]
mod event_queue;
#[cfg(all(feature = "napi-4", feature = "event-queue-api"))]
pub use self::event_queue::{EventQueue, EventQueueError};
#[cfg(all(not(feature = "napi-1"), feature = "event-handler-api"))]
mod event_handler;
#[cfg(all(not(feature = "napi-1"), feature = "event-handler-api"))]
pub use self::event_handler::EventHandler;
#[cfg(all(feature = "napi-1", feature = "event-handler-api"))]
compile_error!("The `EventHandler` API is not supported with the N-API \
backend. Use `EventQueue` instead.");
| 36.0625 | 72 | 0.686308 |
727752a160b8ddd0e99d2a834ff0d5d779dd3fde | 14,090 | // Copyright 2020 Nym Technologies SA
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::client::config::Config;
use crate::socks::{
authentication::{AuthenticationMethods, Authenticator, User},
server::SphinxSocksServer,
};
use client_core::client::cover_traffic_stream::LoopCoverTrafficStream;
use client_core::client::inbound_messages::{
InputMessage, InputMessageReceiver, InputMessageSender,
};
use client_core::client::key_manager::KeyManager;
use client_core::client::mix_traffic::{
MixMessageReceiver, MixMessageSender, MixTrafficController,
};
use client_core::client::real_messages_control::RealMessagesController;
use client_core::client::received_buffer::{
ReceivedBufferRequestReceiver, ReceivedBufferRequestSender, ReceivedMessagesBufferController,
};
use client_core::client::reply_key_storage::ReplyKeyStorage;
use client_core::client::topology_control::{
TopologyAccessor, TopologyRefresher, TopologyRefresherConfig,
};
use client_core::config::persistence::key_pathfinder::ClientKeyPathfinder;
use crypto::asymmetric::identity;
use futures::channel::mpsc;
use gateway_client::{
AcknowledgementReceiver, AcknowledgementSender, GatewayClient, MixnetMessageReceiver,
MixnetMessageSender,
};
use log::*;
use nymsphinx::addressing::clients::Recipient;
use nymsphinx::addressing::nodes::NodeIdentity;
use tokio::runtime::Runtime;
pub(crate) mod config;
pub struct NymClient {
/// Client configuration options, including, among other things, packet sending rates,
/// key filepaths, etc.
config: Config,
/// Tokio runtime used for futures execution.
    // TODO: JS: Personally, I think I prefer the implicit way of using it, as we've
    // done with the gateway.
runtime: Runtime,
/// KeyManager object containing smart pointers to all relevant keys used by the client.
key_manager: KeyManager,
}
impl NymClient {
pub fn new(config: Config) -> Self {
let pathfinder = ClientKeyPathfinder::new_from_config(config.get_base());
let key_manager = KeyManager::load_keys(&pathfinder).expect("failed to load stored keys");
NymClient {
runtime: Runtime::new().unwrap(),
config,
key_manager,
}
}
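    /// Returns this client's full mixnet address: its identity public key, its
    /// encryption public key, and the identity of the gateway it registered with.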
pub fn as_mix_recipient(&self) -> Recipient {
Recipient::new(
*self.key_manager.identity_keypair().public_key(),
self.key_manager.encryption_keypair().public_key().clone(),
// TODO: below only works under assumption that gateway address == gateway id
// (which currently is true)
NodeIdentity::from_base58_string(self.config.get_base().get_gateway_id()).unwrap(),
)
}
    // future constantly pumping loop cover traffic at some specified average rate;
    // the pumped traffic goes to the MixTrafficController
fn start_cover_traffic_stream(
&self,
topology_accessor: TopologyAccessor,
mix_tx: MixMessageSender,
) {
info!("Starting loop cover traffic stream...");
        // we need to explicitly enter the runtime because of the
        // "next_delay: time::delay_for(Default::default())" set in the constructor,
        // which HAS TO be called within the context of a tokio runtime
self.runtime
.enter(|| {
LoopCoverTrafficStream::new(
self.key_manager.ack_key(),
self.config.get_base().get_average_ack_delay(),
self.config.get_base().get_average_packet_delay(),
self.config
.get_base()
.get_loop_cover_traffic_average_delay(),
mix_tx,
self.as_mix_recipient(),
topology_accessor,
)
})
.start(self.runtime.handle());
}
fn start_real_traffic_controller(
&self,
topology_accessor: TopologyAccessor,
reply_key_storage: ReplyKeyStorage,
ack_receiver: AcknowledgementReceiver,
input_receiver: InputMessageReceiver,
mix_sender: MixMessageSender,
) {
let controller_config = client_core::client::real_messages_control::Config::new(
self.key_manager.ack_key(),
self.config.get_base().get_ack_wait_multiplier(),
self.config.get_base().get_ack_wait_addition(),
self.config.get_base().get_average_ack_delay(),
self.config.get_base().get_message_sending_average_delay(),
self.config.get_base().get_average_packet_delay(),
self.as_mix_recipient(),
);
info!("Starting real traffic stream...");
        // we need to explicitly enter the runtime due to "next_delay: time::delay_for(Default::default())"
        // set in the constructor [of OutQueueControl], which HAS TO be called within the context of a tokio runtime.
        // When refactoring, this restriction should definitely be removed.
let real_messages_controller = self.runtime.enter(|| {
RealMessagesController::new(
controller_config,
ack_receiver,
input_receiver,
mix_sender,
topology_accessor,
reply_key_storage,
)
});
real_messages_controller.start(self.runtime.handle());
}
    // buffer controlling all messages fetched from the provider,
    // required so that other components (say, the websocket) are able to use them
fn start_received_messages_buffer_controller(
&self,
query_receiver: ReceivedBufferRequestReceiver,
mixnet_receiver: MixnetMessageReceiver,
reply_key_storage: ReplyKeyStorage,
) {
info!("Starting received messages buffer controller...");
ReceivedMessagesBufferController::new(
self.key_manager.encryption_keypair(),
query_receiver,
mixnet_receiver,
reply_key_storage,
)
.start(self.runtime.handle())
}
fn start_gateway_client(
&mut self,
mixnet_message_sender: MixnetMessageSender,
ack_sender: AcknowledgementSender,
) -> GatewayClient {
let gateway_id = self.config.get_base().get_gateway_id();
if gateway_id.is_empty() {
panic!("The identity of the gateway is unknown - did you run `nym-client` init?")
}
let gateway_address = self.config.get_base().get_gateway_listener();
if gateway_address.is_empty() {
panic!("The address of the gateway is unknown - did you run `nym-client` init?")
}
let gateway_identity = identity::PublicKey::from_base58_string(gateway_id)
.expect("provided gateway id is invalid!");
let mut gateway_client = GatewayClient::new(
gateway_address,
self.key_manager.identity_keypair(),
gateway_identity,
Some(self.key_manager.gateway_shared_key()),
mixnet_message_sender,
ack_sender,
self.config.get_base().get_gateway_response_timeout(),
);
self.runtime.block_on(async {
gateway_client
.authenticate_and_start()
.await
.expect("could not authenticate and start up the gateway connection")
});
gateway_client
}
// future responsible for periodically polling directory server and updating
// the current global view of topology
fn start_topology_refresher(&mut self, topology_accessor: TopologyAccessor) {
let topology_refresher_config = TopologyRefresherConfig::new(
self.config.get_base().get_directory_server(),
self.config.get_base().get_topology_refresh_rate(),
);
let mut topology_refresher =
TopologyRefresher::new_directory_client(topology_refresher_config, topology_accessor);
        // before returning, block the entire runtime to refresh the current network view so that any
        // components depending on topology would see a non-empty view
info!(
"Obtaining initial network topology from {}",
self.config.get_base().get_directory_server()
);
self.runtime.block_on(topology_refresher.refresh());
// TODO: a slightly more graceful termination here
if !self
.runtime
.block_on(topology_refresher.is_topology_routable())
{
panic!(
"The current network topology seem to be insufficient to route any packets through\
- check if enough nodes and a gateway are online"
);
}
info!("Starting topology refresher...");
topology_refresher.start(self.runtime.handle());
}
// controller for sending sphinx packets to mixnet (either real traffic or cover traffic)
// TODO: if we want to send control messages to gateway_client, this CAN'T take the ownership
// over it. Perhaps GatewayClient needs to be thread-shareable or have some channel for
// requests?
fn start_mix_traffic_controller(
&mut self,
mix_rx: MixMessageReceiver,
gateway_client: GatewayClient,
) {
info!("Starting mix traffic controller...");
MixTrafficController::new(mix_rx, gateway_client).start(self.runtime.handle());
}
fn start_socks5_listener(
&self,
buffer_requester: ReceivedBufferRequestSender,
msg_input: InputMessageSender,
) {
info!("Starting socks5 listener...");
let mut auth_methods: Vec<u8> = Vec::new();
auth_methods.push(AuthenticationMethods::NoAuth as u8);
let allowed_users: Vec<User> = Vec::new();
let authenticator = Authenticator::new(auth_methods, allowed_users);
let mut sphinx_socks = SphinxSocksServer::new(
self.config.get_listening_port(),
authenticator,
self.config.get_provider_mix_address(),
self.as_mix_recipient(),
);
self.runtime
.spawn(async move { sphinx_socks.serve(msg_input, buffer_requester).await });
}
/// blocking version of `start` method. Will run forever (or until SIGINT is sent)
pub fn run_forever(&mut self) {
self.start();
if let Err(e) = self.runtime.block_on(tokio::signal::ctrl_c()) {
error!(
"There was an error while capturing SIGINT - {:?}. We will terminate regardless",
e
);
}
println!(
"Received SIGINT - the client will terminate now (threads are not YET nicely stopped)"
);
}
pub fn start(&mut self) {
info!("Starting nym client");
// channels for inter-component communication
// TODO: make the channels be internally created by the relevant components
        // rather than creating them here; for example, the buffer controller would create the request channels
// and would allow anyone to clone the sender channel
// sphinx_message_sender is the transmitter for any component generating sphinx packets that are to be sent to the mixnet
// they are used by cover traffic stream and real traffic stream
// sphinx_message_receiver is the receiver used by MixTrafficController that sends the actual traffic
let (sphinx_message_sender, sphinx_message_receiver) = mpsc::unbounded();
// unwrapped_sphinx_sender is the transmitter of mixnet messages received from the gateway
// unwrapped_sphinx_receiver is the receiver for said messages - used by ReceivedMessagesBuffer
let (mixnet_messages_sender, mixnet_messages_receiver) = mpsc::unbounded();
// used for announcing connection or disconnection of a channel for pushing re-assembled messages to
let (received_buffer_request_sender, received_buffer_request_receiver) = mpsc::unbounded();
// channels responsible for controlling real messages
let (input_sender, input_receiver) = mpsc::unbounded::<InputMessage>();
// channels responsible for controlling ack messages
let (ack_sender, ack_receiver) = mpsc::unbounded();
let shared_topology_accessor = TopologyAccessor::new();
let reply_key_storage =
ReplyKeyStorage::load(self.config.get_base().get_reply_encryption_key_store_path())
.expect("Failed to load reply key storage!");
// the components are started in very specific order. Unless you know what you are doing,
// do not change that.
self.start_topology_refresher(shared_topology_accessor.clone());
self.start_received_messages_buffer_controller(
received_buffer_request_receiver,
mixnet_messages_receiver,
reply_key_storage.clone(),
);
let gateway_client = self.start_gateway_client(mixnet_messages_sender, ack_sender);
self.start_mix_traffic_controller(sphinx_message_receiver, gateway_client);
self.start_real_traffic_controller(
shared_topology_accessor.clone(),
reply_key_storage,
ack_receiver,
input_receiver,
sphinx_message_sender.clone(),
);
self.start_cover_traffic_stream(shared_topology_accessor, sphinx_message_sender);
self.start_socks5_listener(received_buffer_request_sender, input_sender);
info!("Client startup finished!");
info!("The address of this client is: {}", self.as_mix_recipient());
}
}
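// Typical usage, sketched for illustration only (how a `Config` gets loaded
// from disk is part of the `config` module and the client's `init` flow,
// which are assumed here rather than shown):
//
//     let config = /* load the config produced by `nym-client init` */;
//     let mut client = NymClient::new(config);
//     client.run_forever();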
| 41.441176 | 129 | 0.663733 |
875168eb6b2a7a33218696fa3bcb76124b74fd64 | 1,388 | // move_semantics2.rs
// Make me compile without changing line 13!
// Execute `rustlings hint move_semantics2` for hints :)
fn main() {
let vec0 = Vec::new();
let mut vec1 = fill_vec(&vec0);
// Do not change the following line!
println!("{} has length {} content `{:?}`", "vec0", vec0.len(), vec0);
vec1.push(88);
println!("{} has length {} content `{:?}`", "vec1", vec1.len(), vec1);
}
fn fill_vec(vec: &Vec<i32>) -> Vec<i32> {
let mut vec1 = vec.to_vec();
vec1.push(22);
vec1.push(44);
vec1.push(66);
vec1
}
/*
In the original version of this exercise, `vec0` was being *moved* into the
function `fill_vec` when we called it on line 10, which meant it got dropped at
the end of `fill_vec`, so we couldn't use `vec0` again on line 13 (or anywhere
else in `main` after the `fill_vec` call for that matter). We could fix this in
a few ways, try them all!
1. Make another, separate version of the data that's in `vec0` and pass that
to `fill_vec` instead.
2. Make `fill_vec` borrow its argument instead of taking ownership of it,
and then copy the data within the function in order to return an owned
`Vec<i32>`
3. Make `fill_vec` *mutably* borrow its argument (which will need to be
mutable), modify it directly, then not return anything. Then you can get rid
of `vec1` entirely -- note that this will change what gets printed by the
first `println!` (see the sketch after this comment block)
*/
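// A minimal sketch of approach 3 from the notes above (illustrative only, not
// part of the exercise as solved): `fill_vec` takes a mutable borrow and
// returns nothing, so `vec1` disappears entirely. Note this changes what the
// first `println!` prints, since `vec0` itself now receives the new elements.
//
// fn main() {
//     let mut vec0 = Vec::new();
//     fill_vec(&mut vec0);
//     println!("{} has length {} content `{:?}`", "vec0", vec0.len(), vec0);
// }
//
// fn fill_vec(vec: &mut Vec<i32>) {
//     vec.push(22);
//     vec.push(44);
//     vec.push(66);
// }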
| 33.047619 | 79 | 0.669308 |
bff4c82d890049a608c66456bb9822a2ce3db676 | 29 | pub(crate) mod double_click;
| 14.5 | 28 | 0.793103 |
11d3ed5f06f2bf7a833654e39f975d5386cdc5c6 | 4,424 | use super::nalgebra_conversions::*;
use crate::egui::Grid;
use crate::impls::NumberAttributes;
use crate::{utils, Context, Inspectable};
use bevy_rapier3d::{
na::Isometry3,
physics::RigidBodyHandleComponent,
rapier::dynamics::{BodyStatus, MassProperties, RigidBody, RigidBodySet},
};
impl_for_simple_enum!(BodyStatus: Dynamic, Static, Kinematic);
impl Inspectable for MassProperties {
type Attributes = ();
fn ui(
&mut self,
ui: &mut bevy_egui::egui::Ui,
_options: Self::Attributes,
context: &Context,
) -> bool {
let mut changed = false;
ui.label("Mass");
let mut mass = 1. / self.inv_mass;
changed |= mass.ui(ui, NumberAttributes::min(0.001), context);
self.inv_mass = 1. / mass;
ui.end_row();
ui.label("Center of mass");
let mut com = self.local_com.to_glam_vec3();
changed |= com.ui(ui, Default::default(), context);
self.local_com = com.to_na_point3();
ui.end_row();
changed
}
}
impl Inspectable for RigidBody {
type Attributes = ();
fn ui(
&mut self,
ui: &mut bevy_egui::egui::Ui,
_options: Self::Attributes,
context: &Context,
) -> bool {
// PERF: some updates here can be avoided
let mut changed = false;
ui.vertical_centered(|ui| {
Grid::new(context.id()).show(ui, |ui| {
ui.label("Body Status");
let mut body_status = self.body_status();
changed |= body_status.ui(ui, Default::default(), context);
self.set_body_status(body_status);
ui.end_row();
let mut mass_properties = *self.mass_properties();
changed |= mass_properties.ui(ui, Default::default(), context);
self.set_mass_properties(mass_properties, false);
let position = self.position();
ui.label("Translation");
let mut translation = position.translation.vector.to_glam_vec3();
changed |= translation.ui(ui, Default::default(), context);
ui.end_row();
ui.label("Rotation");
let mut rotation = position.rotation.to_glam_quat();
changed |= rotation.ui(ui, Default::default(), context);
ui.end_row();
if changed {
self.set_position(
Isometry3::from_parts(
translation.to_na_translation(),
rotation.to_na_unit_quat(),
),
false,
);
}
ui.label("Linear velocity");
let mut linvel = self.linvel().to_glam_vec3();
trunc_epsilon_vec3(&mut linvel);
changed |= linvel.ui(ui, Default::default(), context);
self.set_linvel(linvel.to_na_vector3(), false);
ui.end_row();
ui.label("Angular velocity");
let mut angvel = self.angvel().to_glam_vec3();
trunc_epsilon_vec3(&mut angvel);
changed |= angvel.ui(ui, Default::default(), context);
self.set_angvel(angvel.to_na_vector3(), false);
ui.end_row();
self.wake_up(false);
});
});
changed
}
}
fn trunc_epsilon_f32(val: &mut f32) {
if val.abs() < f32::EPSILON {
*val = 0.0;
}
}
fn trunc_epsilon_vec3(val: &mut bevy::math::Vec3) {
trunc_epsilon_f32(&mut val.x);
trunc_epsilon_f32(&mut val.y);
trunc_epsilon_f32(&mut val.z);
}
impl Inspectable for RigidBodyHandleComponent {
type Attributes = <RigidBody as Inspectable>::Attributes;
fn ui(
&mut self,
ui: &mut bevy_egui::egui::Ui,
options: Self::Attributes,
context: &Context,
) -> bool {
let world = expect_world!(ui, context, "RigidBodyHandleComponent");
let mut bodies = world.get_resource_mut::<RigidBodySet>().unwrap();
let body = match bodies.get_mut(self.handle()) {
Some(body) => body,
None => {
utils::error_label(ui, "This handle does not exist on RigidBodySet");
return false;
}
};
body.ui(ui, options, context)
}
}
| 31.6 | 85 | 0.538879 |
dbc068fd20bb5930f733d856f95553a4e3bd440e | 15,243 | //
// mtpng - a multithreaded parallel PNG encoder in Rust
// filter.rs - adaptive pixel filtering for PNG encoding
//
// Copyright (c) 2018 Brion Vibber
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
use std::cmp;
use std::convert::TryFrom;
use std::io;
use super::Header;
use super::Mode;
use super::Mode::{Adaptive, Fixed};
use super::utils::invalid_input;
#[repr(u8)]
#[derive(Copy, Clone)]
pub enum Filter {
None = 0,
Sub = 1,
Up = 2,
Average = 3,
Paeth = 4,
}
impl TryFrom<u8> for Filter {
type Error = io::Error;
fn try_from(val: u8) -> Result<Self, Self::Error> {
match val {
0 => Ok(Filter::None),
1 => Ok(Filter::Sub),
2 => Ok(Filter::Up),
3 => Ok(Filter::Average),
4 => Ok(Filter::Paeth),
_ => Err(invalid_input("Invalid type constant")),
}
}
}
//
// Iterator helper for the filter functions.
//
// Filters are all byte-wise, and accept four input bytes:
// val (from the current pixel), left, above, and upper_left.
//
// They return an offset value which is used to reconstruct
// the original pixels on decode based on the pixels decoded
// so far plus the offset.
//
#[inline(always)]
fn filter_iter<F>(bpp: usize, prev: &[u8], src: &[u8], out: &mut [u8], func: F)
where F : Fn(u8, u8, u8, u8) -> u8
{
//
// The izip! macro merges multiple iterators together.
    // It performs _slightly_ better than a for loop with indexing
    // (where bounds checks are mostly factored out by careful
    // optimization), and doesn't require the voodoo assertions.
//
for (dest, cur, up) in
izip!(&mut out[0 .. bpp],
&src[0 .. bpp],
&prev[0 .. bpp]) {
*dest = func(*cur, 0, *up, 0);
}
let len = out.len();
for (dest, cur, left, up, above_left) in
izip!(&mut out[bpp .. len],
&src[bpp .. len],
&src[0 .. len - bpp],
&prev[bpp .. len],
&prev[0 .. len - bpp]) {
*dest = func(*cur, *left, *up, *above_left);
}
}
//
// "None" filter copies the untouched source data.
// Good for indexed color where there's no relation between pixel values.
//
// https://www.w3.org/TR/PNG/#9Filter-types
//
fn filter_none(_bpp: usize, _prev: &[u8], src: &[u8], dest: &mut [u8]) {
// Does not need specialization.
dest[0] = Filter::None as u8;
dest[1 ..].clone_from_slice(src);
}
//
// "Sub" filter diffs each byte against its neighbor one pixel to the left.
// Good for lines that smoothly vary, like horizontal gradients.
//
// https://www.w3.org/TR/PNG/#9Filter-types
//
fn filter_sub(bpp: usize, prev: &[u8], src: &[u8], dest: &mut [u8]) {
dest[0] = Filter::Sub as u8;
filter_iter(bpp, prev, src, &mut dest[1 ..], |val, left, _above, _upper_left| -> u8 {
val.wrapping_sub(left)
})
}
//
// "Up" filter diffs the pixel against its upper neighbor from prev row.
// Good for vertical gradients and lines that are similar to their
// predecessors.
//
// https://www.w3.org/TR/PNG/#9Filter-types
//
fn filter_up(bpp: usize, prev: &[u8], src: &[u8], dest: &mut [u8]) {
// Does not need specialization.
dest[0] = Filter::Up as u8;
filter_iter(bpp, prev, src, &mut dest[1 ..], |val, _left, above, _upper_left| -> u8 {
val.wrapping_sub(above)
})
}
//
// "Average" filter diffs the pixel against the average of its left and
// upper neighbors. Good for smoothly varying tonal and photographic images.
//
// https://www.w3.org/TR/PNG/#9Filter-type-3-Average
//
fn filter_average(bpp: usize, prev: &[u8], src: &[u8], dest: &mut [u8]) {
dest[0] = Filter::Average as u8;
filter_iter(bpp, prev, src, &mut dest[1 ..], |val, left, above, _upper_left| -> u8 {
let avg = ((i16::from(left) + i16::from(above)) / 2) as u8;
val.wrapping_sub(avg)
})
}
//
// Predictor function for the "Paeth" filter.
// The order of comparisons is important; use the PNG standard's reference.
//
// https://www.w3.org/TR/PNG/#9Filter-type-4-Paeth
//
fn paeth_predictor(left: u8, above: u8, upper_left: u8) -> u8 {
let a = i16::from(left);
let b = i16::from(above);
let c = i16::from(upper_left);
let p = a + b - c; // initial estimate
let pa = i16::abs(p - a); // distances to a, b, c
let pb = i16::abs(p - b);
let pc = i16::abs(p - c);
// return nearest of a,b,c,
// breaking ties in order a,b,c.
if pa <= pb && pa <= pc {
left
} else if pb <= pc {
above
} else {
upper_left
}
}
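// A few hand-checked values for the predictor above, added for illustration
// (this test module is not part of the original suite). Equal distances
// prefer `left`, then `above`, per the spec's tie-breaking order.
#[cfg(test)]
mod paeth_predictor_examples {
    use super::paeth_predictor;

    #[test]
    fn picks_nearest_neighbor_breaking_ties_left_first() {
        // p = 10 + 10 - 10 = 10; all three distances are 0, so `left` wins.
        assert_eq!(paeth_predictor(10, 10, 10), 10);
        // p = 20 + 5 - 5 = 20; distance to `left` is 0, so it is nearest.
        assert_eq!(paeth_predictor(20, 5, 5), 20);
        // p = 5 + 20 - 5 = 20; distance to `above` is 0 this time.
        assert_eq!(paeth_predictor(5, 20, 5), 20);
    }
}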
//
// The "Paeth" filter diffs each byte against the nearest one of its
// neighbor pixels, to the left, above, and upper-left.
//
// Good for photographic images and such.
//
// Note this is the most expensive filter to calculate.
//
// https://www.w3.org/TR/PNG/#9Filter-type-4-Paeth
//
fn filter_paeth(bpp: usize, prev: &[u8], src: &[u8], dest: &mut [u8]) {
dest[0] = Filter::Paeth as u8;
filter_iter(bpp, prev, src, &mut dest[1 ..], |val, left, above, upper_left| -> u8 {
val.wrapping_sub(paeth_predictor(left, above, upper_left))
})
}
//
// For the complexity/compressibility heuristic. Absolute value
// of the byte treated as a signed value, extended to a u32.
//
// Note this doesn't produce useful results on the "none" filter,
// as it's expecting, well, a filter delta. :D
//
fn filter_complexity_delta(val: u8) -> u32 {
i32::abs(i32::from(val as i8)) as u32
}
//
// The maximum complexity heuristic value that can be represented
// without overflow.
//
fn complexity_max() -> u32 {
u32::max_value() - 256
}
//
// Any row with this number of bytes needs to check for overflow
// of the complexity heuristic.
//
fn complexity_big_row(len: usize) -> bool {
len >= (1 << 24)
}
//
// Complexity/compressibility heuristic recommended by the PNG spec
// and used in libpng as well.
//
// Note this doesn't produce useful results on the "none" filter.
//
// libpng tries to do this inline with the filter with a clever
// early return if "too complex", but I find that's slower on large
// files than just running the whole filter.
//
fn estimate_complexity(data: &[u8]) -> u32 {
let mut sum = 0u32;
//
// Very long rows could overflow the 32-bit complexity heuristic's
// accumulator, but it doesn't trigger until tens of millions
// of bytes per row. :)
//
// The check slows down the inner loop on more realistic sizes
// (say, ~31k bytes for a 7680 wide RGBA image) so we skip it.
//
if complexity_big_row(data.len()) {
for iter in data.iter() {
sum += filter_complexity_delta(*iter);
if sum > complexity_max() {
return complexity_max();
}
}
} else {
for iter in data.iter() {
sum += filter_complexity_delta(*iter);
}
}
sum
}
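// Worked illustration of the heuristic above (this test module is an addition,
// not part of the original crate): the score is simply the sum of the bytes'
// absolute values when read as signed deltas.
#[cfg(test)]
mod complexity_examples {
    use super::estimate_complexity;

    #[test]
    fn sums_absolute_signed_deltas() {
        // deltas +1, -1 (0xFF as i8), +3 add up to 1 + 1 + 3 = 5
        assert_eq!(estimate_complexity(&[0x01, 0xFF, 0x03]), 5);
        // a perfectly predicted (all-zero) residual row scores 0
        assert_eq!(estimate_complexity(&[0u8; 16]), 0);
    }
}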
//
// Holds a target row that can be filtered
// Can be reused.
//
struct Filterator {
filter: Filter,
bpp: usize,
data: Vec<u8>,
complexity: u32,
}
impl Filterator {
fn new(filter: Filter, bpp: usize, stride: usize) -> Filterator {
Filterator {
filter,
bpp,
data: vec![0u8; stride + 1],
complexity: 0,
}
}
#[inline(always)]
fn do_filter(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
match self.filter {
Filter::None => filter_none(self.bpp, prev, src, &mut self.data),
Filter::Sub => filter_sub(self.bpp, prev, src, &mut self.data),
Filter::Up => filter_up(self.bpp, prev, src, &mut self.data),
Filter::Average => filter_average(self.bpp, prev, src, &mut self.data),
Filter::Paeth => filter_paeth(self.bpp, prev, src, &mut self.data),
}
self.complexity = estimate_complexity(&self.data[1..]);
&self.data
}
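    // The wrappers below share this same body on purpose: each copy is
    // compiled with a different `target_feature`, so the autovectorizer can
    // emit wider SIMD for the identical scalar filter code, and `filter()`
    // picks the best available copy at runtime via `is_x86_feature_detected!`.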
#[cfg(target_arch = "x86")]
#[target_feature(enable = "sse2")]
unsafe fn do_filter_sse2(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
self.do_filter(prev, src)
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[target_feature(enable = "sse4.1")]
unsafe fn do_filter_sse41(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
self.do_filter(prev, src)
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[target_feature(enable = "sse4.2")]
unsafe fn do_filter_sse42(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
self.do_filter(prev, src)
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[target_feature(enable = "avx")]
unsafe fn do_filter_avx(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
self.do_filter(prev, src)
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[target_feature(enable = "avx2")]
unsafe fn do_filter_avx2(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
self.do_filter(prev, src)
}
fn filter(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
{
if is_x86_feature_detected!("avx2") {
return unsafe {
self.do_filter_avx2(prev, src)
};
}
if is_x86_feature_detected!("avx") {
return unsafe {
self.do_filter_avx(prev, src)
};
}
if is_x86_feature_detected!("sse4.2") {
return unsafe {
self.do_filter_sse42(prev, src)
};
}
if is_x86_feature_detected!("sse4.1") {
return unsafe {
self.do_filter_sse41(prev, src)
};
}
}
#[cfg(target_arch = "x86")]
{
// SSE2 is guaranteed on x86_64
// but may not be present on x86
if is_x86_feature_detected!("sse2") {
return unsafe {
self.do_filter_sse2(prev, src)
};
}
}
self.do_filter(prev, src)
}
fn get_data(&self) -> &[u8] {
&self.data
}
fn get_complexity(&self) -> u32 {
self.complexity
}
}
pub struct AdaptiveFilter {
mode: Mode<Filter>,
filter_none: Filterator,
filter_up: Filterator,
filter_sub: Filterator,
filter_average: Filterator,
filter_paeth: Filterator,
}
impl AdaptiveFilter {
pub fn new(header: Header, mode: Mode<Filter>) -> AdaptiveFilter {
let stride = header.stride();
let bpp = header.bytes_per_pixel();
AdaptiveFilter {
mode,
filter_none: Filterator::new(Filter::None, bpp, stride),
filter_up: Filterator::new(Filter::Up, bpp, stride),
filter_sub: Filterator::new(Filter::Sub, bpp, stride),
filter_average: Filterator::new(Filter::Average, bpp, stride),
filter_paeth: Filterator::new(Filter::Paeth, bpp, stride),
}
}
fn filter_adaptive(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
//
// Note the "none" filter is often good for things like
// line-art diagrams and screenshots that have lots of
// sharp pixel edges and long runs of solid colors.
//
// The adaptive filter algorithm doesn't work on it, however,
// since it measures accumulated filter prediction offets and
// that gives useless results on absolute color magnitudes.
//
// Compression could be improved for some files if a heuristic
// can be devised to check if the none filter will work well.
//
self.filter_sub.filter(prev, src);
let mut min = self.filter_sub.get_complexity();
self.filter_up.filter(prev, src);
min = cmp::min(min, self.filter_up.get_complexity());
self.filter_average.filter(prev, src);
min = cmp::min(min, self.filter_average.get_complexity());
self.filter_paeth.filter(prev, src);
min = cmp::min(min, self.filter_paeth.get_complexity());
if min == self.filter_sub.get_complexity() {
self.filter_sub.get_data()
} else if min == self.filter_up.get_complexity() {
self.filter_up.get_data()
} else if min == self.filter_average.get_complexity() {
self.filter_average.get_data()
} else /* if min == self.filter_paeth.get_complexity() */ {
self.filter_paeth.get_data()
}
}
pub fn filter(&mut self, prev: &[u8], src: &[u8]) -> &[u8] {
match self.mode {
Fixed(Filter::None) => self.filter_none.filter(prev, src),
Fixed(Filter::Sub) => self.filter_sub.filter(prev, src),
Fixed(Filter::Up) => self.filter_up.filter(prev, src),
Fixed(Filter::Average) => self.filter_average.filter(prev, src),
Fixed(Filter::Paeth) => self.filter_paeth.filter(prev, src),
Adaptive => self.filter_adaptive(prev, src),
}
}
}
#[cfg(test)]
mod tests {
use super::AdaptiveFilter;
use super::Mode;
use super::super::Header;
use super::super::ColorType;
#[test]
fn it_works() {
let mut header = Header::new();
header.set_size(1024, 768).unwrap();
header.set_color(ColorType::Truecolor, 8).unwrap();
let mut filter = AdaptiveFilter::new(header, Mode::Adaptive);
let prev = vec![0u8; header.stride()];
let row = vec![0u8; header.stride()];
let filtered_data = filter.filter(&prev, &row);
assert_eq!(filtered_data.len(), header.stride() + 1);
}
#[test]
fn it_works_16() {
let mut header = Header::new();
header.set_size(1024, 768).unwrap();
header.set_color(ColorType::Truecolor, 16).unwrap();
let mut filter = AdaptiveFilter::new(header, Mode::Adaptive);
let prev = vec![0u8; header.stride()];
let row = vec![0u8; header.stride()];
let filtered_data = filter.filter(&prev, &row);
assert_eq!(filtered_data.len(), header.stride() + 1);
}
}
| 31.690229 | 89 | 0.590632 |
0afe1f25093857d6e1342b31596b1993a7b77e99 | 20,588 | use std::error::Error as StdError;
use std::fmt;
use std::str;
use std::time::{SystemTime, Duration, UNIX_EPOCH};
#[cfg(target_os="cloudabi")]
mod max {
pub const SECONDS: u64 = ::std::u64::MAX / 1_000_000_000;
#[allow(unused)]
pub const TIMESTAMP: &'static str = "2554-07-21T23:34:33Z";
}
#[cfg(all(
target_pointer_width="32",
not(target_os="cloudabi"),
not(target_os="windows"),
not(all(target_arch="wasm32", not(target_os="emscripten")))
))]
mod max {
pub const SECONDS: u64 = ::std::i32::MAX as u64;
#[allow(unused)]
pub const TIMESTAMP: &'static str = "2038-01-19T03:14:07Z";
}
#[cfg(any(
target_pointer_width="64",
target_os="windows",
all(target_arch="wasm32", not(target_os="emscripten")),
))]
mod max {
pub const SECONDS: u64 = 253_402_300_800-1; // last second of year 9999
#[allow(unused)]
pub const TIMESTAMP: &str = "9999-12-31T23:59:59Z";
}
/// Error parsing datetime (timestamp)
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Error {
/// Numeric component is out of range
OutOfRange,
/// Bad character where digit is expected
InvalidDigit,
/// Other formatting errors
InvalidFormat,
}
impl StdError for Error {}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::OutOfRange => write!(f, "numeric component is out of range"),
Error::InvalidDigit => write!(f, "bad character where digit is expected"),
Error::InvalidFormat => write!(f, "timestamp format is invalid"),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
enum Precision {
Smart,
Seconds,
Millis,
Micros,
Nanos,
}
/// A wrapper type that allows you to Display a SystemTime
#[derive(Debug, Clone)]
pub struct Rfc3339Timestamp(SystemTime, Precision);
#[inline]
fn two_digits(b1: u8, b2: u8) -> Result<u64, Error> {
if b1 < b'0' || b2 < b'0' || b1 > b'9' || b2 > b'9' {
return Err(Error::InvalidDigit);
}
Ok(((b1 - b'0')*10 + (b2 - b'0')) as u64)
}
/// Parse RFC3339 timestamp `2018-02-14T00:28:07Z`
///
/// Supported feature: any precision of fractional
/// digits `2018-02-14T00:28:07.133Z`.
///
/// Unsupported feature: localized timestamps. Only UTC is supported.
pub fn parse_rfc3339(s: &str) -> Result<SystemTime, Error> {
if s.len() < "2018-02-14T00:28:07Z".len() {
return Err(Error::InvalidFormat);
}
let b = s.as_bytes();
if b[10] != b'T' || b[b.len()-1] != b'Z' {
return Err(Error::InvalidFormat);
}
parse_rfc3339_weak(s)
}
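// A short usage sketch (illustrative; the expected value is the same one the
// unit tests at the bottom of this file check):
//
//     use std::time::{Duration, UNIX_EPOCH};
//     let t = parse_rfc3339("2018-02-13T23:08:32Z").unwrap();
//     assert_eq!(t, UNIX_EPOCH + Duration::new(1_518_563_312, 0));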
/// Parse RFC3339-like timestamp `2018-02-14 00:28:07`
///
/// Supported features:
///
/// 1. Any precision of fractional digits `2018-02-14 00:28:07.133`.
/// 2. Supports timestamps with or without either of `T` or `Z`
/// 3. Anything valid for `parse_rfc3339` is valid for this function
///
/// Unsupported feature: localized timestamps. Only UTC is supported, even if
/// `Z` is not specified.
///
/// This function is intended to be used for parsing human input, whereas
/// `parse_rfc3339` is for strings generated programmatically.
pub fn parse_rfc3339_weak(s: &str) -> Result<SystemTime, Error> {
if s.len() < "2018-02-14T00:28:07".len() {
return Err(Error::InvalidFormat);
}
let b = s.as_bytes(); // for careless slicing
if b[4] != b'-' || b[7] != b'-' || (b[10] != b'T' && b[10] != b' ') ||
b[13] != b':' || b[16] != b':'
{
return Err(Error::InvalidFormat);
}
let year = two_digits(b[0], b[1])? * 100 + two_digits(b[2], b[3])?;
let month = two_digits(b[5], b[6])?;
let day = two_digits(b[8], b[9])?;
let hour = two_digits(b[11], b[12])?;
let minute = two_digits(b[14], b[15])?;
let mut second = two_digits(b[17], b[18])?;
if year < 1970 || hour > 23 || minute > 59 || second > 60 {
return Err(Error::OutOfRange);
}
    // TODO(tailhook) should we check that a leap second is only allowed at midnight?
if second == 60 {
second = 59
};
let leap_years = ((year - 1) - 1968) / 4 - ((year - 1) - 1900) / 100 +
((year - 1) - 1600) / 400;
let leap = is_leap_year(year);
let (mut ydays, mdays) = match month {
1 => (0, 31),
2 if leap => (31, 29),
2 => (31, 28),
3 => (59, 31),
4 => (90, 30),
5 => (120, 31),
6 => (151, 30),
7 => (181, 31),
8 => (212, 31),
9 => (243, 30),
10 => (273, 31),
11 => (304, 30),
12 => (334, 31),
_ => return Err(Error::OutOfRange),
};
if day > mdays || day == 0 {
return Err(Error::OutOfRange);
}
ydays += day - 1;
if leap && month > 2 {
ydays += 1;
}
let days = (year - 1970) * 365 + leap_years + ydays;
let time = second + minute * 60 + hour * 3600;
let mut nanos = 0;
let mut mult = 100_000_000;
if b.get(19) == Some(&b'.') {
for idx in 20..b.len() {
if b[idx] == b'Z' {
if idx == b.len()-1 {
break;
} else {
return Err(Error::InvalidDigit);
}
}
if b[idx] < b'0' || b[idx] > b'9' {
return Err(Error::InvalidDigit);
}
nanos += mult * (b[idx] - b'0') as u32;
mult /= 10;
}
} else if b.len() != 19 && (b.len() > 20 || b[19] != b'Z') {
return Err(Error::InvalidFormat);
}
let total_seconds = time + days * 86400;
if total_seconds > max::SECONDS {
return Err(Error::OutOfRange);
}
Ok(UNIX_EPOCH + Duration::new(total_seconds, nanos))
}
fn is_leap_year(y: u64) -> bool {
y % 4 == 0 && (y % 100 != 0 || y % 400 == 0)
}
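// Spot checks for the Gregorian rule above ("divisible by 4, except centuries,
// unless divisible by 400"); this small test module is an illustrative
// addition, not part of the original crate.
#[cfg(test)]
mod leap_year_examples {
    use super::is_leap_year;

    #[test]
    fn century_rule() {
        assert!(is_leap_year(1996));
        assert!(!is_leap_year(1900)); // century not divisible by 400
        assert!(is_leap_year(2000)); // divisible by 400
        assert!(!is_leap_year(2018));
    }
}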
/// Format an RFC3339 timestamp `2018-02-14T00:28:07Z`
///
/// This function formats timestamp with smart precision: i.e. if it has no
/// fractional seconds, they aren't written at all. And up to nine digits if
/// they are.
///
/// The value is always UTC and ignores system timezone.
pub fn format_rfc3339(system_time: SystemTime) -> Rfc3339Timestamp {
Rfc3339Timestamp(system_time, Precision::Smart)
}
/// Format an RFC3339 timestamp `2018-02-14T00:28:07Z`
///
/// This format always shows timestamp without fractional seconds.
///
/// The value is always UTC and ignores system timezone.
pub fn format_rfc3339_seconds(system_time: SystemTime) -> Rfc3339Timestamp {
Rfc3339Timestamp(system_time, Precision::Seconds)
}
/// Format an RFC3339 timestamp `2018-02-14T00:28:07.000Z`
///
/// This format always shows milliseconds even if millisecond value is zero.
///
/// The value is always UTC and ignores system timezone.
pub fn format_rfc3339_millis(system_time: SystemTime) -> Rfc3339Timestamp {
Rfc3339Timestamp(system_time, Precision::Millis)
}
/// Format an RFC3339 timestamp `2018-02-14T00:28:07.000000Z`
///
/// This format always shows microseconds even if microsecond value is zero.
///
/// The value is always UTC and ignores system timezone.
pub fn format_rfc3339_micros(system_time: SystemTime) -> Rfc3339Timestamp {
Rfc3339Timestamp(system_time, Precision::Micros)
}
/// Format an RFC3339 timestamp `2018-02-14T00:28:07.000000000Z`
///
/// This format always shows nanoseconds even if nanosecond value is zero.
///
/// The value is always UTC and ignores system timezone.
pub fn format_rfc3339_nanos(system_time: SystemTime) -> Rfc3339Timestamp {
Rfc3339Timestamp(system_time, Precision::Nanos)
}
impl fmt::Display for Rfc3339Timestamp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Precision::*;
let dur = self.0.duration_since(UNIX_EPOCH)
.expect("all times should be after the epoch");
let secs_since_epoch = dur.as_secs();
let nanos = dur.subsec_nanos();
if secs_since_epoch >= 253_402_300_800 { // year 9999
return Err(fmt::Error);
}
        /* 2000-03-01 (mod 400 year), immediately after Feb 29 */
const LEAPOCH: i64 = 11017;
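        // 11017 = days from 1970-01-01 to 2000-03-01; anchoring the cycle
        // just after Feb 29 puts the leap day at the end of every 4-, 100-
        // and 400-year cycle, which keeps the divisions below uniform.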
const DAYS_PER_400Y: i64 = 365*400 + 97;
const DAYS_PER_100Y: i64 = 365*100 + 24;
const DAYS_PER_4Y: i64 = 365*4 + 1;
let days = (secs_since_epoch / 86400) as i64 - LEAPOCH;
let secs_of_day = secs_since_epoch % 86400;
let mut qc_cycles = days / DAYS_PER_400Y;
let mut remdays = days % DAYS_PER_400Y;
if remdays < 0 {
remdays += DAYS_PER_400Y;
qc_cycles -= 1;
}
let mut c_cycles = remdays / DAYS_PER_100Y;
if c_cycles == 4 { c_cycles -= 1; }
remdays -= c_cycles * DAYS_PER_100Y;
let mut q_cycles = remdays / DAYS_PER_4Y;
if q_cycles == 25 { q_cycles -= 1; }
remdays -= q_cycles * DAYS_PER_4Y;
let mut remyears = remdays / 365;
if remyears == 4 { remyears -= 1; }
remdays -= remyears * 365;
let mut year = 2000 +
remyears + 4*q_cycles + 100*c_cycles + 400*qc_cycles;
let months = [31,30,31,30,31,31,30,31,30,31,31,29];
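        // month lengths starting from March, because the "year" in this
        // calculation begins on Mar 1; February (29 at most) comes last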
let mut mon = 0;
for mon_len in months.iter() {
mon += 1;
if remdays < *mon_len {
break;
}
remdays -= *mon_len;
}
let mday = remdays+1;
let mon = if mon + 2 > 12 {
year += 1;
mon - 10
} else {
mon + 2
};
let mut buf: [u8; 30] = [
// Too long to write as: b"0000-00-00T00:00:00.000000000Z"
b'0', b'0', b'0', b'0', b'-', b'0', b'0', b'-', b'0', b'0', b'T',
b'0', b'0', b':', b'0', b'0', b':', b'0', b'0',
b'.', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'0', b'Z',
];
buf[0] = b'0' + (year / 1000) as u8;
buf[1] = b'0' + (year / 100 % 10) as u8;
buf[2] = b'0' + (year / 10 % 10) as u8;
buf[3] = b'0' + (year % 10) as u8;
buf[5] = b'0' + (mon / 10) as u8;
buf[6] = b'0' + (mon % 10) as u8;
buf[8] = b'0' + (mday / 10) as u8;
buf[9] = b'0' + (mday % 10) as u8;
buf[11] = b'0' + (secs_of_day / 3600 / 10) as u8;
buf[12] = b'0' + (secs_of_day / 3600 % 10) as u8;
buf[14] = b'0' + (secs_of_day / 60 / 10 % 6) as u8;
buf[15] = b'0' + (secs_of_day / 60 % 10) as u8;
buf[17] = b'0' + (secs_of_day / 10 % 6) as u8;
buf[18] = b'0' + (secs_of_day % 10) as u8;
let offset = if self.1 == Seconds || nanos == 0 && self.1 == Smart {
buf[19] = b'Z';
19
} else if self.1 == Millis {
buf[20] = b'0' + (nanos / 100_000_000) as u8;
buf[21] = b'0' + (nanos / 10_000_000 % 10) as u8;
buf[22] = b'0' + (nanos / 1_000_000 % 10) as u8;
buf[23] = b'Z';
23
} else if self.1 == Micros {
buf[20] = b'0' + (nanos / 100_000_000) as u8;
buf[21] = b'0' + (nanos / 10_000_000 % 10) as u8;
buf[22] = b'0' + (nanos / 1_000_000 % 10) as u8;
buf[23] = b'0' + (nanos / 100_000 % 10) as u8;
buf[24] = b'0' + (nanos / 10_000 % 10) as u8;
buf[25] = b'0' + (nanos / 1_000 % 10) as u8;
buf[26] = b'Z';
26
} else {
buf[20] = b'0' + (nanos / 100_000_000) as u8;
buf[21] = b'0' + (nanos / 10_000_000 % 10) as u8;
buf[22] = b'0' + (nanos / 1_000_000 % 10) as u8;
buf[23] = b'0' + (nanos / 100_000 % 10) as u8;
buf[24] = b'0' + (nanos / 10_000 % 10) as u8;
buf[25] = b'0' + (nanos / 1_000 % 10) as u8;
buf[26] = b'0' + (nanos / 100 % 10) as u8;
buf[27] = b'0' + (nanos / 10 % 10) as u8;
buf[28] = b'0' + (nanos / 1 % 10) as u8;
// 29th is 'Z'
29
};
// we know our chars are all ascii
f.write_str(str::from_utf8(&buf[..=offset]).expect("Conversion to utf8 failed"))
}
}
#[cfg(test)]
mod test {
use std::str::from_utf8;
use std::time::{UNIX_EPOCH, SystemTime, Duration};
use rand::Rng;
use super::{parse_rfc3339, parse_rfc3339_weak, format_rfc3339};
use super::{format_rfc3339_millis, format_rfc3339_micros};
use super::{format_rfc3339_nanos};
use super::max;
fn from_sec(sec: u64) -> (String, SystemTime) {
let s = time::at_utc(time::Timespec { sec: sec as i64, nsec: 0 })
.rfc3339().to_string();
let time = UNIX_EPOCH + Duration::new(sec, 0);
(s, time)
}
#[test]
#[cfg(all(target_pointer_width="32", target_os="linux"))]
fn year_after_2038_fails_gracefully() {
// next second
assert_eq!(parse_rfc3339("2038-01-19T03:14:08Z").unwrap_err(),
super::Error::OutOfRange);
assert_eq!(parse_rfc3339("9999-12-31T23:59:59Z").unwrap_err(),
super::Error::OutOfRange);
}
#[test]
fn smoke_tests_parse() {
assert_eq!(parse_rfc3339("1970-01-01T00:00:00Z").unwrap(),
UNIX_EPOCH + Duration::new(0, 0));
assert_eq!(parse_rfc3339("1970-01-01T00:00:01Z").unwrap(),
UNIX_EPOCH + Duration::new(1, 0));
assert_eq!(parse_rfc3339("2018-02-13T23:08:32Z").unwrap(),
UNIX_EPOCH + Duration::new(1_518_563_312, 0));
assert_eq!(parse_rfc3339("2012-01-01T00:00:00Z").unwrap(),
UNIX_EPOCH + Duration::new(1_325_376_000, 0));
}
#[test]
fn smoke_tests_format() {
assert_eq!(
format_rfc3339(UNIX_EPOCH + Duration::new(0, 0)).to_string(),
"1970-01-01T00:00:00Z");
assert_eq!(
format_rfc3339(UNIX_EPOCH + Duration::new(1, 0)).to_string(),
"1970-01-01T00:00:01Z");
assert_eq!(
format_rfc3339(UNIX_EPOCH + Duration::new(1_518_563_312, 0)).to_string(),
"2018-02-13T23:08:32Z");
assert_eq!(
format_rfc3339(UNIX_EPOCH + Duration::new(1_325_376_000, 0)).to_string(),
"2012-01-01T00:00:00Z");
}
#[test]
fn smoke_tests_format_millis() {
assert_eq!(
format_rfc3339_millis(UNIX_EPOCH +
Duration::new(0, 0)).to_string(),
"1970-01-01T00:00:00.000Z");
assert_eq!(
format_rfc3339_millis(UNIX_EPOCH +
Duration::new(1_518_563_312, 123_000_000)).to_string(),
"2018-02-13T23:08:32.123Z");
}
#[test]
fn smoke_tests_format_micros() {
assert_eq!(
format_rfc3339_micros(UNIX_EPOCH +
Duration::new(0, 0)).to_string(),
"1970-01-01T00:00:00.000000Z");
assert_eq!(
format_rfc3339_micros(UNIX_EPOCH +
Duration::new(1_518_563_312, 123_000_000)).to_string(),
"2018-02-13T23:08:32.123000Z");
assert_eq!(
format_rfc3339_micros(UNIX_EPOCH +
Duration::new(1_518_563_312, 456_123_000)).to_string(),
"2018-02-13T23:08:32.456123Z");
}
#[test]
fn smoke_tests_format_nanos() {
assert_eq!(
format_rfc3339_nanos(UNIX_EPOCH +
Duration::new(0, 0)).to_string(),
"1970-01-01T00:00:00.000000000Z");
assert_eq!(
format_rfc3339_nanos(UNIX_EPOCH +
Duration::new(1_518_563_312, 123_000_000)).to_string(),
"2018-02-13T23:08:32.123000000Z");
assert_eq!(
format_rfc3339_nanos(UNIX_EPOCH +
Duration::new(1_518_563_312, 789_456_123)).to_string(),
"2018-02-13T23:08:32.789456123Z");
}
#[test]
fn upper_bound() {
let max = UNIX_EPOCH + Duration::new(max::SECONDS, 0);
assert_eq!(parse_rfc3339(&max::TIMESTAMP).unwrap(), max);
assert_eq!(format_rfc3339(max).to_string(), max::TIMESTAMP);
}
#[test]
fn leap_second() {
assert_eq!(parse_rfc3339("2016-12-31T23:59:60Z").unwrap(),
UNIX_EPOCH + Duration::new(1_483_228_799, 0));
}
#[test]
fn first_731_days() {
let year_start = 0; // 1970
for day in 0..= 365 * 2 { // scan leap year and non-leap year
let (s, time) = from_sec(year_start + day * 86400);
assert_eq!(parse_rfc3339(&s).unwrap(), time);
assert_eq!(format_rfc3339(time).to_string(), s);
}
}
#[test]
fn the_731_consecutive_days() {
let year_start = 1_325_376_000; // 2012
for day in 0..= 365 * 2 { // scan leap year and non-leap year
let (s, time) = from_sec(year_start + day * 86400);
assert_eq!(parse_rfc3339(&s).unwrap(), time);
assert_eq!(format_rfc3339(time).to_string(), s);
}
}
#[test]
fn all_86400_seconds() {
let day_start = 1_325_376_000;
for second in 0..86400 { // scan leap year and non-leap year
let (s, time) = from_sec(day_start + second);
assert_eq!(parse_rfc3339(&s).unwrap(), time);
assert_eq!(format_rfc3339(time).to_string(), s);
}
}
#[test]
fn random_past() {
let upper = SystemTime::now().duration_since(UNIX_EPOCH).unwrap()
.as_secs();
for _ in 0..10000 {
let sec = rand::thread_rng().gen_range(0, upper);
let (s, time) = from_sec(sec);
assert_eq!(parse_rfc3339(&s).unwrap(), time);
assert_eq!(format_rfc3339(time).to_string(), s);
}
}
#[test]
fn random_wide_range() {
for _ in 0..100_000 {
let sec = rand::thread_rng().gen_range(0, max::SECONDS);
let (s, time) = from_sec(sec);
assert_eq!(parse_rfc3339(&s).unwrap(), time);
assert_eq!(format_rfc3339(time).to_string(), s);
}
}
#[test]
fn milliseconds() {
assert_eq!(parse_rfc3339("1970-01-01T00:00:00.123Z").unwrap(),
UNIX_EPOCH + Duration::new(0, 123_000_000));
assert_eq!(format_rfc3339(UNIX_EPOCH + Duration::new(0, 123_000_000))
.to_string(), "1970-01-01T00:00:00.123000000Z");
}
#[test]
#[should_panic(expected="OutOfRange")]
fn zero_month() {
parse_rfc3339("1970-00-01T00:00:00Z").unwrap();
}
#[test]
#[should_panic(expected="OutOfRange")]
fn big_month() {
parse_rfc3339("1970-32-01T00:00:00Z").unwrap();
}
#[test]
#[should_panic(expected="OutOfRange")]
fn zero_day() {
parse_rfc3339("1970-01-00T00:00:00Z").unwrap();
}
#[test]
#[should_panic(expected="OutOfRange")]
fn big_day() {
parse_rfc3339("1970-12-35T00:00:00Z").unwrap();
}
#[test]
#[should_panic(expected="OutOfRange")]
fn big_day2() {
parse_rfc3339("1970-02-30T00:00:00Z").unwrap();
}
#[test]
#[should_panic(expected="OutOfRange")]
fn big_second() {
parse_rfc3339("1970-12-30T00:00:78Z").unwrap();
}
#[test]
#[should_panic(expected="OutOfRange")]
fn big_minute() {
parse_rfc3339("1970-12-30T00:78:00Z").unwrap();
}
#[test]
#[should_panic(expected="OutOfRange")]
fn big_hour() {
parse_rfc3339("1970-12-30T24:00:00Z").unwrap();
}
#[test]
fn break_data() {
for pos in 0.."2016-12-31T23:59:60Z".len() {
let mut s = b"2016-12-31T23:59:60Z".to_vec();
s[pos] = b'x';
parse_rfc3339(from_utf8(&s).unwrap()).unwrap_err();
}
}
#[test]
fn weak_smoke_tests() {
assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00").unwrap(),
UNIX_EPOCH + Duration::new(0, 0));
parse_rfc3339("1970-01-01 00:00:00").unwrap_err();
assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00.000123").unwrap(),
UNIX_EPOCH + Duration::new(0, 123_000));
parse_rfc3339("1970-01-01 00:00:00.000123").unwrap_err();
assert_eq!(parse_rfc3339_weak("1970-01-01T00:00:00.000123").unwrap(),
UNIX_EPOCH + Duration::new(0, 123_000));
parse_rfc3339("1970-01-01T00:00:00.000123").unwrap_err();
assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00.000123Z").unwrap(),
UNIX_EPOCH + Duration::new(0, 123_000));
parse_rfc3339("1970-01-01 00:00:00.000123Z").unwrap_err();
assert_eq!(parse_rfc3339_weak("1970-01-01 00:00:00Z").unwrap(),
UNIX_EPOCH + Duration::new(0, 0));
parse_rfc3339("1970-01-01 00:00:00Z").unwrap_err();
}
}
| 33.367909 | 88 | 0.553381 |
f81e2ad84d3b6792910774ba4baa0a8bf9e6c1c4 | 90,534 | #[doc = "✍\u{fe0f}"]
pub const WRITING_HAND: crate::Emoji = crate::Emoji {
glyph: "✍\u{fe0f}",
codepoint: "270D FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.7f32,
name: "writing hand",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[crate::Emoji {
glyph: "✍",
codepoint: "270D",
status: crate::Status::Unqualified,
introduction_version: 0.7f32,
name: "writing hand",
group: "People & Body",
subgroup: "hand-prop",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("hand wat skryf"),
keywords: &["hand", "hand wat skryf", "liggaam", "skryf"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("እየጻፈ ያለ እጅ"),
keywords: &["መጻፍ", "አካል", "እየጻፈ ያለ እጅ", "እጅ"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("يد تكتب"),
keywords: &["كتابة", "يد", "يد تكتب"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("লিখি থক\u{9be} হ\u{9be}ত"),
keywords: &["লিখ\u{9be}", "লিখি থক\u{9be} হ\u{9be}ত", "হ\u{9be}ত"],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("yazı əli"),
keywords: &["orqan", "yazmaq", "yazı əli", "əl"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("пішучая рука"),
keywords: &["асадка", "пісьмо", "пішучая рука", "рука", "ручка"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("Пишеща ръка"),
keywords: &["Пишеща ръка", "пиша", "ръка", "тяло"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("লেখ\u{9be}র হ\u{9be}ত"),
keywords: &["লেখ\u{9be}", "লেখ\u{9be}র হ\u{9be}ত", "শরীর", "হ\u{9be}ত"],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("ruka koja piše"),
keywords: &["pisanje", "ruka", "ruka koja piše"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("mà que escriu"),
keywords: &["escriure", "mà", "mà que escriu"],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎤᏬᏱ ᎪᏪᎳᏅᎢ"),
keywords: &["ᎤᏬᏱ", "ᎤᏬᏱ ᎪᏪᎳᏅᎢ", "ᎪᏪᎶᏗ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("píšící ruka"),
keywords: &["psát", "píšící ruka", "ruka", "tělo"],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("dwylo yn ysgrifennu"),
keywords: &["corff", "dwylo yn ysgrifennu", "llaw", "ysgrifennu"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("skrivende hånd"),
keywords: &["hånd", "skrive", "skrivende hånd"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("schreibende Hand"),
keywords: &["Hand", "Schreiben", "schreiben", "schreibende Hand"],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("χέρι που γράφει"),
keywords: &["γράφω", "σώμα", "χέρι", "χέρι που γράφει"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("writing hand"),
keywords: &["hand", "write", "writing hand"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("mano escribiendo"),
keywords: &["cuerpo", "escribir", "lápiz", "mano", "mano escribiendo"],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("mano escribiendo"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["escribir", "mano", "mano escribiendo"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("kirjutav käsi"),
keywords: &["keha", "kirjutamine", "kirjutav käsi", "käsi"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("idazten ari den eskua"),
keywords: &["esku", "gorputz", "idatzi", "idazten ari den eskua"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("نوشتن با دست"),
keywords: &["دست", "نوشتن", "نوشتن با دست"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("kirjoittava käsi"),
keywords: &["kirjoittaa", "kirjoittava käsi", "käsi", "vartalo"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("nagsusulat na kamay"),
keywords: &["kamay", "nagsusulat", "nagsusulat na kamay"],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("skrivandi hond"),
keywords: &["hond", "skriva", "skrivandi hond"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("main qui écrit"),
keywords: &["main", "main qui écrit", "écrire"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("↑↑↑"),
keywords: &["main", "main qui écrit", "écrire"],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("lámh scríbhneoireachta"),
keywords: &[
"corp",
"lámh",
"lámh scríbhneoireachta",
"lámha ag scríobh",
"scríbhneoireacht",
],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("làmh a’ sgrìobhadh"),
keywords: &["làmh", "làmh a’ sgrìobhadh", "sgrìobh", "sgrìobhadh"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("man escribindo"),
keywords: &["bolígrafo", "escribir", "lapis", "man", "man escribindo"],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("લખી રહ\u{ac7}લો હાથ"),
keywords: &[
"લખતો",
"લખવ\u{ac1}\u{a82}",
"લખાણ",
"લખી રહ\u{ac7}લો હાથ",
"શરીર",
"હાથ",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("hannu mai rubutawa"),
keywords: &["hannu", "hannu mai rubutawa", "rubuta"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("יד כותבת"),
keywords: &["יד", "כותבת", "כתב יד", "כתיבה"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("लिखता ह\u{941}आ हाथ"),
keywords: &["लिखता ह\u{941}आ हाथ", "लिखना", "शरीर", "हाथ"],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("ruka koja piše"),
keywords: &["pisanje", "ruka", "ruka koja piše", "tijelo"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("író kéz"),
keywords: &["kéz", "test", "írás", "író kéz"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("գրող ձեռք"),
keywords: &["գրել", "գրող ձեռք", "ձեռք", "մարմին"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tangan menulis"),
keywords: &[
"badan",
"nulis",
"tangan",
"tangan menulis",
"tubuh",
"tulis",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("aka na-ede ihe"),
keywords: &["aka", "aka na-ede ihe", "dèe"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("skrifandi hönd"),
keywords: &["hönd", "líkami", "skrifa", "skrifandi hönd"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("mano che scrive"),
keywords: &["mano che scrive", "scrivere", "scrivere a mano"],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("書いている手"),
keywords: &["手", "手で書く", "手書き", "書いている手"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("tangan nulis"),
keywords: &["nulis", "tangan"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ხელი, რომელიც წერს"),
keywords: &["სხეული", "ტანი", "წერა", "ხელი", "ხელი, რომელიც წერს"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("қолмен жазу"),
keywords: &["дене", "жазу", "қол", "қолмен жазу"],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("skrivende hånd"),
keywords: &["hånd", "krop", "skrive", "skrivende hånd"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some("ដៃកាន\u{17cb}ប\u{17ca}\u{17b7}ចសរសេរ"),
keywords: &[
"ដៃកាន\u{17cb}ប\u{17ca}\u{17b7}កសរសេរ",
"ដៃកាន\u{17cb}ប\u{17ca}\u{17b7}ចសរសេរ",
"ប\u{17ca}\u{17b7}ក",
"ប\u{17ca}\u{17b7}ច",
"សរសេរ",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಬರ\u{cc6}ಯುತ\u{ccd}ತ\u{cbf}ರುವ ಕೈ"),
keywords: &["ಬರ\u{cc6}ಯುತ\u{ccd}ತ\u{cbf}ರುವ ಕೈ", "ಬರ\u{cc6}ಯುವ ಕೈ"],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("글을 쓰고 있는 손"),
keywords: &["글을 쓰고 있는 손", "손", "연필 쥔 손"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("बरोवपाचो हात"),
keywords: &["बरोवपाचो हात", "हात । बरोवप । बरोवपाचो हात"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("жазып жаткан кол"),
keywords: &["дене", "жаз", "жазып жаткан кол", "кол"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("schreiwend Hand"),
keywords: &["Hand", "schreiwen", "schreiwend Hand"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ມ\u{eb7}ກຳລ\u{eb1}ງຂຽນ"),
keywords: &["ຂຽນ", "ມ\u{eb7}", "ມ\u{eb7}ກຳລ\u{eb1}ງຂຽນ", "ຮ\u{ec8}າງກາຍ"],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rašanti ranka"),
keywords: &["ranka", "rašanti ranka", "rašyti"],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("rakstoša roka"),
keywords: &["rakstoša roka", "rakstīt", "roka", "ķermenis"],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("ringa tuhituhi"),
keywords: &["ringa", "ringa tuhituhi", "tuhi"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("рака што пишува"),
keywords: &["пишува", "рака", "рака што пишува", "тело"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("എഴ\u{d41}ത\u{d41}ന\u{d4d}ന കൈ"),
keywords: &[
"എഴ\u{d41}ത\u{d41}ക",
"എഴ\u{d41}ത\u{d41}ന\u{d4d}ന കൈ",
"കൈ",
"കൈയ\u{d4d}യെഴ\u{d41}ത\u{d4d}ത\u{d4d}",
"ശരീരം",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("бичиж байгаа гар"),
keywords: &["бичиж байгаа гар", "бичих", "гар", "үзэг"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("लिखाण करणारा हात"),
keywords: &["लिखाण करणारा हात", "लिहिण\u{947}", "हात"],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("tangan menulis"),
keywords: &["badan", "tangan", "tangan menulis", "tulis"],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("id li tikteb biha"),
keywords: &["id", "id li tikteb biha", "tikteb"],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("စာရေးနေသည\u{1037}\u{103a} လက\u{103a}"),
keywords: &[
"စာရေးခြင\u{103a}း",
"စာရေးနေသည\u{1037}\u{103a} လက\u{103a}",
"စာရေးနေသော လက\u{103a}",
"လက\u{103a}",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("skrivende hånd"),
keywords: &["hånd", "kropp", "skrive", "skrivende hånd"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("ल\u{947}खिरह\u{947}को हात"),
keywords: &[
"ल\u{947}खिरह\u{947}को हात",
"ल\u{947}ख\u{94d}न\u{947}",
"शरीर",
"हात",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("schrijvende hand"),
keywords: &["hand", "schrijven", "schrijvende hand"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("skrivande hand"),
keywords: &["hand", "kropp", "skrivande hand", "skrive"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଲେଖ\u{b3f}ବ\u{b3e} ହ\u{b3e}ତ"),
keywords: &["ଲେଖ\u{b3f}ବ\u{b3e}", "ହ\u{b3e}ତ"],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਲਿਖਣਾ, ਲਿਖਾਈ"),
keywords: &[
"ਲਿਖਣ ਵਾਲਾ ਹ\u{a71}ਥ",
"ਲਿਖਣਾ",
"ਲਿਖਣਾ, ਲਿਖਾਈ",
"ਲਿਖਾਵਟ",
"ਹ\u{a71}ਥ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("لکھائی کردا ہتھ"),
keywords: &["لکھائی کردا ہتھ", "لکھنا", "ہتھ"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Hand Wé De Raít Sọ\u{301}mtin"),
keywords: &["Hand", "Hand Wé De Raít Sọ\u{301}mtin", "Rait"],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("pisząca dłoń"),
keywords: &["pisać", "pisać odręcznie", "pisząca dłoń", "piszę"],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("لیکونکې لاس"),
keywords: &["لاس", "لیکل", "لیکونکې لاس"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("escrevendo à mão"),
keywords: &["caneta", "escrevendo", "escrevendo à mão", "mão"],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("mão a escrever"),
keywords: &["corpo", "escrever", "mão", "mão a escrever"],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("maki qillqachkan"),
keywords: &["maki", "maki qillqachkan", "qillqay"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("mână scriind"),
keywords: &["mână", "mână scriind", "scris"],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-128"),
keywords: &["E10-128"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("пишущая рука"),
keywords: &["бумага", "письмо", "пишущая рука", "рука", "ручка"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("ikiganza cyandika"),
keywords: &["ikiganza cyandika", "intoki", "kwandikisha"],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("لکندڙ هٿ"),
keywords: &["لکندڙ هٿ", "لکڻ", "هٿ"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("ල\u{dd2}යම\u{dd2}න\u{dca} ස\u{dd2}ට\u{dd2}න අත"),
keywords: &[
"අත",
"ල\u{dd2}යම\u{dd2}න\u{dca} ස\u{dd2}ට\u{dd2}න අත",
"ල\u{dd2}ව\u{dd3}ම",
"ශර\u{dd3}රය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("píšuca ruka"),
keywords: &["pero", "písať", "píšuca ruka", "ruka"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("pišoča dlan"),
keywords: &["dlan", "pisati", "pišoča dlan", "telo"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("gacan wax qoraysa"),
keywords: &["gacan", "gacan wax qoraysa", "qor"],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("dorë që shkruan"),
keywords: &["dorë", "dorë që shkruan", "shkruaj", "trup"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("рука која пише"),
keywords: &["писaњe", "рукa", "рука која пише"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["рука која пише"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("ruka koja piše"),
keywords: &["pisanje", "ruka", "ruka koja piše"],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: None,
keywords: &["ruka koja piše"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("hand som skriver"),
keywords: &["hand", "hand som skriver", "skriver"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mkono unaoandika"),
keywords: &["andika", "mkono", "mkono unaoandika", "mwili"],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("எழுதுதல\u{bcd}"),
keywords: &["எழுது", "எழுதுதல\u{bcd}"],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("వ\u{c4d}ర\u{c3e}స\u{c4d}తున\u{c4d}న చ\u{c47}య\u{c3f}"),
keywords: &[
"చ\u{c47}య\u{c3f}",
"వ\u{c4d}ర\u{c3e}యడం",
"వ\u{c4d}ర\u{c3e}స\u{c4d}తున\u{c4d}న చ\u{c47}య\u{c3f}",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("дасти навишта истода"),
keywords: &["даст", "дасти навишта истода", "навиштан"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("เข\u{e35}ยนหน\u{e31}งส\u{e37}อ"),
keywords: &[
"ม\u{e37}อ",
"อว\u{e31}ยวะ",
"เข\u{e35}ยน",
"เข\u{e35}ยนหน\u{e31}งส\u{e37}อ",
],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("ýazýan el"),
keywords: &["el", "ýazmak", "ýazýan el"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("nima ʻoku tohi"),
keywords: &["nima ʻoku tohi"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("yazı yazan el"),
keywords: &["beden", "el", "yazma", "yazı yazan el"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("يېزىۋاتقان قول"),
keywords: &["قول", "يېزىش", "يېزىۋاتقان قول"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("рука, що пише"),
keywords: &["писати", "рука", "рука, що пише", "тіло"],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("لکھتا ہوا ہاتھ"),
keywords: &["جسم", "لکھتا ہوا ہاتھ", "لکھنا", "ہاتھ"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("yozayotgan qo‘l"),
keywords: &["qo‘l", "tana", "yozayotgan qo‘l", "yozish"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("bàn tay đang viết"),
keywords: &["bàn tay đang viết", "tay", "viết"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("loxo buy bind"),
keywords: &["bind", "loxo", "loxo buy bind"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("isandla esibhalayo"),
keywords: &["bhala", "isandla", "isandla esibhalayo"],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("ńkọ\u{301}wé"),
keywords: &["kọ\u{300}wé", "ńkọ\u{301}wé", "ọwọ\u{301}"],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("寫字嘅手"),
keywords: &["寫字", "寫字嘅手", "手"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("写字嘅手"),
keywords: &["写字", "写字嘅手", "手"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("写字"),
keywords: &["写", "写字", "笔"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("寫"),
keywords: &["寫", "書寫", "記錄"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("isandla esibhalayo"),
keywords: &["isandla", "isandla esibhalayo", "ukubhala", "umzimba"],
},
],
};
#[doc = "✍🏻"]
pub const WRITING_HAND_LIGHT_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "✍🏻",
codepoint: "270D 1F3FB",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "writing hand: light skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "✍🏼"]
pub const WRITING_HAND_MEDIUM_LIGHT_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "✍🏼",
codepoint: "270D 1F3FC",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "writing hand: medium-light skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "✍🏽"]
pub const WRITING_HAND_MEDIUM_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "✍🏽",
codepoint: "270D 1F3FD",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "writing hand: medium skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "✍🏾"]
pub const WRITING_HAND_MEDIUM_DARK_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "✍🏾",
codepoint: "270D 1F3FE",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "writing hand: medium-dark skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "✍🏿"]
pub const WRITING_HAND_DARK_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "✍🏿",
codepoint: "270D 1F3FF",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "writing hand: dark skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "💅"]
pub const NAIL_POLISH: crate::Emoji = crate::Emoji {
glyph: "💅",
codepoint: "1F485",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "nail polish",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("naellak"),
keywords: &[
"kosmetiek",
"liggaam",
"manikuur",
"nael",
"naellak",
"verf",
"versorg",
],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("የጥፍር ቀለም"),
keywords: &["እንክብካቤ", "ኮስሞቲክስ", "የጣት ውበት ጥበቃ", "የጥፍር ቀለም", "ጥፍር"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("طلاء أظافر"),
keywords: &[
"أظافر",
"تهذيب أظافر",
"طلاء أظافر",
"مانكير",
"مستحضرات تجميل",
],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("নেইল প’লিচ"),
keywords: &[
"কছমেটিক\u{9cd}স",
"নখ",
"নেইল প’লিচ",
"প’লিচ",
"মেনিকিয\u{9bc}ৰ",
"যত\u{9cd}ন",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("dırnaq laklamaq"),
keywords: &[
"dırnaq",
"dırnaq laklamaq",
"kosmetika",
"lak",
"manikür",
"orqan",
"qulluq",
],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("лак для ногцяў"),
keywords: &["догляд", "касметыка", "лак для ногцяў", "ногці", "рука"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("Лак за нокти"),
keywords: &["Лак за нокти", "козметика", "лак", "маникюр", "нокти"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("নেল পলিশ"),
keywords: &[
"নখ",
"নেল পলিশ",
"প\u{9be}লিশ",
"প\u{9cd}রস\u{9be}ধনী",
"ম\u{9cd}য\u{9be}নিকিউর",
"যত\u{9cd}ন",
"শরীর",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("lakiranje noktiju"),
keywords: &[
"kozmetika",
"lak za nokte",
"lakiranje noktiju",
"nokti",
"uljepšavanje",
],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("esmalt d’ungles"),
keywords: &[
"cosmètics",
"esmalt",
"esmalt d’ungles",
"manicura",
"ungla",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎤᏑᎦᏢ ᎦᏅᎵᏰᏗ"),
keywords: &["ᎠᎦᏎᏍᏙᏗ", "ᎠᏓᏅᎵᏰᏙᏗ", "ᎤᏑᎦᏢ", "ᎦᏅᎵᏰᏗ", "ᏗᏍᏙᏰᏗ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("lak na nehty"),
keywords: &[
"kosmetika",
"lak",
"lak na nehty",
"manikúra",
"nehet",
"péče",
"tělo",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("farnais ewinedd"),
keywords: &["corff", "ewyn", "farnais ewinedd", "gofal", "triniaeth"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("neglelak"),
keywords: &["kosmetik", "manicure", "negle", "neglelak"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Nagellack"),
keywords: &["Kosmetik", "Maniküre", "Nagel", "Nagellack", "Nagelpflege"],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("βάψιμο νυχιών"),
keywords: &[
"βάψιμο νυχιών",
"βερνίκι",
"καλλυντικά",
"μανικιούρ",
"νύχι",
"σώμα",
"φροντίδα",
],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("nail polish"),
keywords: &["care", "cosmetics", "manicure", "nail", "polish"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("pintarse las uñas"),
keywords: &[
"cosmética",
"esmalte",
"manicura",
"pintarse las uñas",
"uñas",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("pintarse las uñas"),
keywords: &["esmalte", "pintarse las uñas", "uñas"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("pintarse las uñas"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("küünelakk"),
keywords: &[
"hooldus",
"keha",
"kosmeetika",
"küünelakk",
"küüs",
"lakk",
"maniküür",
],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("azazkaletako esmaltea"),
keywords: &[
"azazkal",
"azazkaletako esmaltea",
"edertasun",
"esmalte",
"gorputz",
"kosmetika",
"manikura",
],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("لاک زدن"),
keywords: &["آرایش", "رسیدگی", "لاک", "لاک زدن", "مانیکور", "ناخن"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("kynsilakka"),
keywords: &[
"kosmetiikka",
"kynsi",
"kynsihuolto",
"kynsilakka",
"manikyyri",
"vartalo",
],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("nail polish"),
keywords: &[
"cosmetics",
"daliri",
"kamay",
"kuko",
"manicure",
"nail polish",
"polish",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("neglalakk"),
keywords: &["fríðkanarevni", "negl", "neglalakk", "vakurleikarøkt"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("vernis à ongles"),
keywords: &["maquillage", "vernis à ongles"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("↑↑↑"),
keywords: &[
"manucure",
"maquillage",
"ongle",
"vernis",
"vernis à ongles",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("vearnais ingne"),
keywords: &["cosmaidí", "smideadh", "vearnais ingne"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("lìomh ìnean"),
keywords: &["cùram", "làmh-mhaiseachadh", "lìomh", "maise", "ìnean"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("pintar as uñas"),
keywords: &["cosmético", "esmalte", "manicura", "pintar as uñas", "uñas"],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ન\u{ac7}ઇલ પોલિશ"),
keywords: &[
"કોસ\u{acd}મ\u{ac7}ટિક\u{acd}સ",
"નખ",
"નખની સ\u{a82}ભાળ",
"ન\u{ac7}ઇલ પોલિશ",
"હાથ તથા નખની સાજસ\u{a82}ભાળ",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("jan farce"),
keywords: &[
"farce",
"jan farce",
"kayan shafawa",
"kula",
"mai",
"yankan farce",
],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("מריחת לק"),
keywords: &[
"טיפוח",
"לק",
"מניקור",
"מריחת לק",
"מריחת לק על ציפורניים",
"ציפורניים",
"קוסמטיקה",
],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("न\u{947}ल पॉलिश"),
keywords: &[
"कॉस\u{94d}म\u{947}टिक",
"नाख\u{942}न",
"नाख\u{942}नो\u{902} की द\u{947}खर\u{947}ख",
"न\u{947}ल पॉलिश",
"म\u{947}नीक\u{94d}योर",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("lak za nokte"),
keywords: &["kozmetika", "lak", "lak za nokte", "manikura", "nokti"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("körömlakk"),
keywords: &["kifestés", "köröm", "körömlakk", "lakk", "manikűr"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("եղունգների լաքապատում"),
keywords: &[
"եղունգ",
"եղունգների լաքապատում",
"խնամք",
"կոսմետիկա",
"հղկել",
"մատնահարդարում",
"մարմին",
],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("poles kuku"),
keywords: &[
"cat kuku",
"kosmetik",
"kuku",
"manikur",
"perawatan",
"poles kuku",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("nteji mbọaka"),
keywords: &[
"mbọ",
"mkpụcha aka",
"nleta",
"nteji",
"nteji mbọaka",
"ọtanjele",
],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("naglalakk"),
keywords: &["handsnyrting", "naglalakk", "neglur", "snyrtivörur"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("smalto per unghie"),
keywords: &["cosmetici", "manicure", "smalto", "smalto per unghie"],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("マニキュアを塗る手"),
keywords: &["ネイル", "マニキュア", "マニキュアを塗る手", "爪"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("cet kuku"),
keywords: &["cet kuku", "kosmetik", "kuku", "menikur", "ngrumat"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ფრჩხილების ლაქი"),
keywords: &[
"კოსმეტიკა",
"მანიკური",
"მოვლა",
"სხეული",
"ტანი",
"ფრჩხილების ლაქი",
],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("маникюр"),
keywords: &["бояу", "косметика", "күтім", "маникюр", "тырнақ"],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("neglelak"),
keywords: &["kosmetik", "negle", "neglelak"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some("លាបថ\u{17d2}នា\u{17c6}ក\u{17d2}រចក"),
keywords: &["ក\u{17d2}រចក", "លាបថ\u{17d2}នា\u{17c6}ក\u{17d2}រចក"],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ನೈಲ\u{ccd} ಪಾಲೀಷ\u{ccd}"),
keywords: &[
"ಉಗುರು ಪಾಲ\u{cbf}ಷ\u{ccd}",
"ನೈಲ\u{ccd} ಪಾಲೀಷ\u{ccd}",
"ಹಸ\u{ccd}ತಾಲಂಕಾರ ಮಾಡು",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("매니큐어"),
keywords: &["네일 아트", "네일 케어", "매니큐어"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("न\u{947}ल पॉलिश"),
keywords: &[
"जतन । सौ\u{902}दर\u{94d}यप\u{94d}रसाधन । म\u{945}निक\u{94d}य\u{941}अर ।न\u{947}ल पॉलिश",
"न\u{947}ल पॉलिश",
],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("тырмак боёо"),
keywords: &[
"косметика",
"маникюр",
"тырмак",
"тырмак боёо",
"өзүнө кам көрүү",
],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Neellack"),
keywords: &["Kosmetik", "Lack", "Manikür", "Neel", "Neellack", "fleegen"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ທາເລ\u{eb1}ບມ\u{eb7}"),
keywords: &["ທາເລ\u{eb1}ບ", "ທາເລ\u{eb1}ບມ\u{eb7}"],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("nagų lakas"),
keywords: &[
"kosmetika",
"lakas",
"manikiūras",
"nagų lakas",
"priežiūra",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("nagu laka"),
keywords: &[
"kosmētika",
"laka",
"manikīrs",
"nagi",
"nagu laka",
"skaistumkopšana",
"ķermenis",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pani maikūkū"),
keywords: &[
"maikūkū",
"pani",
"tiaki",
"whakapai maikūkū",
"whakapaipai",
],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("лак за нокти"),
keywords: &["козметика", "лак за нокти", "маникир", "нега", "нокти"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("നെയിൽ പോളിഷ\u{d4d}"),
keywords: &[
"കോസ\u{d4d}\u{200c}മെറ\u{d4d}റിക\u{d4d}\u{200c}സ\u{d4d}",
"നഖം",
"നെയിൽ പോളിഷ\u{d4d}",
"പരിരക\u{d4d}ഷ",
"പോളിഷ\u{d4d}",
"മ\u{d3e}നിക\u{d4d}യ\u{d42}ർ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("хумсаа будах"),
keywords: &["арчлах", "косметик", "маникур", "хумс", "хумсаа будах"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("न\u{947}ल पॉलीश"),
keywords: &[
"नख",
"निगा",
"निगा राखण\u{947}",
"न\u{947}ल पॉलीश",
"पॉलीश",
"सौ\u{902}दर\u{94d}य प\u{94d}रसाधन\u{947}",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("pengilat kuku"),
keywords: &["kosmetik", "kuku", "pengilat", "penjagaan", "rias tangan"],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("nail polish"),
keywords: &[
"dwiefer",
"kosmetiċi",
"kura",
"manicure",
"nail polish",
"polish",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("လက\u{103a}သည\u{103a}း ဆ\u{102d}\u{102f}းဆေး"),
keywords: &[
"ပြ\u{102f}စ\u{102f}သည\u{103a}",
"လက\u{103a}သည\u{103a}း",
"လက\u{103a}သည\u{103a}း ဆ\u{102d}\u{102f}းဆေး",
"လက\u{103a}သည\u{103a}းည\u{103e}ပ\u{103a}ခြင\u{103a}း",
"အလ\u{103e}ပြင\u{103a}",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("neglelakk"),
keywords: &["kosmetikk", "manikyr", "negl", "neglelakk"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("नङ पालिस"),
keywords: &[
"नङ पालिस",
"नङ\u{94d}",
"पालिस",
"म\u{94d}यानिक\u{94d}य\u{941}र",
"श\u{94d}र\u{943}ङ\u{94d}गार",
"ह\u{947}रचाह",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("nagellak"),
keywords: &[
"cosmetica",
"lak",
"manicure",
"nagel",
"nagellak",
"verzorging",
],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("naglelakk"),
keywords: &["kosmetikk", "manikyr", "nagl", "naglelakk"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ନଖ ପ\u{b3e}ଲ\u{b3f}ସ\u{b4d}\u{200c}"),
keywords: &[
"ନଖ",
"ନଖ ପ\u{b3e}ଲ\u{b3f}ସ\u{b4d}\u{200c}",
"ପ\u{b3e}ଲ\u{b3f}ସ\u{b4d}\u{200c} କର\u{b3f}ବ\u{b3e}",
"ପ\u{b4d}ରସ\u{b3e}ଧନ",
"ଯତ\u{b4d}ନ",
"ହ\u{b3e}ତ ଓ ନଖର ସୌଦର\u{b4d}ଯ\u{b4d}ୟବର\u{b4d}ଦ\u{b4d}ଧକ ପର\u{b3f}ଚର\u{b4d}ଯ\u{b4d}ୟ\u{b3e}",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਨ\u{a47}ਲ ਪਾਲਿਸ\u{a3c}"),
keywords: &[
"ਨਹ\u{a41}\u{a70} ਦੀ ਦ\u{a47}ਖਭਾਲ",
"ਨਹ\u{a41}\u{a70} ਪਾਲਸ\u{a3c}",
"ਨ\u{a47}ਲ ਪਾਲਿਸ\u{a3c}",
"ਮ\u{a48}ਨੀਕਿਓਰ",
"ਸ\u{a3c}ਿ\u{a70}ਗਾਰ ਸਮਾਨ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("نوں پالش"),
keywords: &[
"احتیاط",
"سنگھار دا سامان",
"نوں پالش",
"ن\u{64f}و",
"پالش",
"ہتھاں دی صفائی",
],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Nél Pọ\u{301}lish"),
keywords: &[
"Kia",
"Kọsmẹ\u{301}tiks",
"Nel",
"Nél Pọ\u{301}lish",
"To Lúk Áftá Yọ Nel an Hand",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("lakier do paznokci"),
keywords: &[
"ciało",
"kosmetyki",
"lakier do paznokci",
"lakierować paznokcie",
"manicure",
"pielęgnacja",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("نوکانو رنګ"),
keywords: &[
"رنګ",
"مانیکیور",
"نوک",
"نوکانو رنګ",
"پاملرنه",
"کاسمټکیس",
],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("esmalte de unha"),
keywords: &["cosméticos", "esmalte", "esmalte de unha", "unhas"],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("verniz para as unhas"),
keywords: &[
"cosméticos",
"manicura",
"unha",
"verniz",
"verniz para as unhas",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("esmalte sillukunapaq"),
keywords: &[
"cosmeticos",
"esmalte",
"esmalte sillukunapaq",
"manicura",
"qhaway",
"sillu",
],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("ojă pe unghii"),
keywords: &[
"lac de unghii",
"manichiură",
"ojă",
"ojă pe unghii",
"unghie",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-578"),
keywords: &["E10-578"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("маникюр"),
keywords: &[
"кисть с лаком",
"лак для ногтей",
"маникюр",
"ногти",
"уход за руками",
],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("verini y’inzara"),
keywords: &[
"amavuta yo kwisiga",
"gusiga inzara",
"inzara",
"kwitak ku ntoki n’nzara",
"kwiyitaho",
"mubiri",
"verini y’inzara",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("ننهن پالش"),
keywords: &[
"رنگڻ",
"سنڀال",
"سينگار",
"ننهن",
"ننهن مکڻي",
"ننهن پالش",
"ننهن ڪتڻ",
],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("න\u{dd2}ය ආලේපන"),
keywords: &[
"අත\u{dca} සහ න\u{dd2}ය අලංක\u{dcf}රකරණය",
"අලේපන",
"න\u{dd2}ය ආලේපන",
"ව\u{dd2}ලව\u{dd4}න\u{dca}",
"සත\u{dca}ක\u{dcf}රය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("lakovanie nechtov"),
keywords: &[
"kozmetika",
"lak",
"lakovanie nechtov",
"manikúra",
"necht",
"starostlivosť",
"telo",
],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("lak za nohte"),
keywords: &[
"kozmetika",
"lak",
"lak za nohte",
"manikira",
"nega",
"noht",
"telo",
],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("dhalaalinta ciddiyaha"),
keywords: &[
"ciddi",
"daryeel",
"dhalaalinta",
"dhalaalinta ciddiyaha",
"qurxinta",
"qurxinta ciddiyaha",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("manikyr"),
keywords: &["kozmetikë", "kujdes", "lyej", "manikyr", "thua"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("лак за нокте"),
keywords: &["кoзмeтикa", "лак за нокте", "мaникир", "нeгa", "нoкaт"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["лак за нокте"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("lak za nokte"),
keywords: &["kozmetika", "lak za nokte", "manikir", "nega", "nokat"],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: None,
keywords: &["lak za nokte"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("nagellack"),
keywords: &["manikyr", "måla naglar", "nagellack", "smink"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("rangi ya kupaka kwenye kucha"),
keywords: &[
"kucha",
"mwili",
"rangi",
"rangi ya kupaka kwenye kucha",
"tengeneza kucha",
"tunza",
"vipodozi",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("நகச\u{bcd}ச\u{bbe}யம\u{bcd}"),
keywords: &[
"அழகுப\u{bcd}பொருட\u{bcd}கள\u{bcd}",
"நகச\u{bcd}ச\u{bbe}யம\u{bcd}",
"நகப\u{bcd}பூச\u{bcd}சு",
"நகம\u{bcd}",
"பர\u{bbe}மரிப\u{bcd}பு",
"மெனிக\u{bcd}கியூர\u{bcd}",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("గ\u{c4b}ర\u{c4d}ల రంగు"),
keywords: &["గ\u{c4b}ర\u{c4d}ల రంగు", "గ\u{c4b}ళ\u{c4d}లు", "రంగు"],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("лок барои нохунҳо"),
keywords: &[
"косметика",
"лок",
"лок барои нохунҳо",
"маникюр",
"нохун",
"ғамхорӣ",
],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ส\u{e35}ทาเล\u{e47}บ"),
keywords: &["ทำเล\u{e47}บ", "ส\u{e35}ทาเล\u{e47}บ"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("dyrnak lagy"),
keywords: &[
"dyrnak",
"dyrnak lagy",
"ideg",
"kosmetika",
"lak",
"manikýur",
],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("valivali ngeʻesinima"),
keywords: &["valivali ngeʻesinima"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("oje sürme"),
keywords: &["kozmetik", "manikür", "oje", "oje sürme", "tırnak"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("تىرناق بوياش"),
keywords: &[
"ئاسراش",
"تىرناق",
"تىرناق ئېلىش",
"تىرناق بوياش",
"خېنە",
"گىرىم",
],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("лак для нігтів"),
keywords: &[
"догляд",
"косметика",
"лак для нігтів",
"манікюр",
"ніготь",
"тіло",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("نیل پالش"),
keywords: &["جسم", "خیال", "ناخن", "نیل پالش", "پالش"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("tirnoq uchun lok"),
keywords: &[
"kosmetika",
"lok",
"manikyur",
"parvarish",
"tana",
"tirnoq",
"tirnoq uchun lok",
],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("sơn móng tay"),
keywords: &[
"chăm sóc",
"làm móng tay",
"móng tay",
"mỹ phẩm",
"sơn móng tay",
"đánh bóng",
],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("jakkal wew"),
keywords: &["faj", "jakkal wew", "jekkal", "kosmetik", "manikiir", "wew"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("ipolishi yeenzipho"),
keywords: &[
"i-manicure",
"ipolishi",
"ipolishi yeenzipho",
"izinto zokuchokoza",
"ukhathalelo",
"uzipho",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("Ikun Eekanna"),
keywords: &[
"Ikun Eekanna",
"aṣaralóge",
"kíkùn",
"àkun ojú",
"èékánná",
"ìtọ\u{301}jú",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("指甲油"),
keywords: &["修甲", "化妝品", "指甲", "指甲油", "護理"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("指甲油"),
keywords: &["修甲", "化妆品", "护理", "指甲", "指甲油"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("涂指甲油"),
keywords: &["指甲油", "涂指甲油", "美甲"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("指甲油"),
keywords: &["指甲油", "美甲"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("ikhyutheksi"),
keywords: &[
"amakhozimetiksi",
"ikhyutheksi",
"ukucwala izinzipho",
"ukunakekela",
"umzimba",
"uzipho",
],
},
],
};
#[doc = "💅🏻"]
pub const NAIL_POLISH_LIGHT_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "💅🏻",
codepoint: "1F485 1F3FB",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "nail polish: light skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "💅🏼"]
pub const NAIL_POLISH_MEDIUM_LIGHT_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "💅🏼",
codepoint: "1F485 1F3FC",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "nail polish: medium-light skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "💅🏽"]
pub const NAIL_POLISH_MEDIUM_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "💅🏽",
codepoint: "1F485 1F3FD",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "nail polish: medium skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "💅🏾"]
pub const NAIL_POLISH_MEDIUM_DARK_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "💅🏾",
codepoint: "1F485 1F3FE",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "nail polish: medium-dark skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "💅🏿"]
pub const NAIL_POLISH_DARK_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "💅🏿",
codepoint: "1F485 1F3FF",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "nail polish: dark skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "🤳"]
pub const SELFIE: crate::Emoji = crate::Emoji {
glyph: "🤳",
codepoint: "1F933",
status: crate::Status::FullyQualified,
introduction_version: 3f32,
name: "selfie",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("selfie"),
keywords: &["foon", "kamera", "selfie"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("እራስ ፎቶ ማንሳት"),
keywords: &["ስልክ", "እራስ ፎቶ ማንሳት", "ካሜራ"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("صورة سيلفي"),
keywords: &["سيلفي", "صورة سيلفي", "كاميرا", "هاتف"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("ছেল\u{9cd}\u{200c}ফি"),
keywords: &["কেমেৰ\u{9be}", "ছেল\u{9cd}\u{200c}ফি", "ফোন"],
},
#[cfg(feature = "ast")]
crate::Annotation {
lang: "ast",
tts: Some("selfie"),
keywords: &["selfie"],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("özçəkim"),
keywords: &["foto", "selfi", "özçəkim", "şəkil"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("сэлфі"),
keywords: &["сэлфі", "тэлефон", "фота"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("селфи"),
keywords: &["камера", "селфи", "телефон"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("সেলফি"),
keywords: &["ক\u{9cd}য\u{9be}মের\u{9be}", "ফোন", "সেলফি"],
},
#[cfg(feature = "br")]
crate::Annotation {
lang: "br",
tts: Some("emboltred"),
keywords: &["emboltred"],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("selfi"),
keywords: &["fotoaparat", "selfi", "slikanje", "telefon"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("autofoto"),
keywords: &["autofoto", "càmera", "foto", "mòbil", "selfie"],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎣᏮᏌ ᏗᏓᏟᎶᏍᏔᏅᎢ"),
keywords: &["ᎣᏮᏌ ᏗᏓᏟᎶᏍᏔᏅᎢ", "ᏗᏟᎶᏗᏍᎩᏍᎩ", "ᏗᏟᏃᎮᏗᎢ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("selfie"),
keywords: &["fotoaparát", "selfie", "selfíčko", "telefon"],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("hunlun"),
keywords: &["camera", "ffôn", "hunlun"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("selfie"),
keywords: &[
"arm",
"foto",
"fotografere",
"fotografi",
"kamera",
"mobil",
"mobiltelefon",
"selfie",
"tager billeder",
"tager selfies",
"telefon",
],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Selfie"),
keywords: &["Selfie", "Smartphone"],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("selfie"),
keywords: &[
"selfie",
"κάμερα",
"σέλφι",
"τηλέφωνο",
"φωτογραφική μηχανή",
],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("selfie"),
keywords: &["camera", "phone", "selfie"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("selfi"),
keywords: &["autofoto", "cámara", "selfi", "selfie", "teléfono"],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("selfie"),
keywords: &["autofoto", "cámara", "selfie", "teléfono"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("selfi"),
keywords: &["celular", "cámara", "móvil", "selfi", "teléfono"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("selfi"),
keywords: &["selfi"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("selfi"),
keywords: &["kaamera", "selfi", "telefon"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("autoargazkia"),
keywords: &["autoargazki", "autoargazkia", "kamera", "telefono"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("خودگرفت"),
keywords: &["خودگرفت", "سلفی", "عکس از خود"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("selfie"),
keywords: &["kamera", "omakuva", "puhelin", "selfie"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("selfie"),
keywords: &["camera", "mobile phone", "selfie", "telepono"],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("sjálvsmynd"),
keywords: &["selfie", "sjálvsmynd"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("selfie"),
keywords: &["appareil photo", "selfie", "téléphone"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("autophoto"),
keywords: &["appareil photo", "autophoto", "selfie", "téléphone"],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("féinín"),
keywords: &["ceamara", "féinín", "fón", "guthán"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("fèineag"),
keywords: &["camara", "fèineag", "fòn"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("autofoto"),
keywords: &["autofoto", "cámara", "foto", "teléfono"],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("સ\u{ac7}લ\u{acd}ફી"),
keywords: &["ક\u{ac5}મરા", "ફોન", "સ\u{ac7}લ\u{acd}ફી"],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("hoton kanka"),
keywords: &["hoton kanka", "kyamara", "waya"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("יד מצלמת סלפי"),
keywords: &["טלפון", "יד מצלמת סלפי", "מצלמה", "סלפי"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("स\u{947}ल\u{94d}फ\u{93c}ी"),
keywords: &["क\u{948}मरा", "फ\u{93c}ोन", "स\u{947}ल\u{94d}फ\u{93c}ी"],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("selfie"),
keywords: &["fotoaparat", "selfie", "slikanje", "telefon"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("szelfi"),
keywords: &["fényképező", "szelfi", "telefon"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("սելֆի"),
keywords: &["լուսանկար", "հեռախոս", "սելֆի"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("selfie"),
keywords: &["foto selfie", "kamera", "ponsel", "selfi", "selfie"],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("selfi"),
keywords: &["ekwenti", "igwefòto", "selfi"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("sjálfa"),
keywords: &["myndavél", "sjálfa"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("selfie"),
keywords: &["autoscatto", "fotocamera", "selfie", "telefono"],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("セルフィー"),
keywords: &["スマホ", "セルフィー", "自分撮り", "自撮り"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("foto selfi"),
keywords: &["foto selfi", "hp", "kamera", "selfi"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("სელფი"),
keywords: &["კამერა", "სელფი", "ტელეფონი"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("селфи"),
keywords: &["селфи"],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("selfie"),
keywords: &[
"arm",
"foto",
"fotograf",
"fotograferer",
"fotografi",
"kamera",
"mobil",
"mobiltelefon",
"selfie",
"tager billeder",
"telefon",
],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some("selfie"),
keywords: &[
"selfie",
"កាមេរ\u{17c9}ា",
"ថត",
"ទ\u{17bc}រស\u{17d0}ព\u{17d2}ទ",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಸ\u{cc6}ಲ\u{ccd}ಫ\u{cbf}"),
keywords: &["ಕ\u{ccd}ಯಾಮರಾ", "ಫೋನ\u{ccd}", "ಸ\u{cc6}ಲ\u{ccd}ಫ\u{cbf}"],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("셀카"),
keywords: &["사진", "셀카", "셀피", "카메라"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("स\u{947}ल\u{94d}फी"),
keywords: &["क\u{945}म\u{947}रा", "फोन", "स\u{947}ल\u{94d}फी"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("селфи"),
keywords: &["селфи", "сүрөт", "телефон"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Selfie"),
keywords: &["Handy", "Kamera", "Selfie"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ເຊວຟ\u{eb5}"),
keywords: &[
"ກ\u{ec9}ອງຖ\u{ec8}າຍຮ\u{eb9}ບ",
"ເຊວຟ\u{eb5}",
"ໂທລະສ\u{eb1}ບ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("asmenukė"),
keywords: &["asmenukė", "fotoaparatas", "telefonas"],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("pašfoto"),
keywords: &["kamera", "pašfoto", "tālrunis"],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("kiriāhua"),
keywords: &["kiriāhua", "kāmera", "waea"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("селфи"),
keywords: &["камера", "селфи", "телефон"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("സെൽഫി"),
keywords: &["ക\u{d4d}യ\u{d3e}മറ", "ഫോൺ", "സെൽഫി"],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("селфи"),
keywords: &["селфи"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("स\u{947}ल\u{94d}फी"),
keywords: &["क\u{945}म\u{947}रा", "फोन", "स\u{947}ल\u{94d}फी"],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("swafoto"),
keywords: &["kamera", "selfie", "swafoto", "telefon"],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("stessu"),
keywords: &["kamera", "stessu", "telefown"],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ဆ\u{1032}လ\u{103a}ဖ\u{102e}"),
keywords: &[
"ကင\u{103a}မရာ",
"ဆ\u{1032}လ\u{103a}ဖ\u{102e}",
"ဖ\u{102f}န\u{103a}း",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("selfie"),
keywords: &["kamera", "selfie", "telefon"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("स\u{947}ल\u{94d}फी"),
keywords: &["क\u{94d}याम\u{947}रा", "फोन", "स\u{947}ल\u{94d}फी"],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("selfie"),
keywords: &["camera", "selfie", "telefoon"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("selfie"),
keywords: &["kamera", "selfie", "telefon"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ସେଲଫ\u{b3f}"),
keywords: &[
"କ\u{b4d}ୟ\u{b3e}ମେର\u{b3e}",
"ଫୋନ\u{b4d}\u{200c}",
"ସେଲଫ\u{b3f}",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਸ\u{a48}ਲਫੀ"),
keywords: &["ਕ\u{a48}ਮਰਾ", "ਫ\u{a3c}\u{a4b}ਨ", "ਸ\u{a48}ਲਫੀ"],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("سیلفی"),
keywords: &["سیلفی", "فون", "کیمرہ"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Sẹ\u{301}lfi"),
keywords: &["Fon", "Kámẹ\u{301}ra", "Sẹ\u{301}lfi"],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("selfie"),
keywords: &["aparat", "selfie", "telefon", "zdjęcie"],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("سيلفي"),
keywords: &["سيلفي", "فون", "کامره"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("selfie"),
keywords: &["camera", "celular", "foto", "selfie", "smartphone"],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("↑↑↑"),
keywords: &["câmara", "selfie", "telemóvel"],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("selfie"),
keywords: &["camara", "selfie", "telefono"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("selfie"),
keywords: &["aparat de fotografiat", "selfie", "telefon"],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-1045"),
keywords: &["E10-1045"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("селфи"),
keywords: &["селфи", "снимок", "телефон"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("serifi"),
keywords: &["kamera", "kwifotora", "serifi", "telefone"],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("سيلفي"),
keywords: &["سيلفي", "فون", "ڪئميرا"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("සෙල\u{dca}ෆ\u{dd2}"),
keywords: &["කැමර\u{dcf}ව", "ද\u{dd4}රකථනය", "සෙල\u{dca}ෆ\u{dd2}"],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("selfie"),
keywords: &["selfie", "telefón"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("selfi"),
keywords: &["fotoaparat", "kamera", "mobilni telefon", "selfi"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("sawir iska-qaadis"),
keywords: &["kamarad", "sawir iska-qaadis", "taleefoon"],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("foto vetjake"),
keywords: &["foto vetjake", "kamerë", "sellfi", "telefon", "vetjake"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("селфи"),
keywords: &["кaмерa", "селфи", "телефон"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["селфи"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("selfi"),
keywords: &["kamera", "selfi", "telefon"],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: None,
keywords: &["selfi"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("selfie"),
keywords: &["kamera", "selfie", "telefon"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("selfi"),
keywords: &["kamera", "selfi", "simu"],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("செல\u{bcd}ஃபி"),
keywords: &["கேமர\u{bbe}", "செல\u{bcd}ஃபி", "மொபைல\u{bcd}"],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("స\u{c46}ల\u{c4d}ఫ\u{c40}"),
keywords: &[
"క\u{c46}మ\u{c46}ర\u{c3e}",
"ఫ\u{c4b}న\u{c4d}",
"స\u{c46}ల\u{c4d}ఫ\u{c40}",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("селфи"),
keywords: &["камера", "селфи", "телефон"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("เซลฟ\u{e35}\u{e48}"),
keywords: &["กล\u{e49}อง", "ม\u{e37}อถ\u{e37}อ", "เซลฟ\u{e35}\u{e48}"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("selfi"),
keywords: &["kamera", "selfi", "telefon"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("faitaaʻi pē kita"),
keywords: &["faitaaʻi pē kita"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("selfie"),
keywords: &["kamera", "selfie", "telefon"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئۆزتارتىم"),
keywords: &["ئۆزتارتىم", "تېلېفون", "كامېرا"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("селфі"),
keywords: &["автопортрет", "камера", "селфі", "телефон"],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("سیلفی"),
keywords: &["تصویر", "سیلفی", "فون", "کیمرہ"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("selfi"),
keywords: &["kamera", "selfi", "telefon"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("tự sướng"),
keywords: &["máy ảnh", "tự sướng", "điện thoại"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("selfie"),
keywords: &["nataalukaay", "selfie", "telefon"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("iselfi"),
keywords: &["ifowuni", "ikhamera", "iselfi"],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("Yiya Ara ẹni"),
keywords: &[
"Yiya Ara ẹni",
"fóònù",
"fọ\u{301}tò àdáyà",
"kámẹ\u{301}rà",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("自拍"),
keywords: &["手機", "相機", "自拍"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("自拍"),
keywords: &["手机", "相机", "自拍"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("自拍"),
keywords: &["手机", "相机", "自拍"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("自拍"),
keywords: &["拍", "自", "自拍"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("uzishuthe"),
keywords: &["ifoni", "ikhamela", "ukuzishutha", "uzishuthe"],
},
],
};
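// Editorial usage sketch (not part of the generated data): every
// `crate::Emoji` above is plain `const` data, so a caller can scan its
// `cfg`-gated `annotations` slice directly. The helper name `tts_for` is a
// hypothetical illustration, not an API of this crate.
#[allow(dead_code)]
fn tts_for<'e>(emoji: &'e crate::Emoji, lang: &str) -> Option<&'e str> {
    emoji
        .annotations
        .iter()
        .find(|annotation| annotation.lang == lang)
        .and_then(|annotation| annotation.tts)
}
// With the `en` feature enabled, `tts_for(&SELFIE, "en")` yields
// Some("selfie"); for the skin-tone variants below, whose `annotations`
// slices are empty, it yields None.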
#[doc = "🤳🏻"]
pub const SELFIE_LIGHT_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "🤳🏻",
codepoint: "1F933 1F3FB",
status: crate::Status::FullyQualified,
introduction_version: 3f32,
name: "selfie: light skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "🤳🏼"]
pub const SELFIE_MEDIUM_LIGHT_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "🤳🏼",
codepoint: "1F933 1F3FC",
status: crate::Status::FullyQualified,
introduction_version: 3f32,
name: "selfie: medium-light skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "🤳🏽"]
pub const SELFIE_MEDIUM_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "🤳🏽",
codepoint: "1F933 1F3FD",
status: crate::Status::FullyQualified,
introduction_version: 3f32,
name: "selfie: medium skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "🤳🏾"]
pub const SELFIE_MEDIUM_DARK_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "🤳🏾",
codepoint: "1F933 1F3FE",
status: crate::Status::FullyQualified,
introduction_version: 3f32,
name: "selfie: medium-dark skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
#[doc = "🤳🏿"]
pub const SELFIE_DARK_SKIN_TONE: crate::Emoji = crate::Emoji {
glyph: "🤳🏿",
codepoint: "1F933 1F3FF",
status: crate::Status::FullyQualified,
introduction_version: 3f32,
name: "selfie: dark skin tone",
group: "People & Body",
subgroup: "hand-prop",
is_variant: false,
variants: &[],
annotations: &[],
};
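// Editorial sketch (not part of the generated data): the five skin-tone
// variants of an emoji are emitted as independent consts with empty
// `variants` slices, so grouping them is left to the caller. A hypothetical
// light-to-dark table for the selfie family:
#[allow(dead_code)]
const SELFIE_SKIN_TONES: [&crate::Emoji; 5] = [
    &SELFIE_LIGHT_SKIN_TONE,
    &SELFIE_MEDIUM_LIGHT_SKIN_TONE,
    &SELFIE_MEDIUM_SKIN_TONE,
    &SELFIE_MEDIUM_DARK_SKIN_TONE,
    &SELFIE_DARK_SKIN_TONE,
];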
| 30.523938 | 109 | 0.395763 |
1e6dc050a831f35f864a14542a3737b24cae28f1 | 109,076 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
Operations_List(#[from] operations::list::Error),
#[error(transparent)]
Workspaces_Get(#[from] workspaces::get::Error),
#[error(transparent)]
Workspaces_CreateOrUpdate(#[from] workspaces::create_or_update::Error),
#[error(transparent)]
Workspaces_Update(#[from] workspaces::update::Error),
#[error(transparent)]
Workspaces_Delete(#[from] workspaces::delete::Error),
#[error(transparent)]
WorkspaceFeatures_List(#[from] workspace_features::list::Error),
#[error(transparent)]
Workspaces_ListByResourceGroup(#[from] workspaces::list_by_resource_group::Error),
#[error(transparent)]
Workspaces_ListKeys(#[from] workspaces::list_keys::Error),
#[error(transparent)]
Workspaces_ResyncKeys(#[from] workspaces::resync_keys::Error),
#[error(transparent)]
Usages_List(#[from] usages::list::Error),
#[error(transparent)]
VirtualMachineSizes_List(#[from] virtual_machine_sizes::list::Error),
#[error(transparent)]
Quotas_Update(#[from] quotas::update::Error),
#[error(transparent)]
Quotas_List(#[from] quotas::list::Error),
#[error(transparent)]
Workspaces_ListBySubscription(#[from] workspaces::list_by_subscription::Error),
#[error(transparent)]
MachineLearningCompute_ListByWorkspace(#[from] machine_learning_compute::list_by_workspace::Error),
#[error(transparent)]
MachineLearningCompute_Get(#[from] machine_learning_compute::get::Error),
#[error(transparent)]
MachineLearningCompute_CreateOrUpdate(#[from] machine_learning_compute::create_or_update::Error),
#[error(transparent)]
MachineLearningCompute_Update(#[from] machine_learning_compute::update::Error),
#[error(transparent)]
MachineLearningCompute_Delete(#[from] machine_learning_compute::delete::Error),
#[error(transparent)]
MachineLearningCompute_ListNodes(#[from] machine_learning_compute::list_nodes::Error),
#[error(transparent)]
MachineLearningCompute_ListKeys(#[from] machine_learning_compute::list_keys::Error),
#[error(transparent)]
ListSkus(#[from] list_skus::Error),
#[error(transparent)]
PrivateEndpointConnections_Get(#[from] private_endpoint_connections::get::Error),
#[error(transparent)]
PrivateEndpointConnections_Put(#[from] private_endpoint_connections::put::Error),
#[error(transparent)]
PrivateEndpointConnections_Delete(#[from] private_endpoint_connections::delete::Error),
#[error(transparent)]
PrivateLinkResources_ListByWorkspace(#[from] private_link_resources::list_by_workspace::Error),
#[error(transparent)]
LinkedWorkspaces_List(#[from] linked_workspaces::list::Error),
#[error(transparent)]
LinkedWorkspaces_Get(#[from] linked_workspaces::get::Error),
#[error(transparent)]
LinkedWorkspaces_Create(#[from] linked_workspaces::create::Error),
#[error(transparent)]
LinkedWorkspaces_Delete(#[from] linked_workspaces::delete::Error),
}
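// Editorial sketch (not generator output): each `#[from]` above lets `?`
// convert a per-operation error into this crate-level `Error`, so calls to
// different operations can share one signature. A hypothetical combination:
#[allow(dead_code)]
async fn get_then_delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    workspace_name: &str,
) -> std::result::Result<(), Error> {
    // `?` wraps a failure here as Error::Workspaces_Get ...
    let _workspace =
        workspaces::get(operation_config, subscription_id, resource_group_name, workspace_name).await?;
    // ... and here as Error::Workspaces_Delete.
    let _ = workspaces::delete(operation_config, subscription_id, resource_group_name, workspace_name).await?;
    Ok(())
}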
pub mod operations {
use super::{models, API_VERSION};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.MachineLearningServices/operations",
operation_config.base_path(),
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
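// Editorial usage sketch (not generator output): the smallest caller of the
// module above. It assumes an `OperationConfig` has already been constructed
// elsewhere in this crate; only the call shape is shown.
#[allow(dead_code)]
async fn list_provider_operations(
    operation_config: &crate::OperationConfig,
) -> std::result::Result<models::OperationListResult, operations::list::Error> {
    // Issues one GET against .../providers/Microsoft.MachineLearningServices/operations,
    // attaching a bearer token when a credential is configured.
    operations::list(operation_config).await
}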
pub mod workspaces {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<models::Workspace, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
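    // Editorial sketch (not generator output): a hypothetical wrapper showing
    // how the typed error above separates "workspace not found" from other
    // failures; any non-200 payload surfaces as
    // `get::Error::DefaultResponse { status_code, value }`.
    #[allow(dead_code)]
    pub async fn get_if_exists(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
    ) -> std::result::Result<Option<models::Workspace>, get::Error> {
        match get(operation_config, subscription_id, resource_group_name, workspace_name).await {
            Ok(workspace) => Ok(Some(workspace)),
            Err(get::Error::DefaultResponse { status_code, .. })
                if status_code == http::StatusCode::NOT_FOUND =>
            {
                Ok(None)
            }
            Err(other) => Err(other),
        }
    }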
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
parameters: &models::Workspace,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Workspace),
Created201(models::Workspace),
Accepted202,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
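    // Editorial sketch (not generator output): `create_or_update` is a
    // long-running PUT, so its `Response` enum above must be matched rather
    // than unwrapped. The per-arm notes follow the usual Azure Resource
    // Manager conventions and are assumptions here, not generator guarantees.
    #[allow(dead_code)]
    pub async fn ensure_workspace(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
        parameters: &models::Workspace,
    ) -> std::result::Result<Option<models::Workspace>, create_or_update::Error> {
        let response = create_or_update(
            operation_config,
            subscription_id,
            resource_group_name,
            workspace_name,
            parameters,
        )
        .await?;
        Ok(match response {
            // 200: an existing workspace was updated in place.
            create_or_update::Response::Ok200(workspace) => Some(workspace),
            // 201: a new workspace was created synchronously.
            create_or_update::Response::Created201(workspace) => Some(workspace),
            // 202: accepted for asynchronous provisioning; no body yet, so the
            // caller would poll afterwards (polling is out of scope here).
            create_or_update::Response::Accepted202 => None,
        })
    }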
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
parameters: &models::WorkspaceUpdateParameters,
) -> std::result::Result<models::Workspace, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
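    /// Deletes a machine learning workspace. The service may answer with 200, 202 (deletion
    /// accepted and completing asynchronously), or 204 (no content), surfaced as the
    /// corresponding `delete::Response` variants.
    ///
    /// A minimal usage sketch (placeholder names, `config` assumed constructed elsewhere):
    ///
    /// ```ignore
    /// match delete(&config, "subscription-id", "resource-group", "workspace-name").await? {
    ///     delete::Response::Ok200 => println!("deleted"),
    ///     delete::Response::Accepted202 => println!("deletion accepted, completing asynchronously"),
    ///     delete::Response::NoContent204 => println!("no content returned"),
    /// }
    /// ```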
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
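    /// Lists all machine learning workspaces in the given resource group. `skiptoken` is an
    /// opaque continuation token returned by a previous page of results; pass `None` for the
    /// first page.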
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
skiptoken: Option<&str>,
) -> std::result::Result<models::WorkspaceListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(skiptoken) = skiptoken {
url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::WorkspaceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
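    /// Lists the access keys associated with a workspace (see `models::ListWorkspaceKeysResult`
    /// for the exact shape of the result).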
pub async fn list_keys(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<models::ListWorkspaceKeysResult, list_keys::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/listKeys",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(list_keys::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_keys::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_keys::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_keys::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ListWorkspaceKeysResult =
serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_keys::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_keys {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
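    /// Resynchronizes the keys associated with a workspace; returns `Ok(())` on success.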
pub async fn resync_keys(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<(), resync_keys::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/resyncKeys",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(resync_keys::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(resync_keys::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(resync_keys::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(resync_keys::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| resync_keys::Error::DeserializeError(source, rsp_body.clone()))?;
Err(resync_keys::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod resync_keys {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
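    /// Lists all machine learning workspaces in the subscription. `skiptoken` is an opaque
    /// continuation token for fetching subsequent pages; pass `None` for the first page.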
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
skiptoken: Option<&str>,
) -> std::result::Result<models::WorkspaceListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/workspaces",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(skiptoken) = skiptoken {
url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::WorkspaceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_subscription {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod workspace_features {
use super::{models, API_VERSION};
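    /// Lists the AML user features available in a workspace.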
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<models::ListAmlUserFeatureResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/features",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ListAmlUserFeatureResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod usages {
use super::{models, API_VERSION};
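    /// Lists the current resource usage for the subscription in the given Azure location.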
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
) -> std::result::Result<models::ListUsagesResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/usages",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ListUsagesResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod virtual_machine_sizes {
use super::{models, API_VERSION};
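    /// Lists the virtual machine sizes available in the given Azure location.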
pub async fn list(
operation_config: &crate::OperationConfig,
location: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineSizeListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/vmSizes",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineSizeListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod quotas {
use super::{models, API_VERSION};
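    /// Updates workspace quotas for the subscription in the given location.
    ///
    /// A minimal usage sketch, assuming a `models::QuotaUpdateParameters` value has been
    /// assembled elsewhere (placeholder names):
    ///
    /// ```ignore
    /// let result = update(&config, "westus2", &quota_params, "subscription-id").await?;
    /// ```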
pub async fn update(
operation_config: &crate::OperationConfig,
location: &str,
parameters: &models::QuotaUpdateParameters,
subscription_id: &str,
) -> std::result::Result<models::UpdateWorkspaceQuotasResult, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/updateQuotas",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::UpdateWorkspaceQuotasResult =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
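    /// Lists the currently assigned workspace quotas for the subscription in the given location.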
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
) -> std::result::Result<models::ListWorkspaceQuotas, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/Quotas",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ListWorkspaceQuotas =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod machine_learning_compute {
use super::{models, API_VERSION};
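    /// Lists the compute targets in a workspace. `skiptoken` is an opaque continuation token
    /// for fetching subsequent pages; pass `None` for the first page.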
pub async fn list_by_workspace(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
skiptoken: Option<&str>,
) -> std::result::Result<models::PaginatedComputeResourcesList, list_by_workspace::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_workspace::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_workspace::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(skiptoken) = skiptoken {
url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_workspace::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_workspace::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PaginatedComputeResourcesList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_workspace::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_workspace {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
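    /// Gets the details of a single compute target in a workspace.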
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
compute_name: &str,
) -> std::result::Result<models::ComputeResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
compute_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ComputeResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
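    /// Creates or updates a compute target in a workspace (HTTP PUT): `Ok200` signals that an
    /// existing compute was updated, `Created201` that a new one was provisioned.
    ///
    /// A minimal usage sketch, assuming a `models::ComputeResource` payload has been assembled
    /// elsewhere (placeholder names):
    ///
    /// ```ignore
    /// match create_or_update(&config, "subscription-id", "resource-group", "workspace-name", "cpu-cluster", &compute).await? {
    ///     create_or_update::Response::Ok200(c) => println!("updated compute: {:?}", c),
    ///     create_or_update::Response::Created201(c) => println!("created compute: {:?}", c),
    /// }
    /// ```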
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
compute_name: &str,
parameters: &models::ComputeResource,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
compute_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ComputeResource = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::ComputeResource = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::ComputeResource),
Created201(models::ComputeResource),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
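    /// Updates an existing compute target in place (HTTP PATCH), e.g. to change its scale
    /// settings via `models::ClusterUpdateParameters`.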
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
compute_name: &str,
parameters: &models::ClusterUpdateParameters,
) -> std::result::Result<models::ComputeResource, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
compute_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ComputeResource =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
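    /// Deletes or detaches a compute target. `underlying_resource_action` controls whether the
    /// underlying resource is deleted together with its record ("Delete") or only detached from
    /// the workspace ("Detach").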
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
compute_name: &str,
underlying_resource_action: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
compute_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut()
.append_pair("underlyingResourceAction", underlying_resource_action);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
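    /// Lists per-node details for an AML compute target.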
pub async fn list_nodes(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
compute_name: &str,
) -> std::result::Result<models::AmlComputeNodesInformation, list_nodes::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/listNodes",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
compute_name
);
let mut url = url::Url::parse(url_str).map_err(list_nodes::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_nodes::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_nodes::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_nodes::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AmlComputeNodesInformation =
serde_json::from_slice(rsp_body).map_err(|source| list_nodes::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| list_nodes::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_nodes::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_nodes {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
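    /// Retrieves the secrets for a compute target (the concrete shape of
    /// `models::ComputeSecrets` depends on the compute type).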
pub async fn list_keys(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
compute_name: &str,
) -> std::result::Result<models::ComputeSecrets, list_keys::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/listKeys",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
compute_name
);
let mut url = url::Url::parse(url_str).map_err(list_keys::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_keys::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_keys::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_keys::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ComputeSecrets =
serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_keys::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_keys {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
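/// Lists the SKUs available for Machine Learning Services resources in the subscription.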
pub async fn list_skus(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::SkuListResult, list_skus::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/workspaces/skus",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_skus::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_skus::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_skus::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_skus::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SkuListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_skus::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| list_skus::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_skus::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_skus {
use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub mod private_endpoint_connections {
use super::{models, API_VERSION};
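    /// Gets the private endpoint connection with the given name on a workspace.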
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
private_endpoint_connection_name: &str,
) -> std::result::Result<models::PrivateEndpointConnection, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnection =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
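    /// Creates or updates a private endpoint connection on a workspace (commonly used to
    /// approve or reject a pending connection).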
pub async fn put(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
private_endpoint_connection_name: &str,
properties: &models::PrivateEndpointConnection,
) -> std::result::Result<models::PrivateEndpointConnection, put::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(put::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(put::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(properties).map_err(put::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(put::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(put::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnection =
serde_json::from_slice(rsp_body).map_err(|source| put::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| put::Error::DeserializeError(source, rsp_body.clone()))?;
Err(put::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod put {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
private_endpoint_connection_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
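// Calling sketch (module path and config construction assumed, not shown in
// this file): each operation above is a plain async fn, so a caller awaits it
// with an `OperationConfig` that supplies the HTTP client, base path, API
// version, and optional token credential:
//
//     let conn = private_endpoint_connections::get(
//         &config, subscription_id, resource_group, workspace_name, name,
//     ).await?;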
pub mod private_link_resources {
use super::{models, API_VERSION};
pub async fn list_by_workspace(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<models::PrivateLinkResourceListResult, list_by_workspace::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateLinkResources",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_workspace::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_workspace::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_workspace::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_workspace::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateLinkResourceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_workspace::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
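    // Unlike the other operations in this file, non-200 responses here have no
    // modeled default error body, so they surface as `UnexpectedResponse`
    // carrying the raw bytes rather than a deserialized `ErrorResponse`.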
pub mod list_by_workspace {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod linked_workspaces {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<Vec<models::LinkedWorkspace>, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/Microsoft.MachineLearningServices/workspaces/{}/linkedWorkspaces",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::LinkedWorkspace> =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
link_name: &str,
) -> std::result::Result<models::LinkedWorkspace, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/Microsoft.MachineLearningServices/workspaces/{}/linkedWorkspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
link_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LinkedWorkspace =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
link_name: &str,
parameters: &models::LinkedWorkspaceDto,
) -> std::result::Result<models::LinkedWorkspace, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/Microsoft.MachineLearningServices/workspaces/{}/linkedWorkspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
link_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LinkedWorkspace =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
link_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/Microsoft.MachineLearningServices/workspaces/{}/linkedWorkspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
link_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::MachineLearningServiceError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::MachineLearningServiceError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
| 48.008803 | 140 | 0.593769 |
e6abdef903f632675214f4a6b5098d3b8c6b170b | 2,034 | #[doc = "Reader of register ACR"]
pub type R = crate::R<u32, super::ACR>;
#[doc = "Writer for register ACR"]
pub type W = crate::W<u32, super::ACR>;
#[doc = "Register ACR `reset()`'s with value 0x0100"]
impl crate::ResetValue for super::ACR {
#[inline(always)]
fn reset_value() -> Self::Ux {
0x0100
}
}
#[doc = "Reader of field `TSON`"]
pub type TSON_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSON`"]
pub struct TSON_W<'a> {
w: &'a mut W,
}
impl<'a> TSON_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `IBCTL`"]
pub type IBCTL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IBCTL`"]
pub struct IBCTL_W<'a> {
w: &'a mut W,
}
impl<'a> IBCTL_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
self.w
}
}
impl R {
#[doc = "Bit 4 - Temperature Sensor On"]
#[inline(always)]
pub fn tson(&self) -> TSON_R {
TSON_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bits 8:9 - ADC Bias Current Control"]
#[inline(always)]
pub fn ibctl(&self) -> IBCTL_R {
IBCTL_R::new(((self.bits >> 8) & 0x03) as u8)
}
}
impl W {
#[doc = "Bit 4 - Temperature Sensor On"]
#[inline(always)]
pub fn tson(&mut self) -> TSON_W {
TSON_W { w: self }
}
#[doc = "Bits 8:9 - ADC Bias Current Control"]
#[inline(always)]
pub fn ibctl(&mut self) -> IBCTL_W {
IBCTL_W { w: self }
}
}
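// Usage sketch (peripheral handle and register path assumed; `read`/`modify`
// come from the generated register type, not this file):
//
//     // Turn the temperature sensor on without disturbing other bits.
//     periph.acr.modify(|_, w| w.tson().set_bit());
//     // IBCTL takes raw bits, hence the `unsafe` write proxy above.
//     periph.acr.modify(|_, w| unsafe { w.ibctl().bits(0b01) });
//     let sensor_on = periph.acr.read().tson().bit_is_set();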
| 27.486486 | 84 | 0.536382 |
e5a195e7c1664f4fcb7da9abdda1e6e1789b0b4b | 603 | #![feature(test)]
#![feature(once_cell)]
extern crate test;
mod imp;
#[cfg(test)]
#[allow(dead_code, unused_imports)]
mod testing;
mod error;
pub type ArcResult<T> = std::result::Result<T, crate::error::NouArcError>;
pub use error::NouArcError;
pub use imp::read_archive_data_from_directory::read_archive_data_from_directory;
pub use imp::read_archive::read_archive;
pub use imp::write_archive::write_archive;
pub use imp::structs::archive_data::{ArchiveData, ArchiveDataItem};
pub use imp::structs::archive_options::ArchiveOptions;
pub use imp::structs::archive_options::ArchiveOptionsBuilder;
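// API sketch (argument and return types assumed, since they live in `imp`
// rather than here): read a directory into `ArchiveData`, then write it out
// with options built via `ArchiveOptionsBuilder`; fallible calls return
// `ArcResult<T>`, whose error type is `NouArcError`.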
| 24.12 | 80 | 0.777778 |
ffbf79ef4958efe9d7b744012bb0e4793d3d5695 | 195 | use num::complex::Complex;
fn main() {
let a = Complex { re: 2.1, im: -1.2 };
let b = Complex::new(11.1, 22.2);
let result = a + b;
println!("{} + {}i", result.re, result.im);
}
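// Build note: assumes the `num` crate is declared in Cargo.toml (version
// assumed), e.g. `num = "0.4"`.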
| 21.666667 | 47 | 0.512821 |
ab6beedbb0072c6a99a5d630cae6727a2f09c619 | 4,190 | use bollard::auth::DockerCredentials;
use bollard::errors::Error;
use bollard::image::*;
use bollard::models::*;
use bollard::system::*;
use bollard::Docker;
use chrono::{Utc};
use futures_util::future;
use futures_util::stream::select;
use futures_util::stream::StreamExt;
use futures_util::stream::TryStreamExt;
use tokio::runtime::Runtime;
#[macro_use]
pub mod common;
use common::*;
#[derive(Debug)]
enum Results {
CreateImageResults(CreateImageInfo),
EventsResults(SystemEventsResponse),
}
async fn events_test(docker: Docker) -> Result<(), Error> {
let image = if cfg!(windows) {
format!("{}hello-world:nanoserver", registry_http_addr())
} else {
format!("{}hello-world:linux", registry_http_addr())
};
let stream = docker.events(None::<EventsOptions<String>>);
let stream2 = docker.create_image(
Some(CreateImageOptions {
from_image: &image[..],
..Default::default()
}),
None,
if cfg!(windows) {
None
} else {
Some(DockerCredentials {
username: Some("bollard".to_string()),
password: std::env::var("REGISTRY_PASSWORD").ok(),
..Default::default()
})
},
);
let vec = select(
stream.map_ok(|events_results| Results::EventsResults(events_results)),
stream2.map_ok(|image_results| Results::CreateImageResults(image_results)),
)
.skip_while(|value| match value {
Ok(Results::EventsResults(_)) => future::ready(false),
_ => future::ready(true),
})
.take(1)
.try_collect::<Vec<_>>()
.await?;
assert!(vec
.iter()
.map(|value| {
println!("{:?}", value);
value
})
.any(|value| match value {
Results::EventsResults(SystemEventsResponse { typ: _, .. }) => true,
_ => false,
}));
Ok(())
}
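// Note on the stream plumbing above: the image pull is only a side effect
// that guarantees the daemon emits at least one event; `select` merges the
// otherwise endless event stream with the pull progress, `skip_while` drops
// items until the first system event arrives, and `take(1)` ends the
// combined stream so the test terminates.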
async fn events_until_forever_test(docker: Docker) -> Result<(), Error> {
let image = if cfg!(windows) {
format!("{}hello-world:nanoserver", registry_http_addr())
} else {
format!("{}hello-world:linux", registry_http_addr())
};
let start_time = Utc::now();
let stream = docker.events(Some(EventsOptions::<String> {
since: Some(start_time),
until: None,
..Default::default()
}));
let stream2 = docker.create_image(
Some(CreateImageOptions {
from_image: &image[..],
..Default::default()
}),
None,
if cfg!(windows) {
None
} else {
Some(DockerCredentials {
username: Some("bollard".to_string()),
password: std::env::var("REGISTRY_PASSWORD").ok(),
..Default::default()
})
},
);
let vec = select(
stream.map_ok(|events_results| Results::EventsResults(events_results)),
stream2.map_ok(|image_results| Results::CreateImageResults(image_results)),
)
.skip_while(|value| match value {
Ok(Results::EventsResults(_)) => future::ready(false),
_ => future::ready(true),
})
.take(1)
.try_collect::<Vec<_>>()
.await?;
assert!(vec
.iter()
.any(|value| match value {
Results::EventsResults(SystemEventsResponse { typ: _, .. }) => true,
_ => false,
}));
Ok(())
}
async fn df_test(docker: Docker) -> Result<(), Error> {
create_image_hello_world(&docker).await?;
let result = &docker.df().await?;
let c: Vec<_> = result
.images
.as_ref()
.unwrap()
.iter()
.filter(|c: &&ImageSummary| c.repo_tags.iter().any(|r| r.contains("hello-world")))
.collect();
assert!(c.len() > 0);
Ok(())
}
#[test]
fn integration_test_events() {
connect_to_docker_and_run!(events_test);
}
#[test]
#[cfg(not(windows))]
fn integration_test_events_until_forever() {
connect_to_docker_and_run!(events_until_forever_test);
}
#[test]
fn integration_test_df() {
connect_to_docker_and_run!(df_test);
}
| 25.08982 | 90 | 0.565394 |
edd6b6a81867d2a075322f0bdccb9a64645651e1 | 2,167 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use bytecode_source_map::source_map::SourceMap;
use bytecode_verifier::VerifiedModule;
use ir_to_bytecode::{compiler::compile_module, parser::ast::Loc, parser::parse_module};
use libra_types::account_address::AccountAddress;
use std::{
env,
fs::{self, File},
io::prelude::*,
};
use stdlib::{stdlib_modules, stdlib_source_map};
use tree_heap::translator::BoogieTranslator;
// mod translator;
fn compile_files(file_names: Vec<String>) -> (Vec<VerifiedModule>, SourceMap<Loc>) {
let mut verified_modules = stdlib_modules().to_vec();
let mut source_maps = stdlib_source_map().to_vec();
    let dep_files = &file_names[..];
let address = AccountAddress::default();
for file_name in dep_files {
let code = fs::read_to_string(file_name).unwrap();
let module = parse_module(&code).unwrap();
let (compiled_module, source_map) =
compile_module(address, module, &verified_modules).expect("module failed to compile");
let verified_module_res = VerifiedModule::new(compiled_module);
match verified_module_res {
Err(e) => {
panic!("{:?}", e);
}
Ok(verified_module) => {
verified_modules.push(verified_module);
source_maps.push(source_map);
}
}
}
(verified_modules, source_maps)
}
fn main() {
let args: Vec<String> = env::args().collect();
let file_names = &args[1..];
// read files and compile into compiled modules
let (modules, source_maps) = compile_files(file_names.to_vec());
let mut ts = BoogieTranslator::new(&modules, &source_maps);
let mut res = String::new();
// handwritten boogie code
let written_code = fs::read_to_string("src/bytecode_instrs.bpl").unwrap();
res.push_str(&written_code);
res.push_str(&ts.translate());
let mut f = File::create("output.bpl").expect("Unable to create file");
// write resulting code into output.bpl
write!(f, "{}", res).expect("unable to write file");
}
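// Invocation sketch: pass Move IR source files on the command line, e.g.
// `cargo run -- my_module.mvir`. Each module is compiled against the stdlib,
// verified, translated to Boogie, and the result is written to `output.bpl`
// behind the handwritten prelude from `src/bytecode_instrs.bpl`.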
| 36.116667 | 98 | 0.658053 |
ccc3869c0f1f57a77c874565ccf5a4d85b3a791d | 188 | #![allow(unused)]
#![feature(decl_macro)]
#[allow(unused_macros)]
macro_rules! foo {
() => {};
}
struct Foo<'a>(std::marker::PhantomData<&'a ()>);
impl<'a> Foo<'a> {}
fn main() {}
| 13.428571 | 49 | 0.56383 |
50658bc7c325211ad0d6764c7484247814375136 | 648 | use yew::prelude::*;
use yew::Properties;
pub enum QueueMsg {}
pub struct Queue {
pub link: ComponentLink<Self>,
}
#[derive(Clone, Properties)]
pub struct Props {}
impl Component for Queue {
type Message = QueueMsg;
type Properties = Props;
    fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {
        Self { link }
    }
    fn change(&mut self, _props: Self::Properties) -> bool {
false
}
    fn update(&mut self, _msg: Self::Message) -> bool {
false
}
fn view(&self) -> Html {
html! {
<div>
{"Queue"}
</div>
}
}
}
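// Mounting sketch (entry point assumed; the exact call depends on the yew
// version in use):
//
//     fn main() {
//         yew::start_app::<Queue>();
//     }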
| 17.513514 | 75 | 0.540123 |