//! Futex: A Linux-specific fast user-space locking primitive. //! //! This crate provides easy-to-use wrappers around the not-so-easy-to-use `SYS_futex` Linux syscall. //! //! The documentation of Linux's futexes can be found in the //! [relevant man page](http://man7.org/linux/man-pages/man2/futex.2.html). //! The most important details are also explained in the documentation of this crate. //! //! The two main types of this crate are [`Futex`] and [`PiFutex`], which are //! simply wrappers containing an [`AtomicI32`] exposing all the futex //! operations Linux can apply to them. //! //! Existing [`AtomicI32`]s can be used as futexes through [`AsFutex`] //! without changing their type. mod errors; mod scope; mod sys; mod timeout; pub mod op; use op::OpAndCmp; use std::marker::PhantomData; use std::sync::atomic::AtomicI32; use std::time::{Duration, Instant}; use sys::{Error, FutexCall}; use timeout::as_timespec; pub use errors::*; pub use scope::{Private, Scope, Shared}; pub use timeout::Timeout; /// A Linux-specific fast user-space locking primitive. /// /// `Futex<Private>` may only be used from the same address space (the same /// process) and is faster than a `Futex<Shared>`, which may be used accross /// address spaces (processes). #[repr(transparent)] pub struct Futex<Scope> { pub value: AtomicI32, phantom: PhantomData<Scope>, } /// A Linux-specific priority inheriting fast user-space locking primitive. /// /// Unlike with a regular [`Futex`], the value of a [`PiFutex`] has meaning /// to the Linux kernel, taking away some flexibility. User-space must follow /// the assumed protocol to allow the kernel to properly implement priority /// inheritance. /// /// See the *Priority-inheritance futexes* section of [the Linux futex man /// page](http://man7.org/linux/man-pages/man2/futex.2.html) for details. /// /// `PiFutex<Private>` may only be used from the same address space (the same /// process) and is faster than a `PiFutex<Shared>`, which may be used accross /// address spaces (processes). #[repr(transparent)] pub struct PiFutex<Scope> { pub value: AtomicI32, phantom: PhantomData<Scope>, } /// Use any [`AtomicI32`] as [`Futex`] or [`PiFutex`]. /// /// This also allows you to convert between a [`Futex`] and a [`PiFutex`] or /// between [`Private`] and [`Shared`] futexes if you ever need that, as they /// expose their internal [`AtomicI32`] through `.value`. pub trait AsFutex<S> { fn as_futex(&self) -> &Futex<S>; fn as_pi_futex(&self) -> &PiFutex<S>; } impl<S> AsFutex<S> for AtomicI32 { #[must_use] #[inline] fn as_futex(&self) -> &Futex<S> { unsafe { std::mem::transmute(self) } } #[inline] #[must_use] fn as_pi_futex(&self) -> &PiFutex<S> { unsafe { std::mem::transmute(self) } } } impl<S> Futex<S> { /// Create a new [`Futex`] with an initial value. #[inline] pub const fn new(value: i32) -> Self { Self { value: AtomicI32::new(value), phantom: PhantomData, } } } impl<S> PiFutex<S> { /// Create a new [`PiFutex`] with an initial value. #[inline] pub const fn new(value: i32) -> Self { Self { value: AtomicI32::new(value), phantom: PhantomData, } } /// The `FUTEX_WAITERS` bit that indicates there are threads waiting. pub const WAITERS: i32 = -0x8000_0000; /// The `FUTEX_OWNER_DIED` bit that indicates the owning thread died. pub const OWNER_DIED: i32 = 0x4000_0000; /// The bits that are used for storing the thread id (`FUTEX_TID_MASK`). 
pub const TID_MASK: i32 = 0x3fffffff; } impl<S> Default for Futex<S> { fn default() -> Self { Self::new(0) } } impl<S> Default for PiFutex<S> { fn default() -> Self { Self::new(0) } } impl<S: Scope> Futex<S> { /// Wait until this futex is awoken by a `wake` call. /// /// The thread will only be sent to sleep if the futex's value matches the /// expected value. Otherwise, it returns directly with [`WaitError::WrongValue`]. #[inline] pub fn wait(&self, expected_value: i32) -> Result<(), WaitError> { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_WAIT + S::futex_flag()) .uaddr(&self.value) .val(expected_value) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(WaitError::WrongValue), Err(Error(libc::EINTR)) => Err(WaitError::Interrupted), Err(e) => e.panic("FUTEX_WAIT"), Ok(_) => Ok(()), } } /// Wait until this futex is awoken by a `wake` call, or until the timeout expires. /// /// The thread will only be sent to sleep if the futex's value matches the /// expected value. Otherwise, it returns directly with [`TimedWaitError::WrongValue`]. /// /// If you want an absolute point in time as timeout, use /// [`wait_bitset_until`][Futex::wait_bitset_until] instead, using a bitset of `!0`. #[inline] pub fn wait_for(&self, expected_value: i32, timeout: Duration) -> Result<(), TimedWaitError> { let timeout = as_timespec(timeout); let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_WAIT + S::futex_flag()) .uaddr(&self.value) .val(expected_value) .timeout(&timeout) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(TimedWaitError::WrongValue), Err(Error(libc::EINTR)) => Err(TimedWaitError::Interrupted), Err(Error(libc::ETIMEDOUT)) => Err(TimedWaitError::TimedOut), Err(e) => e.panic("FUTEX_WAIT"), Ok(_) => Ok(()), } } /// Wake up `n` waiters. /// /// Returns the number of waiters that were woken up. #[inline] pub fn wake(&self, n: i32) -> i32 { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_WAKE + S::futex_flag()) .uaddr(&self.value) .val(n) .call() }; match r { Err(e) => e.panic("FUTEX_WAKE"), Ok(v) => v, } } /// Wake up `n_wake` waiters, and requeue up to `n_requeue` waiters to another futex. /// /// Returns the number of waiters that were woken up. #[inline] pub fn requeue(&self, n_wake: i32, to: &Futex<S>, n_requeue: i32) -> i32 { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_REQUEUE + S::futex_flag()) .uaddr(&self.value) .uaddr2(&to.value) .val(n_wake) .val2(n_requeue) .call() }; match r { Err(e) => e.panic("FUTEX_REQUEUE"), Ok(v) => v, } } /// Wake up `n_wake` waiters, and requeue up to `n_requeue` waiters to another futex. /// /// The operation will only execute if the futex's value matches the /// expected value. Otherwise, it returns directly with a [`WrongValueError`]. /// /// Returns the total number of waiters that were woken up or requeued to the other futex. #[inline] pub fn cmp_requeue( &self, expected_value: i32, n_wake: i32, to: &Futex<S>, n_requeue: i32, ) -> Result<i32, WrongValueError> { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_CMP_REQUEUE + S::futex_flag()) .uaddr(&self.value) .uaddr2(&to.value) .val(n_wake) .val2(n_requeue) .val3(expected_value) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(WrongValueError::WrongValue), Err(e) => e.panic("FUTEX_CMP_REQUEUE"), Ok(v) => Ok(v), } } /// Wait until this futex is awoken by a `wake` call matching a bitset. /// /// - Calls to [`wake`][Futex::wake] will match any bitset. /// - Calls to [`wake_bitset`][Futex::wake_bitset] will match if at least one 1-bit matches. 
/// /// The thread will only be sent to sleep if the futex's value matches the /// expected value. Otherwise, it returns directly with [`WaitError::WrongValue`]. #[inline] pub fn wait_bitset(&self, expected_value: i32, bitset: u32) -> Result<(), WaitError> { let r = unsafe { FutexCall::new() .uaddr(&self.value) .futex_op(libc::FUTEX_WAIT_BITSET + S::futex_flag()) .val(expected_value) .val3(bitset as i32) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(WaitError::WrongValue), Err(Error(libc::EINTR)) => Err(WaitError::Interrupted), Err(e) => e.panic("FUTEX_WAIT_BITSET"), Ok(_) => Ok(()), } } /// Wait until this futex is awoken by a `wake` call matching a bitset, or until the timeout expires. /// /// - Calls to [`wake`][Futex::wake] will match any bitset. /// - Calls to [`wake_bitset`][Futex::wake_bitset] will match if at least one 1-bit matches. /// /// The thread will only be sent to sleep if the futex's value matches the /// expected value. Otherwise, it returns directly with [`TimedWaitError::WrongValue`]. #[inline] pub fn wait_bitset_until( &self, expected_value: i32, bitset: u32, timeout: impl Timeout, ) -> Result<(), TimedWaitError> { let timeout = timeout.as_timespec(); let r = unsafe { FutexCall::new() .uaddr(&self.value) .futex_op(libc::FUTEX_WAIT_BITSET + timeout.0 + S::futex_flag()) .val(expected_value) .val3(bitset as i32) .timeout(&timeout.1) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(TimedWaitError::WrongValue), Err(Error(libc::EINTR)) => Err(TimedWaitError::Interrupted), Err(Error(libc::ETIMEDOUT)) => Err(TimedWaitError::TimedOut), Err(e) => e.panic("FUTEX_WAIT_BITSET"), Ok(_) => Ok(()), } } /// Wake up `n` waiters matching a bitset. /// /// - Waiters waiting using [`wake`][Futex::wake] are always woken up, /// regardless of the bitset. /// - Waiters waiting using [`wake_bitset`][Futex::wake_bitset] are woken up /// if they match at least one 1-bit. /// /// Returns the number of waiters that were woken up. #[inline] pub fn wake_bitset(&self, n: i32, bitset: u32) -> i32 { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_WAKE_BITSET + S::futex_flag()) .uaddr(&self.value) .val(n) .val3(bitset as i32) .call() }; match r { Err(e) => e.panic("FUTEX_WAKE_BITSET"), Ok(v) => v, } } /// Wake up `n` waiters, and conditionally `n2` waiters on another futex after modifying it. /// /// This operation first applies an [operation][`op::Op`] to the second futex while remembering its old value, /// then wakes up `n` waiters on the first futex, and finally wakes `n2` waiters on the second futex if /// its old value matches [a condition][`op::Cmp`]. This all happens atomically. /// /// Returns the total number of waiters that were woken up on either futex. #[inline] pub fn wake_op(&self, n: i32, second: &Futex<S>, op: OpAndCmp, n2: i32) -> i32 { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_WAKE_OP + S::futex_flag()) .uaddr(&self.value) .uaddr2(&second.value) .val(n) .val2(n2) .val3(op.raw_bits() as i32) .call() }; match r { Err(e) => e.panic("FUTEX_WAKE_OP"), Ok(v) => v, } } /// Wake up one waiter, and requeue up to `n_requeue` to a [`PiFutex`]. /// /// Only requeues waiters that are blocked by [`wait_requeue_pi`][Futex::wait_requeue_pi] /// or [`wait_requeue_pi_until`][Futex::wait_requeue_pi_until]. /// The [`PiFutex`] must be the same as the one the waiters are waiting to be requeued to. /// /// The number of waiters to wake cannot be chosen and is always 1. /// /// Returns the total number of waiters that were woken up or requeued to the other futex. 
#[inline] pub fn cmp_requeue_pi( &self, expected_value: i32, to: &PiFutex<S>, n_requeue: i32, ) -> Result<i32, TryAgainError> { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_CMP_REQUEUE_PI + S::futex_flag()) .uaddr(&self.value) .uaddr2(&to.value) .val(1) .val2(n_requeue) .val3(expected_value) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(TryAgainError::TryAgain), Err(e) => e.panic("FUTEX_CMP_REQUEUE_PI"), Ok(v) => Ok(v), } } /// Wait until this futex is awoken after potentially being requeued to a [`PiFutex`]. /// /// A call to [`cmp_requeue_pi`][Futex::cmp_requeue_pi] will requeue this waiter to /// the [`PiFutex`]. The call must refer to the same [`PiFutex`]. /// /// A call to [`wake`][Futex::wake] (or [`wake_bitset`][Futex::wake_bitset]) will /// wake this thread without requeueing. This results in an [`RequeuePiError::TryAgain`]. #[inline] pub fn wait_requeue_pi( &self, expected_value: i32, second: &PiFutex<S>, ) -> Result<(), RequeuePiError> { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_WAIT_REQUEUE_PI + S::futex_flag()) .uaddr(&self.value) .uaddr2(&second.value) .val(expected_value) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(RequeuePiError::TryAgain), Err(e) => e.panic("FUTEX_WAIT_REQUEUE_PI"), Ok(_) => Ok(()), } } /// Wait until this futex is awoken after potentially being requeued to a [`PiFutex`], or until the timeout expires. /// /// A call to [`cmp_requeue_pi`][Futex::cmp_requeue_pi] will requeue this waiter to /// the [`PiFutex`]. The call must refer to the same [`PiFutex`]. /// /// A call to [`wake`][Futex::wake] (or [`wake_bitset`][Futex::wake_bitset]) will /// wake this thread without requeueing. This results in an [`TimedRequeuePiError::TryAgain`]. #[inline] pub fn wait_requeue_pi_until( &self, expected_value: i32, second: &PiFutex<S>, timeout: impl Timeout, ) -> Result<(), TimedRequeuePiError> { let timeout = timeout.as_timespec(); let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_WAIT_REQUEUE_PI + timeout.0 + S::futex_flag()) .uaddr(&self.value) .uaddr2(&second.value) .val(expected_value) .timeout(&timeout.1) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(TimedRequeuePiError::TryAgain), Err(Error(libc::ETIMEDOUT)) => Err(TimedRequeuePiError::TimedOut), Err(e) => e.panic("FUTEX_WAIT_REQUEUE_PI"), Ok(_) => Ok(()), } } } impl<S: Scope> PiFutex<S> { /// See `FUTEX_LOCK_PI` in the [Linux futex man page](http://man7.org/linux/man-pages/man2/futex.2.html). #[inline] pub fn lock_pi(&self) -> Result<(), TryAgainError> { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_LOCK_PI + S::futex_flag()) .uaddr(&self.value) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(TryAgainError::TryAgain), Err(e) => e.panic("FUTEX_LOCK_PI"), Ok(_) => Ok(()), } } /// See `FUTEX_LOCK_PI` in the [Linux futex man page](http://man7.org/linux/man-pages/man2/futex.2.html). #[inline] pub fn lock_pi_until(&self, timeout: Instant) -> Result<(), TimedLockError> { let timeout = timeout.as_timespec().1; let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_LOCK_PI + S::futex_flag()) .uaddr(&self.value) .timeout(&timeout) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(TimedLockError::TryAgain), Err(Error(libc::ETIMEDOUT)) => Err(TimedLockError::TimedOut), Err(e) => e.panic("FUTEX_LOCK_PI"), Ok(_) => Ok(()), } } /// See `FUTEX_TRYLOCK_PI` in the [Linux futex man page](http://man7.org/linux/man-pages/man2/futex.2.html). 
#[inline] pub fn trylock_pi(&self) -> Result<(), TryAgainError> { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_TRYLOCK_PI + S::futex_flag()) .uaddr(&self.value) .call() }; match r { Err(Error(libc::EAGAIN)) => Err(TryAgainError::TryAgain), Err(e) => e.panic("FUTEX_LOCK_PI"), Ok(_) => Ok(()), } } /// See `FUTEX_UNLOCK_PI` in the [Linux futex man page](http://man7.org/linux/man-pages/man2/futex.2.html). #[inline] pub fn unlock_pi(&self) { let r = unsafe { FutexCall::new() .futex_op(libc::FUTEX_UNLOCK_PI + S::futex_flag()) .uaddr(&self.value) .call() }; if let Err(e) = r { e.panic("FUTEX_UNLOCK_PI"); } } } impl<S> std::fmt::Debug for Futex<S> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.debug_struct("Futex") .field("scope", &std::any::type_name::<S>()) .field("value", &self.value) .finish() } } impl<S> std::fmt::Debug for PiFutex<S> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.debug_struct("PiFutex") .field("scope", &std::any::type_name::<S>()) .field("value", &self.value) .finish() } }
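// Not part of the original crate source above: a minimal usage sketch of the wrappers it defines.
// One side parks on the futex while its value is still 0; the other side publishes a new value and
// wakes every waiter. Only items defined above (`Futex`, `Private`, `wait`, `wake`, the public
// `value` field) are relied on here.
fn wait_until_set(futex: &Futex<Private>) {
    while futex.value.load(std::sync::atomic::Ordering::SeqCst) == 0 {
        // Sleeps only while the value is still 0; returns early with `WaitError::WrongValue`
        // otherwise, and may be interrupted by a signal, so the loop re-checks the value.
        let _ = futex.wait(0);
    }
}

fn set_and_wake(futex: &Futex<Private>) {
    futex.value.store(1, std::sync::atomic::Ordering::SeqCst);
    futex.wake(i32::MAX); // wake every waiter
}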
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;

#[test]
fn xorpd_1() {
    run_test(&Instruction { mnemonic: Mnemonic::XORPD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 87, 240], OperandSize::Dword)
}

#[test]
fn xorpd_2() {
    run_test(&Instruction { mnemonic: Mnemonic::XORPD, operand1: Some(Direct(XMM0)), operand2: Some(IndirectDisplaced(ECX, 1386622604, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 87, 129, 140, 46, 166, 82], OperandSize::Dword)
}

#[test]
fn xorpd_3() {
    run_test(&Instruction { mnemonic: Mnemonic::XORPD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 87, 255], OperandSize::Qword)
}

#[test]
fn xorpd_4() {
    run_test(&Instruction { mnemonic: Mnemonic::XORPD, operand1: Some(Direct(XMM7)), operand2: Some(Indirect(RBX, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 87, 59], OperandSize::Qword)
}
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::{models, models::*, API_VERSION}; pub mod operations { use super::{models, models::*, API_VERSION}; pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/providers/Microsoft.OperationalInsights/operations", operation_config.base_path(), ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: OperationListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod query_packs { use super::{models, models::*, API_VERSION}; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, ) -> std::result::Result<LogAnalyticsQueryPackListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.OperationalInsights/queryPacks", operation_config.base_path(), subscription_id ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", 
token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPackListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_resource_group( operation_config: &crate::OperationConfig, resource_group_name: &str, subscription_id: &str, ) -> std::result::Result<LogAnalyticsQueryPackListResult, list_by_resource_group::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks", operation_config.base_path(), subscription_id, resource_group_name ); let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_resource_group::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_by_resource_group::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_resource_group::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPackListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?; 
Err(list_by_resource_group::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_by_resource_group { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, subscription_id: &str, query_pack_name: &str, ) -> std::result::Result<LogAnalyticsQueryPack, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPack = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, resource_group_name: &str, subscription_id: &str, 
query_pack_name: &str, log_analytics_query_pack_payload: &LogAnalyticsQueryPack, ) -> std::result::Result<LogAnalyticsQueryPack, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(log_analytics_query_pack_payload).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPack = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update_tags( operation_config: &crate::OperationConfig, resource_group_name: &str, subscription_id: &str, query_pack_name: &str, query_pack_tags: &TagsResource, ) -> std::result::Result<LogAnalyticsQueryPack, update_tags::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name ); let mut url = url::Url::parse(url_str).map_err(update_tags::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = 
token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update_tags::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(query_pack_tags).map_err(update_tags::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update_tags::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(update_tags::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPack = serde_json::from_slice(rsp_body).map_err(|source| update_tags::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| update_tags::Error::DeserializeError(source, rsp_body.clone()))?; Err(update_tags::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod update_tags { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, subscription_id: &str, query_pack_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = 
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, models::*, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod queries { use super::{models, models::*, API_VERSION}; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, query_pack_name: &str, top: Option<i64>, include_body: Option<bool>, skip_token: Option<&str>, ) -> std::result::Result<LogAnalyticsQueryPackQueryListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}/queries", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(include_body) = include_body { url.query_pairs_mut().append_pair("includeBody", include_body.to_string().as_str()); } if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPackQueryListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: 
models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn search( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, query_pack_name: &str, top: Option<i64>, include_body: Option<bool>, skip_token: Option<&str>, query_search_properties: &LogAnalyticsQueryPackQuerySearchProperties, ) -> std::result::Result<LogAnalyticsQueryPackQueryListResult, search::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}/queries/search", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name ); let mut url = url::Url::parse(url_str).map_err(search::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(search::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(include_body) = include_body { url.query_pairs_mut().append_pair("includeBody", include_body.to_string().as_str()); } if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(query_search_properties).map_err(search::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(search::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(search::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPackQueryListResult = serde_json::from_slice(rsp_body).map_err(|source| search::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| search::Error::DeserializeError(source, rsp_body.clone()))?; Err(search::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod search { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] 
SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, query_pack_name: &str, id: &str, ) -> std::result::Result<LogAnalyticsQueryPackQuery, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}/queries/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name, id ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPackQuery = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn put( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, query_pack_name: &str, id: &str, query_payload: &LogAnalyticsQueryPackQuery, ) -> std::result::Result<LogAnalyticsQueryPackQuery, put::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}/queries/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name, id ); let mut url = url::Url::parse(url_str).map_err(put::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let 
Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(put::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(query_payload).map_err(put::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(put::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(put::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPackQuery = serde_json::from_slice(rsp_body).map_err(|source| put::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| put::Error::DeserializeError(source, rsp_body.clone()))?; Err(put::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod put { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, query_pack_name: &str, id: &str, query_payload: &LogAnalyticsQueryPackQuery, ) -> std::result::Result<LogAnalyticsQueryPackQuery, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}/queries/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name, id ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(query_payload).map_err(update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { 
http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: LogAnalyticsQueryPackQuery = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Err(update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod update { use super::{models, models::*, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, query_pack_name: &str, id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/queryPacks/{}/queries/{}", operation_config.base_path(), subscription_id, resource_group_name, query_pack_name, id ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, models::*, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), 
#[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
use bevy::prelude::*;

// Example rewrite in progress; I'm not using Bevy, so help here is welcome.
//
//const LDTK_FILE_PATH: &str = "assets/game_1-1-3.ldtk";

fn main() {
    App::new()
        .insert_resource(WindowDescriptor {
            title: "title".to_string(),
            width: 1024.0,
            height: 768.0,
            ..Default::default()
        })
        .add_plugins(DefaultPlugins)
        .add_startup_system(setup_camera)
        .run();
}

fn setup_camera(mut commands: Commands) {
    let mut camera = OrthographicCameraBundle::new_2d();
    camera.transform = Transform::from_translation(Vec3::new(0.0, 0.0, 50.0));
    commands.spawn_bundle(camera);
}
use serde_derive::{Deserialize, Serialize};
use std::env;
use std::path::PathBuf;
use types::ChainSpec;

/// The core configuration of a Lighthouse beacon node.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct Eth2Config {
    pub spec_constants: String,
    pub spec: ChainSpec,
}

impl Default for Eth2Config {
    fn default() -> Self {
        Self {
            spec_constants: "minimal".to_string(),
            spec: ChainSpec::minimal(),
        }
    }
}

impl Eth2Config {
    pub fn mainnet() -> Self {
        Self {
            spec_constants: "mainnet".to_string(),
            spec: ChainSpec::mainnet(),
        }
    }

    pub fn minimal() -> Self {
        Self {
            spec_constants: "minimal".to_string(),
            spec: ChainSpec::minimal(),
        }
    }

    pub fn interop() -> Self {
        Self {
            spec_constants: "interop".to_string(),
            spec: ChainSpec::interop(),
        }
    }
}

/// A directory that can be built by downloading files via HTTP.
///
/// Used by the `eth2_testnet_config` crate to initialize testnet directories during build and
/// access them at runtime.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct Eth2NetDirectory<'a> {
    pub name: &'a str,
    pub unique_id: &'a str,
    pub commit: &'a str,
    pub url_template: &'a str,
    pub genesis_is_known: bool,
}

impl<'a> Eth2NetDirectory<'a> {
    /// The directory that should be used to store files downloaded for this net.
    pub fn dir(&self) -> PathBuf {
        env::var("CARGO_MANIFEST_DIR")
            .expect("should know manifest dir")
            .parse::<PathBuf>()
            .expect("should parse manifest dir as path")
            .join(self.unique_id)
    }
}

#[macro_export]
macro_rules! unique_id {
    ($name: tt, $commit: tt, $genesis_is_known: tt) => {
        concat!("testnet_", $name, "_", $commit, "_", $genesis_is_known);
    };
}

macro_rules! define_net {
    ($title: ident, $macro_title: tt, $name: tt, $commit: tt, $url_template: tt, $genesis_is_known: tt) => {
        #[macro_use]
        pub mod $title {
            use super::*;

            pub const ETH2_NET_DIR: Eth2NetDirectory = Eth2NetDirectory {
                name: $name,
                unique_id: unique_id!($name, $commit, $genesis_is_known),
                commit: $commit,
                url_template: $url_template,
                genesis_is_known: $genesis_is_known,
            };

            // A wrapper around `std::include_bytes` which includes a file from a specific testnet
            // directory. Used by upstream crates to import files at compile time.
            #[macro_export]
            macro_rules! $macro_title {
                ($base_dir: tt, $filename: tt) => {
                    include_bytes!(concat!(
                        $base_dir,
                        unique_id!($name, $commit, $genesis_is_known),
                        "/",
                        $filename
                    ))
                };
            }
        }
    };
}

define_net!(
    altona,
    include_altona_file,
    "altona",
    "a94e00c1a03df851f960fcf44a79f2a6b1d29af1",
    "https://raw.githubusercontent.com/sigp/witti/{{ commit }}/altona/lighthouse/{{ file }}",
    true
);

define_net!(
    medalla,
    include_medalla_file,
    "medalla",
    "09bbf2c9d108944ac934f94ec6a1d0684ca062a5",
    "https://raw.githubusercontent.com/sigp/witti/{{ commit }}/medalla/{{ file }}",
    true
);

#[cfg(test)]
mod tests {
    use super::*;
    use toml;

    #[test]
    fn serde_serialize() {
        let _ = toml::to_string(&Eth2Config::default()).expect("Should serde encode default config");
    }
}
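// A hedged sketch (not in the original file) of how a downstream crate would pull in a testnet
// file at compile time through the macro generated by `define_net!` above. It is kept commented
// out because `include_bytes!` needs the file to exist at build time; the base directory and file
// name are placeholders, not paths from the original project.
//
// let genesis_bytes: &[u8] = include_altona_file!("/path/to/testnet/dirs/", "genesis.ssz");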
use codespan::{FileId, Files};
use linkcheck::Link;
use std::{cell::RefCell, fmt::Debug};

/// Search every file in the [`Files`] and collate all the links that are
/// found.
pub fn extract<I>(
    target_files: I,
    files: &Files<String>,
) -> (Vec<Link>, Vec<IncompleteLink>)
where
    I: IntoIterator<Item = FileId>,
{
    let mut links = Vec::new();
    let broken_links = RefCell::new(Vec::new());

    for file_id in target_files {
        let cb = on_broken_links(file_id, &broken_links);
        let src = files.source(file_id);

        log::debug!("Scanning {}", files.name(file_id).to_string_lossy());

        links.extend(
            linkcheck::scanners::markdown_with_broken_link_callback(src, &cb)
                .map(|(link, span)| Link::new(link, span, file_id)),
        );
    }

    (links, broken_links.into_inner())
}

/// Get a closure which can be used as the broken links callback, adding a new
/// [`IncompleteLink`] to the list.
fn on_broken_links<'a>(
    file: FileId,
    dest: &'a RefCell<Vec<IncompleteLink>>,
) -> impl Fn(&str, &str) -> Option<(String, String)> + 'a {
    move |raw, _| {
        log::debug!("Found a (possibly) broken link to [{}]", raw);

        dest.borrow_mut().push(IncompleteLink {
            text: raw.to_string(),
            file,
        });

        None
    }
}

/// A potential link that has a broken reference (e.g `[foo]` when there is no
/// `[foo]: ...` entry at the bottom).
#[derive(Debug, Clone, PartialEq)]
pub struct IncompleteLink {
    /// The reference name (e.g. the `foo` in `[foo]`).
    pub text: String,
    /// Which file was the incomplete link found in?
    pub file: FileId,
}
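// A minimal usage sketch (not part of the original module), assuming the standard
// `codespan::Files` API: register a source string, then hand its id to `extract`. The file name
// and markdown content are placeholders.
fn example() {
    let mut files: Files<String> = Files::new();
    let id = files.add(
        "chapter_1.md",
        "See [the intro] and [the other page](./other.md).".to_string(),
    );

    let (links, incomplete) = extract(vec![id], &files);
    println!("{} links, {} incomplete references", links.len(), incomplete.len());
}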
29.22807
78
0.605042
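A rough usage sketch for the `extract` function above, assuming the `codespan` and `linkcheck` versions this file targets; the file name and markdown snippet are invented:

fn main() {
    let mut files = codespan::Files::new();
    let id = files.add(
        "example.md",
        "[ok](https://example.com) and a [dangling] reference".to_string(),
    );

    // Returns complete links plus reference-style links that never got a
    // `[dangling]: ...` definition.
    let (links, incomplete) = extract(vec![id], &files);
    println!("{} links, {} incomplete", links.len(), incomplete.len());
}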
09e9745a91880ebd098668242a0695fe2ee7f67c
167
#[derive(Debug, PartialEq)]
pub enum ResourceCreationError {
    /// Returned when a resource is already registered in the `ResourceManager`.
    AlreadyRegistered,
}
27.833333
80
0.748503
9bbcb202f112c257d7a220f4df036fe6a26e6e07
5,588
use crc::{crc32, Hasher32}; use crate::{read_le_bytes, GPTError, Result, GUID}; const EFI_SIGNATURE: u64 = 0x5452415020494645; const GPT_REV: u32 = 0x00010000; #[derive(Debug)] pub struct GPTHeader { /// Size in bytes of the GPT Header. The [`Self::size`] must be greater than or equal to /// 92 and must be less than or equal to the logical block size. pub size: u32, /// CRC32 checksum for the GPT Header structure. This value is computed by /// setting this field to 0, and computing the 32-bit CRC for [`Self::size`] bytes. pub crc32: u32, /// The LBA that contains this data structure. pub my_lba: u64, /// LBA address of the alternate GPT Header. pub other_lba: u64, /// The first usable logical block that may be used by a partition described by a GUID /// Partition Entry. pub first_lba: u64, /// The last usable logical block that may be used by a partition described by a GUID /// Partition Entry. pub last_lba: u64, /// GUID that can be used to uniquely identify the disk. pub guid: GUID, /// The starting LBA of the GUID Partition Entry array. pub p_entry_lba: u64, /// The number of Partition Entries in the GUID Partition Entry array. pub num_parts: u32, /// The size, in bytes, of each the GUID Partition Entry structures in the GUID Partition Entry /// array. This field shall be set to a value of 128 x 2n where n is an integer greater than or /// equal to zero (e.g., 128, 256, 512, etc.). /// NOTE: Previous versions of this specification allowed any multiple of 8. pub size_of_p_entry: u32, /// The CRC32 of the GUID Partition Entry array. Starts at [`Self::p_entry_lba`] and is /// computed over a byte length of [`Self::num_parts`] * [`Self::size_of_p_entry`]. pub p_crc32: u32, } impl GPTHeader { /// Read the GPT header from buf and serializes it into this struct. /// Checks for `Signature` and `Revision`, but nothing else. pub fn parse(buf: &[u8]) -> Result<Self> { let sig = read_le_bytes!(buf, u64, 0..8); if sig != EFI_SIGNATURE { return Err(GPTError::InvalidSignature(sig)); } let rev = read_le_bytes!(buf, u32, 8..12); if rev != GPT_REV { return Err(GPTError::InvalidSignature(rev as u64)); } let size = read_le_bytes!(buf, u32, 12..16); let crc32 = read_le_bytes!(buf, u32, 16..20); let my_lba = read_le_bytes!(buf, u64, 24..32); let other_lba = read_le_bytes!(buf, u64, 32..40); let first_lba = read_le_bytes!(buf, u64, 40..48); let last_lba = read_le_bytes!(buf, u64, 48..56); // TODO: remove unwrap let guid = buf[56..72].try_into()?; let p_entry_lba = read_le_bytes!(buf, u64, 72..80); let num_parts = read_le_bytes!(buf, u32, 80..84); let size_of_p_entry = read_le_bytes!(buf, u32, 84..88); let p_crc32 = read_le_bytes!(buf, u32, 88..92); Ok(Self { size, crc32, my_lba, other_lba, first_lba, last_lba, guid, p_entry_lba, num_parts, size_of_p_entry, p_crc32, }) } /// Check this header for valid data. needs the bits of the partition table as input. 
pub fn validate(&self, my_lba: u64, part_table: &[u8]) -> Result<()> { if self.my_lba != my_lba { return Err(GPTError::InvalidLba(self.my_lba)); } self.validate_crc()?; self.validate_part_crc(part_table)?; Ok(()) } pub fn validate_part_crc(&self, part_table: &[u8]) -> Result<()> { let len = (self.num_parts * self.size_of_p_entry) as usize; if len > part_table.len() { return Err(GPTError::PartitionTableToShort(len as u32)); } let mut digest = crc32::Digest::new(crc32::IEEE); digest.write(&part_table[0..len]); let digest = digest.sum32(); if digest != self.p_crc32 { return Err(GPTError::InvalidCrcParts(digest, self.p_crc32)); } Ok(()) } /// Verifies own crc sum pub fn validate_crc(&self) -> Result<()> { let crc_cal = self.calculate_crc(); if crc_cal != self.crc32 { return Err(GPTError::InvalidCrcHeader(crc_cal, self.crc32)); } Ok(()) } pub fn calculate_crc(&self) -> u32 { let mut digest = crc32::Digest::new(crc32::IEEE); digest.write(&EFI_SIGNATURE.to_le_bytes()); digest.write(&GPT_REV.to_le_bytes()); digest.write(&self.size.to_le_bytes()); digest.write(&[0, 0, 0, 0, 0, 0, 0, 0]); digest.write(&self.my_lba.to_le_bytes()); digest.write(&self.other_lba.to_le_bytes()); digest.write(&self.first_lba.to_le_bytes()); digest.write(&self.last_lba.to_le_bytes()); digest.write(&self.guid.as_bytes()); digest.write(&self.p_entry_lba.to_le_bytes()); digest.write(&self.num_parts.to_le_bytes()); digest.write(&self.size_of_p_entry.to_le_bytes()); digest.write(&self.p_crc32.to_le_bytes()); digest.sum32() } } #[derive(Debug, Copy, Clone)] pub enum GptHeaderType { Main, Backup, } impl core::fmt::Display for GptHeaderType { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { match self { GptHeaderType::Main => write!(f, "main"), GptHeaderType::Backup => write!(f, "backup"), } } }
32.678363
99
0.601288
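A short sketch of driving the parser above; the buffers are stand-ins for whatever the caller read from disk (LBA 1 and the raw partition-entry array), and only `GPTHeader::parse`/`validate` come from the code above:

fn check_primary_header(lba1: &[u8; 512], partition_array: &[u8]) -> Result<()> {
    // LBA 1 holds the primary GPT header on a 512-byte-sector disk.
    let header = GPTHeader::parse(lba1)?;

    // Re-checks `my_lba`, the header CRC32 and the partition-array CRC32.
    header.validate(1, partition_array)?;

    println!(
        "{:?}: {} partition entries of {} bytes",
        header.guid, header.num_parts, header.size_of_p_entry
    );
    Ok(())
}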
50dffc2e338ad953b46ca295a0f41b347294d63e
1,166
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::sync::Arc;

use common_datavalues::DataSchema;
use common_datavalues::DataSchemaRef;

#[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq)]
pub struct OptimizeTablePlan {
    pub catalog: String,
    pub database: String,
    pub table: String,
    pub action: OptimizeTableAction,
}

impl OptimizeTablePlan {
    pub fn schema(&self) -> DataSchemaRef {
        Arc::new(DataSchema::empty())
    }
}

#[derive(serde::Serialize, serde::Deserialize, Copy, Clone, Debug, PartialEq)]
pub enum OptimizeTableAction {
    All,
    Purge,
    Compact,
}
29.15
78
0.722985
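For illustration only (the catalog, database and table names are invented), building one of these plans is plain struct construction:

fn compact_plan() -> OptimizeTablePlan {
    OptimizeTablePlan {
        catalog: "default".to_string(),
        database: "db1".to_string(),
        table: "t1".to_string(),
        // `schema()` stays empty: OPTIMIZE TABLE produces no result rows.
        action: OptimizeTableAction::Compact,
    }
}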
79b23f8ae4508dd5ff46280bbdddbccdd5cb857a
1,758
//! Example program drawing circles on a page.
#[macro_use]
extern crate simple_pdf;

use simple_pdf::graphicsstate::Color;
use simple_pdf::units::{Points, UserSpace};
use simple_pdf::Pdf;

use std::f32::consts::PI;
use std::io;

/// Create a `circles.pdf` file, with a single page containing a circle
/// stroked in black, overwritten with a circle in a finer yellow
/// stroke.
/// The black circle is drawn using the `Canvas.circle` method,
/// which approximates a circle with four bezier curves.
/// The yellow circle is drawn as a 200-sided polygon.
fn main() -> io::Result<()> {
    // Open our pdf document.
    let mut document = Pdf::create("circles.pdf").expect("Could not create file.");

    // Add a 400x400 pt page.
    // Render-page writes the pdf file structure for a page and
    // creates a Canvas which is sent to the function that is the last
    // argument of the render_page method.
    // That function then puts content on the page by calling methods
    // on the canvas.
    document.render_page(pt!(400), pt!(400), |c| {
        let (x, y) = (pt!(200), pt!(200));
        let r = pt!(190);
        // Set a wide black pen and stroke a circle
        c.set_stroke_color(Color::rgb(0, 0, 0))?;
        c.set_line_width(pt!(2))?;
        c.circle(x, y, r)?;
        c.stroke()?;
        // Set a finer yellow pen and stroke a 200-sided polygon
        c.set_stroke_color(Color::rgb(255, 230, 150))?;
        c.set_line_width(pt!(1))?;
        c.move_to(x + r, y)?;
        let sides: u8 = 200;
        for n in 1..sides {
            let phi = f32::from(n) * 2.0 * PI / f32::from(sides);
            c.line_to(x + r * phi.cos(), y + r * phi.sin())?;
        }
        c.close_and_stroke()
    })?;
    document.finish()
}
33.807692
70
0.608077
7a4d4a4c5cd9c3b05991516962116332cb1e49aa
94
pub mod hash_lsm_tree;
pub mod hash_radix_tree;
pub mod lazy_hollow_heap;
pub mod union_find;
18.8
25
0.829787
1a626ce67a66890dec21716f168eb818ce952af8
81,389
//! Semantic analysis. //! //! This module primarily contains the type environment and term environment. //! //! The type environment is constructed by analyzing an input AST. The type //! environment records the types used in the input source and the types of our //! various rules and symbols. ISLE's type system is intentionally easy to //! check, only requires a single pass over the AST, and doesn't require any //! unification or anything like that. //! //! The term environment is constructed from both the AST and type //! envionment. It is sort of a typed and reorganized AST that more directly //! reflects ISLE semantics than the input ISLE source code (where as the AST is //! the opposite). use crate::ast; use crate::ast::Ident; use crate::error::*; use crate::lexer::Pos; use std::collections::btree_map::Entry; use std::collections::BTreeMap; use std::collections::BTreeSet; use std::sync::Arc; declare_id!( /// The id of an interned symbol. Sym ); declare_id!( /// The id of an interned type inside the `TypeEnv`. TypeId ); declare_id!( /// The id of a variant inside an enum. VariantId ); declare_id!( /// The id of a field inside a variant. FieldId ); declare_id!( /// The id of an interned term inside the `TermEnv`. TermId ); declare_id!( /// The id of an interned rule inside the `TermEnv`. RuleId ); declare_id!( /// The id of a bound variable inside a `Bindings`. VarId ); /// The type environment. /// /// Keeps track of which symbols and rules have which types. #[derive(Clone, Debug)] pub struct TypeEnv { /// Arena of input ISLE source filenames. /// /// We refer to these indirectly through the `Pos::file` indices. pub filenames: Vec<Arc<str>>, /// Arena of input ISLE source contents. /// /// We refer to these indirectly through the `Pos::file` indices. pub file_texts: Vec<Arc<str>>, /// Arena of interned symbol names. /// /// Referred to indirectly via `Sym` indices. pub syms: Vec<String>, /// Map of already-interned symbol names to their `Sym` ids. pub sym_map: BTreeMap<String, Sym>, /// Arena of type definitions. /// /// Referred to indirectly via `TypeId`s. pub types: Vec<Type>, /// A map from a type name symbol to its `TypeId`. pub type_map: BTreeMap<Sym, TypeId>, /// The types of constant symbols. pub const_types: BTreeMap<Sym, TypeId>, /// Type errors that we've found so far during type checking. pub errors: Vec<Error>, } /// A type. #[derive(Clone, Debug, PartialEq, Eq)] pub enum Type { /// A primitive, `Copy` type. /// /// These are always defined externally, and we allow literals of these /// types to pass through from ISLE source code to the emitted Rust code. Primitive(TypeId, Sym, Pos), /// A sum type. /// /// Note that enums with only one variant are equivalent to a "struct". Enum { /// The name of this enum. name: Sym, /// This `enum`'s type id. id: TypeId, /// Is this `enum` defined in external Rust code? /// /// If so, ISLE will not emit a definition for it. If not, then it will /// emit a Rust definition for it. is_extern: bool, /// Whether this type should *not* derive `Debug`. /// /// Incompatible with `is_extern`. is_nodebug: bool, /// The different variants for this enum. variants: Vec<Variant>, /// The ISLE source position where this `enum` is defined. pos: Pos, }, } impl Type { /// Get the name of this `Type`. pub fn name<'a>(&self, tyenv: &'a TypeEnv) -> &'a str { match self { Self::Primitive(_, name, _) | Self::Enum { name, .. } => &tyenv.syms[name.index()], } } /// Get the position where this type was defined. 
pub fn pos(&self) -> Pos { match self { Self::Primitive(_, _, pos) | Self::Enum { pos, .. } => *pos, } } /// Is this a primitive type? pub fn is_prim(&self) -> bool { matches!(self, Type::Primitive(..)) } } /// A variant of an enum. #[derive(Clone, Debug, PartialEq, Eq)] pub struct Variant { /// The name of this variant. pub name: Sym, /// The full, prefixed-with-the-enum's-name name of this variant. /// /// E.g. if the enum is `Foo` and this variant is `Bar`, then the /// `fullname` is `Foo.Bar`. pub fullname: Sym, /// The id of this variant, i.e. the index of this variant within its /// enum's `Type::Enum::variants`. pub id: VariantId, /// The data fields of this enum variant. pub fields: Vec<Field>, } /// A field of a `Variant`. #[derive(Clone, Debug, PartialEq, Eq)] pub struct Field { /// The name of this field. pub name: Sym, /// This field's id. pub id: FieldId, /// The type of this field. pub ty: TypeId, } /// The term environment. /// /// This is sort of a typed and reorganized AST that more directly reflects ISLE /// semantics than the input ISLE source code (where as the AST is the /// opposite). #[derive(Clone, Debug)] pub struct TermEnv { /// Arena of interned terms defined in this ISLE program. /// /// This is indexed by `TermId`. pub terms: Vec<Term>, /// A map from am interned `Term`'s name to its `TermId`. pub term_map: BTreeMap<Sym, TermId>, /// Arena of interned rules defined in this ISLE program. /// /// This is indexed by `RuleId`. pub rules: Vec<Rule>, /// Map from (inner_ty, outer_ty) pairs to term IDs, giving the /// defined implicit type-converter terms we can try to use to fit /// types together. pub converters: BTreeMap<(TypeId, TypeId), TermId>, } /// A term. /// /// Maps parameter types to result types if this is a constructor term, or /// result types to parameter types if this is an extractor term. Or both if /// this term can be either a constructor or an extractor. #[derive(Clone, Debug, PartialEq, Eq)] pub struct Term { /// This term's id. pub id: TermId, /// The source position where this term was declared. pub decl_pos: Pos, /// The name of this term. pub name: Sym, /// The parameter types to this term. pub arg_tys: Vec<TypeId>, /// The result types of this term. pub ret_ty: TypeId, /// The kind of this term. pub kind: TermKind, } /// The kind of a term. #[derive(Clone, Debug, PartialEq, Eq)] pub enum TermKind { /// An enum variant constructor or extractor. EnumVariant { /// Which variant of the enum: e.g. for enum type `A` if a term is /// `(A.A1 ...)` then the variant ID corresponds to `A1`. variant: VariantId, }, /// A term declared via a `(decl ...)` form. Decl { /// Whether the term is marked as `pure`. pure: bool, /// The kind of this term's constructor, if any. constructor_kind: Option<ConstructorKind>, /// The kind of this term's extractor, if any. extractor_kind: Option<ExtractorKind>, }, } /// The kind of a constructor for a term. #[derive(Clone, Debug, PartialEq, Eq)] pub enum ConstructorKind { /// A term with "internal" rules that work in the forward direction. Becomes /// a compiled Rust function in the generated code. InternalConstructor, /// A term defined solely by an external constructor function. ExternalConstructor { /// The external name of the constructor function. name: Sym, }, } /// The kind of an extractor for a term. #[derive(Clone, Debug, PartialEq, Eq)] pub enum ExtractorKind { /// A term that defines an "extractor macro" in the LHS of a pattern. 
Its /// arguments take patterns and are simply substituted with the given /// patterns when used. InternalExtractor { /// This extractor's pattern. template: ast::Pattern, }, /// A term defined solely by an external extractor function. ExternalExtractor { /// The external name of the extractor function. name: Sym, /// Is the external extractor infallible? infallible: bool, /// The position where this external extractor was declared. pos: Pos, }, } /// An external function signature. #[derive(Clone, Debug)] pub struct ExternalSig { /// The name of the external function. pub func_name: String, /// The name of the external function, prefixed with the context trait. pub full_name: String, /// The types of this function signature's parameters. pub param_tys: Vec<TypeId>, /// The types of this function signature's results. pub ret_tys: Vec<TypeId>, /// Whether this signature is infallible or not. pub infallible: bool, } impl Term { /// Get this term's type. pub fn ty(&self) -> TypeId { self.ret_ty } /// Is this term an enum variant? pub fn is_enum_variant(&self) -> bool { matches!(self.kind, TermKind::EnumVariant { .. }) } /// Does this term have a constructor? pub fn has_constructor(&self) -> bool { matches!( self.kind, TermKind::EnumVariant { .. } | TermKind::Decl { constructor_kind: Some(_), .. } ) } /// Does this term have an extractor? pub fn has_extractor(&self) -> bool { matches!( self.kind, TermKind::EnumVariant { .. } | TermKind::Decl { extractor_kind: Some(_), .. } ) } /// Is this term's extractor external? pub fn has_external_extractor(&self) -> bool { matches!( self.kind, TermKind::Decl { extractor_kind: Some(ExtractorKind::ExternalExtractor { .. }), .. } ) } /// Is this term's constructor external? pub fn has_external_constructor(&self) -> bool { matches!( self.kind, TermKind::Decl { constructor_kind: Some(ConstructorKind::ExternalConstructor { .. }), .. } ) } /// Get this term's extractor's external function signature, if any. pub fn extractor_sig(&self, tyenv: &TypeEnv) -> Option<ExternalSig> { match &self.kind { TermKind::Decl { extractor_kind: Some(ExtractorKind::ExternalExtractor { name, infallible, .. }), .. } => Some(ExternalSig { func_name: tyenv.syms[name.index()].clone(), full_name: format!("C::{}", tyenv.syms[name.index()]), param_tys: vec![self.ret_ty], ret_tys: self.arg_tys.clone(), infallible: *infallible, }), _ => None, } } /// Get this term's constructor's external function signature, if any. pub fn constructor_sig(&self, tyenv: &TypeEnv) -> Option<ExternalSig> { match &self.kind { TermKind::Decl { constructor_kind: Some(ConstructorKind::ExternalConstructor { name }), pure, .. } => Some(ExternalSig { func_name: tyenv.syms[name.index()].clone(), full_name: format!("C::{}", tyenv.syms[name.index()]), param_tys: self.arg_tys.clone(), ret_tys: vec![self.ret_ty], infallible: !pure, }), TermKind::Decl { constructor_kind: Some(ConstructorKind::InternalConstructor { .. }), .. } => { let name = format!("constructor_{}", tyenv.syms[self.name.index()]); Some(ExternalSig { func_name: name.clone(), full_name: name, param_tys: self.arg_tys.clone(), ret_tys: vec![self.ret_ty], // Internal constructors are always fallible, even // if not pure, because ISLE allows partial // matching at the toplevel (an entry point can // fail to rewrite). infallible: false, }) } _ => None, } } } /// A term rewrite rule. #[derive(Clone, Debug)] pub struct Rule { /// This rule's id. pub id: RuleId, /// The left-hand side pattern that this rule matches. 
pub lhs: Pattern, /// Any subpattern "if-let" clauses. pub iflets: Vec<IfLet>, /// The right-hand side expression that this rule evaluates upon successful /// match. pub rhs: Expr, /// The priority of this rule, if any. pub prio: Option<i64>, /// The source position where this rule is defined. pub pos: Pos, } /// An `if-let` clause with a subpattern match on an expr after the /// main LHS matches. #[derive(Clone, Debug)] pub struct IfLet { /// The left-hand side pattern that this `if-let` clause matches /// against the expression below. pub lhs: Pattern, /// The right-hand side expression that this pattern /// evaluates. Must be pure. pub rhs: Expr, } /// A left-hand side pattern of some rule. #[derive(Clone, Debug, PartialEq, Eq)] pub enum Pattern { /// Bind a variable of the given type from the current value. /// /// Keep matching on the value with the subpattern. BindPattern(TypeId, VarId, Box<Pattern>), /// Match the current value against an already bound variable with the given /// type. Var(TypeId, VarId), /// Match the current value against a constant integer of the given integer /// type. ConstInt(TypeId, i64), /// Match the current value against a constant primitive value of the given /// primitive type. ConstPrim(TypeId, Sym), /// Match the current value against the given extractor term with the given /// arguments. Term(TypeId, TermId, Vec<Pattern>), /// Match anything of the given type successfully. Wildcard(TypeId), /// Match all of the following patterns of the given type. And(TypeId, Vec<Pattern>), } /// A right-hand side expression of some rule. #[derive(Clone, Debug, PartialEq, Eq)] pub enum Expr { /// Invoke this term constructor with the given arguments. Term(TypeId, TermId, Vec<Expr>), /// Get the value of a variable that was bound in the left-hand side. Var(TypeId, VarId), /// Get a constant integer. ConstInt(TypeId, i64), /// Get a constant primitive. ConstPrim(TypeId, Sym), /// Evaluate the nested expressions and bind their results to the given /// variables, then evaluate the body expression. Let { /// The type of the result of this let expression. ty: TypeId, /// The expressions that are evaluated and bound to the given variables. bindings: Vec<(VarId, TypeId, Box<Expr>)>, /// The body expression that is evaluated after the bindings. body: Box<Expr>, }, } impl Pattern { /// Get this pattern's type. pub fn ty(&self) -> TypeId { match self { &Self::BindPattern(t, ..) => t, &Self::Var(t, ..) => t, &Self::ConstInt(t, ..) => t, &Self::ConstPrim(t, ..) => t, &Self::Term(t, ..) => t, &Self::Wildcard(t, ..) => t, &Self::And(t, ..) => t, } } /// Get the root term of this pattern, if any. pub fn root_term(&self) -> Option<TermId> { match self { &Pattern::Term(_, term, _) => Some(term), &Pattern::BindPattern(_, _, ref subpat) => subpat.root_term(), _ => None, } } } impl Expr { /// Get this expression's type. pub fn ty(&self) -> TypeId { match self { &Self::Term(t, ..) => t, &Self::Var(t, ..) => t, &Self::ConstInt(t, ..) => t, &Self::ConstPrim(t, ..) => t, &Self::Let { ty: t, .. } => t, } } } /// Given an `Option<T>`, unwrap the inner `T` value, or `continue` if it is /// `None`. /// /// Useful for when we encountered an error earlier in our analysis but kept /// going to find more errors, and now we've run into some missing data that /// would have been filled in if we didn't hit that original error, but we want /// to keep going to find more errors. macro_rules! 
unwrap_or_continue { ($e:expr) => { match $e { Some(x) => x, None => continue, } }; } impl TypeEnv { /// Construct the type environment from the AST. pub fn from_ast(defs: &ast::Defs) -> Result<TypeEnv> { let mut tyenv = TypeEnv { filenames: defs.filenames.clone(), file_texts: defs.file_texts.clone(), syms: vec![], sym_map: BTreeMap::new(), types: vec![], type_map: BTreeMap::new(), const_types: BTreeMap::new(), errors: vec![], }; // Traverse defs, assigning type IDs to type names. We'll fill // in types on a second pass. for def in &defs.defs { match def { &ast::Def::Type(ref td) => { let tid = TypeId(tyenv.type_map.len()); let name = tyenv.intern_mut(&td.name); if let Some(existing) = tyenv.type_map.get(&name).copied() { tyenv.report_error( td.pos, format!("Type with name '{}' defined more than once", td.name.0), ); let pos = unwrap_or_continue!(tyenv.types.get(existing.index())).pos(); tyenv.report_error( pos, format!("Type with name '{}' already defined here", td.name.0), ); continue; } tyenv.type_map.insert(name, tid); } _ => {} } } // Now lower AST nodes to type definitions, raising errors // where typenames of fields are undefined or field names are // duplicated. let mut tid = 0; for def in &defs.defs { match def { &ast::Def::Type(ref td) => { let ty = unwrap_or_continue!(tyenv.type_from_ast(TypeId(tid), td)); tyenv.types.push(ty); tid += 1; } _ => {} } } // Now collect types for extern constants. for def in &defs.defs { match def { &ast::Def::Extern(ast::Extern::Const { ref name, ref ty, pos, }) => { let ty = tyenv.intern_mut(ty); let ty = match tyenv.type_map.get(&ty) { Some(ty) => *ty, None => { tyenv.report_error(pos, "Unknown type for constant"); continue; } }; let name = tyenv.intern_mut(name); tyenv.const_types.insert(name, ty); } _ => {} } } tyenv.return_errors()?; Ok(tyenv) } fn return_errors(&mut self) -> Result<()> { match self.errors.len() { 0 => Ok(()), 1 => Err(self.errors.pop().unwrap()), _ => Err(Error::Errors(std::mem::take(&mut self.errors))), } } fn type_from_ast(&mut self, tid: TypeId, ty: &ast::Type) -> Option<Type> { let name = self.intern(&ty.name).unwrap(); match &ty.ty { &ast::TypeValue::Primitive(ref id, ..) => { if ty.is_nodebug { self.report_error(ty.pos, "primitive types cannot be marked `nodebug`"); return None; } if ty.is_extern { self.report_error(ty.pos, "primitive types cannot be marked `extern`"); return None; } Some(Type::Primitive(tid, self.intern_mut(id), ty.pos)) } &ast::TypeValue::Enum(ref ty_variants, ..) 
=> { if ty.is_extern && ty.is_nodebug { self.report_error(ty.pos, "external types cannot be marked `nodebug`"); return None; } let mut variants = vec![]; for variant in ty_variants { let combined_ident = ast::Ident(format!("{}.{}", ty.name.0, variant.name.0), variant.name.1); let fullname = self.intern_mut(&combined_ident); let name = self.intern_mut(&variant.name); let id = VariantId(variants.len()); if variants.iter().any(|v: &Variant| v.name == name) { self.report_error( variant.pos, format!("Duplicate variant name in type: '{}'", variant.name.0), ); return None; } let mut fields = vec![]; for field in &variant.fields { let field_name = self.intern_mut(&field.name); if fields.iter().any(|f: &Field| f.name == field_name) { self.report_error( field.pos, format!( "Duplicate field name '{}' in variant '{}' of type", field.name.0, variant.name.0 ), ); return None; } let field_ty = self.intern_mut(&field.ty); let field_tid = match self.type_map.get(&field_ty) { Some(tid) => *tid, None => { self.report_error( field.ty.1, format!( "Unknown type '{}' for field '{}' in variant '{}'", field.ty.0, field.name.0, variant.name.0 ), ); return None; } }; fields.push(Field { name: field_name, id: FieldId(fields.len()), ty: field_tid, }); } variants.push(Variant { name, fullname, id, fields, }); } Some(Type::Enum { name, id: tid, is_extern: ty.is_extern, is_nodebug: ty.is_nodebug, variants, pos: ty.pos, }) } } } fn error(&self, pos: Pos, msg: impl Into<String>) -> Error { let e = Error::TypeError { msg: msg.into(), src: Source::new( self.filenames[pos.file].clone(), self.file_texts[pos.file].clone(), ), span: miette::SourceSpan::from((pos.offset, 1)), }; log::trace!("{}", e); e } fn report_error(&mut self, pos: Pos, msg: impl Into<String>) { let err = self.error(pos, msg); self.errors.push(err); } fn intern_mut(&mut self, ident: &ast::Ident) -> Sym { if let Some(s) = self.sym_map.get(&ident.0).copied() { s } else { let s = Sym(self.syms.len()); self.syms.push(ident.0.clone()); self.sym_map.insert(ident.0.clone(), s); s } } fn intern(&self, ident: &ast::Ident) -> Option<Sym> { self.sym_map.get(&ident.0).cloned() } } #[derive(Clone, Debug)] struct Bindings { next_var: usize, vars: Vec<BoundVar>, } #[derive(Clone, Debug)] struct BoundVar { name: Sym, id: VarId, ty: TypeId, } impl TermEnv { /// Construct the term environment from the AST and the type environment. 
pub fn from_ast(tyenv: &mut TypeEnv, defs: &ast::Defs) -> Result<TermEnv> { let mut env = TermEnv { terms: vec![], term_map: BTreeMap::new(), rules: vec![], converters: BTreeMap::new(), }; env.collect_term_sigs(tyenv, defs); env.collect_enum_variant_terms(tyenv); tyenv.return_errors()?; env.collect_constructors(tyenv, defs); env.collect_extractor_templates(tyenv, defs); tyenv.return_errors()?; env.collect_converters(tyenv, defs); tyenv.return_errors()?; env.collect_externs(tyenv, defs); tyenv.return_errors()?; env.collect_rules(tyenv, defs); env.check_for_undefined_decls(tyenv, defs); env.check_for_expr_terms_without_constructors(tyenv, defs); tyenv.return_errors()?; Ok(env) } fn collect_term_sigs(&mut self, tyenv: &mut TypeEnv, defs: &ast::Defs) { for def in &defs.defs { match def { &ast::Def::Decl(ref decl) => { let name = tyenv.intern_mut(&decl.term); if let Some(tid) = self.term_map.get(&name) { tyenv.report_error( decl.pos, format!("Duplicate decl for '{}'", decl.term.0), ); tyenv.report_error( self.terms[tid.index()].decl_pos, format!("Duplicate decl for '{}'", decl.term.0), ); } let arg_tys = decl .arg_tys .iter() .map(|id| { let sym = tyenv.intern_mut(id); tyenv.type_map.get(&sym).cloned().ok_or_else(|| { tyenv.report_error(id.1, format!("Unknown arg type: '{}'", id.0)); () }) }) .collect::<std::result::Result<Vec<_>, _>>(); let arg_tys = match arg_tys { Ok(a) => a, Err(_) => { continue; } }; let ret_ty = { let sym = tyenv.intern_mut(&decl.ret_ty); match tyenv.type_map.get(&sym).cloned() { Some(t) => t, None => { tyenv.report_error( decl.ret_ty.1, format!("Unknown return type: '{}'", decl.ret_ty.0), ); continue; } } }; let tid = TermId(self.terms.len()); self.term_map.insert(name, tid); self.terms.push(Term { id: tid, decl_pos: decl.pos, name, arg_tys, ret_ty, kind: TermKind::Decl { constructor_kind: None, extractor_kind: None, pure: decl.pure, }, }); } _ => {} } } } fn collect_enum_variant_terms(&mut self, tyenv: &mut TypeEnv) { 'types: for i in 0..tyenv.types.len() { let ty = &tyenv.types[i]; match ty { &Type::Enum { pos, id, ref variants, .. } => { for variant in variants { if self.term_map.contains_key(&variant.fullname) { let variant_name = tyenv.syms[variant.fullname.index()].clone(); tyenv.report_error( pos, format!("Duplicate enum variant constructor: '{}'", variant_name,), ); continue 'types; } let tid = TermId(self.terms.len()); let arg_tys = variant.fields.iter().map(|fld| fld.ty).collect::<Vec<_>>(); let ret_ty = id; self.terms.push(Term { id: tid, decl_pos: pos, name: variant.fullname, arg_tys, ret_ty, kind: TermKind::EnumVariant { variant: variant.id, }, }); self.term_map.insert(variant.fullname, tid); } } _ => {} } } } fn collect_constructors(&mut self, tyenv: &mut TypeEnv, defs: &ast::Defs) { for def in &defs.defs { log::debug!("collect_constructors from def: {:?}", def); match def { &ast::Def::Rule(ref rule) => { let pos = rule.pos; let term = match rule.pattern.root_term() { Some(t) => t, None => { tyenv.report_error( pos, "Rule does not have a term at the LHS root".to_string(), ); continue; } }; let sym = tyenv.intern_mut(&term); let term = match self.term_map.get(&sym) { Some(&tid) => tid, None => { tyenv .report_error(pos, "Rule LHS root term is not defined".to_string()); continue; } }; let termdata = &mut self.terms[term.index()]; match &mut termdata.kind { TermKind::Decl { constructor_kind, .. 
} => { match constructor_kind { None => { *constructor_kind = Some(ConstructorKind::InternalConstructor); } Some(ConstructorKind::InternalConstructor) => { // OK, no error; multiple rules can apply to // one internal constructor term. } Some(ConstructorKind::ExternalConstructor { .. }) => { tyenv.report_error( pos, "Rule LHS root term is incorrect kind; cannot \ be external constructor" .to_string(), ); continue; } } } TermKind::EnumVariant { .. } => { tyenv.report_error( pos, "Rule LHS root term is incorrect kind; cannot be enum variant" .to_string(), ); continue; } } } _ => {} } } } fn collect_extractor_templates(&mut self, tyenv: &mut TypeEnv, defs: &ast::Defs) { let mut extractor_call_graph = BTreeMap::new(); for def in &defs.defs { if let &ast::Def::Extractor(ref ext) = def { let sym = tyenv.intern_mut(&ext.term); let term = match self.term_map.get(&sym) { Some(x) => x, None => { tyenv.report_error( ext.pos, "Extractor macro body definition on a non-existent term".to_string(), ); return; } }; let template = ext.template.make_macro_template(&ext.args[..]); log::trace!("extractor def: {:?} becomes template {:?}", def, template); let mut callees = BTreeSet::new(); template.terms(&mut |pos, t| { let t = tyenv.intern_mut(t); callees.insert(t); if !self.term_map.contains_key(&t) { tyenv.report_error( pos, format!( "`{}` extractor definition references unknown term `{}`", ext.term.0, tyenv.syms[t.index()] ), ); } }); extractor_call_graph.insert(sym, callees); let termdata = &mut self.terms[term.index()]; match &mut termdata.kind { TermKind::EnumVariant { .. } => { tyenv.report_error( ext.pos, "Extractor macro body defined on term of incorrect kind; cannot be an \ enum variant", ); continue; } TermKind::Decl { extractor_kind, .. } => match extractor_kind { None => { *extractor_kind = Some(ExtractorKind::InternalExtractor { template }); } Some(ext_kind) => { tyenv.report_error( ext.pos, "Duplicate extractor definition".to_string(), ); let pos = match ext_kind { ExtractorKind::InternalExtractor { template } => template.pos(), ExtractorKind::ExternalExtractor { pos, .. } => *pos, }; tyenv.report_error( pos, "Extractor was already defined here".to_string(), ); continue; } }, } } } // Check for cycles in the extractor call graph. let mut stack = vec![]; 'outer: for root in extractor_call_graph.keys().copied() { stack.clear(); stack.push((root, vec![root], BTreeSet::new())); while let Some((caller, path, mut seen)) = stack.pop() { let is_new = seen.insert(caller); if is_new { if let Some(callees) = extractor_call_graph.get(&caller) { stack.extend(callees.iter().map(|callee| { let mut path = path.clone(); path.push(*callee); (*callee, path, seen.clone()) })); } } else { let term = match self.term_map.get(&caller) { Some(t) => t, None => { // Some other error must have already been recorded // if we don't have the caller's term data. assert!(!tyenv.errors.is_empty()); continue 'outer; } }; let pos = match &self.terms[term.index()].kind { TermKind::Decl { extractor_kind: Some(ExtractorKind::InternalExtractor { template }), .. } => template.pos(), _ => { // Again, there must have already been errors // recorded. 
assert!(!tyenv.errors.is_empty()); continue 'outer; } }; let path: Vec<_> = path .iter() .map(|sym| tyenv.syms[sym.index()].as_str()) .collect(); let msg = format!( "`{}` extractor definition is recursive: {}", tyenv.syms[root.index()], path.join(" -> ") ); tyenv.report_error(pos, msg); continue 'outer; } } } } fn collect_converters(&mut self, tyenv: &mut TypeEnv, defs: &ast::Defs) { for def in &defs.defs { match def { &ast::Def::Converter(ast::Converter { ref term, ref inner_ty, ref outer_ty, pos, }) => { let inner_ty_sym = tyenv.intern_mut(inner_ty); let inner_ty_id = match tyenv.type_map.get(&inner_ty_sym) { Some(ty) => *ty, None => { tyenv.report_error( inner_ty.1, format!("Unknown inner type for converter: '{}'", inner_ty.0), ); continue; } }; let outer_ty_sym = tyenv.intern_mut(outer_ty); let outer_ty_id = match tyenv.type_map.get(&outer_ty_sym) { Some(ty) => *ty, None => { tyenv.report_error( outer_ty.1, format!("Unknown outer type for converter: '{}'", outer_ty.0), ); continue; } }; let term_sym = tyenv.intern_mut(term); let term_id = match self.term_map.get(&term_sym) { Some(term_id) => *term_id, None => { tyenv.report_error( term.1, format!("Unknown term for converter: '{}'", term.0), ); continue; } }; match self.converters.entry((inner_ty_id, outer_ty_id)) { Entry::Vacant(v) => { v.insert(term_id); } Entry::Occupied(_) => { tyenv.report_error( pos, format!( "Converter already exists for this type pair: '{}', '{}'", inner_ty.0, outer_ty.0 ), ); continue; } } } _ => {} } } } fn collect_externs(&mut self, tyenv: &mut TypeEnv, defs: &ast::Defs) { for def in &defs.defs { match def { &ast::Def::Extern(ast::Extern::Constructor { ref term, ref func, pos, }) => { let term_sym = tyenv.intern_mut(term); let func_sym = tyenv.intern_mut(func); let term_id = match self.term_map.get(&term_sym) { Some(term) => term, None => { tyenv.report_error( pos, format!("Constructor declared on undefined term '{}'", term.0), ); continue; } }; let termdata = &mut self.terms[term_id.index()]; match &mut termdata.kind { TermKind::Decl { constructor_kind, .. } => match constructor_kind { None => { *constructor_kind = Some(ConstructorKind::ExternalConstructor { name: func_sym }); } Some(ConstructorKind::InternalConstructor) => { tyenv.report_error( pos, format!( "External constructor declared on term that already has rules: {}", term.0, ), ); } Some(ConstructorKind::ExternalConstructor { .. }) => { tyenv.report_error( pos, "Duplicate external constructor definition".to_string(), ); } }, TermKind::EnumVariant { .. } => { tyenv.report_error( pos, format!( "External constructor cannot be defined on enum variant: {}", term.0, ), ); } } } &ast::Def::Extern(ast::Extern::Extractor { ref term, ref func, pos, infallible, }) => { let term_sym = tyenv.intern_mut(term); let func_sym = tyenv.intern_mut(func); let term_id = match self.term_map.get(&term_sym) { Some(term) => term, None => { tyenv.report_error( pos, format!("Extractor declared on undefined term '{}'", term.0), ); continue; } }; let termdata = &mut self.terms[term_id.index()]; match &mut termdata.kind { TermKind::Decl { extractor_kind, .. } => match extractor_kind { None => { *extractor_kind = Some(ExtractorKind::ExternalExtractor { name: func_sym, infallible, pos, }); } Some(ExtractorKind::ExternalExtractor { pos: pos2, .. 
}) => { tyenv.report_error( pos, "Duplicate external extractor definition".to_string(), ); tyenv.report_error( *pos2, "External extractor already defined".to_string(), ); continue; } Some(ExtractorKind::InternalExtractor { template }) => { tyenv.report_error( pos, "Cannot define external extractor for term that already has an \ internal extractor macro body defined" .to_string(), ); tyenv.report_error( template.pos(), "Internal extractor macro body already defined".to_string(), ); continue; } }, TermKind::EnumVariant { .. } => { tyenv.report_error( pos, format!("Cannot define extractor for enum variant '{}'", term.0), ); continue; } } } _ => {} } } } fn collect_rules(&mut self, tyenv: &mut TypeEnv, defs: &ast::Defs) { for def in &defs.defs { match def { &ast::Def::Rule(ref rule) => { let pos = rule.pos; let mut bindings = Bindings { next_var: 0, vars: vec![], }; let rule_term = match rule.pattern.root_term() { Some(name) => { let sym = tyenv.intern_mut(name); match self.term_map.get(&sym) { Some(term) => *term, None => { tyenv.report_error( pos, "Cannot define a rule for an unknown term".to_string(), ); continue; } } } None => { tyenv.report_error( pos, "Rule does not have a term at the root of its left-hand side" .to_string(), ); continue; } }; let pure = match &self.terms[rule_term.index()].kind { &TermKind::Decl { pure, .. } => pure, _ => { tyenv.report_error( pos, "Cannot define a rule on a left-hand-side that is an enum variant" .to_string(), ); continue; } }; let (lhs, ty) = unwrap_or_continue!(self.translate_pattern( tyenv, rule_term, &rule.pattern, None, &mut bindings, /* is_root = */ true, )); let iflets = unwrap_or_continue!(self.translate_iflets( tyenv, rule_term, &rule.iflets[..], &mut bindings, )); let rhs = unwrap_or_continue!(self.translate_expr( tyenv, &rule.expr, Some(ty), &mut bindings, pure, )); let rid = RuleId(self.rules.len()); self.rules.push(Rule { id: rid, lhs, iflets, rhs, prio: rule.prio, pos, }); } _ => {} } } } fn check_for_undefined_decls(&self, tyenv: &mut TypeEnv, defs: &ast::Defs) { for def in &defs.defs { if let ast::Def::Decl(decl) = def { let sym = tyenv.intern_mut(&decl.term); let term = self.term_map[&sym]; let term = &self.terms[term.index()]; if !term.has_constructor() && !term.has_extractor() { tyenv.report_error( decl.pos, format!( "no rules, extractor, or external definition for declaration '{}'", decl.term.0 ), ); } } } } fn check_for_expr_terms_without_constructors(&self, tyenv: &mut TypeEnv, defs: &ast::Defs) { for def in &defs.defs { if let ast::Def::Rule(rule) = def { rule.expr.terms(&mut |pos, ident| { let sym = tyenv.intern_mut(ident); let term = match self.term_map.get(&sym) { None => { debug_assert!(!tyenv.errors.is_empty()); return; } Some(t) => t, }; let term = &self.terms[term.index()]; if !term.has_constructor() { tyenv.report_error( pos, format!( "term `{}` cannot be used in an expression because \ it does not have a constructor", ident.0 ), ) } }); } } } fn maybe_implicit_convert_pattern( &self, tyenv: &mut TypeEnv, pattern: &ast::Pattern, inner_ty: TypeId, outer_ty: TypeId, ) -> Option<ast::Pattern> { if let Some(converter_term) = self.converters.get(&(inner_ty, outer_ty)) { if self.terms[converter_term.index()].has_extractor() { // This is a little awkward: we have to // convert back to an Ident, to be // re-resolved. The pos doesn't matter // as it shouldn't result in a lookup // failure. 
let converter_term_ident = Ident( tyenv.syms[self.terms[converter_term.index()].name.index()].clone(), pattern.pos(), ); let expanded_pattern = ast::Pattern::Term { sym: converter_term_ident, pos: pattern.pos(), args: vec![pattern.clone()], }; return Some(expanded_pattern); } } None } fn translate_pattern( &self, tyenv: &mut TypeEnv, rule_term: TermId, pat: &ast::Pattern, expected_ty: Option<TypeId>, bindings: &mut Bindings, is_root: bool, ) -> Option<(Pattern, TypeId)> { log::trace!("translate_pattern: {:?}", pat); log::trace!("translate_pattern: bindings = {:?}", bindings); match pat { // TODO: flag on primitive type decl indicating it's an integer type? &ast::Pattern::ConstInt { val, pos } => { let ty = match expected_ty { Some(t) => t, None => { tyenv.report_error(pos, "Need an implied type for an integer constant"); return None; } }; if !tyenv.types[ty.index()].is_prim() { tyenv.report_error( pos, format!( "expected non-primitive type {}, but found integer literal '{}'", tyenv.types[ty.index()].name(tyenv), val, ), ); } Some((Pattern::ConstInt(ty, val), ty)) } &ast::Pattern::ConstPrim { ref val, pos } => { let val = tyenv.intern_mut(val); let const_ty = match tyenv.const_types.get(&val) { Some(ty) => *ty, None => { tyenv.report_error(pos, "Unknown constant"); return None; } }; if expected_ty.is_some() && expected_ty != Some(const_ty) { tyenv.report_error(pos, "Type mismatch for constant"); } Some((Pattern::ConstPrim(const_ty, val), const_ty)) } &ast::Pattern::Wildcard { pos } => { let ty = match expected_ty { Some(t) => t, None => { tyenv.report_error(pos, "Need an implied type for a wildcard"); return None; } }; Some((Pattern::Wildcard(ty), ty)) } &ast::Pattern::And { ref subpats, pos } => { let mut expected_ty = expected_ty; let mut children = vec![]; for subpat in subpats { let (subpat, ty) = unwrap_or_continue!(self.translate_pattern( tyenv, rule_term, &*subpat, expected_ty, bindings, /* is_root = */ false, )); expected_ty = expected_ty.or(Some(ty)); children.push(subpat); } if expected_ty.is_none() { tyenv.report_error(pos, "No type for (and ...) form.".to_string()); return None; } let ty = expected_ty.unwrap(); Some((Pattern::And(ty, children), ty)) } &ast::Pattern::BindPattern { ref var, ref subpat, pos, } => { // Do the subpattern first so we can resolve the type for sure. let (subpat, ty) = self.translate_pattern( tyenv, rule_term, &*subpat, expected_ty, bindings, /* is_root = */ false, )?; let name = tyenv.intern_mut(var); if bindings.vars.iter().any(|bv| bv.name == name) { tyenv.report_error( pos, format!("Re-bound variable name in LHS pattern: '{}'", var.0), ); // Try to keep going. } let id = VarId(bindings.next_var); bindings.next_var += 1; log::trace!("binding var {:?}", var.0); bindings.vars.push(BoundVar { name, id, ty }); Some((Pattern::BindPattern(ty, id, Box::new(subpat)), ty)) } &ast::Pattern::Var { ref var, pos } => { // Look up the variable; if it has already been bound, // then this becomes a `Var` node (which matches the // existing bound value), otherwise it becomes a // `BindPattern` with a wildcard subpattern to capture // at this location. 
let name = tyenv.intern_mut(var); match bindings.vars.iter().rev().find(|bv| bv.name == name) { None => { let ty = match expected_ty { Some(ty) => ty, None => { tyenv.report_error( pos, format!("Variable pattern '{}' not allowed in context without explicit type", var.0), ); return None; } }; let id = VarId(bindings.next_var); bindings.next_var += 1; log::trace!("binding var {:?}", var.0); bindings.vars.push(BoundVar { name, id, ty }); Some(( Pattern::BindPattern(ty, id, Box::new(Pattern::Wildcard(ty))), ty, )) } Some(bv) => { let ty = match expected_ty { None => bv.ty, Some(expected_ty) if expected_ty == bv.ty => bv.ty, Some(expected_ty) => { tyenv.report_error( pos, format!( "Mismatched types: pattern expects type '{}' but already-bound var '{}' has type '{}'", tyenv.types[expected_ty.index()].name(tyenv), var.0, tyenv.types[bv.ty.index()].name(tyenv))); bv.ty // Try to keep going for more errors. } }; Some((Pattern::Var(ty, bv.id), ty)) } } } &ast::Pattern::Term { ref sym, ref args, pos, } => { let name = tyenv.intern_mut(&sym); // Look up the term. let tid = match self.term_map.get(&name) { Some(t) => t, None => { tyenv.report_error(pos, format!("Unknown term in pattern: '{}'", sym.0)); return None; } }; // Get the return type and arg types. Verify the // expected type of this pattern, if any, against the // return type of the term. Insert an implicit // converter if needed. let ret_ty = self.terms[tid.index()].ret_ty; let ty = match expected_ty { None => ret_ty, Some(expected_ty) if expected_ty == ret_ty => ret_ty, Some(expected_ty) => { // Can we do an implicit type conversion? Look // up the converter term, if any. If one has // been registered, and the term has an // extractor, then build an expanded AST node // right here and recurse on it. if let Some(expanded_pattern) = self.maybe_implicit_convert_pattern(tyenv, pat, ret_ty, expected_ty) { return self.translate_pattern( tyenv, rule_term, &expanded_pattern, Some(expected_ty), bindings, /* is_root = */ false, ); } tyenv.report_error( pos, format!( "Mismatched types: pattern expects type '{}' but term has return type '{}'", tyenv.types[expected_ty.index()].name(tyenv), tyenv.types[ret_ty.index()].name(tyenv))); ret_ty // Try to keep going for more errors. } }; // Check that we have the correct argument count. if self.terms[tid.index()].arg_tys.len() != args.len() { tyenv.report_error( pos, format!( "Incorrect argument count for term '{}': got {}, expect {}", sym.0, args.len(), self.terms[tid.index()].arg_tys.len() ), ); } let termdata = &self.terms[tid.index()]; match &termdata.kind { TermKind::Decl { constructor_kind: Some(ConstructorKind::InternalConstructor), .. } if is_root && *tid == rule_term => {} TermKind::EnumVariant { .. } => {} TermKind::Decl { extractor_kind: Some(ExtractorKind::ExternalExtractor { .. }), .. } => {} TermKind::Decl { extractor_kind: Some(ExtractorKind::InternalExtractor { ref template }), .. } => { // Expand the extractor macro! We create a map // from macro args to AST pattern trees and // then evaluate the template with these // substitutions. let mut macro_args: Vec<ast::Pattern> = vec![]; for template_arg in args { macro_args.push(template_arg.clone()); } log::trace!("internal extractor macro args = {:?}", args); let pat = template.subst_macro_args(&macro_args[..])?; return self.translate_pattern( tyenv, rule_term, &pat, expected_ty, bindings, /* is_root = */ false, ); } TermKind::Decl { extractor_kind: None, .. 
} => { tyenv.report_error( pos, format!( "Cannot use term '{}' that does not have a defined extractor in a \ left-hand side pattern", sym.0 ), ); } } // Resolve subpatterns. let mut subpats = vec![]; for (i, arg) in args.iter().enumerate() { let term = unwrap_or_continue!(self.terms.get(tid.index())); let arg_ty = unwrap_or_continue!(term.arg_tys.get(i).copied()); let (subpat, _) = unwrap_or_continue!(self.translate_pattern( tyenv, rule_term, arg, Some(arg_ty), bindings, /* is_root = */ false, )); subpats.push(subpat); } Some((Pattern::Term(ty, *tid, subpats), ty)) } &ast::Pattern::MacroArg { .. } => unreachable!(), } } fn maybe_implicit_convert_expr( &self, tyenv: &mut TypeEnv, expr: &ast::Expr, inner_ty: TypeId, outer_ty: TypeId, ) -> Option<ast::Expr> { // Is there a converter for this type mismatch? if let Some(converter_term) = self.converters.get(&(inner_ty, outer_ty)) { if self.terms[converter_term.index()].has_constructor() { let converter_ident = ast::Ident( tyenv.syms[self.terms[converter_term.index()].name.index()].clone(), expr.pos(), ); return Some(ast::Expr::Term { sym: converter_ident, pos: expr.pos(), args: vec![expr.clone()], }); } } None } fn translate_expr( &self, tyenv: &mut TypeEnv, expr: &ast::Expr, ty: Option<TypeId>, bindings: &mut Bindings, pure: bool, ) -> Option<Expr> { log::trace!("translate_expr: {:?}", expr); match expr { &ast::Expr::Term { ref sym, ref args, pos, } => { // Look up the term. let name = tyenv.intern_mut(&sym); // Look up the term. let tid = match self.term_map.get(&name) { Some(t) => t, None => { tyenv.report_error(pos, format!("Unknown term in pattern: '{}'", sym.0)); return None; } }; // Get the return type and arg types. Verify the // expected type of this pattern, if any, against the // return type of the term, and determine whether we // are doing an implicit conversion. Report an error // if types don't match and no conversion is possible. let ret_ty = self.terms[tid.index()].ret_ty; let ty = if ty.is_some() && ret_ty != ty.unwrap() { // Is there a converter for this type mismatch? if let Some(expanded_expr) = self.maybe_implicit_convert_expr(tyenv, expr, ret_ty, ty.unwrap()) { return self.translate_expr(tyenv, &expanded_expr, ty, bindings, pure); } tyenv.report_error( pos, format!("Mismatched types: expression expects type '{}' but term has return type '{}'", tyenv.types[ty.unwrap().index()].name(tyenv), tyenv.types[ret_ty.index()].name(tyenv))); // Keep going, to discover more errors. ret_ty } else { ret_ty }; // Check that the term's constructor is pure. match &self.terms[tid.index()].kind { TermKind::Decl { pure: ctor_is_pure, .. } => { if pure && !ctor_is_pure { tyenv.report_error( pos, format!( "Used non-pure constructor '{}' in pure expression context", tyenv.syms[name.index()] ), ); } } _ => {} } // Check that we have the correct argument count. if self.terms[tid.index()].arg_tys.len() != args.len() { tyenv.report_error( pos, format!( "Incorrect argument count for term '{}': got {}, expect {}", sym.0, args.len(), self.terms[tid.index()].arg_tys.len() ), ); } // Resolve subexpressions. 
let mut subexprs = vec![]; for (i, arg) in args.iter().enumerate() { let term = unwrap_or_continue!(self.terms.get(tid.index())); let arg_ty = unwrap_or_continue!(term.arg_tys.get(i).copied()); let subexpr = unwrap_or_continue!(self.translate_expr( tyenv, arg, Some(arg_ty), bindings, pure )); subexprs.push(subexpr); } Some(Expr::Term(ty, *tid, subexprs)) } &ast::Expr::Var { ref name, pos } => { let sym = tyenv.intern_mut(name); // Look through bindings, innermost (most recent) first. let bv = match bindings.vars.iter().rev().find(|b| b.name == sym) { None => { tyenv.report_error(pos, format!("Unknown variable '{}'", name.0)); return None; } Some(bv) => bv, }; // Verify type. Maybe do an implicit conversion. if ty.is_some() && bv.ty != ty.unwrap() { // Is there a converter for this type mismatch? if let Some(expanded_expr) = self.maybe_implicit_convert_expr(tyenv, expr, bv.ty, ty.unwrap()) { return self.translate_expr(tyenv, &expanded_expr, ty, bindings, pure); } tyenv.report_error( pos, format!( "Variable '{}' has type {} but we need {} in context", name.0, tyenv.types[bv.ty.index()].name(tyenv), tyenv.types[ty.unwrap().index()].name(tyenv) ), ); } Some(Expr::Var(bv.ty, bv.id)) } &ast::Expr::ConstInt { val, pos } => { if ty.is_none() { tyenv.report_error( pos, "integer literal in a context that needs an explicit type".to_string(), ); return None; } let ty = ty.unwrap(); if !tyenv.types[ty.index()].is_prim() { tyenv.report_error( pos, format!( "expected non-primitive type {}, but found integer literal '{}'", tyenv.types[ty.index()].name(tyenv), val, ), ); } Some(Expr::ConstInt(ty, val)) } &ast::Expr::ConstPrim { ref val, pos } => { let val = tyenv.intern_mut(val); let const_ty = match tyenv.const_types.get(&val) { Some(ty) => *ty, None => { tyenv.report_error(pos, "Unknown constant"); return None; } }; if ty.is_some() && const_ty != ty.unwrap() { tyenv.report_error( pos, format!( "Constant '{}' has wrong type: expected {}, but is actually {}", tyenv.syms[val.index()], tyenv.types[ty.unwrap().index()].name(tyenv), tyenv.types[const_ty.index()].name(tyenv) ), ); return None; } Some(Expr::ConstPrim(const_ty, val)) } &ast::Expr::Let { ref defs, ref body, pos, } => { let orig_binding_len = bindings.vars.len(); // For each new binding... let mut let_defs = vec![]; for def in defs { // Check that the given variable name does not already exist. let name = tyenv.intern_mut(&def.var); if bindings.vars.iter().any(|bv| bv.name == name) { tyenv.report_error(pos, format!("Variable '{}' already bound", def.var.0)); } // Look up the type. let tysym = match tyenv.intern(&def.ty) { Some(ty) => ty, None => { tyenv.report_error( pos, format!("Unknown type {} for variable '{}'", def.ty.0, def.var.0), ); continue; } }; let tid = match tyenv.type_map.get(&tysym) { Some(tid) => *tid, None => { tyenv.report_error( pos, format!("Unknown type {} for variable '{}'", def.ty.0, def.var.0), ); continue; } }; // Evaluate the variable's value. let val = Box::new(unwrap_or_continue!(self.translate_expr( tyenv, &def.val, Some(tid), bindings, pure ))); // Bind the var with the given type. let id = VarId(bindings.next_var); bindings.next_var += 1; bindings.vars.push(BoundVar { name, id, ty: tid }); let_defs.push((id, tid, val)); } // Evaluate the body, expecting the type of the overall let-expr. let body = Box::new(self.translate_expr(tyenv, body, ty, bindings, pure)?); let body_ty = body.ty(); // Pop the bindings. 
bindings.vars.truncate(orig_binding_len); Some(Expr::Let { ty: body_ty, bindings: let_defs, body, }) } } } fn translate_iflets( &self, tyenv: &mut TypeEnv, rule_term: TermId, iflets: &[ast::IfLet], bindings: &mut Bindings, ) -> Option<Vec<IfLet>> { let mut translated = vec![]; for iflet in iflets { translated.push(unwrap_or_continue!( self.translate_iflet(tyenv, rule_term, iflet, bindings) )); } Some(translated) } fn translate_iflet( &self, tyenv: &mut TypeEnv, rule_term: TermId, iflet: &ast::IfLet, bindings: &mut Bindings, ) -> Option<IfLet> { // Translate the expr first. Ensure it's pure. let rhs = self.translate_expr(tyenv, &iflet.expr, None, bindings, /* pure = */ true)?; let ty = rhs.ty(); let (lhs, _lhs_ty) = self.translate_pattern( tyenv, rule_term, &iflet.pattern, Some(ty), bindings, /* is_root = */ true, )?; Some(IfLet { lhs, rhs }) } } #[cfg(test)] mod test { use super::*; use crate::ast::Ident; use crate::lexer::{Lexer, Pos}; use crate::parser::parse; #[test] fn build_type_env() { let text = r" (type u32 (primitive u32)) (type A extern (enum (B (f1 u32) (f2 u32)) (C (f1 u32)))) "; let ast = parse(Lexer::from_str(text, "file.isle").unwrap()).expect("should parse"); let tyenv = TypeEnv::from_ast(&ast).expect("should not have type-definition errors"); let sym_a = tyenv .intern(&Ident("A".to_string(), Default::default())) .unwrap(); let sym_b = tyenv .intern(&Ident("B".to_string(), Default::default())) .unwrap(); let sym_c = tyenv .intern(&Ident("C".to_string(), Default::default())) .unwrap(); let sym_a_b = tyenv .intern(&Ident("A.B".to_string(), Default::default())) .unwrap(); let sym_a_c = tyenv .intern(&Ident("A.C".to_string(), Default::default())) .unwrap(); let sym_u32 = tyenv .intern(&Ident("u32".to_string(), Default::default())) .unwrap(); let sym_f1 = tyenv .intern(&Ident("f1".to_string(), Default::default())) .unwrap(); let sym_f2 = tyenv .intern(&Ident("f2".to_string(), Default::default())) .unwrap(); assert_eq!(tyenv.type_map.get(&sym_u32).unwrap(), &TypeId(0)); assert_eq!(tyenv.type_map.get(&sym_a).unwrap(), &TypeId(1)); let expected_types = vec![ Type::Primitive( TypeId(0), sym_u32, Pos { file: 0, offset: 19, line: 2, col: 0, }, ), Type::Enum { name: sym_a, id: TypeId(1), is_extern: true, is_nodebug: false, variants: vec![ Variant { name: sym_b, fullname: sym_a_b, id: VariantId(0), fields: vec![ Field { name: sym_f1, id: FieldId(0), ty: TypeId(0), }, Field { name: sym_f2, id: FieldId(1), ty: TypeId(0), }, ], }, Variant { name: sym_c, fullname: sym_a_c, id: VariantId(1), fields: vec![Field { name: sym_f1, id: FieldId(0), ty: TypeId(0), }], }, ], pos: Pos { file: 0, offset: 58, line: 3, col: 0, }, }, ]; assert_eq!(tyenv.types.len(), expected_types.len()); for (i, (actual, expected)) in tyenv.types.iter().zip(&expected_types).enumerate() { assert_eq!(expected, actual, "`{}`th type is not equal!", i); } } }
37.767517
119
0.406505
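The two `from_ast` entry points above are meant to be run back to back, mirroring the record's own test; a sketch with an invented ISLE snippet and function name:

fn build_envs() -> Result<(TypeEnv, TermEnv)> {
    let text = r"
        (type u32 (primitive u32))
        (decl identity (u32) u32)
        (rule (identity x) x)
    ";
    // Parse, build the type environment in one pass, then lower terms and rules.
    let defs = crate::parser::parse(crate::lexer::Lexer::from_str(text, "example.isle")?)?;
    let mut tyenv = TypeEnv::from_ast(&defs)?;
    let termenv = TermEnv::from_ast(&mut tyenv, &defs)?;
    Ok((tyenv, termenv))
}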
4bb51eb2dc263f726c86604b820d72837b6ebcd9
620
use futures_util::StreamExt;
use std::io::Write;
use std::result::Result;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut stream =
        reqwest::get("http://i0.hdslb.com/bfs/face/42eb05e354476c2b22b5c512c4a484d93650020c.jpg")
            .await?
            .bytes_stream();
    let p = std::path::Path::new(".cache/quin/face.jpg");
    std::fs::DirBuilder::new()
        .recursive(true)
        .create(p.parent().unwrap())?;
    let mut f = std::fs::File::create(p)?;
    while let Some(item) = stream.next().await {
        f.write_all(item?.as_ref())?;
    }
    Ok(())
}
29.52381
97
0.585484
f44fa4485a10ce8559f39a77cf171d037290b5ff
1,784
pub struct IconDirectionsBoat { props: crate::Props, } impl yew::Component for IconDirectionsBoat { type Properties = crate::Props; type Message = (); fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender { true } fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender { false } fn view(&self) -> yew::prelude::Html { yew::prelude::html! { <svg class=self.props.class.unwrap_or("") width=self.props.size.unwrap_or(24).to_string() height=self.props.size.unwrap_or(24).to_string() viewBox="0 0 24 24" fill=self.props.fill.unwrap_or("none") stroke=self.props.color.unwrap_or("currentColor") stroke-width=self.props.stroke_width.unwrap_or(2).to_string() stroke-linecap=self.props.stroke_linecap.unwrap_or("round") stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round") > <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M20 21c-1.39 0-2.78-.47-4-1.32-2.44 1.71-5.56 1.71-8 0C6.78 20.53 5.39 21 4 21H2v2h2c1.38 0 2.74-.35 4-.99 2.52 1.29 5.48 1.29 8 0 1.26.65 2.62.99 4 .99h2v-2h-2zM3.95 19H4c1.6 0 3.02-.88 4-2 .98 1.12 2.4 2 4 2s3.02-.88 4-2c.98 1.12 2.4 2 4 2h.05l1.89-6.68c.08-.26.06-.54-.06-.78s-.34-.42-.6-.5L20 10.62V6c0-1.1-.9-2-2-2h-3V1H9v3H6c-1.1 0-2 .9-2 2v4.62l-1.29.42c-.26.08-.48.26-.6.5s-.15.52-.06.78L3.95 19zM6 6h12v3.97L12 8 6 9.97V6z"/></svg> </svg> } } }
38.782609
581
0.582399
2990c7ab585e3582d877b15f21dec2313839870e
10,359
use crate::core::ribosome::FnComponents; use crate::core::ribosome::HostAccess; use crate::core::ribosome::Invocation; use crate::core::ribosome::ZomesToInvoke; use derive_more::Constructor; use holochain_serialized_bytes::prelude::*; use holochain_types::prelude::*; use std::collections::BTreeMap; #[derive(Debug, Clone)] pub struct EntryDefsInvocation; impl EntryDefsInvocation { #[allow(clippy::new_without_default)] pub fn new() -> Self { Self } } #[derive(Clone, Constructor)] pub struct EntryDefsHostAccess; impl From<&HostAccess> for EntryDefsHostAccess { fn from(_: &HostAccess) -> Self { Self } } impl From<EntryDefsHostAccess> for HostAccess { fn from(entry_defs_host_access: EntryDefsHostAccess) -> Self { Self::EntryDefs(entry_defs_host_access) } } impl From<&EntryDefsHostAccess> for HostFnAccess { fn from(_: &EntryDefsHostAccess) -> Self { Self::none() } } impl Invocation for EntryDefsInvocation { fn zomes(&self) -> ZomesToInvoke { ZomesToInvoke::All } fn fn_components(&self) -> FnComponents { vec!["entry_defs".into()].into() } fn host_input(self) -> Result<ExternIO, SerializedBytesError> { ExternIO::encode(()) } } /// the aggregate result of _all_ entry defs callbacks #[derive(PartialEq, Debug, Clone)] pub enum EntryDefsResult { /// simple mapping between zome and defs Defs(BTreeMap<ZomeName, EntryDefs>), Err(ZomeName, String), } impl From<Vec<(ZomeName, EntryDefsCallbackResult)>> for EntryDefsResult { fn from(callback_results: Vec<(ZomeName, EntryDefsCallbackResult)>) -> Self { callback_results.into_iter().fold( EntryDefsResult::Defs(BTreeMap::new()), |acc, x| match x { // err overrides everything (zome_name, EntryDefsCallbackResult::Err(fail_string)) => { Self::Err(zome_name, fail_string) } // passing callback allows the acc to carry forward (zome_name, EntryDefsCallbackResult::Defs(defs)) => match acc { Self::Defs(mut btreemap) => { btreemap.insert(zome_name, defs); Self::Defs(btreemap) } Self::Err(_, _) => acc, }, }, ) } } #[cfg(test)] mod test { use super::EntryDefsHostAccess; use super::EntryDefsResult; use crate::core::ribosome::Invocation; use crate::core::ribosome::ZomesToInvoke; use crate::fixt::EntryDefsFixturator; use crate::fixt::EntryDefsInvocationFixturator; use crate::fixt::ZomeNameFixturator; use ::fixt::prelude::*; use holochain_types::prelude::*; use holochain_zome_types::entry_def::EntryDefsCallbackResult; use holochain_zome_types::ExternIO; use std::collections::BTreeMap; #[test] /// this is a non-standard fold test because the result is not so simple fn entry_defs_callback_result_fold() { let mut rng = ::fixt::rng(); let mut zome_name_fixturator = ZomeNameFixturator::new(::fixt::Unpredictable); let mut entry_defs_fixturator = EntryDefsFixturator::new(::fixt::Unpredictable); let mut string_fixturator = StringFixturator::new(::fixt::Unpredictable); // zero defs assert_eq!(EntryDefsResult::Defs(BTreeMap::new()), vec![].into(),); // one defs let zome_name = zome_name_fixturator.next().unwrap(); let entry_defs = entry_defs_fixturator.next().unwrap(); assert_eq!( EntryDefsResult::Defs({ let mut tree = BTreeMap::new(); tree.insert(zome_name.clone(), entry_defs.clone()); tree }), vec![(zome_name, EntryDefsCallbackResult::Defs(entry_defs)),].into(), ); // two defs let zome_name_one = zome_name_fixturator.next().unwrap(); let entry_defs_one = entry_defs_fixturator.next().unwrap(); let zome_name_two = zome_name_fixturator.next().unwrap(); let entry_defs_two = entry_defs_fixturator.next().unwrap(); assert_eq!( EntryDefsResult::Defs({ let mut tree = BTreeMap::new(); 
tree.insert(zome_name_one.clone(), entry_defs_one.clone()); tree.insert(zome_name_two.clone(), entry_defs_two.clone()); tree }), vec![ (zome_name_one, EntryDefsCallbackResult::Defs(entry_defs_one)), (zome_name_two, EntryDefsCallbackResult::Defs(entry_defs_two)), ] .into() ); // some err let mut results = vec![]; let number_of_fails = rng.gen_range(1, 3); let number_of_defs = rng.gen_range(0, 3); for _ in 0..number_of_fails { results.push(( zome_name_fixturator.next().unwrap(), EntryDefsCallbackResult::Err(string_fixturator.next().unwrap()), )); } for _ in 0..number_of_defs { results.push(( zome_name_fixturator.next().unwrap(), EntryDefsCallbackResult::Defs(entry_defs_fixturator.next().unwrap()), )); } results.shuffle(&mut rng); let result: EntryDefsResult = results.into(); match result { EntryDefsResult::Err(_, _) => assert!(true), _ => assert!(false), } } #[test] fn entry_defs_host_access() { assert_eq!( HostFnAccess::from(&EntryDefsHostAccess), HostFnAccess::none() ); } #[tokio::test(flavor = "multi_thread")] async fn entry_defs_invocation_zomes() { let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Unpredictable) .next() .unwrap(); assert_eq!(ZomesToInvoke::All, entry_defs_invocation.zomes(),); } #[tokio::test(flavor = "multi_thread")] async fn entry_defs_invocation_fn_components() { let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Unpredictable) .next() .unwrap(); let mut expected = vec!["entry_defs"]; for fn_component in entry_defs_invocation.fn_components() { assert_eq!(fn_component, expected.pop().unwrap()); } } #[tokio::test(flavor = "multi_thread")] async fn entry_defs_invocation_host_input() { let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Unpredictable) .next() .unwrap(); let host_input = entry_defs_invocation.clone().host_input().unwrap(); assert_eq!(host_input, ExternIO::encode(()).unwrap()); } } #[cfg(test)] #[cfg(feature = "slow_tests")] mod slow_tests { use crate::core::ribosome::guest_callback::entry_defs::EntryDefsHostAccess; use crate::core::ribosome::guest_callback::entry_defs::EntryDefsResult; use crate::core::ribosome::RibosomeT; use crate::fixt::curve::Zomes; use crate::fixt::EntryDefsInvocationFixturator; use crate::fixt::RealRibosomeFixturator; use crate::fixt::ZomeCallHostAccessFixturator; use ::fixt::prelude::*; use holochain_state::host_fn_workspace::HostFnWorkspace; use holochain_types::prelude::*; use holochain_wasm_test_utils::TestWasm; pub use holochain_zome_types::entry_def::EntryVisibility; use std::collections::BTreeMap; #[tokio::test(flavor = "multi_thread")] async fn test_entry_defs_unimplemented() { let ribosome = RealRibosomeFixturator::new(Zomes(vec![TestWasm::Foo])) .next() .unwrap(); let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Empty) .next() .unwrap(); let result = ribosome .run_entry_defs(EntryDefsHostAccess, entry_defs_invocation) .unwrap(); assert_eq!(result, EntryDefsResult::Defs(BTreeMap::new()),); } #[tokio::test(flavor = "multi_thread")] async fn test_entry_defs_index_lookup() { let test_env = holochain_state::test_utils::test_cell_env(); let test_cache = holochain_state::test_utils::test_cache_env(); let env = test_env.env(); let author = fake_agent_pubkey_1(); crate::test_utils::fake_genesis(env.clone()).await.unwrap(); let workspace = HostFnWorkspace::new(env.clone(), test_cache.env(), author) .await .unwrap(); let mut host_access = fixt!(ZomeCallHostAccess); host_access.workspace = workspace; let output: () = 
crate::call_test_ribosome!(host_access, TestWasm::EntryDefs, "assert_indexes", ()); assert_eq!(&(), &output); } #[tokio::test(flavor = "multi_thread")] async fn test_entry_defs_implemented_defs() { let ribosome = RealRibosomeFixturator::new(Zomes(vec![TestWasm::EntryDefs])) .next() .unwrap(); let entry_defs_invocation = EntryDefsInvocationFixturator::new(::fixt::Empty) .next() .unwrap(); let result = ribosome .run_entry_defs(EntryDefsHostAccess, entry_defs_invocation) .unwrap(); assert_eq!( result, EntryDefsResult::Defs({ let mut tree = BTreeMap::new(); let zome_name: ZomeName = "entry_defs".into(); let defs: EntryDefs = vec![ EntryDef { id: "post".into(), visibility: EntryVisibility::Public, crdt_type: CrdtType, required_validations: 5.into(), required_validation_type: Default::default(), }, EntryDef { id: "comment".into(), visibility: EntryVisibility::Private, crdt_type: CrdtType, required_validations: 5.into(), required_validation_type: Default::default(), }, ] .into(); tree.insert(zome_name, defs); tree }), ); } }
34.075658
95
0.593397
610bbc57f7fb38e1556fc26095c1fbe24793bbf8
19,012
#[cfg(feature = "num-traits")] use num_traits::Float; #[inline] pub(crate) fn scalar_sin_cos(x: f32) -> (f32, f32) { // // expect sse2 to be available on all x86 builds // #[cfg(target_feature = "sse2")] // unsafe { // let (sinx, cosx) = sin_cos_sse2(_mm_set1_ps(x)); // (_mm_cvtss_f32(sinx), _mm_cvtss_f32(cosx)) // } // #[cfg(not(target_feature = "sse2"))] x.sin_cos() } #[inline] pub fn scalar_acos(value: f32) -> f32 { // Based on https://github.com/microsoft/DirectXMath `XMScalarAcos` // Clamp input to [-1,1]. let nonnegative = value >= 0.0; let x = value.abs(); let mut omx = 1.0 - x; if omx < 0.0 { omx = 0.0; } let root = omx.sqrt(); // 7-degree minimax approximation #[allow(clippy::approx_constant)] let mut result = ((((((-0.001_262_491_1 * x + 0.006_670_09) * x - 0.017_088_126) * x + 0.030_891_88) * x - 0.050_174_303) * x + 0.088_978_99) * x - 0.214_598_8) * x + 1.570_796_3; result *= root; // acos(x) = pi - acos(-x) when x < 0 if nonnegative { result } else { core::f32::consts::PI - result } } #[cfg(vec4_sse2)] #[allow(clippy::excessive_precision)] pub(crate) mod sse2 { #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] use crate::f32::cast::UnionCast; #[cfg(target_arch = "x86")] use core::arch::x86::*; #[cfg(target_arch = "x86_64")] use core::arch::x86_64::*; macro_rules! _ps_const_ty { ($name:ident, $field:ident, $x:expr) => { #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] const $name: UnionCast = UnionCast { $field: [$x, $x, $x, $x], }; }; ($name:ident, $field:ident, $x:expr, $y:expr, $z:expr, $w:expr) => { #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] const $name: UnionCast = UnionCast { $field: [$x, $y, $z, $w], }; }; } _ps_const_ty!(PS_INV_SIGN_MASK, u32x4, !0x8000_0000); _ps_const_ty!(PS_SIGN_MASK, u32x4, 0x8000_0000); _ps_const_ty!(PS_NO_FRACTION, f32x4, 8388608.0); // _ps_const_ty!(PS_1_0, f32x4, 1.0); // _ps_const_ty!(PS_0_5, f32x4, 0.5); // _ps_const_ty!(PI32_1, i32x4, 1); // _ps_const_ty!(PI32_INV_1, i32x4, !1); // _ps_const_ty!(PI32_2, i32x4, 2); // _ps_const_ty!(PI32_4, i32x4, 4); // _ps_const_ty!(PS_MINUS_CEPHES_DP1, f32x4, -0.785_156_25); // _ps_const_ty!(PS_MINUS_CEPHES_DP2, f32x4, -2.418_756_5e-4); // _ps_const_ty!(PS_MINUS_CEPHES_DP3, f32x4, -3.774_895e-8); // _ps_const_ty!(PS_SINCOF_P0, f32x4, -1.951_529_6e-4); // _ps_const_ty!(PS_SINCOF_P1, f32x4, 8.332_161e-3); // _ps_const_ty!(PS_SINCOF_P2, f32x4, -1.666_665_5e-1); // _ps_const_ty!(PS_COSCOF_P0, f32x4, 2.443_315_7e-5); // _ps_const_ty!(PS_COSCOF_P1, f32x4, -1.388_731_6E-3); // _ps_const_ty!(PS_COSCOF_P2, f32x4, 4.166_664_6e-2); // _ps_const_ty!(PS_CEPHES_FOPI, f32x4, 1.273_239_5); // 4 / M_PI _ps_const_ty!(PS_NEGATIVE_ZERO, u32x4, 0x80000000); _ps_const_ty!(PS_PI, f32x4, core::f32::consts::PI); _ps_const_ty!(PS_HALF_PI, f32x4, core::f32::consts::FRAC_PI_2); _ps_const_ty!( PS_SIN_COEFFICIENTS0, f32x4, -0.16666667, 0.0083333310, -0.00019840874, 2.7525562e-06 ); _ps_const_ty!( PS_SIN_COEFFICIENTS1, f32x4, -2.3889859e-08, -0.16665852, /*Est1*/ 0.0083139502, /*Est2*/ -0.00018524670 /*Est3*/ ); _ps_const_ty!(PS_ONE, f32x4, 1.0); _ps_const_ty!(PS_TWO_PI, f32x4, core::f32::consts::PI * 2.0); _ps_const_ty!(PS_RECIPROCAL_TWO_PI, f32x4, 0.159154943); #[cfg(target_feature = "fma")] macro_rules! m128_mul_add { ($a:expr, $b:expr, $c:expr) => { _mm_fmadd_ps($a, $b, $c) }; } #[cfg(not(target_feature = "fma"))] macro_rules! m128_mul_add { ($a:expr, $b:expr, $c:expr) => { _mm_add_ps(_mm_mul_ps($a, $b), $c) }; } #[cfg(target_feature = "fma")] macro_rules! 
m128_neg_mul_sub { ($a:expr, $b:expr, $c:expr) => { _mm_fnmadd_ps($a, $b, $c) }; } #[cfg(not(target_feature = "fma"))] macro_rules! m128_neg_mul_sub { ($a:expr, $b:expr, $c:expr) => { _mm_sub_ps($c, _mm_mul_ps($a, $b)) }; } #[inline] pub(crate) unsafe fn m128_round(v: __m128) -> __m128 { // Based on https://github.com/microsoft/DirectXMath `XMVectorRound` let sign = _mm_and_ps(v, PS_SIGN_MASK.m128); let s_magic = _mm_or_ps(PS_NO_FRACTION.m128, sign); let r1 = _mm_add_ps(v, s_magic); let r1 = _mm_sub_ps(r1, s_magic); let r2 = _mm_and_ps(v, PS_INV_SIGN_MASK.m128); let mask = _mm_cmple_ps(r2, PS_NO_FRACTION.m128); let r2 = _mm_andnot_ps(mask, v); let r1 = _mm_and_ps(r1, mask); _mm_xor_ps(r1, r2) } #[inline] pub(crate) unsafe fn m128_floor(v: __m128) -> __m128 { // Based on https://github.com/microsoft/DirectXMath `XMVectorFloor` // To handle NAN, INF and numbers greater than 8388608, use masking let test = _mm_and_si128(_mm_castps_si128(v), PS_INV_SIGN_MASK.m128i); let test = _mm_cmplt_epi32(test, PS_NO_FRACTION.m128i); // Truncate let vint = _mm_cvttps_epi32(v); let result = _mm_cvtepi32_ps(vint); let larger = _mm_cmpgt_ps(result, v); // 0 -> 0, 0xffffffff -> -1.0f let larger = _mm_cvtepi32_ps(_mm_castps_si128(larger)); let result = _mm_add_ps(result, larger); // All numbers less than 8388608 will use the round to int let result = _mm_and_ps(result, _mm_castsi128_ps(test)); // All others, use the ORIGINAL value let test = _mm_andnot_si128(test, _mm_castps_si128(v)); _mm_or_ps(result, _mm_castsi128_ps(test)) } #[inline] pub(crate) unsafe fn m128_ceil(v: __m128) -> __m128 { // Based on https://github.com/microsoft/DirectXMath `XMVectorCeil` // To handle NAN, INF and numbers greater than 8388608, use masking let test = _mm_and_si128(_mm_castps_si128(v), PS_INV_SIGN_MASK.m128i); let test = _mm_cmplt_epi32(test, PS_NO_FRACTION.m128i); // Truncate let vint = _mm_cvttps_epi32(v); let result = _mm_cvtepi32_ps(vint); let smaller = _mm_cmplt_ps(result, v); // 0 -> 0, 0xffffffff -> -1.0f let smaller = _mm_cvtepi32_ps(_mm_castps_si128(smaller)); let result = _mm_sub_ps(result, smaller); // All numbers less than 8388608 will use the round to int let result = _mm_and_ps(result, _mm_castsi128_ps(test)); // All others, use the ORIGINAL value let test = _mm_andnot_si128(test, _mm_castps_si128(v)); _mm_or_ps(result, _mm_castsi128_ps(test)) } /// Returns a vector whose components are the corresponding components of Angles modulo 2PI. #[inline] pub(crate) unsafe fn m128_mod_angles(angles: __m128) -> __m128 { // Based on https://github.com/microsoft/DirectXMath `XMVectorModAngles` let v = _mm_mul_ps(angles, PS_RECIPROCAL_TWO_PI.m128); let v = m128_round(v); m128_neg_mul_sub!(PS_TWO_PI.m128, v, angles) } /// Computes the sine of the angle in each lane of `v`. Values outside /// the bounds of PI may produce an increasing error as the input angle /// drifts from `[-PI, PI]`. #[inline] pub(crate) unsafe fn m128_sin(v: __m128) -> __m128 { // Based on https://github.com/microsoft/DirectXMath `XMVectorSin` // 11-degree minimax approximation // Force the value within the bounds of pi let mut x = m128_mod_angles(v); // Map in [-pi/2,pi/2] with sin(y) = sin(x). 
let sign = _mm_and_ps(x, PS_NEGATIVE_ZERO.m128); // pi when x >= 0, -pi when x < 0 let c = _mm_or_ps(PS_PI.m128, sign); // |x| let absx = _mm_andnot_ps(sign, x); let rflx = _mm_sub_ps(c, x); let comp = _mm_cmple_ps(absx, PS_HALF_PI.m128); let select0 = _mm_and_ps(comp, x); let select1 = _mm_andnot_ps(comp, rflx); x = _mm_or_ps(select0, select1); let x2 = _mm_mul_ps(x, x); // Compute polynomial approximation const SC1: __m128 = unsafe { PS_SIN_COEFFICIENTS1.m128 }; let v_constants_b = _mm_shuffle_ps(SC1, SC1, 0b00_00_00_00); const SC0: __m128 = unsafe { PS_SIN_COEFFICIENTS0.m128 }; let mut v_constants = _mm_shuffle_ps(SC0, SC0, 0b11_11_11_11); let mut result = m128_mul_add!(v_constants_b, x2, v_constants); v_constants = _mm_shuffle_ps(SC0, SC0, 0b10_10_10_10); result = m128_mul_add!(result, x2, v_constants); v_constants = _mm_shuffle_ps(SC0, SC0, 0b01_01_01_01); result = m128_mul_add!(result, x2, v_constants); v_constants = _mm_shuffle_ps(SC0, SC0, 0b00_00_00_00); result = m128_mul_add!(result, x2, v_constants); result = m128_mul_add!(result, x2, PS_ONE.m128); result = _mm_mul_ps(result, x); result } // Based on http://gruntthepeon.free.fr/ssemath/sse_mathfun.h // #[cfg(target_feature = "sse2")] // unsafe fn sin_cos_sse2(x: __m128) -> (__m128, __m128) { // let mut sign_bit_sin = x; // // take the absolute value // let mut x = _mm_and_ps(x, PS_INV_SIGN_MASK.m128); // // extract the sign bit (upper one) // sign_bit_sin = _mm_and_ps(sign_bit_sin, PS_SIGN_MASK.m128); // // scale by 4/Pi // let mut y = _mm_mul_ps(x, PS_CEPHES_FOPI.m128); // // store the integer part of y in emm2 // let mut emm2 = _mm_cvttps_epi32(y); // // j=(j+1) & (~1) (see the cephes sources) // emm2 = _mm_add_epi32(emm2, PI32_1.m128i); // emm2 = _mm_and_si128(emm2, PI32_INV_1.m128i); // y = _mm_cvtepi32_ps(emm2); // let mut emm4 = emm2; // /* get the swap sign flag for the sine */ // let mut emm0 = _mm_and_si128(emm2, PI32_4.m128i); // emm0 = _mm_slli_epi32(emm0, 29); // let swap_sign_bit_sin = _mm_castsi128_ps(emm0); // /* get the polynom selection mask for the sine*/ // emm2 = _mm_and_si128(emm2, PI32_2.m128i); // emm2 = _mm_cmpeq_epi32(emm2, _mm_setzero_si128()); // let poly_mask = _mm_castsi128_ps(emm2); // /* The magic pass: "Extended precision modular arithmetic" // x = ((x - y * DP1) - y * DP2) - y * DP3; */ // let mut xmm1 = PS_MINUS_CEPHES_DP1.m128; // let mut xmm2 = PS_MINUS_CEPHES_DP2.m128; // let mut xmm3 = PS_MINUS_CEPHES_DP3.m128; // xmm1 = _mm_mul_ps(y, xmm1); // xmm2 = _mm_mul_ps(y, xmm2); // xmm3 = _mm_mul_ps(y, xmm3); // x = _mm_add_ps(x, xmm1); // x = _mm_add_ps(x, xmm2); // x = _mm_add_ps(x, xmm3); // emm4 = _mm_sub_epi32(emm4, PI32_2.m128i); // emm4 = _mm_andnot_si128(emm4, PI32_4.m128i); // emm4 = _mm_slli_epi32(emm4, 29); // let sign_bit_cos = _mm_castsi128_ps(emm4); // sign_bit_sin = _mm_xor_ps(sign_bit_sin, swap_sign_bit_sin); // // Evaluate the first polynom (0 <= x <= Pi/4) // let z = _mm_mul_ps(x, x); // y = PS_COSCOF_P0.m128; // y = _mm_mul_ps(y, z); // y = _mm_add_ps(y, PS_COSCOF_P1.m128); // y = _mm_mul_ps(y, z); // y = _mm_add_ps(y, PS_COSCOF_P2.m128); // y = _mm_mul_ps(y, z); // y = _mm_mul_ps(y, z); // let tmp = _mm_mul_ps(z, PS_0_5.m128); // y = _mm_sub_ps(y, tmp); // y = _mm_add_ps(y, PS_1_0.m128); // // Evaluate the second polynom (Pi/4 <= x <= 0) // let mut y2 = PS_SINCOF_P0.m128; // y2 = _mm_mul_ps(y2, z); // y2 = _mm_add_ps(y2, PS_SINCOF_P1.m128); // y2 = _mm_mul_ps(y2, z); // y2 = _mm_add_ps(y2, PS_SINCOF_P2.m128); // y2 = _mm_mul_ps(y2, z); // y2 = _mm_mul_ps(y2, x); // y2 = 
_mm_add_ps(y2, x); // // select the correct result from the two polynoms // xmm3 = poly_mask; // let ysin2 = _mm_and_ps(xmm3, y2); // let ysin1 = _mm_andnot_ps(xmm3, y); // y2 = _mm_sub_ps(y2, ysin2); // y = _mm_sub_ps(y, ysin1); // xmm1 = _mm_add_ps(ysin1, ysin2); // xmm2 = _mm_add_ps(y, y2); // // update the sign // ( // _mm_xor_ps(xmm1, sign_bit_sin), // _mm_xor_ps(xmm2, sign_bit_cos), // ) // } } #[cfg(test)] macro_rules! assert_approx_eq { ($a:expr, $b:expr) => {{ assert_approx_eq!($a, $b, core::f32::EPSILON); }}; ($a:expr, $b:expr, $eps:expr) => {{ let (a, b) = (&$a, &$b); let eps = $eps; assert!( (a - b).abs() <= eps, "assertion failed: `(left !== right)` \ (left: `{:?}`, right: `{:?}`, expect diff: `{:?}`, real diff: `{:?}`)", *a, *b, eps, (a - b).abs() ); }}; } #[cfg(test)] macro_rules! assert_relative_eq { ($a:expr, $b:expr) => {{ assert_relative_eq!($a, $b, core::f32::EPSILON); }}; ($a:expr, $b:expr, $eps:expr) => {{ let (a, b) = (&$a, &$b); let eps = $eps; let diff = (a - b).abs(); let largest = a.abs().max(b.abs()); assert!( diff <= largest * eps, "assertion failed: `(left !== right)` \ (left: `{:?}`, right: `{:?}`, expect diff: `{:?}`, real diff: `{:?}`)", *a, *b, largest * eps, diff ); }}; } #[test] fn test_scalar_acos() { fn test_scalar_acos_angle(a: f32) { // 1e-6 is the lowest epsilon that will pass assert_relative_eq!(scalar_acos(a), a.acos(), 1e-6); // assert_approx_eq!(scalar_acos(a), a.acos(), 1e-6); } // test 1024 floats between -1.0 and 1.0 inclusive const MAX_TESTS: u32 = 1024 / 2; const SIGN: u32 = 0x80_00_00_00; const PTVE_ONE: u32 = 0x3f_80_00_00; // 1.0_f32.to_bits(); const NGVE_ONE: u32 = SIGN | PTVE_ONE; const STEP_SIZE: usize = (PTVE_ONE / MAX_TESTS) as usize; for f in (SIGN..=NGVE_ONE) .step_by(STEP_SIZE) .map(|i| f32::from_bits(i)) { test_scalar_acos_angle(f); } for f in (0..=PTVE_ONE).step_by(STEP_SIZE).map(|i| f32::from_bits(i)) { test_scalar_acos_angle(f); } // input is clamped to -1.0..1.0 assert_approx_eq!(scalar_acos(2.0), 0.0); assert_approx_eq!(scalar_acos(-2.0), core::f32::consts::PI); } #[test] fn test_scalar_sin_cos() { fn test_scalar_sin_cos_angle(a: f32) { let (s1, c1) = scalar_sin_cos(a); let (s2, c2) = a.sin_cos(); // dbg!(a); assert_approx_eq!(s1, s2); assert_approx_eq!(c1, c2); } // test 1024 floats between -PI and PI inclusive const MAX_TESTS: u32 = 1024 / 2; const SIGN: u32 = 0x80_00_00_00; let ptve_pi = core::f32::consts::PI.to_bits(); let ngve_pi = SIGN | ptve_pi; let step_pi = (ptve_pi / MAX_TESTS) as usize; for f in (SIGN..=ngve_pi).step_by(step_pi).map(|i| f32::from_bits(i)) { test_scalar_sin_cos_angle(f); } for f in (0..=ptve_pi).step_by(step_pi).map(|i| f32::from_bits(i)) { test_scalar_sin_cos_angle(f); } // test 1024 floats between -INF and +INF exclusive let ptve_inf = core::f32::INFINITY.to_bits(); let ngve_inf = core::f32::NEG_INFINITY.to_bits(); let step_inf = (ptve_inf / MAX_TESTS) as usize; for f in (SIGN..ngve_inf) .step_by(step_inf) .map(|i| f32::from_bits(i)) { test_scalar_sin_cos_angle(f); } for f in (0..ptve_inf).step_by(step_inf).map(|i| f32::from_bits(i)) { test_scalar_sin_cos_angle(f); } // +inf and -inf should return NaN let (s, c) = scalar_sin_cos(core::f32::INFINITY); assert!(s.is_nan()); assert!(c.is_nan()); let (s, c) = scalar_sin_cos(core::f32::NEG_INFINITY); assert!(s.is_nan()); assert!(c.is_nan()); } #[test] #[cfg(vec4_sse2)] fn test_sse2_m128_sin() { use crate::Vec4; use core::f32::consts::PI; fn test_sse2_m128_sin_angle(a: f32) { let v = Vec4::splat(a); let v = unsafe { Vec4(sse2::m128_sin(v.0)) }; let 
a_sin = a.sin(); // dbg!((a, a_sin, v)); assert_approx_eq!(v.x, a_sin, 1e-6); assert_approx_eq!(v.z, a_sin, 1e-6); assert_approx_eq!(v.y, a_sin, 1e-6); assert_approx_eq!(v.w, a_sin, 1e-6); } let mut a = -PI; let end = PI; let step = PI / 8192.0; while a <= end { test_sse2_m128_sin_angle(a); a += step; } } // sse2::m128_sin is derived from the XMVectorSin in DirectXMath. It's been // observed both here and in the C++ version that the error rate increases // as the input angle drifts further from the bounds of PI. // // #[test] // #[cfg(vec4_sse2)] // fn test_sse2_m128_sin2() { // use crate::Vec4; // fn test_sse2_m128_sin_angle(a: f32) -> f32 { // let v = Vec4::splat(a); // let v = unsafe { Vec4(sse2::m128_sin(v.0)) }; // let a_sin = a.sin(); // let v_sin = v.x(); // // println!("{:?}", (a, a_sin, v_sin)); // assert_approx_eq!(a_sin, v.x(), 1e-4); // assert_approx_eq!(a_sin, v.z(), 1e-4); // assert_approx_eq!(a_sin, v.y(), 1e-4); // assert_approx_eq!(a_sin, v.w(), 1e-4); // v.x() // } // // test 1024 floats between -PI and PI inclusive // const MAX_TESTS: u32 = 1024 / 2; // const SIGN: u32 = 0x80_00_00_00; // let ptve_pi = std::f32::consts::PI.to_bits(); // let ngve_pi = SIGN | ptve_pi; // let step_pi = (ptve_pi / MAX_TESTS) as usize; // for f in (SIGN..=ngve_pi).step_by(step_pi).map(|i| f32::from_bits(i)) { // test_sse2_m128_sin_angle(f); // } // for f in (0..=ptve_pi).step_by(step_pi).map(|i| f32::from_bits(i)) { // test_sse2_m128_sin_angle(f); // } // // test 1024 floats between -INF and +INF exclusive // let ptve_inf = std::f32::INFINITY.to_bits(); // let ngve_inf = std::f32::NEG_INFINITY.to_bits(); // let step_inf = (ptve_inf / MAX_TESTS) as usize; // for f in (SIGN..ngve_inf) // .step_by(step_inf) // .map(|i| f32::from_bits(i)) // { // test_sse2_m128_sin_angle(f); // } // for f in (0..ptve_inf).step_by(step_inf).map(|i| f32::from_bits(i)) { // test_sse2_m128_sin_angle(f); // } // // +inf and -inf should return NaN // let s = test_sse2_m128_sin_angle(std::f32::INFINITY); // assert!(s.is_nan()); // let s = test_sse2_m128_sin_angle(std::f32::NEG_INFINITY); // assert!(s.is_nan()); // }
34.255856
96
0.569956
1da99d8ece85cf064bb4a304060efecefc75375d
3,594
// Copyright 2018-2021 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use super::CredentialsStoreOperations;
use crate::biome::credentials::store::diesel::schema::user_credentials;
use crate::biome::credentials::store::error::CredentialsStoreError;
use crate::biome::credentials::store::{
    CredentialsBuilder, CredentialsModel, PasswordEncryptionCost,
};
use diesel::{dsl::update, prelude::*, result::Error::NotFound};

pub(in crate::biome::credentials) trait CredentialsStoreUpdateCredentialsOperation {
    fn update_credentials(
        &self,
        user_id: &str,
        username: &str,
        password: &str,
        password_encryption_cost: PasswordEncryptionCost,
    ) -> Result<(), CredentialsStoreError>;
}

impl<'a, C> CredentialsStoreUpdateCredentialsOperation for CredentialsStoreOperations<'a, C>
where
    C: diesel::Connection,
    i64: diesel::deserialize::FromSql<diesel::sql_types::BigInt, C::Backend>,
    String: diesel::deserialize::FromSql<diesel::sql_types::Text, C::Backend>,
{
    fn update_credentials(
        &self,
        user_id: &str,
        username: &str,
        password: &str,
        password_encryption_cost: PasswordEncryptionCost,
    ) -> Result<(), CredentialsStoreError> {
        let credentials_builder: CredentialsBuilder = Default::default();
        let credentials = credentials_builder
            .with_user_id(user_id)
            .with_username(username)
            .with_password(password)
            .with_password_encryption_cost(password_encryption_cost)
            .build()
            .map_err(|err| CredentialsStoreError::OperationError {
                context: "Failed to build updated credentials".to_string(),
                source: Box::new(err),
            })?;

        let credential_exists = user_credentials::table
            .filter(user_credentials::user_id.eq(&credentials.user_id))
            .first::<CredentialsModel>(self.conn)
            .map(Some)
            .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })
            .map_err(|err| CredentialsStoreError::QueryError {
                context: "Failed check for existing user id".to_string(),
                source: Box::new(err),
            })?;

        if credential_exists.is_none() {
            return Err(CredentialsStoreError::NotFoundError(format!(
                "Credentials not found for user id: {}",
                &credentials.user_id
            )));
        }

        update(user_credentials::table.filter(user_credentials::user_id.eq(&credentials.user_id)))
            .set((
                user_credentials::user_id.eq(&credentials.user_id),
                user_credentials::username.eq(&credentials.username),
                user_credentials::password.eq(&credentials.password),
            ))
            .execute(self.conn)
            .map(|_| ())
            .map_err(|err| CredentialsStoreError::OperationError {
                context: "Failed to update credentials".to_string(),
                source: Box::new(err),
            })?;

        Ok(())
    }
}
41.310345
98
0.641347
218dc60b1b6ac0e4bca5660864e40f6f3336fd26
4,131
#![cfg_attr(docsrs, feature(doc_cfg))] pub use sqlx_core::acquire::Acquire; pub use sqlx_core::arguments::{Arguments, IntoArguments}; pub use sqlx_core::column::Column; pub use sqlx_core::column::ColumnIndex; pub use sqlx_core::connection::{ConnectOptions, Connection}; pub use sqlx_core::database::{self, Database}; pub use sqlx_core::describe::Describe; pub use sqlx_core::done::Done; pub use sqlx_core::executor::{Execute, Executor}; pub use sqlx_core::from_row::FromRow; pub use sqlx_core::pool::{self, Pool}; pub use sqlx_core::query::{query, query_with}; pub use sqlx_core::query_as::{query_as, query_as_with}; pub use sqlx_core::query_scalar::{query_scalar, query_scalar_with}; pub use sqlx_core::row::Row; pub use sqlx_core::statement::Statement; pub use sqlx_core::transaction::{Transaction, TransactionManager}; pub use sqlx_core::type_info::TypeInfo; pub use sqlx_core::types::Type; pub use sqlx_core::value::{Value, ValueRef}; #[doc(inline)] pub use sqlx_core::error::{self, Error, Result}; #[cfg(feature = "migrate")] pub use sqlx_core::migrate; #[cfg(all( any( feature = "mysql", feature = "sqlite", feature = "postgres", feature = "mssql" ), feature = "any" ))] pub use sqlx_core::any::{self, Any, AnyConnection, AnyPool}; #[cfg(feature = "mysql")] #[cfg_attr(docsrs, doc(cfg(feature = "mysql")))] pub use sqlx_core::mysql::{self, MySql, MySqlConnection, MySqlPool}; #[cfg(feature = "mssql")] #[cfg_attr(docsrs, doc(cfg(feature = "mssql")))] pub use sqlx_core::mssql::{self, Mssql, MssqlConnection, MssqlPool}; #[cfg(feature = "postgres")] #[cfg_attr(docsrs, doc(cfg(feature = "postgres")))] pub use sqlx_core::postgres::{self, PgConnection, PgPool, Postgres}; #[cfg(feature = "sqlite")] #[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))] pub use sqlx_core::sqlite::{self, Sqlite, SqliteConnection, SqlitePool}; #[cfg(feature = "macros")] #[doc(hidden)] pub extern crate sqlx_macros; // derives #[cfg(feature = "macros")] #[doc(hidden)] pub use sqlx_macros::{FromRow, Type}; #[cfg(feature = "macros")] mod macros; // macro support #[cfg(feature = "macros")] #[doc(hidden)] pub mod ty_match; /// Conversions between Rust and SQL types. /// /// To see how each SQL type maps to a Rust type, see the corresponding `types` module for each /// database: /// /// * [PostgreSQL](../postgres/types/index.html) /// * [MySQL](../mysql/types/index.html) /// * [SQLite](../sqlite/types/index.html) /// * [MSSQL](../mssql/types/index.html) /// /// Any external types that have had [`Type`] implemented for, are re-exported in this module /// for convenience as downstream users need to use a compatible version of the external crate /// to take advantage of the implementation. /// /// [`Type`]: types/trait.Type.html pub mod types { pub use sqlx_core::types::*; #[cfg(feature = "macros")] #[doc(hidden)] pub use sqlx_macros::Type; } /// Provides [`Encode`](encode/trait.Encode.html) for encoding values for the database. pub mod encode { pub use sqlx_core::encode::{Encode, IsNull}; #[cfg(feature = "macros")] #[doc(hidden)] pub use sqlx_macros::Encode; } pub use self::encode::Encode; /// Provides [`Decode`](decode/trait.Decode.html) for decoding values from the database. pub mod decode { pub use sqlx_core::decode::Decode; #[cfg(feature = "macros")] #[doc(hidden)] pub use sqlx_macros::Decode; } pub use self::decode::Decode; /// Types and traits for the `query` family of functions and macros. 
pub mod query { pub use sqlx_core::query::{Map, Query}; pub use sqlx_core::query::{MapRow, TryMapRow}; pub use sqlx_core::query_as::QueryAs; pub use sqlx_core::query_scalar::QueryScalar; } /// Convenience re-export of common traits. pub mod prelude { pub use super::Acquire; pub use super::ConnectOptions; pub use super::Connection; pub use super::Decode; pub use super::Done; pub use super::Encode; pub use super::Executor; pub use super::FromRow; pub use super::IntoArguments; pub use super::Row; pub use super::Statement; pub use super::Type; }
29.091549
95
0.687727
bbd8c88d8517af3198dbf7b0434e8934b489908d
66
#![feature(wrapping_int_impl)]

pub mod concise;
pub mod rleplus;
13.2
30
0.757576
56280fba0d37dc1993d38d2eedce77e7b21d6c17
8,797
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::logging::AdapterLogSchema; use aptos_logger::prelude::*; use aptos_types::account_config::ChainSpecificAccountInfo; use move_binary_format::errors::VMError; use move_core_types::vm_status::{StatusCode, VMStatus}; /// Error codes that can be emitted by the prologue. These have special significance to the VM when /// they are raised during the prologue. /// These errors are only expected from the module that is registered as the account module for the system. /// The prologue should not emit any other error codes or fail for any reason, doing so will result /// in the VM throwing an invariant violation pub const EACCOUNT_FROZEN: u64 = 1000; // sending account is frozen pub const EBAD_ACCOUNT_AUTHENTICATION_KEY: u64 = 1001; // auth key in transaction is invalid pub const ESEQUENCE_NUMBER_TOO_OLD: u64 = 1002; // transaction sequence number is too old pub const ESEQUENCE_NUMBER_TOO_NEW: u64 = 1003; // transaction sequence number is too new pub const EACCOUNT_DOES_NOT_EXIST: u64 = 1004; // transaction sender's account does not exist pub const ECANT_PAY_GAS_DEPOSIT: u64 = 1005; // insufficient balance (to pay for gas deposit) pub const ETRANSACTION_EXPIRED: u64 = 1006; // transaction expiration time exceeds block time. pub const EBAD_CHAIN_ID: u64 = 1007; // chain_id in transaction doesn't match the one on-chain pub const ESCRIPT_NOT_ALLOWED: u64 = 1008; pub const EMODULE_NOT_ALLOWED: u64 = 1009; pub const EINVALID_WRITESET_SENDER: u64 = 1010; // invalid sender (not diem root) for write set pub const ESEQUENCE_NUMBER_TOO_BIG: u64 = 1011; pub const EBAD_TRANSACTION_FEE_CURRENCY: u64 = 1012; pub const ESECONDARY_KEYS_ADDRESSES_COUNT_MISMATCH: u64 = 1013; pub const ESEQ_NONCE_NONCE_INVALID: u64 = 1014; const INVALID_STATE: u8 = 1; const INVALID_ARGUMENT: u8 = 7; const LIMIT_EXCEEDED: u8 = 8; fn error_split(code: u64) -> (u8, u64) { let category = code as u8; let reason = code >> 8; (category, reason) } /// Converts particular Move abort codes to specific validation error codes for the prologue /// Any non-abort non-execution code is considered an invariant violation, specifically /// `UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION` pub fn convert_prologue_error( chain_specific_info: &ChainSpecificAccountInfo, error: VMError, log_context: &AdapterLogSchema, ) -> Result<(), VMStatus> { let status = error.into_vm_status(); Err(match status { VMStatus::Executed => VMStatus::Executed, VMStatus::MoveAbort(location, code) if !chain_specific_info.is_account_module_abort(&location) => { let (category, reason) = error_split(code); log_context.alert(); error!( *log_context, "[aptos_vm] Unexpected prologue Move abort: {:?}::{:?} (Category: {:?} Reason: {:?})", location, code, category, reason, ); VMStatus::Error(StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION) } VMStatus::MoveAbort(location, code) => { let new_major_status = match error_split(code) { (INVALID_STATE, EACCOUNT_FROZEN) => StatusCode::SENDING_ACCOUNT_FROZEN, // Invalid authentication key (INVALID_ARGUMENT, EBAD_ACCOUNT_AUTHENTICATION_KEY) => StatusCode::INVALID_AUTH_KEY, // Sequence number too old (INVALID_ARGUMENT, ESEQUENCE_NUMBER_TOO_OLD) => StatusCode::SEQUENCE_NUMBER_TOO_OLD, // Sequence number too new (INVALID_ARGUMENT, ESEQUENCE_NUMBER_TOO_NEW) => StatusCode::SEQUENCE_NUMBER_TOO_NEW, // Sequence number too new (INVALID_ARGUMENT, EACCOUNT_DOES_NOT_EXIST) => { StatusCode::SENDING_ACCOUNT_DOES_NOT_EXIST } // Can't pay for transaction gas deposit/fee 
(INVALID_ARGUMENT, ECANT_PAY_GAS_DEPOSIT) => { StatusCode::INSUFFICIENT_BALANCE_FOR_TRANSACTION_FEE } (INVALID_ARGUMENT, ETRANSACTION_EXPIRED) => StatusCode::TRANSACTION_EXPIRED, (INVALID_ARGUMENT, EBAD_CHAIN_ID) => StatusCode::BAD_CHAIN_ID, (INVALID_STATE, ESCRIPT_NOT_ALLOWED) => StatusCode::UNKNOWN_SCRIPT, (INVALID_STATE, EMODULE_NOT_ALLOWED) => StatusCode::INVALID_MODULE_PUBLISHER, (INVALID_ARGUMENT, EINVALID_WRITESET_SENDER) => StatusCode::REJECTED_WRITE_SET, // Sequence number will overflow (LIMIT_EXCEEDED, ESEQUENCE_NUMBER_TOO_BIG) => StatusCode::SEQUENCE_NUMBER_TOO_BIG, // The gas currency is not registered as a TransactionFee currency (INVALID_ARGUMENT, EBAD_TRANSACTION_FEE_CURRENCY) => { StatusCode::BAD_TRANSACTION_FEE_CURRENCY } (INVALID_ARGUMENT, ESECONDARY_KEYS_ADDRESSES_COUNT_MISMATCH) => { StatusCode::SECONDARY_KEYS_ADDRESSES_COUNT_MISMATCH } (INVALID_ARGUMENT, ESEQ_NONCE_NONCE_INVALID) => StatusCode::SEQUENCE_NONCE_INVALID, (category, reason) => { log_context.alert(); error!( *log_context, "[aptos_vm] Unexpected prologue Move abort: {:?}::{:?} (Category: {:?} Reason: {:?})", location, code, category, reason, ); return Err(VMStatus::Error( StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION, )); } }; VMStatus::Error(new_major_status) } status @ VMStatus::ExecutionFailure { .. } | status @ VMStatus::Error(_) => { log_context.alert(); error!( *log_context, "[aptos_vm] Unexpected prologue error: {:?}", status ); VMStatus::Error(StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION) } }) } /// Checks for only Move aborts or successful execution. /// Any other errors are mapped to the invariant violation /// `UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION` pub fn convert_epilogue_error( chain_specific_info: &ChainSpecificAccountInfo, error: VMError, log_context: &AdapterLogSchema, ) -> Result<(), VMStatus> { let status = error.into_vm_status(); Err(match status { VMStatus::Executed => VMStatus::Executed, VMStatus::MoveAbort(location, code) if !chain_specific_info.is_account_module_abort(&location) => { let (category, reason) = error_split(code); log_context.alert(); error!( *log_context, "[aptos_vm] Unexpected success epilogue Move abort: {:?}::{:?} (Category: {:?} Reason: {:?})", location, code, category, reason, ); VMStatus::Error(StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION) } VMStatus::MoveAbort(location, code) => match error_split(code) { (LIMIT_EXCEEDED, ECANT_PAY_GAS_DEPOSIT) => VMStatus::MoveAbort(location, code), (category, reason) => { log_context.alert(); error!( *log_context, "[aptos_vm] Unexpected success epilogue Move abort: {:?}::{:?} (Category: {:?} Reason: {:?})", location, code, category, reason, ); VMStatus::Error(StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION) } }, status => { log_context.alert(); error!( *log_context, "[aptos_vm] Unexpected success epilogue error: {:?}", status, ); VMStatus::Error(StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION) } }) } /// Checks for only successful execution /// Any errors are mapped to the invariant violation /// `UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION` pub fn expect_only_successful_execution( error: VMError, function_name: &str, log_context: &AdapterLogSchema, ) -> Result<(), VMStatus> { let status = error.into_vm_status(); Err(match status { VMStatus::Executed => VMStatus::Executed, status => { log_context.alert(); error!( *log_context, "[aptos_vm] Unexpected error from known Move function, '{}'. 
Error: {:?}", function_name, status, ); VMStatus::Error(StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION) } }) }
45.580311
114
0.629078
569971015a6aa5d1f34ec5c7dc789b246c968eaf
5,489
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/vhdirk/gir-files.git) // DO NOT EDIT use crate::FormatOptions; use crate::Header; use crate::ParserOptions; use crate::Stream; use glib::object::IsA; use glib::translate::*; use std::fmt; glib::wrapper! { #[doc(alias = "GMimeHeaderList")] pub struct HeaderList(Object<ffi::GMimeHeaderList, ffi::GMimeHeaderListClass>); match fn { type_ => || ffi::g_mime_header_list_get_type(), } } impl HeaderList { #[doc(alias = "g_mime_header_list_new")] pub fn new(options: Option<&ParserOptions>) -> HeaderList { assert_initialized_main_thread!(); unsafe { from_glib_full(ffi::g_mime_header_list_new(mut_override( options.to_glib_none().0, ))) } } } pub const NONE_HEADER_LIST: Option<&HeaderList> = None; pub trait HeaderListExt: 'static { #[doc(alias = "g_mime_header_list_append")] fn append(&self, name: &str, value: &str, charset: &str); #[doc(alias = "g_mime_header_list_clear")] fn clear(&self); #[doc(alias = "g_mime_header_list_contains")] fn contains(&self, name: &str) -> bool; #[doc(alias = "g_mime_header_list_get_count")] #[doc(alias = "get_count")] fn count(&self) -> i32; #[doc(alias = "g_mime_header_list_get_header")] #[doc(alias = "get_header")] fn header(&self, name: &str) -> Option<Header>; #[doc(alias = "g_mime_header_list_get_header_at")] #[doc(alias = "get_header_at")] fn header_at(&self, index: i32) -> Option<Header>; #[doc(alias = "g_mime_header_list_prepend")] fn prepend(&self, name: &str, value: &str, charset: &str); #[doc(alias = "g_mime_header_list_remove")] fn remove(&self, name: &str) -> bool; #[doc(alias = "g_mime_header_list_remove_at")] fn remove_at(&self, index: i32); #[doc(alias = "g_mime_header_list_set")] fn set(&self, name: &str, value: &str, charset: &str); #[doc(alias = "g_mime_header_list_to_string")] fn to_string(&self, options: Option<&FormatOptions>) -> Option<glib::GString>; #[doc(alias = "g_mime_header_list_write_to_stream")] fn write_to_stream(&self, options: Option<&FormatOptions>, stream: &impl IsA<Stream>) -> isize; } impl<O: IsA<HeaderList>> HeaderListExt for O { fn append(&self, name: &str, value: &str, charset: &str) { unsafe { ffi::g_mime_header_list_append( self.as_ref().to_glib_none().0, name.to_glib_none().0, value.to_glib_none().0, charset.to_glib_none().0, ); } } fn clear(&self) { unsafe { ffi::g_mime_header_list_clear(self.as_ref().to_glib_none().0); } } fn contains(&self, name: &str) -> bool { unsafe { from_glib(ffi::g_mime_header_list_contains( self.as_ref().to_glib_none().0, name.to_glib_none().0, )) } } fn count(&self) -> i32 { unsafe { ffi::g_mime_header_list_get_count(self.as_ref().to_glib_none().0) } } fn header(&self, name: &str) -> Option<Header> { unsafe { from_glib_none(ffi::g_mime_header_list_get_header( self.as_ref().to_glib_none().0, name.to_glib_none().0, )) } } fn header_at(&self, index: i32) -> Option<Header> { unsafe { from_glib_none(ffi::g_mime_header_list_get_header_at( self.as_ref().to_glib_none().0, index, )) } } fn prepend(&self, name: &str, value: &str, charset: &str) { unsafe { ffi::g_mime_header_list_prepend( self.as_ref().to_glib_none().0, name.to_glib_none().0, value.to_glib_none().0, charset.to_glib_none().0, ); } } fn remove(&self, name: &str) -> bool { unsafe { from_glib(ffi::g_mime_header_list_remove( self.as_ref().to_glib_none().0, name.to_glib_none().0, )) } } fn remove_at(&self, index: i32) { unsafe { ffi::g_mime_header_list_remove_at(self.as_ref().to_glib_none().0, index); } } fn set(&self, name: &str, value: &str, 
charset: &str) { unsafe { ffi::g_mime_header_list_set( self.as_ref().to_glib_none().0, name.to_glib_none().0, value.to_glib_none().0, charset.to_glib_none().0, ); } } fn to_string(&self, options: Option<&FormatOptions>) -> Option<glib::GString> { unsafe { from_glib_full(ffi::g_mime_header_list_to_string( self.as_ref().to_glib_none().0, mut_override(options.to_glib_none().0), )) } } fn write_to_stream(&self, options: Option<&FormatOptions>, stream: &impl IsA<Stream>) -> isize { unsafe { ffi::g_mime_header_list_write_to_stream( self.as_ref().to_glib_none().0, mut_override(options.to_glib_none().0), stream.as_ref().to_glib_none().0, ) } } } impl fmt::Display for HeaderList { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("HeaderList") } }
29.196809
100
0.564948
6283b72d4bafcab4ff49028906bb8f6136baa10b
3,987
use crate::internal::*; use tract_pulse_opl::ops::Delay; pub fn register(registry: &mut Registry) { registry.register_dumper(TypeId::of::<Delay>(), ser_delay) } fn ser_delay(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> { let op = node.op().downcast_ref::<Delay>().unwrap(); let wire = ast.mapping[&node.inputs[0]].clone(); Ok(Some(invocation( "tract_pulse_delay", &[wire], &[ ("axis", numeric(op.axis)), ("delay", numeric(op.delay)), ("overlap", numeric(op.overlap)), ], ))) } impl PulsedOp for Delay { fn pulsed_output_facts(&self, inputs: &[&PulsedFact]) -> TractResult<TVec<PulsedFact>> { let mut fact = inputs[0].clone(); fact.shape[self.axis] += self.overlap; fact.delay += self.delay + self.overlap; Ok(tvec!(fact)) } as_op!(); pulsed_op_to_typed_op!(); } #[cfg(test)] mod test { use super::*; fn test_pulse_delay_over(pulse: usize, delay: usize, overlap: usize) { let mut model = PulsedModel::default(); let fact1 = PulsedFact { datum_type: u8::datum_type(), shape: tvec![pulse.to_dim()], axis: 0, dim: stream_dim(), delay: 0, }; let source = model.add_source("source", fact1.clone()).unwrap(); model .wire_node("delay", Delay::new(fact1.axis, &(&fact1).into(), delay, overlap), &[source]) .unwrap(); model.auto_outputs().unwrap(); let plan = SimplePlan::new(model).unwrap(); let mut state = tract_core::plan::SimpleState::new(plan).unwrap(); for i in 0..5 { let input: Vec<u8> = (pulse * i..(pulse * (i + 1))).map(|a| a as u8).collect(); let expect: Vec<u8> = (pulse * i..(pulse * (i + 1) + overlap)) .map(|i| i.saturating_sub(delay + overlap) as u8) .collect(); let output = state.run(tvec!(tensor1(&input))).unwrap(); let skip = (delay + overlap).saturating_sub(i * pulse).min(pulse + overlap); assert_eq!(&output[0].as_slice::<u8>().unwrap()[skip..], &expect[skip..]); } } #[test] fn sub_pulse() { test_pulse_delay_over(4, 1, 0); } #[test] fn supra_pulse() { test_pulse_delay_over(4, 5, 0); } #[test] fn sub_pulse_context() { test_pulse_delay_over(4, 0, 2); } #[test] fn supra_pulse_context() { test_pulse_delay_over(4, 0, 6); } #[test] fn test_two_delays() { let pulse = 4usize; let mut model = PulsedModel::default(); let fact_0 = PulsedFact { datum_type: u8::datum_type(), shape: tvec![pulse.to_dim()], axis: 0, dim: stream_dim(), delay: 0, }; let source = model.add_source("source", fact_0.clone()).unwrap(); let delay_1 = model .wire_node("delay-1", Delay::new(fact_0.axis, &(&fact_0).into(), 2, 0), &[source]) .unwrap()[0]; let fact_1 = model.outlet_fact(delay_1).unwrap().clone(); let delay_2 = model .wire_node("delay-1", Delay::new(fact_1.axis, &(&fact_1).into(), 2, 0), &[delay_1]) .unwrap(); model.set_output_outlets(&delay_2).unwrap(); let plan = SimplePlan::new(model).unwrap(); let mut state = tract_core::plan::SimpleState::new(plan).unwrap(); for i in 0..5 { let input: Vec<u8> = (pulse * i..(pulse * (i + 1))).map(|a| a as u8).collect(); let expect: Vec<u8> = (pulse * i..(pulse * (i + 1))).map(|i| i.saturating_sub(4) as u8).collect(); let skip = 4usize.saturating_sub(i * pulse).min(pulse); let output = state.run(tvec!(tensor1(&input))).unwrap(); assert_eq!(&output[0].as_slice::<u8>().unwrap()[skip..], &expect[skip..]); } } }
32.680328
100
0.534236
e6df6b7fd2724f318ce1d1957ffb698138eb2ba6
4,239
//! This module provides two passes: //! //! - [`CleanAscribeUserType`], that replaces all [`AscribeUserType`] //! statements with [`Nop`]. //! - [`CleanFakeReadsAndBorrows`], that replaces all [`FakeRead`] statements //! and borrows that are read by [`ForMatchGuard`] fake reads with [`Nop`]. //! //! The `CleanFakeReadsAndBorrows` "pass" is actually implemented as two //! traversals (aka visits) of the input MIR. The first traversal, //! [`DeleteAndRecordFakeReads`], deletes the fake reads and finds the //! temporaries read by [`ForMatchGuard`] reads, and [`DeleteFakeBorrows`] //! deletes the initialization of those temporaries. //! //! [`CleanAscribeUserType`]: cleanup_post_borrowck::CleanAscribeUserType //! [`CleanFakeReadsAndBorrows`]: cleanup_post_borrowck::CleanFakeReadsAndBorrows //! [`DeleteAndRecordFakeReads`]: cleanup_post_borrowck::DeleteAndRecordFakeReads //! [`DeleteFakeBorrows`]: cleanup_post_borrowck::DeleteFakeBorrows //! [`AscribeUserType`]: rustc::mir::StatementKind::AscribeUserType //! [`Nop`]: rustc::mir::StatementKind::Nop //! [`FakeRead`]: rustc::mir::StatementKind::FakeRead //! [`ForMatchGuard`]: rustc::mir::FakeReadCause::ForMatchGuard use rustc_data_structures::fx::FxHashSet; use rustc::mir::{BasicBlock, FakeReadCause, Local, Location, Mir, Place}; use rustc::mir::{Statement, StatementKind}; use rustc::mir::visit::MutVisitor; use rustc::ty::TyCtxt; use transform::{MirPass, MirSource}; pub struct CleanAscribeUserType; pub struct DeleteAscribeUserType; impl MirPass for CleanAscribeUserType { fn run_pass<'a, 'tcx>(&self, _tcx: TyCtxt<'a, 'tcx, 'tcx>, _source: MirSource, mir: &mut Mir<'tcx>) { let mut delete = DeleteAscribeUserType; delete.visit_mir(mir); } } impl<'tcx> MutVisitor<'tcx> for DeleteAscribeUserType { fn visit_statement(&mut self, block: BasicBlock, statement: &mut Statement<'tcx>, location: Location) { if let StatementKind::AscribeUserType(..) = statement.kind { statement.make_nop(); } self.super_statement(block, statement, location); } } pub struct CleanFakeReadsAndBorrows; #[derive(Default)] pub struct DeleteAndRecordFakeReads { fake_borrow_temporaries: FxHashSet<Local>, } pub struct DeleteFakeBorrows { fake_borrow_temporaries: FxHashSet<Local>, } // Removes any FakeReads from the MIR impl MirPass for CleanFakeReadsAndBorrows { fn run_pass<'a, 'tcx>(&self, _tcx: TyCtxt<'a, 'tcx, 'tcx>, _source: MirSource, mir: &mut Mir<'tcx>) { let mut delete_reads = DeleteAndRecordFakeReads::default(); delete_reads.visit_mir(mir); let mut delete_borrows = DeleteFakeBorrows { fake_borrow_temporaries: delete_reads.fake_borrow_temporaries, }; delete_borrows.visit_mir(mir); } } impl<'tcx> MutVisitor<'tcx> for DeleteAndRecordFakeReads { fn visit_statement(&mut self, block: BasicBlock, statement: &mut Statement<'tcx>, location: Location) { if let StatementKind::FakeRead(cause, ref place) = statement.kind { if let FakeReadCause::ForMatchGuard = cause { match *place { Place::Local(local) => self.fake_borrow_temporaries.insert(local), _ => bug!("Fake match guard read of non-local: {:?}", place), }; } statement.make_nop(); } self.super_statement(block, statement, location); } } impl<'tcx> MutVisitor<'tcx> for DeleteFakeBorrows { fn visit_statement(&mut self, block: BasicBlock, statement: &mut Statement<'tcx>, location: Location) { if let StatementKind::Assign(Place::Local(local), _) = statement.kind { if self.fake_borrow_temporaries.contains(&local) { statement.make_nop(); } } self.super_statement(block, statement, location); } }
37.184211
86
0.625147
01b575937e6361db37443bf0de91b1e6000b9712
1,909
use crate::prelude::*;
use nu_engine::WholeStreamCommand;
use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape, UntaggedValue};
use nu_source::Tagged;

pub struct Command;

impl WholeStreamCommand for Command {
    fn name(&self) -> &str {
        "drop"
    }

    fn signature(&self) -> Signature {
        Signature::build("drop").optional(
            "rows",
            SyntaxShape::Number,
            "starting from the back, the number of rows to remove",
        )
    }

    fn usage(&self) -> &str {
        "Remove the last number of rows or columns."
    }

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        drop(args)
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Remove the last item of a list/table",
                example: "echo [1 2 3] | drop",
                result: Some(vec![
                    UntaggedValue::int(1).into(),
                    UntaggedValue::int(2).into(),
                ]),
            },
            Example {
                description: "Remove the last 2 items of a list/table",
                example: "echo [1 2 3] | drop 2",
                result: Some(vec![UntaggedValue::int(1).into()]),
            },
        ]
    }
}

fn drop(args: CommandArgs) -> Result<OutputStream, ShellError> {
    let args = args.evaluate_once()?;
    let rows: Option<Tagged<u64>> = args.opt(0)?;
    let v: Vec<_> = args.input.into_vec();

    let rows_to_drop = if let Some(quantity) = rows {
        *quantity as usize
    } else {
        1
    };

    Ok(if rows_to_drop == 0 {
        v.into_iter().map(Ok).to_input_stream()
    } else {
        let k = if v.len() < rows_to_drop {
            0
        } else {
            v.len() - rows_to_drop
        };

        let iter = v.into_iter().map(Ok).take(k);
        iter.to_input_stream()
    })
}
25.797297
74
0.51912
2f233b720d088b2db3b35ec53ab828bde4478018
1,513
// Copyright 2019-2020 PolkaX. Licensed under MIT or Apache-2.0.

use std::collections::HashMap;
use std::sync::Arc;
use std::sync::RwLock;

use block_format::Block;
use cid::Codec;

use crate::error::{FormatError, Result};
use crate::format::Node;

lazy_static::lazy_static! {
    static ref BLOCK_DECODERS: RwLock<HashMap<Codec, Arc<DecodeBlockFunc>>> =
        RwLock::new(HashMap::new());
}

type DecodeBlockFunc = dyn Fn(&dyn Block) -> Result<Box<dyn Node>> + Send + Sync;

/// Register decoder for all blocks with the passed codec.
///
/// This will silently replace any existing registered block decoders.
pub fn register<F>(codec: Codec, decoder: F)
where
    F: Fn(&dyn Block) -> Result<Box<dyn Node>> + Send + Sync + 'static,
{
    let mut block_decoders = BLOCK_DECODERS
        .write()
        .expect("get instance write lock failed");
    block_decoders.insert(codec, Arc::new(decoder));
}

/// Decode block into node with the decode function corresponding to the codec of the block's CID.
pub fn decode(block: &impl Block) -> Result<Box<dyn Node>> {
    let codec = block.cid().codec();
    let decoder_func = {
        // hold the read lock only briefly so decoding itself can run in parallel
        let block_decoders = BLOCK_DECODERS
            .read()
            .expect("get instance read lock failed");
        // clone the Arc so the lock can be released before decoding
        block_decoders
            .get(&codec)
            .ok_or(FormatError::DecoderNotRegister(codec))?
            .clone()
    };
    decoder_func(block)
}
31.520833
106
0.66226
08e2a122e33f578c308a214fd22b2e851bede58a
14,154
// Copyright (c) 2018-2019, The rav1e contributors. All rights reserved // // This source code is subject to the terms of the BSD 2 Clause License and // the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License // was not distributed with this source code in the LICENSE file, you can // obtain it at www.aomedia.org/license/software. If the Alliance for Open // Media Patent License 1.0 was not distributed with this source code in the // PATENTS file, you can obtain it at www.aomedia.org/license/patent. #![deny(missing_docs)] use crate::api::color::*; use crate::api::config::*; use crate::api::internal::*; use crate::api::util::*; use bitstream_io::*; use crate::encoder::*; use crate::frame::*; use crate::util::Pixel; use std::fmt; use std::io; /// The encoder context. /// /// Contains the encoding state. pub struct Context<T: Pixel> { pub(crate) inner: ContextInner<T>, pub(crate) config: EncoderConfig, pub(crate) pool: crate::rayon::ThreadPool, pub(crate) is_flushing: bool, } impl<T: Pixel> Context<T> { /// Allocates and returns a new frame. /// /// # Examples /// /// ``` /// use rav1e::prelude::*; /// /// # fn main() -> Result<(), InvalidConfig> { /// let cfg = Config::default(); /// let ctx: Context<u8> = cfg.new_context()?; /// let frame = ctx.new_frame(); /// # Ok(()) /// # } /// ``` #[inline] pub fn new_frame(&self) -> Frame<T> { Frame::new( self.config.width, self.config.height, self.config.chroma_sampling, ) } /// Sends the frame for encoding. /// /// This method adds the frame into the frame queue and runs the first passes /// of the look-ahead computation. /// /// Passing `None` is equivalent to calling [`flush`]. /// /// # Errors /// /// If this method is called with a frame after the encoder has been flushed /// or the encoder internal limit is hit (`std::i32::MAX` frames) the /// [`EncoderStatus::EnoughData`] error is returned. /// /// # Examples /// /// ``` /// use rav1e::prelude::*; /// /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// let cfg = Config::default(); /// let mut ctx: Context<u8> = cfg.new_context().unwrap(); /// let f1 = ctx.new_frame(); /// let f2 = f1.clone(); /// let info = FrameParameters { /// frame_type_override: FrameTypeOverride::Key, /// opaque: None, /// }; /// /// // Send the plain frame data /// ctx.send_frame(f1)?; /// // Send the data and the per-frame parameters /// // In this case the frame is forced to be a keyframe. /// ctx.send_frame((f2, info))?; /// // Flush the encoder, it is equivalent to a call to `flush()` /// ctx.send_frame(None)?; /// # Ok(()) /// # } /// ``` /// /// [`flush`]: #method.flush /// [`EncoderStatus::EnoughData`]: enum.EncoderStatus.html#variant.EnoughData #[inline] pub fn send_frame<F>(&mut self, frame: F) -> Result<(), EncoderStatus> where F: IntoFrame<T>, { let (frame, params) = frame.into(); if frame.is_none() { if self.is_flushing { return Ok(()); } self.inner.limit = Some(self.inner.frame_count); self.is_flushing = true; } else if self.is_flushing { return Err(EncoderStatus::EnoughData); // The rate control can process at most std::i32::MAX frames } else if self.inner.frame_count == std::i32::MAX as u64 - 1 { self.inner.limit = Some(self.inner.frame_count); self.is_flushing = true; } let inner = &mut self.inner; let pool = &mut self.pool; pool.install(|| inner.send_frame(frame, params)) } /// Returns the first-pass data of a two-pass encode for the frame that was /// just encoded. 
/// /// This should be called BEFORE every call to [`receive_packet`] (including /// the very first one), even if no packet was produced by the last call to /// [`receive_packet`], if any (i.e., [`EncoderStatus::Encoded`] was /// returned). It needs to be called once more after /// [`EncoderStatus::LimitReached`] is returned, to retrieve the header that /// should be written to the front of the stats file (overwriting the /// placeholder header that was emitted at the start of encoding). /// /// It is still safe to call this function when [`receive_packet`] returns /// any other error. It will return `None` instead of returning a duplicate /// copy of the previous frame's data. /// /// [`receive_packet`]: #method.receive_packet /// [`EncoderStatus::Encoded`]: enum.EncoderStatus.html#variant.Encoded /// [`EncoderStatus::LimitReached`]: /// enum.EncoderStatus.html#variant.LimitReached #[inline] pub fn twopass_out(&mut self) -> Option<&[u8]> { let params = self .inner .rc_state .get_twopass_out_params(&self.inner, self.inner.output_frameno); self.inner.rc_state.twopass_out(params) } /// Returns the number of bytes of the stats file needed before the next /// frame of the second pass in a two-pass encode can be encoded. /// /// This is a lower bound (more might be required), but if `0` is returned, /// then encoding can proceed. This is just a hint to the application, and /// does not need to be called for encoding the second pass to work, so long /// as the application continues to provide more data to [`twopass_in`] in a /// loop until [`twopass_in`] returns `0`. /// /// [`twopass_in`]: #method.twopass_in #[inline] pub fn twopass_bytes_needed(&mut self) -> usize { self.inner.rc_state.twopass_in(None).unwrap_or(0) } /// Provides the stats data produced in the first pass of a two-pass encode /// to the second pass. /// /// On success this returns the number of bytes of the data which were /// consumed. When encoding the second pass of a two-pass encode, this should /// be called repeatedly in a loop before every call to [`receive_packet`] /// (including the very first one) until no bytes are consumed, or until /// [`twopass_bytes_needed`] returns `0`. /// /// [`receive_packet`]: #method.receive_packet /// [`twopass_bytes_needed`]: #method.twopass_bytes_needed #[inline] pub fn twopass_in(&mut self, buf: &[u8]) -> Result<usize, EncoderStatus> { self.inner.rc_state.twopass_in(Some(buf)).or(Err(EncoderStatus::Failure)) } /// Encodes the next frame and returns the encoded data. /// /// This method is where the main encoding work is done. /// /// # Examples /// /// Encoding a single frame: /// /// ``` /// use rav1e::prelude::*; /// /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// let cfg = Config::default(); /// let mut ctx: Context<u8> = cfg.new_context()?; /// let frame = ctx.new_frame(); /// /// ctx.send_frame(frame)?; /// ctx.flush(); /// /// loop { /// match ctx.receive_packet() { /// Ok(packet) => { /* Mux the packet. */ }, /// Err(EncoderStatus::Encoded) => (), /// Err(EncoderStatus::LimitReached) => break, /// Err(err) => Err(err)?, /// } /// } /// # Ok(()) /// # } /// ``` /// /// Encoding a sequence of frames: /// /// ``` /// use std::sync::Arc; /// use rav1e::prelude::*; /// /// fn encode_frames( /// ctx: &mut Context<u8>, /// mut frames: impl Iterator<Item=Frame<u8>> /// ) -> Result<(), EncoderStatus> { /// // This is a slightly contrived example, intended to showcase the /// // various statuses that can be returned from receive_packet(). 
/// // Assume that, for example, there are a lot of frames in the /// // iterator, which are produced lazily, so you don't want to send /// // them all in at once as to not exhaust the memory. /// loop { /// match ctx.receive_packet() { /// Ok(packet) => { /* Mux the packet. */ }, /// Err(EncoderStatus::Encoded) => { /// // A frame was encoded without emitting a packet. This is /// // normal, just proceed as usual. /// }, /// Err(EncoderStatus::LimitReached) => { /// // All frames have been encoded. Time to break out of the /// // loop. /// break; /// }, /// Err(EncoderStatus::NeedMoreData) => { /// // The encoder has requested additional frames. Push the /// // next frame in, or flush the encoder if there are no /// // frames left (on None). /// ctx.send_frame(frames.next().map(Arc::new))?; /// }, /// Err(EncoderStatus::EnoughData) => { /// // Since we aren't trying to push frames after flushing, /// // this should never happen in this example. /// unreachable!(); /// }, /// Err(EncoderStatus::NotReady) => { /// // We're not doing two-pass encoding, so this can never /// // occur. /// unreachable!(); /// }, /// Err(EncoderStatus::Failure) => { /// return Err(EncoderStatus::Failure); /// }, /// } /// } /// /// Ok(()) /// } /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// # let mut enc = EncoderConfig::default(); /// # // So it runs faster. /// # enc.width = 16; /// # enc.height = 16; /// # let cfg = Config::new().with_encoder_config(enc); /// # let mut ctx: Context<u8> = cfg.new_context()?; /// # /// # let frames = vec![ctx.new_frame(); 4].into_iter(); /// # encode_frames(&mut ctx, frames); /// # /// # Ok(()) /// # } /// ``` #[inline] pub fn receive_packet(&mut self) -> Result<Packet<T>, EncoderStatus> { let inner = &mut self.inner; let pool = &mut self.pool; pool.install(|| inner.receive_packet()) } /// Flushes the encoder. /// /// Flushing signals the end of the video. After the encoder has been /// flushed, no additional frames are accepted. #[inline] pub fn flush(&mut self) { self.send_frame(None).unwrap(); } /// Produces a sequence header matching the current encoding context. /// /// Its format is compatible with the AV1 Matroska and ISOBMFF specification. /// Note that the returned header does not include any config OBUs which are /// required for some uses. See [the specification]. /// /// [the specification]: /// https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-section #[inline] pub fn container_sequence_header(&self) -> Vec<u8> { fn sequence_header_inner(seq: &Sequence) -> io::Result<Vec<u8>> { let mut buf = Vec::new(); { let mut bw = BitWriter::endian(&mut buf, BigEndian); bw.write_bit(true)?; // marker bw.write(7, 1)?; // version bw.write(3, seq.profile)?; bw.write(5, 31)?; // level bw.write_bit(false)?; // tier bw.write_bit(seq.bit_depth > 8)?; // high_bitdepth bw.write_bit(seq.bit_depth == 12)?; // twelve_bit bw.write_bit(seq.bit_depth == 1)?; // monochrome bw.write_bit(seq.chroma_sampling != ChromaSampling::Cs444)?; // chroma_subsampling_x bw.write_bit(seq.chroma_sampling == ChromaSampling::Cs420)?; // chroma_subsampling_y bw.write(2, 0)?; // sample_position bw.write(3, 0)?; // reserved bw.write_bit(false)?; // initial_presentation_delay_present bw.write(4, 0)?; // reserved } Ok(buf) } let seq = Sequence::new(&self.config); sequence_header_inner(&seq).unwrap() } } /// Rate Control Data pub enum RcData { /// A Rate Control Summary Packet /// /// It is emitted once, after the encoder is flushed. 
/// /// It contains a summary of the rate control information for the /// encoding process that just terminated. Summary(Box<[u8]>), /// A Rate Control Frame-specific Packet /// /// It is emitted every time a frame is processed. /// /// The information contained is required to encode its matching /// frame in a second pass encoding. Frame(Box<[u8]>), } impl<T: Pixel> Context<T> { /// Return the Rate Control Summary Packet size /// /// It is useful mainly to preserve space when saving /// both Rate Control Summary and Frame Packets in a single file. pub fn rc_summary_size(&self) -> usize { crate::rate::TWOPASS_HEADER_SZ } /// Return the first pass data /// /// Call it after receive_packet. /// /// It will return a `RcData::Summary` once the encoder is flushed. pub fn rc_receive_pass_data(&mut self) -> RcData { if self.inner.done_processing() && self.inner.rc_state.pass1_data_retrieved { let data = self.inner.rc_state.emit_summary(); RcData::Summary(data.to_vec().into_boxed_slice()) } else if let Some(data) = self.inner.rc_state.emit_frame_data() { RcData::Frame(data.to_vec().into_boxed_slice()) } else { unreachable!( "The encoder received more frames than its internal limit allows" ) } } /// Number of pass data packets required to progress the encoding process. /// pub fn rc_second_pass_data_required(&self) -> usize { if self.inner.done_processing() { 0 } else { self.inner.rc_state.twopass_in_frames_needed() as usize } } /// Feed the first pass Rate Control data to the encoder, /// Frame-specific Packets only. /// /// Call it before receive_packet() /// /// It may return `EncoderStatus::Failure` if the data provided is incorrect pub fn rc_send_pass_data( &mut self, data: &[u8], ) -> Result<(), EncoderStatus> { self .inner .rc_state .parse_frame_data_packet(data) .map_err(|_| EncoderStatus::Failure) } } impl<T: Pixel> fmt::Debug for Context<T> { fn fmt( &self, f: &mut fmt::Formatter<'_>, ) -> std::result::Result<(), fmt::Error> { write!( f, "{{ \ config: {:?}, \ is_flushing: {}, \ }}", self.config, self.is_flushing, ) } }
33.147541
92
0.597075
7542e58facf1fdb3b3a00f564427ce53efdb2b80
907
// primitive_types2.rs // Fill in the rest of the line that has code missing! // No hints, there's no tricks, just get used to typing these :) fn main() { // Characters (`char`) let my_first_initial = 'C'; if my_first_initial.is_alphabetic() { println!("Alphabetical!"); } else if my_first_initial.is_numeric() { println!("Numerical!"); } else { println!("Neither alphabetic nor numeric!"); } let your_character = 'f';// Finish this line like the example! What's your favorite character? // Try a letter, try a number, try a special character, try a character // from a different language than your own, try an emoji! if your_character.is_alphabetic() { println!("Alphabetical!"); } else if your_character.is_numeric() { println!("Numerical!"); } else { println!("Neither alphabetic nor numeric!"); } }
31.275862
98
0.638368
87ffa81f5d7391cbe0516bba6256ce100fa1d885
5,620
// SPDX-License-Identifier: MIT OR Apache-2.0 // // Copyright (c) 2018-2021 Andre Richter <[email protected]> //! Memory Management. pub mod mmu; use crate::common; use core::{ convert::TryFrom, fmt, marker::PhantomData, ops::{AddAssign, RangeInclusive, SubAssign}, }; //-------------------------------------------------------------------------------------------------- // Public Definitions //-------------------------------------------------------------------------------------------------- /// Metadata trait for marking the type of an address. pub trait AddressType: Copy + Clone + PartialOrd + PartialEq {} /// Zero-sized type to mark a physical address. #[derive(Copy, Clone, PartialOrd, PartialEq)] pub enum Physical {} /// Zero-sized type to mark a virtual address. #[derive(Copy, Clone, PartialOrd, PartialEq)] pub enum Virtual {} /// Generic address type. #[derive(Copy, Clone, PartialOrd, PartialEq)] pub struct Address<ATYPE: AddressType> { value: usize, _address_type: PhantomData<fn() -> ATYPE>, } //-------------------------------------------------------------------------------------------------- // Public Code //-------------------------------------------------------------------------------------------------- impl AddressType for Physical {} impl AddressType for Virtual {} impl<ATYPE: AddressType> Address<ATYPE> { /// Create an instance. pub const fn new(value: usize) -> Self { Self { value, _address_type: PhantomData, } } /// Align down. pub const fn align_down(self, alignment: usize) -> Self { let aligned = common::align_down(self.value, alignment); Self { value: aligned, _address_type: PhantomData, } } /// Converts `Address` into an usize. pub const fn into_usize(self) -> usize { self.value } } impl TryFrom<Address<Virtual>> for Address<Physical> { type Error = mmu::TranslationError; fn try_from(virt: Address<Virtual>) -> Result<Self, Self::Error> { mmu::try_virt_to_phys(virt) } } impl<ATYPE: AddressType> core::ops::Add<usize> for Address<ATYPE> { type Output = Self; fn add(self, other: usize) -> Self { Self { value: self.value + other, _address_type: PhantomData, } } } impl<ATYPE: AddressType> AddAssign for Address<ATYPE> { fn add_assign(&mut self, other: Self) { *self = Self { value: self.value + other.into_usize(), _address_type: PhantomData, }; } } impl<ATYPE: AddressType> core::ops::Sub<usize> for Address<ATYPE> { type Output = Self; fn sub(self, other: usize) -> Self { Self { value: self.value - other, _address_type: PhantomData, } } } impl<ATYPE: AddressType> SubAssign for Address<ATYPE> { fn sub_assign(&mut self, other: Self) { *self = Self { value: self.value - other.into_usize(), _address_type: PhantomData, }; } } impl fmt::Display for Address<Physical> { // Don't expect to see physical addresses greater than 40 bit. fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let q3: u8 = ((self.value >> 32) & 0xff) as u8; let q2: u16 = ((self.value >> 16) & 0xffff) as u16; let q1: u16 = (self.value & 0xffff) as u16; write!(f, "0x")?; write!(f, "{:02x}_", q3)?; write!(f, "{:04x}_", q2)?; write!(f, "{:04x}", q1) } } impl fmt::Display for Address<Virtual> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let q4: u16 = ((self.value >> 48) & 0xffff) as u16; let q3: u16 = ((self.value >> 32) & 0xffff) as u16; let q2: u16 = ((self.value >> 16) & 0xffff) as u16; let q1: u16 = (self.value & 0xffff) as u16; write!(f, "0x")?; write!(f, "{:04x}_", q4)?; write!(f, "{:04x}_", q3)?; write!(f, "{:04x}_", q2)?; write!(f, "{:04x}", q1) } } /// Zero out an inclusive memory range. 
/// /// # Safety /// /// - `range.start` and `range.end` must be valid. /// - `range.start` and `range.end` must be `T` aligned. pub unsafe fn zero_volatile<T>(range: RangeInclusive<*mut T>) where T: From<u8>, { let mut ptr = *range.start(); let end_inclusive = *range.end(); while ptr <= end_inclusive { core::ptr::write_volatile(ptr, T::from(0)); ptr = ptr.offset(1); } } //-------------------------------------------------------------------------------------------------- // Testing //-------------------------------------------------------------------------------------------------- #[cfg(test)] mod tests { use super::*; use test_macros::kernel_test; /// Check `zero_volatile()`. #[kernel_test] fn zero_volatile_works() { let mut x: [usize; 3] = [10, 11, 12]; let x_range = x.as_mut_ptr_range(); let x_range_inclusive = RangeInclusive::new(x_range.start, unsafe { x_range.end.offset(-1) }); unsafe { zero_volatile(x_range_inclusive) }; assert_eq!(x, [0, 0, 0]); } /// Check `bss` section layout. #[kernel_test] fn bss_section_is_sane() { use crate::bsp::memory::bss_range_inclusive; use core::mem; let start = *bss_range_inclusive().start() as usize; let end = *bss_range_inclusive().end() as usize; assert_eq!(start % mem::size_of::<usize>(), 0); assert_eq!(end % mem::size_of::<usize>(), 0); assert!(end >= start); } }
27.684729
100
0.516726
38885efa77c826abadacb4cf2ac5d9419e25df6e
9,888
pub mod cons_list; use std::cell::RefCell; use std::cmp::Ordering; use std::collections::HashMap; use std::rc::Rc; use std::str::FromStr; use crate::vm::VM; use cons_list::ConsList; pub type Program = ConsList<Node>; pub type BuiltinFn = fn(&mut VM, ConsList<Node>) -> Result<Node, String>; impl ::std::fmt::Debug for Program { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { let mut s = String::new(); self.iter() .for_each(|item| s.push_str(&format!("{} ", item))); write!(f, "({})", s.trim_end()) } } // pub type NodeRef = Rc<RefCell<Node>>; thread_local! { pub static TRUE_KW: Node = Node::new_keyword("t"); pub static FALSE_KW: Node = Node::new_keyword("f"); } type HashMapRef = Rc<RefCell<HashMap<String, Node>>>; #[derive(Clone)] pub enum Node { Symbol(SymbolRef), Keyword(String), Number(i64), Float(f64), String(String), List(ConsList<Node>), Function(Callable), Map(HashMapRef), Empty, } impl Node { pub fn empty_list() -> Node { Node::List(ConsList::new()) } pub fn from_vec(v: Vec<Node>) -> Node { let mut c = ConsList::new(); for i in v.iter().rev().cloned() { c = c.append(i); } Node::List(c) } pub fn from_vec_ref(v: Vec<&Node>) -> Node { let mut c = ConsList::new(); for i in v.iter().rev() { c = c.append((**i).clone()); } Node::List(c) } pub fn new_keyword(name: &str) -> Self { Node::Keyword(str_to_symbol_name(name)) } pub fn from_hashmap(m: HashMap<String, Node>) -> Self { Node::Map(Rc::new(RefCell::new(m))) } pub fn from_string(s: String) -> Self { Node::String(s) } pub fn bool_obj(b: bool) -> Node { if b { TRUE_KW.with(|t| t.clone()) } else { FALSE_KW.with(|f| f.clone()) } } pub fn type_str(&self) -> &str { match self { Node::Symbol(_) => "Symbol", Node::Keyword(_) => "Keyword", Node::Number(_) => "Number", Node::Float(_) => "Float", Node::String(_) => "String", Node::List(_) => "List", Node::Function(_) => "Function", Node::Map(_) => "Map", Node::Empty => "Empty", } } pub fn is_truthy(&self) -> bool { match self { Node::Symbol(_) | Node::Function(_) | Node::Map(_) => true, Node::Keyword(_) => TRUE_KW.with(|t| self == t), Node::Number(n) => *n != 0, Node::Float(n) => *n != 0.0, Node::String(s) => !s.is_empty(), Node::List(l) => !l.is_empty(), Node::Empty => false, } } } impl PartialEq for Node { // Can't derive this since Symbol contains a SymbolRef and you can't // implement external traits on an external type (Rc<T>) fn eq(&self, other: &Node) -> bool { match (self, other) { (Node::Symbol(v1), Node::Symbol(v2)) => v1.borrow().name == v2.borrow().name, (Node::Keyword(v1), Node::Keyword(v2)) => v1 == v2, (Node::Number(v1), Node::Number(v2)) => v1 == v2, (Node::Float(v1), Node::Float(v2)) => v1 == v2, (Node::String(v1), Node::String(v2)) => v1 == v2, (Node::Function(v1), Node::Function(v2)) => v1 == v2, (Node::List(v1), Node::List(v2)) => v1 == v2, (Node::Map(v1), Node::Map(v2)) => v1 == v2, (Node::Empty, Node::Empty) => true, _ => false, } } } impl PartialOrd for Node { fn partial_cmp(&self, other: &Node) -> Option<Ordering> { match (self, other) { (Node::Symbol(v1), Node::Symbol(v2)) => Some(v1.borrow().name.cmp(&(v2.borrow().name))), (Node::Keyword(v1), Node::Keyword(v2)) => Some(v1.cmp(&v2)), (Node::Number(v1), Node::Number(v2)) => Some(v1.cmp(&v2)), (Node::Float(v1), Node::Float(v2)) => v1.partial_cmp(v2), (Node::String(v1), Node::String(v2)) => Some(v1.cmp(&v2)), (Node::List(v1), Node::List(v2)) => Some(v1.len().cmp(&(v2.len()))), (Node::Empty, Node::Empty) => Some(Ordering::Equal), _ => None, } } } impl Default for Node { fn default() -> Self { FALSE_KW.with(|f| 
f.clone()) } } impl FromStr for Node { type Err = (); fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Node::String(s.to_owned())) } } impl ::std::fmt::Display for Node { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { match self { Node::Empty => write!(f, "Empty"), Node::Symbol(v) => write!(f, "{}", v.borrow()), Node::Keyword(v) => write!(f, ":{}", v), Node::Number(v) => write!(f, "{}", v), Node::Float(v) => write!(f, "{}", v), Node::String(v) => write!(f, "{}", v), Node::List(v) => { let mut s = String::new(); v.iter().for_each(|item| s.push_str(&format!("{} ", item))); write!(f, "({})", s.trim_end()) } Node::Function(_) => write!(f, "#<callable>"), Node::Map(v) => { write!(f, "{{")?; for (k, v) in v.borrow().iter() { write!(f, "{} => {:?}, ", k, v)?; } write!(f, "}}") } } } } impl ::std::fmt::Debug for Node { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { match self { Node::String(v) => write!(f, "\"{}\"", v), Node::List(v) => { let mut s = String::new(); v.iter() .for_each(|item| s.push_str(&format!("{:?} ", item))); write!(f, "({})", s.trim_end()) } Node::Map(v) => { write!(f, "{{")?; for (k, v) in v.borrow().iter() { write!(f, "{:?} => {:?}, ", k, v)?; } write!(f, "}}") } _ => write!(f, "{}", self), } } } type SymbolProps = HashMap<String, Node>; pub type SymbolRef = Rc<RefCell<Symbol>>; pub fn symbolref_to_node(sym: SymbolRef) -> Node { Node::Symbol(sym) } pub fn str_to_symbol_name(s: &str) -> String { s.to_owned() //s.to_uppercase().to_owned() } #[derive(Clone)] pub struct Symbol { name: String, pub value: Option<Node>, // Used when this symbol is evaulated outside a callable context pub function: Option<Callable>, // Used when this symbol is evaluated as a callable object properties: Option<RefCell<SymbolProps>>, // Only created when needed } impl Symbol { pub fn new(name: &str) -> Self { Symbol { name: str_to_symbol_name(&name), value: None, function: None, properties: None, } } pub fn with_builtin(name: &str, func: BuiltinFn) -> Self { Symbol { name: str_to_symbol_name(&name), value: None, function: Some(Callable::Builtin(func)), properties: None, } } pub fn with_value(name: &str, val: Node) -> Self { Symbol { name: str_to_symbol_name(name), value: Some(val), function: None, properties: None, } } pub fn into_ref(self) -> SymbolRef { Rc::new(RefCell::new(self)) } pub fn into_node(self) -> Node { Node::Symbol(self.into_ref()) } pub fn name(&self) -> &str { &self.name } pub fn value(&self) -> Node { if let Some(val) = &self.value { val.clone() } else { Node::String(self.name.clone()) } } pub fn set_property(&mut self, key: &str, value: Node) -> Option<Node> { if self.properties.is_none() { self.properties = Some(RefCell::new(HashMap::with_capacity(2))); } match &self.properties { Some(m) => m.borrow_mut().insert(key.to_owned(), value), None => None, } } pub fn get_property(&self, key: &str) -> Option<Node> { match &self.properties { Some(m) => m.borrow().get(key).cloned(), None => None, } } pub fn has_property(&self, key: &str) -> bool { match &self.properties { Some(m) => m.borrow().contains_key(key), None => false, } } } impl ::std::fmt::Display for Symbol { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "{}", self.value()) } } impl ::std::fmt::Debug for Symbol { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "{:?}", self.value()) } } #[derive(Clone, PartialEq)] pub struct Function { pub params: Vec<String>, pub body: Box<Node>, } #[derive(Clone)] pub enum Callable { 
Builtin(BuiltinFn), Func(Function), Macro(Function), } impl Callable { pub fn into_macro(self) -> Self { match self { Callable::Macro(_) => self, Callable::Func(f) => Callable::Macro(f), Callable::Builtin(_) => panic!("Cannot make builtin func into a macro"), } } } impl PartialEq for Callable { fn eq(&self, other: &Callable) -> bool { match (self, other) { (Callable::Builtin(v1), Callable::Builtin(v2)) => { v1 as *const BuiltinFn as usize == v2 as *const BuiltinFn as usize } (Callable::Macro(v1), Callable::Macro(v2)) => v1 == v2, (Callable::Func(v1), Callable::Func(v2)) => v1 == v2, _ => false, } } }
28.090909
100
0.485437
76603e8db3ec0765e9ce3aeb349dadbdeb64c630
5,028
use crate::{ storage::{Storage, StorageEntity}, user::NormalUser, }; use anyhow::Result; use chrono::prelude::*; use sqlx::prelude::*; use tangram_id::Id; pub struct Repo { pub id: String, pub title: String, pub owner_name: Option<String>, } pub async fn get_repo(db: &mut sqlx::Transaction<'_, sqlx::Any>, id: Id) -> Result<Repo> { let row = sqlx::query( " select repos.id, repos.title from repos where repos.id = $1 ", ) .bind(&id.to_string()) .fetch_one(&mut *db) .await?; let id: String = row.get(0); let id: Id = id.parse().unwrap(); let title = row.get(1); let repo = Repo { id: id.to_string(), owner_name: None, title, }; Ok(repo) } pub async fn delete_repo( db: &mut sqlx::Transaction<'_, sqlx::Any>, storage: &Storage, repo_id: Id, ) -> Result<()> { sqlx::query( " delete from repos where id = $1 ", ) .bind(&repo_id.to_string()) .execute(&mut *db) .await?; let model_version_ids = get_model_version_ids(db, repo_id).await?; for model_id in model_version_ids.into_iter() { storage.remove(StorageEntity::Model, model_id).await?; } Ok(()) } pub async fn repos_for_root(db: &mut sqlx::Transaction<'_, sqlx::Any>) -> Result<Vec<Repo>> { let rows = sqlx::query( " select repos.id, repos.title from repos ", ) .fetch_all(&mut *db) .await?; let repos = rows .iter() .map(|row| { let id: String = row.get(0); let id: Id = id.parse().unwrap(); let title = row.get(1); Repo { id: id.to_string(), owner_name: None, title, } }) .collect(); Ok(repos) } pub async fn repos_for_user( db: &mut sqlx::Transaction<'_, sqlx::Any>, user: &NormalUser, ) -> Result<Vec<Repo>> { let mut repos = Vec::new(); let rows = sqlx::query( " select repos.id, repos.title from repos where repos.user_id = $1 ", ) .bind(&user.id.to_string()) .fetch_all(&mut *db) .await?; for row in rows { let id = row.get(0); let title = row.get(1); let owner_name = user.email.clone(); repos.push(Repo { id, title, owner_name: Some(owner_name), }); } let rows = sqlx::query( " select repos.id, repos.title, organizations.name from repos inner join organizations on organizations.id = repos.organization_id inner join organizations_users on organizations_users.organization_id = repos.organization_id and organizations_users.user_id = $1 ", ) .bind(&user.id.to_string()) .fetch_all(&mut *db) .await?; for row in rows { let id = row.get(0); let title = row.get(1); let owner_name = row.get(2); repos.push(Repo { id, title, owner_name, }); } Ok(repos) } pub async fn create_root_repo( db: &mut sqlx::Transaction<'_, sqlx::Any>, repo_id: Id, title: &str, ) -> Result<()> { sqlx::query( " insert into repos ( id, created_at, title ) values ( $1, $2, $3 ) ", ) .bind(&repo_id.to_string()) .bind(&Utc::now().timestamp()) .bind(&title) .execute(&mut *db) .await?; Ok(()) } pub async fn create_user_repo( db: &mut sqlx::Transaction<'_, sqlx::Any>, user_id: Id, repo_id: Id, title: &str, ) -> Result<()> { sqlx::query( " insert into repos ( id, created_at, title, user_id ) values ( $1, $2, $3, $4 ) ", ) .bind(&repo_id.to_string()) .bind(&Utc::now().timestamp()) .bind(&title) .bind(&user_id.to_string()) .execute(&mut *db) .await?; Ok(()) } pub async fn create_org_repo( db: &mut sqlx::Transaction<'_, sqlx::Any>, org_id: Id, repo_id: Id, title: &str, ) -> Result<()> { sqlx::query( " insert into repos ( id, created_at, title, organization_id ) values ( $1, $2, $3, $4 ) ", ) .bind(&repo_id.to_string()) .bind(&Utc::now().timestamp()) .bind(&title) .bind(&org_id.to_string()) .execute(&mut *db) .await?; Ok(()) } pub async fn add_model_version( db: &mut sqlx::Transaction<'_, sqlx::Any>, 
data_storage: &Storage, repo_id: Id, model_id: Id, bytes: &[u8], ) -> Result<()> { sqlx::query( " insert into models ( id, created_at, repo_id ) values ( $1, $2, $3 ) ", ) .bind(&model_id.to_string()) .bind(&Utc::now().timestamp()) .bind(&repo_id.to_string()) .execute(&mut *db) .await?; data_storage .set(StorageEntity::Model, model_id, bytes) .await?; Ok(()) } pub async fn delete_model_version( db: &mut sqlx::Transaction<'_, sqlx::Any>, data_storage: &Storage, model_id: Id, ) -> Result<()> { sqlx::query( " delete from models where id = $1 ", ) .bind(&model_id.to_string()) .execute(&mut *db) .await?; data_storage.remove(StorageEntity::Model, model_id).await?; Ok(()) } pub async fn get_model_version_ids( db: &mut sqlx::Transaction<'_, sqlx::Any>, repo_id: Id, ) -> Result<Vec<Id>> { Ok(sqlx::query( " select models.id from models join repos on models.repo_id = repos.id where repos.id = $1 ", ) .bind(&repo_id.to_string()) .fetch_all(&mut *db) .await? .iter() .map(|row| row.get::<String, _>(0).parse().unwrap()) .collect()) }
17.829787
93
0.608194
14b1f5f80c1b52eaae141a4d12e6df548278f1c7
3,692
//! Loading and playing sounds. use crate::file::get_file_path; use crate::{file::load_file, get_context}; use std::collections::HashMap; #[cfg(all(feature = "audio"))] use quad_snd::{AudioContext as QuadSndContext, Sound as QuadSndSound}; #[cfg(all(feature = "audio"))] pub use quad_snd::PlaySoundParams; #[cfg(not(feature = "audio"))] mod dummy_audio { use crate::audio::PlaySoundParams; pub struct AudioContext {} impl AudioContext { pub fn new() -> AudioContext { AudioContext {} } pub fn pause(&mut self) {} pub fn resume(&mut self) {} } pub struct Sound {} impl Sound { pub fn load(_ctx: &mut AudioContext, _data: &[u8]) -> Sound { Sound {} } pub fn is_loaded(&self) -> bool { true } pub fn play(&mut self, _ctx: &mut AudioContext, _params: PlaySoundParams) {} pub fn stop(&mut self, _ctx: &mut AudioContext) {} pub fn set_volume(&mut self, _ctx: &mut AudioContext, _volume: f32) {} } } #[cfg(not(feature = "audio"))] use dummy_audio::{AudioContext as QuadSndContext, Sound as QuadSndSound}; #[cfg(not(feature = "audio"))] pub struct PlaySoundParams { pub looped: bool, pub volume: f32, } pub struct AudioContext { native_ctx: QuadSndContext, sounds: HashMap<usize, QuadSndSound>, id: usize, } impl AudioContext { pub fn new() -> AudioContext { AudioContext { native_ctx: QuadSndContext::new(), sounds: HashMap::new(), id: 0, } } #[cfg(target_os = "android")] pub fn pause(&mut self) { self.native_ctx.pause() } #[cfg(target_os = "android")] pub fn resume(&mut self) { self.native_ctx.resume() } } #[derive(Debug, Clone, Copy, PartialEq)] pub struct Sound(usize); /// Load audio file. /// /// Attempts to automatically detect the format of the source of data. pub async fn load_sound(path: &str) -> Result<Sound, crate::file::FileError> { let data = load_file(&get_file_path(path.to_string())).await?; load_sound_from_bytes(&data).await } /// Load audio data. /// /// Attempts to automatically detect the format of the source of data. pub async fn load_sound_from_bytes(data: &[u8]) -> Result<Sound, crate::file::FileError> { let sound = { let ctx = &mut get_context().audio_context; QuadSndSound::load(&mut ctx.native_ctx, data) }; // only on wasm the sound is not ready right away #[cfg(target_arch = "wasm32")] while sound.is_loaded() == false { crate::window::next_frame().await; } let ctx = &mut get_context().audio_context; let id = ctx.id; ctx.sounds.insert(id, sound); ctx.id += 1; Ok(Sound(id)) } pub fn play_sound_once(sound: Sound) { let ctx = &mut get_context().audio_context; let sound = &mut ctx.sounds.get_mut(&sound.0).unwrap(); sound.play( &mut ctx.native_ctx, PlaySoundParams { looped: false, volume: 1.0, }, ); } pub fn play_sound(sound: Sound, params: PlaySoundParams) { let ctx = &mut get_context().audio_context; let sound = &mut ctx.sounds.get_mut(&sound.0).unwrap(); sound.play(&mut ctx.native_ctx, params); } pub fn stop_sound(sound: Sound) { let ctx = &mut get_context().audio_context; let sound = &mut ctx.sounds.get_mut(&sound.0).unwrap(); sound.stop(&mut ctx.native_ctx); } pub fn set_sound_volume(sound: Sound, volume: f32) { let ctx = &mut get_context().audio_context; let sound = &mut ctx.sounds.get_mut(&sound.0).unwrap(); sound.set_volume(&mut ctx.native_ctx, volume) }
24.613333
90
0.62026
1a4bfaf371b06410ecd0ecbe774e6ed2afc53cbf
138
// Copyright 2021 Siemens AG // SPDX-License-Identifier: MIT #![allow(non_snake_case)] #![allow(unused_imports)] pub mod dtasm_generated;
23
31
0.76087
8f8e22ba1c34d7cd9a3bf93a94c956143ad7766b
1,685
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // aux-build:parent-source-spans.rs #![feature(decl_macro, proc_macro_hygiene)] extern crate parent_source_spans; use parent_source_spans::parent_source_spans; macro one($a:expr, $b:expr) { two!($a, $b); //~^ ERROR first parent: "hello" //~| ERROR second parent: "world" } macro two($a:expr, $b:expr) { three!($a, $b); //~^ ERROR first final: "hello" //~| ERROR second final: "world" //~| ERROR first final: "yay" //~| ERROR second final: "rust" } // forwarding tokens directly doesn't create a new source chain macro three($($tokens:tt)*) { four!($($tokens)*); } macro four($($tokens:tt)*) { parent_source_spans!($($tokens)*); } fn main() { one!("hello", "world"); //~^ ERROR first grandparent: "hello" //~| ERROR second grandparent: "world" //~| ERROR first source: "hello" //~| ERROR second source: "world" two!("yay", "rust"); //~^ ERROR first parent: "yay" //~| ERROR second parent: "rust" //~| ERROR first source: "yay" //~| ERROR second source: "rust" three!("hip", "hop"); //~^ ERROR first final: "hip" //~| ERROR second final: "hop" //~| ERROR first source: "hip" //~| ERROR second source: "hop" }
27.177419
68
0.631454
f94b6a7db41f87140d5a3d374f4ede0f3e35920d
12,246
use swc_atoms::JsWord; use swc_common::{ comments::{Comment, SingleThreadedComments}, Loc, SourceMap, Span, DUMMY_SP, }; use swc_ecma_ast as ast; use swc_ecma_visit::{self, Node, Visit, VisitWith}; pub fn analyze_dependencies( module: &ast::Module, source_map: &SourceMap, comments: &SingleThreadedComments, ) -> Vec<DependencyDescriptor> { let mut v = DependencyCollector { comments, source_map, items: vec![], is_top_level: true, }; module.visit_with(&ast::Invalid { span: DUMMY_SP }, &mut v); v.items } #[derive(Clone, Debug, Eq, PartialEq)] pub enum DependencyKind { Import, ImportType, Export, ExportType, Require, } #[derive(Clone, Debug, Eq, PartialEq)] pub struct DependencyDescriptor { pub kind: DependencyKind, /// A flag indicating if the import is dynamic or not. pub is_dynamic: bool, /// Any leading comments associated with the dependency. This is used for /// further processing of supported pragma that impact the dependency. pub leading_comments: Vec<Comment>, /// The location of the import/export statement. pub col: usize, pub line: usize, /// The text specifier associated with the import/export statement. pub specifier: JsWord, } struct DependencyCollector<'a> { comments: &'a SingleThreadedComments, pub items: Vec<DependencyDescriptor>, source_map: &'a SourceMap, // This field is used to determine if currently visited "require" // is top level and "static", or inside module body and "dynamic". is_top_level: bool, } impl<'a> DependencyCollector<'a> { fn get_location_and_comments(&self, span: Span) -> (Loc, Vec<Comment>) { let location = self.source_map.lookup_char_pos(span.lo); let leading_comments = self .comments .with_leading(span.lo, |comments| comments.to_vec()); (location, leading_comments) } } impl<'a> Visit for DependencyCollector<'a> { fn visit_import_decl(&mut self, node: &ast::ImportDecl, _parent: &dyn Node) { let specifier = node.src.value.clone(); let span = node.span; let (location, leading_comments) = self.get_location_and_comments(span); let kind = if node.type_only { DependencyKind::ImportType } else { DependencyKind::Import }; self.items.push(DependencyDescriptor { kind, is_dynamic: false, leading_comments, col: location.col_display, line: location.line, specifier, }); } fn visit_named_export(&mut self, node: &ast::NamedExport, _parent: &dyn Node) { if let Some(src) = &node.src { let specifier = src.value.clone(); let span = node.span; let (location, leading_comments) = self.get_location_and_comments(span); let kind = if node.type_only { DependencyKind::ExportType } else { DependencyKind::Export }; self.items.push(DependencyDescriptor { kind, is_dynamic: false, leading_comments, col: location.col_display, line: location.line, specifier, }); } } fn visit_export_all(&mut self, node: &ast::ExportAll, _parent: &dyn Node) { let specifier = node.src.value.clone(); let span = node.span; let (location, leading_comments) = self.get_location_and_comments(span); self.items.push(DependencyDescriptor { kind: DependencyKind::Export, is_dynamic: false, leading_comments, col: location.col_display, line: location.line, specifier, }); } fn visit_ts_import_type(&mut self, node: &ast::TsImportType, _parent: &dyn Node) { let specifier = node.arg.value.clone(); let span = node.span; let (location, leading_comments) = self.get_location_and_comments(span); self.items.push(DependencyDescriptor { kind: DependencyKind::ImportType, is_dynamic: false, leading_comments, col: location.col_display, line: location.line, specifier, }); } fn visit_module_items(&mut self, items: &[ast::ModuleItem], 
_parent: &dyn Node) { swc_ecma_visit::visit_module_items(self, items, _parent); } fn visit_stmts(&mut self, items: &[ast::Stmt], _parent: &dyn Node) { self.is_top_level = false; swc_ecma_visit::visit_stmts(self, items, _parent); self.is_top_level = true; } fn visit_call_expr(&mut self, node: &ast::CallExpr, _parent: &dyn Node) { use ast::{Expr::*, ExprOrSuper::*}; swc_ecma_visit::visit_call_expr(self, node, _parent); let call_expr = match node.callee.clone() { Super(_) => return, Expr(boxed) => boxed, }; let kind = match &*call_expr { Ident(ident) => match ident.sym.to_string().as_str() { "import" => DependencyKind::Import, "require" => DependencyKind::Require, _ => return, }, _ => return, }; // import() are always dynamic, even if at top level let is_dynamic = !self.is_top_level || kind == DependencyKind::Import; if let Some(arg) = node.args.get(0) { if let Lit(lit) = &*arg.expr { if let ast::Lit::Str(str_) = lit { let specifier = str_.value.clone(); let span = node.span; let (location, leading_comments) = self.get_location_and_comments(span); self.items.push(DependencyDescriptor { kind, is_dynamic, leading_comments, col: location.col_display, line: location.line, specifier, }); } } } } } #[cfg(test)] mod tests { use super::*; use swc_common::{ comments::{Comment, CommentKind}, sync::Lrc, BytePos, FileName, Span, SyntaxContext, }; use swc_ecma_parser::{lexer::Lexer, JscTarget, Parser, StringInput, Syntax, TsConfig}; fn helper( file_name: &str, source: &str, ) -> Result<(ast::Module, Lrc<SourceMap>, SingleThreadedComments), testing::StdErr> { let output = ::testing::run_test(true, |cm, handler| { let fm = cm.new_source_file(FileName::Custom(file_name.to_string()), source.to_string()); let comments = SingleThreadedComments::default(); let lexer: Lexer<StringInput<'_>> = Lexer::new( Syntax::Typescript(TsConfig { dts: file_name.ends_with(".d.ts"), tsx: file_name.contains("tsx"), dynamic_import: true, decorators: true, no_early_errors: true, ..Default::default() }), JscTarget::Es2015, (&*fm).into(), Some(&comments), ); let mut p = Parser::new_from(lexer); let res = p .parse_module() .map_err(|e| e.into_diagnostic(&handler).emit()); for err in p.take_errors() { err.into_diagnostic(&handler).emit(); } if handler.has_errors() { return Err(()); } Ok((res.unwrap(), cm, comments)) }); output } #[test] fn test_parsed_module_get_dependencies() { let source = r#"import * as bar from "./test.ts"; /** JSDoc */ import type { Foo } from "./foo.d.ts"; /// <reference foo="bar" /> export * as Buzz from "./buzz.ts"; // @some-pragma /** * Foo */ export type { Fizz } from "./fizz.d.ts"; const { join } = require("path"); // dynamic await import("./foo1.ts"); try { const foo = await import("./foo.ts"); } catch (e) { // pass } try { const foo = require("some_package"); } catch (e) { // pass } "#; let (module, source_map, comments) = helper("test.ts", &source).unwrap(); // eprintln!("module {:#?}", module); let dependencies = analyze_dependencies(&module, &source_map, &comments); assert_eq!(dependencies.len(), 8); assert_eq!( dependencies, vec![ DependencyDescriptor { kind: DependencyKind::Import, is_dynamic: false, leading_comments: Vec::new(), col: 0, line: 1, specifier: JsWord::from("./test.ts") }, DependencyDescriptor { kind: DependencyKind::ImportType, is_dynamic: false, leading_comments: vec![Comment { kind: CommentKind::Block, text: r#"* JSDoc "#.to_string(), span: Span::new(BytePos(34), BytePos(46), SyntaxContext::empty()), }], col: 0, line: 3, specifier: JsWord::from("./foo.d.ts") }, DependencyDescriptor { 
kind: DependencyKind::Export, is_dynamic: false, leading_comments: vec![Comment { kind: CommentKind::Line, text: r#"/ <reference foo="bar" />"#.to_string(), span: Span::new(BytePos(86), BytePos(113), SyntaxContext::empty()), }], col: 0, line: 5, specifier: JsWord::from("./buzz.ts") }, DependencyDescriptor { kind: DependencyKind::ExportType, is_dynamic: false, leading_comments: vec![ Comment { kind: CommentKind::Line, text: r#" @some-pragma"#.to_string(), span: Span::new(BytePos(149), BytePos(164), SyntaxContext::empty()), }, Comment { kind: CommentKind::Block, text: "*\n * Foo\n ".to_string(), span: Span::new(BytePos(165), BytePos(179), SyntaxContext::empty()), } ], col: 0, line: 10, specifier: JsWord::from("./fizz.d.ts") }, DependencyDescriptor { kind: DependencyKind::Require, is_dynamic: false, leading_comments: Vec::new(), col: 17, line: 11, specifier: JsWord::from("path") }, DependencyDescriptor { kind: DependencyKind::Import, is_dynamic: true, leading_comments: Vec::new(), col: 6, line: 14, specifier: JsWord::from("./foo1.ts") }, DependencyDescriptor { kind: DependencyKind::Import, is_dynamic: true, leading_comments: Vec::new(), col: 22, line: 17, specifier: JsWord::from("./foo.ts") }, DependencyDescriptor { kind: DependencyKind::Require, is_dynamic: true, leading_comments: Vec::new(), col: 16, line: 23, specifier: JsWord::from("some_package") } ] ); } }
33.735537
96
0.501878
895de9910e4ae754f1cc9a8578e842c7a0b883de
1,093
pub mod composite; pub mod model; use super::*; use crate::constants::output_fields::*; /// Initializes output object type caches on the context. /// This is a critical first step to ensure that all model and composite output /// object types are present and that subsequent schema computation has a base to rely on. /// Called only once at the very beginning of schema building. #[tracing::instrument(skip(ctx))] pub(crate) fn initialize_caches(ctx: &mut BuilderContext) { model::initialize_cache(ctx); composite::initialize_cache(ctx); model::initialize_fields(ctx); composite::initialize_fields(ctx); } pub(crate) fn affected_records_object_type(ctx: &mut BuilderContext) -> ObjectTypeWeakRef { let ident = Identifier::new("AffectedRowsOutput".to_owned(), PRISMA_NAMESPACE); return_cached_output!(ctx, &ident); let object_type = Arc::new(object_type( ident.clone(), vec![field(AFFECTED_COUNT, vec![], OutputType::int(), None)], None, )); ctx.cache_output_type(ident, object_type.clone()); Arc::downgrade(&object_type) }
33.121212
91
0.717292
7a45bd243958e0b604f5fd9c1666554a4a6413b2
4,301
//! Owned and borrowed Unix-like file descriptors. #![unstable(feature = "io_safety", issue = "87074")] #![deny(unsafe_op_in_unsafe_fn)] use crate::fs; use crate::sys_common::{AsInner, FromInner, IntoInner}; /// A borrowed file descriptor. /// /// This has a lifetime parameter to tie it to the lifetime of something that /// owns the file descriptor. /// /// This uses `repr(transparent)` and has the representation of a host file /// descriptor, so it can be used in FFI in places where a file descriptor is /// passed as an argument, it is not captured or consumed, and it never has the /// value `-1`. #[unstable(feature = "io_safety", issue = "87074")] pub use rustix::fd::BorrowedFd; /// An owned file descriptor. /// /// This closes the file descriptor on drop. /// /// This uses `repr(transparent)` and has the representation of a host file /// descriptor, so it can be used in FFI in places where a file descriptor is /// passed as a consumed argument or returned as an owned value, and it never /// has the value `-1`. #[unstable(feature = "io_safety", issue = "87074")] pub use rustix::io::OwnedFd; /// A trait to borrow the file descriptor from an underlying object. /// /// This is only available on unix platforms and must be imported in order to /// call the method. Windows platforms have a corresponding `AsHandle` and /// `AsSocket` set of traits. #[unstable(feature = "io_safety", issue = "87074")] pub use rustix::fd::AsFd; #[unstable(feature = "io_safety", issue = "87074")] impl AsFd for fs::File { #[inline] fn as_fd(&self) -> BorrowedFd<'_> { self.as_inner().as_fd() } } #[unstable(feature = "io_safety", issue = "87074")] impl From<fs::File> for OwnedFd { #[inline] fn from(file: fs::File) -> OwnedFd { file.into_inner().into_inner().into_inner() } } #[unstable(feature = "io_safety", issue = "87074")] impl From<OwnedFd> for fs::File { #[inline] fn from(owned_fd: OwnedFd) -> Self { Self::from_inner(FromInner::from_inner(FromInner::from_inner(owned_fd))) } } #[unstable(feature = "io_safety", issue = "87074")] impl AsFd for crate::net::TcpStream { #[inline] fn as_fd(&self) -> BorrowedFd<'_> { self.as_inner().socket().as_fd() } } #[unstable(feature = "io_safety", issue = "87074")] impl From<crate::net::TcpStream> for OwnedFd { #[inline] fn from(tcp_stream: crate::net::TcpStream) -> OwnedFd { tcp_stream.into_inner().into_socket().into_inner().into_inner().into() } } #[unstable(feature = "io_safety", issue = "87074")] impl From<OwnedFd> for crate::net::TcpStream { #[inline] fn from(owned_fd: OwnedFd) -> Self { Self::from_inner(FromInner::from_inner(FromInner::from_inner(FromInner::from_inner( owned_fd, )))) } } #[unstable(feature = "io_safety", issue = "87074")] impl AsFd for crate::net::TcpListener { #[inline] fn as_fd(&self) -> BorrowedFd<'_> { self.as_inner().socket().as_fd() } } #[unstable(feature = "io_safety", issue = "87074")] impl From<crate::net::TcpListener> for OwnedFd { #[inline] fn from(tcp_listener: crate::net::TcpListener) -> OwnedFd { tcp_listener.into_inner().into_socket().into_inner().into_inner().into() } } #[unstable(feature = "io_safety", issue = "87074")] impl From<OwnedFd> for crate::net::TcpListener { #[inline] fn from(owned_fd: OwnedFd) -> Self { Self::from_inner(FromInner::from_inner(FromInner::from_inner(FromInner::from_inner( owned_fd, )))) } } #[unstable(feature = "io_safety", issue = "87074")] impl AsFd for crate::net::UdpSocket { #[inline] fn as_fd(&self) -> BorrowedFd<'_> { self.as_inner().socket().as_fd() } } #[unstable(feature = "io_safety", issue = "87074")] impl 
From<crate::net::UdpSocket> for OwnedFd { #[inline] fn from(udp_socket: crate::net::UdpSocket) -> OwnedFd { udp_socket.into_inner().into_socket().into_inner().into_inner().into() } } #[unstable(feature = "io_safety", issue = "87074")] impl From<OwnedFd> for crate::net::UdpSocket { #[inline] fn from(owned_fd: OwnedFd) -> Self { Self::from_inner(FromInner::from_inner(FromInner::from_inner(FromInner::from_inner( owned_fd, )))) } }
30.503546
91
0.649849
71950c511c3e4fe49d790ffedec9549b72d3f136
2,320
use ansi_term::Style; use printer::{Colors, InteractivePrinter}; #[derive(Clone)] pub struct DecorationText { pub width: usize, pub text: String, } pub trait Decoration { fn generate( &self, line_number: usize, continuation: bool, printer: &InteractivePrinter, ) -> DecorationText; fn width(&self) -> usize; } pub struct LineNumberDecoration { color: Style, cached_wrap: DecorationText, cached_wrap_invalid_at: usize, } impl LineNumberDecoration { pub fn new(colors: &Colors) -> Self { LineNumberDecoration { color: colors.line_number, cached_wrap_invalid_at: 10000, cached_wrap: DecorationText { text: colors.line_number.paint(" ".repeat(4)).to_string(), width: 4, }, } } } impl Decoration for LineNumberDecoration { fn generate( &self, line_number: usize, continuation: bool, _printer: &InteractivePrinter, ) -> DecorationText { if continuation { if line_number > self.cached_wrap_invalid_at { let new_width = self.cached_wrap.width + 1; return DecorationText { text: self.color.paint(" ".repeat(new_width)).to_string(), width: new_width, }; } self.cached_wrap.clone() } else { let plain: String = format!("{:4}", line_number); DecorationText { width: plain.len(), text: self.color.paint(plain).to_string(), } } } fn width(&self) -> usize { 4 } } pub struct GridBorderDecoration { cached: DecorationText, } impl GridBorderDecoration { pub fn new(colors: &Colors) -> Self { GridBorderDecoration { cached: DecorationText { text: colors.grid.paint("│").to_string(), width: 1, }, } } } impl Decoration for GridBorderDecoration { fn generate( &self, _line_number: usize, _continuation: bool, _printer: &InteractivePrinter, ) -> DecorationText { self.cached.clone() } fn width(&self) -> usize { self.cached.width } }
23.434343
78
0.547414
762d046242b619541748dd0a4f97f956dddea931
9,395
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT use crate::Window; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::StaticType; use glib::ToValue; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct MountOperation(Object<ffi::GtkMountOperation, ffi::GtkMountOperationClass>) @extends gio::MountOperation; match fn { type_ => || ffi::gtk_mount_operation_get_type(), } } impl MountOperation { #[doc(alias = "gtk_mount_operation_new")] pub fn new<P: IsA<Window>>(parent: Option<&P>) -> MountOperation { assert_initialized_main_thread!(); unsafe { gio::MountOperation::from_glib_full(ffi::gtk_mount_operation_new( parent.map(|p| p.as_ref()).to_glib_none().0, )) .unsafe_cast() } } } #[derive(Clone, Default)] pub struct MountOperationBuilder { parent: Option<Window>, screen: Option<gdk::Screen>, anonymous: Option<bool>, choice: Option<i32>, domain: Option<String>, is_tcrypt_hidden_volume: Option<bool>, is_tcrypt_system_volume: Option<bool>, password: Option<String>, //password-save: /*Unknown type*/, pim: Option<u32>, username: Option<String>, } impl MountOperationBuilder { pub fn new() -> Self { Self::default() } pub fn build(self) -> MountOperation { let mut properties: Vec<(&str, &dyn ToValue)> = vec![]; if let Some(ref parent) = self.parent { properties.push(("parent", parent)); } if let Some(ref screen) = self.screen { properties.push(("screen", screen)); } if let Some(ref anonymous) = self.anonymous { properties.push(("anonymous", anonymous)); } if let Some(ref choice) = self.choice { properties.push(("choice", choice)); } if let Some(ref domain) = self.domain { properties.push(("domain", domain)); } if let Some(ref is_tcrypt_hidden_volume) = self.is_tcrypt_hidden_volume { properties.push(("is-tcrypt-hidden-volume", is_tcrypt_hidden_volume)); } if let Some(ref is_tcrypt_system_volume) = self.is_tcrypt_system_volume { properties.push(("is-tcrypt-system-volume", is_tcrypt_system_volume)); } if let Some(ref password) = self.password { properties.push(("password", password)); } if let Some(ref pim) = self.pim { properties.push(("pim", pim)); } if let Some(ref username) = self.username { properties.push(("username", username)); } glib::Object::new::<MountOperation>(&properties) .expect("Failed to create an instance of MountOperation") } pub fn parent<P: IsA<Window>>(mut self, parent: &P) -> Self { self.parent = Some(parent.clone().upcast()); self } pub fn screen(mut self, screen: &gdk::Screen) -> Self { self.screen = Some(screen.clone()); self } pub fn anonymous(mut self, anonymous: bool) -> Self { self.anonymous = Some(anonymous); self } pub fn choice(mut self, choice: i32) -> Self { self.choice = Some(choice); self } pub fn domain(mut self, domain: &str) -> Self { self.domain = Some(domain.to_string()); self } pub fn is_tcrypt_hidden_volume(mut self, is_tcrypt_hidden_volume: bool) -> Self { self.is_tcrypt_hidden_volume = Some(is_tcrypt_hidden_volume); self } pub fn is_tcrypt_system_volume(mut self, is_tcrypt_system_volume: bool) -> Self { self.is_tcrypt_system_volume = Some(is_tcrypt_system_volume); self } pub fn password(mut self, password: &str) -> Self { self.password = Some(password.to_string()); self } pub fn pim(mut self, pim: u32) -> Self { self.pim = Some(pim); self } pub fn username(mut self, username: &str) -> Self { self.username = Some(username.to_string()); 
self } } pub const NONE_MOUNT_OPERATION: Option<&MountOperation> = None; pub trait MountOperationExt: 'static { #[doc(alias = "gtk_mount_operation_get_parent")] #[doc(alias = "get_parent")] fn parent(&self) -> Option<Window>; #[doc(alias = "gtk_mount_operation_get_screen")] #[doc(alias = "get_screen")] fn screen(&self) -> Option<gdk::Screen>; #[doc(alias = "gtk_mount_operation_is_showing")] fn is_showing(&self) -> bool; #[doc(alias = "gtk_mount_operation_set_parent")] fn set_parent<P: IsA<Window>>(&self, parent: Option<&P>); #[doc(alias = "gtk_mount_operation_set_screen")] fn set_screen(&self, screen: &gdk::Screen); #[doc(alias = "is-showing")] fn connect_is_showing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "parent")] fn connect_parent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "screen")] fn connect_screen_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<MountOperation>> MountOperationExt for O { fn parent(&self) -> Option<Window> { unsafe { from_glib_none(ffi::gtk_mount_operation_get_parent( self.as_ref().to_glib_none().0, )) } } fn screen(&self) -> Option<gdk::Screen> { unsafe { from_glib_none(ffi::gtk_mount_operation_get_screen( self.as_ref().to_glib_none().0, )) } } fn is_showing(&self) -> bool { unsafe { from_glib(ffi::gtk_mount_operation_is_showing( self.as_ref().to_glib_none().0, )) } } fn set_parent<P: IsA<Window>>(&self, parent: Option<&P>) { unsafe { ffi::gtk_mount_operation_set_parent( self.as_ref().to_glib_none().0, parent.map(|p| p.as_ref()).to_glib_none().0, ); } } fn set_screen(&self, screen: &gdk::Screen) { unsafe { ffi::gtk_mount_operation_set_screen( self.as_ref().to_glib_none().0, screen.to_glib_none().0, ); } } #[doc(alias = "is-showing")] fn connect_is_showing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_is_showing_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkMountOperation, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<MountOperation>, { let f: &F = &*(f as *const F); f(&MountOperation::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::is-showing\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_is_showing_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "parent")] fn connect_parent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_parent_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkMountOperation, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<MountOperation>, { let f: &F = &*(f as *const F); f(&MountOperation::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::parent\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_parent_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "screen")] fn connect_screen_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_screen_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkMountOperation, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<MountOperation>, { let f: &F = &*(f as *const F); f(&MountOperation::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, 
b"notify::screen\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_screen_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for MountOperation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("MountOperation") } }
31.421405
120
0.558382
de7beb9b9ab0904d41f0ead9e8048167c28d6feb
248,812
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// <p>The configured access rules for the domain's document and search endpoints, and the current status of those rules.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct AccessPoliciesStatus { /// <p>Access rules for a domain's document or search service endpoints. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-access.html" target="_blank">Configuring Access for a Search Domain</a> in the <i>Amazon CloudSearch Developer Guide</i>. The maximum size of a policy document is 100 KB.</p> pub options: std::option::Option<std::string::String>, /// <p>The status of domain configuration option.</p> pub status: std::option::Option<crate::model::OptionStatus>, } impl AccessPoliciesStatus { /// <p>Access rules for a domain's document or search service endpoints. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-access.html" target="_blank">Configuring Access for a Search Domain</a> in the <i>Amazon CloudSearch Developer Guide</i>. The maximum size of a policy document is 100 KB.</p> pub fn options(&self) -> std::option::Option<&str> { self.options.as_deref() } /// <p>The status of domain configuration option.</p> pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> { self.status.as_ref() } } impl std::fmt::Debug for AccessPoliciesStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("AccessPoliciesStatus"); formatter.field("options", &self.options); formatter.field("status", &self.status); formatter.finish() } } /// See [`AccessPoliciesStatus`](crate::model::AccessPoliciesStatus) pub mod access_policies_status { /// A builder for [`AccessPoliciesStatus`](crate::model::AccessPoliciesStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) options: std::option::Option<std::string::String>, pub(crate) status: std::option::Option<crate::model::OptionStatus>, } impl Builder { /// <p>Access rules for a domain's document or search service endpoints. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-access.html" target="_blank">Configuring Access for a Search Domain</a> in the <i>Amazon CloudSearch Developer Guide</i>. The maximum size of a policy document is 100 KB.</p> pub fn options(mut self, input: impl Into<std::string::String>) -> Self { self.options = Some(input.into()); self } /// <p>Access rules for a domain's document or search service endpoints. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-access.html" target="_blank">Configuring Access for a Search Domain</a> in the <i>Amazon CloudSearch Developer Guide</i>. 
The maximum size of a policy document is 100 KB.</p> pub fn set_options(mut self, input: std::option::Option<std::string::String>) -> Self { self.options = input; self } /// <p>The status of domain configuration option.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of domain configuration option.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`AccessPoliciesStatus`](crate::model::AccessPoliciesStatus) pub fn build(self) -> crate::model::AccessPoliciesStatus { crate::model::AccessPoliciesStatus { options: self.options, status: self.status, } } } } impl AccessPoliciesStatus { /// Creates a new builder-style object to manufacture [`AccessPoliciesStatus`](crate::model::AccessPoliciesStatus) pub fn builder() -> crate::model::access_policies_status::Builder { crate::model::access_policies_status::Builder::default() } } /// <p>The status of domain configuration option.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct OptionStatus { /// <p>A timestamp for when this option was created.</p> pub creation_date: std::option::Option<aws_smithy_types::Instant>, /// <p>A timestamp for when this option was last updated.</p> pub update_date: std::option::Option<aws_smithy_types::Instant>, /// <p>A unique integer that indicates when this option was last updated.</p> pub update_version: i32, /// <p>The state of processing a change to an option. Possible values:</p><ul> /// <li><code>RequiresIndexDocuments</code>: the option's latest value will not be deployed until <a>IndexDocuments</a> has been called and indexing is complete.</li> /// <li><code>Processing</code>: the option's latest value is in the process of being activated. </li> /// <li><code>Active</code>: the option's latest value is completely deployed.</li> /// <li><code>FailedToValidate</code>: the option value is not compatible with the domain's data and cannot be used to index the data. You must either modify the option value or update or remove the incompatible documents.</li> /// </ul> pub state: std::option::Option<crate::model::OptionState>, /// <p>Indicates that the option will be deleted once processing is complete.</p> pub pending_deletion: std::option::Option<bool>, } impl OptionStatus { /// <p>A timestamp for when this option was created.</p> pub fn creation_date(&self) -> std::option::Option<&aws_smithy_types::Instant> { self.creation_date.as_ref() } /// <p>A timestamp for when this option was last updated.</p> pub fn update_date(&self) -> std::option::Option<&aws_smithy_types::Instant> { self.update_date.as_ref() } /// <p>A unique integer that indicates when this option was last updated.</p> pub fn update_version(&self) -> i32 { self.update_version } /// <p>The state of processing a change to an option. Possible values:</p><ul> /// <li><code>RequiresIndexDocuments</code>: the option's latest value will not be deployed until <a>IndexDocuments</a> has been called and indexing is complete.</li> /// <li><code>Processing</code>: the option's latest value is in the process of being activated. </li> /// <li><code>Active</code>: the option's latest value is completely deployed.</li> /// <li><code>FailedToValidate</code>: the option value is not compatible with the domain's data and cannot be used to index the data. 
You must either modify the option value or update or remove the incompatible documents.</li> /// </ul> pub fn state(&self) -> std::option::Option<&crate::model::OptionState> { self.state.as_ref() } /// <p>Indicates that the option will be deleted once processing is complete.</p> pub fn pending_deletion(&self) -> std::option::Option<bool> { self.pending_deletion } } impl std::fmt::Debug for OptionStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("OptionStatus"); formatter.field("creation_date", &self.creation_date); formatter.field("update_date", &self.update_date); formatter.field("update_version", &self.update_version); formatter.field("state", &self.state); formatter.field("pending_deletion", &self.pending_deletion); formatter.finish() } } /// See [`OptionStatus`](crate::model::OptionStatus) pub mod option_status { /// A builder for [`OptionStatus`](crate::model::OptionStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) creation_date: std::option::Option<aws_smithy_types::Instant>, pub(crate) update_date: std::option::Option<aws_smithy_types::Instant>, pub(crate) update_version: std::option::Option<i32>, pub(crate) state: std::option::Option<crate::model::OptionState>, pub(crate) pending_deletion: std::option::Option<bool>, } impl Builder { /// <p>A timestamp for when this option was created.</p> pub fn creation_date(mut self, input: aws_smithy_types::Instant) -> Self { self.creation_date = Some(input); self } /// <p>A timestamp for when this option was created.</p> pub fn set_creation_date( mut self, input: std::option::Option<aws_smithy_types::Instant>, ) -> Self { self.creation_date = input; self } /// <p>A timestamp for when this option was last updated.</p> pub fn update_date(mut self, input: aws_smithy_types::Instant) -> Self { self.update_date = Some(input); self } /// <p>A timestamp for when this option was last updated.</p> pub fn set_update_date( mut self, input: std::option::Option<aws_smithy_types::Instant>, ) -> Self { self.update_date = input; self } /// <p>A unique integer that indicates when this option was last updated.</p> pub fn update_version(mut self, input: i32) -> Self { self.update_version = Some(input); self } /// <p>A unique integer that indicates when this option was last updated.</p> pub fn set_update_version(mut self, input: std::option::Option<i32>) -> Self { self.update_version = input; self } /// <p>The state of processing a change to an option. Possible values:</p><ul> /// <li><code>RequiresIndexDocuments</code>: the option's latest value will not be deployed until <a>IndexDocuments</a> has been called and indexing is complete.</li> /// <li><code>Processing</code>: the option's latest value is in the process of being activated. </li> /// <li><code>Active</code>: the option's latest value is completely deployed.</li> /// <li><code>FailedToValidate</code>: the option value is not compatible with the domain's data and cannot be used to index the data. You must either modify the option value or update or remove the incompatible documents.</li> /// </ul> pub fn state(mut self, input: crate::model::OptionState) -> Self { self.state = Some(input); self } /// <p>The state of processing a change to an option. 
Possible values:</p><ul> /// <li><code>RequiresIndexDocuments</code>: the option's latest value will not be deployed until <a>IndexDocuments</a> has been called and indexing is complete.</li> /// <li><code>Processing</code>: the option's latest value is in the process of being activated. </li> /// <li><code>Active</code>: the option's latest value is completely deployed.</li> /// <li><code>FailedToValidate</code>: the option value is not compatible with the domain's data and cannot be used to index the data. You must either modify the option value or update or remove the incompatible documents.</li> /// </ul> pub fn set_state(mut self, input: std::option::Option<crate::model::OptionState>) -> Self { self.state = input; self } /// <p>Indicates that the option will be deleted once processing is complete.</p> pub fn pending_deletion(mut self, input: bool) -> Self { self.pending_deletion = Some(input); self } /// <p>Indicates that the option will be deleted once processing is complete.</p> pub fn set_pending_deletion(mut self, input: std::option::Option<bool>) -> Self { self.pending_deletion = input; self } /// Consumes the builder and constructs a [`OptionStatus`](crate::model::OptionStatus) pub fn build(self) -> crate::model::OptionStatus { crate::model::OptionStatus { creation_date: self.creation_date, update_date: self.update_date, update_version: self.update_version.unwrap_or_default(), state: self.state, pending_deletion: self.pending_deletion, } } } } impl OptionStatus { /// Creates a new builder-style object to manufacture [`OptionStatus`](crate::model::OptionStatus) pub fn builder() -> crate::model::option_status::Builder { crate::model::option_status::Builder::default() } } /// <p>The state of processing a change to an option. One of:</p> /// <ul> /// <li>RequiresIndexDocuments: The option's latest value will not be deployed until <a>IndexDocuments</a> has been called and indexing is complete.</li> /// <li>Processing: The option's latest value is in the process of being activated.</li> /// <li>Active: The option's latest value is fully deployed. </li> /// <li>FailedToValidate: The option value is not compatible with the domain's data and cannot be used to index the data. You must either modify the option value or update or remove the incompatible documents.</li> /// </ul> #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum OptionState { #[allow(missing_docs)] // documentation missing in model Active, #[allow(missing_docs)] // documentation missing in model FailedToValidate, #[allow(missing_docs)] // documentation missing in model Processing, #[allow(missing_docs)] // documentation missing in model RequiresIndexDocuments, /// Unknown contains new variants that have been added since this code was generated. Unknown(String), } impl std::convert::From<&str> for OptionState { fn from(s: &str) -> Self { match s { "Active" => OptionState::Active, "FailedToValidate" => OptionState::FailedToValidate, "Processing" => OptionState::Processing, "RequiresIndexDocuments" => OptionState::RequiresIndexDocuments, other => OptionState::Unknown(other.to_owned()), } } } impl std::str::FromStr for OptionState { type Err = std::convert::Infallible; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(OptionState::from(s)) } } impl OptionState { /// Returns the `&str` value of the enum member. 
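    ///
    /// A hypothetical round-trip sketch, not part of the generated service docs; the crate
    /// name `aws_sdk_cloudsearch` is assumed here rather than taken from this file:
    ///
    /// ```no_run
    /// use aws_sdk_cloudsearch::model::OptionState;
    ///
    /// // Parsing is infallible: strings the SDK does not recognize become `OptionState::Unknown`.
    /// let state: OptionState = "Active".parse().unwrap();
    /// assert_eq!(state.as_str(), "Active");
    /// assert!(matches!(OptionState::from("NotARealState"), OptionState::Unknown(_)));
    /// ```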
pub fn as_str(&self) -> &str { match self { OptionState::Active => "Active", OptionState::FailedToValidate => "FailedToValidate", OptionState::Processing => "Processing", OptionState::RequiresIndexDocuments => "RequiresIndexDocuments", OptionState::Unknown(s) => s.as_ref(), } } /// Returns all the `&str` values of the enum members. pub fn values() -> &'static [&'static str] { &[ "Active", "FailedToValidate", "Processing", "RequiresIndexDocuments", ] } } impl AsRef<str> for OptionState { fn as_ref(&self) -> &str { self.as_str() } } /// <p>The status and configuration of a search domain's scaling parameters. </p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct ScalingParametersStatus { /// <p>The desired instance type and desired number of replicas of each index partition.</p> pub options: std::option::Option<crate::model::ScalingParameters>, /// <p>The status of domain configuration option.</p> pub status: std::option::Option<crate::model::OptionStatus>, } impl ScalingParametersStatus { /// <p>The desired instance type and desired number of replicas of each index partition.</p> pub fn options(&self) -> std::option::Option<&crate::model::ScalingParameters> { self.options.as_ref() } /// <p>The status of domain configuration option.</p> pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> { self.status.as_ref() } } impl std::fmt::Debug for ScalingParametersStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("ScalingParametersStatus"); formatter.field("options", &self.options); formatter.field("status", &self.status); formatter.finish() } } /// See [`ScalingParametersStatus`](crate::model::ScalingParametersStatus) pub mod scaling_parameters_status { /// A builder for [`ScalingParametersStatus`](crate::model::ScalingParametersStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) options: std::option::Option<crate::model::ScalingParameters>, pub(crate) status: std::option::Option<crate::model::OptionStatus>, } impl Builder { /// <p>The desired instance type and desired number of replicas of each index partition.</p> pub fn options(mut self, input: crate::model::ScalingParameters) -> Self { self.options = Some(input); self } /// <p>The desired instance type and desired number of replicas of each index partition.</p> pub fn set_options( mut self, input: std::option::Option<crate::model::ScalingParameters>, ) -> Self { self.options = input; self } /// <p>The status of domain configuration option.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of domain configuration option.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`ScalingParametersStatus`](crate::model::ScalingParametersStatus) pub fn build(self) -> crate::model::ScalingParametersStatus { crate::model::ScalingParametersStatus { options: self.options, status: self.status, } } } } impl ScalingParametersStatus { /// Creates a new builder-style object to manufacture [`ScalingParametersStatus`](crate::model::ScalingParametersStatus) pub fn builder() -> crate::model::scaling_parameters_status::Builder { crate::model::scaling_parameters_status::Builder::default() } } /// <p>The desired instance type and desired number of replicas of each index 
partition.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct ScalingParameters { /// <p>The instance type that you want to preconfigure for your domain. For example, <code>search.m1.small</code>.</p> pub desired_instance_type: std::option::Option<crate::model::PartitionInstanceType>, /// <p>The number of replicas you want to preconfigure for each index partition.</p> pub desired_replication_count: i32, /// <p>The number of partitions you want to preconfigure for your domain. Only valid when /// you select <code>m2.2xlarge</code> as the desired instance type.</p> pub desired_partition_count: i32, } impl ScalingParameters { /// <p>The instance type that you want to preconfigure for your domain. For example, <code>search.m1.small</code>.</p> pub fn desired_instance_type( &self, ) -> std::option::Option<&crate::model::PartitionInstanceType> { self.desired_instance_type.as_ref() } /// <p>The number of replicas you want to preconfigure for each index partition.</p> pub fn desired_replication_count(&self) -> i32 { self.desired_replication_count } /// <p>The number of partitions you want to preconfigure for your domain. Only valid when /// you select <code>m2.2xlarge</code> as the desired instance type.</p> pub fn desired_partition_count(&self) -> i32 { self.desired_partition_count } } impl std::fmt::Debug for ScalingParameters { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("ScalingParameters"); formatter.field("desired_instance_type", &self.desired_instance_type); formatter.field("desired_replication_count", &self.desired_replication_count); formatter.field("desired_partition_count", &self.desired_partition_count); formatter.finish() } } /// See [`ScalingParameters`](crate::model::ScalingParameters) pub mod scaling_parameters { /// A builder for [`ScalingParameters`](crate::model::ScalingParameters) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) desired_instance_type: std::option::Option<crate::model::PartitionInstanceType>, pub(crate) desired_replication_count: std::option::Option<i32>, pub(crate) desired_partition_count: std::option::Option<i32>, } impl Builder { /// <p>The instance type that you want to preconfigure for your domain. For example, <code>search.m1.small</code>.</p> pub fn desired_instance_type(mut self, input: crate::model::PartitionInstanceType) -> Self { self.desired_instance_type = Some(input); self } /// <p>The instance type that you want to preconfigure for your domain. For example, <code>search.m1.small</code>.</p> pub fn set_desired_instance_type( mut self, input: std::option::Option<crate::model::PartitionInstanceType>, ) -> Self { self.desired_instance_type = input; self } /// <p>The number of replicas you want to preconfigure for each index partition.</p> pub fn desired_replication_count(mut self, input: i32) -> Self { self.desired_replication_count = Some(input); self } /// <p>The number of replicas you want to preconfigure for each index partition.</p> pub fn set_desired_replication_count(mut self, input: std::option::Option<i32>) -> Self { self.desired_replication_count = input; self } /// <p>The number of partitions you want to preconfigure for your domain. 
Only valid when /// you select <code>m2.2xlarge</code> as the desired instance type.</p> pub fn desired_partition_count(mut self, input: i32) -> Self { self.desired_partition_count = Some(input); self } /// <p>The number of partitions you want to preconfigure for your domain. Only valid when /// you select <code>m2.2xlarge</code> as the desired instance type.</p> pub fn set_desired_partition_count(mut self, input: std::option::Option<i32>) -> Self { self.desired_partition_count = input; self } /// Consumes the builder and constructs a [`ScalingParameters`](crate::model::ScalingParameters) pub fn build(self) -> crate::model::ScalingParameters { crate::model::ScalingParameters { desired_instance_type: self.desired_instance_type, desired_replication_count: self.desired_replication_count.unwrap_or_default(), desired_partition_count: self.desired_partition_count.unwrap_or_default(), } } } } impl ScalingParameters { /// Creates a new builder-style object to manufacture [`ScalingParameters`](crate::model::ScalingParameters) pub fn builder() -> crate::model::scaling_parameters::Builder { crate::model::scaling_parameters::Builder::default() } } /// <p>The instance type (such as <code>search.m1.small</code>) on which an index partition is hosted.</p> #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum PartitionInstanceType { #[allow(missing_docs)] // documentation missing in model Search2xlarge, #[allow(missing_docs)] // documentation missing in model SearchLarge, #[allow(missing_docs)] // documentation missing in model SearchM1Large, #[allow(missing_docs)] // documentation missing in model SearchM1Small, #[allow(missing_docs)] // documentation missing in model SearchM22xlarge, #[allow(missing_docs)] // documentation missing in model SearchM2Xlarge, #[allow(missing_docs)] // documentation missing in model SearchM32xlarge, #[allow(missing_docs)] // documentation missing in model SearchM3Large, #[allow(missing_docs)] // documentation missing in model SearchM3Medium, #[allow(missing_docs)] // documentation missing in model SearchM3Xlarge, #[allow(missing_docs)] // documentation missing in model SearchMedium, #[allow(missing_docs)] // documentation missing in model SearchPreviousgeneration2xlarge, #[allow(missing_docs)] // documentation missing in model SearchPreviousgenerationLarge, #[allow(missing_docs)] // documentation missing in model SearchPreviousgenerationSmall, #[allow(missing_docs)] // documentation missing in model SearchPreviousgenerationXlarge, #[allow(missing_docs)] // documentation missing in model SearchSmall, #[allow(missing_docs)] // documentation missing in model SearchXlarge, /// Unknown contains new variants that have been added since this code was generated. 
Unknown(String), } impl std::convert::From<&str> for PartitionInstanceType { fn from(s: &str) -> Self { match s { "search.2xlarge" => PartitionInstanceType::Search2xlarge, "search.large" => PartitionInstanceType::SearchLarge, "search.m1.large" => PartitionInstanceType::SearchM1Large, "search.m1.small" => PartitionInstanceType::SearchM1Small, "search.m2.2xlarge" => PartitionInstanceType::SearchM22xlarge, "search.m2.xlarge" => PartitionInstanceType::SearchM2Xlarge, "search.m3.2xlarge" => PartitionInstanceType::SearchM32xlarge, "search.m3.large" => PartitionInstanceType::SearchM3Large, "search.m3.medium" => PartitionInstanceType::SearchM3Medium, "search.m3.xlarge" => PartitionInstanceType::SearchM3Xlarge, "search.medium" => PartitionInstanceType::SearchMedium, "search.previousgeneration.2xlarge" => { PartitionInstanceType::SearchPreviousgeneration2xlarge } "search.previousgeneration.large" => { PartitionInstanceType::SearchPreviousgenerationLarge } "search.previousgeneration.small" => { PartitionInstanceType::SearchPreviousgenerationSmall } "search.previousgeneration.xlarge" => { PartitionInstanceType::SearchPreviousgenerationXlarge } "search.small" => PartitionInstanceType::SearchSmall, "search.xlarge" => PartitionInstanceType::SearchXlarge, other => PartitionInstanceType::Unknown(other.to_owned()), } } } impl std::str::FromStr for PartitionInstanceType { type Err = std::convert::Infallible; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(PartitionInstanceType::from(s)) } } impl PartitionInstanceType { /// Returns the `&str` value of the enum member. pub fn as_str(&self) -> &str { match self { PartitionInstanceType::Search2xlarge => "search.2xlarge", PartitionInstanceType::SearchLarge => "search.large", PartitionInstanceType::SearchM1Large => "search.m1.large", PartitionInstanceType::SearchM1Small => "search.m1.small", PartitionInstanceType::SearchM22xlarge => "search.m2.2xlarge", PartitionInstanceType::SearchM2Xlarge => "search.m2.xlarge", PartitionInstanceType::SearchM32xlarge => "search.m3.2xlarge", PartitionInstanceType::SearchM3Large => "search.m3.large", PartitionInstanceType::SearchM3Medium => "search.m3.medium", PartitionInstanceType::SearchM3Xlarge => "search.m3.xlarge", PartitionInstanceType::SearchMedium => "search.medium", PartitionInstanceType::SearchPreviousgeneration2xlarge => { "search.previousgeneration.2xlarge" } PartitionInstanceType::SearchPreviousgenerationLarge => { "search.previousgeneration.large" } PartitionInstanceType::SearchPreviousgenerationSmall => { "search.previousgeneration.small" } PartitionInstanceType::SearchPreviousgenerationXlarge => { "search.previousgeneration.xlarge" } PartitionInstanceType::SearchSmall => "search.small", PartitionInstanceType::SearchXlarge => "search.xlarge", PartitionInstanceType::Unknown(s) => s.as_ref(), } } /// Returns all the `&str` values of the enum members. 
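    ///
    /// A hedged sketch of how `values()` relates to `from`/`as_str`; the crate name
    /// `aws_sdk_cloudsearch` is an assumption, not something stated in this file:
    ///
    /// ```no_run
    /// use aws_sdk_cloudsearch::model::PartitionInstanceType;
    ///
    /// // Every known instance-type string parses back to a non-`Unknown` variant
    /// // and round-trips through `as_str()`.
    /// for s in PartitionInstanceType::values() {
    ///     let ty = PartitionInstanceType::from(*s);
    ///     assert_eq!(ty.as_str(), *s);
    ///     assert!(!matches!(ty, PartitionInstanceType::Unknown(_)));
    /// }
    /// ```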
pub fn values() -> &'static [&'static str] { &[ "search.2xlarge", "search.large", "search.m1.large", "search.m1.small", "search.m2.2xlarge", "search.m2.xlarge", "search.m3.2xlarge", "search.m3.large", "search.m3.medium", "search.m3.xlarge", "search.medium", "search.previousgeneration.2xlarge", "search.previousgeneration.large", "search.previousgeneration.small", "search.previousgeneration.xlarge", "search.small", "search.xlarge", ] } } impl AsRef<str> for PartitionInstanceType { fn as_ref(&self) -> &str { self.as_str() } } /// <p>The configuration and status of the domain's endpoint options.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DomainEndpointOptionsStatus { /// <p>The domain endpoint options configured for the domain.</p> pub options: std::option::Option<crate::model::DomainEndpointOptions>, /// <p>The status of the configured domain endpoint options.</p> pub status: std::option::Option<crate::model::OptionStatus>, } impl DomainEndpointOptionsStatus { /// <p>The domain endpoint options configured for the domain.</p> pub fn options(&self) -> std::option::Option<&crate::model::DomainEndpointOptions> { self.options.as_ref() } /// <p>The status of the configured domain endpoint options.</p> pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> { self.status.as_ref() } } impl std::fmt::Debug for DomainEndpointOptionsStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DomainEndpointOptionsStatus"); formatter.field("options", &self.options); formatter.field("status", &self.status); formatter.finish() } } /// See [`DomainEndpointOptionsStatus`](crate::model::DomainEndpointOptionsStatus) pub mod domain_endpoint_options_status { /// A builder for [`DomainEndpointOptionsStatus`](crate::model::DomainEndpointOptionsStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) options: std::option::Option<crate::model::DomainEndpointOptions>, pub(crate) status: std::option::Option<crate::model::OptionStatus>, } impl Builder { /// <p>The domain endpoint options configured for the domain.</p> pub fn options(mut self, input: crate::model::DomainEndpointOptions) -> Self { self.options = Some(input); self } /// <p>The domain endpoint options configured for the domain.</p> pub fn set_options( mut self, input: std::option::Option<crate::model::DomainEndpointOptions>, ) -> Self { self.options = input; self } /// <p>The status of the configured domain endpoint options.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of the configured domain endpoint options.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`DomainEndpointOptionsStatus`](crate::model::DomainEndpointOptionsStatus) pub fn build(self) -> crate::model::DomainEndpointOptionsStatus { crate::model::DomainEndpointOptionsStatus { options: self.options, status: self.status, } } } } impl DomainEndpointOptionsStatus { /// Creates a new builder-style object to manufacture [`DomainEndpointOptionsStatus`](crate::model::DomainEndpointOptionsStatus) pub fn builder() -> crate::model::domain_endpoint_options_status::Builder { crate::model::domain_endpoint_options_status::Builder::default() } } /// <p>The domain's endpoint options.</p> #[non_exhaustive] 
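///
/// A minimal builder sketch, assuming the published crate name `aws_sdk_cloudsearch`
/// and purely illustrative option values:
///
/// ```no_run
/// use aws_sdk_cloudsearch::model::{DomainEndpointOptions, TlsSecurityPolicy};
///
/// // Require HTTPS and pin the newer TLS policy; unset fields stay `None`.
/// let opts = DomainEndpointOptions::builder()
///     .enforce_https(true)
///     .tls_security_policy(TlsSecurityPolicy::PolicyMinTls12201907)
///     .build();
/// assert_eq!(opts.enforce_https(), Some(true));
/// ```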
#[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DomainEndpointOptions { /// <p>Whether the domain is HTTPS only enabled.</p> pub enforce_https: std::option::Option<bool>, /// <p>The minimum required TLS version</p> pub tls_security_policy: std::option::Option<crate::model::TlsSecurityPolicy>, } impl DomainEndpointOptions { /// <p>Whether the domain is HTTPS only enabled.</p> pub fn enforce_https(&self) -> std::option::Option<bool> { self.enforce_https } /// <p>The minimum required TLS version</p> pub fn tls_security_policy(&self) -> std::option::Option<&crate::model::TlsSecurityPolicy> { self.tls_security_policy.as_ref() } } impl std::fmt::Debug for DomainEndpointOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DomainEndpointOptions"); formatter.field("enforce_https", &self.enforce_https); formatter.field("tls_security_policy", &self.tls_security_policy); formatter.finish() } } /// See [`DomainEndpointOptions`](crate::model::DomainEndpointOptions) pub mod domain_endpoint_options { /// A builder for [`DomainEndpointOptions`](crate::model::DomainEndpointOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) enforce_https: std::option::Option<bool>, pub(crate) tls_security_policy: std::option::Option<crate::model::TlsSecurityPolicy>, } impl Builder { /// <p>Whether the domain is HTTPS only enabled.</p> pub fn enforce_https(mut self, input: bool) -> Self { self.enforce_https = Some(input); self } /// <p>Whether the domain is HTTPS only enabled.</p> pub fn set_enforce_https(mut self, input: std::option::Option<bool>) -> Self { self.enforce_https = input; self } /// <p>The minimum required TLS version</p> pub fn tls_security_policy(mut self, input: crate::model::TlsSecurityPolicy) -> Self { self.tls_security_policy = Some(input); self } /// <p>The minimum required TLS version</p> pub fn set_tls_security_policy( mut self, input: std::option::Option<crate::model::TlsSecurityPolicy>, ) -> Self { self.tls_security_policy = input; self } /// Consumes the builder and constructs a [`DomainEndpointOptions`](crate::model::DomainEndpointOptions) pub fn build(self) -> crate::model::DomainEndpointOptions { crate::model::DomainEndpointOptions { enforce_https: self.enforce_https, tls_security_policy: self.tls_security_policy, } } } } impl DomainEndpointOptions { /// Creates a new builder-style object to manufacture [`DomainEndpointOptions`](crate::model::DomainEndpointOptions) pub fn builder() -> crate::model::domain_endpoint_options::Builder { crate::model::domain_endpoint_options::Builder::default() } } /// <p>The minimum required TLS version.</p> #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum TlsSecurityPolicy { #[allow(missing_docs)] // documentation missing in model PolicyMinTls10201907, #[allow(missing_docs)] // documentation missing in model PolicyMinTls12201907, /// Unknown contains new variants that have been added since this code was generated. 
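    ///
    /// A hypothetical sketch of how unrecognized policy strings are preserved (the crate
    /// name `aws_sdk_cloudsearch` and the policy string below are illustrative assumptions):
    ///
    /// ```no_run
    /// use aws_sdk_cloudsearch::model::TlsSecurityPolicy;
    ///
    /// // A policy value introduced after code generation still round-trips via `Unknown`.
    /// let policy = TlsSecurityPolicy::from("Policy-Min-TLS-1-3-2099-01");
    /// assert_eq!(policy.as_str(), "Policy-Min-TLS-1-3-2099-01");
    /// assert!(matches!(policy, TlsSecurityPolicy::Unknown(_)));
    /// ```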
Unknown(String), } impl std::convert::From<&str> for TlsSecurityPolicy { fn from(s: &str) -> Self { match s { "Policy-Min-TLS-1-0-2019-07" => TlsSecurityPolicy::PolicyMinTls10201907, "Policy-Min-TLS-1-2-2019-07" => TlsSecurityPolicy::PolicyMinTls12201907, other => TlsSecurityPolicy::Unknown(other.to_owned()), } } } impl std::str::FromStr for TlsSecurityPolicy { type Err = std::convert::Infallible; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(TlsSecurityPolicy::from(s)) } } impl TlsSecurityPolicy { /// Returns the `&str` value of the enum member. pub fn as_str(&self) -> &str { match self { TlsSecurityPolicy::PolicyMinTls10201907 => "Policy-Min-TLS-1-0-2019-07", TlsSecurityPolicy::PolicyMinTls12201907 => "Policy-Min-TLS-1-2-2019-07", TlsSecurityPolicy::Unknown(s) => s.as_ref(), } } /// Returns all the `&str` values of the enum members. pub fn values() -> &'static [&'static str] { &["Policy-Min-TLS-1-0-2019-07", "Policy-Min-TLS-1-2-2019-07"] } } impl AsRef<str> for TlsSecurityPolicy { fn as_ref(&self) -> &str { self.as_str() } } /// <p>The status and configuration of the domain's availability options.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct AvailabilityOptionsStatus { /// <p>The availability options configured for the domain.</p> pub options: bool, /// <p>The status of domain configuration option.</p> pub status: std::option::Option<crate::model::OptionStatus>, } impl AvailabilityOptionsStatus { /// <p>The availability options configured for the domain.</p> pub fn options(&self) -> bool { self.options } /// <p>The status of domain configuration option.</p> pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> { self.status.as_ref() } } impl std::fmt::Debug for AvailabilityOptionsStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("AvailabilityOptionsStatus"); formatter.field("options", &self.options); formatter.field("status", &self.status); formatter.finish() } } /// See [`AvailabilityOptionsStatus`](crate::model::AvailabilityOptionsStatus) pub mod availability_options_status { /// A builder for [`AvailabilityOptionsStatus`](crate::model::AvailabilityOptionsStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) options: std::option::Option<bool>, pub(crate) status: std::option::Option<crate::model::OptionStatus>, } impl Builder { /// <p>The availability options configured for the domain.</p> pub fn options(mut self, input: bool) -> Self { self.options = Some(input); self } /// <p>The availability options configured for the domain.</p> pub fn set_options(mut self, input: std::option::Option<bool>) -> Self { self.options = input; self } /// <p>The status of domain configuration option.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of domain configuration option.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`AvailabilityOptionsStatus`](crate::model::AvailabilityOptionsStatus) pub fn build(self) -> crate::model::AvailabilityOptionsStatus { crate::model::AvailabilityOptionsStatus { options: self.options.unwrap_or_default(), status: self.status, } } } } impl AvailabilityOptionsStatus { /// Creates a new builder-style object to manufacture 
[`AvailabilityOptionsStatus`](crate::model::AvailabilityOptionsStatus) pub fn builder() -> crate::model::availability_options_status::Builder { crate::model::availability_options_status::Builder::default() } } /// <p>The value of a <code>Suggester</code> and its current status.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct SuggesterStatus { /// <p>Configuration information for a search suggester. Each suggester has a unique name and specifies the text field you want to use for suggestions. The following options can be configured for a suggester: <code>FuzzyMatching</code>, <code>SortExpression</code>. </p> pub options: std::option::Option<crate::model::Suggester>, /// <p>The status of domain configuration option.</p> pub status: std::option::Option<crate::model::OptionStatus>, } impl SuggesterStatus { /// <p>Configuration information for a search suggester. Each suggester has a unique name and specifies the text field you want to use for suggestions. The following options can be configured for a suggester: <code>FuzzyMatching</code>, <code>SortExpression</code>. </p> pub fn options(&self) -> std::option::Option<&crate::model::Suggester> { self.options.as_ref() } /// <p>The status of domain configuration option.</p> pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> { self.status.as_ref() } } impl std::fmt::Debug for SuggesterStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("SuggesterStatus"); formatter.field("options", &self.options); formatter.field("status", &self.status); formatter.finish() } } /// See [`SuggesterStatus`](crate::model::SuggesterStatus) pub mod suggester_status { /// A builder for [`SuggesterStatus`](crate::model::SuggesterStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) options: std::option::Option<crate::model::Suggester>, pub(crate) status: std::option::Option<crate::model::OptionStatus>, } impl Builder { /// <p>Configuration information for a search suggester. Each suggester has a unique name and specifies the text field you want to use for suggestions. The following options can be configured for a suggester: <code>FuzzyMatching</code>, <code>SortExpression</code>. </p> pub fn options(mut self, input: crate::model::Suggester) -> Self { self.options = Some(input); self } /// <p>Configuration information for a search suggester. Each suggester has a unique name and specifies the text field you want to use for suggestions. The following options can be configured for a suggester: <code>FuzzyMatching</code>, <code>SortExpression</code>. 
</p> pub fn set_options(mut self, input: std::option::Option<crate::model::Suggester>) -> Self { self.options = input; self } /// <p>The status of domain configuration option.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of domain configuration option.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`SuggesterStatus`](crate::model::SuggesterStatus) pub fn build(self) -> crate::model::SuggesterStatus { crate::model::SuggesterStatus { options: self.options, status: self.status, } } } } impl SuggesterStatus { /// Creates a new builder-style object to manufacture [`SuggesterStatus`](crate::model::SuggesterStatus) pub fn builder() -> crate::model::suggester_status::Builder { crate::model::suggester_status::Builder::default() } } /// <p>Configuration information for a search suggester. Each suggester has a unique name and specifies the text field you want to use for suggestions. The following options can be configured for a suggester: <code>FuzzyMatching</code>, <code>SortExpression</code>. </p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct Suggester { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub suggester_name: std::option::Option<std::string::String>, /// <p>Options for a search suggester.</p> pub document_suggester_options: std::option::Option<crate::model::DocumentSuggesterOptions>, } impl Suggester { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn suggester_name(&self) -> std::option::Option<&str> { self.suggester_name.as_deref() } /// <p>Options for a search suggester.</p> pub fn document_suggester_options( &self, ) -> std::option::Option<&crate::model::DocumentSuggesterOptions> { self.document_suggester_options.as_ref() } } impl std::fmt::Debug for Suggester { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("Suggester"); formatter.field("suggester_name", &self.suggester_name); formatter.field( "document_suggester_options", &self.document_suggester_options, ); formatter.finish() } } /// See [`Suggester`](crate::model::Suggester) pub mod suggester { /// A builder for [`Suggester`](crate::model::Suggester) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) suggester_name: std::option::Option<std::string::String>, pub(crate) document_suggester_options: std::option::Option<crate::model::DocumentSuggesterOptions>, } impl Builder { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn suggester_name(mut self, input: impl Into<std::string::String>) -> Self { self.suggester_name = Some(input.into()); self } /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn set_suggester_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.suggester_name = input; self } /// <p>Options for a search suggester.</p> pub fn document_suggester_options( mut self, input: crate::model::DocumentSuggesterOptions, ) -> Self { self.document_suggester_options = Some(input); self } /// 
<p>Options for a search suggester.</p> pub fn set_document_suggester_options( mut self, input: std::option::Option<crate::model::DocumentSuggesterOptions>, ) -> Self { self.document_suggester_options = input; self } /// Consumes the builder and constructs a [`Suggester`](crate::model::Suggester) pub fn build(self) -> crate::model::Suggester { crate::model::Suggester { suggester_name: self.suggester_name, document_suggester_options: self.document_suggester_options, } } } } impl Suggester { /// Creates a new builder-style object to manufacture [`Suggester`](crate::model::Suggester) pub fn builder() -> crate::model::suggester::Builder { crate::model::suggester::Builder::default() } } /// <p>Options for a search suggester.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DocumentSuggesterOptions { /// <p>The name of the index field you want to use for suggestions. /// </p> pub source_field: std::option::Option<std::string::String>, /// <p>The level of fuzziness allowed when suggesting matches for a string: <code>none</code>, <code>low</code>, or <code>high</code>. With none, the specified string is treated as an exact prefix. With low, suggestions must differ from the specified string by no more than one character. With high, suggestions can differ by up to two characters. The default is none. </p> pub fuzzy_matching: std::option::Option<crate::model::SuggesterFuzzyMatching>, /// <p>An expression that computes a score for each suggestion to control how they are sorted. The scores are rounded to the nearest /// integer, with a floor of 0 and a ceiling of 2^31-1. A document's relevance score is not computed /// for suggestions, so sort expressions cannot reference the <code>_score</code> value. /// To sort suggestions using a numeric field or existing expression, simply specify /// the name of the field or expression. If no expression is configured for the suggester, the /// suggestions are sorted with the closest matches listed first.</p> pub sort_expression: std::option::Option<std::string::String>, } impl DocumentSuggesterOptions { /// <p>The name of the index field you want to use for suggestions. /// </p> pub fn source_field(&self) -> std::option::Option<&str> { self.source_field.as_deref() } /// <p>The level of fuzziness allowed when suggesting matches for a string: <code>none</code>, <code>low</code>, or <code>high</code>. With none, the specified string is treated as an exact prefix. With low, suggestions must differ from the specified string by no more than one character. With high, suggestions can differ by up to two characters. The default is none. </p> pub fn fuzzy_matching(&self) -> std::option::Option<&crate::model::SuggesterFuzzyMatching> { self.fuzzy_matching.as_ref() } /// <p>An expression that computes a score for each suggestion to control how they are sorted. The scores are rounded to the nearest /// integer, with a floor of 0 and a ceiling of 2^31-1. A document's relevance score is not computed /// for suggestions, so sort expressions cannot reference the <code>_score</code> value. /// To sort suggestions using a numeric field or existing expression, simply specify /// the name of the field or expression. 
If no expression is configured for the suggester, the /// suggestions are sorted with the closest matches listed first.</p> pub fn sort_expression(&self) -> std::option::Option<&str> { self.sort_expression.as_deref() } } impl std::fmt::Debug for DocumentSuggesterOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DocumentSuggesterOptions"); formatter.field("source_field", &self.source_field); formatter.field("fuzzy_matching", &self.fuzzy_matching); formatter.field("sort_expression", &self.sort_expression); formatter.finish() } } /// See [`DocumentSuggesterOptions`](crate::model::DocumentSuggesterOptions) pub mod document_suggester_options { /// A builder for [`DocumentSuggesterOptions`](crate::model::DocumentSuggesterOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) source_field: std::option::Option<std::string::String>, pub(crate) fuzzy_matching: std::option::Option<crate::model::SuggesterFuzzyMatching>, pub(crate) sort_expression: std::option::Option<std::string::String>, } impl Builder { /// <p>The name of the index field you want to use for suggestions. /// </p> pub fn source_field(mut self, input: impl Into<std::string::String>) -> Self { self.source_field = Some(input.into()); self } /// <p>The name of the index field you want to use for suggestions. /// </p> pub fn set_source_field(mut self, input: std::option::Option<std::string::String>) -> Self { self.source_field = input; self } /// <p>The level of fuzziness allowed when suggesting matches for a string: <code>none</code>, <code>low</code>, or <code>high</code>. With none, the specified string is treated as an exact prefix. With low, suggestions must differ from the specified string by no more than one character. With high, suggestions can differ by up to two characters. The default is none. </p> pub fn fuzzy_matching(mut self, input: crate::model::SuggesterFuzzyMatching) -> Self { self.fuzzy_matching = Some(input); self } /// <p>The level of fuzziness allowed when suggesting matches for a string: <code>none</code>, <code>low</code>, or <code>high</code>. With none, the specified string is treated as an exact prefix. With low, suggestions must differ from the specified string by no more than one character. With high, suggestions can differ by up to two characters. The default is none. </p> pub fn set_fuzzy_matching( mut self, input: std::option::Option<crate::model::SuggesterFuzzyMatching>, ) -> Self { self.fuzzy_matching = input; self } /// <p>An expression that computes a score for each suggestion to control how they are sorted. The scores are rounded to the nearest /// integer, with a floor of 0 and a ceiling of 2^31-1. A document's relevance score is not computed /// for suggestions, so sort expressions cannot reference the <code>_score</code> value. /// To sort suggestions using a numeric field or existing expression, simply specify /// the name of the field or expression. If no expression is configured for the suggester, the /// suggestions are sorted with the closest matches listed first.</p> pub fn sort_expression(mut self, input: impl Into<std::string::String>) -> Self { self.sort_expression = Some(input.into()); self } /// <p>An expression that computes a score for each suggestion to control how they are sorted. The scores are rounded to the nearest /// integer, with a floor of 0 and a ceiling of 2^31-1. 
A document's relevance score is not computed /// for suggestions, so sort expressions cannot reference the <code>_score</code> value. /// To sort suggestions using a numeric field or existing expression, simply specify /// the name of the field or expression. If no expression is configured for the suggester, the /// suggestions are sorted with the closest matches listed first.</p> pub fn set_sort_expression( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.sort_expression = input; self } /// Consumes the builder and constructs a [`DocumentSuggesterOptions`](crate::model::DocumentSuggesterOptions) pub fn build(self) -> crate::model::DocumentSuggesterOptions { crate::model::DocumentSuggesterOptions { source_field: self.source_field, fuzzy_matching: self.fuzzy_matching, sort_expression: self.sort_expression, } } } } impl DocumentSuggesterOptions { /// Creates a new builder-style object to manufacture [`DocumentSuggesterOptions`](crate::model::DocumentSuggesterOptions) pub fn builder() -> crate::model::document_suggester_options::Builder { crate::model::document_suggester_options::Builder::default() } } #[allow(missing_docs)] // documentation missing in model #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum SuggesterFuzzyMatching { #[allow(missing_docs)] // documentation missing in model High, #[allow(missing_docs)] // documentation missing in model Low, #[allow(missing_docs)] // documentation missing in model None, /// Unknown contains new variants that have been added since this code was generated. Unknown(String), } impl std::convert::From<&str> for SuggesterFuzzyMatching { fn from(s: &str) -> Self { match s { "high" => SuggesterFuzzyMatching::High, "low" => SuggesterFuzzyMatching::Low, "none" => SuggesterFuzzyMatching::None, other => SuggesterFuzzyMatching::Unknown(other.to_owned()), } } } impl std::str::FromStr for SuggesterFuzzyMatching { type Err = std::convert::Infallible; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(SuggesterFuzzyMatching::from(s)) } } impl SuggesterFuzzyMatching { /// Returns the `&str` value of the enum member. pub fn as_str(&self) -> &str { match self { SuggesterFuzzyMatching::High => "high", SuggesterFuzzyMatching::Low => "low", SuggesterFuzzyMatching::None => "none", SuggesterFuzzyMatching::Unknown(s) => s.as_ref(), } } /// Returns all the `&str` values of the enum members. pub fn values() -> &'static [&'static str] { &["high", "low", "none"] } } impl AsRef<str> for SuggesterFuzzyMatching { fn as_ref(&self) -> &str { self.as_str() } } /// <p>The value of an <code>IndexField</code> and its current status.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct IndexFieldStatus { /// <p>Configuration information for a field in the index, including its name, type, and options. The supported options depend on the <code><a>IndexFieldType</a></code>.</p> pub options: std::option::Option<crate::model::IndexField>, /// <p>The status of domain configuration option.</p> pub status: std::option::Option<crate::model::OptionStatus>, } impl IndexFieldStatus { /// <p>Configuration information for a field in the index, including its name, type, and options. 
The supported options depend on the <code><a>IndexFieldType</a></code>.</p> pub fn options(&self) -> std::option::Option<&crate::model::IndexField> { self.options.as_ref() } /// <p>The status of domain configuration option.</p> pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> { self.status.as_ref() } } impl std::fmt::Debug for IndexFieldStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("IndexFieldStatus"); formatter.field("options", &self.options); formatter.field("status", &self.status); formatter.finish() } } /// See [`IndexFieldStatus`](crate::model::IndexFieldStatus) pub mod index_field_status { /// A builder for [`IndexFieldStatus`](crate::model::IndexFieldStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) options: std::option::Option<crate::model::IndexField>, pub(crate) status: std::option::Option<crate::model::OptionStatus>, } impl Builder { /// <p>Configuration information for a field in the index, including its name, type, and options. The supported options depend on the <code><a>IndexFieldType</a></code>.</p> pub fn options(mut self, input: crate::model::IndexField) -> Self { self.options = Some(input); self } /// <p>Configuration information for a field in the index, including its name, type, and options. The supported options depend on the <code><a>IndexFieldType</a></code>.</p> pub fn set_options(mut self, input: std::option::Option<crate::model::IndexField>) -> Self { self.options = input; self } /// <p>The status of domain configuration option.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of domain configuration option.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`IndexFieldStatus`](crate::model::IndexFieldStatus) pub fn build(self) -> crate::model::IndexFieldStatus { crate::model::IndexFieldStatus { options: self.options, status: self.status, } } } } impl IndexFieldStatus { /// Creates a new builder-style object to manufacture [`IndexFieldStatus`](crate::model::IndexFieldStatus) pub fn builder() -> crate::model::index_field_status::Builder { crate::model::index_field_status::Builder::default() } } /// <p>Configuration information for a field in the index, including its name, type, and options. The supported options depend on the <code><a>IndexFieldType</a></code>.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct IndexField { /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. 
/// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub index_field_name: std::option::Option<std::string::String>, /// <p>The type of field. The valid options for a field depend on the field type. For more information about the supported field types, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-index-fields.html" target="_blank">Configuring Index Fields</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub index_field_type: std::option::Option<crate::model::IndexFieldType>, /// <p>Options for a 64-bit signed integer field. Present if <code>IndexFieldType</code> specifies the field is of type <code>int</code>. All options are enabled by default.</p> pub int_options: std::option::Option<crate::model::IntOptions>, /// <p>Options for a double-precision 64-bit floating point field. Present if <code>IndexFieldType</code> specifies the field is of type <code>double</code>. All options are enabled by default.</p> pub double_options: std::option::Option<crate::model::DoubleOptions>, /// <p>Options for literal field. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal</code>. All options are enabled by default.</p> pub literal_options: std::option::Option<crate::model::LiteralOptions>, /// <p>Options for text field. Present if <code>IndexFieldType</code> specifies the field is of type <code>text</code>. A <code>text</code> field is always searchable. All options are enabled by default.</p> pub text_options: std::option::Option<crate::model::TextOptions>, /// <p>Options for a date field. Dates and times are specified in UTC (Coordinated Universal Time) according to IETF RFC3339: yyyy-mm-ddT00:00:00Z. Present if <code>IndexFieldType</code> specifies the field is of type <code>date</code>. All options are enabled by default.</p> pub date_options: std::option::Option<crate::model::DateOptions>, /// <p>Options for a latlon field. A latlon field contains a location stored as a latitude and longitude value pair. Present if <code>IndexFieldType</code> specifies the field is of type <code>latlon</code>. All options are enabled by default.</p> pub lat_lon_options: std::option::Option<crate::model::LatLonOptions>, /// <p>Options for a field that contains an array of 64-bit signed integers. Present if <code>IndexFieldType</code> specifies the field is of type <code>int-array</code>. All options are enabled by default.</p> pub int_array_options: std::option::Option<crate::model::IntArrayOptions>, /// <p>Options for a field that contains an array of double-precision 64-bit floating point values. Present if <code>IndexFieldType</code> specifies the field is of type <code>double-array</code>. All options are enabled by default.</p> pub double_array_options: std::option::Option<crate::model::DoubleArrayOptions>, /// <p>Options for a field that contains an array of literal strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal-array</code>. All options are enabled by default.</p> pub literal_array_options: std::option::Option<crate::model::LiteralArrayOptions>, /// <p>Options for a field that contains an array of text strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>text-array</code>. A <code>text-array</code> field is always searchable. All options are enabled by default.</p> pub text_array_options: std::option::Option<crate::model::TextArrayOptions>, /// <p>Options for a field that contains an array of dates. 
Present if <code>IndexFieldType</code> specifies the field is of type <code>date-array</code>. All options are enabled by default.</p> pub date_array_options: std::option::Option<crate::model::DateArrayOptions>, } impl IndexField { /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn index_field_name(&self) -> std::option::Option<&str> { self.index_field_name.as_deref() } /// <p>The type of field. The valid options for a field depend on the field type. For more information about the supported field types, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-index-fields.html" target="_blank">Configuring Index Fields</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn index_field_type(&self) -> std::option::Option<&crate::model::IndexFieldType> { self.index_field_type.as_ref() } /// <p>Options for a 64-bit signed integer field. Present if <code>IndexFieldType</code> specifies the field is of type <code>int</code>. All options are enabled by default.</p> pub fn int_options(&self) -> std::option::Option<&crate::model::IntOptions> { self.int_options.as_ref() } /// <p>Options for a double-precision 64-bit floating point field. Present if <code>IndexFieldType</code> specifies the field is of type <code>double</code>. All options are enabled by default.</p> pub fn double_options(&self) -> std::option::Option<&crate::model::DoubleOptions> { self.double_options.as_ref() } /// <p>Options for literal field. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal</code>. All options are enabled by default.</p> pub fn literal_options(&self) -> std::option::Option<&crate::model::LiteralOptions> { self.literal_options.as_ref() } /// <p>Options for text field. Present if <code>IndexFieldType</code> specifies the field is of type <code>text</code>. A <code>text</code> field is always searchable. All options are enabled by default.</p> pub fn text_options(&self) -> std::option::Option<&crate::model::TextOptions> { self.text_options.as_ref() } /// <p>Options for a date field. Dates and times are specified in UTC (Coordinated Universal Time) according to IETF RFC3339: yyyy-mm-ddT00:00:00Z. Present if <code>IndexFieldType</code> specifies the field is of type <code>date</code>. All options are enabled by default.</p> pub fn date_options(&self) -> std::option::Option<&crate::model::DateOptions> { self.date_options.as_ref() } /// <p>Options for a latlon field. A latlon field contains a location stored as a latitude and longitude value pair. Present if <code>IndexFieldType</code> specifies the field is of type <code>latlon</code>. 
All options are enabled by default.</p> pub fn lat_lon_options(&self) -> std::option::Option<&crate::model::LatLonOptions> { self.lat_lon_options.as_ref() } /// <p>Options for a field that contains an array of 64-bit signed integers. Present if <code>IndexFieldType</code> specifies the field is of type <code>int-array</code>. All options are enabled by default.</p> pub fn int_array_options(&self) -> std::option::Option<&crate::model::IntArrayOptions> { self.int_array_options.as_ref() } /// <p>Options for a field that contains an array of double-precision 64-bit floating point values. Present if <code>IndexFieldType</code> specifies the field is of type <code>double-array</code>. All options are enabled by default.</p> pub fn double_array_options(&self) -> std::option::Option<&crate::model::DoubleArrayOptions> { self.double_array_options.as_ref() } /// <p>Options for a field that contains an array of literal strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal-array</code>. All options are enabled by default.</p> pub fn literal_array_options(&self) -> std::option::Option<&crate::model::LiteralArrayOptions> { self.literal_array_options.as_ref() } /// <p>Options for a field that contains an array of text strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>text-array</code>. A <code>text-array</code> field is always searchable. All options are enabled by default.</p> pub fn text_array_options(&self) -> std::option::Option<&crate::model::TextArrayOptions> { self.text_array_options.as_ref() } /// <p>Options for a field that contains an array of dates. Present if <code>IndexFieldType</code> specifies the field is of type <code>date-array</code>. All options are enabled by default.</p> pub fn date_array_options(&self) -> std::option::Option<&crate::model::DateArrayOptions> { self.date_array_options.as_ref() } } impl std::fmt::Debug for IndexField { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("IndexField"); formatter.field("index_field_name", &self.index_field_name); formatter.field("index_field_type", &self.index_field_type); formatter.field("int_options", &self.int_options); formatter.field("double_options", &self.double_options); formatter.field("literal_options", &self.literal_options); formatter.field("text_options", &self.text_options); formatter.field("date_options", &self.date_options); formatter.field("lat_lon_options", &self.lat_lon_options); formatter.field("int_array_options", &self.int_array_options); formatter.field("double_array_options", &self.double_array_options); formatter.field("literal_array_options", &self.literal_array_options); formatter.field("text_array_options", &self.text_array_options); formatter.field("date_array_options", &self.date_array_options); formatter.finish() } } /// See [`IndexField`](crate::model::IndexField) pub mod index_field { /// A builder for [`IndexField`](crate::model::IndexField) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) index_field_name: std::option::Option<std::string::String>, pub(crate) index_field_type: std::option::Option<crate::model::IndexFieldType>, pub(crate) int_options: std::option::Option<crate::model::IntOptions>, pub(crate) double_options: std::option::Option<crate::model::DoubleOptions>, pub(crate) literal_options: std::option::Option<crate::model::LiteralOptions>, pub(crate) text_options: 
std::option::Option<crate::model::TextOptions>, pub(crate) date_options: std::option::Option<crate::model::DateOptions>, pub(crate) lat_lon_options: std::option::Option<crate::model::LatLonOptions>, pub(crate) int_array_options: std::option::Option<crate::model::IntArrayOptions>, pub(crate) double_array_options: std::option::Option<crate::model::DoubleArrayOptions>, pub(crate) literal_array_options: std::option::Option<crate::model::LiteralArrayOptions>, pub(crate) text_array_options: std::option::Option<crate::model::TextArrayOptions>, pub(crate) date_array_options: std::option::Option<crate::model::DateArrayOptions>, } impl Builder { /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn index_field_name(mut self, input: impl Into<std::string::String>) -> Self { self.index_field_name = Some(input.into()); self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn set_index_field_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.index_field_name = input; self } /// <p>The type of field. The valid options for a field depend on the field type. For more information about the supported field types, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-index-fields.html" target="_blank">Configuring Index Fields</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn index_field_type(mut self, input: crate::model::IndexFieldType) -> Self { self.index_field_type = Some(input); self } /// <p>The type of field. The valid options for a field depend on the field type. 
For more information about the supported field types, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-index-fields.html" target="_blank">Configuring Index Fields</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn set_index_field_type( mut self, input: std::option::Option<crate::model::IndexFieldType>, ) -> Self { self.index_field_type = input; self } /// <p>Options for a 64-bit signed integer field. Present if <code>IndexFieldType</code> specifies the field is of type <code>int</code>. All options are enabled by default.</p> pub fn int_options(mut self, input: crate::model::IntOptions) -> Self { self.int_options = Some(input); self } /// <p>Options for a 64-bit signed integer field. Present if <code>IndexFieldType</code> specifies the field is of type <code>int</code>. All options are enabled by default.</p> pub fn set_int_options( mut self, input: std::option::Option<crate::model::IntOptions>, ) -> Self { self.int_options = input; self } /// <p>Options for a double-precision 64-bit floating point field. Present if <code>IndexFieldType</code> specifies the field is of type <code>double</code>. All options are enabled by default.</p> pub fn double_options(mut self, input: crate::model::DoubleOptions) -> Self { self.double_options = Some(input); self } /// <p>Options for a double-precision 64-bit floating point field. Present if <code>IndexFieldType</code> specifies the field is of type <code>double</code>. All options are enabled by default.</p> pub fn set_double_options( mut self, input: std::option::Option<crate::model::DoubleOptions>, ) -> Self { self.double_options = input; self } /// <p>Options for literal field. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal</code>. All options are enabled by default.</p> pub fn literal_options(mut self, input: crate::model::LiteralOptions) -> Self { self.literal_options = Some(input); self } /// <p>Options for literal field. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal</code>. All options are enabled by default.</p> pub fn set_literal_options( mut self, input: std::option::Option<crate::model::LiteralOptions>, ) -> Self { self.literal_options = input; self } /// <p>Options for text field. Present if <code>IndexFieldType</code> specifies the field is of type <code>text</code>. A <code>text</code> field is always searchable. All options are enabled by default.</p> pub fn text_options(mut self, input: crate::model::TextOptions) -> Self { self.text_options = Some(input); self } /// <p>Options for text field. Present if <code>IndexFieldType</code> specifies the field is of type <code>text</code>. A <code>text</code> field is always searchable. All options are enabled by default.</p> pub fn set_text_options( mut self, input: std::option::Option<crate::model::TextOptions>, ) -> Self { self.text_options = input; self } /// <p>Options for a date field. Dates and times are specified in UTC (Coordinated Universal Time) according to IETF RFC3339: yyyy-mm-ddT00:00:00Z. Present if <code>IndexFieldType</code> specifies the field is of type <code>date</code>. All options are enabled by default.</p> pub fn date_options(mut self, input: crate::model::DateOptions) -> Self { self.date_options = Some(input); self } /// <p>Options for a date field. Dates and times are specified in UTC (Coordinated Universal Time) according to IETF RFC3339: yyyy-mm-ddT00:00:00Z. 
Present if <code>IndexFieldType</code> specifies the field is of type <code>date</code>. All options are enabled by default.</p> pub fn set_date_options( mut self, input: std::option::Option<crate::model::DateOptions>, ) -> Self { self.date_options = input; self } /// <p>Options for a latlon field. A latlon field contains a location stored as a latitude and longitude value pair. Present if <code>IndexFieldType</code> specifies the field is of type <code>latlon</code>. All options are enabled by default.</p> pub fn lat_lon_options(mut self, input: crate::model::LatLonOptions) -> Self { self.lat_lon_options = Some(input); self } /// <p>Options for a latlon field. A latlon field contains a location stored as a latitude and longitude value pair. Present if <code>IndexFieldType</code> specifies the field is of type <code>latlon</code>. All options are enabled by default.</p> pub fn set_lat_lon_options( mut self, input: std::option::Option<crate::model::LatLonOptions>, ) -> Self { self.lat_lon_options = input; self } /// <p>Options for a field that contains an array of 64-bit signed integers. Present if <code>IndexFieldType</code> specifies the field is of type <code>int-array</code>. All options are enabled by default.</p> pub fn int_array_options(mut self, input: crate::model::IntArrayOptions) -> Self { self.int_array_options = Some(input); self } /// <p>Options for a field that contains an array of 64-bit signed integers. Present if <code>IndexFieldType</code> specifies the field is of type <code>int-array</code>. All options are enabled by default.</p> pub fn set_int_array_options( mut self, input: std::option::Option<crate::model::IntArrayOptions>, ) -> Self { self.int_array_options = input; self } /// <p>Options for a field that contains an array of double-precision 64-bit floating point values. Present if <code>IndexFieldType</code> specifies the field is of type <code>double-array</code>. All options are enabled by default.</p> pub fn double_array_options(mut self, input: crate::model::DoubleArrayOptions) -> Self { self.double_array_options = Some(input); self } /// <p>Options for a field that contains an array of double-precision 64-bit floating point values. Present if <code>IndexFieldType</code> specifies the field is of type <code>double-array</code>. All options are enabled by default.</p> pub fn set_double_array_options( mut self, input: std::option::Option<crate::model::DoubleArrayOptions>, ) -> Self { self.double_array_options = input; self } /// <p>Options for a field that contains an array of literal strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal-array</code>. All options are enabled by default.</p> pub fn literal_array_options(mut self, input: crate::model::LiteralArrayOptions) -> Self { self.literal_array_options = Some(input); self } /// <p>Options for a field that contains an array of literal strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal-array</code>. All options are enabled by default.</p> pub fn set_literal_array_options( mut self, input: std::option::Option<crate::model::LiteralArrayOptions>, ) -> Self { self.literal_array_options = input; self } /// <p>Options for a field that contains an array of text strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>text-array</code>. A <code>text-array</code> field is always searchable. 
All options are enabled by default.</p> pub fn text_array_options(mut self, input: crate::model::TextArrayOptions) -> Self { self.text_array_options = Some(input); self } /// <p>Options for a field that contains an array of text strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>text-array</code>. A <code>text-array</code> field is always searchable. All options are enabled by default.</p> pub fn set_text_array_options( mut self, input: std::option::Option<crate::model::TextArrayOptions>, ) -> Self { self.text_array_options = input; self } /// <p>Options for a field that contains an array of dates. Present if <code>IndexFieldType</code> specifies the field is of type <code>date-array</code>. All options are enabled by default.</p> pub fn date_array_options(mut self, input: crate::model::DateArrayOptions) -> Self { self.date_array_options = Some(input); self } /// <p>Options for a field that contains an array of dates. Present if <code>IndexFieldType</code> specifies the field is of type <code>date-array</code>. All options are enabled by default.</p> pub fn set_date_array_options( mut self, input: std::option::Option<crate::model::DateArrayOptions>, ) -> Self { self.date_array_options = input; self } /// Consumes the builder and constructs a [`IndexField`](crate::model::IndexField) pub fn build(self) -> crate::model::IndexField { crate::model::IndexField { index_field_name: self.index_field_name, index_field_type: self.index_field_type, int_options: self.int_options, double_options: self.double_options, literal_options: self.literal_options, text_options: self.text_options, date_options: self.date_options, lat_lon_options: self.lat_lon_options, int_array_options: self.int_array_options, double_array_options: self.double_array_options, literal_array_options: self.literal_array_options, text_array_options: self.text_array_options, date_array_options: self.date_array_options, } } } } impl IndexField { /// Creates a new builder-style object to manufacture [`IndexField`](crate::model::IndexField) pub fn builder() -> crate::model::index_field::Builder { crate::model::index_field::Builder::default() } } /// <p>Options for a field that contains an array of dates. Present if <code>IndexFieldType</code> specifies the field is of type <code>date-array</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DateArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<std::string::String>, /// <p>A list of source fields to map to the field. </p> pub source_fields: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, } impl DateArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<&str> { self.default_value.as_deref() } /// <p>A list of source fields to map to the field. 
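// Illustrative sketch (not generated code): wiring an `IndexField` together with the
// options struct that matches its declared type. The field name, the
// `IndexFieldType::LiteralArray` variant name, and the option values are assumptions
// made only for this example.
#[cfg(test)]
#[allow(dead_code)]
fn _example_literal_array_index_field() -> crate::model::IndexField {
    crate::model::IndexField::builder()
        .index_field_name("tags")
        // Variant name assumed from the `literal-array` type string.
        .index_field_type(crate::model::IndexFieldType::LiteralArray)
        .literal_array_options(
            crate::model::LiteralArrayOptions::builder()
                .facet_enabled(true)
                .return_enabled(true)
                .build(),
        )
        .build()
}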
</p> pub fn source_fields(&self) -> std::option::Option<&str> { self.source_fields.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } } impl std::fmt::Debug for DateArrayOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DateArrayOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_fields", &self.source_fields); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.finish() } } /// See [`DateArrayOptions`](crate::model::DateArrayOptions) pub mod date_array_options { /// A builder for [`DateArrayOptions`](crate::model::DateArrayOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<std::string::String>, pub(crate) source_fields: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, } impl Builder { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self { self.default_value = Some(input.into()); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.default_value = input; self } /// <p>A list of source fields to map to the field. </p> pub fn source_fields(mut self, input: impl Into<std::string::String>) -> Self { self.source_fields = Some(input.into()); self } /// <p>A list of source fields to map to the field. 
</p> pub fn set_source_fields( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.source_fields = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// Consumes the builder and constructs a [`DateArrayOptions`](crate::model::DateArrayOptions) pub fn build(self) -> crate::model::DateArrayOptions { crate::model::DateArrayOptions { default_value: self.default_value, source_fields: self.source_fields, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, } } } } impl DateArrayOptions { /// Creates a new builder-style object to manufacture [`DateArrayOptions`](crate::model::DateArrayOptions) pub fn builder() -> crate::model::date_array_options::Builder { crate::model::date_array_options::Builder::default() } } /// <p>Options for a field that contains an array of text strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>text-array</code>. A <code>text-array</code> field is always searchable. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct TextArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<std::string::String>, /// <p>A list of source fields to map to the field. </p> pub source_fields: std::option::Option<std::string::String>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, /// <p>Whether highlights can be returned for the field.</p> pub highlight_enabled: std::option::Option<bool>, /// <p>The name of an analysis scheme for a <code>text-array</code> field.</p> pub analysis_scheme: std::option::Option<std::string::String>, } impl TextArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<&str> { self.default_value.as_deref() } /// <p>A list of source fields to map to the field. 
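// Illustrative sketch (not generated code): the two setter flavours exposed by the
// options builders. The plain setters take a value, while the `set_*` setters take an
// `Option` and can clear a field. The comma-separated `source_fields` value is an
// assumed example.
#[cfg(test)]
#[allow(dead_code)]
fn _example_date_array_options() -> crate::model::DateArrayOptions {
    crate::model::DateArrayOptions::builder()
        .source_fields("created_at,updated_at")
        .facet_enabled(true)
        // `set_*` accepts an Option, so passing `None` leaves the field unset.
        .set_return_enabled(None)
        .build()
}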
</p> pub fn source_fields(&self) -> std::option::Option<&str> { self.source_fields.as_deref() } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } /// <p>Whether highlights can be returned for the field.</p> pub fn highlight_enabled(&self) -> std::option::Option<bool> { self.highlight_enabled } /// <p>The name of an analysis scheme for a <code>text-array</code> field.</p> pub fn analysis_scheme(&self) -> std::option::Option<&str> { self.analysis_scheme.as_deref() } } impl std::fmt::Debug for TextArrayOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("TextArrayOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_fields", &self.source_fields); formatter.field("return_enabled", &self.return_enabled); formatter.field("highlight_enabled", &self.highlight_enabled); formatter.field("analysis_scheme", &self.analysis_scheme); formatter.finish() } } /// See [`TextArrayOptions`](crate::model::TextArrayOptions) pub mod text_array_options { /// A builder for [`TextArrayOptions`](crate::model::TextArrayOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<std::string::String>, pub(crate) source_fields: std::option::Option<std::string::String>, pub(crate) return_enabled: std::option::Option<bool>, pub(crate) highlight_enabled: std::option::Option<bool>, pub(crate) analysis_scheme: std::option::Option<std::string::String>, } impl Builder { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self { self.default_value = Some(input.into()); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.default_value = input; self } /// <p>A list of source fields to map to the field. </p> pub fn source_fields(mut self, input: impl Into<std::string::String>) -> Self { self.source_fields = Some(input.into()); self } /// <p>A list of source fields to map to the field. 
</p> pub fn set_source_fields( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.source_fields = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// <p>Whether highlights can be returned for the field.</p> pub fn highlight_enabled(mut self, input: bool) -> Self { self.highlight_enabled = Some(input); self } /// <p>Whether highlights can be returned for the field.</p> pub fn set_highlight_enabled(mut self, input: std::option::Option<bool>) -> Self { self.highlight_enabled = input; self } /// <p>The name of an analysis scheme for a <code>text-array</code> field.</p> pub fn analysis_scheme(mut self, input: impl Into<std::string::String>) -> Self { self.analysis_scheme = Some(input.into()); self } /// <p>The name of an analysis scheme for a <code>text-array</code> field.</p> pub fn set_analysis_scheme( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.analysis_scheme = input; self } /// Consumes the builder and constructs a [`TextArrayOptions`](crate::model::TextArrayOptions) pub fn build(self) -> crate::model::TextArrayOptions { crate::model::TextArrayOptions { default_value: self.default_value, source_fields: self.source_fields, return_enabled: self.return_enabled, highlight_enabled: self.highlight_enabled, analysis_scheme: self.analysis_scheme, } } } } impl TextArrayOptions { /// Creates a new builder-style object to manufacture [`TextArrayOptions`](crate::model::TextArrayOptions) pub fn builder() -> crate::model::text_array_options::Builder { crate::model::text_array_options::Builder::default() } } /// <p>Options for a field that contains an array of literal strings. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal-array</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct LiteralArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<std::string::String>, /// <p>A list of source fields to map to the field. </p> pub source_fields: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, } impl LiteralArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<&str> { self.default_value.as_deref() } /// <p>A list of source fields to map to the field. 
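// Illustrative sketch (not generated code): a `text-array` field is always searchable,
// so its options carry an analysis scheme plus highlight/return flags rather than the
// facet/search toggles of the other array options. The analysis scheme name and source
// fields are assumed example values.
#[cfg(test)]
#[allow(dead_code)]
fn _example_text_array_options() -> crate::model::TextArrayOptions {
    crate::model::TextArrayOptions::builder()
        .source_fields("title,summary")
        .analysis_scheme("_en_default_")
        .highlight_enabled(true)
        .return_enabled(true)
        .build()
}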
</p> pub fn source_fields(&self) -> std::option::Option<&str> { self.source_fields.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } } impl std::fmt::Debug for LiteralArrayOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("LiteralArrayOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_fields", &self.source_fields); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.finish() } } /// See [`LiteralArrayOptions`](crate::model::LiteralArrayOptions) pub mod literal_array_options { /// A builder for [`LiteralArrayOptions`](crate::model::LiteralArrayOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<std::string::String>, pub(crate) source_fields: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, } impl Builder { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self { self.default_value = Some(input.into()); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.default_value = input; self } /// <p>A list of source fields to map to the field. </p> pub fn source_fields(mut self, input: impl Into<std::string::String>) -> Self { self.source_fields = Some(input.into()); self } /// <p>A list of source fields to map to the field. 
</p> pub fn set_source_fields( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.source_fields = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// Consumes the builder and constructs a [`LiteralArrayOptions`](crate::model::LiteralArrayOptions) pub fn build(self) -> crate::model::LiteralArrayOptions { crate::model::LiteralArrayOptions { default_value: self.default_value, source_fields: self.source_fields, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, } } } } impl LiteralArrayOptions { /// Creates a new builder-style object to manufacture [`LiteralArrayOptions`](crate::model::LiteralArrayOptions) pub fn builder() -> crate::model::literal_array_options::Builder { crate::model::literal_array_options::Builder::default() } } /// <p>Options for a field that contains an array of double-precision 64-bit floating point values. Present if <code>IndexFieldType</code> specifies the field is of type <code>double-array</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DoubleArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<f64>, /// <p>A list of source fields to map to the field. </p> pub source_fields: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, } impl DoubleArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<f64> { self.default_value } /// <p>A list of source fields to map to the field. 
</p> pub fn source_fields(&self) -> std::option::Option<&str> { self.source_fields.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } } impl std::fmt::Debug for DoubleArrayOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DoubleArrayOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_fields", &self.source_fields); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.finish() } } /// See [`DoubleArrayOptions`](crate::model::DoubleArrayOptions) pub mod double_array_options { /// A builder for [`DoubleArrayOptions`](crate::model::DoubleArrayOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<f64>, pub(crate) source_fields: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, } impl Builder { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(mut self, input: f64) -> Self { self.default_value = Some(input); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value(mut self, input: std::option::Option<f64>) -> Self { self.default_value = input; self } /// <p>A list of source fields to map to the field. </p> pub fn source_fields(mut self, input: impl Into<std::string::String>) -> Self { self.source_fields = Some(input.into()); self } /// <p>A list of source fields to map to the field. 
</p> pub fn set_source_fields( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.source_fields = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// Consumes the builder and constructs a [`DoubleArrayOptions`](crate::model::DoubleArrayOptions) pub fn build(self) -> crate::model::DoubleArrayOptions { crate::model::DoubleArrayOptions { default_value: self.default_value, source_fields: self.source_fields, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, } } } } impl DoubleArrayOptions { /// Creates a new builder-style object to manufacture [`DoubleArrayOptions`](crate::model::DoubleArrayOptions) pub fn builder() -> crate::model::double_array_options::Builder { crate::model::double_array_options::Builder::default() } } /// <p>Options for a field that contains an array of 64-bit signed integers. Present if <code>IndexFieldType</code> specifies the field is of type <code>int-array</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct IntArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<i64>, /// <p>A list of source fields to map to the field. </p> pub source_fields: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, } impl IntArrayOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<i64> { self.default_value } /// <p>A list of source fields to map to the field. 
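// Illustrative sketch (not generated code): the numeric options builders take typed
// default values (`f64` here, `i64` for the int variants) instead of strings. The
// source field name is an assumed example.
#[cfg(test)]
#[allow(dead_code)]
fn _example_double_array_options() -> crate::model::DoubleArrayOptions {
    crate::model::DoubleArrayOptions::builder()
        .default_value(0.0)
        .source_fields("ratings")
        .search_enabled(true)
        .build()
}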
</p> pub fn source_fields(&self) -> std::option::Option<&str> { self.source_fields.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } } impl std::fmt::Debug for IntArrayOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("IntArrayOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_fields", &self.source_fields); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.finish() } } /// See [`IntArrayOptions`](crate::model::IntArrayOptions) pub mod int_array_options { /// A builder for [`IntArrayOptions`](crate::model::IntArrayOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<i64>, pub(crate) source_fields: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, } impl Builder { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(mut self, input: i64) -> Self { self.default_value = Some(input); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value(mut self, input: std::option::Option<i64>) -> Self { self.default_value = input; self } /// <p>A list of source fields to map to the field. </p> pub fn source_fields(mut self, input: impl Into<std::string::String>) -> Self { self.source_fields = Some(input.into()); self } /// <p>A list of source fields to map to the field. 
</p> pub fn set_source_fields( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.source_fields = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// Consumes the builder and constructs a [`IntArrayOptions`](crate::model::IntArrayOptions) pub fn build(self) -> crate::model::IntArrayOptions { crate::model::IntArrayOptions { default_value: self.default_value, source_fields: self.source_fields, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, } } } } impl IntArrayOptions { /// Creates a new builder-style object to manufacture [`IntArrayOptions`](crate::model::IntArrayOptions) pub fn builder() -> crate::model::int_array_options::Builder { crate::model::int_array_options::Builder::default() } } /// <p>Options for a latlon field. A latlon field contains a location stored as a latitude and longitude value pair. Present if <code>IndexFieldType</code> specifies the field is of type <code>latlon</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct LatLonOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<std::string::String>, /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. 
/// </p> pub source_field: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, /// <p>Whether the field can be used to sort the search results.</p> pub sort_enabled: std::option::Option<bool>, } impl LatLonOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<&str> { self.default_value.as_deref() } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(&self) -> std::option::Option<&str> { self.source_field.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(&self) -> std::option::Option<bool> { self.sort_enabled } } impl std::fmt::Debug for LatLonOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("LatLonOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_field", &self.source_field); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.field("sort_enabled", &self.sort_enabled); formatter.finish() } } /// See [`LatLonOptions`](crate::model::LatLonOptions) pub mod lat_lon_options { /// A builder for [`LatLonOptions`](crate::model::LatLonOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<std::string::String>, pub(crate) source_field: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, pub(crate) sort_enabled: std::option::Option<bool>, } impl Builder { /// A value to use for the field if the field isn't specified 
for a document. pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self { self.default_value = Some(input.into()); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.default_value = input; self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(mut self, input: impl Into<std::string::String>) -> Self { self.source_field = Some(input.into()); self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. 
/// </p> pub fn set_source_field(mut self, input: std::option::Option<std::string::String>) -> Self { self.source_field = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(mut self, input: bool) -> Self { self.sort_enabled = Some(input); self } /// <p>Whether the field can be used to sort the search results.</p> pub fn set_sort_enabled(mut self, input: std::option::Option<bool>) -> Self { self.sort_enabled = input; self } /// Consumes the builder and constructs a [`LatLonOptions`](crate::model::LatLonOptions) pub fn build(self) -> crate::model::LatLonOptions { crate::model::LatLonOptions { default_value: self.default_value, source_field: self.source_field, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, sort_enabled: self.sort_enabled, } } } } impl LatLonOptions { /// Creates a new builder-style object to manufacture [`LatLonOptions`](crate::model::LatLonOptions) pub fn builder() -> crate::model::lat_lon_options::Builder { crate::model::lat_lon_options::Builder::default() } } /// <p>Options for a date field. Dates and times are specified in UTC (Coordinated Universal Time) according to IETF RFC3339: yyyy-mm-ddT00:00:00Z. Present if <code>IndexFieldType</code> specifies the field is of type <code>date</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DateOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<std::string::String>, /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. 
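// Illustrative sketch (kept as a comment; not generated code): a latlon field maps a
// single `source_field` and can be sort-enabled. The "lat,lon" default value format and
// the field names are assumptions made for the example.
//
//     let latlon = crate::model::LatLonOptions::builder()
//         .source_field("location")
//         .default_value("35.6286,139.7389")
//         .sort_enabled(true)
//         .return_enabled(true)
//         .build();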
/// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub source_field: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, /// <p>Whether the field can be used to sort the search results.</p> pub sort_enabled: std::option::Option<bool>, } impl DateOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<&str> { self.default_value.as_deref() } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(&self) -> std::option::Option<&str> { self.source_field.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(&self) -> std::option::Option<bool> { self.sort_enabled } } impl std::fmt::Debug for DateOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DateOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_field", &self.source_field); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.field("sort_enabled", &self.sort_enabled); formatter.finish() } } /// See [`DateOptions`](crate::model::DateOptions) pub mod date_options { /// A builder for [`DateOptions`](crate::model::DateOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<std::string::String>, pub(crate) source_field: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, pub(crate) sort_enabled: std::option::Option<bool>, } impl Builder { /// A 
value to use for the field if the field isn't specified for a document. pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self { self.default_value = Some(input.into()); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.default_value = input; self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(mut self, input: impl Into<std::string::String>) -> Self { self.source_field = Some(input.into()); self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. 
/// </p> pub fn set_source_field(mut self, input: std::option::Option<std::string::String>) -> Self { self.source_field = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(mut self, input: bool) -> Self { self.sort_enabled = Some(input); self } /// <p>Whether the field can be used to sort the search results.</p> pub fn set_sort_enabled(mut self, input: std::option::Option<bool>) -> Self { self.sort_enabled = input; self } /// Consumes the builder and constructs a [`DateOptions`](crate::model::DateOptions) pub fn build(self) -> crate::model::DateOptions { crate::model::DateOptions { default_value: self.default_value, source_field: self.source_field, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, sort_enabled: self.sort_enabled, } } } } impl DateOptions { /// Creates a new builder-style object to manufacture [`DateOptions`](crate::model::DateOptions) pub fn builder() -> crate::model::date_options::Builder { crate::model::date_options::Builder::default() } } /// <p>Options for text field. Present if <code>IndexFieldType</code> specifies the field is of type <code>text</code>. A <code>text</code> field is always searchable. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct TextOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<std::string::String>, /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. 
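// Illustrative sketch (kept as a comment; not generated code): date options use the
// RFC3339 form described above for default values. The concrete timestamp and source
// field name are assumed example values.
//
//     let date = crate::model::DateOptions::builder()
//         .source_field("published_at")
//         .default_value("1970-01-01T00:00:00Z")
//         .sort_enabled(true)
//         .build();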
/// </p> pub source_field: std::option::Option<std::string::String>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, /// <p>Whether the field can be used to sort the search results.</p> pub sort_enabled: std::option::Option<bool>, /// <p>Whether highlights can be returned for the field.</p> pub highlight_enabled: std::option::Option<bool>, /// <p>The name of an analysis scheme for a <code>text</code> field.</p> pub analysis_scheme: std::option::Option<std::string::String>, } impl TextOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<&str> { self.default_value.as_deref() } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(&self) -> std::option::Option<&str> { self.source_field.as_deref() } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(&self) -> std::option::Option<bool> { self.sort_enabled } /// <p>Whether highlights can be returned for the field.</p> pub fn highlight_enabled(&self) -> std::option::Option<bool> { self.highlight_enabled } /// <p>The name of an analysis scheme for a <code>text</code> field.</p> pub fn analysis_scheme(&self) -> std::option::Option<&str> { self.analysis_scheme.as_deref() } } impl std::fmt::Debug for TextOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("TextOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_field", &self.source_field); formatter.field("return_enabled", &self.return_enabled); formatter.field("sort_enabled", &self.sort_enabled); formatter.field("highlight_enabled", &self.highlight_enabled); formatter.field("analysis_scheme", &self.analysis_scheme); formatter.finish() } } /// See [`TextOptions`](crate::model::TextOptions) pub mod text_options { /// A builder for [`TextOptions`](crate::model::TextOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<std::string::String>, pub(crate) source_field: std::option::Option<std::string::String>, pub(crate) return_enabled: std::option::Option<bool>, pub(crate) sort_enabled: std::option::Option<bool>, pub(crate) highlight_enabled: std::option::Option<bool>, pub(crate) analysis_scheme: std::option::Option<std::string::String>, } impl Builder { 
/// A value to use for the field if the field isn't specified for a document. pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self { self.default_value = Some(input.into()); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.default_value = input; self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(mut self, input: impl Into<std::string::String>) -> Self { self.source_field = Some(input.into()); self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. 
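/// As a sketch only (the crate path `aws_sdk_cloudsearch` and the pattern
/// `*_blurb` are assumptions): a dynamic-field pattern that begins or ends
/// with a wildcard, as described above, is supplied like any other string.
///
/// ```no_run
/// let dynamic = aws_sdk_cloudsearch::model::TextOptions::builder()
///     // Leading wildcard: a hypothetical dynamic field pattern.
///     .source_field("*_blurb")
///     .return_enabled(true)
///     .build();
/// # let _ = dynamic;
/// ```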
/// </p> pub fn set_source_field(mut self, input: std::option::Option<std::string::String>) -> Self { self.source_field = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(mut self, input: bool) -> Self { self.sort_enabled = Some(input); self } /// <p>Whether the field can be used to sort the search results.</p> pub fn set_sort_enabled(mut self, input: std::option::Option<bool>) -> Self { self.sort_enabled = input; self } /// <p>Whether highlights can be returned for the field.</p> pub fn highlight_enabled(mut self, input: bool) -> Self { self.highlight_enabled = Some(input); self } /// <p>Whether highlights can be returned for the field.</p> pub fn set_highlight_enabled(mut self, input: std::option::Option<bool>) -> Self { self.highlight_enabled = input; self } /// <p>The name of an analysis scheme for a <code>text</code> field.</p> pub fn analysis_scheme(mut self, input: impl Into<std::string::String>) -> Self { self.analysis_scheme = Some(input.into()); self } /// <p>The name of an analysis scheme for a <code>text</code> field.</p> pub fn set_analysis_scheme( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.analysis_scheme = input; self } /// Consumes the builder and constructs a [`TextOptions`](crate::model::TextOptions) pub fn build(self) -> crate::model::TextOptions { crate::model::TextOptions { default_value: self.default_value, source_field: self.source_field, return_enabled: self.return_enabled, sort_enabled: self.sort_enabled, highlight_enabled: self.highlight_enabled, analysis_scheme: self.analysis_scheme, } } } } impl TextOptions { /// Creates a new builder-style object to manufacture [`TextOptions`](crate::model::TextOptions) pub fn builder() -> crate::model::text_options::Builder { crate::model::text_options::Builder::default() } } /// <p>Options for literal field. Present if <code>IndexFieldType</code> specifies the field is of type <code>literal</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct LiteralOptions { /// A value to use for the field if the field isn't specified for a document. pub default_value: std::option::Option<std::string::String>, /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. 
/// </p> pub source_field: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, /// <p>Whether the field can be used to sort the search results.</p> pub sort_enabled: std::option::Option<bool>, } impl LiteralOptions { /// A value to use for the field if the field isn't specified for a document. pub fn default_value(&self) -> std::option::Option<&str> { self.default_value.as_deref() } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(&self) -> std::option::Option<&str> { self.source_field.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(&self) -> std::option::Option<bool> { self.sort_enabled } } impl std::fmt::Debug for LiteralOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("LiteralOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_field", &self.source_field); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.field("sort_enabled", &self.sort_enabled); formatter.finish() } } /// See [`LiteralOptions`](crate::model::LiteralOptions) pub mod literal_options { /// A builder for [`LiteralOptions`](crate::model::LiteralOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<std::string::String>, pub(crate) source_field: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, pub(crate) sort_enabled: std::option::Option<bool>, } impl Builder { /// A value to use for the field if the field isn't 
specified for a document. pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self { self.default_value = Some(input.into()); self } /// A value to use for the field if the field isn't specified for a document. pub fn set_default_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.default_value = input; self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. /// </p> pub fn source_field(mut self, input: impl Into<std::string::String>) -> Self { self.source_field = Some(input.into()); self } /// <p>A string that represents the name of an index field. CloudSearch supports regular index fields as well as dynamic fields. /// A dynamic field's name defines a pattern that begins or ends with a wildcard. /// Any document fields that don't map to a regular index field but do match a /// dynamic field's pattern are configured with the dynamic field's indexing options. /// </p> /// <p>Regular field names begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore). /// Dynamic field names must begin or end with a wildcard (*). /// The wildcard can also be the only character in a dynamic field name. /// Multiple wildcards, and wildcards embedded within a string are not supported. /// </p> /// <p>The name <code>score</code> is reserved and cannot be used as a field name. /// To reference a document's ID, you can use the name <code>_id</code>. 
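/// A small sketch contrasting the two setter styles on this builder (the
/// crate path `aws_sdk_cloudsearch` and the field name `genre` are
/// assumptions): the fluent setters take plain values, while the `set_*`
/// variants take an `Option` and can therefore clear a value again.
///
/// ```no_run
/// let options = aws_sdk_cloudsearch::model::LiteralOptions::builder()
///     .source_field("genre") // placeholder name
///     .facet_enabled(true)
///     // Passing `None` through the `set_*` form leaves the option unset.
///     .set_search_enabled(None)
///     .build();
/// # let _ = options;
/// ```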
/// </p> pub fn set_source_field(mut self, input: std::option::Option<std::string::String>) -> Self { self.source_field = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(mut self, input: bool) -> Self { self.sort_enabled = Some(input); self } /// <p>Whether the field can be used to sort the search results.</p> pub fn set_sort_enabled(mut self, input: std::option::Option<bool>) -> Self { self.sort_enabled = input; self } /// Consumes the builder and constructs a [`LiteralOptions`](crate::model::LiteralOptions) pub fn build(self) -> crate::model::LiteralOptions { crate::model::LiteralOptions { default_value: self.default_value, source_field: self.source_field, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, sort_enabled: self.sort_enabled, } } } } impl LiteralOptions { /// Creates a new builder-style object to manufacture [`LiteralOptions`](crate::model::LiteralOptions) pub fn builder() -> crate::model::literal_options::Builder { crate::model::literal_options::Builder::default() } } /// <p>Options for a double-precision 64-bit floating point field. Present if <code>IndexFieldType</code> specifies the field is of type <code>double</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DoubleOptions { /// <p>A value to use for the field if the field isn't specified for a document. This can be important if you are using the field in an expression and that field is not present in every document.</p> pub default_value: std::option::Option<f64>, /// <p>The name of the source field to map to the field. </p> pub source_field: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, /// <p>Whether the field can be used to sort the search results.</p> pub sort_enabled: std::option::Option<bool>, } impl DoubleOptions { /// <p>A value to use for the field if the field isn't specified for a document. 
This can be important if you are using the field in an expression and that field is not present in every document.</p> pub fn default_value(&self) -> std::option::Option<f64> { self.default_value } /// <p>The name of the source field to map to the field. </p> pub fn source_field(&self) -> std::option::Option<&str> { self.source_field.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(&self) -> std::option::Option<bool> { self.sort_enabled } } impl std::fmt::Debug for DoubleOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DoubleOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_field", &self.source_field); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.field("sort_enabled", &self.sort_enabled); formatter.finish() } } /// See [`DoubleOptions`](crate::model::DoubleOptions) pub mod double_options { /// A builder for [`DoubleOptions`](crate::model::DoubleOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<f64>, pub(crate) source_field: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, pub(crate) sort_enabled: std::option::Option<bool>, } impl Builder { /// <p>A value to use for the field if the field isn't specified for a document. This can be important if you are using the field in an expression and that field is not present in every document.</p> pub fn default_value(mut self, input: f64) -> Self { self.default_value = Some(input); self } /// <p>A value to use for the field if the field isn't specified for a document. This can be important if you are using the field in an expression and that field is not present in every document.</p> pub fn set_default_value(mut self, input: std::option::Option<f64>) -> Self { self.default_value = input; self } /// <p>The name of the source field to map to the field. </p> pub fn source_field(mut self, input: impl Into<std::string::String>) -> Self { self.source_field = Some(input.into()); self } /// <p>The name of the source field to map to the field. 
</p> pub fn set_source_field(mut self, input: std::option::Option<std::string::String>) -> Self { self.source_field = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(mut self, input: bool) -> Self { self.sort_enabled = Some(input); self } /// <p>Whether the field can be used to sort the search results.</p> pub fn set_sort_enabled(mut self, input: std::option::Option<bool>) -> Self { self.sort_enabled = input; self } /// Consumes the builder and constructs a [`DoubleOptions`](crate::model::DoubleOptions) pub fn build(self) -> crate::model::DoubleOptions { crate::model::DoubleOptions { default_value: self.default_value, source_field: self.source_field, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, sort_enabled: self.sort_enabled, } } } } impl DoubleOptions { /// Creates a new builder-style object to manufacture [`DoubleOptions`](crate::model::DoubleOptions) pub fn builder() -> crate::model::double_options::Builder { crate::model::double_options::Builder::default() } } /// <p>Options for a 64-bit signed integer field. Present if <code>IndexFieldType</code> specifies the field is of type <code>int</code>. All options are enabled by default.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct IntOptions { /// A value to use for the field if the field isn't specified for a document. This can be important if you are using the field in an expression and that field is not present in every document. pub default_value: std::option::Option<i64>, /// <p>The name of the source field to map to the field. </p> pub source_field: std::option::Option<std::string::String>, /// <p>Whether facet information can be returned for the field.</p> pub facet_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field are searchable.</p> pub search_enabled: std::option::Option<bool>, /// <p>Whether the contents of the field can be returned in the search results.</p> pub return_enabled: std::option::Option<bool>, /// <p>Whether the field can be used to sort the search results.</p> pub sort_enabled: std::option::Option<bool>, } impl IntOptions { /// A value to use for the field if the field isn't specified for a document. This can be important if you are using the field in an expression and that field is not present in every document. 
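/// A brief sketch of the usual read pattern (the crate path
/// `aws_sdk_cloudsearch` and the fallback of `0` are assumptions): since the
/// member is optional, callers typically supply their own fallback value.
///
/// ```no_run
/// let opts = aws_sdk_cloudsearch::model::IntOptions::builder()
///     .default_value(0)
///     .build();
/// // Copies the `Option<i64>` out and falls back to 0 when unset.
/// let default = opts.default_value().unwrap_or(0);
/// # let _ = default;
/// ```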
pub fn default_value(&self) -> std::option::Option<i64> { self.default_value } /// <p>The name of the source field to map to the field. </p> pub fn source_field(&self) -> std::option::Option<&str> { self.source_field.as_deref() } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(&self) -> std::option::Option<bool> { self.facet_enabled } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(&self) -> std::option::Option<bool> { self.search_enabled } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(&self) -> std::option::Option<bool> { self.return_enabled } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(&self) -> std::option::Option<bool> { self.sort_enabled } } impl std::fmt::Debug for IntOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("IntOptions"); formatter.field("default_value", &self.default_value); formatter.field("source_field", &self.source_field); formatter.field("facet_enabled", &self.facet_enabled); formatter.field("search_enabled", &self.search_enabled); formatter.field("return_enabled", &self.return_enabled); formatter.field("sort_enabled", &self.sort_enabled); formatter.finish() } } /// See [`IntOptions`](crate::model::IntOptions) pub mod int_options { /// A builder for [`IntOptions`](crate::model::IntOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) default_value: std::option::Option<i64>, pub(crate) source_field: std::option::Option<std::string::String>, pub(crate) facet_enabled: std::option::Option<bool>, pub(crate) search_enabled: std::option::Option<bool>, pub(crate) return_enabled: std::option::Option<bool>, pub(crate) sort_enabled: std::option::Option<bool>, } impl Builder { /// A value to use for the field if the field isn't specified for a document. This can be important if you are using the field in an expression and that field is not present in every document. pub fn default_value(mut self, input: i64) -> Self { self.default_value = Some(input); self } /// A value to use for the field if the field isn't specified for a document. This can be important if you are using the field in an expression and that field is not present in every document. pub fn set_default_value(mut self, input: std::option::Option<i64>) -> Self { self.default_value = input; self } /// <p>The name of the source field to map to the field. </p> pub fn source_field(mut self, input: impl Into<std::string::String>) -> Self { self.source_field = Some(input.into()); self } /// <p>The name of the source field to map to the field. 
</p> pub fn set_source_field(mut self, input: std::option::Option<std::string::String>) -> Self { self.source_field = input; self } /// <p>Whether facet information can be returned for the field.</p> pub fn facet_enabled(mut self, input: bool) -> Self { self.facet_enabled = Some(input); self } /// <p>Whether facet information can be returned for the field.</p> pub fn set_facet_enabled(mut self, input: std::option::Option<bool>) -> Self { self.facet_enabled = input; self } /// <p>Whether the contents of the field are searchable.</p> pub fn search_enabled(mut self, input: bool) -> Self { self.search_enabled = Some(input); self } /// <p>Whether the contents of the field are searchable.</p> pub fn set_search_enabled(mut self, input: std::option::Option<bool>) -> Self { self.search_enabled = input; self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn return_enabled(mut self, input: bool) -> Self { self.return_enabled = Some(input); self } /// <p>Whether the contents of the field can be returned in the search results.</p> pub fn set_return_enabled(mut self, input: std::option::Option<bool>) -> Self { self.return_enabled = input; self } /// <p>Whether the field can be used to sort the search results.</p> pub fn sort_enabled(mut self, input: bool) -> Self { self.sort_enabled = Some(input); self } /// <p>Whether the field can be used to sort the search results.</p> pub fn set_sort_enabled(mut self, input: std::option::Option<bool>) -> Self { self.sort_enabled = input; self } /// Consumes the builder and constructs a [`IntOptions`](crate::model::IntOptions) pub fn build(self) -> crate::model::IntOptions { crate::model::IntOptions { default_value: self.default_value, source_field: self.source_field, facet_enabled: self.facet_enabled, search_enabled: self.search_enabled, return_enabled: self.return_enabled, sort_enabled: self.sort_enabled, } } } } impl IntOptions { /// Creates a new builder-style object to manufacture [`IntOptions`](crate::model::IntOptions) pub fn builder() -> crate::model::int_options::Builder { crate::model::int_options::Builder::default() } } /// <p>The type of field. The valid options for a field depend on the field type. For more information about the supported field types, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-index-fields.html" target="_blank">Configuring Index Fields</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum IndexFieldType { #[allow(missing_docs)] // documentation missing in model Date, #[allow(missing_docs)] // documentation missing in model DateArray, #[allow(missing_docs)] // documentation missing in model Double, #[allow(missing_docs)] // documentation missing in model DoubleArray, #[allow(missing_docs)] // documentation missing in model Int, #[allow(missing_docs)] // documentation missing in model IntArray, #[allow(missing_docs)] // documentation missing in model Latlon, #[allow(missing_docs)] // documentation missing in model Literal, #[allow(missing_docs)] // documentation missing in model LiteralArray, #[allow(missing_docs)] // documentation missing in model Text, #[allow(missing_docs)] // documentation missing in model TextArray, /// Unknown contains new variants that have been added since this code was generated. 
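/// A hedged sketch of how this fallback behaves (the crate path
/// `aws_sdk_cloudsearch` and the string `"geo-shape"` are assumptions):
/// conversion from a string never fails, and unrecognized values are kept
/// rather than discarded.
///
/// ```no_run
/// use aws_sdk_cloudsearch::model::IndexFieldType;
///
/// // Known values map to concrete variants...
/// assert_eq!(IndexFieldType::from("int-array"), IndexFieldType::IntArray);
/// // ...while anything else is preserved in `Unknown`.
/// let other = IndexFieldType::from("geo-shape");
/// assert_eq!(other.as_str(), "geo-shape");
/// ```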
    Unknown(String),
}
impl std::convert::From<&str> for IndexFieldType {
    fn from(s: &str) -> Self {
        match s {
            "date" => IndexFieldType::Date,
            "date-array" => IndexFieldType::DateArray,
            "double" => IndexFieldType::Double,
            "double-array" => IndexFieldType::DoubleArray,
            "int" => IndexFieldType::Int,
            "int-array" => IndexFieldType::IntArray,
            "latlon" => IndexFieldType::Latlon,
            "literal" => IndexFieldType::Literal,
            "literal-array" => IndexFieldType::LiteralArray,
            "text" => IndexFieldType::Text,
            "text-array" => IndexFieldType::TextArray,
            other => IndexFieldType::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for IndexFieldType {
    type Err = std::convert::Infallible;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(IndexFieldType::from(s))
    }
}
impl IndexFieldType {
    /// Returns the `&str` value of the enum member.
    pub fn as_str(&self) -> &str {
        match self {
            IndexFieldType::Date => "date",
            IndexFieldType::DateArray => "date-array",
            IndexFieldType::Double => "double",
            IndexFieldType::DoubleArray => "double-array",
            IndexFieldType::Int => "int",
            IndexFieldType::IntArray => "int-array",
            IndexFieldType::Latlon => "latlon",
            IndexFieldType::Literal => "literal",
            IndexFieldType::LiteralArray => "literal-array",
            IndexFieldType::Text => "text",
            IndexFieldType::TextArray => "text-array",
            IndexFieldType::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all the `&str` values of the enum members.
    pub fn values() -> &'static [&'static str] {
        &[
            "date",
            "date-array",
            "double",
            "double-array",
            "int",
            "int-array",
            "latlon",
            "literal",
            "literal-array",
            "text",
            "text-array",
        ]
    }
}
impl AsRef<str> for IndexFieldType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The value of an <code>Expression</code> and its current status.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ExpressionStatus {
    /// <p>The expression that is evaluated for sorting while processing a search request.</p>
    pub options: std::option::Option<crate::model::Expression>,
    /// <p>The status of domain configuration option.</p>
    pub status: std::option::Option<crate::model::OptionStatus>,
}
impl ExpressionStatus {
    /// <p>The expression that is evaluated for sorting while processing a search request.</p>
    pub fn options(&self) -> std::option::Option<&crate::model::Expression> {
        self.options.as_ref()
    }
    /// <p>The status of domain configuration option.</p>
    pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> {
        self.status.as_ref()
    }
}
impl std::fmt::Debug for ExpressionStatus {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("ExpressionStatus");
        formatter.field("options", &self.options);
        formatter.field("status", &self.status);
        formatter.finish()
    }
}
/// See [`ExpressionStatus`](crate::model::ExpressionStatus)
pub mod expression_status {
    /// A builder for [`ExpressionStatus`](crate::model::ExpressionStatus)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) options: std::option::Option<crate::model::Expression>,
        pub(crate) status: std::option::Option<crate::model::OptionStatus>,
    }
    impl Builder {
        /// <p>The expression that is evaluated for sorting while processing a search request.</p>
        pub fn options(mut self, input: crate::model::Expression) -> Self {
            self.options = Some(input);
            self
        }
        /// <p>The expression that is evaluated for sorting while processing a search request.</p>
        pub fn set_options(mut self, input:
std::option::Option<crate::model::Expression>) -> Self { self.options = input; self } /// <p>The status of domain configuration option.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of domain configuration option.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`ExpressionStatus`](crate::model::ExpressionStatus) pub fn build(self) -> crate::model::ExpressionStatus { crate::model::ExpressionStatus { options: self.options, status: self.status, } } } } impl ExpressionStatus { /// Creates a new builder-style object to manufacture [`ExpressionStatus`](crate::model::ExpressionStatus) pub fn builder() -> crate::model::expression_status::Builder { crate::model::expression_status::Builder::default() } } /// <p>A named expression that can be evaluated at search time. Can be used to sort the search results, define other expressions, or return computed information in the search results. </p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct Expression { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub expression_name: std::option::Option<std::string::String>, /// <p>The expression to evaluate for sorting while processing a search request. The <code>Expression</code> syntax is based on JavaScript expressions. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-expressions.html" target="_blank">Configuring Expressions</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub expression_value: std::option::Option<std::string::String>, } impl Expression { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn expression_name(&self) -> std::option::Option<&str> { self.expression_name.as_deref() } /// <p>The expression to evaluate for sorting while processing a search request. The <code>Expression</code> syntax is based on JavaScript expressions. 
For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-expressions.html" target="_blank">Configuring Expressions</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn expression_value(&self) -> std::option::Option<&str> { self.expression_value.as_deref() } } impl std::fmt::Debug for Expression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("Expression"); formatter.field("expression_name", &self.expression_name); formatter.field("expression_value", &self.expression_value); formatter.finish() } } /// See [`Expression`](crate::model::Expression) pub mod expression { /// A builder for [`Expression`](crate::model::Expression) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) expression_name: std::option::Option<std::string::String>, pub(crate) expression_value: std::option::Option<std::string::String>, } impl Builder { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn expression_name(mut self, input: impl Into<std::string::String>) -> Self { self.expression_name = Some(input.into()); self } /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn set_expression_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.expression_name = input; self } /// <p>The expression to evaluate for sorting while processing a search request. The <code>Expression</code> syntax is based on JavaScript expressions. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-expressions.html" target="_blank">Configuring Expressions</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn expression_value(mut self, input: impl Into<std::string::String>) -> Self { self.expression_value = Some(input.into()); self } /// <p>The expression to evaluate for sorting while processing a search request. The <code>Expression</code> syntax is based on JavaScript expressions. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-expressions.html" target="_blank">Configuring Expressions</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn set_expression_value( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.expression_value = input; self } /// Consumes the builder and constructs a [`Expression`](crate::model::Expression) pub fn build(self) -> crate::model::Expression { crate::model::Expression { expression_name: self.expression_name, expression_value: self.expression_value, } } } } impl Expression { /// Creates a new builder-style object to manufacture [`Expression`](crate::model::Expression) pub fn builder() -> crate::model::expression::Builder { crate::model::expression::Builder::default() } } /// <p>The current status of the search domain.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct DomainStatus { /// <p>An internally generated unique identifier for a domain.</p> pub domain_id: std::option::Option<std::string::String>, /// <p>A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. 
Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen).</p> pub domain_name: std::option::Option<std::string::String>, /// <p>The Amazon Resource Name (ARN) of the search domain. See <a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/index.html?Using_Identifiers.html" target="_blank">Identifiers for IAM Entities</a> in <i>Using AWS Identity and Access Management</i> for more information.</p> pub arn: std::option::Option<std::string::String>, /// <p>True if the search domain is created. It can take several minutes to initialize a domain when <a>CreateDomain</a> is called. Newly created search domains are returned from <a>DescribeDomains</a> with a false value for Created until domain creation is complete.</p> pub created: std::option::Option<bool>, /// <p>True if the search domain has been deleted. The system must clean up resources dedicated to the search domain when <a>DeleteDomain</a> is called. Newly deleted search domains are returned from <a>DescribeDomains</a> with a true value for IsDeleted for several minutes until resource cleanup is complete.</p> pub deleted: std::option::Option<bool>, /// <p>The service endpoint for updating documents in a search domain.</p> pub doc_service: std::option::Option<crate::model::ServiceEndpoint>, /// <p>The service endpoint for requesting search results from a search domain.</p> pub search_service: std::option::Option<crate::model::ServiceEndpoint>, /// <p>True if <a>IndexDocuments</a> needs to be called to activate the current domain configuration.</p> pub requires_index_documents: std::option::Option<bool>, /// <p>True if processing is being done to activate the current domain configuration.</p> pub processing: std::option::Option<bool>, /// <p>The instance type that is being used to process search requests.</p> pub search_instance_type: std::option::Option<std::string::String>, /// <p>The number of partitions across which the search index is spread.</p> pub search_partition_count: i32, /// <p>The number of search instances that are available to process search requests.</p> pub search_instance_count: i32, #[allow(missing_docs)] // documentation missing in model pub limits: std::option::Option<crate::model::Limits>, } impl DomainStatus { /// <p>An internally generated unique identifier for a domain.</p> pub fn domain_id(&self) -> std::option::Option<&str> { self.domain_id.as_deref() } /// <p>A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen).</p> pub fn domain_name(&self) -> std::option::Option<&str> { self.domain_name.as_deref() } /// <p>The Amazon Resource Name (ARN) of the search domain. See <a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/index.html?Using_Identifiers.html" target="_blank">Identifiers for IAM Entities</a> in <i>Using AWS Identity and Access Management</i> for more information.</p> pub fn arn(&self) -> std::option::Option<&str> { self.arn.as_deref() } /// <p>True if the search domain is created. It can take several minutes to initialize a domain when <a>CreateDomain</a> is called. Newly created search domains are returned from <a>DescribeDomains</a> with a false value for Created until domain creation is complete.</p> pub fn created(&self) -> std::option::Option<bool> { self.created } /// <p>True if the search domain has been deleted. 
The system must clean up resources dedicated to the search domain when <a>DeleteDomain</a> is called. Newly deleted search domains are returned from <a>DescribeDomains</a> with a true value for IsDeleted for several minutes until resource cleanup is complete.</p> pub fn deleted(&self) -> std::option::Option<bool> { self.deleted } /// <p>The service endpoint for updating documents in a search domain.</p> pub fn doc_service(&self) -> std::option::Option<&crate::model::ServiceEndpoint> { self.doc_service.as_ref() } /// <p>The service endpoint for requesting search results from a search domain.</p> pub fn search_service(&self) -> std::option::Option<&crate::model::ServiceEndpoint> { self.search_service.as_ref() } /// <p>True if <a>IndexDocuments</a> needs to be called to activate the current domain configuration.</p> pub fn requires_index_documents(&self) -> std::option::Option<bool> { self.requires_index_documents } /// <p>True if processing is being done to activate the current domain configuration.</p> pub fn processing(&self) -> std::option::Option<bool> { self.processing } /// <p>The instance type that is being used to process search requests.</p> pub fn search_instance_type(&self) -> std::option::Option<&str> { self.search_instance_type.as_deref() } /// <p>The number of partitions across which the search index is spread.</p> pub fn search_partition_count(&self) -> i32 { self.search_partition_count } /// <p>The number of search instances that are available to process search requests.</p> pub fn search_instance_count(&self) -> i32 { self.search_instance_count } #[allow(missing_docs)] // documentation missing in model pub fn limits(&self) -> std::option::Option<&crate::model::Limits> { self.limits.as_ref() } } impl std::fmt::Debug for DomainStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("DomainStatus"); formatter.field("domain_id", &self.domain_id); formatter.field("domain_name", &self.domain_name); formatter.field("arn", &self.arn); formatter.field("created", &self.created); formatter.field("deleted", &self.deleted); formatter.field("doc_service", &self.doc_service); formatter.field("search_service", &self.search_service); formatter.field("requires_index_documents", &self.requires_index_documents); formatter.field("processing", &self.processing); formatter.field("search_instance_type", &self.search_instance_type); formatter.field("search_partition_count", &self.search_partition_count); formatter.field("search_instance_count", &self.search_instance_count); formatter.field("limits", &self.limits); formatter.finish() } } /// See [`DomainStatus`](crate::model::DomainStatus) pub mod domain_status { /// A builder for [`DomainStatus`](crate::model::DomainStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) domain_id: std::option::Option<std::string::String>, pub(crate) domain_name: std::option::Option<std::string::String>, pub(crate) arn: std::option::Option<std::string::String>, pub(crate) created: std::option::Option<bool>, pub(crate) deleted: std::option::Option<bool>, pub(crate) doc_service: std::option::Option<crate::model::ServiceEndpoint>, pub(crate) search_service: std::option::Option<crate::model::ServiceEndpoint>, pub(crate) requires_index_documents: std::option::Option<bool>, pub(crate) processing: std::option::Option<bool>, pub(crate) search_instance_type: std::option::Option<std::string::String>, pub(crate) 
search_partition_count: std::option::Option<i32>, pub(crate) search_instance_count: std::option::Option<i32>, pub(crate) limits: std::option::Option<crate::model::Limits>, } impl Builder { /// <p>An internally generated unique identifier for a domain.</p> pub fn domain_id(mut self, input: impl Into<std::string::String>) -> Self { self.domain_id = Some(input.into()); self } /// <p>An internally generated unique identifier for a domain.</p> pub fn set_domain_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.domain_id = input; self } /// <p>A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen).</p> pub fn domain_name(mut self, input: impl Into<std::string::String>) -> Self { self.domain_name = Some(input.into()); self } /// <p>A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen).</p> pub fn set_domain_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.domain_name = input; self } /// <p>The Amazon Resource Name (ARN) of the search domain. See <a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/index.html?Using_Identifiers.html" target="_blank">Identifiers for IAM Entities</a> in <i>Using AWS Identity and Access Management</i> for more information.</p> pub fn arn(mut self, input: impl Into<std::string::String>) -> Self { self.arn = Some(input.into()); self } /// <p>The Amazon Resource Name (ARN) of the search domain. See <a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/index.html?Using_Identifiers.html" target="_blank">Identifiers for IAM Entities</a> in <i>Using AWS Identity and Access Management</i> for more information.</p> pub fn set_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.arn = input; self } /// <p>True if the search domain is created. It can take several minutes to initialize a domain when <a>CreateDomain</a> is called. Newly created search domains are returned from <a>DescribeDomains</a> with a false value for Created until domain creation is complete.</p> pub fn created(mut self, input: bool) -> Self { self.created = Some(input); self } /// <p>True if the search domain is created. It can take several minutes to initialize a domain when <a>CreateDomain</a> is called. Newly created search domains are returned from <a>DescribeDomains</a> with a false value for Created until domain creation is complete.</p> pub fn set_created(mut self, input: std::option::Option<bool>) -> Self { self.created = input; self } /// <p>True if the search domain has been deleted. The system must clean up resources dedicated to the search domain when <a>DeleteDomain</a> is called. Newly deleted search domains are returned from <a>DescribeDomains</a> with a true value for IsDeleted for several minutes until resource cleanup is complete.</p> pub fn deleted(mut self, input: bool) -> Self { self.deleted = Some(input); self } /// <p>True if the search domain has been deleted. The system must clean up resources dedicated to the search domain when <a>DeleteDomain</a> is called. 
Newly deleted search domains are returned from <a>DescribeDomains</a> with a true value for IsDeleted for several minutes until resource cleanup is complete.</p> pub fn set_deleted(mut self, input: std::option::Option<bool>) -> Self { self.deleted = input; self } /// <p>The service endpoint for updating documents in a search domain.</p> pub fn doc_service(mut self, input: crate::model::ServiceEndpoint) -> Self { self.doc_service = Some(input); self } /// <p>The service endpoint for updating documents in a search domain.</p> pub fn set_doc_service( mut self, input: std::option::Option<crate::model::ServiceEndpoint>, ) -> Self { self.doc_service = input; self } /// <p>The service endpoint for requesting search results from a search domain.</p> pub fn search_service(mut self, input: crate::model::ServiceEndpoint) -> Self { self.search_service = Some(input); self } /// <p>The service endpoint for requesting search results from a search domain.</p> pub fn set_search_service( mut self, input: std::option::Option<crate::model::ServiceEndpoint>, ) -> Self { self.search_service = input; self } /// <p>True if <a>IndexDocuments</a> needs to be called to activate the current domain configuration.</p> pub fn requires_index_documents(mut self, input: bool) -> Self { self.requires_index_documents = Some(input); self } /// <p>True if <a>IndexDocuments</a> needs to be called to activate the current domain configuration.</p> pub fn set_requires_index_documents(mut self, input: std::option::Option<bool>) -> Self { self.requires_index_documents = input; self } /// <p>True if processing is being done to activate the current domain configuration.</p> pub fn processing(mut self, input: bool) -> Self { self.processing = Some(input); self } /// <p>True if processing is being done to activate the current domain configuration.</p> pub fn set_processing(mut self, input: std::option::Option<bool>) -> Self { self.processing = input; self } /// <p>The instance type that is being used to process search requests.</p> pub fn search_instance_type(mut self, input: impl Into<std::string::String>) -> Self { self.search_instance_type = Some(input.into()); self } /// <p>The instance type that is being used to process search requests.</p> pub fn set_search_instance_type( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.search_instance_type = input; self } /// <p>The number of partitions across which the search index is spread.</p> pub fn search_partition_count(mut self, input: i32) -> Self { self.search_partition_count = Some(input); self } /// <p>The number of partitions across which the search index is spread.</p> pub fn set_search_partition_count(mut self, input: std::option::Option<i32>) -> Self { self.search_partition_count = input; self } /// <p>The number of search instances that are available to process search requests.</p> pub fn search_instance_count(mut self, input: i32) -> Self { self.search_instance_count = Some(input); self } /// <p>The number of search instances that are available to process search requests.</p> pub fn set_search_instance_count(mut self, input: std::option::Option<i32>) -> Self { self.search_instance_count = input; self } #[allow(missing_docs)] // documentation missing in model pub fn limits(mut self, input: crate::model::Limits) -> Self { self.limits = Some(input); self } #[allow(missing_docs)] // documentation missing in model pub fn set_limits(mut self, input: std::option::Option<crate::model::Limits>) -> Self { self.limits = input; self } /// Consumes the builder and 
constructs a [`DomainStatus`](crate::model::DomainStatus) pub fn build(self) -> crate::model::DomainStatus { crate::model::DomainStatus { domain_id: self.domain_id, domain_name: self.domain_name, arn: self.arn, created: self.created, deleted: self.deleted, doc_service: self.doc_service, search_service: self.search_service, requires_index_documents: self.requires_index_documents, processing: self.processing, search_instance_type: self.search_instance_type, search_partition_count: self.search_partition_count.unwrap_or_default(), search_instance_count: self.search_instance_count.unwrap_or_default(), limits: self.limits, } } } } impl DomainStatus { /// Creates a new builder-style object to manufacture [`DomainStatus`](crate::model::DomainStatus) pub fn builder() -> crate::model::domain_status::Builder { crate::model::domain_status::Builder::default() } } #[allow(missing_docs)] // documentation missing in model #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct Limits { #[allow(missing_docs)] // documentation missing in model pub maximum_replication_count: i32, #[allow(missing_docs)] // documentation missing in model pub maximum_partition_count: i32, } impl Limits { #[allow(missing_docs)] // documentation missing in model pub fn maximum_replication_count(&self) -> i32 { self.maximum_replication_count } #[allow(missing_docs)] // documentation missing in model pub fn maximum_partition_count(&self) -> i32 { self.maximum_partition_count } } impl std::fmt::Debug for Limits { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("Limits"); formatter.field("maximum_replication_count", &self.maximum_replication_count); formatter.field("maximum_partition_count", &self.maximum_partition_count); formatter.finish() } } /// See [`Limits`](crate::model::Limits) pub mod limits { /// A builder for [`Limits`](crate::model::Limits) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) maximum_replication_count: std::option::Option<i32>, pub(crate) maximum_partition_count: std::option::Option<i32>, } impl Builder { #[allow(missing_docs)] // documentation missing in model pub fn maximum_replication_count(mut self, input: i32) -> Self { self.maximum_replication_count = Some(input); self } #[allow(missing_docs)] // documentation missing in model pub fn set_maximum_replication_count(mut self, input: std::option::Option<i32>) -> Self { self.maximum_replication_count = input; self } #[allow(missing_docs)] // documentation missing in model pub fn maximum_partition_count(mut self, input: i32) -> Self { self.maximum_partition_count = Some(input); self } #[allow(missing_docs)] // documentation missing in model pub fn set_maximum_partition_count(mut self, input: std::option::Option<i32>) -> Self { self.maximum_partition_count = input; self } /// Consumes the builder and constructs a [`Limits`](crate::model::Limits) pub fn build(self) -> crate::model::Limits { crate::model::Limits { maximum_replication_count: self.maximum_replication_count.unwrap_or_default(), maximum_partition_count: self.maximum_partition_count.unwrap_or_default(), } } } } impl Limits { /// Creates a new builder-style object to manufacture [`Limits`](crate::model::Limits) pub fn builder() -> crate::model::limits::Builder { crate::model::limits::Builder::default() } } /// <p>The endpoint to which service requests can be submitted.</p> #[non_exhaustive] #[derive(std::clone::Clone, 
std::cmp::PartialEq)] pub struct ServiceEndpoint { /// <p>The endpoint to which service requests can be submitted. For example, <code>search-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code> or <code>doc-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code>.</p> pub endpoint: std::option::Option<std::string::String>, } impl ServiceEndpoint { /// <p>The endpoint to which service requests can be submitted. For example, <code>search-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code> or <code>doc-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code>.</p> pub fn endpoint(&self) -> std::option::Option<&str> { self.endpoint.as_deref() } } impl std::fmt::Debug for ServiceEndpoint { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("ServiceEndpoint"); formatter.field("endpoint", &self.endpoint); formatter.finish() } } /// See [`ServiceEndpoint`](crate::model::ServiceEndpoint) pub mod service_endpoint { /// A builder for [`ServiceEndpoint`](crate::model::ServiceEndpoint) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) endpoint: std::option::Option<std::string::String>, } impl Builder { /// <p>The endpoint to which service requests can be submitted. For example, <code>search-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code> or <code>doc-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code>.</p> pub fn endpoint(mut self, input: impl Into<std::string::String>) -> Self { self.endpoint = Some(input.into()); self } /// <p>The endpoint to which service requests can be submitted. For example, <code>search-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code> or <code>doc-imdb-movies-oopcnjfn6ugofer3zx5iadxxca.eu-west-1.cloudsearch.amazonaws.com</code>.</p> pub fn set_endpoint(mut self, input: std::option::Option<std::string::String>) -> Self { self.endpoint = input; self } /// Consumes the builder and constructs a [`ServiceEndpoint`](crate::model::ServiceEndpoint) pub fn build(self) -> crate::model::ServiceEndpoint { crate::model::ServiceEndpoint { endpoint: self.endpoint, } } } } impl ServiceEndpoint { /// Creates a new builder-style object to manufacture [`ServiceEndpoint`](crate::model::ServiceEndpoint) pub fn builder() -> crate::model::service_endpoint::Builder { crate::model::service_endpoint::Builder::default() } } /// <p>The status and configuration of an <code>AnalysisScheme</code>.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct AnalysisSchemeStatus { /// <p>Configuration information for an analysis scheme. Each analysis scheme has a unique name and specifies the language of the text to be processed. The following options can be configured for an analysis scheme: <code>Synonyms</code>, <code>Stopwords</code>, <code>StemmingDictionary</code>, <code>JapaneseTokenizationDictionary</code> and <code>AlgorithmicStemming</code>.</p> pub options: std::option::Option<crate::model::AnalysisScheme>, /// <p>The status of domain configuration option.</p> pub status: std::option::Option<crate::model::OptionStatus>, } impl AnalysisSchemeStatus { /// <p>Configuration information for an analysis scheme. Each analysis scheme has a unique name and specifies the language of the text to be processed. 
The following options can be configured for an analysis scheme: <code>Synonyms</code>, <code>Stopwords</code>, <code>StemmingDictionary</code>, <code>JapaneseTokenizationDictionary</code> and <code>AlgorithmicStemming</code>.</p> pub fn options(&self) -> std::option::Option<&crate::model::AnalysisScheme> { self.options.as_ref() } /// <p>The status of domain configuration option.</p> pub fn status(&self) -> std::option::Option<&crate::model::OptionStatus> { self.status.as_ref() } } impl std::fmt::Debug for AnalysisSchemeStatus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("AnalysisSchemeStatus"); formatter.field("options", &self.options); formatter.field("status", &self.status); formatter.finish() } } /// See [`AnalysisSchemeStatus`](crate::model::AnalysisSchemeStatus) pub mod analysis_scheme_status { /// A builder for [`AnalysisSchemeStatus`](crate::model::AnalysisSchemeStatus) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) options: std::option::Option<crate::model::AnalysisScheme>, pub(crate) status: std::option::Option<crate::model::OptionStatus>, } impl Builder { /// <p>Configuration information for an analysis scheme. Each analysis scheme has a unique name and specifies the language of the text to be processed. The following options can be configured for an analysis scheme: <code>Synonyms</code>, <code>Stopwords</code>, <code>StemmingDictionary</code>, <code>JapaneseTokenizationDictionary</code> and <code>AlgorithmicStemming</code>.</p> pub fn options(mut self, input: crate::model::AnalysisScheme) -> Self { self.options = Some(input); self } /// <p>Configuration information for an analysis scheme. Each analysis scheme has a unique name and specifies the language of the text to be processed. The following options can be configured for an analysis scheme: <code>Synonyms</code>, <code>Stopwords</code>, <code>StemmingDictionary</code>, <code>JapaneseTokenizationDictionary</code> and <code>AlgorithmicStemming</code>.</p> pub fn set_options( mut self, input: std::option::Option<crate::model::AnalysisScheme>, ) -> Self { self.options = input; self } /// <p>The status of domain configuration option.</p> pub fn status(mut self, input: crate::model::OptionStatus) -> Self { self.status = Some(input); self } /// <p>The status of domain configuration option.</p> pub fn set_status( mut self, input: std::option::Option<crate::model::OptionStatus>, ) -> Self { self.status = input; self } /// Consumes the builder and constructs a [`AnalysisSchemeStatus`](crate::model::AnalysisSchemeStatus) pub fn build(self) -> crate::model::AnalysisSchemeStatus { crate::model::AnalysisSchemeStatus { options: self.options, status: self.status, } } } } impl AnalysisSchemeStatus { /// Creates a new builder-style object to manufacture [`AnalysisSchemeStatus`](crate::model::AnalysisSchemeStatus) pub fn builder() -> crate::model::analysis_scheme_status::Builder { crate::model::analysis_scheme_status::Builder::default() } } /// <p>Configuration information for an analysis scheme. Each analysis scheme has a unique name and specifies the language of the text to be processed. 
The following options can be configured for an analysis scheme: <code>Synonyms</code>, <code>Stopwords</code>, <code>StemmingDictionary</code>, <code>JapaneseTokenizationDictionary</code> and <code>AlgorithmicStemming</code>.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct AnalysisScheme { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub analysis_scheme_name: std::option::Option<std::string::String>, /// <p>An <a href="http://tools.ietf.org/html/rfc4646" target="_blank">IETF RFC 4646</a> language code or <code>mul</code> for multiple languages.</p> pub analysis_scheme_language: std::option::Option<crate::model::AnalysisSchemeLanguage>, /// <p>Synonyms, stopwords, and stemming options for an analysis scheme. Includes tokenization dictionary for Japanese.</p> pub analysis_options: std::option::Option<crate::model::AnalysisOptions>, } impl AnalysisScheme { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn analysis_scheme_name(&self) -> std::option::Option<&str> { self.analysis_scheme_name.as_deref() } /// <p>An <a href="http://tools.ietf.org/html/rfc4646" target="_blank">IETF RFC 4646</a> language code or <code>mul</code> for multiple languages.</p> pub fn analysis_scheme_language( &self, ) -> std::option::Option<&crate::model::AnalysisSchemeLanguage> { self.analysis_scheme_language.as_ref() } /// <p>Synonyms, stopwords, and stemming options for an analysis scheme. Includes tokenization dictionary for Japanese.</p> pub fn analysis_options(&self) -> std::option::Option<&crate::model::AnalysisOptions> { self.analysis_options.as_ref() } } impl std::fmt::Debug for AnalysisScheme { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("AnalysisScheme"); formatter.field("analysis_scheme_name", &self.analysis_scheme_name); formatter.field("analysis_scheme_language", &self.analysis_scheme_language); formatter.field("analysis_options", &self.analysis_options); formatter.finish() } } /// See [`AnalysisScheme`](crate::model::AnalysisScheme) pub mod analysis_scheme { /// A builder for [`AnalysisScheme`](crate::model::AnalysisScheme) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) analysis_scheme_name: std::option::Option<std::string::String>, pub(crate) analysis_scheme_language: std::option::Option<crate::model::AnalysisSchemeLanguage>, pub(crate) analysis_options: std::option::Option<crate::model::AnalysisOptions>, } impl Builder { /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn analysis_scheme_name(mut self, input: impl Into<std::string::String>) -> Self { self.analysis_scheme_name = Some(input.into()); self } /// <p>Names must begin with a letter and can contain the following characters: /// a-z (lowercase), 0-9, and _ (underscore).</p> pub fn set_analysis_scheme_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.analysis_scheme_name = input; self } /// <p>An <a href="http://tools.ietf.org/html/rfc4646" target="_blank">IETF RFC 4646</a> language code or <code>mul</code> for multiple languages.</p> pub fn analysis_scheme_language( mut self, input: crate::model::AnalysisSchemeLanguage, ) -> Self { self.analysis_scheme_language = Some(input); 
self } /// <p>An <a href="http://tools.ietf.org/html/rfc4646" target="_blank">IETF RFC 4646</a> language code or <code>mul</code> for multiple languages.</p> pub fn set_analysis_scheme_language( mut self, input: std::option::Option<crate::model::AnalysisSchemeLanguage>, ) -> Self { self.analysis_scheme_language = input; self } /// <p>Synonyms, stopwords, and stemming options for an analysis scheme. Includes tokenization dictionary for Japanese.</p> pub fn analysis_options(mut self, input: crate::model::AnalysisOptions) -> Self { self.analysis_options = Some(input); self } /// <p>Synonyms, stopwords, and stemming options for an analysis scheme. Includes tokenization dictionary for Japanese.</p> pub fn set_analysis_options( mut self, input: std::option::Option<crate::model::AnalysisOptions>, ) -> Self { self.analysis_options = input; self } /// Consumes the builder and constructs a [`AnalysisScheme`](crate::model::AnalysisScheme) pub fn build(self) -> crate::model::AnalysisScheme { crate::model::AnalysisScheme { analysis_scheme_name: self.analysis_scheme_name, analysis_scheme_language: self.analysis_scheme_language, analysis_options: self.analysis_options, } } } } impl AnalysisScheme { /// Creates a new builder-style object to manufacture [`AnalysisScheme`](crate::model::AnalysisScheme) pub fn builder() -> crate::model::analysis_scheme::Builder { crate::model::analysis_scheme::Builder::default() } } /// <p>Synonyms, stopwords, and stemming options for an analysis scheme. Includes tokenization dictionary for Japanese.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct AnalysisOptions { /// <p>A JSON object that defines synonym groups and aliases. A synonym group is an array of arrays, where each sub-array is a group of terms where each term in the group is considered a synonym of every other term in the group. The aliases value is an object that contains a collection of string:value pairs where the string specifies a term and the array of values specifies each of the aliases for that term. An alias is considered a synonym of the specified term, but the term is not considered a synonym of the alias. For more information about specifying synonyms, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-analysis-schemes.html#synonyms">Synonyms</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub synonyms: std::option::Option<std::string::String>, /// <p>A JSON array of terms to ignore during indexing and searching. For example, <code>["a", "an", "the", "of"]</code>. The stopwords dictionary must explicitly list each word you want to ignore. Wildcards and regular expressions are not supported. </p> pub stopwords: std::option::Option<std::string::String>, /// <p>A JSON object that contains a collection of string:value pairs that each map a term to its stem. For example, <code>{"term1": "stem1", "term2": "stem2", "term3": "stem3"}</code>. The stemming dictionary is applied in addition to any algorithmic stemming. This enables you to override the results of the algorithmic stemming to correct specific cases of overstemming or understemming. The maximum size of a stemming dictionary is 500 KB.</p> pub stemming_dictionary: std::option::Option<std::string::String>, /// <p>A JSON array that contains a collection of terms, tokens, readings and parts of speech for Japanese tokenization. The Japanese tokenization dictionary enables you to override the default tokenization for selected terms. 
This is only valid for Japanese language fields.</p> pub japanese_tokenization_dictionary: std::option::Option<std::string::String>, /// <p>The level of algorithmic stemming to perform: <code>none</code>, <code>minimal</code>, <code>light</code>, or <code>full</code>. The available levels vary depending on the language. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/text-processing.html#text-processing-settings" target="_blank">Language Specific Text Processing Settings</a> in the <i>Amazon CloudSearch Developer Guide</i> </p> pub algorithmic_stemming: std::option::Option<crate::model::AlgorithmicStemming>, } impl AnalysisOptions { /// <p>A JSON object that defines synonym groups and aliases. A synonym group is an array of arrays, where each sub-array is a group of terms where each term in the group is considered a synonym of every other term in the group. The aliases value is an object that contains a collection of string:value pairs where the string specifies a term and the array of values specifies each of the aliases for that term. An alias is considered a synonym of the specified term, but the term is not considered a synonym of the alias. For more information about specifying synonyms, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-analysis-schemes.html#synonyms">Synonyms</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn synonyms(&self) -> std::option::Option<&str> { self.synonyms.as_deref() } /// <p>A JSON array of terms to ignore during indexing and searching. For example, <code>["a", "an", "the", "of"]</code>. The stopwords dictionary must explicitly list each word you want to ignore. Wildcards and regular expressions are not supported. </p> pub fn stopwords(&self) -> std::option::Option<&str> { self.stopwords.as_deref() } /// <p>A JSON object that contains a collection of string:value pairs that each map a term to its stem. For example, <code>{"term1": "stem1", "term2": "stem2", "term3": "stem3"}</code>. The stemming dictionary is applied in addition to any algorithmic stemming. This enables you to override the results of the algorithmic stemming to correct specific cases of overstemming or understemming. The maximum size of a stemming dictionary is 500 KB.</p> pub fn stemming_dictionary(&self) -> std::option::Option<&str> { self.stemming_dictionary.as_deref() } /// <p>A JSON array that contains a collection of terms, tokens, readings and parts of speech for Japanese tokenization. The Japanese tokenization dictionary enables you to override the default tokenization for selected terms. This is only valid for Japanese language fields.</p> pub fn japanese_tokenization_dictionary(&self) -> std::option::Option<&str> { self.japanese_tokenization_dictionary.as_deref() } /// <p>The level of algorithmic stemming to perform: <code>none</code>, <code>minimal</code>, <code>light</code>, or <code>full</code>. The available levels vary depending on the language. 
For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/text-processing.html#text-processing-settings" target="_blank">Language Specific Text Processing Settings</a> in the <i>Amazon CloudSearch Developer Guide</i> </p> pub fn algorithmic_stemming(&self) -> std::option::Option<&crate::model::AlgorithmicStemming> { self.algorithmic_stemming.as_ref() } } impl std::fmt::Debug for AnalysisOptions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("AnalysisOptions"); formatter.field("synonyms", &self.synonyms); formatter.field("stopwords", &self.stopwords); formatter.field("stemming_dictionary", &self.stemming_dictionary); formatter.field( "japanese_tokenization_dictionary", &self.japanese_tokenization_dictionary, ); formatter.field("algorithmic_stemming", &self.algorithmic_stemming); formatter.finish() } } /// See [`AnalysisOptions`](crate::model::AnalysisOptions) pub mod analysis_options { /// A builder for [`AnalysisOptions`](crate::model::AnalysisOptions) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) synonyms: std::option::Option<std::string::String>, pub(crate) stopwords: std::option::Option<std::string::String>, pub(crate) stemming_dictionary: std::option::Option<std::string::String>, pub(crate) japanese_tokenization_dictionary: std::option::Option<std::string::String>, pub(crate) algorithmic_stemming: std::option::Option<crate::model::AlgorithmicStemming>, } impl Builder { /// <p>A JSON object that defines synonym groups and aliases. A synonym group is an array of arrays, where each sub-array is a group of terms where each term in the group is considered a synonym of every other term in the group. The aliases value is an object that contains a collection of string:value pairs where the string specifies a term and the array of values specifies each of the aliases for that term. An alias is considered a synonym of the specified term, but the term is not considered a synonym of the alias. For more information about specifying synonyms, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-analysis-schemes.html#synonyms">Synonyms</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn synonyms(mut self, input: impl Into<std::string::String>) -> Self { self.synonyms = Some(input.into()); self } /// <p>A JSON object that defines synonym groups and aliases. A synonym group is an array of arrays, where each sub-array is a group of terms where each term in the group is considered a synonym of every other term in the group. The aliases value is an object that contains a collection of string:value pairs where the string specifies a term and the array of values specifies each of the aliases for that term. An alias is considered a synonym of the specified term, but the term is not considered a synonym of the alias. For more information about specifying synonyms, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-analysis-schemes.html#synonyms">Synonyms</a> in the <i>Amazon CloudSearch Developer Guide</i>.</p> pub fn set_synonyms(mut self, input: std::option::Option<std::string::String>) -> Self { self.synonyms = input; self } /// <p>A JSON array of terms to ignore during indexing and searching. For example, <code>["a", "an", "the", "of"]</code>. The stopwords dictionary must explicitly list each word you want to ignore. 
Wildcards and regular expressions are not supported. </p> pub fn stopwords(mut self, input: impl Into<std::string::String>) -> Self { self.stopwords = Some(input.into()); self } /// <p>A JSON array of terms to ignore during indexing and searching. For example, <code>["a", "an", "the", "of"]</code>. The stopwords dictionary must explicitly list each word you want to ignore. Wildcards and regular expressions are not supported. </p> pub fn set_stopwords(mut self, input: std::option::Option<std::string::String>) -> Self { self.stopwords = input; self } /// <p>A JSON object that contains a collection of string:value pairs that each map a term to its stem. For example, <code>{"term1": "stem1", "term2": "stem2", "term3": "stem3"}</code>. The stemming dictionary is applied in addition to any algorithmic stemming. This enables you to override the results of the algorithmic stemming to correct specific cases of overstemming or understemming. The maximum size of a stemming dictionary is 500 KB.</p> pub fn stemming_dictionary(mut self, input: impl Into<std::string::String>) -> Self { self.stemming_dictionary = Some(input.into()); self } /// <p>A JSON object that contains a collection of string:value pairs that each map a term to its stem. For example, <code>{"term1": "stem1", "term2": "stem2", "term3": "stem3"}</code>. The stemming dictionary is applied in addition to any algorithmic stemming. This enables you to override the results of the algorithmic stemming to correct specific cases of overstemming or understemming. The maximum size of a stemming dictionary is 500 KB.</p> pub fn set_stemming_dictionary( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.stemming_dictionary = input; self } /// <p>A JSON array that contains a collection of terms, tokens, readings and parts of speech for Japanese tokenization. The Japanese tokenization dictionary enables you to override the default tokenization for selected terms. This is only valid for Japanese language fields.</p> pub fn japanese_tokenization_dictionary( mut self, input: impl Into<std::string::String>, ) -> Self { self.japanese_tokenization_dictionary = Some(input.into()); self } /// <p>A JSON array that contains a collection of terms, tokens, readings and parts of speech for Japanese tokenization. The Japanese tokenization dictionary enables you to override the default tokenization for selected terms. This is only valid for Japanese language fields.</p> pub fn set_japanese_tokenization_dictionary( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.japanese_tokenization_dictionary = input; self } /// <p>The level of algorithmic stemming to perform: <code>none</code>, <code>minimal</code>, <code>light</code>, or <code>full</code>. The available levels vary depending on the language. For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/text-processing.html#text-processing-settings" target="_blank">Language Specific Text Processing Settings</a> in the <i>Amazon CloudSearch Developer Guide</i> </p> pub fn algorithmic_stemming(mut self, input: crate::model::AlgorithmicStemming) -> Self { self.algorithmic_stemming = Some(input); self } /// <p>The level of algorithmic stemming to perform: <code>none</code>, <code>minimal</code>, <code>light</code>, or <code>full</code>. The available levels vary depending on the language. 
For more information, see <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/text-processing.html#text-processing-settings" target="_blank">Language Specific Text Processing Settings</a> in the <i>Amazon CloudSearch Developer Guide</i> </p> pub fn set_algorithmic_stemming( mut self, input: std::option::Option<crate::model::AlgorithmicStemming>, ) -> Self { self.algorithmic_stemming = input; self } /// Consumes the builder and constructs a [`AnalysisOptions`](crate::model::AnalysisOptions) pub fn build(self) -> crate::model::AnalysisOptions { crate::model::AnalysisOptions { synonyms: self.synonyms, stopwords: self.stopwords, stemming_dictionary: self.stemming_dictionary, japanese_tokenization_dictionary: self.japanese_tokenization_dictionary, algorithmic_stemming: self.algorithmic_stemming, } } } } impl AnalysisOptions { /// Creates a new builder-style object to manufacture [`AnalysisOptions`](crate::model::AnalysisOptions) pub fn builder() -> crate::model::analysis_options::Builder { crate::model::analysis_options::Builder::default() } } #[allow(missing_docs)] // documentation missing in model #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum AlgorithmicStemming { #[allow(missing_docs)] // documentation missing in model Full, #[allow(missing_docs)] // documentation missing in model Light, #[allow(missing_docs)] // documentation missing in model Minimal, #[allow(missing_docs)] // documentation missing in model None, /// Unknown contains new variants that have been added since this code was generated. Unknown(String), } impl std::convert::From<&str> for AlgorithmicStemming { fn from(s: &str) -> Self { match s { "full" => AlgorithmicStemming::Full, "light" => AlgorithmicStemming::Light, "minimal" => AlgorithmicStemming::Minimal, "none" => AlgorithmicStemming::None, other => AlgorithmicStemming::Unknown(other.to_owned()), } } } impl std::str::FromStr for AlgorithmicStemming { type Err = std::convert::Infallible; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(AlgorithmicStemming::from(s)) } } impl AlgorithmicStemming { /// Returns the `&str` value of the enum member. pub fn as_str(&self) -> &str { match self { AlgorithmicStemming::Full => "full", AlgorithmicStemming::Light => "light", AlgorithmicStemming::Minimal => "minimal", AlgorithmicStemming::None => "none", AlgorithmicStemming::Unknown(s) => s.as_ref(), } } /// Returns all the `&str` values of the enum members. 
pub fn values() -> &'static [&'static str] { &["full", "light", "minimal", "none"] } } impl AsRef<str> for AlgorithmicStemming { fn as_ref(&self) -> &str { self.as_str() } } /// <p>An <a href="http://tools.ietf.org/html/rfc4646" target="_blank">IETF RFC 4646</a> language code or <code>mul</code> for multiple languages.</p> #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum AnalysisSchemeLanguage { #[allow(missing_docs)] // documentation missing in model Ar, #[allow(missing_docs)] // documentation missing in model Bg, #[allow(missing_docs)] // documentation missing in model Ca, #[allow(missing_docs)] // documentation missing in model Cs, #[allow(missing_docs)] // documentation missing in model Da, #[allow(missing_docs)] // documentation missing in model De, #[allow(missing_docs)] // documentation missing in model El, #[allow(missing_docs)] // documentation missing in model En, #[allow(missing_docs)] // documentation missing in model Es, #[allow(missing_docs)] // documentation missing in model Eu, #[allow(missing_docs)] // documentation missing in model Fa, #[allow(missing_docs)] // documentation missing in model Fi, #[allow(missing_docs)] // documentation missing in model Fr, #[allow(missing_docs)] // documentation missing in model Ga, #[allow(missing_docs)] // documentation missing in model Gl, #[allow(missing_docs)] // documentation missing in model He, #[allow(missing_docs)] // documentation missing in model Hi, #[allow(missing_docs)] // documentation missing in model Hu, #[allow(missing_docs)] // documentation missing in model Hy, #[allow(missing_docs)] // documentation missing in model Id, #[allow(missing_docs)] // documentation missing in model It, #[allow(missing_docs)] // documentation missing in model Ja, #[allow(missing_docs)] // documentation missing in model Ko, #[allow(missing_docs)] // documentation missing in model Lv, #[allow(missing_docs)] // documentation missing in model Mul, #[allow(missing_docs)] // documentation missing in model Nl, #[allow(missing_docs)] // documentation missing in model No, #[allow(missing_docs)] // documentation missing in model Pt, #[allow(missing_docs)] // documentation missing in model Ro, #[allow(missing_docs)] // documentation missing in model Ru, #[allow(missing_docs)] // documentation missing in model Sv, #[allow(missing_docs)] // documentation missing in model Th, #[allow(missing_docs)] // documentation missing in model Tr, #[allow(missing_docs)] // documentation missing in model ZhHans, #[allow(missing_docs)] // documentation missing in model ZhHant, /// Unknown contains new variants that have been added since this code was generated. 
Unknown(String), } impl std::convert::From<&str> for AnalysisSchemeLanguage { fn from(s: &str) -> Self { match s { "ar" => AnalysisSchemeLanguage::Ar, "bg" => AnalysisSchemeLanguage::Bg, "ca" => AnalysisSchemeLanguage::Ca, "cs" => AnalysisSchemeLanguage::Cs, "da" => AnalysisSchemeLanguage::Da, "de" => AnalysisSchemeLanguage::De, "el" => AnalysisSchemeLanguage::El, "en" => AnalysisSchemeLanguage::En, "es" => AnalysisSchemeLanguage::Es, "eu" => AnalysisSchemeLanguage::Eu, "fa" => AnalysisSchemeLanguage::Fa, "fi" => AnalysisSchemeLanguage::Fi, "fr" => AnalysisSchemeLanguage::Fr, "ga" => AnalysisSchemeLanguage::Ga, "gl" => AnalysisSchemeLanguage::Gl, "he" => AnalysisSchemeLanguage::He, "hi" => AnalysisSchemeLanguage::Hi, "hu" => AnalysisSchemeLanguage::Hu, "hy" => AnalysisSchemeLanguage::Hy, "id" => AnalysisSchemeLanguage::Id, "it" => AnalysisSchemeLanguage::It, "ja" => AnalysisSchemeLanguage::Ja, "ko" => AnalysisSchemeLanguage::Ko, "lv" => AnalysisSchemeLanguage::Lv, "mul" => AnalysisSchemeLanguage::Mul, "nl" => AnalysisSchemeLanguage::Nl, "no" => AnalysisSchemeLanguage::No, "pt" => AnalysisSchemeLanguage::Pt, "ro" => AnalysisSchemeLanguage::Ro, "ru" => AnalysisSchemeLanguage::Ru, "sv" => AnalysisSchemeLanguage::Sv, "th" => AnalysisSchemeLanguage::Th, "tr" => AnalysisSchemeLanguage::Tr, "zh-Hans" => AnalysisSchemeLanguage::ZhHans, "zh-Hant" => AnalysisSchemeLanguage::ZhHant, other => AnalysisSchemeLanguage::Unknown(other.to_owned()), } } } impl std::str::FromStr for AnalysisSchemeLanguage { type Err = std::convert::Infallible; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(AnalysisSchemeLanguage::from(s)) } } impl AnalysisSchemeLanguage { /// Returns the `&str` value of the enum member. pub fn as_str(&self) -> &str { match self { AnalysisSchemeLanguage::Ar => "ar", AnalysisSchemeLanguage::Bg => "bg", AnalysisSchemeLanguage::Ca => "ca", AnalysisSchemeLanguage::Cs => "cs", AnalysisSchemeLanguage::Da => "da", AnalysisSchemeLanguage::De => "de", AnalysisSchemeLanguage::El => "el", AnalysisSchemeLanguage::En => "en", AnalysisSchemeLanguage::Es => "es", AnalysisSchemeLanguage::Eu => "eu", AnalysisSchemeLanguage::Fa => "fa", AnalysisSchemeLanguage::Fi => "fi", AnalysisSchemeLanguage::Fr => "fr", AnalysisSchemeLanguage::Ga => "ga", AnalysisSchemeLanguage::Gl => "gl", AnalysisSchemeLanguage::He => "he", AnalysisSchemeLanguage::Hi => "hi", AnalysisSchemeLanguage::Hu => "hu", AnalysisSchemeLanguage::Hy => "hy", AnalysisSchemeLanguage::Id => "id", AnalysisSchemeLanguage::It => "it", AnalysisSchemeLanguage::Ja => "ja", AnalysisSchemeLanguage::Ko => "ko", AnalysisSchemeLanguage::Lv => "lv", AnalysisSchemeLanguage::Mul => "mul", AnalysisSchemeLanguage::Nl => "nl", AnalysisSchemeLanguage::No => "no", AnalysisSchemeLanguage::Pt => "pt", AnalysisSchemeLanguage::Ro => "ro", AnalysisSchemeLanguage::Ru => "ru", AnalysisSchemeLanguage::Sv => "sv", AnalysisSchemeLanguage::Th => "th", AnalysisSchemeLanguage::Tr => "tr", AnalysisSchemeLanguage::ZhHans => "zh-Hans", AnalysisSchemeLanguage::ZhHant => "zh-Hant", AnalysisSchemeLanguage::Unknown(s) => s.as_ref(), } } /// Returns all the `&str` values of the enum members. pub fn values() -> &'static [&'static str] { &[ "ar", "bg", "ca", "cs", "da", "de", "el", "en", "es", "eu", "fa", "fi", "fr", "ga", "gl", "he", "hi", "hu", "hy", "id", "it", "ja", "ko", "lv", "mul", "nl", "no", "pt", "ro", "ru", "sv", "th", "tr", "zh-Hans", "zh-Hant", ] } } impl AsRef<str> for AnalysisSchemeLanguage { fn as_ref(&self) -> &str { self.as_str() } }
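// A minimal usage sketch for the builders above (illustrative only, not part of the
// generated model). It only calls methods defined in this file; the scheme name and the
// option values are taken from the examples quoted in the field documentation.
fn example_analysis_scheme() -> crate::model::AnalysisScheme {
    // Synonyms, stopwords, and stemming options, using the documented JSON examples.
    let options = crate::model::AnalysisOptions::builder()
        .stopwords(r#"["a", "an", "the", "of"]"#)
        .stemming_dictionary(r#"{"term1": "stem1", "term2": "stem2", "term3": "stem3"}"#)
        .algorithmic_stemming(crate::model::AlgorithmicStemming::Light)
        .build();
    // An analysis scheme couples a name with an IETF RFC 4646 language code.
    crate::model::AnalysisScheme::builder()
        .analysis_scheme_name("my_scheme")
        .analysis_scheme_language(crate::model::AnalysisSchemeLanguage::En)
        .analysis_options(options)
        .build()
}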
52.020071
763
0.645652
e86ea0d8fdfe2e02cc5ba40b5ba9ee8924b48935
5,902
//! Array-based data structures using densely numbered entity references as mapping keys. //! //! This crate defines a number of data structures based on arrays. The arrays are not indexed by //! `usize` as usual, but by *entity references* which are integers wrapped in new-types. This has //! a couple of advantages: //! //! - Improved type safety. The various map and set types accept a specific key type, so there is //! no confusion about the meaning of an array index, as there is with plain arrays. //! - Smaller indexes. The normal `usize` index is often 64 bits, which is way too large for most //! purposes. The entity reference types can be smaller, allowing for more compact data //! structures. //! //! The `EntityRef` trait should be implemented by types to be used as indexes. The `entity_impl!` //! macro provides convenient defaults for types wrapping `u32`, which is the common case. //! //! - [`PrimaryMap`](struct.PrimaryMap.html) is used to keep track of a vector of entities, //! assigning a unique entity reference to each. //! - [`SecondaryMap`](struct.SecondaryMap.html) is used to associate secondary information with an entity. //! The map is implemented as a simple vector, so it does not keep track of which entities have //! been inserted. Instead, any unknown entities map to the default value. //! - [`SparseMap`](struct.SparseMap.html) is used to associate secondary information with a small //! number of entities. It tracks accurately which entities have been inserted. This is a //! specialized data structure which can use a lot of memory, so read the documentation before //! using it. //! - [`EntitySet`](struct.EntitySet.html) is used to represent a secondary set of entities. //! The set is implemented as a simple vector, so it does not keep track of which entities have //! been inserted into the primary map. Instead, any unknown entities are not in the set. //! - [`EntityList`](struct.EntityList.html) is a compact representation of lists of entity //! references allocated from an associated memory pool. It has a much smaller footprint than //! `Vec`. #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)] #![warn(unused_import_braces)] #![cfg_attr(feature = "std", deny(unstable_features))] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] #![cfg_attr( feature = "cargo-clippy", allow(new_without_default, new_without_default_derive) )] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::option_map_unwrap_or, clippy::option_map_unwrap_or_else, clippy::print_stdout, clippy::unicode_not_nfc, clippy::use_self ) )] // Turns on no_std and alloc features if std is not available. #![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), feature(alloc))] /// This replaces `std` in builds with `core`. #[cfg(not(feature = "std"))] mod std { extern crate alloc; pub use self::alloc::{boxed, string, vec}; pub use core::*; } // Re-export core so that the macros work with both std and no_std crates #[doc(hidden)] pub extern crate core as __core; /// A type wrapping a small integer index should implement `EntityRef` so it can be used as the key /// of a `SecondaryMap` or `SparseMap`. pub trait EntityRef: Copy + Eq { /// Create a new entity reference from a small integer. /// This should crash if the requested index is not representable. fn new(index: usize) -> Self; /// Get the index that was used to create this entity reference. 
fn index(self) -> usize; } /// Macro which provides the common implementation of a 32-bit entity reference. #[macro_export] macro_rules! entity_impl { // Basic traits. ($entity:ident) => { impl $crate::EntityRef for $entity { fn new(index: usize) -> Self { debug_assert!(index < ($crate::__core::u32::MAX as usize)); $entity(index as u32) } fn index(self) -> usize { self.0 as usize } } impl $crate::packed_option::ReservedValue for $entity { fn reserved_value() -> $entity { $entity($crate::__core::u32::MAX) } } impl $entity { /// Create an instance of this entity reference from a raw `u32` index. #[allow(dead_code)] pub fn from_u32(x: u32) -> Self { debug_assert!(x < $crate::__core::u32::MAX); $entity(x) } /// Return the underlying index value as a `u32`. #[allow(dead_code)] pub fn as_u32(self) -> u32 { self.0 } } }; // Include basic `Display` impl using the given display prefix. // Display an `Ebb` reference as "ebb12". ($entity:ident, $display_prefix:expr) => { entity_impl!($entity); impl $crate::__core::fmt::Display for $entity { fn fmt(&self, f: &mut $crate::__core::fmt::Formatter) -> $crate::__core::fmt::Result { write!(f, concat!($display_prefix, "{}"), self.0) } } impl $crate::__core::fmt::Debug for $entity { fn fmt(&self, f: &mut $crate::__core::fmt::Formatter) -> $crate::__core::fmt::Result { (self as &$crate::__core::fmt::Display).fmt(f) } } }; } pub mod packed_option; mod boxed_slice; mod iter; mod keys; mod list; mod map; mod primary; mod set; mod sparse; pub use self::boxed_slice::BoxedSlice; pub use self::iter::{Iter, IterMut}; pub use self::keys::Keys; pub use self::list::{EntityList, ListPool}; pub use self::map::SecondaryMap; pub use self::primary::PrimaryMap; pub use self::set::EntitySet; pub use self::sparse::{SparseMap, SparseMapValue, SparseSet};
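// A brief, illustrative sketch of the `entity_impl!` macro and `PrimaryMap` described in
// the crate docs above. The `Value` type and the "v" prefix are invented for the example,
// and it assumes `PrimaryMap` provides `new`, `push` (returning the newly assigned entity
// reference), and indexing by that reference.
/// An example entity reference (illustrative only).
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Value(u32);
entity_impl!(Value, "v");

#[cfg(test)]
mod entity_impl_example {
    use super::*;

    #[test]
    fn push_assigns_dense_references() {
        // Each `push` hands back a small, typed index instead of a raw `usize`.
        let mut areas: PrimaryMap<Value, u32> = PrimaryMap::new();
        let v0 = areas.push(42);
        assert_eq!(v0.index(), 0);
        assert_eq!(areas[v0], 42);
    }
}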
37.833333
105
0.646391
de5bfe171ffe12866479a316da0ec0782d0733f4
1,047
//! Creates a new BCF file. //! //! This writes the BCF file format, a VCF header, and a single VCF record to stdout. //! //! Verify the output by piping to `bcftools view --no-version`. use std::{convert::TryFrom, io}; use noodles_bcf as bcf; use noodles_vcf::{self as vcf, header::Contig, record::Position}; fn main() -> Result<(), Box<dyn std::error::Error>> { let stdout = io::stdout(); let handle = stdout.lock(); let mut writer = bcf::Writer::new(handle); writer.write_file_format()?; let header = vcf::Header::builder() .add_filter(vcf::header::Filter::pass()) .add_contig(Contig::new(String::from("sq0"))) .build(); writer.write_header(&header)?; let raw_header = header.to_string(); let string_map = raw_header.parse()?; let record = vcf::Record::builder() .set_chromosome("sq0".parse()?) .set_position(Position::try_from(1)?) .set_reference_bases("A".parse()?) .build()?; writer.write_vcf_record(&header, &string_map, &record)?; Ok(()) }
26.846154
74
0.621777
eb4a125841f036745d0e6d3185a84cc1d510484f
1,961
#[doc = "Reader of register PC6_SEL"] pub type R = crate::R<u32, super::PC6_SEL>; #[doc = "Writer for register PC6_SEL"] pub type W = crate::W<u32, super::PC6_SEL>; #[doc = "Register PC6_SEL `reset()`'s with value 0"] impl crate::ResetValue for super::PC6_SEL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `Reserved32`"] pub type RESERVED32_R = crate::R<u32, u32>; #[doc = "Write proxy for field `Reserved32`"] pub struct RESERVED32_W<'a> { w: &'a mut W, } impl<'a> RESERVED32_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07ff_ffff << 5)) | (((value as u32) & 0x07ff_ffff) << 5); self.w } } #[doc = "Reader of field `PC6_sel`"] pub type PC6_SEL_R = crate::R<u8, u8>; #[doc = "Write proxy for field `PC6_sel`"] pub struct PC6_SEL_W<'a> { w: &'a mut W, } impl<'a> PC6_SEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x1f) | ((value as u32) & 0x1f); self.w } } impl R { #[doc = "Bits 5:31 - 31:5\\] Reserved"] #[inline(always)] pub fn reserved32(&self) -> RESERVED32_R { RESERVED32_R::new(((self.bits >> 5) & 0x07ff_ffff) as u32) } #[doc = "Bits 0:4 - 4:0\\] Select one peripheral signal output for PC6."] #[inline(always)] pub fn pc6_sel(&self) -> PC6_SEL_R { PC6_SEL_R::new((self.bits & 0x1f) as u8) } } impl W { #[doc = "Bits 5:31 - 31:5\\] Reserved"] #[inline(always)] pub fn reserved32(&mut self) -> RESERVED32_W { RESERVED32_W { w: self } } #[doc = "Bits 0:4 - 4:0\\] Select one peripheral signal output for PC6."] #[inline(always)] pub fn pc6_sel(&mut self) -> PC6_SEL_W { PC6_SEL_W { w: self } } }
30.169231
98
0.574707
75cfc851657dd7160ac6a02371c9dfe6ab469cad
1,569
pub mod u32 { use crate::hashes::dummy_hasher::DummyHasherBuilder; type HashIntegerType = u32; const MULTIPLIER: HashIntegerType = 0xdc7d07b1; const MULT_INV: HashIntegerType = 0xfd0ee151; pub const MULT_A: HashIntegerType = 0x58107bed; pub const MULT_C: HashIntegerType = 0x6da984cf; pub const MULT_G: HashIntegerType = 0x7d6c2d5d; pub const MULT_T: HashIntegerType = 0x3ea1c319; include!("base/cn_rkhash_base.rs"); } pub mod u64 { use crate::hashes::dummy_hasher::DummyHasherBuilder; type HashIntegerType = u64; const MULTIPLIER: HashIntegerType = 0x660b123642ca9149; const MULT_INV: HashIntegerType = 0x397f178c6ae330f9; pub const MULT_A: HashIntegerType = 0x34889973de695e1b; pub const MULT_C: HashIntegerType = 0x72dacb3a60672825; pub const MULT_G: HashIntegerType = 0x61bf33e452d231a5; pub const MULT_T: HashIntegerType = 0x759db32ccd931bb5; include!("base/cn_rkhash_base.rs"); } pub mod u128 { use crate::hashes::dummy_hasher::DummyHasherBuilder; type HashIntegerType = u128; const MULTIPLIER: HashIntegerType = 0x3eb9402f3e733993add64d3ca00e1b6b; const MULT_INV: HashIntegerType = 0x9cb6ff6f1b1a6d733e0952e899c3943; pub const MULT_A: HashIntegerType = 0x4751137d01d863c5b8c36de2b7d399df; pub const MULT_C: HashIntegerType = 0x37ea3a13226503fb783f5cb69f4552bd; pub const MULT_G: HashIntegerType = 0x50796b285343f09a0c53113ae736572b; pub const MULT_T: HashIntegerType = 0x1e62d96a5e1f5ade2d4e68d8f88110b7; include!("base/cn_rkhash_base.rs"); }
34.866667
75
0.765456
29c6e7cc9ab49c9bd19587e7815e5003f3838e34
464
use crate::jcli_app::rest::{Error, RestArgs}; use structopt::StructOpt; /// Shutdown node #[derive(StructOpt)] #[structopt(rename_all = "kebab-case")] pub enum Shutdown { Post { #[structopt(flatten)] args: RestArgs, }, } impl Shutdown { pub fn exec(self) -> Result<(), Error> { let Shutdown::Post { args } = self; args.client()?.get(&["v0", "shutdown"]).execute()?; println!("Success"); Ok(()) } }
21.090909
59
0.5625
f9c064a105cf747a62504cc77188ad423b959009
643
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags: -Z parse-only trait Foo { pub fn foo(); //~^ ERROR expected one of `async`, `const`, `extern`, `fn`, `type`, `unsafe`, or `}`, found } fn main() {}
33.842105
96
0.692068
28610dc6a974cc81e6685c86a3d9b24e6b22a32c
365
use serde::Deserialize; use tokio::fs; #[derive(Deserialize)] pub struct Config { pub server: String, pub token: String, pub org: String, pub repo: String, } pub async fn read_config_file() -> anyhow::Result<Config> { let config_file = fs::read_to_string("config.toml").await?; toml::from_str(&config_file).map_err(anyhow::Error::from) }
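// A hypothetical caller for `read_config_file`, shown for illustration. It assumes the
// crate runs under a tokio runtime and that a `config.toml` with the four fields above
// sits in the working directory.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let config = read_config_file().await?;
    // Use the parsed values; the token is deliberately not printed.
    println!("server: {}, repo: {}/{}", config.server, config.org, config.repo);
    Ok(())
}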
21.470588
63
0.682192
db516f2ae34995044c3abf3ce5fbf9af3cee19a5
839
/* * Copyright (c) 2021 gematik GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ use serde::{Deserialize, Serialize}; use super::primitives::Id; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PractitionerRole { pub id: Id, pub practitioner: String, pub organization: String, }
29.964286
75
0.724672
7624c67e628967759c5a806a0adf4a264efff647
195
use crate::my_strategy::{ Vec2, Location, }; pub trait Positionable { fn position(&self) -> Vec2; fn location(&self) -> Location { self.position().as_location() } }
15
37
0.584615
1c2722e7cf41afb3e4276ee7cfd6170db69081c2
6,771
extern crate tiny_http; use std::io::{Read, Write}; use std::net::{Shutdown, TcpStream}; use std::thread; use std::time::Duration; #[allow(dead_code)] mod support; #[test] fn connection_close_header() { let mut client = support::new_client_to_hello_world_server(); (write!(client, "GET / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n")).unwrap(); thread::sleep(Duration::from_millis(1000)); (write!(client, "GET / HTTP/1.1\r\nConnection: close\r\n\r\n")).unwrap(); // if the connection was not closed, this will err with timeout // client.set_keepalive(Some(1)).unwrap(); FIXME: reenable this let mut out = Vec::new(); client.read_to_end(&mut out).unwrap(); } #[test] fn http_1_0_connection_close() { let mut client = support::new_client_to_hello_world_server(); (write!(client, "GET / HTTP/1.0\r\nHost: localhost\r\n\r\n")).unwrap(); // if the connection was not closed, this will err with timeout // client.set_keepalive(Some(1)).unwrap(); FIXME: reenable this let mut out = Vec::new(); client.read_to_end(&mut out).unwrap(); } #[test] fn detect_connection_closed() { let mut client = support::new_client_to_hello_world_server(); (write!(client, "GET / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n")).unwrap(); thread::sleep(Duration::from_millis(1000)); client.shutdown(Shutdown::Write).unwrap(); // if the connection was not closed, this will err with timeout // client.set_keepalive(Some(1)).unwrap(); FIXME: reenable this let mut out = Vec::new(); client.read_to_end(&mut out).unwrap(); } #[test] fn poor_network_test() { let mut client = support::new_client_to_hello_world_server(); (write!(client, "G")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "ET /he")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "llo HT")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "TP/1.")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "1\r\nHo")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "st: localho")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "st\r\nConnec")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "tion: close\r")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "\n\r")).unwrap(); thread::sleep(Duration::from_millis(100)); (write!(client, "\n")).unwrap(); // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this let mut data = String::new(); client.read_to_string(&mut data).unwrap(); assert!(data.ends_with("hello world")); } #[test] fn pipelining_test() { let mut client = support::new_client_to_hello_world_server(); (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n")).unwrap(); (write!(client, "GET /hello HTTP/1.1\r\nHost: localhost\r\n\r\n")).unwrap(); (write!( client, "GET /world HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" )) .unwrap(); // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this let mut data = String::new(); client.read_to_string(&mut data).unwrap(); assert_eq!(data.split("hello world").count(), 4); } #[test] fn server_crash_results_in_response() { let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); let port = server.server_addr().port(); let mut client = TcpStream::connect(("127.0.0.1", port)).unwrap(); thread::spawn(move || { server.recv().unwrap(); // oops, server crash }); (write!( client, "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" )) .unwrap(); // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this let mut content = String::new(); 
client.read_to_string(&mut content).unwrap(); assert!(&content[9..].starts_with("5")); // 5xx status code } #[test] fn responses_reordered() { let (server, mut client) = support::new_one_server_one_client(); (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n")).unwrap(); (write!( client, "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" )) .unwrap(); thread::spawn(move || { let rq1 = server.recv().unwrap(); let rq2 = server.recv().unwrap(); thread::spawn(move || { rq2.respond(tiny_http::Response::from_string(format!("second request"))) .unwrap(); }); thread::sleep(Duration::from_millis(100)); thread::spawn(move || { rq1.respond(tiny_http::Response::from_string(format!("first request"))) .unwrap(); }); }); // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this let mut content = String::new(); client.read_to_string(&mut content).unwrap(); assert!(content.ends_with("second request")); } #[test] fn no_transfer_encoding_on_204() { let (server, mut client) = support::new_one_server_one_client(); (write!( client, "GET / HTTP/1.1\r\nHost: localhost\r\nTE: chunked\r\nConnection: close\r\n\r\n" )) .unwrap(); thread::spawn(move || { let rq = server.recv().unwrap(); let resp = tiny_http::Response::empty(tiny_http::StatusCode(204)); rq.respond(resp).unwrap(); }); let mut content = String::new(); client.read_to_string(&mut content).unwrap(); assert!(content.starts_with("HTTP/1.1 204")); assert!(!content.contains("Transfer-Encoding: chunked")); } /* FIXME: uncomment and fix #[test] fn connection_timeout() { let (server, mut client) = { let server = tiny_http::ServerBuilder::new() .with_client_connections_timeout(3000) .with_random_port().build().unwrap(); let port = server.server_addr().port(); let client = TcpStream::connect(("127.0.0.1", port)).unwrap(); (server, client) }; let (tx_stop, rx_stop) = mpsc::channel(); // executing server in parallel thread::spawn(move || { loop { server.try_recv(); thread::sleep(Duration::from_millis(100)); if rx_stop.try_recv().is_ok() { break } } }); // waiting for the 408 response let mut content = String::new(); client.read_to_string(&mut content).unwrap(); assert!(&content[9..].starts_with("408")); // stopping server tx_stop.send(()); } */ #[test] fn chunked_threshold() { let resp = tiny_http::Response::from_string("test".to_string()); assert_eq!(resp.chunked_threshold(), 32768); assert_eq!(resp.with_chunked_threshold(42).chunked_threshold(), 42); }
30.638009
87
0.621769
8a8fb776b9be9837415d9573e3f03cd2d9bd896e
4,483
use crate::Id; use mqtt4bytes::{Packet, Publish, QoS, Subscribe}; use rumqttlog::{ Connection, ConnectionAck, Data, Event, Notification, Receiver, RecvError, SendError, Sender, }; #[derive(Debug, thiserror::Error)] pub enum LinkError { #[error("Unexpected router message")] NotConnectionAck(Notification), #[error("Connack error {0}")] ConnectionAck(String), #[error("Channel send error")] Send(#[from] SendError<(Id, Event)>), #[error("Channel recv error")] Recv(#[from] RecvError), } pub struct LinkTx { id: usize, router_tx: Sender<(Id, Event)>, client_id: String, } impl LinkTx { pub(crate) fn new(client_id: &str, router_tx: Sender<(Id, Event)>) -> LinkTx { LinkTx { id: 0, router_tx, client_id: client_id.to_owned(), } } pub fn connect(&mut self, max_inflight_requests: usize) -> Result<LinkRx, LinkError> { // connection queue capacity should match that maximum inflight requests let (connection, link_rx) = Connection::new_remote(&self.client_id, true, max_inflight_requests); let message = (0, Event::Connect(connection)); self.router_tx.send(message).unwrap(); // Right now link identifies failure with dropped rx in router, which is probably ok // We need this here to get id assigned by router match link_rx.recv()? { Notification::ConnectionAck(ack) => match ack { ConnectionAck::Success((id, _, _)) => self.id = id, ConnectionAck::Failure(reason) => return Err(LinkError::ConnectionAck(reason)), }, message => return Err(LinkError::NotConnectionAck(message)), }; // Send initialization requests from tracker [topics request and acks request] let rx = LinkRx::new(self.id, self.router_tx.clone(), link_rx); Ok(rx) } /// Sends a MQTT Publish to the router pub fn publish<S, V>(&mut self, topic: S, retain: bool, payload: V) -> Result<(), LinkError> where S: Into<String>, V: Into<Vec<u8>>, { let mut publish = Publish::new(topic, QoS::AtLeastOnce, payload); publish.retain = retain; let message = Event::Data(vec![Packet::Publish(publish)]); self.router_tx.send((self.id, message))?; Ok(()) } /// Sends a MQTT Subscribe to the eventloop pub fn subscribe<S: Into<String>>(&mut self, filter: S) -> Result<(), LinkError> { let subscribe = Subscribe::new(filter.into(), QoS::AtMostOnce); let packet = Packet::Subscribe(subscribe); let message = Event::Data(vec![packet]); self.router_tx.send((self.id, message))?; Ok(()) } } pub struct LinkRx { id: usize, router_tx: Sender<(Id, Event)>, link_rx: Receiver<Notification>, } impl LinkRx { pub(crate) fn new( id: usize, router_tx: Sender<(Id, Event)>, link_rx: Receiver<Notification>, ) -> LinkRx { LinkRx { id, router_tx, link_rx, } } pub fn recv(&mut self) -> Result<Option<Data>, LinkError> { let message = self.link_rx.recv()?; let message = self.handle_router_response(message)?; Ok(message) } pub async fn async_recv(&mut self) -> Result<Option<Data>, LinkError> { let message = self.link_rx.async_recv().await?; let message = self.handle_router_response(message)?; Ok(message) } fn handle_router_response(&mut self, message: Notification) -> Result<Option<Data>, LinkError> { match message { Notification::ConnectionAck(_) => Ok(None), Notification::Message(_) => { unreachable!("Local links are always clean"); } Notification::Data(reply) => { trace!( "{:11} {:14} Id = {}, Count = {}", "data", "reply", self.id, reply.payload.len() ); Ok(Some(reply)) } Notification::Pause => { let message = (self.id, Event::Ready); self.router_tx.send(message)?; Ok(None) } notification => { warn!("{:?} not supported in local link", notification); Ok(None) } } } }
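// A crate-internal usage sketch for `LinkTx`/`LinkRx` (their constructors are
// `pub(crate)`). The router `Sender` is assumed to come from an already running
// rumqttlog router; the client id and topic names are placeholders.
fn local_link_demo(router_tx: Sender<(Id, Event)>) -> Result<(), LinkError> {
    let mut tx = LinkTx::new("example-client", router_tx);
    // `connect` registers the link with the router and returns the receiving half.
    let mut rx = tx.connect(10)?;
    tx.subscribe("hello/+/world")?;
    tx.publish("hello/1/world", false, vec![1u8, 2, 3])?;
    // Poll the router for the next batch of data destined to this link.
    if let Some(_data) = rx.recv()? {
        // handle the payload batch here
    }
    Ok(())
}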
31.794326
100
0.558108
ab12ace24a63a8b4695471b1e9446245cb798389
1,109
#![allow(non_snake_case)] use crate::comctl; use crate::comctl::decl::INITCOMMONCONTROLSEX; use crate::kernel::decl::{LANGID, WinResult}; use crate::kernel::privs::bool_to_winresult; /// [`InitCommonControls`](https://docs.microsoft.com/en-us/windows/win32/api/commctrl/nf-commctrl-initcommoncontrols) /// function. #[cfg_attr(docsrs, doc(cfg(feature = "comctl")))] pub fn InitCommonControls() { unsafe { comctl::ffi::InitCommonControls() } } /// [`InitCommonControlsEx`](https://docs.microsoft.com/en-us/windows/win32/api/commctrl/nf-commctrl-initcommoncontrolsex) /// function. #[cfg_attr(docsrs, doc(cfg(feature = "comctl")))] pub fn InitCommonControlsEx(icce: &INITCOMMONCONTROLSEX) -> WinResult<()> { bool_to_winresult( unsafe { comctl::ffi::InitCommonControlsEx(icce as *const _ as _) } ) } /// [`InitMUILanguage`](https://docs.microsoft.com/en-us/windows/win32/api/commctrl/nf-commctrl-initmuilanguage) /// function. #[cfg_attr(docsrs, doc(cfg(feature = "comctl")))] pub fn InitMUILanguage(ui_lang: LANGID) { unsafe { comctl::ffi::InitMUILanguage(ui_lang.0) } }
36.966667
123
0.716862
de96b20f2c0979de75a30e5f2be567be1cbb566f
7,968
use branca::Branca; use rocket::{ http::{Header, Status}, request::{FromRequest, Outcome, Request}, Response, }; use serde::{Deserialize, Serialize}; use std::collections::{BTreeMap, BTreeSet}; static COOKIE_NAME: &str = "authToken"; lazy_static::lazy_static! { static ref ACCESS_LEVELS: BTreeMap<String, BTreeSet<String>> = build_access_levels(); } pub struct AuthenticatedAccount { pub id: i64, pub access: &'static std::collections::BTreeSet<String>, } #[derive(Debug)] pub enum AuthenticationError { MissingCookie, InvalidToken, DatabaseError(sqlx::Error), } #[derive(Debug)] pub enum AuthorizationError { AccessDenied, DatabaseError(sqlx::Error), } #[derive(Serialize, Deserialize)] struct AuthToken { version: i32, account_id: i64, } pub struct CookieSetter(pub String, pub bool); impl<'r> rocket::response::Responder<'r, 'static> for CookieSetter { fn respond_to(self, _: &'r rocket::request::Request<'_>) -> rocket::response::Result<'static> { // XXX: Secure is set via a parameter in CookieSetter, but we can get this from the App let mut response = Response::new(); let mut cookie = format!( "{}={}; Path=/; HttpOnly; SameSite=Strict; Max-Age=2678400", COOKIE_NAME, self.0 ); if self.1 { cookie += "; Secure"; } response.set_header(Header::new("Set-Cookie", cookie)); Ok(response) } } fn decode_token(token: &str, secret: &[u8]) -> Result<AuthToken, AuthenticationError> { let branca = Branca::new(secret).unwrap(); let payload = match branca.decode(token, 31 * 86400) { Err(_) => return Err(AuthenticationError::InvalidToken), Ok(p) => p, }; let decoded: AuthToken = match rmp_serde::from_read_ref(&payload) { Err(_) => return Err(AuthenticationError::InvalidToken), Ok(d) => d, }; if decoded.version != 1 || decoded.account_id <= 0 { return Err(AuthenticationError::InvalidToken); } Ok(decoded) } pub fn create_cookie(app: &crate::app::Application, account_id: i64) -> CookieSetter { let mut branca = Branca::new(&app.token_secret).unwrap(); let token = AuthToken { version: 1, account_id, }; let payload = rmp_serde::to_vec_named(&token).unwrap(); let encoded = branca.encode(&payload).unwrap(); CookieSetter(encoded, app.config.esi.url.starts_with("https:")) } #[rocket::async_trait] impl<'r> FromRequest<'r> for AuthenticatedAccount { type Error = AuthenticationError; async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Error> { let app = req .guard::<&rocket::State<crate::app::Application>>() .await .unwrap(); let token = match req.cookies().get(COOKIE_NAME) { None => { return Outcome::Failure((Status::Unauthorized, AuthenticationError::MissingCookie)) } Some(t) => match decode_token(t.value(), &app.token_secret) { Ok(d) => d, Err(e) => return Outcome::Failure((Status::Unauthorized, e)), }, }; let access_level = match sqlx::query!( "SELECT * FROM admins WHERE character_id=?", token.account_id ) .fetch_optional(app.get_db()) .await { Err(e) => { return Outcome::Failure(( Status::InternalServerError, AuthenticationError::DatabaseError(e), )) } Ok(Some(r)) => r.level, Ok(None) => "user".to_string(), }; let access_keys = match ACCESS_LEVELS.get(&access_level) { Some(l) => l, None => { return Outcome::Failure((Status::Unauthorized, AuthenticationError::InvalidToken)) } }; Outcome::Success(AuthenticatedAccount { id: token.account_id, access: access_keys, }) } } impl AuthenticatedAccount { pub fn require_access(&self, key: &'static str) -> Result<(), AuthorizationError> { match self.access.contains(key) { true => Ok(()), false => Err(AuthorizationError::AccessDenied), } } } fn build_access_levels() -> 
BTreeMap<String, BTreeSet<String>> { fn build_level( working: &mut BTreeMap<String, BTreeSet<String>>, source: &str, dest: &str, keys: Vec<&str>, ) { let mut level = working.get(source).cloned().unwrap(); for key in keys { level.insert(key.to_string()); } working.insert(dest.to_string(), level); } let mut result = BTreeMap::new(); result.insert("user".to_string(), BTreeSet::new()); // PILOT ROLES (combinations) L / B / W / LB / LBW / BW / LW // MULTI ROLE TABLE WOULD REQUIRE A LOT OF CODE REWRITE build_level(&mut result, "user", "l", vec!["waitlist-tag:LOGI"]); build_level(&mut result, "user", "b", vec!["waitlist-tag:BASTION"]); build_level(&mut result, "user", "w", vec!["waitlist-tag:WEB"]); build_level(&mut result, "l", "lb", vec!["waitlist-tag:BASTION"]); build_level(&mut result, "lb", "lbw", vec!["waitlist-tag:WEB"]); build_level(&mut result, "b", "bw", vec!["waitlist-tag:WEB"]); build_level(&mut result, "l", "lw", vec!["waitlist-tag:WEB"]); // END OF PILOT ROLES build_level( &mut result, "l", "trainee", vec![ "fleet-configure", "fleet-invite", "fleet-view", "pilot-view", "waitlist-view", "waitlist-tag:TRAINEE", ], ); build_level( &mut result, "trainee", "trainee-advanced", vec!["fit-view", "skill-view", "waitlist-manage"], ); build_level( &mut result, "trainee-advanced", "fc", vec![ "bans-view", "bans-manage", "fleet-activity-view", "fleet-comp-history", "fit-history-view", "search", "skill-history-view", "waitlist-edit", "stats-view", "access-view", "waitlist-tag:HQ-FC", "access-manage", "access-manage:l", "access-manage:b", "access-manage:w", "access-manage:lb", "access-manage:lbw", "access-manage:bw", "access-manage:lw", "notes-view", "notes-add", ], ); build_level( &mut result, "fc", "fc-trainer", vec![ "access-manage:trainee", "access-manage:trainee-advanced", "access-manage:fc", ], ); build_level( &mut result, "fc-trainer", "council", vec!["access-manage:fc-trainer"], ); build_level(&mut result, "council", "admin", vec!["access-manage-all"]); result } pub fn get_access_keys(level: &str) -> Option<&'static BTreeSet<String>> { ACCESS_LEVELS.get(level) } pub async fn authorize_character( db: &crate::DB, account: &AuthenticatedAccount, character_id: i64, permission_override: Option<&str>, ) -> Result<(), AuthorizationError> { if account.id == character_id { return Ok(()); } if permission_override.is_some() && account.access.contains(permission_override.unwrap()) { return Ok(()); } let alt_character = sqlx::query!( "SELECT alt_id FROM alt_character WHERE account_id = ? AND alt_id = ?", account.id, character_id ) .fetch_optional(db) .await; match alt_character { Err(e) => Err(AuthorizationError::DatabaseError(e)), Ok(None) => Err(AuthorizationError::AccessDenied), Ok(Some(_)) => Ok(()), } }
28.55914
99
0.558484
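For context on the record above, a minimal sketch of how a Rocket handler might consume the `AuthenticatedAccount` request guard; the route path, handler name, and the `crate::core::auth` import path are assumptions, while `require_access` and the `waitlist-view` key come straight from the module itself.

// Illustrative only: the module path and route are hypothetical.
use rocket::{get, http::Status};

use crate::core::auth::AuthenticatedAccount;

#[get("/api/waitlist")]
fn waitlist(account: AuthenticatedAccount) -> Result<String, Status> {
    // The guard has already validated the cookie and resolved the account's
    // access level; the handler only checks the single key it needs.
    account
        .require_access("waitlist-view")
        .map_err(|_| Status::Forbidden)?;
    Ok(format!("account {} may view the waitlist", account.id))
}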
8f3ec74387edea30d4ddbaac5f43a4af2f1ee81c
2,514
use super::Fix;
use crate::common::*;

pub(crate) struct LowercaseKeyFixer<'a> {
    name: &'a str,
}

impl Default for LowercaseKeyFixer<'_> {
    fn default() -> Self {
        Self {
            name: "LowercaseKey",
        }
    }
}

impl Fix for LowercaseKeyFixer<'_> {
    fn name(&self) -> &str {
        self.name
    }

    fn fix_line(&self, line: &mut LineEntry) -> Option<()> {
        let key = line.get_key()?;
        let key = key.to_uppercase();

        line.raw_string = format!("{}={}", key, line.get_value()?);

        Some(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn fix_line_test() {
        let fixer = LowercaseKeyFixer::default();
        let mut line = LineEntry {
            number: 1,
            file: FileEntry {
                path: PathBuf::from(".env"),
                file_name: ".env".to_string(),
                total_lines: 1,
            },
            raw_string: String::from("foO=BAR"),
        };

        assert_eq!(Some(()), fixer.fix_line(&mut line));
        assert_eq!("FOO=BAR", line.raw_string);
    }

    #[test]
    fn fix_warnings_test() {
        let fixer = LowercaseKeyFixer::default();
        let mut lines = vec![
            LineEntry {
                number: 1,
                file: FileEntry {
                    path: PathBuf::from(".env"),
                    file_name: ".env".to_string(),
                    total_lines: 3,
                },
                raw_string: String::from("foO=BAR"),
            },
            LineEntry {
                number: 2,
                file: FileEntry {
                    path: PathBuf::from(".env"),
                    file_name: ".env".to_string(),
                    total_lines: 3,
                },
                raw_string: String::from("Z=Y"),
            },
            LineEntry {
                number: 3,
                file: FileEntry {
                    path: PathBuf::from(".env"),
                    file_name: ".env".to_string(),
                    total_lines: 3,
                },
                raw_string: String::from("\n"),
            },
        ];
        let mut warning = Warning::new(
            lines[0].clone(),
            "LowercaseKey",
            String::from("The FOO key should be in uppercase"),
        );

        assert_eq!(Some(1), fixer.fix_warnings(vec![&mut warning], &mut lines));
        assert_eq!("FOO=BAR", lines[0].raw_string);
        assert!(warning.is_fixed);
    }
}
26.744681
80
0.448687
e401bc2351bcaf226b3a60889accf46b0a12ff61
3,825
use super::METADATA_SLOTS;
use config::{DEFAULT_CACHE_SIZE, DEFAULT_POOL_SIZE, NetbricksConfiguration};
use native::libnuma;
use native::zcsi;
use std::cell::Cell;
use std::ffi::CString;

/// Initialize the system, whitelisting some set of NICs and allocating mempool of given size.
fn init_system_wl_with_mempool(name: &str, core: i32, pci: &[String], pool_size: u32, cache_size: u32) {
    let name_cstr = CString::new(name).unwrap();
    let pci_cstr: Vec<_> = pci.iter().map(|p| CString::new(&p[..]).unwrap()).collect();
    let mut whitelist: Vec<_> = pci_cstr.iter().map(|p| p.as_ptr()).collect();
    unsafe {
        let ret = zcsi::init_system_whitelisted(
            name_cstr.as_ptr(),
            name.len() as i32,
            core,
            whitelist.as_mut_ptr(),
            pci.len() as i32,
            pool_size,
            cache_size,
            METADATA_SLOTS,
        );
        if ret != 0 {
            panic!("Could not initialize the system errno {}", ret)
        }
    }
}

/// Initialize the system, whitelisting some set of NICs.
pub fn init_system_wl(name: &str, core: i32, pci: &[String]) {
    init_system_wl_with_mempool(name, core, pci, DEFAULT_POOL_SIZE, DEFAULT_CACHE_SIZE);
    set_numa_domain();
}

/// Initialize the system as a DPDK secondary process with a set of VDEVs. User must specify mempool name to use.
pub fn init_system_secondary(name: &str, core: i32) {
    let name_cstr = CString::new(name).unwrap();
    let mut vdev_list = vec![];
    unsafe {
        let ret = zcsi::init_secondary(name_cstr.as_ptr(), name.len() as i32, core, vdev_list.as_mut_ptr(), 0);
        if ret != 0 {
            panic!("Could not initialize secondary process errno {}", ret)
        }
    }
    set_numa_domain();
}

/// Initialize the system based on the supplied scheduler configuration.
pub fn init_system(config: &NetbricksConfiguration) {
    if config.name.is_empty() {
        panic!("Configuration must provide a name.");
    }
    // We init with all devices blacklisted and rely on the attach logic to white list them as necessary.
    if config.secondary {
        // We do not have control over any of the other settings in this case.
        init_system_secondary(&config.name[..], config.primary_core);
    } else {
        init_system_wl_with_mempool(&config.name[..], config.primary_core, &[], config.pool_size, config.cache_size);
    }
    set_numa_domain();
}

thread_local!(static NUMA_DOMAIN: Cell<i32> = Cell::new(-1));

fn set_numa_domain() {
    let domain = unsafe {
        if libnuma::numa_available() == -1 {
            println!("No NUMA information found, support disabled");
            -1
        } else {
            let domain = libnuma::numa_preferred();
            println!("Running on node {}", domain);
            domain
        }
    };
    NUMA_DOMAIN.with(|f| f.set(domain))
}

/// Affinitize a pthread to a core and assign a DPDK thread ID.
pub fn init_thread(tid: i32, core: i32) {
    let numa = unsafe { zcsi::init_thread(tid, core) };
    NUMA_DOMAIN.with(|f| {
        f.set(numa);
    });
    if numa == -1 {
        println!("No NUMA information found, support disabled");
    } else {
        println!("Running on node {}", numa);
    };
}

#[inline]
pub fn get_domain() -> i32 {
    NUMA_DOMAIN.with(|f| f.get())
}
37.871287
113
0.540131
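A minimal sketch of the call sequence the module above expects from its users; the `crate::init` import path, the PCI address, and the core IDs are placeholders, and the calls only succeed in a DPDK-capable environment with the native bindings available.

// Hypothetical caller; import path and device/core values are illustrative.
use crate::init::{get_domain, init_system_wl, init_thread};

pub fn bring_up() {
    // Whitelist one NIC and bring up the DPDK runtime on core 0.
    init_system_wl("example-app", 0, &["0000:02:00.0".to_string()]);

    // Pin the current thread (DPDK thread id 1) to core 1; this also records
    // the NUMA node the thread landed on in the thread-local NUMA_DOMAIN.
    init_thread(1, 1);

    // -1 means libnuma reported that NUMA support is unavailable.
    println!("running on NUMA node {}", get_domain());
}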
bbb41e09f5df03adab603bbb9cfc701503e8060d
30,976
// This file is Copyright its original authors, visible in version control // history. // // This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE> // or the MIT license <LICENSE-MIT>, at your option. // You may not use this file except in accordance with one or both of these // licenses. //! Printing logic for basic blocks of Rust-mapped code - parts of functions and declarations but //! not the full mapping logic. use std::fs::File; use std::io::Write; use proc_macro2::{TokenTree, Span}; use crate::types::*; /// Writes out a C++ wrapper class for the given type, which contains various utilities to access /// the underlying C-mapped type safely avoiding some common memory management issues by handling /// resource-freeing and prevending accidental raw copies. pub fn write_cpp_wrapper(cpp_header_file: &mut File, ty: &str, has_destructor: bool) { writeln!(cpp_header_file, "class {} {{", ty).unwrap(); writeln!(cpp_header_file, "private:").unwrap(); writeln!(cpp_header_file, "\tLDK{} self;", ty).unwrap(); writeln!(cpp_header_file, "public:").unwrap(); writeln!(cpp_header_file, "\t{}(const {}&) = delete;", ty, ty).unwrap(); writeln!(cpp_header_file, "\t{}({}&& o) : self(o.self) {{ memset(&o, 0, sizeof({})); }}", ty, ty, ty).unwrap(); writeln!(cpp_header_file, "\t{}(LDK{}&& m_self) : self(m_self) {{ memset(&m_self, 0, sizeof(LDK{})); }}", ty, ty, ty).unwrap(); writeln!(cpp_header_file, "\toperator LDK{}() && {{ LDK{} res = self; memset(&self, 0, sizeof(LDK{})); return res; }}", ty, ty, ty).unwrap(); if has_destructor { writeln!(cpp_header_file, "\t~{}() {{ {}_free(self); }}", ty, ty).unwrap(); writeln!(cpp_header_file, "\t{}& operator=({}&& o) {{ {}_free(self); self = o.self; memset(&o, 0, sizeof({})); return *this; }}", ty, ty, ty, ty).unwrap(); } else { writeln!(cpp_header_file, "\t{}& operator=({}&& o) {{ self = o.self; memset(&o, 0, sizeof({})); return *this; }}", ty, ty, ty).unwrap(); } writeln!(cpp_header_file, "\tLDK{}* operator &() {{ return &self; }}", ty).unwrap(); writeln!(cpp_header_file, "\tLDK{}* operator ->() {{ return &self; }}", ty).unwrap(); writeln!(cpp_header_file, "\tconst LDK{}* operator &() const {{ return &self; }}", ty).unwrap(); writeln!(cpp_header_file, "\tconst LDK{}* operator ->() const {{ return &self; }}", ty).unwrap(); writeln!(cpp_header_file, "}};").unwrap(); } /// Writes out a C-callable concrete Result<A, B> struct and utility methods pub fn write_result_block<W: std::io::Write>(w: &mut W, mangled_container: &str, ok_type: &str, err_type: &str, clonable: bool) { writeln!(w, "#[repr(C)]").unwrap(); writeln!(w, "/// The contents of {}", mangled_container).unwrap(); writeln!(w, "pub union {}Ptr {{", mangled_container).unwrap(); if ok_type != "()" { writeln!(w, "\t/// A pointer to the contents in the success state.").unwrap(); writeln!(w, "\t/// Reading from this pointer when `result_ok` is not set is undefined.").unwrap(); writeln!(w, "\tpub result: *mut {},", ok_type).unwrap(); } else { writeln!(w, "\t/// Note that this value is always NULL, as there are no contents in the OK variant").unwrap(); writeln!(w, "\tpub result: *mut std::ffi::c_void,").unwrap(); } if err_type != "()" { writeln!(w, "\t/// A pointer to the contents in the error state.").unwrap(); writeln!(w, "\t/// Reading from this pointer when `result_ok` is set is undefined.").unwrap(); writeln!(w, "\tpub err: *mut {},", err_type).unwrap(); } else { writeln!(w, "\t/// Note that this value is always NULL, as there are no contents in the Err variant").unwrap(); writeln!(w, "\tpub 
err: *mut std::ffi::c_void,").unwrap(); } writeln!(w, "}}").unwrap(); writeln!(w, "#[repr(C)]").unwrap(); writeln!(w, "/// A {} represents the result of a fallible operation,", mangled_container).unwrap(); writeln!(w, "/// containing a {} on success and a {} on failure.", ok_type, err_type).unwrap(); writeln!(w, "/// `result_ok` indicates the overall state, and the contents are provided via `contents`.").unwrap(); writeln!(w, "pub struct {} {{", mangled_container).unwrap(); writeln!(w, "\t/// The contents of this {}, accessible via either", mangled_container).unwrap(); writeln!(w, "\t/// `err` or `result` depending on the state of `result_ok`.").unwrap(); writeln!(w, "\tpub contents: {}Ptr,", mangled_container).unwrap(); writeln!(w, "\t/// Whether this {} represents a success state.", mangled_container).unwrap(); writeln!(w, "\tpub result_ok: bool,").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); if ok_type != "()" { writeln!(w, "/// Creates a new {} in the success state.", mangled_container).unwrap(); writeln!(w, "pub extern \"C\" fn {}_ok(o: {}) -> {} {{", mangled_container, ok_type, mangled_container).unwrap(); } else { writeln!(w, "/// Creates a new {} in the success state.", mangled_container).unwrap(); writeln!(w, "pub extern \"C\" fn {}_ok() -> {} {{", mangled_container, mangled_container).unwrap(); } writeln!(w, "\t{} {{", mangled_container).unwrap(); writeln!(w, "\t\tcontents: {}Ptr {{", mangled_container).unwrap(); if ok_type != "()" { writeln!(w, "\t\t\tresult: Box::into_raw(Box::new(o)),").unwrap(); } else { writeln!(w, "\t\t\tresult: std::ptr::null_mut(),").unwrap(); } writeln!(w, "\t\t}},").unwrap(); writeln!(w, "\t\tresult_ok: true,").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); if err_type != "()" { writeln!(w, "/// Creates a new {} in the error state.", mangled_container).unwrap(); writeln!(w, "pub extern \"C\" fn {}_err(e: {}) -> {} {{", mangled_container, err_type, mangled_container).unwrap(); } else { writeln!(w, "/// Creates a new {} in the error state.", mangled_container).unwrap(); writeln!(w, "pub extern \"C\" fn {}_err() -> {} {{", mangled_container, mangled_container).unwrap(); } writeln!(w, "\t{} {{", mangled_container).unwrap(); writeln!(w, "\t\tcontents: {}Ptr {{", mangled_container).unwrap(); if err_type != "()" { writeln!(w, "\t\t\terr: Box::into_raw(Box::new(e)),").unwrap(); } else { writeln!(w, "\t\t\terr: std::ptr::null_mut(),").unwrap(); } writeln!(w, "\t\t}},").unwrap(); writeln!(w, "\t\tresult_ok: false,").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Frees any resources used by the {}.", mangled_container).unwrap(); writeln!(w, "pub extern \"C\" fn {}_free(_res: {}) {{ }}", mangled_container, mangled_container).unwrap(); writeln!(w, "impl Drop for {} {{", mangled_container).unwrap(); writeln!(w, "\tfn drop(&mut self) {{").unwrap(); writeln!(w, "\t\tif self.result_ok {{").unwrap(); if ok_type != "()" { writeln!(w, "\t\t\tif unsafe {{ !(self.contents.result as *mut ()).is_null() }} {{").unwrap(); writeln!(w, "\t\t\t\tlet _ = unsafe {{ Box::from_raw(self.contents.result) }};").unwrap(); writeln!(w, "\t\t\t}}").unwrap(); } writeln!(w, "\t\t}} else {{").unwrap(); if err_type != "()" { writeln!(w, "\t\t\tif unsafe {{ !(self.contents.err as *mut ()).is_null() }} {{").unwrap(); writeln!(w, "\t\t\t\tlet _ = unsafe {{ Box::from_raw(self.contents.err) }};").unwrap(); writeln!(w, 
"\t\t\t}}").unwrap(); } writeln!(w, "\t\t}}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); // TODO: Templates should use () now that they can, too let templ_ok_type = if ok_type != "()" { ok_type } else { "u8" }; let templ_err_type = if err_type != "()" { err_type } else { "u8" }; writeln!(w, "impl From<crate::c_types::CResultTempl<{}, {}>> for {} {{", templ_ok_type, templ_err_type, mangled_container).unwrap(); writeln!(w, "\tfn from(mut o: crate::c_types::CResultTempl<{}, {}>) -> Self {{", templ_ok_type, templ_err_type).unwrap(); writeln!(w, "\t\tlet contents = if o.result_ok {{").unwrap(); if ok_type != "()" { writeln!(w, "\t\t\tlet result = unsafe {{ o.contents.result }};").unwrap(); writeln!(w, "\t\t\tunsafe {{ o.contents.result = std::ptr::null_mut() }};").unwrap(); writeln!(w, "\t\t\t{}Ptr {{ result }}", mangled_container).unwrap(); } else { writeln!(w, "\t\t\tlet _ = unsafe {{ Box::from_raw(o.contents.result) }};").unwrap(); writeln!(w, "\t\t\to.contents.result = std::ptr::null_mut();").unwrap(); writeln!(w, "\t\t\t{}Ptr {{ result: std::ptr::null_mut() }}", mangled_container).unwrap(); } writeln!(w, "\t\t}} else {{").unwrap(); if err_type != "()" { writeln!(w, "\t\t\tlet err = unsafe {{ o.contents.err }};").unwrap(); writeln!(w, "\t\t\tunsafe {{ o.contents.err = std::ptr::null_mut(); }}").unwrap(); writeln!(w, "\t\t\t{}Ptr {{ err }}", mangled_container).unwrap(); } else { writeln!(w, "\t\t\tlet _ = unsafe {{ Box::from_raw(o.contents.err) }};").unwrap(); writeln!(w, "\t\t\to.contents.err = std::ptr::null_mut();").unwrap(); writeln!(w, "\t\t\t{}Ptr {{ err: std::ptr::null_mut() }}", mangled_container).unwrap(); } writeln!(w, "\t\t}};").unwrap(); writeln!(w, "\t\tSelf {{").unwrap(); writeln!(w, "\t\t\tcontents,").unwrap(); writeln!(w, "\t\t\tresult_ok: o.result_ok,").unwrap(); writeln!(w, "\t\t}}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); if clonable { writeln!(w, "impl Clone for {} {{", mangled_container).unwrap(); writeln!(w, "\tfn clone(&self) -> Self {{").unwrap(); writeln!(w, "\t\tif self.result_ok {{").unwrap(); writeln!(w, "\t\t\tSelf {{ result_ok: true, contents: {}Ptr {{", mangled_container).unwrap(); if ok_type != "()" { writeln!(w, "\t\t\t\tresult: Box::into_raw(Box::new(<{}>::clone(unsafe {{ &*self.contents.result }})))", ok_type).unwrap(); } else { writeln!(w, "\t\t\t\tresult: std::ptr::null_mut()").unwrap(); } writeln!(w, "\t\t\t}} }}").unwrap(); writeln!(w, "\t\t}} else {{").unwrap(); writeln!(w, "\t\t\tSelf {{ result_ok: false, contents: {}Ptr {{", mangled_container).unwrap(); if err_type != "()" { writeln!(w, "\t\t\t\terr: Box::into_raw(Box::new(<{}>::clone(unsafe {{ &*self.contents.err }})))", err_type).unwrap(); } else { writeln!(w, "\t\t\t\terr: std::ptr::null_mut()").unwrap(); } writeln!(w, "\t\t\t}} }}").unwrap(); writeln!(w, "\t\t}}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Creates a new {} which has the same data as `orig`", mangled_container).unwrap(); writeln!(w, "/// but with all dynamically-allocated buffers duplicated in new buffers.").unwrap(); writeln!(w, "pub extern \"C\" fn {}_clone(orig: &{}) -> {} {{ orig.clone() }}", mangled_container, mangled_container, mangled_container).unwrap(); } } /// Writes out a C-callable concrete Vec<A> struct and utility methods pub fn write_vec_block<W: std::io::Write>(w: &mut W, mangled_container: &str, inner_type: &str, clonable: bool) { writeln!(w, "#[repr(C)]").unwrap(); writeln!(w, "/// 
A dynamically-allocated array of {}s of arbitrary size.", inner_type).unwrap(); writeln!(w, "/// This corresponds to std::vector in C++").unwrap(); writeln!(w, "pub struct {} {{", mangled_container).unwrap(); writeln!(w, "\t/// The elements in the array.").unwrap(); writeln!(w, "\t/// If datalen is non-0 this must be a valid, non-NULL pointer allocated by malloc().").unwrap(); writeln!(w, "\tpub data: *mut {},", inner_type).unwrap(); writeln!(w, "\t/// The number of elements pointed to by `data`.").unwrap(); writeln!(w, "\tpub datalen: usize").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "impl {} {{", mangled_container).unwrap(); writeln!(w, "\t#[allow(unused)] pub(crate) fn into_rust(&mut self) -> Vec<{}> {{", inner_type).unwrap(); writeln!(w, "\t\tif self.datalen == 0 {{ return Vec::new(); }}").unwrap(); writeln!(w, "\t\tlet ret = unsafe {{ Box::from_raw(std::slice::from_raw_parts_mut(self.data, self.datalen)) }}.into();").unwrap(); writeln!(w, "\t\tself.data = std::ptr::null_mut();").unwrap(); writeln!(w, "\t\tself.datalen = 0;").unwrap(); writeln!(w, "\t\tret").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "\t#[allow(unused)] pub(crate) fn as_slice(&self) -> &[{}] {{", inner_type).unwrap(); writeln!(w, "\t\tunsafe {{ std::slice::from_raw_parts_mut(self.data, self.datalen) }}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "impl From<Vec<{}>> for {} {{", inner_type, mangled_container).unwrap(); writeln!(w, "\tfn from(v: Vec<{}>) -> Self {{", inner_type).unwrap(); writeln!(w, "\t\tlet datalen = v.len();").unwrap(); writeln!(w, "\t\tlet data = Box::into_raw(v.into_boxed_slice());").unwrap(); writeln!(w, "\t\tSelf {{ datalen, data: unsafe {{ (*data).as_mut_ptr() }} }}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Frees the buffer pointed to by `data` if `datalen` is non-0.").unwrap(); writeln!(w, "pub extern \"C\" fn {}_free(_res: {}) {{ }}", mangled_container, mangled_container).unwrap(); writeln!(w, "impl Drop for {} {{", mangled_container).unwrap(); writeln!(w, "\tfn drop(&mut self) {{").unwrap(); writeln!(w, "\t\tif self.datalen == 0 {{ return; }}").unwrap(); writeln!(w, "\t\tunsafe {{ Box::from_raw(std::slice::from_raw_parts_mut(self.data, self.datalen)) }};").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); if clonable { writeln!(w, "impl Clone for {} {{", mangled_container).unwrap(); writeln!(w, "\tfn clone(&self) -> Self {{").unwrap(); writeln!(w, "\t\tlet mut res = Vec::new();").unwrap(); writeln!(w, "\t\tif self.datalen == 0 {{ return Self::from(res); }}").unwrap(); writeln!(w, "\t\tres.extend_from_slice(unsafe {{ std::slice::from_raw_parts_mut(self.data, self.datalen) }});").unwrap(); writeln!(w, "\t\tSelf::from(res)").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); } } /// Writes out a C-callable concrete (A, B, ...) struct and utility methods pub fn write_tuple_block<W: std::io::Write>(w: &mut W, mangled_container: &str, types: &[String], clonable: bool) { writeln!(w, "#[repr(C)]").unwrap(); writeln!(w, "/// A tuple of {} elements. 
See the individual fields for the types contained.", types.len()).unwrap(); writeln!(w, "pub struct {} {{", mangled_container).unwrap(); for (idx, ty) in types.iter().enumerate() { writeln!(w, "\t/// The element at position {}", idx).unwrap(); writeln!(w, "\tpub {}: {},", ('a' as u8 + idx as u8) as char, ty).unwrap(); } writeln!(w, "}}").unwrap(); let mut tuple_str = "(".to_owned(); for (idx, ty) in types.iter().enumerate() { if idx != 0 { tuple_str += ", "; } tuple_str += ty; } tuple_str += ")"; writeln!(w, "impl From<{}> for {} {{", tuple_str, mangled_container).unwrap(); writeln!(w, "\tfn from (tup: {}) -> Self {{", tuple_str).unwrap(); writeln!(w, "\t\tSelf {{").unwrap(); for idx in 0..types.len() { writeln!(w, "\t\t\t{}: tup.{},", ('a' as u8 + idx as u8) as char, idx).unwrap(); } writeln!(w, "\t\t}}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "impl {} {{", mangled_container).unwrap(); writeln!(w, "\t#[allow(unused)] pub(crate) fn to_rust(mut self) -> {} {{", tuple_str).unwrap(); write!(w, "\t\t(").unwrap(); for idx in 0..types.len() { write!(w, "{}self.{}", if idx != 0 {", "} else {""}, ('a' as u8 + idx as u8) as char).unwrap(); } writeln!(w, ")").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); if clonable { writeln!(w, "impl Clone for {} {{", mangled_container).unwrap(); writeln!(w, "\tfn clone(&self) -> Self {{").unwrap(); writeln!(w, "\t\tSelf {{").unwrap(); for idx in 0..types.len() { writeln!(w, "\t\t\t{}: self.{}.clone(),", ('a' as u8 + idx as u8) as char, ('a' as u8 + idx as u8) as char).unwrap(); } writeln!(w, "\t\t}}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Creates a new tuple which has the same data as `orig`").unwrap(); writeln!(w, "/// but with all dynamically-allocated buffers duplicated in new buffers.").unwrap(); writeln!(w, "pub extern \"C\" fn {}_clone(orig: &{}) -> {} {{ orig.clone() }}", mangled_container, mangled_container, mangled_container).unwrap(); } writeln!(w, "/// Creates a new {} from the contained elements.", mangled_container).unwrap(); write!(w, "#[no_mangle]\npub extern \"C\" fn {}_new(", mangled_container).unwrap(); for (idx, gen) in types.iter().enumerate() { write!(w, "{}{}: ", if idx != 0 { ", " } else { "" }, ('a' as u8 + idx as u8) as char).unwrap(); //if !self.write_c_type_intern(&mut created_container, gen, generics, false, false, false) { return false; } write!(w, "{}", gen).unwrap(); } writeln!(w, ") -> {} {{", mangled_container).unwrap(); write!(w, "\t{} {{ ", mangled_container).unwrap(); for idx in 0..types.len() { write!(w, "{}, ", ('a' as u8 + idx as u8) as char).unwrap(); } writeln!(w, "}}\n}}\n").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Frees any resources used by the {}.", mangled_container).unwrap(); writeln!(w, "pub extern \"C\" fn {}_free(_res: {}) {{ }}", mangled_container, mangled_container).unwrap(); } /// Writes out a C-callable concrete Option<A> struct and utility methods pub fn write_option_block<W: std::io::Write>(w: &mut W, mangled_container: &str, inner_type: &str, clonable: bool) { writeln!(w, "#[repr(C)]").unwrap(); if clonable { writeln!(w, "#[derive(Clone)]").unwrap(); } writeln!(w, "/// An enum which can either contain a {} or not", inner_type).unwrap(); writeln!(w, "pub enum {} {{", mangled_container).unwrap(); writeln!(w, "\t/// When we're in this state, this {} contains a {}", mangled_container, inner_type).unwrap(); writeln!(w, "\tSome({}),", 
inner_type).unwrap(); writeln!(w, "\t/// When we're in this state, this {} contains nothing", mangled_container).unwrap(); writeln!(w, "\tNone").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "impl {} {{", mangled_container).unwrap(); writeln!(w, "\t#[allow(unused)] pub(crate) fn is_some(&self) -> bool {{").unwrap(); writeln!(w, "\t\tif let Self::Some(_) = self {{ true }} else {{ false }}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "\t#[allow(unused)] pub(crate) fn take(mut self) -> {} {{", inner_type).unwrap(); writeln!(w, "\t\tif let Self::Some(v) = self {{ v }} else {{ unreachable!() }}").unwrap(); writeln!(w, "\t}}").unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Constructs a new {} containing a {}", mangled_container, inner_type).unwrap(); writeln!(w, "pub extern \"C\" fn {}_some(o: {}) -> {} {{", mangled_container, inner_type, mangled_container).unwrap(); writeln!(w, "\t{}::Some(o)", mangled_container).unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Constructs a new {} containing nothing", mangled_container).unwrap(); writeln!(w, "pub extern \"C\" fn {}_none() -> {} {{", mangled_container, mangled_container).unwrap(); writeln!(w, "\t{}::None", mangled_container).unwrap(); writeln!(w, "}}").unwrap(); writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Frees any resources associated with the {}, if we are in the Some state", inner_type).unwrap(); writeln!(w, "pub extern \"C\" fn {}_free(_res: {}) {{ }}", mangled_container, mangled_container).unwrap(); if clonable { writeln!(w, "#[no_mangle]").unwrap(); writeln!(w, "/// Creates a new {} which has the same data as `orig`", mangled_container).unwrap(); writeln!(w, "/// but with all dynamically-allocated buffers duplicated in new buffers.").unwrap(); writeln!(w, "pub extern \"C\" fn {}_clone(orig: &{}) -> {} {{ orig.clone() }}", mangled_container, mangled_container, mangled_container).unwrap(); } } /// Prints the docs from a given attribute list unless its tagged no export pub fn writeln_docs<W: std::io::Write>(w: &mut W, attrs: &[syn::Attribute], prefix: &str) { for attr in attrs.iter() { let tokens_clone = attr.tokens.clone(); let mut token_iter = tokens_clone.into_iter(); if let Some(token) = token_iter.next() { match token { TokenTree::Punct(c) if c.as_char() == '=' => { // syn gets '=' from '///' or '//!' as it is syntax for #[doc = ""] }, TokenTree::Group(_) => continue, // eg #[derive()] _ => unimplemented!(), } } else { continue; } match attr.style { syn::AttrStyle::Inner(_) => { match token_iter.next().unwrap() { TokenTree::Literal(lit) => { // Drop the first and last chars from lit as they are always " let doc = format!("{}", lit); writeln!(w, "{}//!{}", prefix, &doc[1..doc.len() - 1]).unwrap(); }, _ => unimplemented!(), } }, syn::AttrStyle::Outer => { match token_iter.next().unwrap() { TokenTree::Literal(lit) => { // Drop the first and last chars from lit as they are always " let doc = format!("{}", lit); writeln!(w, "{}///{}", prefix, &doc[1..doc.len() - 1]).unwrap(); }, _ => unimplemented!(), } }, } } } /// Print the parameters in a method declaration, starting after the open parenthesis, through and /// including the closing parenthesis and return value, but not including the open bracket or any /// trailing semicolons. /// /// Usable both for a function definition and declaration. /// /// this_param is used when returning Self or accepting a self parameter, and should be the /// concrete, mapped type. 
pub fn write_method_params<W: std::io::Write>(w: &mut W, sig: &syn::Signature, this_param: &str, types: &mut TypeResolver, generics: Option<&GenericTypes>, self_ptr: bool, fn_decl: bool) { if sig.constness.is_some() || sig.asyncness.is_some() || sig.unsafety.is_some() || sig.abi.is_some() || sig.variadic.is_some() { unimplemented!(); } if sig.generics.lt_token.is_some() { for generic in sig.generics.params.iter() { match generic { syn::GenericParam::Type(_)|syn::GenericParam::Lifetime(_) => { // We ignore these, if they're not on skipped args, we'll blow up // later, and lifetimes we just hope the C client enforces. }, _ => unimplemented!(), } } } let mut first_arg = true; let mut num_unused = 0; for inp in sig.inputs.iter() { match inp { syn::FnArg::Receiver(recv) => { if !recv.attrs.is_empty() || recv.reference.is_none() { unimplemented!(); } write!(w, "this_arg: {}{}", match (self_ptr, recv.mutability.is_some()) { (true, true) => "*mut ", (true, false) => "*const ", (false, true) => "&mut ", (false, false) => "&", }, this_param).unwrap(); assert!(first_arg); first_arg = false; }, syn::FnArg::Typed(arg) => { if types.skip_arg(&*arg.ty, generics) { continue; } if !arg.attrs.is_empty() { unimplemented!(); } // First get the c type so that we can check if it ends up being a reference: let mut c_type = Vec::new(); types.write_c_type(&mut c_type, &*arg.ty, generics, false); match &*arg.pat { syn::Pat::Ident(ident) => { if !ident.attrs.is_empty() || ident.subpat.is_some() { unimplemented!(); } write!(w, "{}{}{}: ", if first_arg { "" } else { ", " }, if !fn_decl || c_type[0] == '&' as u8 || c_type[0] == '*' as u8 { "" } else { "mut " }, ident.ident).unwrap(); first_arg = false; }, syn::Pat::Wild(wild) => { if !wild.attrs.is_empty() { unimplemented!(); } write!(w, "{}unused_{}: ", if first_arg { "" } else { ", " }, num_unused).unwrap(); num_unused += 1; }, _ => unimplemented!(), } w.write(&c_type).unwrap(); } } } write!(w, ")").unwrap(); match &sig.output { syn::ReturnType::Type(_, rtype) => { write!(w, " -> ").unwrap(); if let Some(mut remaining_path) = first_seg_self(&*rtype) { if remaining_path.next().is_none() { write!(w, "{}", this_param).unwrap(); return; } } if let syn::Type::Reference(r) = &**rtype { // We can't return a reference, cause we allocate things on the stack. types.write_c_type(w, &*r.elem, generics, true); } else { types.write_c_type(w, &*rtype, generics, true); } }, _ => {}, } } /// Print the main part of a method declaration body, starting with a newline after the function /// open bracket and converting each function parameter to or from C-mapped types. Ends with "let /// mut ret = " assuming the next print will be the unmapped Rust function to call followed by the /// parameters we mapped to/from C here. pub fn write_method_var_decl_body<W: std::io::Write>(w: &mut W, sig: &syn::Signature, extra_indent: &str, types: &TypeResolver, generics: Option<&GenericTypes>, to_c: bool) { let mut num_unused = 0; for inp in sig.inputs.iter() { match inp { syn::FnArg::Receiver(_) => {}, syn::FnArg::Typed(arg) => { if types.skip_arg(&*arg.ty, generics) { continue; } if !arg.attrs.is_empty() { unimplemented!(); } macro_rules! 
write_new_var { ($ident: expr, $ty: expr) => { if to_c { if types.write_to_c_conversion_new_var(w, &$ident, &$ty, generics, false) { write!(w, "\n\t{}", extra_indent).unwrap(); } } else { if types.write_from_c_conversion_new_var(w, &$ident, &$ty, generics) { write!(w, "\n\t{}", extra_indent).unwrap(); } } } } match &*arg.pat { syn::Pat::Ident(ident) => { if !ident.attrs.is_empty() || ident.subpat.is_some() { unimplemented!(); } write_new_var!(ident.ident, *arg.ty); }, syn::Pat::Wild(w) => { if !w.attrs.is_empty() { unimplemented!(); } write_new_var!(syn::Ident::new(&format!("unused_{}", num_unused), Span::call_site()), *arg.ty); num_unused += 1; }, _ => unimplemented!(), } } } } match &sig.output { syn::ReturnType::Type(_, _) => { write!(w, "let mut ret = ").unwrap(); }, _ => {}, } } /// Prints the parameters in a method call, starting after the open parenthesis and ending with a /// final return statement returning the method's result. Should be followed by a single closing /// bracket. /// /// The return value is expected to be bound to a variable named `ret` which is available after a /// method-call-ending semicolon. pub fn write_method_call_params<W: std::io::Write>(w: &mut W, sig: &syn::Signature, extra_indent: &str, types: &TypeResolver, generics: Option<&GenericTypes>, this_type: &str, to_c: bool) { let mut first_arg = true; let mut num_unused = 0; for inp in sig.inputs.iter() { match inp { syn::FnArg::Receiver(recv) => { if !recv.attrs.is_empty() || recv.reference.is_none() { unimplemented!(); } if to_c { write!(w, "self.this_arg").unwrap(); first_arg = false; } }, syn::FnArg::Typed(arg) => { if types.skip_arg(&*arg.ty, generics) { if !to_c { if !first_arg { write!(w, ", ").unwrap(); } first_arg = false; types.no_arg_to_rust(w, &*arg.ty, generics); } continue; } if !arg.attrs.is_empty() { unimplemented!(); } macro_rules! write_ident { ($ident: expr) => { if !first_arg { write!(w, ", ").unwrap(); } first_arg = false; if to_c { types.write_to_c_conversion_inline_prefix(w, &*arg.ty, generics, false); write!(w, "{}", $ident).unwrap(); types.write_to_c_conversion_inline_suffix(w, &*arg.ty, generics, false); } else { types.write_from_c_conversion_prefix(w, &*arg.ty, generics); write!(w, "{}", $ident).unwrap(); types.write_from_c_conversion_suffix(w, &*arg.ty, generics); } } } match &*arg.pat { syn::Pat::Ident(ident) => { if !ident.attrs.is_empty() || ident.subpat.is_some() { unimplemented!(); } write_ident!(ident.ident); }, syn::Pat::Wild(w) => { if !w.attrs.is_empty() { unimplemented!(); } write_ident!(format!("unused_{}", num_unused)); num_unused += 1; }, _ => unimplemented!(), } } } } write!(w, ")").unwrap(); match &sig.output { syn::ReturnType::Type(_, rtype) => { write!(w, ";\n\t{}", extra_indent).unwrap(); let self_segs_iter = first_seg_self(&*rtype); if to_c && first_seg_self(&*rtype).is_some() { // Assume rather blindly that we're returning an associated trait from a C fn call to a Rust trait object. 
write!(w, "ret").unwrap(); } else if !to_c && self_segs_iter.is_some() && self_segs_iter.unwrap().next().is_none() { // If we're returning "Self" (and not "Self::X"), just do it manually write!(w, "{} {{ inner: Box::into_raw(Box::new(ret)), is_owned: true }}", this_type).unwrap(); } else if to_c { let new_var = types.write_from_c_conversion_new_var(w, &syn::Ident::new("ret", Span::call_site()), rtype, generics); if new_var { write!(w, "\n\t{}", extra_indent).unwrap(); } types.write_from_c_conversion_prefix(w, &*rtype, generics); write!(w, "ret").unwrap(); types.write_from_c_conversion_suffix(w, &*rtype, generics); } else { let ret_returned = if let syn::Type::Reference(_) = &**rtype { true } else { false }; let new_var = types.write_to_c_conversion_new_var(w, &syn::Ident::new("ret", Span::call_site()), &rtype, generics, true); if new_var { write!(w, "\n\t{}", extra_indent).unwrap(); } types.write_to_c_conversion_inline_prefix(w, &rtype, generics, true); write!(w, "{}ret", if ret_returned && !new_var { "*" } else { "" }).unwrap(); types.write_to_c_conversion_inline_suffix(w, &rtype, generics, true); } } _ => {}, } } /// Prints concrete generic parameters for a struct/trait/function, including the less-than and /// greater-than symbols, if any generic parameters are defined. pub fn maybe_write_generics<W: std::io::Write>(w: &mut W, generics: &syn::Generics, types: &TypeResolver, concrete_lifetimes: bool) { let mut gen_types = GenericTypes::new(); assert!(gen_types.learn_generics(generics, types)); if !generics.params.is_empty() { write!(w, "<").unwrap(); for (idx, generic) in generics.params.iter().enumerate() { match generic { syn::GenericParam::Type(type_param) => { let mut printed_param = false; for bound in type_param.bounds.iter() { if let syn::TypeParamBound::Trait(trait_bound) = bound { assert_simple_bound(&trait_bound); write!(w, "{}{}", if idx != 0 { ", " } else { "" }, gen_types.maybe_resolve_ident(&type_param.ident).unwrap()).unwrap(); if printed_param { unimplemented!("Can't print generic params that have multiple non-lifetime bounds"); } printed_param = true; } } }, syn::GenericParam::Lifetime(lt) => { if concrete_lifetimes { write!(w, "'static").unwrap(); } else { write!(w, "{}'{}", if idx != 0 { ", " } else { "" }, lt.lifetime.ident).unwrap(); } }, _ => unimplemented!(), } } write!(w, ">").unwrap(); } }
45.154519
189
0.60521
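A minimal sketch of driving one of the generators from the file above by hand; the `crate::blocks` import path and the container name passed in are placeholders, and only the `write_result_block` signature comes from the source.

// Illustrative driver for the Result-container generator above.
use crate::blocks::write_result_block;

fn demo() {
    let mut out: Vec<u8> = Vec::new();

    // Emit a C-callable Result<u64, ()> container. Passing "()" as the error
    // type selects the always-NULL pointer variant of the union, and
    // `clonable = true` also emits the `_clone` constructor.
    write_result_block(&mut out, "CResult_u64NoneZ", "u64", "()", true);

    // `out` now holds the generated Rust source for the container.
    print!("{}", String::from_utf8(out).expect("generated code is valid UTF-8"));
}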
69a8f87d58f544109c142a415f074ac08fa64745
1,108
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.

pub(crate) mod metrics;
mod raft_client;

pub mod config;
pub mod debug;
mod engine_factory;
pub mod errors;
pub mod gc_worker;
pub mod load_statistics;
pub mod lock_manager;
pub mod node;
mod proxy;
pub mod raftkv;
mod reset_to_version;
pub mod resolve;
pub mod server;
pub mod service;
pub mod snap;
pub mod status_server;
pub mod transport;
pub mod ttl;

pub use engine_factory::{KvEngineFactory, KvEngineFactoryBuilder};

#[cfg(any(test, feature = "testexport"))]
pub use self::server::test_router::TestRaftStoreRouter;
pub use self::{
    config::{Config, ServerConfigManager, DEFAULT_CLUSTER_ID, DEFAULT_LISTENING_ADDR},
    errors::{Error, Result},
    metrics::{CONFIG_ROCKSDB_GAUGE, CPU_CORES_QUOTA_GAUGE, MEM_TRACE_SUM_GAUGE},
    node::{create_raft_storage, Node},
    proxy::{build_forward_option, get_target_address, Proxy},
    raft_client::{ConnectionBuilder, RaftClient},
    raftkv::RaftKv,
    resolve::{PdStoreAddrResolver, StoreAddrResolver},
    server::{Server, GRPC_THREAD_PREFIX},
    transport::ServerTransport,
};
27.02439
86
0.76444
62b6486f54d7ea28ac5561a33aa5b374943e26ec
37,749
// Copyright 2017-2019 Parity Technologies (UK) Ltd. // This file is part of Substrate. // Substrate is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Substrate is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Substrate. If not, see <http://www.gnu.org/licenses/>. //! RocksDB-based light client blockchain storage. use std::{sync::Arc, collections::HashMap}; use parking_lot::RwLock; use kvdb::{KeyValueDB, DBTransaction}; use client::backend::{AuxStore, NewBlockState}; use client::blockchain::{BlockStatus, Cache as BlockchainCache, HeaderBackend as BlockchainHeaderBackend, Info as BlockchainInfo}; use client::cht; use client::leaves::{LeafSet, FinalizationDisplaced}; use client::error::{ErrorKind as ClientErrorKind, Result as ClientResult}; use client::light::blockchain::Storage as LightBlockchainStorage; use parity_codec::{Decode, Encode}; use primitives::Blake2Hasher; use runtime_primitives::generic::BlockId; use runtime_primitives::traits::{Block as BlockT, Header as HeaderT, Zero, One, As, NumberFor, Digest, DigestItem}; use consensus_common::well_known_cache_keys; use crate::cache::{DbCacheSync, DbCache, ComplexBlockId}; use crate::utils::{self, meta_keys, Meta, db_err, open_database, read_db, block_id_to_lookup_key, read_meta}; use crate::DatabaseSettings; use log::{trace, warn, debug}; pub(crate) mod columns { pub const META: Option<u32> = crate::utils::COLUMN_META; pub const KEY_LOOKUP: Option<u32> = Some(1); pub const HEADER: Option<u32> = Some(2); pub const CACHE: Option<u32> = Some(3); pub const CHT: Option<u32> = Some(4); pub const AUX: Option<u32> = Some(5); } /// Prefix for headers CHT. const HEADER_CHT_PREFIX: u8 = 0; /// Prefix for changes tries roots CHT. const CHANGES_TRIE_CHT_PREFIX: u8 = 1; /// Light blockchain storage. Stores most recent headers + CHTs for older headers. /// Locks order: meta, leaves, cache. pub struct LightStorage<Block: BlockT> { db: Arc<KeyValueDB>, meta: RwLock<Meta<NumberFor<Block>, Block::Hash>>, leaves: RwLock<LeafSet<Block::Hash, NumberFor<Block>>>, cache: Arc<DbCacheSync<Block>>, } impl<Block> LightStorage<Block> where Block: BlockT, { /// Create new storage with given settings. 
pub fn new(config: DatabaseSettings) -> ClientResult<Self> { let db = open_database(&config, columns::META, "light")?; Self::from_kvdb(db as Arc<_>) } #[cfg(any(test, feature = "test-helpers"))] pub fn new_test() -> Self { use utils::NUM_COLUMNS; let db = Arc::new(::kvdb_memorydb::create(NUM_COLUMNS)); Self::from_kvdb(db as Arc<_>).expect("failed to create test-db") } fn from_kvdb(db: Arc<KeyValueDB>) -> ClientResult<Self> { let meta = read_meta::<Block>(&*db, columns::META, columns::HEADER)?; let leaves = LeafSet::read_from_db(&*db, columns::META, meta_keys::LEAF_PREFIX)?; let cache = DbCache::new( db.clone(), columns::KEY_LOOKUP, columns::HEADER, columns::CACHE, ComplexBlockId::new(meta.finalized_hash, meta.finalized_number), ); Ok(LightStorage { db, meta: RwLock::new(meta), cache: Arc::new(DbCacheSync(RwLock::new(cache))), leaves: RwLock::new(leaves), }) } #[cfg(test)] pub(crate) fn cache(&self) -> &DbCacheSync<Block> { &self.cache } fn update_meta( &self, hash: Block::Hash, number: NumberFor<Block>, is_best: bool, is_finalized: bool, ) { let mut meta = self.meta.write(); if number.is_zero() { meta.genesis_hash = hash; meta.finalized_hash = hash; } if is_best { meta.best_number = number; meta.best_hash = hash; } if is_finalized { meta.finalized_number = number; meta.finalized_hash = hash; } } } impl<Block> BlockchainHeaderBackend<Block> for LightStorage<Block> where Block: BlockT, { fn header(&self, id: BlockId<Block>) -> ClientResult<Option<Block::Header>> { utils::read_header(&*self.db, columns::KEY_LOOKUP, columns::HEADER, id) } fn info(&self) -> ClientResult<BlockchainInfo<Block>> { let meta = self.meta.read(); Ok(BlockchainInfo { best_hash: meta.best_hash, best_number: meta.best_number, genesis_hash: meta.genesis_hash, finalized_hash: meta.finalized_hash, finalized_number: meta.finalized_number, }) } fn status(&self, id: BlockId<Block>) -> ClientResult<BlockStatus> { let exists = match id { BlockId::Hash(_) => read_db( &*self.db, columns::KEY_LOOKUP, columns::HEADER, id )?.is_some(), BlockId::Number(n) => n <= self.meta.read().best_number, }; match exists { true => Ok(BlockStatus::InChain), false => Ok(BlockStatus::Unknown), } } fn number(&self, hash: Block::Hash) -> ClientResult<Option<NumberFor<Block>>> { if let Some(lookup_key) = block_id_to_lookup_key::<Block>(&*self.db, columns::KEY_LOOKUP, BlockId::Hash(hash))? { let number = utils::lookup_key_to_number(&lookup_key)?; Ok(Some(number)) } else { Ok(None) } } fn hash(&self, number: NumberFor<Block>) -> ClientResult<Option<Block::Hash>> { Ok(self.header(BlockId::Number(number))?.map(|header| header.hash().clone())) } } impl<Block: BlockT> LightStorage<Block> { // Get block changes trie root, if available. fn changes_trie_root(&self, block: BlockId<Block>) -> ClientResult<Option<Block::Hash>> { self.header(block) .map(|header| header.and_then(|header| header.digest().log(DigestItem::as_changes_trie_root) .cloned())) } /// Handle setting head within a transaction. `route_to` should be the last /// block that existed in the database. `best_to` should be the best block /// to be set. /// /// In the case where the new best block is a block to be imported, `route_to` /// should be the parent of `best_to`. In the case where we set an existing block /// to be best, `route_to` should equal to `best_to`. 
fn set_head_with_transaction(&self, transaction: &mut DBTransaction, route_to: Block::Hash, best_to: (NumberFor<Block>, Block::Hash)) -> Result<(), client::error::Error> { let lookup_key = utils::number_and_hash_to_lookup_key(best_to.0, &best_to.1); // handle reorg. let meta = self.meta.read(); if meta.best_hash != Default::default() { let tree_route = ::client::blockchain::tree_route( self, BlockId::Hash(meta.best_hash), BlockId::Hash(route_to), )?; // update block number to hash lookup entries. for retracted in tree_route.retracted() { if retracted.hash == meta.finalized_hash { // TODO: can we recover here? warn!("Safety failure: reverting finalized block {:?}", (&retracted.number, &retracted.hash)); } utils::remove_number_to_key_mapping( transaction, columns::KEY_LOOKUP, retracted.number ); } for enacted in tree_route.enacted() { utils::insert_number_to_key_mapping( transaction, columns::KEY_LOOKUP, enacted.number, enacted.hash ); } } transaction.put(columns::META, meta_keys::BEST_BLOCK, &lookup_key); utils::insert_number_to_key_mapping( transaction, columns::KEY_LOOKUP, best_to.0, best_to.1, ); Ok(()) } // Note that a block is finalized. Only call with child of last finalized block. fn note_finalized( &self, transaction: &mut DBTransaction, header: &Block::Header, hash: Block::Hash, displaced: &mut Option<FinalizationDisplaced<Block::Hash, NumberFor<Block>>>, ) -> ClientResult<()> { let meta = self.meta.read(); if &meta.finalized_hash != header.parent_hash() { return Err(::client::error::ErrorKind::NonSequentialFinalization( format!("Last finalized {:?} not parent of {:?}", meta.finalized_hash, hash), ).into()) } let lookup_key = utils::number_and_hash_to_lookup_key(header.number().clone(), hash); transaction.put(columns::META, meta_keys::FINALIZED_BLOCK, &lookup_key); // build new CHT(s) if required if let Some(new_cht_number) = cht::is_build_required(cht::SIZE, *header.number()) { let new_cht_start: NumberFor<Block> = cht::start_number(cht::SIZE, new_cht_number); let new_header_cht_root = cht::compute_root::<Block::Header, Blake2Hasher, _>( cht::SIZE, new_cht_number, (new_cht_start.as_()..) .map(|num| self.hash(As::sa(num))) )?; transaction.put( columns::CHT, &cht_key(HEADER_CHT_PREFIX, new_cht_start), new_header_cht_root.as_ref() ); // if the header includes changes trie root, let's build a changes tries roots CHT if header.digest().log(DigestItem::as_changes_trie_root).is_some() { let new_changes_trie_cht_root = cht::compute_root::<Block::Header, Blake2Hasher, _>( cht::SIZE, new_cht_number, (new_cht_start.as_()..) .map(|num| self.changes_trie_root(BlockId::Number(As::sa(num)))) )?; transaction.put( columns::CHT, &cht_key(CHANGES_TRIE_CHT_PREFIX, new_cht_start), new_changes_trie_cht_root.as_ref() ); } // prune headers that are replaced with CHT let mut prune_block = new_cht_start; let new_cht_end = cht::end_number(cht::SIZE, new_cht_number); trace!(target: "db", "Replacing blocks [{}..{}] with CHT#{}", new_cht_start, new_cht_end, new_cht_number); while prune_block <= new_cht_end { if let Some(hash) = self.hash(prune_block)? { let lookup_key = block_id_to_lookup_key::<Block>(&*self.db, columns::KEY_LOOKUP, BlockId::Number(prune_block))? .expect("retrieved hash for `prune_block` right above. therefore retrieving lookup key must succeed. 
q.e.d."); utils::remove_key_mappings( transaction, columns::KEY_LOOKUP, prune_block, hash ); transaction.delete(columns::HEADER, &lookup_key); } prune_block += One::one(); } } let new_displaced = self.leaves.write().finalize_height(header.number().clone()); match displaced { x @ &mut None => *x = Some(new_displaced), &mut Some(ref mut displaced) => displaced.merge(new_displaced), } Ok(()) } /// Read CHT root of given type for the block. fn read_cht_root( &self, cht_type: u8, cht_size: u64, block: NumberFor<Block> ) -> ClientResult<Block::Hash> { let no_cht_for_block = || ClientErrorKind::Backend(format!("CHT for block {} not exists", block)).into(); let cht_number = cht::block_to_cht_number(cht_size, block).ok_or_else(no_cht_for_block)?; let cht_start = cht::start_number(cht_size, cht_number); self.db.get(columns::CHT, &cht_key(cht_type, cht_start)).map_err(db_err)? .ok_or_else(no_cht_for_block) .and_then(|hash| Block::Hash::decode(&mut &*hash).ok_or_else(no_cht_for_block)) } } impl<Block> AuxStore for LightStorage<Block> where Block: BlockT, { fn insert_aux< 'a, 'b: 'a, 'c: 'a, I: IntoIterator<Item=&'a(&'c [u8], &'c [u8])>, D: IntoIterator<Item=&'a &'b [u8]>, >(&self, insert: I, delete: D) -> ClientResult<()> { let mut transaction = DBTransaction::new(); for (k, v) in insert { transaction.put(columns::AUX, k, v); } for k in delete { transaction.delete(columns::AUX, k); } self.db.write(transaction).map_err(db_err) } fn get_aux(&self, key: &[u8]) -> ClientResult<Option<Vec<u8>>> { self.db.get(columns::AUX, key).map(|r| r.map(|v| v.to_vec())).map_err(db_err) } } impl<Block> LightBlockchainStorage<Block> for LightStorage<Block> where Block: BlockT, { fn import_header( &self, header: Block::Header, cache_at: HashMap<well_known_cache_keys::Id, Vec<u8>>, leaf_state: NewBlockState, aux_ops: Vec<(Vec<u8>, Option<Vec<u8>>)>, ) -> ClientResult<()> { let mut finalization_displaced_leaves = None; let mut transaction = DBTransaction::new(); let hash = header.hash(); let number = *header.number(); let parent_hash = *header.parent_hash(); for (key, maybe_val) in aux_ops { match maybe_val { Some(val) => transaction.put_vec(columns::AUX, &key, val), None => transaction.delete(columns::AUX, &key), } } // blocks are keyed by number + hash. let lookup_key = utils::number_and_hash_to_lookup_key(number, &hash); if leaf_state.is_best() { self.set_head_with_transaction(&mut transaction, parent_hash, (number, hash))?; } utils::insert_hash_to_key_mapping( &mut transaction, columns::KEY_LOOKUP, number, hash, ); transaction.put(columns::HEADER, &lookup_key, &header.encode()); let is_genesis = number.is_zero(); if is_genesis { transaction.put(columns::META, meta_keys::GENESIS_HASH, hash.as_ref()); } let finalized = match leaf_state { _ if is_genesis => true, NewBlockState::Final => true, _ => false, }; if finalized { self.note_finalized( &mut transaction, &header, hash, &mut finalization_displaced_leaves, )?; } { let mut leaves = self.leaves.write(); let displaced_leaf = leaves.import(hash, number, parent_hash); let mut cache = self.cache.0.write(); let cache_ops = cache.transaction(&mut transaction) .on_block_insert( ComplexBlockId::new(*header.parent_hash(), if number.is_zero() { Zero::zero() } else { number - One::one() }), ComplexBlockId::new(hash, number), cache_at, finalized, )? 
.into_ops(); debug!("Light DB Commit {:?} ({})", hash, number); let write_result = self.db.write(transaction).map_err(db_err); if let Err(e) = write_result { let mut leaves = self.leaves.write(); let mut undo = leaves.undo(); // revert leaves set update if there was one. if let Some(displaced_leaf) = displaced_leaf { undo.undo_import(displaced_leaf); } if let Some(finalization_displaced) = finalization_displaced_leaves { undo.undo_finalization(finalization_displaced); } return Err(e); } cache.commit(cache_ops); } self.update_meta(hash, number, leaf_state.is_best(), finalized); Ok(()) } fn set_head(&self, id: BlockId<Block>) -> ClientResult<()> { if let Some(header) = self.header(id)? { let hash = header.hash(); let number = header.number(); let mut transaction = DBTransaction::new(); self.set_head_with_transaction(&mut transaction, hash.clone(), (number.clone(), hash.clone()))?; self.db.write(transaction).map_err(db_err)?; Ok(()) } else { Err(ClientErrorKind::UnknownBlock(format!("Cannot set head {:?}", id)).into()) } } fn header_cht_root(&self, cht_size: u64, block: NumberFor<Block>) -> ClientResult<Block::Hash> { self.read_cht_root(HEADER_CHT_PREFIX, cht_size, block) } fn changes_trie_cht_root(&self, cht_size: u64, block: NumberFor<Block>) -> ClientResult<Block::Hash> { self.read_cht_root(CHANGES_TRIE_CHT_PREFIX, cht_size, block) } fn finalize_header(&self, id: BlockId<Block>) -> ClientResult<()> { if let Some(header) = self.header(id)? { let mut displaced = None; let mut transaction = DBTransaction::new(); let hash = header.hash(); let number = *header.number(); self.note_finalized(&mut transaction, &header, hash.clone(), &mut displaced)?; { let mut cache = self.cache.0.write(); let cache_ops = cache.transaction(&mut transaction) .on_block_finalize( ComplexBlockId::new(*header.parent_hash(), if number.is_zero() { Zero::zero() } else { number - One::one() }), ComplexBlockId::new(hash, number) )? .into_ops(); if let Err(e) = self.db.write(transaction).map_err(db_err) { if let Some(displaced) = displaced { self.leaves.write().undo().undo_finalization(displaced); } return Err(e); } cache.commit(cache_ops); } self.update_meta(hash, header.number().clone(), false, true); Ok(()) } else { Err(ClientErrorKind::UnknownBlock(format!("Cannot finalize block {:?}", id)).into()) } } fn last_finalized(&self) -> ClientResult<Block::Hash> { Ok(self.meta.read().finalized_hash.clone()) } fn cache(&self) -> Option<Arc<BlockchainCache<Block>>> { Some(self.cache.clone()) } } /// Build the key for inserting header-CHT at given block. 
fn cht_key<N: As<u64>>(cht_type: u8, block: N) -> [u8; 5] { let mut key = [cht_type; 5]; key[1..].copy_from_slice(&utils::number_index_key(block)); key } #[cfg(test)] pub(crate) mod tests { use client::cht; use runtime_primitives::generic::DigestItem; use runtime_primitives::testing::{H256 as Hash, Header, Block as RawBlock, ExtrinsicWrapper}; use runtime_primitives::traits::AuthorityIdFor; use super::*; type Block = RawBlock<ExtrinsicWrapper<u32>>; type AuthorityId = AuthorityIdFor<Block>; pub fn default_header(parent: &Hash, number: u64) -> Header { Header { number: number.into(), parent_hash: *parent, state_root: Hash::random(), digest: Default::default(), extrinsics_root: Default::default(), } } fn header_with_changes_trie(parent: &Hash, number: u64) -> Header { let mut header = default_header(parent, number); header.digest.logs.push(DigestItem::ChangesTrieRoot([(number % 256) as u8; 32].into())); header } fn header_with_extrinsics_root(parent: &Hash, number: u64, extrinsics_root: Hash) -> Header { let mut header = default_header(parent, number); header.extrinsics_root = extrinsics_root; header } pub fn insert_block<F: Fn() -> Header>( db: &LightStorage<Block>, cache: HashMap<well_known_cache_keys::Id, Vec<u8>>, header: F, ) -> Hash { let header = header(); let hash = header.hash(); db.import_header(header, cache, NewBlockState::Best, Vec::new()).unwrap(); hash } fn insert_final_block<F: Fn() -> Header>( db: &LightStorage<Block>, cache: HashMap<well_known_cache_keys::Id, Vec<u8>>, header: F, ) -> Hash { let header = header(); let hash = header.hash(); db.import_header(header, cache, NewBlockState::Final, Vec::new()).unwrap(); hash } fn insert_non_best_block<F: Fn() -> Header>( db: &LightStorage<Block>, cache: HashMap<well_known_cache_keys::Id, Vec<u8>>, header: F, ) -> Hash { let header = header(); let hash = header.hash(); db.import_header(header, cache, NewBlockState::Normal, Vec::new()).unwrap(); hash } #[test] fn returns_known_header() { let db = LightStorage::new_test(); let known_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); let header_by_hash = db.header(BlockId::Hash(known_hash)).unwrap().unwrap(); let header_by_number = db.header(BlockId::Number(0)).unwrap().unwrap(); assert_eq!(header_by_hash, header_by_number); } #[test] fn does_not_return_unknown_header() { let db = LightStorage::<Block>::new_test(); assert!(db.header(BlockId::Hash(Hash::from_low_u64_be(1))).unwrap().is_none()); assert!(db.header(BlockId::Number(0)).unwrap().is_none()); } #[test] fn returns_info() { let db = LightStorage::new_test(); let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); let info = db.info().unwrap(); assert_eq!(info.best_hash, genesis_hash); assert_eq!(info.best_number, 0); assert_eq!(info.genesis_hash, genesis_hash); let best_hash = insert_block(&db, HashMap::new(), || default_header(&genesis_hash, 1)); let info = db.info().unwrap(); assert_eq!(info.best_hash, best_hash); assert_eq!(info.best_number, 1); assert_eq!(info.genesis_hash, genesis_hash); } #[test] fn returns_block_status() { let db = LightStorage::new_test(); let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(db.status(BlockId::Hash(genesis_hash)).unwrap(), BlockStatus::InChain); assert_eq!(db.status(BlockId::Number(0)).unwrap(), BlockStatus::InChain); assert_eq!(db.status(BlockId::Hash(Hash::from_low_u64_be(1))).unwrap(), BlockStatus::Unknown); 
assert_eq!(db.status(BlockId::Number(1)).unwrap(), BlockStatus::Unknown); } #[test] fn returns_block_hash() { let db = LightStorage::new_test(); let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(db.hash(0).unwrap(), Some(genesis_hash)); assert_eq!(db.hash(1).unwrap(), None); } #[test] fn import_header_works() { let db = LightStorage::new_test(); let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(db.db.iter(columns::HEADER).count(), 1); assert_eq!(db.db.iter(columns::KEY_LOOKUP).count(), 2); let _ = insert_block(&db, HashMap::new(), || default_header(&genesis_hash, 1)); assert_eq!(db.db.iter(columns::HEADER).count(), 2); assert_eq!(db.db.iter(columns::KEY_LOOKUP).count(), 4); } #[test] fn finalized_ancient_headers_are_replaced_with_cht() { fn insert_headers<F: Fn(&Hash, u64) -> Header>(header_producer: F) -> LightStorage<Block> { let db = LightStorage::new_test(); // insert genesis block header (never pruned) let mut prev_hash = insert_final_block(&db, HashMap::new(), || header_producer(&Default::default(), 0)); // insert SIZE blocks && ensure that nothing is pruned for number in 0..cht::SIZE { prev_hash = insert_block(&db, HashMap::new(), || header_producer(&prev_hash, 1 + number)); } assert_eq!(db.db.iter(columns::HEADER).count(), (1 + cht::SIZE) as usize); assert_eq!(db.db.iter(columns::CHT).count(), 0); // insert next SIZE blocks && ensure that nothing is pruned for number in 0..cht::SIZE { prev_hash = insert_block(&db, HashMap::new(), || header_producer(&prev_hash, 1 + cht::SIZE + number)); } assert_eq!(db.db.iter(columns::HEADER).count(), (1 + cht::SIZE + cht::SIZE) as usize); assert_eq!(db.db.iter(columns::CHT).count(), 0); // insert block #{2 * cht::SIZE + 1} && check that new CHT is created + headers of this CHT are pruned // nothing is yet finalized, so nothing is pruned. prev_hash = insert_block(&db, HashMap::new(), || header_producer(&prev_hash, 1 + cht::SIZE + cht::SIZE)); assert_eq!(db.db.iter(columns::HEADER).count(), (2 + cht::SIZE + cht::SIZE) as usize); assert_eq!(db.db.iter(columns::CHT).count(), 0); // now finalize the block. 
for i in (0..(cht::SIZE + cht::SIZE)).map(|i| i + 1) { db.finalize_header(BlockId::Number(i)).unwrap(); } db.finalize_header(BlockId::Hash(prev_hash)).unwrap(); db } // when headers are created without changes tries roots let db = insert_headers(default_header); assert_eq!(db.db.iter(columns::HEADER).count(), (1 + cht::SIZE + 1) as usize); assert_eq!(db.db.iter(columns::KEY_LOOKUP).count(), (2 * (1 + cht::SIZE + 1)) as usize); assert_eq!(db.db.iter(columns::CHT).count(), 1); assert!((0..cht::SIZE).all(|i| db.header(BlockId::Number(1 + i)).unwrap().is_none())); assert!(db.header_cht_root(cht::SIZE, cht::SIZE / 2).is_ok()); assert!(db.header_cht_root(cht::SIZE, cht::SIZE + cht::SIZE / 2).is_err()); assert!(db.changes_trie_cht_root(cht::SIZE, cht::SIZE / 2).is_err()); assert!(db.changes_trie_cht_root(cht::SIZE, cht::SIZE + cht::SIZE / 2).is_err()); // when headers are created with changes tries roots let db = insert_headers(header_with_changes_trie); assert_eq!(db.db.iter(columns::HEADER).count(), (1 + cht::SIZE + 1) as usize); assert_eq!(db.db.iter(columns::CHT).count(), 2); assert!((0..cht::SIZE).all(|i| db.header(BlockId::Number(1 + i)).unwrap().is_none())); assert!(db.header_cht_root(cht::SIZE, cht::SIZE / 2).is_ok()); assert!(db.header_cht_root(cht::SIZE, cht::SIZE + cht::SIZE / 2).is_err()); assert!(db.changes_trie_cht_root(cht::SIZE, cht::SIZE / 2).is_ok()); assert!(db.changes_trie_cht_root(cht::SIZE, cht::SIZE + cht::SIZE / 2).is_err()); } #[test] fn get_cht_fails_for_genesis_block() { assert!(LightStorage::<Block>::new_test().header_cht_root(cht::SIZE, 0).is_err()); } #[test] fn get_cht_fails_for_non_existant_cht() { assert!(LightStorage::<Block>::new_test().header_cht_root(cht::SIZE, (cht::SIZE / 2) as u64).is_err()); } #[test] fn get_cht_works() { let db = LightStorage::new_test(); // insert 1 + SIZE + SIZE + 1 blocks so that CHT#0 is created let mut prev_hash = insert_final_block(&db, HashMap::new(), || header_with_changes_trie(&Default::default(), 0)); for i in 1..1 + cht::SIZE + cht::SIZE + 1 { prev_hash = insert_block(&db, HashMap::new(), || header_with_changes_trie(&prev_hash, i as u64)); db.finalize_header(BlockId::Hash(prev_hash)).unwrap(); } let cht_root_1 = db.header_cht_root(cht::SIZE, cht::start_number(cht::SIZE, 0)).unwrap(); let cht_root_2 = db.header_cht_root(cht::SIZE, (cht::start_number(cht::SIZE, 0) + cht::SIZE / 2) as u64).unwrap(); let cht_root_3 = db.header_cht_root(cht::SIZE, cht::end_number(cht::SIZE, 0)).unwrap(); assert_eq!(cht_root_1, cht_root_2); assert_eq!(cht_root_2, cht_root_3); let cht_root_1 = db.changes_trie_cht_root(cht::SIZE, cht::start_number(cht::SIZE, 0)).unwrap(); let cht_root_2 = db.changes_trie_cht_root(cht::SIZE, (cht::start_number(cht::SIZE, 0) + cht::SIZE / 2) as u64).unwrap(); let cht_root_3 = db.changes_trie_cht_root(cht::SIZE, cht::end_number(cht::SIZE, 0)).unwrap(); assert_eq!(cht_root_1, cht_root_2); assert_eq!(cht_root_2, cht_root_3); } #[test] fn tree_route_works() { let db = LightStorage::new_test(); let block0 = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); // fork from genesis: 3 prong. let a1 = insert_block(&db, HashMap::new(), || default_header(&block0, 1)); let a2 = insert_block(&db, HashMap::new(), || default_header(&a1, 2)); let a3 = insert_block(&db, HashMap::new(), || default_header(&a2, 3)); // fork from genesis: 2 prong. 
let b1 = insert_block(&db, HashMap::new(), || header_with_extrinsics_root(&block0, 1, Hash::from([1; 32]))); let b2 = insert_block(&db, HashMap::new(), || default_header(&b1, 2)); { let tree_route = ::client::blockchain::tree_route( &db, BlockId::Hash(a3), BlockId::Hash(b2) ).unwrap(); assert_eq!(tree_route.common_block().hash, block0); assert_eq!(tree_route.retracted().iter().map(|r| r.hash).collect::<Vec<_>>(), vec![a3, a2, a1]); assert_eq!(tree_route.enacted().iter().map(|r| r.hash).collect::<Vec<_>>(), vec![b1, b2]); } { let tree_route = ::client::blockchain::tree_route( &db, BlockId::Hash(a1), BlockId::Hash(a3), ).unwrap(); assert_eq!(tree_route.common_block().hash, a1); assert!(tree_route.retracted().is_empty()); assert_eq!(tree_route.enacted().iter().map(|r| r.hash).collect::<Vec<_>>(), vec![a2, a3]); } { let tree_route = ::client::blockchain::tree_route( &db, BlockId::Hash(a3), BlockId::Hash(a1), ).unwrap(); assert_eq!(tree_route.common_block().hash, a1); assert_eq!(tree_route.retracted().iter().map(|r| r.hash).collect::<Vec<_>>(), vec![a3, a2]); assert!(tree_route.enacted().is_empty()); } { let tree_route = ::client::blockchain::tree_route( &db, BlockId::Hash(a2), BlockId::Hash(a2), ).unwrap(); assert_eq!(tree_route.common_block().hash, a2); assert!(tree_route.retracted().is_empty()); assert!(tree_route.enacted().is_empty()); } } #[test] fn authorities_are_cached() { let db = LightStorage::new_test(); fn run_checks(db: &LightStorage<Block>, max: u64, checks: &[(u64, Option<Vec<AuthorityIdFor<Block>>>)]) { for (at, expected) in checks.iter().take_while(|(at, _)| *at <= max) { let actual = get_authorities(db.cache(), BlockId::Number(*at)); assert_eq!(*expected, actual); } } fn same_authorities() -> HashMap<well_known_cache_keys::Id, Vec<u8>> { HashMap::new() } fn make_authorities(authorities: Vec<AuthorityId>) -> HashMap<well_known_cache_keys::Id, Vec<u8>> { let mut map = HashMap::new(); map.insert(well_known_cache_keys::AUTHORITIES, authorities.encode()); map } fn get_authorities(cache: &BlockchainCache<Block>, at: BlockId<Block>) -> Option<Vec<AuthorityId>> { cache.get_at(&well_known_cache_keys::AUTHORITIES, &at).and_then(|val| Decode::decode(&mut &val[..])) } let auth1 = || AuthorityId::from_raw([1u8; 32]); let auth2 = || AuthorityId::from_raw([2u8; 32]); let auth3 = || AuthorityId::from_raw([3u8; 32]); let auth4 = || AuthorityId::from_raw([4u8; 32]); let auth5 = || AuthorityId::from_raw([5u8; 32]); let auth6 = || AuthorityId::from_raw([6u8; 32]); let (hash2, hash6) = { // first few blocks are instantly finalized // B0(None) -> B1(None) -> B2(1) -> B3(1) -> B4(1, 2) -> B5(1, 2) -> B6(1, 2) let checks = vec![ (0, None), (1, None), (2, Some(vec![auth1()])), (3, Some(vec![auth1()])), (4, Some(vec![auth1(), auth2()])), (5, Some(vec![auth1(), auth2()])), (6, Some(vec![auth1(), auth2()])), ]; let hash0 = insert_final_block(&db, same_authorities(), || default_header(&Default::default(), 0)); run_checks(&db, 0, &checks); let hash1 = insert_final_block(&db, same_authorities(), || default_header(&hash0, 1)); run_checks(&db, 1, &checks); let hash2 = insert_final_block(&db, make_authorities(vec![auth1()]), || default_header(&hash1, 2)); run_checks(&db, 2, &checks); let hash3 = insert_final_block(&db, make_authorities(vec![auth1()]), || default_header(&hash2, 3)); run_checks(&db, 3, &checks); let hash4 = insert_final_block(&db, make_authorities(vec![auth1(), auth2()]), || default_header(&hash3, 4)); run_checks(&db, 4, &checks); let hash5 = insert_final_block(&db, 
make_authorities(vec![auth1(), auth2()]), || default_header(&hash4, 5)); run_checks(&db, 5, &checks); let hash6 = insert_final_block(&db, same_authorities(), || default_header(&hash5, 6)); run_checks(&db, 6, &checks); (hash2, hash6) }; { // some older non-best blocks are inserted // ... -> B2(1) -> B2_1(1) -> B2_2(2) // => the cache ignores all writes before best finalized block let hash2_1 = insert_non_best_block(&db, make_authorities(vec![auth1()]), || default_header(&hash2, 3)); assert_eq!(None, get_authorities(db.cache(), BlockId::Hash(hash2_1))); let hash2_2 = insert_non_best_block(&db, make_authorities(vec![auth1(), auth2()]), || default_header(&hash2_1, 4)); assert_eq!(None, get_authorities(db.cache(), BlockId::Hash(hash2_2))); } let (hash7, hash8, hash6_1, hash6_2, hash6_1_1, hash6_1_2) = { // inserting non-finalized blocks // B6(None) -> B7(3) -> B8(3) // \> B6_1(4) -> B6_2(4) // \> B6_1_1(5) // \> B6_1_2(6) -> B6_1_3(7) let hash7 = insert_block(&db, make_authorities(vec![auth3()]), || default_header(&hash6, 7)); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); let hash8 = insert_block(&db, make_authorities(vec![auth3()]), || default_header(&hash7, 8)); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); let hash6_1 = insert_block(&db, make_authorities(vec![auth4()]), || default_header(&hash6, 7)); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); let hash6_1_1 = insert_non_best_block(&db, make_authorities(vec![auth5()]), || default_header(&hash6_1, 8)); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_1)), Some(vec![auth5()])); let hash6_1_2 = insert_non_best_block(&db, make_authorities(vec![auth6()]), || default_header(&hash6_1, 8)); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_1)), Some(vec![auth5()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_2)), Some(vec![auth6()])); let hash6_2 = insert_block(&db, make_authorities(vec![auth4()]), || default_header(&hash6_1, 8)); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash8)), 
Some(vec![auth3()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_1)), Some(vec![auth5()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_2)), Some(vec![auth6()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_2)), Some(vec![auth4()])); (hash7, hash8, hash6_1, hash6_2, hash6_1_1, hash6_1_2) }; { // finalize block hash6_1 db.finalize_header(BlockId::Hash(hash6_1)).unwrap(); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), None); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash8)), None); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_1)), Some(vec![auth5()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_2)), Some(vec![auth6()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_2)), Some(vec![auth4()])); // finalize block hash6_2 db.finalize_header(BlockId::Hash(hash6_2)).unwrap(); assert_eq!( get_authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]), ); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash7)), None); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash8)), None); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_1)), None); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_1_2)), None); assert_eq!(get_authorities(db.cache(), BlockId::Hash(hash6_2)), Some(vec![auth4()])); } } #[test] fn database_is_reopened() { let db = LightStorage::new_test(); let hash0 = insert_final_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(db.info().unwrap().best_hash, hash0); assert_eq!(db.header(BlockId::Hash(hash0)).unwrap().unwrap().hash(), hash0); let db = db.db; let db = LightStorage::from_kvdb(db).unwrap(); assert_eq!(db.info().unwrap().best_hash, hash0); assert_eq!(db.header(BlockId::Hash::<Block>(hash0)).unwrap().unwrap().hash(), hash0); } #[test] fn aux_store_works() { let db = LightStorage::<Block>::new_test(); // insert aux1 + aux2 using direct store access db.insert_aux(&[(&[1][..], &[101][..]), (&[2][..], &[102][..])], ::std::iter::empty()).unwrap(); // check aux values assert_eq!(db.get_aux(&[1]).unwrap(), Some(vec![101])); assert_eq!(db.get_aux(&[2]).unwrap(), Some(vec![102])); assert_eq!(db.get_aux(&[3]).unwrap(), None); // delete aux1 + insert aux3 using import operation db.import_header(default_header(&Default::default(), 0), HashMap::new(), NewBlockState::Best, vec![ (vec![3], Some(vec![103])), (vec![1], None), ]).unwrap(); // check aux values assert_eq!(db.get_aux(&[1]).unwrap(), None); assert_eq!(db.get_aux(&[2]).unwrap(), Some(vec![102])); assert_eq!(db.get_aux(&[3]).unwrap(), Some(vec![103])); } #[test] fn test_leaves_pruned_on_finality() { let db = LightStorage::<Block>::new_test(); let block0 = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); let block1_a = insert_block(&db, HashMap::new(), || default_header(&block0, 1)); let block1_b = insert_block(&db, HashMap::new(), || header_with_extrinsics_root(&block0, 1, [1; 32].into())); let block1_c = insert_block(&db, HashMap::new(), || header_with_extrinsics_root(&block0, 1, [2; 32].into())); assert_eq!(db.leaves.read().hashes(), vec![block1_a, block1_b, 
block1_c]); let block2_a = insert_block(&db, HashMap::new(), || default_header(&block1_a, 2)); let block2_b = insert_block(&db, HashMap::new(), || header_with_extrinsics_root(&block1_b, 2, [1; 32].into())); let block2_c = insert_block(&db, HashMap::new(), || header_with_extrinsics_root(&block1_b, 2, [2; 32].into())); assert_eq!(db.leaves.read().hashes(), vec![block2_a, block2_b, block2_c, block1_c]); db.finalize_header(BlockId::hash(block1_a)).unwrap(); db.finalize_header(BlockId::hash(block2_a)).unwrap(); // leaves at same height stay. Leaves at lower heights pruned. assert_eq!(db.leaves.read().hashes(), vec![block2_a, block2_b, block2_c]); } }
36.158046
172
0.671064
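The `cht_key` helper at the top of the test module above builds a 5-byte database key: one tag byte identifying the CHT type followed by a 4-byte block-number index produced by `utils::number_index_key`. As a rough, self-contained sketch of that layout (assuming the index is simply the block number encoded as a big-endian u32, which is an assumption rather than something the snippet confirms, and using a hypothetical name `cht_key_sketch`):

fn cht_key_sketch(cht_type: u8, block: u64) -> [u8; 5] {
    // One tag byte for the CHT kind (header vs. changes-trie CHT), then a
    // big-endian block-number index so keys sort by block number.
    let mut key = [cht_type; 5];
    key[1..].copy_from_slice(&(block as u32).to_be_bytes());
    key
}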
cca0e01460c86c04f6653e3443754b8c8c1f0552
1,474
#![no_std] #![no_main] use contract::{ contract_api::{account, runtime, system}, unwrap_or_revert::UnwrapOrRevert, }; use types::{ApiError, URef, U512}; #[repr(u16)] enum Error { TransferFromSourceToPayment = 0, TransferFromPaymentToSource, GetBalance, CheckBalance, } #[no_mangle] pub extern "C" fn call() { let pos_pointer = system::get_proof_of_stake(); let source_purse = account::get_main_purse(); let payment_amount: U512 = 100.into(); // amount passed to payment contract let payment_fund: U512 = runtime::get_arg(0) .unwrap_or_revert_with(ApiError::MissingArgument) .unwrap_or_revert_with(ApiError::InvalidArgument); let payment_purse: URef = runtime::call_contract(pos_pointer, ("get_payment_purse",)); // can deposit system::transfer_from_purse_to_purse(source_purse, payment_purse, payment_amount) .unwrap_or_revert_with(ApiError::User(Error::TransferFromSourceToPayment as u16)); let payment_balance = system::get_balance(payment_purse) .unwrap_or_revert_with(ApiError::User(Error::GetBalance as u16)); if payment_balance.saturating_sub(payment_fund) != payment_amount { runtime::revert(ApiError::User(Error::CheckBalance as u16)) } // cannot withdraw if system::transfer_from_purse_to_purse(payment_purse, source_purse, payment_amount).is_ok() { runtime::revert(ApiError::User(Error::TransferFromPaymentToSource as u16)); } }
32.755556
98
0.721167
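The payment contract above verifies its deposit by checking that the payment purse grew by exactly `payment_amount` on top of the already-present `payment_fund`, using a saturating subtraction so an unexpectedly small balance cannot underflow. A minimal sketch of that check with plain `u64` values (the helper name and the numbers are made up for illustration; the contract itself works with `U512`):

// Sketch of the balance check from the contract above, using u64 instead of U512.
fn deposit_looks_correct(payment_balance: u64, payment_fund: u64, payment_amount: u64) -> bool {
    // saturating_sub clamps at zero, so a balance smaller than the expected fund
    // simply makes the comparison fail instead of wrapping or panicking.
    payment_balance.saturating_sub(payment_fund) == payment_amount
}

fn main() {
    assert!(deposit_looks_correct(150, 50, 100));
    assert!(!deposit_looks_correct(40, 50, 100));
}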
56f61911b85f64f3393a8088b47bac7f8ff34645
17,228
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 //! Protocol used to ensure peer liveness //! //! The HealthChecker is responsible for ensuring liveness of all peers of a node. //! It does so by periodically selecting a random connected peer and sending a Ping probe. A //! healthy peer is expected to respond with a corresponding Pong message. //! //! If a certain number of successive liveness probes for a peer fail, the HealthChecker initiates a //! disconnect from the peer. It relies on ConnectivityManager or the remote peer to re-establish //! the connection. //! //! Future Work //! ----------- //! We can make a few other improvements to the health checker. These are: //! - Make the policy for interpreting ping failures pluggable //! - Use successful inbound pings as a sign of remote note being healthy //! - Ping a peer only in periods of no application-level communication with the peer use crate::{ constants::NETWORK_CHANNEL_SIZE, counters, error::NetworkError, logging::NetworkSchema, peer_manager::{ConnectionRequestSender, PeerManagerRequestSender}, protocols::{ network::{Event, NetworkEvents, NetworkSender, NewNetworkSender}, rpc::error::RpcError, }, ProtocolId, }; use bytes::Bytes; use channel::message_queues::QueueStyle; use futures::{ channel::oneshot, stream::{FusedStream, FuturesUnordered, Stream, StreamExt}, }; use libra_config::network_id::NetworkContext; use libra_logger::prelude::*; use libra_metrics::IntCounterVec; use libra_types::PeerId; use rand::{rngs::SmallRng, seq::SliceRandom, Rng, SeedableRng}; use serde::{Deserialize, Serialize}; use std::{collections::HashMap, sync::Arc, time::Duration}; pub mod builder; #[cfg(test)] mod test; /// The interface from Network to HealthChecker layer. /// /// `HealthCheckerNetworkEvents` is a `Stream` of `PeerManagerNotification` where the /// raw `Bytes` rpc messages are deserialized into /// `HealthCheckerMsg` types. `HealthCheckerNetworkEvents` is a thin wrapper /// around an `channel::Receiver<PeerManagerNotification>`. pub type HealthCheckerNetworkEvents = NetworkEvents<HealthCheckerMsg>; /// The interface from HealthChecker to Networking layer. /// /// This is a thin wrapper around a `NetworkSender<HealthCheckerMsg>`, so it is /// easy to clone and send off to a separate task. For example, the rpc requests /// return Futures that encapsulate the whole flow, from sending the request to /// remote, to finally receiving the response and deserializing. It therefore /// makes the most sense to make the rpc call on a separate async task, which /// requires the `HealthCheckerNetworkSender` to be `Clone` and `Send`. #[derive(Clone)] pub struct HealthCheckerNetworkSender { inner: NetworkSender<HealthCheckerMsg>, } /// Configuration for the network endpoints to support HealthChecker. pub fn network_endpoint_config() -> ( Vec<ProtocolId>, Vec<ProtocolId>, QueueStyle, usize, Option<&'static IntCounterVec>, ) { ( vec![ProtocolId::HealthCheckerRpc], vec![], QueueStyle::LIFO, NETWORK_CHANNEL_SIZE, Some(&counters::PENDING_HEALTH_CHECKER_NETWORK_EVENTS), ) } impl NewNetworkSender for HealthCheckerNetworkSender { fn new( peer_mgr_reqs_tx: PeerManagerRequestSender, connection_reqs_tx: ConnectionRequestSender, ) -> Self { Self { inner: NetworkSender::new(peer_mgr_reqs_tx, connection_reqs_tx), } } } impl HealthCheckerNetworkSender { /// Send a HealthChecker Ping RPC request to remote peer `recipient`. Returns /// the remote peer's future `Pong` reply. 
/// /// The rpc request can be canceled at any point by dropping the returned /// future. pub async fn send_rpc( &mut self, recipient: PeerId, req_msg: HealthCheckerMsg, timeout: Duration, ) -> Result<HealthCheckerMsg, RpcError> { let protocol = ProtocolId::HealthCheckerRpc; self.inner .send_rpc(recipient, protocol, req_msg, timeout) .await } pub async fn disconnect_peer(&mut self, peer_id: PeerId) -> Result<(), NetworkError> { self.inner.disconnect_peer(peer_id).await } } #[derive(Clone, Debug, Deserialize, Serialize)] pub enum HealthCheckerMsg { Ping(Ping), Pong(Pong), } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct Ping(u32); #[derive(Clone, Debug, Deserialize, Serialize)] pub struct Pong(u32); /// The actor performing health checks by running the Ping protocol pub struct HealthChecker<TTicker> { network_context: Arc<NetworkContext>, /// Ticker to trigger ping to a random peer. In production, the ticker is likely to be a /// fixed duration interval timer. ticker: TTicker, /// Channel to send requests to Network layer. network_tx: HealthCheckerNetworkSender, /// Channel to receive notifications from Network layer about new/lost connections. network_rx: HealthCheckerNetworkEvents, /// Map from connected peer to last round of successful ping, and number of failures since /// then. connected: HashMap<PeerId, (u64, u64)>, /// Random-number generator. rng: SmallRng, /// Ping timeout duration. ping_timeout: Duration, /// Number of successive ping failures we tolerate before declaring a node as unhealthy and /// disconnecting from it. In the future, this can be replaced with a more general failure /// detection policy. ping_failures_tolerated: u64, /// Counter incremented in each round of health checks round: u64, } impl<TTicker> HealthChecker<TTicker> where TTicker: Stream + FusedStream + Unpin, { /// Create a new instance of the [`HealthChecker`] actor. pub fn new( network_context: Arc<NetworkContext>, ticker: TTicker, network_tx: HealthCheckerNetworkSender, network_rx: HealthCheckerNetworkEvents, ping_timeout: Duration, ping_failures_tolerated: u64, ) -> Self { HealthChecker { network_context, ticker, network_tx, network_rx, connected: HashMap::new(), rng: SmallRng::from_entropy(), ping_timeout, ping_failures_tolerated, round: 0, } } pub async fn start(mut self) { let mut tick_handlers = FuturesUnordered::new(); loop { futures::select!
{ event = self.network_rx.select_next_some() => { match event { Ok(Event::NewPeer(peer_id, _origin)) => { self.connected.insert(peer_id, (self.round, 0)); }, Ok(Event::LostPeer(peer_id, _origin)) => { self.connected.remove(&peer_id); }, Ok(Event::RpcRequest((peer_id, msg, res_tx))) => { match msg { HealthCheckerMsg::Ping(ping) => self.handle_ping_request(peer_id, ping, res_tx), _ => { error!( SecurityEvent::InvalidHealthCheckerMsg, NetworkSchema::new(&self.network_context) .remote_peer(&peer_id), rpc_message = msg, "{} Unexpected RPC message from {}", self.network_context, peer_id ); }, }; } Ok(Event::Message(msg)) => { error!( SecurityEvent::InvalidNetworkEventHC, NetworkSchema::new(&self.network_context), "{} Unexpected network event: {:?}", self.network_context, msg ); debug_assert!(false, "Unexpected network event"); }, Err(err) => { error!( SecurityEvent::InvalidNetworkEventHC, NetworkSchema::new(&self.network_context) .debug_error(&err), "{} Unexpected network error: {}", self.network_context, err ); debug_assert!(false, "Unexpected network error"); } } } _ = self.ticker.select_next_some() => { self.round += 1; match self.sample_random_peer() { Some(peer_id) => { let nonce = self.sample_nonce(); debug!( NetworkSchema::new(&self.network_context), round = self.round, "{} Will ping: {} for round: {} nonce: {}", self.network_context, peer_id.short_str(), self.round, nonce ); tick_handlers.push( Self::ping_peer( self.network_context.clone(), self.network_tx.clone(), peer_id, self.round, nonce, self.ping_timeout.clone())); } None => { debug!( NetworkSchema::new(&self.network_context), round = self.round, "{} No connected peer to ping round: {}", self.network_context, self.round ); } } } res = tick_handlers.select_next_some() => { let (peer_id, round, nonce, ping_result) = res; self.handle_ping_response(peer_id, round, nonce, ping_result).await; } complete => { break; } } } error!( NetworkSchema::new(&self.network_context), "{} Health checker actor terminated", self.network_context ); } fn handle_ping_request( &mut self, peer_id: PeerId, ping: Ping, res_tx: oneshot::Sender<Result<Bytes, RpcError>>, ) { let message = match lcs::to_bytes(&HealthCheckerMsg::Pong(Pong(ping.0))) { Ok(msg) => msg, Err(e) => { warn!( NetworkSchema::new(&self.network_context).debug_error(&e), "{} Unable to serialize pong response: {}", self.network_context, e ); return; } }; debug!( NetworkSchema::new(&self.network_context).remote_peer(&peer_id), "{} Sending Pong response to peer: {} with nonce: {}", self.network_context, peer_id.short_str(), ping.0, ); let _ = res_tx.send(Ok(message.into())); } async fn handle_ping_response( &mut self, peer_id: PeerId, round: u64, req_nonce: u32, ping_result: Result<Pong, RpcError>, ) { match ping_result { Ok(pong) => { if pong.0 == req_nonce { debug!( NetworkSchema::new(&self.network_context).remote_peer(&peer_id), rount = round, "{} Ping successful for peer: {} round: {}", self.network_context, peer_id.short_str(), round ); // Update last successful ping to current round. self.connected .entry(peer_id) .and_modify(|(ref mut r, ref mut count)| { if round > *r { *r = round; *count = 0; } }); } else { error!( SecurityEvent::InvalidHealthCheckerMsg, NetworkSchema::new(&self.network_context).remote_peer(&peer_id), "{} Pong nonce doesn't match Ping nonce. 
Round: {}, Pong: {}, Ping: {}", self.network_context, round, pong.0, req_nonce ); debug_assert!(false, "Pong nonce doesn't match our challenge Ping nonce"); } } Err(err) => { warn!( NetworkSchema::new(&self.network_context) .remote_peer(&peer_id) .debug_error(&err), round = round, "{} Ping failed for peer: {} round: {} with error: {:?}", self.network_context, peer_id.short_str(), round, err ); match self.connected.get_mut(&peer_id) { None => { // If we are no longer connected to the peer, we ignore ping // failure. } Some((ref mut prev, ref mut failures)) => { // If this is the result of an older ping, we ignore it. if *prev > round { return; } // Increment num of failures. If the ping failures are now more than // `self.ping_failures_tolerated`, we disconnect from the node. // The HealthChecker only performs the disconnect. It relies on // ConnectivityManager or the remote peer to re-establish the connection. *failures += 1; if *failures > self.ping_failures_tolerated { info!( NetworkSchema::new(&self.network_context).remote_peer(&peer_id), "{} Disconnecting from peer: {}", self.network_context, peer_id.short_str() ); if let Err(err) = self.network_tx.disconnect_peer(peer_id).await { warn!( NetworkSchema::new(&self.network_context) .remote_peer(&peer_id) .debug_error(&err), "{} Failed to disconnect from peer: {} with error: {:?}", self.network_context, peer_id.short_str(), err ); } } } } } } } async fn ping_peer( network_context: Arc<NetworkContext>, mut network_tx: HealthCheckerNetworkSender, peer_id: PeerId, round: u64, nonce: u32, ping_timeout: Duration, ) -> (PeerId, u64, u32, Result<Pong, RpcError>) { debug!( NetworkSchema::new(&network_context).remote_peer(&peer_id), round = round, "{} Sending Ping request to peer: {} for round: {} nonce: {}", network_context, peer_id.short_str(), round, nonce ); let res_pong_msg = network_tx .send_rpc(peer_id, HealthCheckerMsg::Ping(Ping(nonce)), ping_timeout) .await .and_then(|msg| match msg { HealthCheckerMsg::Pong(res) => Ok(res), _ => Err(RpcError::InvalidRpcResponse), }); (peer_id, round, nonce, res_pong_msg) } fn sample_random_peer(&mut self) -> Option<PeerId> { let peers: Vec<_> = self.connected.keys().cloned().collect(); peers.choose(&mut self.rng).cloned() } fn sample_nonce(&mut self) -> u32 { self.rng.gen::<u32>() } }
39.154545
108
0.496749
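The health checker above keeps, for each connected peer, the last round that produced a successful ping together with the number of failures seen since then, and asks the network layer to disconnect once the failures exceed `ping_failures_tolerated`. A condensed sketch of that bookkeeping (peer ids reduced to `u64` and the actual disconnect left to the caller; both are simplifications of the real `handle_ping_response` logic):

use std::collections::HashMap;

/// Returns true when the caller should disconnect the peer, mirroring the
/// failure-counting policy described in the health checker above.
fn record_ping_failure(
    connected: &mut HashMap<u64, (u64, u64)>, // peer -> (last successful round, failures since)
    peer: u64,
    round: u64,
    failures_tolerated: u64,
) -> bool {
    match connected.get_mut(&peer) {
        // No longer connected: a late ping failure is simply ignored.
        None => false,
        Some((last_ok, failures)) => {
            // Results from rounds older than the last success are stale.
            if *last_ok > round {
                return false;
            }
            *failures += 1;
            *failures > failures_tolerated
        }
    }
}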
1db14cc00db2f3560370d22cded8a7a8a36a3115
4,153
use std::collections::HashSet; use std::io::{BufReader, BufWriter}; use anyhow::Result; use fs_err::File; use geo::prelude::Contains; use geo::{LineString, Point, Polygon}; use osmio::obj_types::ArcOSMObj; use osmio::{Node, OSMObj, OSMObjBase, OSMObjectType, OSMReader, OSMWriter, Relation, Way}; use geom::LonLat; pub fn run(pbf_path: String, clip_path: String, out_path: String) -> Result<()> { let boundary_pts = LonLat::read_osmosis_polygon(&clip_path)?; let raw_pts: Vec<(f64, f64)> = boundary_pts .into_iter() .map(|pt| (pt.x(), pt.y())) .collect(); let boundary = Polygon::new(LineString::from(raw_pts), Vec::new()); clip(&pbf_path, &boundary, &out_path) } fn clip(pbf_path: &str, boundary: &Polygon<f64>, out_path: &str) -> Result<()> { // TODO Maybe just have a single map with RcOSMObj. But then the order we write will be wrong. let mut way_node_ids: HashSet<i64> = HashSet::new(); let mut way_ids: HashSet<i64> = HashSet::new(); let mut relation_ids: HashSet<i64> = HashSet::new(); { // First Pass: accumulate the IDs we want to include in the output let mut reader = osmio::pbf::PBFReader::new(BufReader::new(File::open(pbf_path)?)); let mut node_ids_within_boundary: HashSet<i64> = HashSet::new(); for obj in reader.objects() { match obj.object_type() { OSMObjectType::Node => { let node = obj.into_node().unwrap(); if let Some(lat_lon) = node.lat_lon() { if boundary.contains(&to_pt(lat_lon)) { node_ids_within_boundary.insert(node.id()); } } } OSMObjectType::Way => { // Assume all nodes appear before any way. let way = obj.into_way().unwrap(); if way .nodes() .iter() .any(|id| node_ids_within_boundary.contains(id)) { way_ids.insert(way.id()); // To properly compute border nodes, we include all nodes of ways that are // at least partially in the boundary. way_node_ids.extend(way.nodes().iter().cloned()); } } OSMObjectType::Relation => { let relation = obj.into_relation().unwrap(); if relation.members().any(|(obj_type, id, _)| { (obj_type == OSMObjectType::Node && node_ids_within_boundary.contains(&id)) || (obj_type == OSMObjectType::Way && way_ids.contains(&id)) || (obj_type == OSMObjectType::Relation && relation_ids.contains(&id)) }) { relation_ids.insert(relation.id()); } } } } } let mut writer = osmio::xml::XMLWriter::new(BufWriter::new(File::create(out_path)?)); // Second Pass: write the feature for each ID accumulated in the first pass let mut reader = osmio::pbf::PBFReader::new(BufReader::new(File::open(pbf_path)?)); for obj in reader.objects() { match &obj { ArcOSMObj::Node(node) => { if way_node_ids.contains(&node.id()) { writer.write_obj(&obj)?; } } ArcOSMObj::Way(way) => { if way_ids.contains(&way.id()) { writer.write_obj(&obj)?; } } ArcOSMObj::Relation(relation) => { if relation_ids.contains(&relation.id()) { writer.write_obj(&obj)?; } } } } // Don't call write.close() -- it happens when writer gets dropped, and the implementation // isn't idempotent. Ok(()) } fn to_pt(pair: (osmio::Lat, osmio::Lon)) -> Point<f64> { // Note our polygon uses (lon, lat) (pair.1.into(), pair.0.into()).into() }
39.932692
99
0.516013
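The clipping pass above decides membership in two steps: a way is kept if any of its nodes lies inside the boundary polygon, and every node of a kept way is then retained so border nodes are still available when the second pass writes the output. A stripped-down sketch of that rule, with ways reduced to plain id/node-id pairs (a simplification of the osmio types used above; `WaySketch` and `ways_to_keep` are hypothetical names):

use std::collections::HashSet;

struct WaySketch {
    id: i64,
    node_ids: Vec<i64>,
}

// Returns (ids of ways to keep, ids of every node referenced by a kept way).
fn ways_to_keep(ways: &[WaySketch], nodes_inside: &HashSet<i64>) -> (HashSet<i64>, HashSet<i64>) {
    let mut way_ids = HashSet::new();
    let mut way_node_ids = HashSet::new();
    for way in ways {
        if way.node_ids.iter().any(|id| nodes_inside.contains(id)) {
            way_ids.insert(way.id);
            // Keep all of the way's nodes, even those outside the boundary,
            // so the way can be written out in full during the second pass.
            way_node_ids.extend(way.node_ids.iter().copied());
        }
    }
    (way_ids, way_node_ids)
}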
1edb208e7b9c5c439570c085055001925a4aa7c2
1,035
//! Sample capsule for Tock course at SOSP. It handles an alarm to //! sample the ambient light sensor. #![feature(const_fn,const_cell_new)] #![no_std] #[allow(unused_imports)] #[macro_use(debug)] extern crate kernel; use kernel::hil::time::{self, Alarm, Frequency}; use kernel::hil::sensors::{AmbientLight, AmbientLightClient}; pub struct Sosp<'a, A: Alarm + 'a> { alarm: &'a A, light: &'a AmbientLight, } impl<'a, A: Alarm> Sosp<'a, A> { pub fn new(alarm: &'a A, light: &'a AmbientLight) -> Sosp<'a, A> { Sosp { alarm: alarm, light: light, } } pub fn start(&self) { self.alarm.set_alarm( self.alarm.now().wrapping_add(<A::Frequency>::frequency())); } } impl<'a, A: Alarm> time::Client for Sosp<'a, A> { fn fired(&self) { self.light.read_light_intensity(); } } impl<'a, A: Alarm> AmbientLightClient for Sosp<'a, A> { fn callback(&self, lux: usize) { debug!("Light reading: {}", lux); self.start(); } }
22.5
72
0.584541
8f35a5becfd4de5f61361cdea1d4d1921487d6b3
83
jasarsoft.TestDugmeBrojac2 jasarsoft.DugmeBrojacOkvir jasarsoft.DugmeBrojacOkvir$1
20.75
28
0.915663
1da50171525fcdd88d7f352aa559f9b66ad7f500
71,090
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use std::path::Path; use crate::ast::{Ast, AstBuilder, AstKey, ObjectEntry, Primitive, RequestParameters}; use crate::constants::CODEGEN_CONSTANTS; use common::{NamedItem, WithLocation}; use graphql_ir::{ Argument, Condition, ConditionValue, ConstantValue, Directive, FragmentDefinition, FragmentSpread, InlineFragment, LinkedField, OperationDefinition, ScalarField, Selection, Value, VariableDefinition, }; use graphql_syntax::OperationKind; use interner::{Intern, StringKey}; use md5::{Digest, Md5}; use relay_transforms::{ extract_connection_metadata_from_directive, extract_handle_field_directives, extract_refetch_metadata_from_directive, extract_values_from_handle_field_directive, extract_variable_name, generate_abstract_type_refinement_key, remove_directive, ConnectionConstants, ConnectionMetadata, DeferDirective, RelayDirective, StreamDirective, ACTION_ARGUMENT, CLIENT_EXTENSION_DIRECTIVE_NAME, DEFER_STREAM_CONSTANTS, DIRECTIVE_SPLIT_OPERATION, INLINE_DATA_CONSTANTS, INTERNAL_METADATA_DIRECTIVE, MATCH_CONSTANTS, NO_INLINE_DIRECTIVE_NAME, PATH_METADATA_ARGUMENT, REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY, RELAY_ACTOR_CHANGE_DIRECTIVE_FOR_CODEGEN, RELAY_CLIENT_COMPONENT_MODULE_ID_ARGUMENT_NAME, RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME, RELAY_RESOLVER_IMPORT_PATH_ARGUMENT_NAME, RELAY_RESOLVER_METADATA_DIRECTIVE_NAME, RELAY_RESOLVER_METADATA_FIELD_ALIAS, RELAY_RESOLVER_METADATA_FIELD_NAME, REQUIRED_METADATA_KEY, TYPE_DISCRIMINATOR_DIRECTIVE_NAME, }; use schema::{SDLSchema, Schema}; pub fn build_request_params_ast_key( schema: &SDLSchema, request_parameters: RequestParameters, ast_builder: &mut AstBuilder, operation: &OperationDefinition, ) -> AstKey { let mut operation_builder = CodegenBuilder::new(schema, CodegenVariant::Normalization, ast_builder); operation_builder.build_request_parameters(operation, request_parameters) } pub fn build_request( schema: &SDLSchema, ast_builder: &mut AstBuilder, operation: &OperationDefinition, fragment: &FragmentDefinition, request_parameters: AstKey, ) -> AstKey { let mut operation_builder = CodegenBuilder::new(schema, CodegenVariant::Normalization, ast_builder); let operation = Primitive::Key(operation_builder.build_operation(operation)); let mut fragment_builder = CodegenBuilder::new(schema, CodegenVariant::Reader, ast_builder); let fragment = Primitive::Key(fragment_builder.build_fragment(fragment, true)); ast_builder.intern(Ast::Object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.fragment, value: fragment, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.request), }, ObjectEntry { key: CODEGEN_CONSTANTS.operation, value: operation, }, ObjectEntry { key: CODEGEN_CONSTANTS.params, value: Primitive::Key(request_parameters), }, ])) } pub fn build_request_params(operation: &OperationDefinition) -> RequestParameters { RequestParameters { name: operation.name.item, operation_kind: operation.kind, metadata: Default::default(), id: None, text: None, } } pub fn build_operation( schema: &SDLSchema, ast_builder: &mut AstBuilder, operation: &OperationDefinition, ) -> AstKey { let mut builder = CodegenBuilder::new(schema, CodegenVariant::Normalization, ast_builder); builder.build_operation(operation) } pub fn build_fragment( schema: &SDLSchema, ast_builder: &mut AstBuilder, fragment: &FragmentDefinition, ) -> AstKey { let mut builder = 
CodegenBuilder::new(schema, CodegenVariant::Reader, ast_builder); builder.build_fragment(fragment, false) } struct CodegenBuilder<'schema, 'builder> { connection_constants: ConnectionConstants, schema: &'schema SDLSchema, variant: CodegenVariant, ast_builder: &'builder mut AstBuilder, } #[derive(PartialEq)] enum CodegenVariant { Reader, Normalization, } impl<'schema, 'builder> CodegenBuilder<'schema, 'builder> { fn new( schema: &'schema SDLSchema, variant: CodegenVariant, ast_builder: &'builder mut AstBuilder, ) -> Self { Self { connection_constants: Default::default(), schema, variant, ast_builder, } } fn object(&mut self, object: Vec<ObjectEntry>) -> AstKey { self.ast_builder.intern(Ast::Object(object)) } fn array(&mut self, array: Vec<Primitive>) -> AstKey { self.ast_builder.intern(Ast::Array(array)) } fn build_operation(&mut self, operation: &OperationDefinition) -> AstKey { match operation.directives.named(*DIRECTIVE_SPLIT_OPERATION) { Some(_split_directive) => { let metadata = Primitive::Key(self.object(vec![])); let selections = self.build_selections(operation.selections.iter()); let mut fields = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.split_operation), }, ObjectEntry { key: CODEGEN_CONSTANTS.metadata, value: metadata, }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(operation.name.item), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ]; if !operation.variable_definitions.is_empty() { let argument_definitions = self.build_operation_variable_definitions(&operation.variable_definitions); fields.insert( 0, ObjectEntry { key: CODEGEN_CONSTANTS.argument_definitions, value: argument_definitions, }, ); } self.object(fields) } None => { let argument_definitions = self.build_operation_variable_definitions(&operation.variable_definitions); let selections = self.build_selections(operation.selections.iter()); self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.argument_definitions, value: argument_definitions, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.operation_value), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(operation.name.item), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ]) } } } fn build_fragment(&mut self, fragment: &FragmentDefinition, skip_metadata: bool) -> AstKey { if fragment .directives .named(INLINE_DATA_CONSTANTS.directive_name) .is_some() { return self.build_inline_data_fragment(fragment); } let object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.argument_definitions, value: self.build_fragment_variable_definitions( &fragment.variable_definitions, &fragment.used_global_variables, ), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.fragment_value), }, ObjectEntry { key: CODEGEN_CONSTANTS.metadata, value: if skip_metadata { Primitive::Null } else { self.build_fragment_metadata(fragment) }, }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(fragment.name.item), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: self.build_selections(fragment.selections.iter()), }, ObjectEntry { key: CODEGEN_CONSTANTS.type_, value: Primitive::String(self.schema.get_type_name(fragment.type_condition)), }, ObjectEntry { key: CODEGEN_CONSTANTS.abstract_key, value: if fragment.type_condition.is_abstract_type() { Primitive::String(generate_abstract_type_refinement_key( self.schema, fragment.type_condition, )) } else { 
Primitive::Null }, }, ]; self.object(object) } fn build_fragment_metadata(&mut self, fragment: &FragmentDefinition) -> Primitive { let connection_metadata = extract_connection_metadata_from_directive(&fragment.directives); let mut plural = false; let mut unmask = false; if let Some(relay_directive) = RelayDirective::find(&fragment.directives) { plural = relay_directive.plural; unmask = relay_directive.unmask; }; let mut metadata = vec![]; if let Some(connection_metadata) = &connection_metadata { metadata.push(self.build_connection_metadata(connection_metadata)) } if unmask { metadata.push(ObjectEntry { key: CODEGEN_CONSTANTS.mask, value: Primitive::Bool(false), }) } if plural { metadata.push(ObjectEntry { key: CODEGEN_CONSTANTS.plural, value: Primitive::Bool(true), }) } if let Some(refetch_metadata) = extract_refetch_metadata_from_directive(&fragment.directives) { let refetch_connection = if let Some(connection_metadata) = connection_metadata { let metadata = &connection_metadata[0]; // Validated in `transform_refetchable` let connection_object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.forward, value: if let Some(first) = metadata.first { Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.count, value: Primitive::String(first), }, ObjectEntry { key: CODEGEN_CONSTANTS.cursor, value: Primitive::string_or_null(metadata.after), }, ])) } else { Primitive::Null }, }, ObjectEntry { key: CODEGEN_CONSTANTS.backward, value: if let Some(last) = metadata.last { Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.count, value: Primitive::String(last), }, ObjectEntry { key: CODEGEN_CONSTANTS.cursor, value: Primitive::string_or_null(metadata.before), }, ])) } else { Primitive::Null }, }, ObjectEntry { key: CODEGEN_CONSTANTS.path, value: Primitive::Key( self.array( metadata .path .as_ref() .expect("Expected path to exist") .iter() .cloned() .map(Primitive::String) .collect(), ), ), }, ]; Primitive::Key(self.object(connection_object)) } else { Primitive::Null }; let mut refetch_object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.connection, value: refetch_connection, }, ObjectEntry { key: CODEGEN_CONSTANTS.fragment_path_in_result, value: Primitive::Key( self.array( refetch_metadata .path .into_iter() .map(Primitive::String) .collect(), ), ), }, ObjectEntry { key: CODEGEN_CONSTANTS.operation, value: Primitive::GraphQLModuleDependency(refetch_metadata.operation_name), }, ]; if let Some(identifier_field) = refetch_metadata.identifier_field { refetch_object.push(ObjectEntry { key: CODEGEN_CONSTANTS.identifier_field, value: Primitive::String(identifier_field), }); } metadata.push(ObjectEntry { key: CODEGEN_CONSTANTS.refetch, value: Primitive::Key(self.object(refetch_object)), }) } if metadata.is_empty() { Primitive::Null } else { Primitive::Key(self.object(metadata)) } } fn build_connection_metadata( &mut self, connection_metadata: &[ConnectionMetadata], ) -> ObjectEntry { let array = connection_metadata .iter() .map(|metadata| { let path = match &metadata.path { None => Primitive::Null, Some(path) => Primitive::Key( self.array(path.iter().cloned().map(Primitive::String).collect()), ), }; let (count, cursor) = if metadata.direction == self.connection_constants.direction_forward { (metadata.first, metadata.after) } else if metadata.direction == self.connection_constants.direction_backward { (metadata.last, metadata.before) } else { (None, None) }; let mut object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.count, value: Primitive::string_or_null(count), }, ObjectEntry { 
key: CODEGEN_CONSTANTS.cursor, value: Primitive::string_or_null(cursor), }, ObjectEntry { key: CODEGEN_CONSTANTS.direction, value: Primitive::String(metadata.direction), }, ObjectEntry { key: CODEGEN_CONSTANTS.path, value: path, }, ]; if metadata.is_stream_connection { object.push(ObjectEntry { key: DEFER_STREAM_CONSTANTS.stream_name, value: Primitive::Bool(true), }) } Primitive::Key(self.object(object)) }) .collect::<Vec<_>>(); ObjectEntry { key: CODEGEN_CONSTANTS.connection, value: Primitive::Key(self.array(array)), } } fn build_inline_data_fragment(&mut self, fragment: &FragmentDefinition) -> AstKey { let object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.inline_data_fragment), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(fragment.name.item), }, ]; self.object(object) } fn build_selections<'a, Selections>(&mut self, selections: Selections) -> Primitive where Selections: Iterator<Item = &'a Selection>, { let selections = selections .flat_map(|selection| self.build_selections_from_selection(selection)) .collect(); Primitive::Key(self.array(selections)) } fn build_selections_from_selection(&mut self, selection: &Selection) -> Vec<Primitive> { match selection { Selection::Condition(condition) => vec![self.build_condition(&condition)], Selection::FragmentSpread(frag_spread) => { vec![self.build_fragment_spread(&frag_spread)] } Selection::InlineFragment(inline_fragment) => { let defer = inline_fragment .directives .named(DEFER_STREAM_CONSTANTS.defer_name); if let Some(defer) = defer { vec![self.build_defer(&inline_fragment, defer)] } else if let Some(inline_data_directive) = inline_fragment .directives .named(INLINE_DATA_CONSTANTS.internal_directive_name) { // If inline fragment has @__inline directive (created by inline_data_fragment transform) // we will return selection wrapped with InlineDataFragmentSpread vec![self.build_inline_data_fragment_spread( &inline_fragment, &inline_data_directive, )] } else if let Some(match_directive) = inline_fragment .directives .named(MATCH_CONSTANTS.custom_module_directive_name) { self.build_module_import_selections(match_directive) } else if inline_fragment .directives .named(*RELAY_ACTOR_CHANGE_DIRECTIVE_FOR_CODEGEN) .is_some() { vec![self.build_actor_change(&inline_fragment)] } else { vec![self.build_inline_fragment(&inline_fragment)] } } Selection::LinkedField(field) => { let stream = field.directives.named(DEFER_STREAM_CONSTANTS.stream_name); match stream { Some(stream) => vec![self.build_stream(&field, stream)], None => self.build_linked_field_and_handles(field), } } Selection::ScalarField(field) => { if field.directives.len() == 1 && field.directives[0].name.item == *TYPE_DISCRIMINATOR_DIRECTIVE_NAME { match self.variant { CodegenVariant::Reader => vec![], CodegenVariant::Normalization => vec![self.build_type_discriminator(field)], } } else { self.build_scalar_field_and_handles(field) } } } } fn build_type_discriminator(&mut self, field: &ScalarField) -> Primitive { Primitive::Key(self.object(vec![ ObjectEntry{key:CODEGEN_CONSTANTS.kind,value: Primitive::String(CODEGEN_CONSTANTS.type_discriminator)}, ObjectEntry{key: CODEGEN_CONSTANTS.abstract_key,value: Primitive::String(field.alias.expect( "Expected the type discriminator field to contain the abstract key alias.", ).item), }, ])) } fn build_scalar_field_and_handles(&mut self, field: &ScalarField) -> Vec<Primitive> { match self.variant { CodegenVariant::Reader => vec![self.build_scalar_field(field)], 
CodegenVariant::Normalization => { let mut result = vec![self.build_scalar_field(field)]; self.build_scalar_handles(&mut result, field); result } } } fn build_required_field( &mut self, required_directive: &Directive, primitive: Primitive, ) -> Primitive { Primitive::Key(self.object(vec![ ObjectEntry{key: CODEGEN_CONSTANTS.kind,value: Primitive::String(CODEGEN_CONSTANTS.required_field), }, ObjectEntry{key:CODEGEN_CONSTANTS.field,value: primitive}, ObjectEntry{key: CODEGEN_CONSTANTS.action,value: Primitive::String( required_directive .arguments .named(*ACTION_ARGUMENT) .unwrap().value.item.get_string_literal().unwrap() ), }, ObjectEntry{key: CODEGEN_CONSTANTS.path,value: Primitive::String( required_directive .arguments .named(*PATH_METADATA_ARGUMENT) .unwrap().value.item.get_string_literal().unwrap() ), }, ])) } fn build_scalar_field(&mut self, field: &ScalarField) -> Primitive { let schema_field = self.schema.field(field.definition.item); let (name, alias) = self.build_field_name_and_alias(schema_field.name, field.alias, &field.directives); let args = self.build_arguments(&field.arguments); let kind = match field .directives .named(*REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY) { Some(_flight_directive) => Primitive::String(CODEGEN_CONSTANTS.flight_field), None => Primitive::String(CODEGEN_CONSTANTS.scalar_field), }; let primitive = Primitive::Key(self.object(vec![ build_alias(alias, name), ObjectEntry { key: CODEGEN_CONSTANTS.args, value: match args { None => Primitive::Null, Some(key) => Primitive::Key(key), }, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: kind, }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(name), }, ObjectEntry { key: CODEGEN_CONSTANTS.storage_key, value: match args { None => Primitive::Null, Some(key) => { if is_static_storage_key_available(&field.arguments) { Primitive::StorageKey(name, key) } else { Primitive::Null } } }, }, ])); match field.directives.named(*REQUIRED_METADATA_KEY) { Some(required_directive) => self.build_required_field(required_directive, primitive), None => primitive, } } fn build_scalar_handles(&mut self, result: &mut Vec<Primitive>, field: &ScalarField) { let schema_field = self.schema.field(field.definition.item); let field_name = schema_field.name; let handle_field_directives = extract_handle_field_directives(&field.directives); for directive in handle_field_directives { let values = extract_values_from_handle_field_directive(&directive); let filters = match values.filters { None => Primitive::Null, Some(strs) => { Primitive::Key(self.array(strs.into_iter().map(Primitive::String).collect())) } }; let arguments = match self.build_arguments(&field.arguments) { None => Primitive::Null, Some(key) => Primitive::Key(key), }; let mut object = vec![ build_alias(field.alias.map(|a| a.item), field_name), ObjectEntry { key: CODEGEN_CONSTANTS.args, value: arguments, }, ObjectEntry { key: CODEGEN_CONSTANTS.filters, value: filters, }, ObjectEntry { key: CODEGEN_CONSTANTS.handle, value: Primitive::String(values.handle), }, ObjectEntry { key: CODEGEN_CONSTANTS.key, value: Primitive::String(values.key), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.scalar_handle), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(field_name), }, ]; if let Some(handle_args) = values.handle_args { let args = self.build_arguments(&handle_args); if let Some(args) = args { object.push(ObjectEntry { key: CODEGEN_CONSTANTS.handle_args, value: Primitive::Key(args), }); } }; 
result.push(Primitive::Key(self.object(object))); } } fn build_linked_field_and_handles(&mut self, field: &LinkedField) -> Vec<Primitive> { match self.variant { CodegenVariant::Reader => vec![self.build_linked_field(field)], CodegenVariant::Normalization => { let mut result = vec![self.build_linked_field(field)]; self.build_linked_handles(&mut result, field); result } } } fn build_linked_field(&mut self, field: &LinkedField) -> Primitive { let schema_field = self.schema.field(field.definition.item); let (name, alias) = self.build_field_name_and_alias(schema_field.name, field.alias, &field.directives); let args = self.build_arguments(&field.arguments); let selections = self.build_selections(field.selections.iter()); let primitive = Primitive::Key(self.object(vec![ build_alias(alias, name), ObjectEntry { key: CODEGEN_CONSTANTS.args, value: match args { None => Primitive::Null, Some(key) => Primitive::Key(key), }, }, ObjectEntry { key: CODEGEN_CONSTANTS.concrete_type, value: if schema_field.type_.inner().is_abstract_type() { Primitive::Null } else { Primitive::String(self.schema.get_type_name(schema_field.type_.inner())) }, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.linked_field), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(name), }, ObjectEntry { key: CODEGEN_CONSTANTS.plural, value: Primitive::Bool(schema_field.type_.is_list()), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ObjectEntry { key: CODEGEN_CONSTANTS.storage_key, value: match args { None => Primitive::Null, Some(key) => { if is_static_storage_key_available(&field.arguments) { Primitive::StorageKey(name, key) } else { Primitive::Null } } }, }, ])); match field.directives.named(*REQUIRED_METADATA_KEY) { Some(required_directive) => self.build_required_field(required_directive, primitive), None => primitive, } } fn build_linked_handles(&mut self, result: &mut Vec<Primitive>, field: &LinkedField) { let schema_field = self.schema.field(field.definition.item); let field_name = schema_field.name; let handle_field_directives = extract_handle_field_directives(&field.directives); for directive in handle_field_directives { let values = extract_values_from_handle_field_directive(&directive); let dynamic_key = match &values.dynamic_key { Some(val) => self.build_argument(CODEGEN_CONSTANTS.dynamic_key_argument, val), None => None, }; let filters = match values.filters { None => Primitive::Null, Some(strings) => { Primitive::Key(self.array(strings.into_iter().map(Primitive::String).collect())) } }; let mut object = vec![ build_alias(field.alias.map(|a| a.item), field_name), ObjectEntry { key: CODEGEN_CONSTANTS.args, value: match self.build_arguments(&field.arguments) { None => Primitive::Null, Some(key) => Primitive::Key(key), }, }, ObjectEntry { key: CODEGEN_CONSTANTS.filters, value: filters, }, ObjectEntry { key: CODEGEN_CONSTANTS.handle, value: Primitive::String(values.handle), }, ObjectEntry { key: CODEGEN_CONSTANTS.key, value: Primitive::String(values.key), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.linked_handle), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(field_name), }, ]; if let Some(dynamic_key) = dynamic_key { object.push(ObjectEntry { key: CODEGEN_CONSTANTS.dynamic_key, value: Primitive::Key(dynamic_key), }); }; if let Some(handle_args) = values.handle_args { let args = self.build_arguments(&handle_args); if let Some(args) = args { object.push(ObjectEntry { key: 
CODEGEN_CONSTANTS.handle_args, value: Primitive::Key(args), }); } }; result.push(Primitive::Key(self.object(object))) } } fn build_field_name_and_alias( &self, mut name: StringKey, alias: Option<WithLocation<StringKey>>, directives: &[Directive], ) -> (StringKey, Option<StringKey>) { let mut alias = alias.map(|alias| alias.item); if self.variant == CodegenVariant::Reader { let mut handle_field_directives = extract_handle_field_directives(directives); if let Some(handle_field_directive) = handle_field_directives.next() { if let Some(other_handle_field_directive) = handle_field_directives.next() { panic!( "Expected at most one handle directive, got `{:?}` and `{:?}`.", handle_field_directive, other_handle_field_directive ); } let values = extract_values_from_handle_field_directive(&handle_field_directive); alias = alias.or(Some(name)); name = if values.key == CODEGEN_CONSTANTS.default_handle_key { format!("__{}_{}", name, values.handle).intern() } else { format!("__{}_{}", values.key, values.handle).intern() } } } (name, alias) } fn build_fragment_spread(&mut self, frag_spread: &FragmentSpread) -> Primitive { if frag_spread .directives .named(*NO_INLINE_DIRECTIVE_NAME) .is_some() { return self.build_normalization_fragment_spread(frag_spread); } if self.variant == CodegenVariant::Normalization && frag_spread .directives .named(*RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME) .is_some() { return self.build_relay_client_component_fragment_spread(frag_spread); } let args = self.build_arguments(&frag_spread.arguments); let primitive = Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.args, value: match args { None => Primitive::Null, Some(key) => Primitive::Key(key), }, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.fragment_spread), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(frag_spread.fragment.item), }, ])); match frag_spread .directives .named(*RELAY_RESOLVER_METADATA_DIRECTIVE_NAME) { Some(directive) => self.build_relay_resolver(primitive, directive), None => primitive, } } fn build_relay_resolver( &mut self, fragment_primitive: Primitive, directive: &Directive, ) -> Primitive { let module = directive .arguments .named(*RELAY_RESOLVER_IMPORT_PATH_ARGUMENT_NAME) .unwrap() .value .item .expect_string_literal() .to_string() .intern(); let field_name = directive .arguments .named(*RELAY_RESOLVER_METADATA_FIELD_NAME) .unwrap() .value .item .expect_string_literal() .to_string() .intern(); let field_alias = directive .arguments .named(*RELAY_RESOLVER_METADATA_FIELD_ALIAS) .map(|arg| arg.value.item.expect_string_literal().to_string().intern()); // TODO(T86853359): Support non-haste environments when generating Relay Resolver RederAST let haste_import_name = Path::new(&module.to_string()) .file_stem() .unwrap() .to_string_lossy() .intern(); Primitive::Key(self.object(vec![ build_alias(field_alias, field_name), ObjectEntry { key: CODEGEN_CONSTANTS.fragment, value: fragment_primitive, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.relay_resolver), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(field_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.resolver_module, value: Primitive::JSModuleDependency(haste_import_name), }, ])) } fn build_normalization_fragment_spread(&mut self, frag_spread: &FragmentSpread) -> Primitive { let args = self.build_arguments(&frag_spread.arguments); Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.args, 
value: match args { None => Primitive::Null, Some(key) => Primitive::Key(key), }, }, ObjectEntry { key: CODEGEN_CONSTANTS.fragment, value: Primitive::GraphQLModuleDependency(frag_spread.fragment.item), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.fragment_spread), }, ])) } fn build_relay_client_component_fragment_spread( &mut self, frag_spread: &FragmentSpread, ) -> Primitive { let normalization_name = frag_spread .directives .named(*RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME) .unwrap() .arguments .named(*RELAY_CLIENT_COMPONENT_MODULE_ID_ARGUMENT_NAME) .unwrap() .value .item .expect_string_literal() .to_string() .trim_end_matches(".graphql") .intern(); Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.fragment, value: Primitive::GraphQLModuleDependency(normalization_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.client_component), }, ])) } fn build_defer(&mut self, inline_fragment: &InlineFragment, defer: &Directive) -> Primitive { match self.variant { CodegenVariant::Reader => self.build_defer_reader(inline_fragment), CodegenVariant::Normalization => self.build_defer_normalization(inline_fragment, defer), } } fn build_defer_reader(&mut self, inline_fragment: &InlineFragment) -> Primitive { let next_selections = if let Selection::FragmentSpread(frag_spread) = &inline_fragment.selections[0] { let next_selections = vec![self.build_fragment_spread(frag_spread)]; Primitive::Key(self.array(next_selections)) } else { self.build_selections(inline_fragment.selections.iter()) }; Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.defer), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: next_selections, }, ])) } fn build_defer_normalization( &mut self, inline_fragment: &InlineFragment, defer: &Directive, ) -> Primitive { let next_selections = self.build_selections(inline_fragment.selections.iter()); let DeferDirective { if_arg, label_arg } = DeferDirective::from(defer); let if_variable_name = extract_variable_name(if_arg); let label_name = label_arg.unwrap().value.item.expect_string_literal(); Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.if_, value: Primitive::string_or_null(if_variable_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.defer), }, ObjectEntry { key: CODEGEN_CONSTANTS.label, value: Primitive::String(label_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: next_selections, }, ])) } fn build_stream(&mut self, linked_field: &LinkedField, stream: &Directive) -> Primitive { let next_selections = self.build_linked_field_and_handles(&LinkedField { directives: remove_directive( &linked_field.directives, DEFER_STREAM_CONSTANTS.stream_name, ), ..linked_field.to_owned() }); let next_selections = Primitive::Key(self.array(next_selections)); Primitive::Key(match self.variant { CodegenVariant::Reader => self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.stream), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: next_selections, }, ]), CodegenVariant::Normalization => { let StreamDirective { if_arg, label_arg, use_customized_batch_arg, initial_count_arg: _, } = StreamDirective::from(stream); let if_variable_name = extract_variable_name(if_arg); let use_customized_batch_variable_name = extract_variable_name(use_customized_batch_arg); let label_name = 
label_arg.unwrap().value.item.expect_string_literal(); self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.if_, value: Primitive::string_or_null(if_variable_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.stream), }, ObjectEntry { key: CODEGEN_CONSTANTS.label, value: Primitive::String(label_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.metadata, value: Primitive::Null, }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: next_selections, }, ObjectEntry { key: CODEGEN_CONSTANTS.use_customized_batch, value: Primitive::string_or_null(use_customized_batch_variable_name), }, ]) } }) } fn build_inline_fragment(&mut self, inline_frag: &InlineFragment) -> Primitive { match inline_frag.type_condition { None => { // TODO(T63388023): Use typed custom directives if inline_frag.directives.len() == 1 && inline_frag.directives[0].name.item == *CLIENT_EXTENSION_DIRECTIVE_NAME { let selections = self.build_selections(inline_frag.selections.iter()); Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.client_extension), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ])) } else { // TODO(T63559346): Handle anonymous inline fragments with no directives panic!( "Unexpected custom directives: {:#?}", inline_frag.directives ); } } Some(type_condition) => { if self.variant == CodegenVariant::Normalization { let is_abstract_inline_fragment = type_condition.is_abstract_type(); if is_abstract_inline_fragment { // Maintain a few invariants: // - InlineFragment (and `selections` arrays generally) cannot be empty // - Don't emit a TypeDiscriminator under an InlineFragment unless it has // a different abstractKey // This means we have to handle two cases: // - The inline fragment only contains a TypeDiscriminator with the same // abstractKey: replace the Fragment w the Discriminator // - The inline fragment contains other selections: return all the selections // minus any Discriminators w the same key let type_discriminator_index = inline_frag.selections.iter().position(|selection| { if let Selection::ScalarField(selection) = selection { selection .directives .named(*TYPE_DISCRIMINATOR_DIRECTIVE_NAME) .is_some() } else { false } }); if let Some(type_discriminator_index) = type_discriminator_index { if inline_frag.selections.len() == 1 { return self.build_type_discriminator( if let Selection::ScalarField(field) = &inline_frag.selections[0] { field } else { panic!("Expected a scalar field.") }, ); } else { let selections = self.build_selections( inline_frag .selections .iter() .take(type_discriminator_index) .chain( inline_frag .selections .iter() .skip(type_discriminator_index + 1), ), ); return Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.inline_fragment), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ObjectEntry { key: CODEGEN_CONSTANTS.type_, value: Primitive::String( self.schema.get_type_name(type_condition), ), }, ObjectEntry { key: CODEGEN_CONSTANTS.abstract_key, value: Primitive::String( generate_abstract_type_refinement_key( self.schema, type_condition, ), ), }, ])); } } } } let selections = self.build_selections(inline_frag.selections.iter()); Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.inline_fragment), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ObjectEntry { 
key: CODEGEN_CONSTANTS.type_, value: Primitive::String(self.schema.get_type_name(type_condition)), }, ObjectEntry { key: CODEGEN_CONSTANTS.abstract_key, value: if type_condition.is_abstract_type() { Primitive::String(generate_abstract_type_refinement_key( self.schema, type_condition, )) } else { Primitive::Null }, }, ])) } } } fn build_condition(&mut self, condition: &Condition) -> Primitive { let selections = self.build_selections(condition.selections.iter()); Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.condition, value: Primitive::String(match &condition.value { ConditionValue::Variable(variable) => variable.name.item, ConditionValue::Constant(_) => panic!( "Expected Condition with static value to have been pruned or inlined." ), }), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.condition_value), }, ObjectEntry { key: CODEGEN_CONSTANTS.passing_value, value: Primitive::Bool(condition.passing_value), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ])) } fn build_operation_variable_definitions( &mut self, variable_definitions: &[VariableDefinition], ) -> Primitive { let var_defs = variable_definitions .iter() .map(|def| { let default_value = if let Some(const_val) = &def.default_value { self.build_constant_value(&const_val) } else { Primitive::Null }; Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.default_value, value: default_value, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.local_argument), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(def.name.item), }, ])) }) .collect::<Vec<_>>(); Primitive::Key(self.array(var_defs)) } fn build_fragment_variable_definitions( &mut self, local_variable_definitions: &[VariableDefinition], global_variable_definitions: &[VariableDefinition], ) -> Primitive { // TODO(T63164787) this will produce argument_definitions in a different order than our JS codegen let mut var_defs = Vec::with_capacity( local_variable_definitions.len() + global_variable_definitions.len(), ); for def in local_variable_definitions { let object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.default_value, value: if let Some(const_val) = &def.default_value { self.build_constant_value(&const_val) } else { Primitive::Null }, }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.local_argument), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(def.name.item), }, ]; var_defs.push((def.name.item, Primitive::Key(self.object(object)))); } for def in global_variable_definitions { var_defs.push(( def.name.item, Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.root_argument), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(def.name.item), }, ])), )); } var_defs.sort_unstable_by(|(name_a, _), (name_b, _)| name_a.cmp(name_b)); let mut sorted_var_defs = Vec::with_capacity(var_defs.len()); for (_, var_def) in var_defs { sorted_var_defs.push(var_def); } Primitive::Key(self.array(sorted_var_defs)) } fn build_arguments(&mut self, arguments: &[Argument]) -> Option<AstKey> { let mut sorted_args: Vec<&Argument> = arguments.iter().collect(); sorted_args.sort_unstable_by_key(|arg| arg.name.item); let args = sorted_args .into_iter() // We are filtering out "null" arguments matching JS behavior .filter_map(|arg| self.build_argument(arg.name.item, &arg.value.item)) 
.map(Primitive::Key) .collect::<Vec<_>>(); if args.is_empty() { None } else { Some(self.array(args)) } } fn build_argument(&mut self, arg_name: StringKey, arg_value: &Value) -> Option<AstKey> { match arg_value { Value::Constant(const_val) => { if let Some(concrete_const_val) = self.build_constant_argument(arg_name, &const_val) { Some(concrete_const_val) } else { None } } Value::Variable(variable) => { let name = Primitive::String(arg_name); let variable_name = Primitive::String(variable.name.item); Some(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.variable), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: name, }, // TODO(T63303966) type is always skipped in JS compiler ObjectEntry { key: CODEGEN_CONSTANTS.variable_name, value: variable_name, }, ])) } Value::List(list) => { let items = list .iter() .enumerate() .map(|(i, val)| { let item_name = format!("{}.{}", arg_name, i).as_str().intern(); match self.build_argument(item_name, val) { None => Primitive::Null, Some(key) => Primitive::Key(key), } }) .collect::<Vec<_>>(); let object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.items, value: Primitive::Key(self.array(items)), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.list_value), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(arg_name), }, ]; Some(self.object(object)) } Value::Object(object) => { let mut sorted_object = object.clone(); sorted_object.sort_by_key(|arg| arg.name); let fields = sorted_object .into_iter() .map(|arg| { let field_name = arg.name.item; if let Some(concrete_arg) = self.build_argument(field_name, &arg.value.item) { Primitive::Key(concrete_arg) } else { // For object types, we do want to keep the literal argument // for null, instead of filtering it out, matching JS behavior Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.literal), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(field_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.value, value: Primitive::Null, }, ])) } }) .collect::<Vec<_>>(); let object = vec![ ObjectEntry { key: CODEGEN_CONSTANTS.fields, value: Primitive::Key(self.array(fields)), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.object_value), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(arg_name), }, ]; Some(self.object(object)) } } } fn build_constant_argument( &mut self, arg_name: StringKey, arg_value: &ConstantValue, ) -> Option<AstKey> { match arg_value { // We return None here to filter out "null" arguments, matching JS behavior ConstantValue::Null() => None, _ => { let value = self.build_constant_value(arg_value); Some(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.literal), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(arg_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.value, value, }, ])) } } } fn build_constant_value(&mut self, value: &ConstantValue) -> Primitive { match value { ConstantValue::Int(val) => Primitive::Int(*val), ConstantValue::Float(val) => Primitive::Float(*val), ConstantValue::String(val) => Primitive::String(*val), ConstantValue::Boolean(val) => Primitive::Bool(*val), ConstantValue::Null() => Primitive::Null, ConstantValue::Enum(val) => Primitive::String(*val), ConstantValue::List(val_list) => { let json_values = val_list .iter() .map(|val| 
self.build_constant_value(val)) .collect::<Vec<_>>(); Primitive::Key(self.array(json_values)) } ConstantValue::Object(val_object) => { let mut sorted_val_object: Vec<&_> = val_object.iter().collect(); sorted_val_object.sort_unstable_by_key(|arg| arg.name.item); let json_values = sorted_val_object .into_iter() .map(|arg| ObjectEntry { key: arg.name.item, value: self.build_constant_value(&arg.value.item), }) .collect::<Vec<_>>(); Primitive::Key(self.object(json_values)) } } } fn build_module_import_selections(&mut self, directive: &Directive) -> Vec<Primitive> { let fragment_name = directive .arguments .named(MATCH_CONSTANTS.name_arg) .unwrap() .value .item .expect_string_literal(); let key = directive .arguments .named(MATCH_CONSTANTS.key_arg) .unwrap() .value .item .expect_string_literal(); let fragment_name_str = fragment_name.lookup(); let underscore_idx = fragment_name_str.find('_').unwrap_or_else(|| { panic!( "@module fragments should be named 'FragmentName_propName', got '{}'.", fragment_name ) }); let selection = Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.document_name, value: Primitive::String(key), }, ObjectEntry { key: CODEGEN_CONSTANTS.fragment_name, value: Primitive::String(fragment_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.fragment_prop_name, value: Primitive::String(fragment_name_str[underscore_idx + 1..].intern()), }, ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.module_import), }, ])); vec![selection] } /// This method will wrap inline fragment with @__inline directive // (created by `inline_fragment_data` transform) /// with the node `InlineDataFragmentSpread` fn build_inline_data_fragment_spread( &mut self, inline_fragment: &InlineFragment, directive: &Directive, ) -> Primitive { let selections = self.build_selections(inline_fragment.selections.iter()); let fragment_name: StringKey = directive.arguments[0].value.item.expect_string_literal(); Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.inline_data_fragment_spread), }, ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(fragment_name), }, ObjectEntry { key: CODEGEN_CONSTANTS.selections, value: selections, }, ])) } fn build_request_parameters( &mut self, operation: &OperationDefinition, mut request_parameters: RequestParameters, ) -> AstKey { let mut metadata_items: Vec<ObjectEntry> = operation .directives .iter() .filter_map(|directive| { if directive.name.item == *INTERNAL_METADATA_DIRECTIVE { if directive.arguments.len() != 1 { panic!("@__metadata directive should have only one argument!"); } let arg = &directive.arguments[0]; let key = arg.name.item; let value = match &arg.value.item { Value::Constant(value) => self.build_constant_value(value), _ => { panic!("@__metadata directive expect only constant argument values."); } }; Some(ObjectEntry { key, value }) } else { None } }) .collect(); // add connection metadata let connection_metadata = extract_connection_metadata_from_directive(&operation.directives); if let Some(connection_metadata) = connection_metadata { metadata_items.push(self.build_connection_metadata(&connection_metadata)) } // add request parameters metadata let metadata_values: Vec<(String, String)> = request_parameters.metadata.drain().collect(); for (key, value) in metadata_values { metadata_items.push(ObjectEntry { key: key.intern(), value: Primitive::RawString(value), }); } // sort metadata keys metadata_items.sort_unstable_by_key(|entry| 
entry.key); // Construct metadata object let metadata_prop = ObjectEntry { key: CODEGEN_CONSTANTS.metadata, value: Primitive::Key(self.object(metadata_items)), }; let name_prop = ObjectEntry { key: CODEGEN_CONSTANTS.name, value: Primitive::String(request_parameters.name), }; let operation_kind_prop = ObjectEntry { key: CODEGEN_CONSTANTS.operation_kind, value: Primitive::String(match request_parameters.operation_kind { OperationKind::Query => CODEGEN_CONSTANTS.query, OperationKind::Mutation => CODEGEN_CONSTANTS.mutation, OperationKind::Subscription => CODEGEN_CONSTANTS.subscription, }), }; let id_prop = ObjectEntry { key: CODEGEN_CONSTANTS.id, value: if let Some(id) = request_parameters.id { Primitive::RawString(id) } else { Primitive::Null }, }; let params_object = if let Some(text) = request_parameters.text { vec![ ObjectEntry { key: CODEGEN_CONSTANTS.cache_id, value: Primitive::RawString(md5(&text)), }, id_prop, metadata_prop, name_prop, operation_kind_prop, ObjectEntry { key: CODEGEN_CONSTANTS.text, value: Primitive::RawString(text), }, ] } else { vec![ id_prop, metadata_prop, name_prop, operation_kind_prop, ObjectEntry { key: CODEGEN_CONSTANTS.text, value: Primitive::Null, }, ] }; self.object(params_object) } fn build_actor_change(&mut self, actor_change: &InlineFragment) -> Primitive { let linked_field = &self.build_selections_from_selection(&actor_change.selections[0])[0]; Primitive::Key(self.object(vec![ ObjectEntry { key: CODEGEN_CONSTANTS.kind, value: Primitive::String(CODEGEN_CONSTANTS.actor_change), }, ObjectEntry { key: CODEGEN_CONSTANTS.linked_field_property, value: Primitive::Key(linked_field.assert_key()), }, ])) } } // Storage key is only pre-computable if the arguments don't contain variables fn is_static_storage_key_available(arguments: &[Argument]) -> bool { !arguments .iter() .any(|arg| value_contains_variable(&arg.value.item)) } fn value_contains_variable(value: &Value) -> bool { match value { Value::Variable(_) => true, Value::Constant(_) => false, Value::List(values) => values.iter().any(value_contains_variable), Value::Object(objects) => objects .iter() .any(|arg| value_contains_variable(&arg.value.item)), } } fn build_alias(alias: Option<StringKey>, name: StringKey) -> ObjectEntry { let alias = match alias { None => Primitive::Null, Some(alias) => { if alias == name { Primitive::Null } else { Primitive::String(alias) } } }; ObjectEntry { key: CODEGEN_CONSTANTS.alias, value: alias, } } /// Computes the md5 hash of a string. pub fn md5(data: &str) -> String { let mut md5 = Md5::new(); md5.input(data); hex::encode(md5.result()) }
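// Editor's note: a minimal, hedged sketch (not part of the original file) exercising the
// `md5` helper defined above. The expected digest is the well-known MD5 of the empty string.
#[cfg(test)]
mod md5_sketch_tests {
    use super::md5;

    #[test]
    fn md5_of_empty_string_matches_known_digest() {
        assert_eq!(md5(""), "d41d8cd98f00b204e9800998ecf8427e");
    }
}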
39.737283
115
0.493192
2879353e725bd85eeed43f652473148cb41e8df8
11,069
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::OC4CONCLR { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct OCMR { bits: u8, } impl OCMR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct OCTSELR { bits: bool, } impl OCTSELR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct OCFLTR { bits: bool, } impl OCFLTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct OC32R { bits: bool, } impl OC32R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct SIDLR { bits: bool, } impl SIDLR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FRZR { bits: bool, } impl FRZR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ONR { bits: bool, } impl ONR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _OCMW<'a> { w: &'a mut W, } impl<'a> _OCMW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: 
u8) -> &'a mut W { const MASK: u8 = 7; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _OCTSELW<'a> { w: &'a mut W, } impl<'a> _OCTSELW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 3; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _OCFLTW<'a> { w: &'a mut W, } impl<'a> _OCFLTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _OC32W<'a> { w: &'a mut W, } impl<'a> _OC32W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _SIDLW<'a> { w: &'a mut W, } impl<'a> _SIDLW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 13; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FRZW<'a> { w: &'a mut W, } impl<'a> _FRZW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ONW<'a> { w: &'a mut W, } impl<'a> _ONW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 15; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:2"] #[inline] pub fn ocm(&self) -> OCMR { let bits = { const MASK: u8 = 7; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }; OCMR { bits } } #[doc = "Bit 3"] #[inline] pub fn octsel(&self) -> OCTSELR { let bits = { const 
MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }; OCTSELR { bits } } #[doc = "Bit 4"] #[inline] pub fn ocflt(&self) -> OCFLTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }; OCFLTR { bits } } #[doc = "Bit 5"] #[inline] pub fn oc32(&self) -> OC32R { let bits = { const MASK: bool = true; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) != 0 }; OC32R { bits } } #[doc = "Bit 13"] #[inline] pub fn sidl(&self) -> SIDLR { let bits = { const MASK: bool = true; const OFFSET: u8 = 13; ((self.bits >> OFFSET) & MASK as u32) != 0 }; SIDLR { bits } } #[doc = "Bit 14"] #[inline] pub fn frz(&self) -> FRZR { let bits = { const MASK: bool = true; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FRZR { bits } } #[doc = "Bit 15"] #[inline] pub fn on(&self) -> ONR { let bits = { const MASK: bool = true; const OFFSET: u8 = 15; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ONR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:2"] #[inline] pub fn ocm(&mut self) -> _OCMW { _OCMW { w: self } } #[doc = "Bit 3"] #[inline] pub fn octsel(&mut self) -> _OCTSELW { _OCTSELW { w: self } } #[doc = "Bit 4"] #[inline] pub fn ocflt(&mut self) -> _OCFLTW { _OCFLTW { w: self } } #[doc = "Bit 5"] #[inline] pub fn oc32(&mut self) -> _OC32W { _OC32W { w: self } } #[doc = "Bit 13"] #[inline] pub fn sidl(&mut self) -> _SIDLW { _SIDLW { w: self } } #[doc = "Bit 14"] #[inline] pub fn frz(&mut self) -> _FRZW { _FRZW { w: self } } #[doc = "Bit 15"] #[inline] pub fn on(&mut self) -> _ONW { _ONW { w: self } } }
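// Editor's note: a hedged usage sketch, not part of the generated file above. It assumes a
// hypothetical `oc4conclr: &super::OC4CONCLR` handle obtained from the peripheral block and
// only illustrates the read / modify / write API exposed by this module.
#[allow(dead_code)]
fn usage_sketch(oc4conclr: &super::OC4CONCLR) {
    // Read the whole register, then extract the 3-bit OCM field (bits 0:2).
    let _mode: u8 = oc4conclr.read().ocm().bits();
    // Read-modify-write: set ON (bit 15) and clear SIDL (bit 13), leaving other bits untouched.
    oc4conclr.modify(|_r, w| w.on().set_bit().sidl().clear_bit());
    // Whole-register write starting from the reset value; writing raw OCM bits is `unsafe`.
    oc4conclr.write(|w| unsafe { w.ocm().bits(0b101) });
}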
24.063043
59
0.48008
fcca2aab1d0e04c83a52166285b0327672ffe1fb
18,315
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::CIR { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct LSIRDYFR { bits: bool, } impl LSIRDYFR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct LSERDYFR { bits: bool, } impl LSERDYFR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct HSIRDYFR { bits: bool, } impl HSIRDYFR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct HSERDYFR { bits: bool, } impl HSERDYFR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct PLLRDYFR { bits: bool, } impl PLLRDYFR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct CSSFR { bits: bool, } impl CSSFR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct LSIRDYIER { bits: bool, } impl LSIRDYIER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" 
Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct LSERDYIER { bits: bool, } impl LSERDYIER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct HSIRDYIER { bits: bool, } impl HSIRDYIER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct HSERDYIER { bits: bool, } impl HSERDYIER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct PLLRDYIER { bits: bool, } impl PLLRDYIER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _LSIRDYIEW<'a> { w: &'a mut W, } impl<'a> _LSIRDYIEW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _LSERDYIEW<'a> { w: &'a mut W, } impl<'a> _LSERDYIEW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 9; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _HSIRDYIEW<'a> { w: &'a mut W, } impl<'a> _HSIRDYIEW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 10; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _HSERDYIEW<'a> { w: &'a mut W, } impl<'a> _HSERDYIEW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] 
#[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 11; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _PLLRDYIEW<'a> { w: &'a mut W, } impl<'a> _PLLRDYIEW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 12; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _LSIRDYCW<'a> { w: &'a mut W, } impl<'a> _LSIRDYCW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _LSERDYCW<'a> { w: &'a mut W, } impl<'a> _LSERDYCW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 17; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _HSIRDYCW<'a> { w: &'a mut W, } impl<'a> _HSIRDYCW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 18; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _HSERDYCW<'a> { w: &'a mut W, } impl<'a> _HSERDYCW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 19; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _PLLRDYCW<'a> { w: &'a mut W, } impl<'a> _PLLRDYCW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 20; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _CSSCW<'a> { w: &'a mut W, } impl<'a> _CSSCW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw 
bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 23; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0 - LSI Ready Interrupt flag"] #[inline] pub fn lsirdyf(&self) -> LSIRDYFR { let bits = { const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }; LSIRDYFR { bits } } #[doc = "Bit 1 - LSE Ready Interrupt flag"] #[inline] pub fn lserdyf(&self) -> LSERDYFR { let bits = { const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u32) != 0 }; LSERDYFR { bits } } #[doc = "Bit 2 - HSI Ready Interrupt flag"] #[inline] pub fn hsirdyf(&self) -> HSIRDYFR { let bits = { const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }; HSIRDYFR { bits } } #[doc = "Bit 3 - HSE Ready Interrupt flag"] #[inline] pub fn hserdyf(&self) -> HSERDYFR { let bits = { const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }; HSERDYFR { bits } } #[doc = "Bit 4 - PLL Ready Interrupt flag"] #[inline] pub fn pllrdyf(&self) -> PLLRDYFR { let bits = { const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }; PLLRDYFR { bits } } #[doc = "Bit 7 - Clock Security System Interrupt flag"] #[inline] pub fn cssf(&self) -> CSSFR { let bits = { const MASK: bool = true; const OFFSET: u8 = 7; ((self.bits >> OFFSET) & MASK as u32) != 0 }; CSSFR { bits } } #[doc = "Bit 8 - LSI Ready Interrupt Enable"] #[inline] pub fn lsirdyie(&self) -> LSIRDYIER { let bits = { const MASK: bool = true; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) != 0 }; LSIRDYIER { bits } } #[doc = "Bit 9 - LSE Ready Interrupt Enable"] #[inline] pub fn lserdyie(&self) -> LSERDYIER { let bits = { const MASK: bool = true; const OFFSET: u8 = 9; ((self.bits >> OFFSET) & MASK as u32) != 0 }; LSERDYIER { bits } } #[doc = "Bit 10 - HSI Ready Interrupt Enable"] #[inline] pub fn hsirdyie(&self) -> HSIRDYIER { let bits = { const MASK: bool = true; const OFFSET: u8 = 10; ((self.bits >> OFFSET) & MASK as u32) != 0 }; HSIRDYIER { bits } } #[doc = "Bit 11 - HSE Ready Interrupt Enable"] #[inline] pub fn hserdyie(&self) -> HSERDYIER { let bits = { const MASK: bool = true; const OFFSET: u8 = 11; ((self.bits >> OFFSET) & MASK as u32) != 0 }; HSERDYIER { bits } } #[doc = "Bit 12 - PLL Ready Interrupt Enable"] #[inline] pub fn pllrdyie(&self) -> PLLRDYIER { let bits = { const MASK: bool = true; const OFFSET: u8 = 12; ((self.bits >> OFFSET) & MASK as u32) != 0 }; PLLRDYIER { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 8 - LSI Ready Interrupt Enable"] #[inline] pub fn lsirdyie(&mut self) -> _LSIRDYIEW { _LSIRDYIEW { w: self } } #[doc = "Bit 9 - LSE Ready Interrupt Enable"] #[inline] pub fn lserdyie(&mut self) -> _LSERDYIEW { _LSERDYIEW { w: self } } #[doc = "Bit 10 - HSI Ready Interrupt Enable"] #[inline] pub fn hsirdyie(&mut self) -> _HSIRDYIEW { _HSIRDYIEW { w: self } } #[doc = "Bit 11 - HSE Ready Interrupt Enable"] #[inline] pub fn hserdyie(&mut self) -> _HSERDYIEW { _HSERDYIEW { w: self } } #[doc = "Bit 12 - PLL Ready Interrupt Enable"] #[inline] pub fn 
pllrdyie(&mut self) -> _PLLRDYIEW { _PLLRDYIEW { w: self } } #[doc = "Bit 16 - LSI Ready Interrupt Clear"] #[inline] pub fn lsirdyc(&mut self) -> _LSIRDYCW { _LSIRDYCW { w: self } } #[doc = "Bit 17 - LSE Ready Interrupt Clear"] #[inline] pub fn lserdyc(&mut self) -> _LSERDYCW { _LSERDYCW { w: self } } #[doc = "Bit 18 - HSI Ready Interrupt Clear"] #[inline] pub fn hsirdyc(&mut self) -> _HSIRDYCW { _HSIRDYCW { w: self } } #[doc = "Bit 19 - HSE Ready Interrupt Clear"] #[inline] pub fn hserdyc(&mut self) -> _HSERDYCW { _HSERDYCW { w: self } } #[doc = "Bit 20 - PLL Ready Interrupt Clear"] #[inline] pub fn pllrdyc(&mut self) -> _PLLRDYCW { _PLLRDYCW { w: self } } #[doc = "Bit 23 - Clock security system interrupt clear"] #[inline] pub fn cssc(&mut self) -> _CSSCW { _CSSCW { w: self } } }
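// Editor's note: a hedged sketch, not part of the generated file above, of the usual
// check-then-clear pattern for the ready/interrupt flags defined in this register. The
// `cir: &super::CIR` handle is a hypothetical reference to the register instance.
#[allow(dead_code)]
fn clear_lsi_ready_if_pending(cir: &super::CIR) {
    if cir.read().lsirdyf().bit_is_set() {
        // LSIRDYC (bit 16) is write-only here; setting it acknowledges/clears LSIRDYF (bit 0).
        // `modify` is used instead of `write` so the interrupt-enable bits are preserved.
        cir.modify(|_r, w| w.lsirdyc().set_bit());
    }
}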
25.651261
61
0.501447
ab43117110c1f0a6a750481e64b615c5101dd273
7,300
mod clip; mod neighborhoods; mod osm; mod remove_disconnected; mod split_ways; use abstutil::Timer; use geom::{Distance, FindClosest, GPSBounds, LonLat, PolyLine, Polygon, Pt2D}; use kml::ExtraShapes; use map_model::{raw_data, OffstreetParking, LANE_THICKNESS}; use std::fs::File; use std::io::{BufRead, BufReader}; use structopt::StructOpt; #[derive(StructOpt, Debug)] #[structopt(name = "convert_osm")] pub struct Flags { /// OSM XML file to read #[structopt(long = "osm")] pub osm: String, /// ExtraShapes file with blockface, produced using the kml crate. Optional. #[structopt(long = "parking_shapes", default_value = "")] pub parking_shapes: String, /// KML file with offstreet parking info. Optional. #[structopt(long = "offstreet_parking", default_value = "")] pub offstreet_parking: String, /// GTFS directory. Optional. #[structopt(long = "gtfs", default_value = "")] pub gtfs: String, /// Neighborhood GeoJSON path. Optional. #[structopt(long = "neighborhoods", default_value = "")] pub neighborhoods: String, /// Osmosis clipping polgon. Required. #[structopt(long = "clip", default_value = "")] pub clip: String, /// Output .bin path #[structopt(long = "output")] pub output: String, /// Disable blockface #[structopt(long = "fast_dev")] pub fast_dev: bool, } pub fn convert(flags: &Flags, timer: &mut abstutil::Timer) -> raw_data::Map { if flags.clip.is_empty() { panic!("You must specify an Osmosis boundary polygon with --clip"); } let mut map = split_ways::split_up_roads( osm::extract_osm(&flags.osm, read_osmosis_polygon(&flags.clip), timer), timer, ); clip::clip_map(&mut map, timer); remove_disconnected::remove_disconnected_roads(&mut map, timer); if flags.fast_dev { return map; } if !flags.parking_shapes.is_empty() { use_parking_hints(&mut map, &flags.parking_shapes, timer); } if !flags.offstreet_parking.is_empty() { use_offstreet_parking(&mut map, &flags.offstreet_parking, timer); } if !flags.gtfs.is_empty() { timer.start("load GTFS"); map.bus_routes = gtfs::load(&flags.gtfs).unwrap(); timer.stop("load GTFS"); } if !flags.neighborhoods.is_empty() { timer.start("convert neighborhood polygons"); let map_name = abstutil::basename(&flags.output); neighborhoods::convert(&flags.neighborhoods, map_name, &map.gps_bounds); timer.stop("convert neighborhood polygons"); } map } fn use_parking_hints(map: &mut raw_data::Map, path: &str, timer: &mut Timer) { timer.start("apply parking hints"); println!("Loading blockface shapes from {}", path); let shapes: ExtraShapes = abstutil::read_binary(path, timer).expect("loading blockface failed"); // Match shapes with the nearest road + direction (true for forwards) let mut closest: FindClosest<(raw_data::StableRoadID, bool)> = FindClosest::new(&map.gps_bounds.to_bounds()); for (id, r) in &map.roads { let center = PolyLine::new(r.center_points.clone()); closest.add( (*id, true), center.shift_right(LANE_THICKNESS).get(timer).points(), ); closest.add( (*id, false), center.shift_left(LANE_THICKNESS).get(timer).points(), ); } 'SHAPE: for s in shapes.shapes.into_iter() { let mut pts: Vec<Pt2D> = Vec::new(); for pt in s.points.into_iter() { if let Some(pt) = Pt2D::from_gps(pt, &map.gps_bounds) { pts.push(pt); } else { continue 'SHAPE; } } if pts.len() > 1 { // The blockface line endpoints will be close to other roads, so match based on the // middle of the blockface. // TODO Long blockfaces sometimes cover two roads. Should maybe find ALL matches within // the threshold distance? 
let middle = PolyLine::new(pts).middle(); if let Some(((r, fwds), _)) = closest.closest_pt(middle, LANE_THICKNESS * 5.0) { let category = s.attributes.get("PARKING_CATEGORY"); let has_parking = category != Some(&"None".to_string()) && category != Some(&"No Parking Allowed".to_string()); // Blindly override prior values. if fwds { map.roads.get_mut(&r).unwrap().parking_lane_fwd = has_parking; } else { map.roads.get_mut(&r).unwrap().parking_lane_back = has_parking; } } } } timer.stop("apply parking hints"); } fn read_osmosis_polygon(path: &str) -> raw_data::Map { let mut pts: Vec<LonLat> = Vec::new(); let mut gps_bounds = GPSBounds::new(); for (idx, maybe_line) in BufReader::new(File::open(path).unwrap()) .lines() .enumerate() { if idx == 0 || idx == 1 { continue; } let line = maybe_line.unwrap(); if line == "END" { break; } let parts: Vec<&str> = line.trim_start().split(" ").collect(); assert!(parts.len() == 2); let pt = LonLat::new( parts[0].parse::<f64>().unwrap(), parts[1].parse::<f64>().unwrap(), ); pts.push(pt); gps_bounds.update(pt); } let mut map = raw_data::Map::blank(); map.boundary_polygon = Polygon::new(&gps_bounds.must_convert(&pts)); map.gps_bounds = gps_bounds; map } fn use_offstreet_parking(map: &mut raw_data::Map, path: &str, timer: &mut Timer) { timer.start("match offstreet parking points"); let shapes = kml::load(path, &map.gps_bounds, timer).expect("loading offstreet_parking failed"); // Building indices let mut closest: FindClosest<usize> = FindClosest::new(&map.gps_bounds.to_bounds()); for (idx, b) in map.buildings.iter().enumerate() { closest.add(idx, b.polygon.points()); } // TODO Another function just to use ?. Try blocks would rock. let mut handle_shape: Box<dyn FnMut(kml::ExtraShape) -> Option<()>> = Box::new(|s| { assert_eq!(s.points.len(), 1); let pt = Pt2D::from_gps(s.points[0], &map.gps_bounds)?; let (idx, _) = closest.closest_pt(pt, Distance::meters(50.0))?; // TODO Handle parking lots. if !map.buildings[idx].polygon.contains_pt(pt) { return None; } let name = s.attributes.get("DEA_FACILITY_NAME")?.to_string(); let num_stalls = s.attributes.get("DEA_STALLS")?.parse::<usize>().ok()?; // TODO Update the existing one instead if let Some(ref existing) = map.buildings[idx].parking { // TODO Can't use timer inside this closure println!( "Two offstreet parking hints apply to building {}: {} @ {}, and {} @ {}", idx, existing.num_stalls, existing.name, num_stalls, name ); } map.buildings[idx].parking = Some(OffstreetParking { name, num_stalls }); None }); for s in shapes.shapes.into_iter() { handle_shape(s); } timer.stop("match offstreet parking points"); }
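// Editor's note: a hedged sketch, not part of the original file, of how `Flags` and `convert`
// above are typically wired together in a binary crate. `Flags::from_args` comes from the
// StructOpt derive; the `Timer::new` argument and the final serialization of the map to
// `flags.output` are assumptions left out because the exact abstutil helpers are not shown here.
#[allow(dead_code)]
fn main_sketch() {
    let flags = Flags::from_args();
    let mut timer = Timer::new("convert OSM");
    let map = convert(&flags, &mut timer);
    let _ = map; // would normally be serialized to `flags.output`
}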
35.2657
100
0.59726
5d601a25663fa58f241136f3fb6296e91730c8af
425
//! Request and response messages use bytes::Bytes; use controller::Controller; mod meta; mod test; pub use self::meta::{RpcMeta, RpcRequestMeta, RpcResponseMeta}; /// RPC request headers and parameters pub type RequestPackage = (RpcRequestMeta, Controller, Bytes); /// RPC response headers and body pub type ResponsePackage = (RpcResponseMeta, Controller, Bytes); #[cfg(test)] pub(crate) use self::test::TestMessage;
22.368421
64
0.755294
6134dd8f4453a1c36fb796e81106cdf906ebca19
1,592
use std::fmt; use crate::import::*; pub type SysDatetime = DateTime<BeijingTimezone>; pub type SysDate = Date<BeijingTimezone>; #[derive(Copy, Clone, PartialEq, Eq)] pub struct BeijingTimezone; impl BeijingTimezone { /// Returns a `Date` which corresponds to the current date. pub fn today() -> SysDate { SysDate::from_utc(Utc::today().naive_local(), BeijingTimezone) } pub fn now() -> SysDatetime { SysDatetime::from_utc(Utc::now().naive_local(), BeijingTimezone) } } // The code below is copied from chrono's `Utc` implementation. impl TimeZone for BeijingTimezone { type Offset = BeijingTimezone; fn from_offset(_state: &Self::Offset) -> BeijingTimezone { BeijingTimezone } fn offset_from_local_date(&self, _local: &NaiveDate) -> LocalResult<BeijingTimezone> { LocalResult::Single(BeijingTimezone) } fn offset_from_local_datetime(&self, _local: &NaiveDateTime) -> LocalResult<BeijingTimezone> { LocalResult::Single(BeijingTimezone) } fn offset_from_utc_date(&self, _utc: &NaiveDate) -> BeijingTimezone { BeijingTimezone } fn offset_from_utc_datetime(&self, _utc: &NaiveDateTime) -> BeijingTimezone { BeijingTimezone } } impl Offset for BeijingTimezone { fn fix(&self) -> FixedOffset { FixedOffset::east(8 * 3600) } } impl fmt::Debug for BeijingTimezone { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "GMT+8") } } impl fmt::Display for BeijingTimezone { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "GMT+8") } }
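// Editor's note: a hedged usage sketch, not part of the original module. Formatting the
// `Date`/`DateTime` values relies on the `Display` impl for the fixed GMT+8 offset above.
#[allow(dead_code)]
fn usage_sketch() {
    let today: SysDate = BeijingTimezone::today();
    let now: SysDatetime = BeijingTimezone::now();
    println!("today = {}, now = {}", today, now);
}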
25.677419
98
0.659548
ff49c4e3b329df7a6fb21c7ea5d27c4de7347572
2,560
//! System bindings for the SBF platform //! //! This module contains the facade (aka platform-specific) implementations of //! OS level functionality for SBF //! //! This is all super highly experimental and not actually intended for //! wide/production use yet, it's still all in the experimental category. This //! will likely change over time. //! //! Currently all functions here are basically stubs that immediately return //! errors. The hope is that with a portability lint we can turn actually just //! remove all this and just omit parts of the standard library if we're //! compiling for SBF. That way it's a compile time error for something that's //! guaranteed to be a runtime error! use crate::os::raw::c_char; pub mod alloc; pub mod args; //#[cfg(feature = "backtrace")] //pub mod backtrace; pub mod cmath; pub mod env; pub mod fs; pub mod io; pub mod memchr; pub mod net; pub mod os; pub mod path; pub mod pipe; pub mod process; pub mod thread; pub mod time; pub mod stdio; #[path = "../unix/os_str.rs"] pub mod os_str; pub mod condvar; pub mod mutex; pub mod rwlock; pub mod thread_local_dtor; pub mod thread_local_key; extern "C" { fn abort() -> !; #[allow(improper_ctypes)] fn custom_panic(info: &core::panic::PanicInfo<'_>); fn sol_log_(message: *const u8, length: u64); } pub fn sol_log(message: &str) { unsafe { sol_log_(message.as_ptr(), message.len() as u64); } } pub fn panic(info: &core::panic::PanicInfo<'_>) -> ! { unsafe { custom_panic(info); } unsafe { abort(); } } pub fn unsupported<T>() -> crate::io::Result<T> { Err(unsupported_err()) } pub fn unsupported_err() -> crate::io::Error { crate::io::Error::new(crate::io::ErrorKind::Other, "operation not supported on SBF yet") } pub fn decode_error_kind(_code: i32) -> crate::io::ErrorKind { crate::io::ErrorKind::Other } // This enum is used as the storage for a bunch of types which can't actually // exist. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] pub enum Void {} pub unsafe fn strlen(mut s: *const c_char) -> usize { let mut n = 0; while *s != 0 { n += 1; s = s.offset(1); } return n } pub fn abort_internal() -> ! { unsafe { abort() } } // We don't have randomness yet, but I totally used a random number generator to // generate these numbers. // // More seriously though this is just for DOS protection in hash maps. It's ok // if we don't do that on SBF just yet. pub fn hashmap_random_keys() -> (u64, u64) { (1, 2) }
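// Editor's note: a hedged sketch, not part of the original module, showing how the platform
// submodules above typically stub out an operation that SBF cannot support. `stub_operation`
// is a hypothetical name used only for illustration.
#[allow(dead_code)]
pub fn stub_operation() -> crate::io::Result<()> {
    // Funnel through `unsupported()` so every caller sees the same
    // "operation not supported on SBF yet" error.
    unsupported()
}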
25.098039
80
0.667578
22bdbc44eef3e8ddde2158eeee29dfc98a06743a
37,133
#![allow(clippy::integer_arithmetic)] //! A library for generating a message from a sequence of instructions use crate::sanitize::{Sanitize, SanitizeError}; use crate::serialize_utils::{ append_slice, append_u16, append_u8, read_pubkey, read_slice, read_u16, read_u8, }; use crate::{ bpf_loader, bpf_loader_deprecated, bpf_loader_upgradeable, hash::Hash, instruction::{AccountMeta, CompiledInstruction, Instruction}, message::MessageHeader, pubkey::Pubkey, short_vec, system_instruction, system_program, sysvar, }; use itertools::Itertools; use lazy_static::lazy_static; use std::{convert::TryFrom, str::FromStr}; lazy_static! { // Copied keys over since direct references create cyclical dependency. pub static ref BUILTIN_PROGRAMS_KEYS: [Pubkey; 10] = { let parse = |s| Pubkey::from_str(s).unwrap(); [ parse("Config1111111111111111111111111111111111111"), parse("Feature111111111111111111111111111111111111"), parse("NativeLoader1111111111111111111111111111111"), parse("Stake11111111111111111111111111111111111111"), parse("StakeConfig11111111111111111111111111111111"), parse("Vote111111111111111111111111111111111111111"), system_program::id(), bpf_loader::id(), bpf_loader_deprecated::id(), bpf_loader_upgradeable::id(), ] }; } fn position(keys: &[Pubkey], key: &Pubkey) -> u8 { keys.iter().position(|k| k == key).unwrap() as u8 } fn compile_instruction(ix: &Instruction, keys: &[Pubkey]) -> CompiledInstruction { let accounts: Vec<_> = ix .accounts .iter() .map(|account_meta| position(keys, &account_meta.pubkey)) .collect(); CompiledInstruction { program_id_index: position(keys, &ix.program_id), data: ix.data.clone(), accounts, } } fn compile_instructions(ixs: &[Instruction], keys: &[Pubkey]) -> Vec<CompiledInstruction> { ixs.iter().map(|ix| compile_instruction(ix, keys)).collect() } /// A helper struct to collect pubkeys referenced by a set of instructions and read-only counts #[derive(Debug, PartialEq, Eq)] struct InstructionKeys { pub signed_keys: Vec<Pubkey>, pub unsigned_keys: Vec<Pubkey>, pub num_readonly_signed_accounts: u8, pub num_readonly_unsigned_accounts: u8, } impl InstructionKeys { fn new( signed_keys: Vec<Pubkey>, unsigned_keys: Vec<Pubkey>, num_readonly_signed_accounts: u8, num_readonly_unsigned_accounts: u8, ) -> Self { Self { signed_keys, unsigned_keys, num_readonly_signed_accounts, num_readonly_unsigned_accounts, } } } /// Return pubkeys referenced by all instructions, with the ones needing signatures first. If the /// payer key is provided, it is always placed first in the list of signed keys. Read-only signed /// accounts are placed last in the set of signed accounts. Read-only unsigned accounts, /// including program ids, are placed last in the set. No duplicates and order is preserved. 
fn get_keys(instructions: &[Instruction], payer: Option<&Pubkey>) -> InstructionKeys { let programs: Vec<_> = get_program_ids(instructions) .iter() .map(|program_id| AccountMeta { pubkey: *program_id, is_signer: false, is_writable: false, }) .collect(); let mut keys_and_signed: Vec<_> = instructions .iter() .flat_map(|ix| ix.accounts.iter()) .collect(); keys_and_signed.extend(&programs); keys_and_signed.sort_by(|x, y| { y.is_signer .cmp(&x.is_signer) .then(y.is_writable.cmp(&x.is_writable)) }); let payer_account_meta; if let Some(payer) = payer { payer_account_meta = AccountMeta { pubkey: *payer, is_signer: true, is_writable: true, }; keys_and_signed.insert(0, &payer_account_meta); } let mut unique_metas: Vec<AccountMeta> = vec![]; for account_meta in keys_and_signed { // Promote to writable if a later AccountMeta requires it if let Some(x) = unique_metas .iter_mut() .find(|x| x.pubkey == account_meta.pubkey) { x.is_writable |= account_meta.is_writable; continue; } unique_metas.push(account_meta.clone()); } let mut signed_keys = vec![]; let mut unsigned_keys = vec![]; let mut num_readonly_signed_accounts = 0; let mut num_readonly_unsigned_accounts = 0; for account_meta in unique_metas { if account_meta.is_signer { signed_keys.push(account_meta.pubkey); if !account_meta.is_writable { num_readonly_signed_accounts += 1; } } else { unsigned_keys.push(account_meta.pubkey); if !account_meta.is_writable { num_readonly_unsigned_accounts += 1; } } } InstructionKeys::new( signed_keys, unsigned_keys, num_readonly_signed_accounts, num_readonly_unsigned_accounts, ) } /// Return program ids referenced by all instructions. No duplicates and order is preserved. fn get_program_ids(instructions: &[Instruction]) -> Vec<Pubkey> { instructions .iter() .map(|ix| ix.program_id) .unique() .collect() } // NOTE: Serialization-related changes must be paired with the custom serialization // for versioned messages in the `RemainingLegacyMessage` struct. #[frozen_abi(digest = "2KnLEqfLcTBQqitE22Pp8JYkaqVVbAkGbCfdeHoyxcAU")] #[derive(Serialize, Deserialize, Default, Debug, PartialEq, Eq, Clone, AbiExample)] #[serde(rename_all = "camelCase")] pub struct Message { /// The message header, identifying signed and read-only `account_keys` /// NOTE: Serialization-related changes must be paired with the direct read at sigverify. pub header: MessageHeader, /// All the account keys used by this transaction #[serde(with = "short_vec")] pub account_keys: Vec<Pubkey>, /// The id of a recent ledger entry. pub recent_blockhash: Hash, /// Programs that will be executed in sequence and committed in one atomic transaction if all /// succeed. #[serde(with = "short_vec")] pub instructions: Vec<CompiledInstruction>, } impl Sanitize for Message { fn sanitize(&self) -> std::result::Result<(), SanitizeError> { // signing area and read-only non-signing area should not overlap if self.header.num_required_signatures as usize + self.header.num_readonly_unsigned_accounts as usize > self.account_keys.len() { return Err(SanitizeError::IndexOutOfBounds); } // there should be at least 1 RW fee-payer account. if self.header.num_readonly_signed_accounts >= self.header.num_required_signatures { return Err(SanitizeError::IndexOutOfBounds); } for ci in &self.instructions { if ci.program_id_index as usize >= self.account_keys.len() { return Err(SanitizeError::IndexOutOfBounds); } // A program cannot be a payer. 
if ci.program_id_index == 0 { return Err(SanitizeError::IndexOutOfBounds); } for ai in &ci.accounts { if *ai as usize >= self.account_keys.len() { return Err(SanitizeError::IndexOutOfBounds); } } } self.account_keys.sanitize()?; self.recent_blockhash.sanitize()?; self.instructions.sanitize()?; Ok(()) } } impl Message { pub fn new_with_compiled_instructions( num_required_signatures: u8, num_readonly_signed_accounts: u8, num_readonly_unsigned_accounts: u8, account_keys: Vec<Pubkey>, recent_blockhash: Hash, instructions: Vec<CompiledInstruction>, ) -> Self { Self { header: MessageHeader { num_required_signatures, num_readonly_signed_accounts, num_readonly_unsigned_accounts, }, account_keys, recent_blockhash, instructions, } } pub fn new(instructions: &[Instruction], payer: Option<&Pubkey>) -> Self { let InstructionKeys { mut signed_keys, unsigned_keys, num_readonly_signed_accounts, num_readonly_unsigned_accounts, } = get_keys(instructions, payer); let num_required_signatures = signed_keys.len() as u8; signed_keys.extend(&unsigned_keys); let instructions = compile_instructions(instructions, &signed_keys); Self::new_with_compiled_instructions( num_required_signatures, num_readonly_signed_accounts, num_readonly_unsigned_accounts, signed_keys, Hash::default(), instructions, ) } pub fn new_with_nonce( mut instructions: Vec<Instruction>, payer: Option<&Pubkey>, nonce_account_pubkey: &Pubkey, nonce_authority_pubkey: &Pubkey, ) -> Self { let nonce_ix = system_instruction::advance_nonce_account(nonce_account_pubkey, nonce_authority_pubkey); instructions.insert(0, nonce_ix); Self::new(&instructions, payer) } /// Compute the blake3 hash of this transaction's message #[cfg(not(target_arch = "bpf"))] pub fn hash(&self) -> Hash { let message_bytes = self.serialize(); Self::hash_raw_message(&message_bytes) } /// Compute the blake3 hash of a raw transaction message #[cfg(not(target_arch = "bpf"))] pub fn hash_raw_message(message_bytes: &[u8]) -> Hash { use blake3::traits::digest::Digest; let mut hasher = blake3::Hasher::new(); hasher.update(b"solana-tx-message-v1"); hasher.update(message_bytes); Hash(<[u8; crate::hash::HASH_BYTES]>::try_from(hasher.finalize().as_slice()).unwrap()) } pub fn compile_instruction(&self, ix: &Instruction) -> CompiledInstruction { compile_instruction(ix, &self.account_keys) } pub fn serialize(&self) -> Vec<u8> { bincode::serialize(self).unwrap() } pub fn program_id(&self, instruction_index: usize) -> Option<&Pubkey> { Some( &self.account_keys[self.instructions.get(instruction_index)?.program_id_index as usize], ) } pub fn program_index(&self, instruction_index: usize) -> Option<usize> { Some(self.instructions.get(instruction_index)?.program_id_index as usize) } pub fn program_ids(&self) -> Vec<&Pubkey> { self.instructions .iter() .map(|ix| &self.account_keys[ix.program_id_index as usize]) .collect() } pub fn is_key_passed_to_program(&self, key_index: usize) -> bool { if let Ok(key_index) = u8::try_from(key_index) { self.instructions .iter() .any(|ix| ix.accounts.contains(&key_index)) } else { false } } pub fn is_key_called_as_program(&self, key_index: usize) -> bool { if let Ok(key_index) = u8::try_from(key_index) { self.instructions .iter() .any(|ix| ix.program_id_index == key_index) } else { false } } pub fn is_non_loader_key(&self, key_index: usize) -> bool { !self.is_key_called_as_program(key_index) || self.is_key_passed_to_program(key_index) } pub fn program_position(&self, index: usize) -> Option<usize> { let program_ids = self.program_ids(); program_ids .iter() 
.position(|&&pubkey| pubkey == self.account_keys[index]) } pub fn maybe_executable(&self, i: usize) -> bool { self.program_position(i).is_some() } pub fn is_writable(&self, i: usize) -> bool { (i < (self.header.num_required_signatures - self.header.num_readonly_signed_accounts) as usize || (i >= self.header.num_required_signatures as usize && i < self.account_keys.len() - self.header.num_readonly_unsigned_accounts as usize)) && !{ let key = self.account_keys[i]; sysvar::is_sysvar_id(&key) || BUILTIN_PROGRAMS_KEYS.contains(&key) } } pub fn is_signer(&self, i: usize) -> bool { i < self.header.num_required_signatures as usize } pub fn get_account_keys_by_lock_type(&self) -> (Vec<&Pubkey>, Vec<&Pubkey>) { let mut writable_keys = vec![]; let mut readonly_keys = vec![]; for (i, key) in self.account_keys.iter().enumerate() { if self.is_writable(i) { writable_keys.push(key); } else { readonly_keys.push(key); } } (writable_keys, readonly_keys) } // First encode the number of instructions: // [0..2 - num_instructions // // Then a table of offsets of where to find them in the data // 3..2 * num_instructions table of instruction offsets // // Each instruction is then encoded as: // 0..2 - num_accounts // 2 - meta_byte -> (bit 0 signer, bit 1 is_writable) // 3..35 - pubkey - 32 bytes // 35..67 - program_id // 67..69 - data len - u16 // 69..data_len - data pub fn serialize_instructions(&self) -> Vec<u8> { // 64 bytes is a reasonable guess, calculating exactly is slower in benchmarks let mut data = Vec::with_capacity(self.instructions.len() * (32 * 2)); append_u16(&mut data, self.instructions.len() as u16); for _ in 0..self.instructions.len() { append_u16(&mut data, 0); } for (i, instruction) in self.instructions.iter().enumerate() { let start_instruction_offset = data.len() as u16; let start = 2 + (2 * i); data[start..start + 2].copy_from_slice(&start_instruction_offset.to_le_bytes()); append_u16(&mut data, instruction.accounts.len() as u16); for account_index in &instruction.accounts { let account_index = *account_index as usize; let is_signer = self.is_signer(account_index); let is_writable = self.is_writable(account_index); let mut meta_byte = 0; if is_signer { meta_byte |= 1 << Self::IS_SIGNER_BIT; } if is_writable { meta_byte |= 1 << Self::IS_WRITABLE_BIT; } append_u8(&mut data, meta_byte); append_slice(&mut data, self.account_keys[account_index].as_ref()); } let program_id = &self.account_keys[instruction.program_id_index as usize]; append_slice(&mut data, program_id.as_ref()); append_u16(&mut data, instruction.data.len() as u16); append_slice(&mut data, &instruction.data); } data } const IS_SIGNER_BIT: usize = 0; const IS_WRITABLE_BIT: usize = 1; pub fn deserialize_instruction( index: usize, data: &[u8], ) -> Result<Instruction, SanitizeError> { let mut current = 0; let num_instructions = read_u16(&mut current, data)?; if index >= num_instructions as usize { return Err(SanitizeError::IndexOutOfBounds); } // index into the instruction byte-offset table. 
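        // (Each entry in that table, written by `serialize_instructions`, is a
        // 2-byte little-endian u16, so advance past `index` entries after the
        // leading instruction count.)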
current += index * 2; let start = read_u16(&mut current, data)?; current = start as usize; let num_accounts = read_u16(&mut current, data)?; let mut accounts = Vec::with_capacity(num_accounts as usize); for _ in 0..num_accounts { let meta_byte = read_u8(&mut current, data)?; let mut is_signer = false; let mut is_writable = false; if meta_byte & (1 << Self::IS_SIGNER_BIT) != 0 { is_signer = true; } if meta_byte & (1 << Self::IS_WRITABLE_BIT) != 0 { is_writable = true; } let pubkey = read_pubkey(&mut current, data)?; accounts.push(AccountMeta { pubkey, is_signer, is_writable, }); } let program_id = read_pubkey(&mut current, data)?; let data_len = read_u16(&mut current, data)?; let data = read_slice(&mut current, data, data_len as usize)?; Ok(Instruction { program_id, accounts, data, }) } pub fn signer_keys(&self) -> Vec<&Pubkey> { // Clamp in case we're working on un-`sanitize()`ed input let last_key = self .account_keys .len() .min(self.header.num_required_signatures as usize); self.account_keys[..last_key].iter().collect() } } #[cfg(test)] mod tests { use super::*; use crate::{hash, instruction::AccountMeta, message::MESSAGE_HEADER_LENGTH}; use std::collections::HashSet; #[test] fn test_message_unique_program_ids() { let program_id0 = Pubkey::default(); let program_ids = get_program_ids(&[ Instruction::new_with_bincode(program_id0, &0, vec![]), Instruction::new_with_bincode(program_id0, &0, vec![]), ]); assert_eq!(program_ids, vec![program_id0]); } #[test] fn test_builtin_program_keys() { let keys: HashSet<Pubkey> = BUILTIN_PROGRAMS_KEYS.iter().copied().collect(); assert_eq!(keys.len(), 10); for k in keys { let k = format!("{}", k); assert!(k.ends_with("11111111111111111111111")); } } #[test] fn test_builtin_program_keys_abi_freeze() { // Once the feature is flipped on, we can't further modify // BUILTIN_PROGRAMS_KEYS without the risk of breaking consensus. 
let builtins = format!("{:?}", *BUILTIN_PROGRAMS_KEYS); assert_eq!( format!("{}", hash::hash(builtins.as_bytes())), "ACqmMkYbo9eqK6QrRSrB3HLyR6uHhLf31SCfGUAJjiWj" ); } #[test] fn test_message_unique_program_ids_not_adjacent() { let program_id0 = Pubkey::default(); let program_id1 = Pubkey::new_unique(); let program_ids = get_program_ids(&[ Instruction::new_with_bincode(program_id0, &0, vec![]), Instruction::new_with_bincode(program_id1, &0, vec![]), Instruction::new_with_bincode(program_id0, &0, vec![]), ]); assert_eq!(program_ids, vec![program_id0, program_id1]); } #[test] fn test_message_unique_program_ids_order_preserved() { let program_id0 = Pubkey::new_unique(); let program_id1 = Pubkey::default(); // Key less than program_id0 let program_ids = get_program_ids(&[ Instruction::new_with_bincode(program_id0, &0, vec![]), Instruction::new_with_bincode(program_id1, &0, vec![]), Instruction::new_with_bincode(program_id0, &0, vec![]), ]); assert_eq!(program_ids, vec![program_id0, program_id1]); } #[test] fn test_message_unique_keys_both_signed() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let keys = get_keys( &[ Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]), ], None, ); assert_eq!(keys, InstructionKeys::new(vec![id0], vec![], 0, 0)); } #[test] fn test_message_unique_keys_signed_and_payer() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let keys = get_keys( &[Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new(id0, true)], )], Some(&id0), ); assert_eq!(keys, InstructionKeys::new(vec![id0], vec![], 0, 0)); } #[test] fn test_message_unique_keys_unsigned_and_payer() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let keys = get_keys( &[Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new(id0, false)], )], Some(&id0), ); assert_eq!(keys, InstructionKeys::new(vec![id0], vec![], 0, 0)); } #[test] fn test_message_unique_keys_one_signed() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let keys = get_keys( &[ Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]), ], None, ); assert_eq!(keys, InstructionKeys::new(vec![id0], vec![], 0, 0)); } #[test] fn test_message_unique_keys_one_readonly_signed() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let keys = get_keys( &[ Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id0, true)], ), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]), ], None, ); // Ensure the key is no longer readonly assert_eq!(keys, InstructionKeys::new(vec![id0], vec![], 0, 0)); } #[test] fn test_message_unique_keys_one_readonly_unsigned() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let keys = get_keys( &[ Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id0, false)], ), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]), ], None, ); // Ensure the key is no longer readonly assert_eq!(keys, InstructionKeys::new(vec![], vec![id0], 0, 0)); } #[test] fn test_message_unique_keys_order_preserved() { let program_id = Pubkey::default(); let id0 = Pubkey::new_unique(); let id1 = Pubkey::default(); // Key less than id0 let keys = get_keys( &[ Instruction::new_with_bincode(program_id, &0, 
vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id1, false)]), ], None, ); assert_eq!(keys, InstructionKeys::new(vec![], vec![id0, id1], 0, 0)); } #[test] fn test_message_unique_keys_not_adjacent() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let id1 = Pubkey::new_unique(); let keys = get_keys( &[ Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id1, false)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]), ], None, ); assert_eq!(keys, InstructionKeys::new(vec![id0], vec![id1], 0, 0)); } #[test] fn test_message_signed_keys_first() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let id1 = Pubkey::new_unique(); let keys = get_keys( &[ Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id1, true)]), ], None, ); assert_eq!(keys, InstructionKeys::new(vec![id1], vec![id0], 0, 0)); } #[test] // Ensure there's a way to calculate the number of required signatures. fn test_message_signed_keys_len() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]); let message = Message::new(&[ix], None); assert_eq!(message.header.num_required_signatures, 0); let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]); let message = Message::new(&[ix], Some(&id0)); assert_eq!(message.header.num_required_signatures, 1); } #[test] fn test_message_readonly_keys_last() { let program_id = Pubkey::default(); let id0 = Pubkey::default(); // Identical key/program_id should be de-duped let id1 = Pubkey::new_unique(); let id2 = Pubkey::new_unique(); let id3 = Pubkey::new_unique(); let keys = get_keys( &[ Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id0, false)], ), Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id1, true)], ), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id2, false)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id3, true)]), ], None, ); assert_eq!( keys, InstructionKeys::new(vec![id3, id1], vec![id2, id0], 1, 1) ); } #[test] fn test_message_kitchen_sink() { let program_id0 = Pubkey::new_unique(); let program_id1 = Pubkey::new_unique(); let id0 = Pubkey::default(); let id1 = Pubkey::new_unique(); let message = Message::new( &[ Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id1, &0, vec![AccountMeta::new(id1, true)]), Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id1, false)]), ], Some(&id1), ); assert_eq!( message.instructions[0], CompiledInstruction::new(2, &0, vec![1]) ); assert_eq!( message.instructions[1], CompiledInstruction::new(3, &0, vec![0]) ); assert_eq!( message.instructions[2], CompiledInstruction::new(2, &0, vec![0]) ); } #[test] fn test_message_payer_first() { let program_id = Pubkey::default(); let payer = Pubkey::new_unique(); let id0 = Pubkey::default(); let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]); let message = Message::new(&[ix], Some(&payer)); assert_eq!(message.header.num_required_signatures, 1); let ix = Instruction::new_with_bincode(program_id, &0, 
vec![AccountMeta::new(id0, true)]); let message = Message::new(&[ix], Some(&payer)); assert_eq!(message.header.num_required_signatures, 2); let ix = Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new(payer, true), AccountMeta::new(id0, true)], ); let message = Message::new(&[ix], Some(&payer)); assert_eq!(message.header.num_required_signatures, 2); } #[test] fn test_message_program_last() { let program_id = Pubkey::default(); let id0 = Pubkey::new_unique(); let id1 = Pubkey::new_unique(); let keys = get_keys( &[ Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id0, false)], ), Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id1, true)], ), ], None, ); assert_eq!( keys, InstructionKeys::new(vec![id1], vec![id0, program_id], 1, 2) ); } #[test] fn test_program_position() { let program_id0 = Pubkey::default(); let program_id1 = Pubkey::new_unique(); let id = Pubkey::new_unique(); let message = Message::new( &[ Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id, false)]), Instruction::new_with_bincode(program_id1, &0, vec![AccountMeta::new(id, true)]), ], Some(&id), ); assert_eq!(message.program_position(0), None); assert_eq!(message.program_position(1), Some(0)); assert_eq!(message.program_position(2), Some(1)); } #[test] fn test_is_writable() { let key0 = Pubkey::new_unique(); let key1 = Pubkey::new_unique(); let key2 = Pubkey::new_unique(); let key3 = Pubkey::new_unique(); let key4 = Pubkey::new_unique(); let key5 = Pubkey::new_unique(); let message = Message { header: MessageHeader { num_required_signatures: 3, num_readonly_signed_accounts: 2, num_readonly_unsigned_accounts: 1, }, account_keys: vec![key0, key1, key2, key3, key4, key5], recent_blockhash: Hash::default(), instructions: vec![], }; assert!(message.is_writable(0)); assert!(!message.is_writable(1)); assert!(!message.is_writable(2)); assert!(message.is_writable(3)); assert!(message.is_writable(4)); assert!(!message.is_writable(5)); } #[test] fn test_get_account_keys_by_lock_type() { let program_id = Pubkey::default(); let id0 = Pubkey::new_unique(); let id1 = Pubkey::new_unique(); let id2 = Pubkey::new_unique(); let id3 = Pubkey::new_unique(); let message = Message::new( &[ Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id1, true)]), Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id2, false)], ), Instruction::new_with_bincode( program_id, &0, vec![AccountMeta::new_readonly(id3, true)], ), ], Some(&id1), ); assert_eq!( message.get_account_keys_by_lock_type(), (vec![&id1, &id0], vec![&id3, &id2, &program_id]) ); } #[test] fn test_decompile_instructions() { solana_logger::setup(); let program_id0 = Pubkey::new_unique(); let program_id1 = Pubkey::new_unique(); let id0 = Pubkey::new_unique(); let id1 = Pubkey::new_unique(); let id2 = Pubkey::new_unique(); let id3 = Pubkey::new_unique(); let instructions = vec![ Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id1, true)]), Instruction::new_with_bincode( program_id1, &0, vec![AccountMeta::new_readonly(id2, false)], ), Instruction::new_with_bincode( program_id1, &0, vec![AccountMeta::new_readonly(id3, true)], ), ]; let message = Message::new(&instructions, Some(&id1)); let serialized = message.serialize_instructions(); for (i, instruction) in 
instructions.iter().enumerate() { assert_eq!( Message::deserialize_instruction(i, &serialized).unwrap(), *instruction ); } } #[test] fn test_decompile_instructions_out_of_bounds() { solana_logger::setup(); let program_id0 = Pubkey::new_unique(); let id0 = Pubkey::new_unique(); let id1 = Pubkey::new_unique(); let instructions = vec![ Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id1, true)]), ]; let message = Message::new(&instructions, Some(&id1)); let serialized = message.serialize_instructions(); assert_eq!( Message::deserialize_instruction(instructions.len(), &serialized).unwrap_err(), SanitizeError::IndexOutOfBounds, ); } #[test] fn test_program_ids() { let key0 = Pubkey::new_unique(); let key1 = Pubkey::new_unique(); let loader2 = Pubkey::new_unique(); let instructions = vec![CompiledInstruction::new(2, &(), vec![0, 1])]; let message = Message::new_with_compiled_instructions( 1, 0, 2, vec![key0, key1, loader2], Hash::default(), instructions, ); assert_eq!(message.program_ids(), vec![&loader2]); } #[test] fn test_is_key_passed_to_program() { let key0 = Pubkey::new_unique(); let key1 = Pubkey::new_unique(); let loader2 = Pubkey::new_unique(); let instructions = vec![CompiledInstruction::new(2, &(), vec![0, 1])]; let message = Message::new_with_compiled_instructions( 1, 0, 2, vec![key0, key1, loader2], Hash::default(), instructions, ); assert!(message.is_key_passed_to_program(0)); assert!(message.is_key_passed_to_program(1)); assert!(!message.is_key_passed_to_program(2)); } #[test] fn test_is_non_loader_key() { let key0 = Pubkey::new_unique(); let key1 = Pubkey::new_unique(); let loader2 = Pubkey::new_unique(); let instructions = vec![CompiledInstruction::new(2, &(), vec![0, 1])]; let message = Message::new_with_compiled_instructions( 1, 0, 2, vec![key0, key1, loader2], Hash::default(), instructions, ); assert!(message.is_non_loader_key(0)); assert!(message.is_non_loader_key(1)); assert!(!message.is_non_loader_key(2)); } #[test] fn test_message_header_len_constant() { assert_eq!( bincode::serialized_size(&MessageHeader::default()).unwrap() as usize, MESSAGE_HEADER_LENGTH ); } #[test] fn test_message_hash() { // when this test fails, it's most likely due to a new serialized format of a message. // in this case, the domain prefix `solana-tx-message-v1` should be updated. let program_id0 = Pubkey::from_str("4uQeVj5tqViQh7yWWGStvkEG1Zmhx6uasJtWCJziofM").unwrap(); let program_id1 = Pubkey::from_str("8opHzTAnfzRpPEx21XtnrVTX28YQuCpAjcn1PczScKh").unwrap(); let id0 = Pubkey::from_str("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3").unwrap(); let id1 = Pubkey::from_str("GcdayuLaLyrdmUu324nahyv33G5poQdLUEZ1nEytDeP").unwrap(); let id2 = Pubkey::from_str("LX3EUdRUBUa3TbsYXLEUdj9J3prXkWXvLYSWyYyc2Jj").unwrap(); let id3 = Pubkey::from_str("QRSsyMWN1yHT9ir42bgNZUNZ4PdEhcSWCrL2AryKpy5").unwrap(); let instructions = vec![ Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id0, false)]), Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id1, true)]), Instruction::new_with_bincode( program_id1, &0, vec![AccountMeta::new_readonly(id2, false)], ), Instruction::new_with_bincode( program_id1, &0, vec![AccountMeta::new_readonly(id3, true)], ), ]; let message = Message::new(&instructions, Some(&id1)); assert_eq!( message.hash(), Hash::from_str("CXRH7GHLieaQZRUjH1mpnNnUZQtU4V4RpJpAFgy77i3z").unwrap() ) } }
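// Illustrative sketch (not part of the original file): a minimal round-trip of
// the instruction (de)serialization exercised above, using only APIs already
// present in this module and the same conventions as the existing tests.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn round_trips_a_single_instruction() {
        let program_id = Pubkey::new_unique();
        let payer = Pubkey::new_unique();
        // One writable, signing account: the payer itself.
        let ix = Instruction::new_with_bincode(
            program_id,
            &0,
            vec![AccountMeta::new(payer, true)],
        );
        let message = Message::new(&[ix.clone()], Some(&payer));
        let serialized = message.serialize_instructions();
        // The decompiled instruction should match the original, including the
        // signer/writable flags recovered from the message header.
        assert_eq!(
            Message::deserialize_instruction(0, &serialized).unwrap(),
            ix
        );
    }
}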
35.739172
100
0.575014
d6f858697e7ada2cc444c8a1f3c4a8c492e08112
21,263
//! Tock kernel for the Nordic Semiconductor nRF52840 development kit (DK). //! //! It is based on nRF52840 SoC (Cortex M4 core with a BLE transceiver) with //! many exported I/O and peripherals. //! //! Pin Configuration //! ------------------- //! //! ### `GPIO` //! //! | # | Pin | Ix | Header | Arduino | //! |----|-------|----|--------|---------| //! | 0 | P1.01 | 33 | P3 1 | D0 | //! | 1 | P1.02 | 34 | P3 2 | D1 | //! | 2 | P1.03 | 35 | P3 3 | D2 | //! | 3 | P1.04 | 36 | P3 4 | D3 | //! | 4 | P1.05 | 37 | P3 5 | D4 | //! | 5 | P1.06 | 38 | P3 6 | D5 | //! | 6 | P1.07 | 39 | P3 7 | D6 | //! | 7 | P1.08 | 40 | P3 8 | D7 | //! | 8 | P1.10 | 42 | P4 1 | D8 | //! | 9 | P1.11 | 43 | P4 2 | D9 | //! | 10 | P1.12 | 44 | P4 3 | D10 | //! | 11 | P1.13 | 45 | P4 4 | D11 | //! | 12 | P1.14 | 46 | P4 5 | D12 | //! | 13 | P1.15 | 47 | P4 6 | D13 | //! | 14 | P0.26 | 26 | P4 9 | D14 | //! | 15 | P0.27 | 27 | P4 10 | D15 | //! //! ### `GPIO` / Analog Inputs //! //! | # | Pin | Header | Arduino | //! |----|------------|--------|---------| //! | 16 | P0.03 AIN1 | P2 1 | A0 | //! | 17 | P0.04 AIN2 | P2 2 | A1 | //! | 18 | P0.28 AIN4 | P2 3 | A2 | //! | 19 | P0.29 AIN5 | P2 4 | A3 | //! | 20 | P0.30 AIN6 | P2 5 | A4 | //! | 21 | P0.31 AIN7 | P2 6 | A5 | //! | 22 | P0.02 AIN0 | P4 8 | AVDD | //! //! ### Onboard Functions //! //! | Pin | Header | Function | //! |-------|--------|----------| //! | P0.05 | P6 3 | UART RTS | //! | P0.06 | P6 4 | UART TXD | //! | P0.07 | P6 5 | UART CTS | //! | P0.08 | P6 6 | UART RXT | //! | P0.11 | P24 1 | Button 1 | //! | P0.12 | P24 2 | Button 2 | //! | P0.13 | P24 3 | LED 1 | //! | P0.14 | P24 4 | LED 2 | //! | P0.15 | P24 5 | LED 3 | //! | P0.16 | P24 6 | LED 4 | //! | P0.18 | P24 8 | Reset | //! | P0.19 | P24 9 | SPI CLK | //! | P0.20 | P24 10 | SPI MOSI | //! | P0.21 | P24 11 | SPI MISO | //! | P0.24 | P24 14 | Button 3 | //! | P0.25 | P24 15 | Button 4 | #![no_std] // Disable this attribute when documenting, as a workaround for // https://github.com/rust-lang/rust/issues/62184. 
#![cfg_attr(not(doc), no_main)] #![deny(missing_docs)] #![feature(const_in_array_repeat_expressions)] use capsules::net::ieee802154::MacAddress; use capsules::net::ipv6::ip_utils::IPAddr; use capsules::virtual_alarm::VirtualMuxAlarm; use kernel::common::dynamic_deferred_call::{DynamicDeferredCall, DynamicDeferredCallClientState}; use kernel::component::Component; use kernel::hil::time::Counter; #[allow(unused_imports)] use kernel::hil::usb::Client; #[allow(unused_imports)] use kernel::{capabilities, create_capability, debug, debug_gpio, debug_verbose, static_init}; use nrf52840::gpio::Pin; use nrf52_components::{self, UartChannel, UartPins}; // The nRF52840DK LEDs (see back of board) const LED1_PIN: Pin = Pin::P0_13; const LED2_PIN: Pin = Pin::P0_14; const LED3_PIN: Pin = Pin::P0_15; const LED4_PIN: Pin = Pin::P0_16; // The nRF52840DK buttons (see back of board) const BUTTON1_PIN: Pin = Pin::P0_11; const BUTTON2_PIN: Pin = Pin::P0_12; const BUTTON3_PIN: Pin = Pin::P0_24; const BUTTON4_PIN: Pin = Pin::P0_25; const BUTTON_RST_PIN: Pin = Pin::P0_18; const UART_RTS: Option<Pin> = Some(Pin::P0_05); const UART_TXD: Pin = Pin::P0_06; const UART_CTS: Option<Pin> = Some(Pin::P0_07); const UART_RXD: Pin = Pin::P0_08; const SPI_MOSI: Pin = Pin::P0_20; const SPI_MISO: Pin = Pin::P0_21; const SPI_CLK: Pin = Pin::P0_19; const SPI_MX25R6435F_CHIP_SELECT: Pin = Pin::P0_17; const SPI_MX25R6435F_WRITE_PROTECT_PIN: Pin = Pin::P0_22; const SPI_MX25R6435F_HOLD_PIN: Pin = Pin::P0_23; // Constants related to the configuration of the 15.4 network stack const PAN_ID: u16 = 0xABCD; const DST_MAC_ADDR: capsules::net::ieee802154::MacAddress = capsules::net::ieee802154::MacAddress::Short(49138); const DEFAULT_CTX_PREFIX_LEN: u8 = 8; //Length of context for 6LoWPAN compression const DEFAULT_CTX_PREFIX: [u8; 16] = [0x0 as u8; 16]; //Context for 6LoWPAN Compression /// Debug Writer pub mod io; // Whether to use UART debugging or Segger RTT (USB) debugging. // - Set to false to use UART. // - Set to true to use Segger RTT over USB. const USB_DEBUGGING: bool = false; // State for loading and holding applications. // How should the kernel respond when a process faults. const FAULT_RESPONSE: kernel::procs::FaultResponse = kernel::procs::FaultResponse::Panic; // Number of concurrent processes this platform supports. const NUM_PROCS: usize = 8; static mut PROCESSES: [Option<&'static dyn kernel::procs::ProcessType>; NUM_PROCS] = [None; NUM_PROCS]; static mut CHIP: Option<&'static nrf52840::chip::Chip> = None; /// Dummy buffer that causes the linker to reserve enough space for the stack. 
#[no_mangle] #[link_section = ".stack_buffer"] pub static mut STACK_MEMORY: [u8; 0x1000] = [0; 0x1000]; /// Supported drivers by the platform pub struct Platform { ble_radio: &'static capsules::ble_advertising_driver::BLE< 'static, nrf52840::ble_radio::Radio<'static>, VirtualMuxAlarm<'static, nrf52840::rtc::Rtc<'static>>, >, ieee802154_radio: &'static capsules::ieee802154::RadioDriver<'static>, button: &'static capsules::button::Button<'static, nrf52840::gpio::GPIOPin<'static>>, pconsole: &'static capsules::process_console::ProcessConsole< 'static, components::process_console::Capability, >, console: &'static capsules::console::Console<'static>, gpio: &'static capsules::gpio::GPIO<'static, nrf52840::gpio::GPIOPin<'static>>, led: &'static capsules::led::LED<'static, nrf52840::gpio::GPIOPin<'static>>, rng: &'static capsules::rng::RngDriver<'static>, temp: &'static capsules::temperature::TemperatureSensor<'static>, ipc: kernel::ipc::IPC, analog_comparator: &'static capsules::analog_comparator::AnalogComparator< 'static, nrf52840::acomp::Comparator<'static>, >, alarm: &'static capsules::alarm::AlarmDriver< 'static, capsules::virtual_alarm::VirtualMuxAlarm<'static, nrf52840::rtc::Rtc<'static>>, >, nonvolatile_storage: &'static capsules::nonvolatile_storage_driver::NonvolatileStorage<'static>, udp_driver: &'static capsules::net::udp::UDPDriver<'static>, } impl kernel::Platform for Platform { fn with_driver<F, R>(&self, driver_num: usize, f: F) -> R where F: FnOnce(Option<&dyn kernel::Driver>) -> R, { match driver_num { capsules::console::DRIVER_NUM => f(Some(self.console)), capsules::gpio::DRIVER_NUM => f(Some(self.gpio)), capsules::alarm::DRIVER_NUM => f(Some(self.alarm)), capsules::led::DRIVER_NUM => f(Some(self.led)), capsules::button::DRIVER_NUM => f(Some(self.button)), capsules::rng::DRIVER_NUM => f(Some(self.rng)), capsules::ble_advertising_driver::DRIVER_NUM => f(Some(self.ble_radio)), capsules::ieee802154::DRIVER_NUM => f(Some(self.ieee802154_radio)), capsules::temperature::DRIVER_NUM => f(Some(self.temp)), capsules::analog_comparator::DRIVER_NUM => f(Some(self.analog_comparator)), capsules::nonvolatile_storage_driver::DRIVER_NUM => f(Some(self.nonvolatile_storage)), capsules::net::udp::DRIVER_NUM => f(Some(self.udp_driver)), kernel::ipc::DRIVER_NUM => f(Some(&self.ipc)), _ => f(None), } } } /// Entry point in the vector table called on hard reset. #[no_mangle] pub unsafe fn reset_handler() { // Loads relocations and clears BSS nrf52840::init(); let uart_channel = if USB_DEBUGGING { // Initialize early so any panic beyond this point can use the RTT memory object. let mut rtt_memory_refs = components::segger_rtt::SeggerRttMemoryComponent::new().finalize(()); // XXX: This is inherently unsafe as it aliases the mutable reference to rtt_memory. This // aliases reference is only used inside a panic handler, which should be OK, but maybe we // should use a const reference to rtt_memory and leverage interior mutability instead. 
self::io::set_rtt_memory(&mut *rtt_memory_refs.get_rtt_memory_ptr()); UartChannel::Rtt(rtt_memory_refs) } else { UartChannel::Pins(UartPins::new(UART_RTS, UART_TXD, UART_CTS, UART_RXD)) }; let board_kernel = static_init!(kernel::Kernel, kernel::Kernel::new(&PROCESSES)); let gpio = components::gpio::GpioComponent::new( board_kernel, components::gpio_component_helper!( nrf52840::gpio::GPIOPin, 0 => &nrf52840::gpio::PORT[Pin::P1_01], 1 => &nrf52840::gpio::PORT[Pin::P1_02], 2 => &nrf52840::gpio::PORT[Pin::P1_03], 3 => &nrf52840::gpio::PORT[Pin::P1_04], 4 => &nrf52840::gpio::PORT[Pin::P1_05], 5 => &nrf52840::gpio::PORT[Pin::P1_06], 6 => &nrf52840::gpio::PORT[Pin::P1_07], 7 => &nrf52840::gpio::PORT[Pin::P1_08], 8 => &nrf52840::gpio::PORT[Pin::P1_10], 9 => &nrf52840::gpio::PORT[Pin::P1_11], 10 => &nrf52840::gpio::PORT[Pin::P1_12], 11 => &nrf52840::gpio::PORT[Pin::P1_13], 12 => &nrf52840::gpio::PORT[Pin::P1_14], 13 => &nrf52840::gpio::PORT[Pin::P1_15], 14 => &nrf52840::gpio::PORT[Pin::P0_26], 15 => &nrf52840::gpio::PORT[Pin::P0_27] ), ) .finalize(components::gpio_component_buf!(nrf52840::gpio::GPIOPin)); let button = components::button::ButtonComponent::new( board_kernel, components::button_component_helper!( nrf52840::gpio::GPIOPin, ( &nrf52840::gpio::PORT[BUTTON1_PIN], kernel::hil::gpio::ActivationMode::ActiveLow, kernel::hil::gpio::FloatingState::PullUp ), //13 ( &nrf52840::gpio::PORT[BUTTON2_PIN], kernel::hil::gpio::ActivationMode::ActiveLow, kernel::hil::gpio::FloatingState::PullUp ), //14 ( &nrf52840::gpio::PORT[BUTTON3_PIN], kernel::hil::gpio::ActivationMode::ActiveLow, kernel::hil::gpio::FloatingState::PullUp ), //15 ( &nrf52840::gpio::PORT[BUTTON4_PIN], kernel::hil::gpio::ActivationMode::ActiveLow, kernel::hil::gpio::FloatingState::PullUp ) //16 ), ) .finalize(components::button_component_buf!(nrf52840::gpio::GPIOPin)); let led = components::led::LedsComponent::new(components::led_component_helper!( nrf52840::gpio::GPIOPin, ( &nrf52840::gpio::PORT[LED1_PIN], kernel::hil::gpio::ActivationMode::ActiveLow ), ( &nrf52840::gpio::PORT[LED2_PIN], kernel::hil::gpio::ActivationMode::ActiveLow ), ( &nrf52840::gpio::PORT[LED3_PIN], kernel::hil::gpio::ActivationMode::ActiveLow ), ( &nrf52840::gpio::PORT[LED4_PIN], kernel::hil::gpio::ActivationMode::ActiveLow ) )) .finalize(components::led_component_buf!(nrf52840::gpio::GPIOPin)); let chip = static_init!(nrf52840::chip::Chip, nrf52840::chip::new()); CHIP = Some(chip); nrf52_components::startup::NrfStartupComponent::new( false, BUTTON_RST_PIN, nrf52840::uicr::Regulator0Output::DEFAULT, ) .finalize(()); // Create capabilities that the board needs to call certain protected kernel // functions. 
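    // (A capability here is an unforgeable marker value: it can only be created
    // through `create_capability!` in trusted board code, and holding one is
    // what authorizes calls to the corresponding restricted kernel APIs.)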
let process_management_capability = create_capability!(capabilities::ProcessManagementCapability); let main_loop_capability = create_capability!(capabilities::MainLoopCapability); let memory_allocation_capability = create_capability!(capabilities::MemoryAllocationCapability); let gpio_port = &nrf52840::gpio::PORT; // Configure kernel debug gpios as early as possible kernel::debug::assign_gpios( Some(&gpio_port[LED1_PIN]), Some(&gpio_port[LED2_PIN]), Some(&gpio_port[LED3_PIN]), ); let rtc = &nrf52840::rtc::RTC; rtc.start(); let mux_alarm = components::alarm::AlarmMuxComponent::new(rtc) .finalize(components::alarm_mux_component_helper!(nrf52840::rtc::Rtc)); let alarm = components::alarm::AlarmDriverComponent::new(board_kernel, mux_alarm) .finalize(components::alarm_component_helper!(nrf52840::rtc::Rtc)); let channel = nrf52_components::UartChannelComponent::new(uart_channel, mux_alarm).finalize(()); let dynamic_deferred_call_clients = static_init!([DynamicDeferredCallClientState; 2], Default::default()); let dynamic_deferred_caller = static_init!( DynamicDeferredCall, DynamicDeferredCall::new(dynamic_deferred_call_clients) ); DynamicDeferredCall::set_global_instance(dynamic_deferred_caller); // Create a shared UART channel for the console and for kernel debug. let uart_mux = components::console::UartMuxComponent::new(channel, 115200, dynamic_deferred_caller) .finalize(()); let pconsole = components::process_console::ProcessConsoleComponent::new(board_kernel, uart_mux) .finalize(()); // Setup the console. let console = components::console::ConsoleComponent::new(board_kernel, uart_mux).finalize(()); // Create the debugger object that handles calls to `debug!()`. components::debug_writer::DebugWriterComponent::new(uart_mux).finalize(()); let ble_radio = nrf52_components::BLEComponent::new(board_kernel, &nrf52840::ble_radio::RADIO, mux_alarm) .finalize(()); let serial_num = nrf52840::ficr::FICR_INSTANCE.address(); let serial_num_bottom_16 = serial_num[0] as u16 + ((serial_num[1] as u16) << 8); let src_mac_from_serial_num: MacAddress = MacAddress::Short(serial_num_bottom_16); let (ieee802154_radio, mux_mac) = components::ieee802154::Ieee802154Component::new( board_kernel, &nrf52840::ieee802154_radio::RADIO, &nrf52840::aes::AESECB, PAN_ID, serial_num_bottom_16, ) .finalize(components::ieee802154_component_helper!( nrf52840::ieee802154_radio::Radio, nrf52840::aes::AesECB<'static> )); let local_ip_ifaces = static_init!( [IPAddr; 3], [ IPAddr([ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, ]), IPAddr([ 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, ]), IPAddr::generate_from_mac(capsules::net::ieee802154::MacAddress::Short( serial_num_bottom_16 )), ] ); let (udp_send_mux, udp_recv_mux, udp_port_table) = components::udp_mux::UDPMuxComponent::new( mux_mac, DEFAULT_CTX_PREFIX_LEN, DEFAULT_CTX_PREFIX, DST_MAC_ADDR, src_mac_from_serial_num, local_ip_ifaces, mux_alarm, ) .finalize(components::udp_mux_component_helper!(nrf52840::rtc::Rtc)); // UDP driver initialization happens here let udp_driver = components::udp_driver::UDPDriverComponent::new( board_kernel, udp_send_mux, udp_recv_mux, udp_port_table, local_ip_ifaces, ) .finalize(components::udp_driver_component_helper!(nrf52840::rtc::Rtc)); let temp = components::temperature::TemperatureComponent::new( board_kernel, &nrf52840::temperature::TEMP, ) .finalize(()); let rng = components::rng::RngComponent::new(board_kernel, &nrf52840::trng::TRNG).finalize(()); 
// SPI let mux_spi = components::spi::SpiMuxComponent::new(&nrf52840::spi::SPIM0) .finalize(components::spi_mux_component_helper!(nrf52840::spi::SPIM)); nrf52840::spi::SPIM0.configure( nrf52840::pinmux::Pinmux::new(SPI_MOSI as u32), nrf52840::pinmux::Pinmux::new(SPI_MISO as u32), nrf52840::pinmux::Pinmux::new(SPI_CLK as u32), ); let mx25r6435f = components::mx25r6435f::Mx25r6435fComponent::new( &gpio_port[SPI_MX25R6435F_WRITE_PROTECT_PIN], &gpio_port[SPI_MX25R6435F_HOLD_PIN], &gpio_port[SPI_MX25R6435F_CHIP_SELECT] as &dyn kernel::hil::gpio::Pin, mux_alarm, mux_spi, ) .finalize(components::mx25r6435f_component_helper!( nrf52840::spi::SPIM, nrf52840::gpio::GPIOPin, nrf52840::rtc::Rtc )); let nonvolatile_storage = components::nonvolatile_storage::NonvolatileStorageComponent::new( board_kernel, mx25r6435f, 0x60000, // Start address for userspace accessible region 0x20000, // Length of userspace accessible region 0, // Start address of kernel region 0x60000, // Length of kernel region ) .finalize(components::nv_storage_component_helper!( capsules::mx25r6435f::MX25R6435F< 'static, capsules::virtual_spi::VirtualSpiMasterDevice<'static, nrf52840::spi::SPIM>, nrf52840::gpio::GPIOPin, VirtualMuxAlarm<'static, nrf52840::rtc::Rtc>, > )); // Initialize AC using AIN5 (P0.29) as VIN+ and VIN- as AIN0 (P0.02) // These are hardcoded pin assignments specified in the driver let analog_comparator = components::analog_comparator::AcComponent::new( &nrf52840::acomp::ACOMP, components::acomp_component_helper!( nrf52840::acomp::Channel, &nrf52840::acomp::CHANNEL_AC0 ), ) .finalize(components::acomp_component_buf!( nrf52840::acomp::Comparator )); nrf52_components::NrfClockComponent::new().finalize(()); // let alarm_test_component = // components::test::multi_alarm_test::MultiAlarmTestComponent::new(&mux_alarm).finalize( // components::multi_alarm_test_component_buf!(nrf52840::rtc::Rtc), // ); //-------------------------------------------------------------------------- // USB CTAP EXAMPLE //-------------------------------------------------------------------------- // Uncomment to experiment with this. // // Create the strings we include in the USB descriptor. // let strings = static_init!( // [&str; 3], // [ // "Nordic Semiconductor", // Manufacturer // "nRF52840dk - TockOS", // Product // "serial0001", // Serial number // ] // ); // let ctap_send_buffer = static_init!([u8; 64], [0; 64]); // let ctap_recv_buffer = static_init!([u8; 64], [0; 64]); // let (ctap, _ctap_driver) = components::ctap::CtapComponent::new( // &nrf52840::usbd::USBD, // 0x1915, // Nordic Semiconductor // 0x503a, // lowRISC generic FS USB // strings, // board_kernel, // ctap_send_buffer, // ctap_recv_buffer, // ) // .finalize(components::usb_ctap_component_helper!(nrf52840::usbd::Usbd)); // ctap.enable(); // ctap.attach(); let platform = Platform { button, ble_radio, ieee802154_radio, pconsole, console, led, gpio, rng, temp, alarm, analog_comparator, nonvolatile_storage, udp_driver, ipc: kernel::ipc::IPC::new(board_kernel, &memory_allocation_capability), }; platform.pconsole.start(); debug!("Initialization complete. Entering main loop\r"); debug!("{}", &nrf52840::ficr::FICR_INSTANCE); // alarm_test_component.run(); /// These symbols are defined in the linker script. extern "C" { /// Beginning of the ROM region containing app images. static _sapps: u8; /// End of the ROM region containing app images. static _eapps: u8; /// Beginning of the RAM region for app memory. static mut _sappmem: u8; /// End of the RAM region for app memory. 
static _eappmem: u8; } kernel::procs::load_processes( board_kernel, chip, core::slice::from_raw_parts( &_sapps as *const u8, &_eapps as *const u8 as usize - &_sapps as *const u8 as usize, ), core::slice::from_raw_parts_mut( &mut _sappmem as *mut u8, &_eappmem as *const u8 as usize - &_sappmem as *const u8 as usize, ), &mut PROCESSES, FAULT_RESPONSE, &process_management_capability, ) .unwrap_or_else(|err| { debug!("Error loading processes!"); debug!("{:?}", err); }); let scheduler = components::sched::round_robin::RoundRobinComponent::new(&PROCESSES) .finalize(components::rr_component_helper!(NUM_PROCS)); board_kernel.kernel_loop( &platform, chip, Some(&platform.ipc), scheduler, &main_loop_capability, ); }
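// Note on the `load_processes` call above: the app flash region is the byte
// range [_sapps, _eapps) and the app RAM region is [_sappmem, _eappmem); both
// bounds come from the board's linker script, and the loader splits them
// across at most NUM_PROCS processes.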
37.369069
100
0.606217
bbb346eb3edffd4b18be6893ec1e587015f21d0b
2,107
#[cfg(test)] use cpu::instruction::{AddressMode, Operation}; #[cfg(test)] use data_flow::ReadWrite; #[cfg(test)] use cpu::Cpu; #[cfg(test)] const DEF_ADDR_MODE: &AddressMode = &AddressMode::XXX; #[cfg(test)] const ROOT: u16 = 0x0000; #[cfg(test)] mod inc { use super::*; const OP: &Operation = &Operation::INC; #[test] fn increments_memory() { let mut cpu = Cpu::new(); cpu.write_addr(ROOT, 0x00); cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.read_addr(ROOT), 0x01); } #[test] fn flag_n() { let mut cpu = Cpu::new(); cpu.write_addr(ROOT, 0b0111_1111); cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.status, 0b1000_0000); } #[test] fn flag_z() { let mut cpu = Cpu::new(); cpu.write_addr(ROOT, 0xFF); cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.status, 0b0000_0010); } } #[cfg(test)] mod inx { use super::*; const OP: &Operation = &Operation::INX; #[test] fn increments_register() { let mut cpu = Cpu::new(); cpu.x = 0x00; cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.x, 0x01); } #[test] fn flag_n() { let mut cpu = Cpu::new(); cpu.x = 0b0111_1111; cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.status, 0b1000_0000); } #[test] fn flag_z() { let mut cpu = Cpu::new(); cpu.x = 0xFF; cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.status, 0b0000_0010); } } #[cfg(test)] mod iny { use super::*; const OP: &Operation = &Operation::INY; #[test] fn increments_register() { let mut cpu = Cpu::new(); cpu.y = 0x00; cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.y, 0x01); } #[test] fn flag_n() { let mut cpu = Cpu::new(); cpu.y = 0b0111_1111; cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.status, 0b1000_0000); } #[test] fn flag_z() { let mut cpu = Cpu::new(); cpu.y = 0xFF; cpu.perform_operation(ROOT, OP, DEF_ADDR_MODE); assert_eq!(cpu.status, 0b0000_0010); } }
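// Flag conventions assumed by the assertions above (the standard 6502 status
// register layout): bit 7 (0b1000_0000) is the negative flag N and bit 1
// (0b0000_0010) is the zero flag Z, so incrementing 0x7F sets N and
// incrementing 0xFF wraps to 0x00 and sets Z.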
21.947917
54
0.621737
11b7f71ccfc071658df4174d7be13f2be454f4ea
5,269
mod controller; mod store; pub use store::*; pub use context::*; mod context { use std::sync::Arc; use std::fmt::Display; use tracing::debug; use event_listener::Event; use event_listener::EventListener; use async_rwlock::RwLockReadGuard; use crate::FluvioError; use crate::metadata::core::Spec; use crate::metadata::store::LocalStore; use crate::metadata::store::DualEpochMap; use crate::metadata::store::MetadataStoreObject; use crate::metadata::spu::SpuSpec; use crate::metadata::core::MetadataItem; pub(crate) type CacheMetadataStoreObject<S> = MetadataStoreObject<S, AlwaysNewContext>; /// context that always updates #[derive(Debug, Default, Clone, PartialEq)] pub struct AlwaysNewContext {} impl MetadataItem for AlwaysNewContext { type UId = u64; fn uid(&self) -> &Self::UId { &0 } fn is_newer(&self, _another: &Self) -> bool { true } } #[derive(Debug, Clone)] pub struct StoreContext<S> where S: Spec, { store: Arc<LocalStore<S, AlwaysNewContext>>, spec_event: Arc<Event>, status_event: Arc<Event>, } impl<S> StoreContext<S> where S: Spec, { pub fn new() -> Self { Self { store: LocalStore::new_shared(), spec_event: Arc::new(Event::new()), status_event: Arc::new(Event::new()), } } pub fn store(&self) -> &Arc<LocalStore<S, AlwaysNewContext>> { &self.store } pub fn listen(&self) -> EventListener { self.spec_event.listen() } #[allow(unused)] pub fn status_listen(&self) -> EventListener { self.status_event.listen() } /// notify changes to specs pub fn notify_spec_changes(&self) { self.spec_event.notify(usize::MAX); } /// notify changes to status pub fn notify_status_changes(&self) { self.status_event.notify(usize::MAX); } /// look up object by index key #[allow(unused)] pub async fn try_lookup_by_key( &self, key: &S::IndexKey, ) -> Option<CacheMetadataStoreObject<S>> { let read_lock = self.store.read().await; read_lock.get(key).map(|value| value.inner().clone()) } pub async fn lookup_by_key( &self, key: &S::IndexKey, ) -> Result<CacheMetadataStoreObject<S>, FluvioError> where S: 'static, S::IndexKey: Display, { debug!("lookup for {} key: {}", S::LABEL, key); self.lookup_and_wait(|g| g.get(key).map(|v| v.inner().clone())) .await } /// look up value for key, if it doesn't exists, wait with max timeout pub async fn lookup_and_wait<'a, F>( &'a self, search: F, ) -> Result<CacheMetadataStoreObject<S>, FluvioError> where S: 'static, S::IndexKey: Display, F: Fn( RwLockReadGuard<'a, DualEpochMap<S::IndexKey, CacheMetadataStoreObject<S>>>, ) -> Option<CacheMetadataStoreObject<S>>, { use std::time::Duration; use std::io::Error as IoError; use std::io::ErrorKind; use tokio::select; use fluvio_future::timer::sleep; const TIMER_DURATION: u64 = 300; let mut timer = sleep(Duration::from_millis(TIMER_DURATION)); loop { debug!(SPEC = S::LABEL, "checking to see if exists"); if let Some(value) = search(self.store().read().await) { debug!(SPEC = S::LABEL, "found value"); return Ok(value); } else { debug!(SPEC = S::LABEL, "value not found, waiting"); select! 
{ _ = &mut timer => { debug!( SPEC = S::LABEL, "store look up timeout expired"); return Err(IoError::new( ErrorKind::TimedOut, format!("{} store lookup failed due to timeout",S::LABEL), ).into()) }, _ = self.listen() => { debug!( SPEC = S::LABEL, "store updated"); } } } } } } impl<S> Default for StoreContext<S> where S: Spec, { fn default() -> Self { Self::new() } } impl StoreContext<SpuSpec> { pub async fn look_up_by_id( &self, id: i32, ) -> Result<CacheMetadataStoreObject<SpuSpec>, FluvioError> { self.lookup_and_wait(|g| { for spu in g.values() { if spu.spec.id == id { return Some(spu.inner().clone()); } } None }) .await } } }
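// Design note on `lookup_and_wait` above: the 300 ms timer is created once and
// re-polled across loop iterations, so it bounds the total wait rather than
// each retry, while `self.listen()` wakes the loop whenever
// `notify_spec_changes` fires, so the store is re-checked on updates instead
// of being busy-polled.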
27.731579
92
0.485102
9c79aaec3a72b9d1c3a0d75e74b0dcf9efb322f6
49,748
//! Abstract syntax tree (AST). pub mod check; pub mod flatbuffers; pub mod walk; use super::DefaultHasher; use crate::scanner; use std::collections::HashMap; use std::fmt; use std::str::FromStr; use std::vec::Vec; use chrono::FixedOffset; use derive_more::Display; use serde::de::{Deserialize, Deserializer, Error, Visitor}; use serde::ser::{Serialize, SerializeSeq, Serializer}; use serde_aux::prelude::*; /// The default package name. pub const DEFAULT_PACKAGE_NAME: &str = "main"; /// Position is the AST counterpart of [`scanner::Position`]. /// It adds serde capabilities. #[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct Position { pub line: u32, pub column: u32, } impl Position { #[allow(missing_docs)] pub fn is_valid(&self) -> bool { self.line > 0 && self.column > 0 } #[allow(missing_docs)] pub fn invalid() -> Self { Position { line: 0, column: 0 } } } impl From<&scanner::Position> for Position { fn from(item: &scanner::Position) -> Self { Position { line: item.line, column: item.column, } } } impl From<&Position> for scanner::Position { fn from(item: &Position) -> Self { scanner::Position { line: item.line, column: item.column, } } } impl Default for Position { fn default() -> Self { Self::invalid() } } /// Convert a Position to a lsp_types::Position /// https://microsoft.github.io/language-server-protocol/specification#position #[cfg(feature = "lsp")] impl Into<lsp_types::Position> for Position { fn into(self) -> lsp_types::Position { lsp_types::Position { line: self.line - 1, character: self.column - 1, } } } /// Represents the location of a node in the AST. #[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)] pub struct SourceLocation { /// File is the optional file name. #[serde(skip_serializing_if = "skip_string_option")] pub file: Option<String>, /// Start is the location in the source the node starts. pub start: Position, /// End is the location in the source the node ends. pub end: Position, /// Source is optional raw source. #[serde(skip_serializing_if = "skip_string_option")] pub source: Option<String>, } impl SourceLocation { #[allow(missing_docs)] pub fn is_valid(&self) -> bool { self.start.is_valid() && self.end.is_valid() } #[allow(missing_docs)] pub fn is_multiline(&self) -> bool { self.start.line != self.end.line } } fn skip_string_option(opt_str: &Option<String>) -> bool { opt_str.is_none() || opt_str.as_ref().unwrap().is_empty() } impl fmt::Display for SourceLocation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let fname = match &self.file { Some(s) => s.clone(), None => "".to_string(), }; write!( f, "{}@{}:{}-{}:{}", fname, self.start.line, self.start.column, self.end.line, self.end.column ) } } // serialize_to_string serializes an object that implements ToString to its string representation. 
fn serialize_to_string<T, S>(field: &T, ser: S) -> Result<S::Ok, S::Error> where S: Serializer, T: ToString, { let s = field.to_string(); ser.serialize_str(s.as_str()) } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub enum Expression { Identifier(Identifier), #[serde(rename = "ArrayExpression")] Array(Box<ArrayExpr>), #[serde(rename = "DictExpression")] Dict(Box<DictExpr>), #[serde(rename = "FunctionExpression")] Function(Box<FunctionExpr>), #[serde(rename = "LogicalExpression")] Logical(Box<LogicalExpr>), #[serde(rename = "ObjectExpression")] Object(Box<ObjectExpr>), #[serde(rename = "MemberExpression")] Member(Box<MemberExpr>), #[serde(rename = "IndexExpression")] Index(Box<IndexExpr>), #[serde(rename = "BinaryExpression")] Binary(Box<BinaryExpr>), #[serde(rename = "UnaryExpression")] Unary(Box<UnaryExpr>), #[serde(rename = "PipeExpression")] PipeExpr(Box<PipeExpr>), #[serde(rename = "CallExpression")] Call(Box<CallExpr>), #[serde(rename = "ConditionalExpression")] Conditional(Box<ConditionalExpr>), #[serde(rename = "StringExpression")] StringExpr(Box<StringExpr>), #[serde(rename = "ParenExpression")] Paren(Box<ParenExpr>), #[serde(rename = "IntegerLiteral")] Integer(IntegerLit), #[serde(rename = "FloatLiteral")] Float(FloatLit), #[serde(rename = "StringLiteral")] StringLit(StringLit), #[serde(rename = "DurationLiteral")] Duration(DurationLit), #[serde(rename = "UnsignedIntegerLiteral")] Uint(UintLit), #[serde(rename = "BooleanLiteral")] Boolean(BooleanLit), #[serde(rename = "DateTimeLiteral")] DateTime(DateTimeLit), #[serde(rename = "RegexpLiteral")] Regexp(RegexpLit), #[serde(rename = "PipeLiteral")] PipeLit(PipeLit), #[serde(rename = "BadExpression")] Bad(Box<BadExpr>), } impl Expression { /// Returns the [`BaseNode`] for an [`Expression`]. 
pub fn base(&self) -> &BaseNode { match self { Expression::Identifier(wrapped) => &wrapped.base, Expression::Array(wrapped) => &wrapped.base, Expression::Dict(wrapped) => &wrapped.base, Expression::Function(wrapped) => &wrapped.base, Expression::Logical(wrapped) => &wrapped.base, Expression::Object(wrapped) => &wrapped.base, Expression::Member(wrapped) => &wrapped.base, Expression::Index(wrapped) => &wrapped.base, Expression::Binary(wrapped) => &wrapped.base, Expression::Unary(wrapped) => &wrapped.base, Expression::PipeExpr(wrapped) => &wrapped.base, Expression::Call(wrapped) => &wrapped.base, Expression::Conditional(wrapped) => &wrapped.base, Expression::Integer(wrapped) => &wrapped.base, Expression::Float(wrapped) => &wrapped.base, Expression::StringLit(wrapped) => &wrapped.base, Expression::Duration(wrapped) => &wrapped.base, Expression::Uint(wrapped) => &wrapped.base, Expression::Boolean(wrapped) => &wrapped.base, Expression::DateTime(wrapped) => &wrapped.base, Expression::Regexp(wrapped) => &wrapped.base, Expression::PipeLit(wrapped) => &wrapped.base, Expression::Bad(wrapped) => &wrapped.base, Expression::StringExpr(wrapped) => &wrapped.base, Expression::Paren(wrapped) => &wrapped.base, } } } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub enum Statement { #[serde(rename = "ExpressionStatement")] Expr(Box<ExprStmt>), #[serde(rename = "VariableAssignment")] Variable(Box<VariableAssgn>), #[serde(rename = "OptionStatement")] Option(Box<OptionStmt>), #[serde(rename = "ReturnStatement")] Return(Box<ReturnStmt>), #[serde(rename = "BadStatement")] Bad(Box<BadStmt>), #[serde(rename = "TestStatement")] Test(Box<TestStmt>), #[serde(rename = "TestCaseStatement")] TestCase(Box<TestCaseStmt>), #[serde(rename = "BuiltinStatement")] Builtin(Box<BuiltinStmt>), } impl Statement { /// Returns the [`BaseNode`] for a [`Statement`]. pub fn base(&self) -> &BaseNode { match self { Statement::Expr(wrapped) => &wrapped.base, Statement::Variable(wrapped) => &wrapped.base, Statement::Option(wrapped) => &wrapped.base, Statement::Return(wrapped) => &wrapped.base, Statement::Bad(wrapped) => &wrapped.base, Statement::Test(wrapped) => &wrapped.base, Statement::TestCase(wrapped) => &wrapped.base, Statement::Builtin(wrapped) => &wrapped.base, } } /// Returns an integer-based type value. pub fn typ(&self) -> i8 { match self { Statement::Expr(_) => 0, Statement::Variable(_) => 1, Statement::Option(_) => 2, Statement::Return(_) => 3, Statement::Bad(_) => 4, Statement::Test(_) => 5, Statement::TestCase(_) => 7, Statement::Builtin(_) => 6, } } } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub enum Assignment { #[serde(rename = "VariableAssignment")] Variable(Box<VariableAssgn>), #[serde(rename = "MemberAssignment")] Member(Box<MemberAssgn>), } impl Assignment { /// Returns the [`BaseNode`] for an [`Assignment`]. pub fn base(&self) -> &BaseNode { match self { Assignment::Variable(wrapped) => &wrapped.base, Assignment::Member(wrapped) => &wrapped.base, } } } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub enum PropertyKey { Identifier(Identifier), #[serde(rename = "StringLiteral")] StringLit(StringLit), } impl PropertyKey { /// Returns the [`BaseNode`] for a [`PropertyKey`]. 
pub fn base(&self) -> &BaseNode { match self { PropertyKey::Identifier(wrapped) => &wrapped.base, PropertyKey::StringLit(wrapped) => &wrapped.base, } } } // This matches the grammar, and not ast.go: // ParenExpression = "(" Expression ")" . // FunctionExpressionSuffix = "=>" FunctionBodyExpression . // FunctionBodyExpression = Block | Expression . #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(untagged)] #[allow(missing_docs)] pub enum FunctionBody { Block(Block), Expr(Expression), } impl FunctionBody { /// Returns the [`BaseNode`] for a [`FunctionBody`]. pub fn base(&self) -> &BaseNode { match self { FunctionBody::Block(wrapped) => &wrapped.base, FunctionBody::Expr(wrapped) => wrapped.base(), } } } fn serialize_errors<S>(errors: &[String], ser: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut seq = ser.serialize_seq(Some(errors.len()))?; for e in errors { let mut me: HashMap<String, &String, DefaultHasher> = HashMap::default(); me.insert("msg".to_string(), e); seq.serialize_element(&me)?; } seq.end() } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct Comment { pub text: String, } /// BaseNode holds the attributes every expression or statement must have. #[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct BaseNode { #[serde(default)] pub location: SourceLocation, // If the base node is for a terminal the comments will be here. We also // use the base node comments when a non-terminal contains just one // terminal on the right hand side. This saves us populating the // type-specific AST nodes with comment lists when we can avoid it.. #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub comments: Vec<Comment>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(serialize_with = "serialize_errors")] #[serde(default)] pub errors: Vec<String>, } impl BaseNode { #[allow(missing_docs)] pub fn is_empty(&self) -> bool { self.errors.is_empty() && !self.location.is_valid() } #[allow(missing_docs)] pub fn is_multiline(&self) -> bool { self.location.is_multiline() } #[allow(missing_docs)] pub fn set_comments(&mut self, comments: Vec<Comment>) { self.comments = comments; } } /// Package represents a complete package source tree. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct Package { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "String::is_empty")] #[serde(default)] pub path: String, pub package: String, pub files: Vec<File>, } impl From<File> for Package { fn from(file: File) -> Self { Package { base: BaseNode { ..BaseNode::default() }, path: String::from(""), package: String::from(file.get_package()), files: vec![file], } } } /// Represents a source from a single file. 
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct File { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "String::is_empty")] #[serde(default)] pub name: String, #[serde(skip_serializing_if = "String::is_empty")] #[serde(default)] pub metadata: String, pub package: Option<PackageClause>, #[serde(deserialize_with = "deserialize_default_from_null")] pub imports: Vec<ImportDeclaration>, pub body: Vec<Statement>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub eof: Vec<Comment>, } impl File { fn get_package(self: &File) -> &str { match &self.package { Some(pkg_clause) => pkg_clause.name.name.as_str(), None => DEFAULT_PACKAGE_NAME, } } } /// Defines the current package identifier. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct PackageClause { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub name: Identifier, } /// Declares a single import. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct ImportDeclaration { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(rename = "as")] pub alias: Option<Identifier>, pub path: StringLit, } /// Block is a set of statements. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct Block { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lbrace: Vec<Comment>, pub body: Vec<Statement>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rbrace: Vec<Comment>, } /// BadStmt is a placeholder for statements for which no correct statement nodes /// can be created. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(rename = "BadStatement")] #[allow(missing_docs)] pub struct BadStmt { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub text: String, } /// ExprStmt may consist of an expression that does not return a value /// and is executed solely for its side-effects. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ExprStmt { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub expression: Expression, } /// Defines an Expression to return. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ReturnStmt { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub argument: Expression, } /// An option statement. /// /// Syntactically, is a single variable declaration. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct OptionStmt { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub assignment: Assignment, } /// BuiltinStmt declares a builtin identifier and its struct. 
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct BuiltinStmt { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub colon: Vec<Comment>, pub id: Identifier, pub ty: TypeExpression, } /// A monotype. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub enum MonoType { #[serde(rename = "TvarType")] Tvar(TvarType), #[serde(rename = "NamedType")] Basic(NamedType), #[serde(rename = "ArrayType")] Array(Box<ArrayType>), #[serde(rename = "DictType")] Dict(Box<DictType>), #[serde(rename = "RecordType")] Record(RecordType), #[serde(rename = "FunctionType")] Function(Box<FunctionType>), } impl MonoType { #[allow(missing_docs)] pub fn base(&self) -> &BaseNode { match self { MonoType::Basic(t) => &t.base, MonoType::Tvar(t) => &t.base, MonoType::Array(t) => &t.base, MonoType::Dict(t) => &t.base, MonoType::Record(t) => &t.base, MonoType::Function(t) => &t.base, } } } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct NamedType { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub name: Identifier, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct TvarType { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub name: Identifier, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ArrayType { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub element: MonoType, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct DictType { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub key: MonoType, pub val: MonoType, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct FunctionType { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub parameters: Vec<ParameterType>, pub monotype: MonoType, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub enum ParameterType { Required { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] base: BaseNode, name: Identifier, monotype: MonoType, }, Optional { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] base: BaseNode, name: Identifier, monotype: MonoType, }, Pipe { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] base: BaseNode, #[serde(skip_serializing_if = "Option::is_none")] name: Option<Identifier>, monotype: MonoType, }, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct TypeExpression { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub monotype: MonoType, pub constraints: Vec<TypeConstraint>, } fn get_err_basenode(b: BaseNode) -> String { if !b.errors.is_empty() { return b.errors[0].clone(); } "".to_string() } #[allow(missing_docs)] pub fn get_err_type_expression(ty: TypeExpression) -> String { if !ty.base.errors.is_empty() { return 
ty.base.errors[0].clone(); } let mt_errs = get_err_monotype(ty.monotype); if !mt_errs.is_empty() { return mt_errs; } get_err_constraints(ty.constraints) } fn get_err_monotype(mt: MonoType) -> String { match mt { MonoType::Basic(t) => { let e = get_err_basenode(t.base); if !e.is_empty() { return e; } get_err_identifier(t.name) } MonoType::Tvar(t) => { let e = get_err_basenode(t.base); if !e.is_empty() { return e; } get_err_identifier(t.name) } MonoType::Array(t) => { let e = get_err_basenode((*t).base); if !e.is_empty() { return e; } get_err_monotype((*t).element) } MonoType::Dict(t) => { let e = get_err_basenode(t.base); if !e.is_empty() { return e; } let e = get_err_monotype(t.key); if !e.is_empty() { return e; } get_err_monotype(t.val) } MonoType::Record(t) => { let e = get_err_basenode(t.base); if !e.is_empty() { return e; } if let Some(tv) = t.tvar { let e = get_err_identifier(tv); if !e.is_empty() { return e; } } get_err_properties(t.properties) } MonoType::Function(t) => { let e = get_err_basenode((*t).base); if !e.is_empty() { return e; } let e = get_err_parameters((*t).parameters); if !e.is_empty() { return e; } get_err_monotype((*t).monotype) } } } fn get_err_parameters(prs: Vec<ParameterType>) -> String { for pr in prs { let e = get_err_parameter(pr); if !e.is_empty() { return e; } } "".to_string() } fn get_err_parameter(pr: ParameterType) -> String { match pr { ParameterType::Required { base, name, monotype, } => { let e = get_err_basenode(base); if !e.is_empty() { return e; } let e = get_err_identifier(name); if !e.is_empty() { return e; } get_err_monotype(monotype) } ParameterType::Pipe { base, name, monotype, } => { let e = get_err_basenode(base); if !e.is_empty() { return e; } if let Some(i) = name { let e = get_err_identifier(i); if !e.is_empty() { return e; } } get_err_monotype(monotype) } ParameterType::Optional { base, name, monotype, } => { let e = get_err_basenode(base); if !e.is_empty() { return e; } let e = get_err_identifier(name); if !e.is_empty() { return e; } get_err_monotype(monotype) } } } fn get_err_properties(ps: Vec<PropertyType>) -> String { for p in ps { let e = get_err_property(p); if !e.is_empty() { return e; } } "".to_string() } fn get_err_property(p: PropertyType) -> String { let e = get_err_basenode(p.base); if !e.is_empty() { return e; } let e = get_err_identifier(p.name); if !e.is_empty() { return e; } get_err_monotype(p.monotype) } fn get_err_identifier(i: Identifier) -> String { if !i.base.errors.is_empty() { return i.base.errors[0].clone(); } "".to_string() } fn get_err_constraints(tc: Vec<TypeConstraint>) -> String { for c in tc { let e = get_err_constraint(c); if !e.is_empty() { return e; } } "".to_string() } fn get_err_constraint(c: TypeConstraint) -> String { let e = get_err_basenode(c.base); if !e.is_empty() { return e; } let e = get_err_identifier(c.tvar); if !e.is_empty() { return e; } get_err_kinds(c.kinds) } fn get_err_kinds(ks: Vec<Identifier>) -> String { for k in ks { let e = get_err_identifier(k); if !e.is_empty() { return e; } } "".to_string() } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct TypeConstraint { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub tvar: Identifier, pub kinds: Vec<Identifier>, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct RecordType { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode,
#[serde(skip_serializing_if = "Option::is_none")] pub tvar: Option<Identifier>, pub properties: Vec<PropertyType>, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct PropertyType { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub name: Identifier, pub monotype: MonoType, } /// TestStmt declares a Flux test case. #[allow(missing_docs)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct TestStmt { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub assignment: VariableAssgn, } /// Declares a Flux test case. // XXX: rockstar (17 Nov 2020) - This should replace the TestStmt above, once // it has been extended enough to cover the existing use cases. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct TestCaseStmt { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub id: Identifier, #[serde(skip_serializing_if = "Option::is_none")] pub extends: Option<StringLit>, pub block: Block, } /// Represents the declaration of a variable. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct VariableAssgn { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub id: Identifier, pub init: Expression, } /// Represents an assignement into a member of an object. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct MemberAssgn { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub member: MemberExpr, pub init: Expression, } /// Represents an interpolated string. #[allow(missing_docs)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct StringExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub parts: Vec<StringExprPart>, } /// Represents part of an interpolated string. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub enum StringExprPart { #[serde(rename = "TextPart")] Text(TextPart), #[serde(rename = "InterpolatedPart")] Interpolated(InterpolatedPart), } /// Represents the text part of an interpolated string. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct TextPart { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub value: String, } /// Represents the expression part of an interpolated string. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct InterpolatedPart { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub expression: Expression, } /// Represents an expression wrapped in parenthesis. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ParenExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lparen: Vec<Comment>, pub expression: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rparen: Vec<Comment>, } /// Represents a function call. 
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct CallExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub callee: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lparen: Vec<Comment>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub arguments: Vec<Expression>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rparen: Vec<Comment>, } /// Represents a call expression using the pipe forward syntax. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct PipeExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub argument: Expression, pub call: CallExpr, } /// Represents calling a property of a Call. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct MemberExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub object: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lbrack: Vec<Comment>, pub property: PropertyKey, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rbrack: Vec<Comment>, } /// Represents indexing into an array. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct IndexExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub array: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lbrack: Vec<Comment>, pub index: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rbrack: Vec<Comment>, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct FunctionExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lparen: Vec<Comment>, #[serde(deserialize_with = "deserialize_default_from_null")] pub params: Vec<Property>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rparen: Vec<Comment>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub arrow: Vec<Comment>, pub body: FunctionBody, } /// Represents Equality and Arithmetic operators. /// /// Result of evaluating an equality operator is always of type `bool` /// based on whether the comparison is true. /// Arithmetic operators take numerical values (either literals or variables) /// as their operands and return a single numerical value. 
#[derive(Debug, Display, PartialEq, Clone)] #[allow(missing_docs)] pub enum Operator { #[display(fmt = "*")] MultiplicationOperator, #[display(fmt = "/")] DivisionOperator, #[display(fmt = "%")] ModuloOperator, #[display(fmt = "^")] PowerOperator, #[display(fmt = "+")] AdditionOperator, #[display(fmt = "-")] SubtractionOperator, #[display(fmt = "<=")] LessThanEqualOperator, #[display(fmt = "<")] LessThanOperator, #[display(fmt = ">=")] GreaterThanEqualOperator, #[display(fmt = ">")] GreaterThanOperator, #[display(fmt = "startswith")] StartsWithOperator, #[display(fmt = "in")] InOperator, #[display(fmt = "not")] NotOperator, #[display(fmt = "exists")] ExistsOperator, #[display(fmt = "not empty")] NotEmptyOperator, #[display(fmt = "empty")] EmptyOperator, #[display(fmt = "==")] EqualOperator, #[display(fmt = "!=")] NotEqualOperator, #[display(fmt = "=~")] RegexpMatchOperator, #[display(fmt = "!~")] NotRegexpMatchOperator, // this is necessary for bad binary expressions. #[display(fmt = "<INVALID_OP>")] InvalidOperator, } impl Serialize for Operator { fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer, { serialize_to_string(self, serializer) } } impl FromStr for Operator { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "*" => Ok(Operator::MultiplicationOperator), "/" => Ok(Operator::DivisionOperator), "%" => Ok(Operator::ModuloOperator), "^" => Ok(Operator::PowerOperator), "+" => Ok(Operator::AdditionOperator), "-" => Ok(Operator::SubtractionOperator), "<=" => Ok(Operator::LessThanEqualOperator), "<" => Ok(Operator::LessThanOperator), ">=" => Ok(Operator::GreaterThanEqualOperator), ">" => Ok(Operator::GreaterThanOperator), "startswith" => Ok(Operator::StartsWithOperator), "in" => Ok(Operator::InOperator), "not" => Ok(Operator::NotOperator), "exists" => Ok(Operator::ExistsOperator), "not empty" => Ok(Operator::NotEmptyOperator), "empty" => Ok(Operator::EmptyOperator), "==" => Ok(Operator::EqualOperator), "!=" => Ok(Operator::NotEqualOperator), "=~" => Ok(Operator::RegexpMatchOperator), "!~" => Ok(Operator::NotRegexpMatchOperator), "<INVALID_OP>" => Ok(Operator::InvalidOperator), _ => Err(format!("unknown operator: {}", s)), } } } struct OperatorVisitor; impl<'de> Visitor<'de> for OperatorVisitor { type Value = Operator; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a valid string valid for an operator") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error, { let r = value.parse::<Operator>(); match r { Ok(v) => Ok(v), Err(s) => Err(E::custom(s)), } } } impl<'de> Deserialize<'de> for Operator { fn deserialize<D>(d: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { d.deserialize_str(OperatorVisitor) } } /// BinaryExpr use binary operators act on two operands in an expression. /// BinaryExpr includes relational and arithmetic operators #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(rename = "BinaryExpression")] #[allow(missing_docs)] pub struct BinaryExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub operator: Operator, pub left: Expression, pub right: Expression, } /// UnaryExpr use operators act on a single operand in an expression. 
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(rename = "UnaryExpression")] #[allow(missing_docs)] pub struct UnaryExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub operator: Operator, pub argument: Expression, } /// LogicalOperators are used with boolean (logical) values. #[derive(Debug, PartialEq, Clone)] #[allow(missing_docs)] pub enum LogicalOperator { AndOperator, OrOperator, } impl ToString for LogicalOperator { fn to_string(&self) -> String { match self { LogicalOperator::AndOperator => "and".to_string(), LogicalOperator::OrOperator => "or".to_string(), } } } impl Serialize for LogicalOperator { fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer, { serialize_to_string(self, serializer) } } impl FromStr for LogicalOperator { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "and" => Ok(LogicalOperator::AndOperator), "or" => Ok(LogicalOperator::OrOperator), _ => Err(format!("unknown logical operator: {}", s)), } } } struct LogicalOperatorVisitor; impl<'de> Visitor<'de> for LogicalOperatorVisitor { type Value = LogicalOperator; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a string valid for a logical operator") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error, { let r = value.parse::<LogicalOperator>(); match r { Ok(v) => Ok(v), Err(s) => Err(E::custom(s)), } } } impl<'de> Deserialize<'de> for LogicalOperator { fn deserialize<D>(d: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { d.deserialize_str(LogicalOperatorVisitor) } } /// LogicalExpr represents the rule conditions that collectively evaluate to either true or false. /// `or` expressions compute the disjunction of two boolean expressions and return boolean values. /// `and` expressions compute the conjunction of two boolean expressions and return boolean values.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct LogicalExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub operator: LogicalOperator, pub left: Expression, pub right: Expression, } /// ArrayItem is a single element of an array expression, holding the element's expression and any comments attached to its trailing comma. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ArrayItem { #[serde(default)] #[serde(flatten)] pub expression: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub comma: Vec<Comment>, } /// ArrayExpr is used to create and directly specify the elements of an array object #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ArrayExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lbrack: Vec<Comment>, #[serde(deserialize_with = "deserialize_default_from_null")] pub elements: Vec<ArrayItem>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rbrack: Vec<Comment>, } /// DictExpr represents a dictionary literal #[allow(missing_docs)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct DictExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lbrack: Vec<Comment>, #[serde(deserialize_with = "deserialize_default_from_null")] pub elements: Vec<DictItem>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rbrack: Vec<Comment>, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct DictItem { pub key: Expression, pub val: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub comma: Vec<Comment>, } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct WithSource { #[serde(default)] #[serde(flatten)] pub source: Identifier, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub with: Vec<Comment>, } /// ObjectExpr allows the declaration of an anonymous object within a declaration. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ObjectExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub lbrace: Vec<Comment>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] pub with: Option<WithSource>, #[serde(deserialize_with = "deserialize_default_from_null")] pub properties: Vec<Property>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub rbrace: Vec<Comment>, } /// ConditionalExpr selects one of two expressions, `Alternate` or `Consequent` /// depending on a third, boolean, expression, `Test`.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct ConditionalExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub tk_if: Vec<Comment>, pub test: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub tk_then: Vec<Comment>, pub consequent: Expression, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub tk_else: Vec<Comment>, pub alternate: Expression, } /// BadExpr is a malformed expression that contains the reason why in `text`. /// It can contain another expression, so that the parser can make a chained list of bad expressions. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct BadExpr { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub text: String, pub expression: Option<Expression>, } /// Property is the value associated with a key. /// A property's key can be either an identifier or string literal. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(tag = "type")] #[allow(missing_docs)] pub struct Property { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub key: PropertyKey, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub separator: Vec<Comment>, // `value` is optional, because of the shortcut: {a} <--> {a: a} pub value: Option<Expression>, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub comma: Vec<Comment>, } /// Identifier represents a name that identifies a unique Node #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct Identifier { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub name: String, } /// PipeLit represents an specialized literal value, indicating the left hand value of a pipe expression. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct PipeLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, } /// StringLit expressions begin and end with double quote marks. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct StringLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub value: String, } /// Boolean represent boolean values #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct BooleanLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub value: bool, } /// Represent floating point numbers according to the double representations /// defined by [IEEE-754-1985](https://en.wikipedia.org/wiki/IEEE_754-1985). #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct FloatLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub value: f64, } /// Represents integer numbers. 
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct IntegerLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(serialize_with = "serialize_to_string")] #[serde(deserialize_with = "deserialize_str_i64")] pub value: i64, } /// Represents integer numbers. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct UintLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, #[serde(serialize_with = "serialize_to_string")] #[serde(deserialize_with = "deserialize_str_u64")] pub value: u64, } struct U64Visitor; impl<'de> Visitor<'de> for U64Visitor { type Value = u64; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a string representation for an unsigned integer") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error, { let r = value.parse::<u64>(); match r { Ok(v) => Ok(v), Err(s) => Err(E::custom(s)), } } } fn deserialize_str_u64<'de, D>(d: D) -> Result<u64, D::Error> where D: Deserializer<'de>, { d.deserialize_str(U64Visitor) } struct I64Visitor; impl<'de> Visitor<'de> for I64Visitor { type Value = i64; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a string representation for an integer") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error, { let r = value.parse::<i64>(); match r { Ok(v) => Ok(v), Err(s) => Err(E::custom(s)), } } fn visit_string<E>(self, value: String) -> Result<Self::Value, E> where E: Error, { let r = value.parse::<i64>(); match r { Ok(v) => Ok(v), Err(s) => Err(E::custom(s)), } } } fn deserialize_str_i64<'de, D>(d: D) -> Result<i64, D::Error> where D: Deserializer<'de>, { d.deserialize_str(I64Visitor) } /// RegexpLit expressions begin and end with `/` and are regular expressions with syntax accepted by RE2. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct RegexpLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub value: String, } /// DurationLit is a pair consisting of length of time and the unit of time measured. /// It is the atomic unit from which all duration literals are composed. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[serde(rename = "Duration")] #[allow(missing_docs)] pub struct Duration { pub magnitude: i64, pub unit: String, } /// DurationLit represents the elapsed time between two instants as an /// int64 nanosecond count with syntax of [golang's time.Duration]. /// /// [golang's time.Duration]: https://golang.org/pkg/time/#Duration // TODO: this may be better as a class initialization // All magnitudes in Duration vector should have the same sign #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct DurationLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub values: Vec<Duration>, } /// DateTimeLit represents an instant in time with nanosecond precision using /// the syntax of golang's RFC3339 Nanosecond variant. 
// TODO: we need a "duration from" that takes a time and a durationliteral, and gives an exact time.DurationLit instead of an approximation // TODO: this may be better as a class initialization #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[allow(missing_docs)] pub struct DateTimeLit { #[serde(skip_serializing_if = "BaseNode::is_empty")] #[serde(default)] #[serde(flatten)] pub base: BaseNode, pub value: chrono::DateTime<FixedOffset>, } #[cfg(test)] mod tests;
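// Added illustrative sketch, not part of the original file: a minimal example of how the
// AST pieces above fit together. It assumes only items visible in this module (BaseNode,
// BadStmt, Statement, File, Package, DEFAULT_PACKAGE_NAME); file names and literal values
// are made up for illustration.
#[cfg(test)]
mod ast_usage_sketch {
    use super::*;

    #[test]
    fn statement_base_and_typ() {
        // Wrap a BadStmt in the Statement enum and read back its shared metadata.
        let stmt = Statement::Bad(Box::new(BadStmt {
            base: BaseNode::default(),
            text: "not a statement".to_string(),
        }));
        assert_eq!(stmt.typ(), 4); // integer tag assigned to Bad in Statement::typ
        assert!(stmt.base().errors.is_empty());
    }

    #[test]
    fn package_from_file_uses_default_name() {
        // A File without a package clause folds into a Package named after
        // DEFAULT_PACKAGE_NAME (see File::get_package and From<File> for Package).
        let file = File {
            base: BaseNode::default(),
            name: "example.flux".to_string(),
            metadata: String::new(),
            package: None,
            imports: vec![],
            body: vec![],
            eof: vec![],
        };
        let pkg = Package::from(file);
        assert_eq!(pkg.package, DEFAULT_PACKAGE_NAME);
        assert_eq!(pkg.files.len(), 1);
    }
}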
29.315262
139
0.625794
f422b7df9e1408f126248b535885a6d2e04ce3da
3,636
use super::SeriesF64; use crate::dataframe::ColumnType; use linalg::vectors::floats::FloatsVector; use prettytable::{Cell, Row, Table}; use wasm_bindgen::prelude::*; impl SeriesF64 { pub fn new_rs(name: String, data: Vec<f64>) -> SeriesF64 { let col_data = FloatsVector::new(data); SeriesF64 { name, data: col_data, } } } #[wasm_bindgen] impl SeriesF64 { #[wasm_bindgen(constructor)] pub fn new(name: JsValue, data: JsValue) -> SeriesF64 { let col_name = serde_wasm_bindgen::from_value(name).unwrap(); let serde_data: Vec<f64> = serde_wasm_bindgen::from_value(data).unwrap(); let col_data = FloatsVector::new(serde_data); SeriesF64 { name: col_name, data: col_data, } } #[wasm_bindgen(js_name = toJson)] pub fn to_json(&self) -> JsValue { let js_series = self; serde_wasm_bindgen::to_value(&js_series).unwrap() } pub fn len(&self) -> usize { self.data.len() } pub fn dtype(&self) -> ColumnType { ColumnType::FLOAT } pub fn shape(&self) -> JsValue { self.data.shape_to_js() } pub fn data(&self) -> JsValue { self.data.data_to_js() } #[wasm_bindgen(js_name = toString)] pub fn to_string(&self) -> String { self.data.to_string() } pub fn get(&self, index: usize) -> f64 { self.data.get(index) } pub fn set(&mut self, index: usize, value: f64) { self.data.set(index, value); } pub fn swap(&mut self, index1: usize, index2: usize) { self.data.swap(index1, index2); } pub fn reverse(&mut self) { self.data.reverse(); } pub fn reversed(&self) -> FloatsVector { self.data.reversed() } pub fn append(&mut self, element: f64) { self.data.append(element); } pub fn appended(&mut self, element: f64) -> FloatsVector { self.data.appended(element) } pub fn extend(&mut self, data_arr: JsValue) { let data_arr = serde_wasm_bindgen::from_value(data_arr).unwrap(); let ndarray_data_arr = FloatsVector::new(data_arr); self.data.extend(ndarray_data_arr) } pub fn extended(&mut self, data_arr: JsValue) -> FloatsVector { let data_arr = serde_wasm_bindgen::from_value(data_arr).unwrap(); let ndarray_data_arr = FloatsVector::new(data_arr); self.data.extended(ndarray_data_arr) } pub fn insert(&mut self, index: usize, value: f64) { self.data.insert(index, value); } pub fn inserted(&mut self, index: usize, value: f64) -> FloatsVector { self.data.inserted(index, value) } pub fn splice(&mut self, index: usize) -> f64 { self.data.splice(index) } pub fn spliced(&mut self, index: usize) -> js_sys::Array { self.data.spliced(index) } pub fn name(&self) -> String { self.name.clone() } #[wasm_bindgen(js_name = updateName)] pub fn update_name(&mut self, column_name: JsValue) -> String { let column_name = serde_wasm_bindgen::from_value(column_name).unwrap(); self.name = column_name; self.name.clone() } #[wasm_bindgen(getter,js_name = display)] pub fn show(&self) -> String { let mut table = Table::new(); let col_name = self.name.clone(); table.add_row(row![col_name]); for i in 0..self.len() { let val = self.data.data[i]; table.add_row(Row::new(vec![Cell::new(&val.to_string())])); } table.to_string() } }
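// Added illustrative sketch, not part of the original file: exercising the plain-Rust
// constructor and accessors defined above from the non-wasm side. The column name and
// values are illustrative only.
#[cfg(test)]
mod series_f64_sketch {
    use super::*;

    #[test]
    fn build_and_mutate_series() {
        let mut s = SeriesF64::new_rs("price".to_string(), vec![1.0, 2.5, 4.0]);
        assert_eq!(s.len(), 3);
        assert_eq!(s.get(1), 2.5);

        s.append(8.0); // grow the backing FloatsVector by one element
        s.set(0, 0.5); // overwrite an existing element in place
        assert_eq!(s.len(), 4);
        assert_eq!(s.get(0), 0.5);
        assert_eq!(s.name(), "price");
    }
}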
25.787234
81
0.589934
4adf4a42cc27be3478196c5f2ac32a6425b7a877
5,637
//! Array-based data structures using densely numbered entity references as mapping keys. //! //! This crate defines a number of data structures based on arrays. The arrays are not indexed by //! `usize` as usual, but by *entity references* which are integers wrapped in new-types. This has //! a couple advantages: //! //! - Improved type safety. The various map and set types accept a specific key type, so there is //! no confusion about the meaning of an array index, as there is with plain arrays. //! - Smaller indexes. The normal `usize` index is often 64 bits which is way too large for most //! purposes. The entity reference types can be smaller, allowing for more compact data //! structures. //! //! The `EntityRef` trait should be implemented by types to be used as indexed. The `entity_impl!` //! macro provides convenient defaults for types wrapping `u32` which is common. //! //! - [`PrimaryMap`](struct.PrimaryMap.html) is used to keep track of a vector of entities, //! assigning a unique entity reference to each. //! - [`SecondaryMap`](struct.SecondaryMap.html) is used to associate secondary information to an //! entity. The map is implemented as a simple vector, so it does not keep track of which //! entities have been inserted. Instead, any unknown entities map to the default value. //! - [`SparseMap`](struct.SparseMap.html) is used to associate secondary information to a small //! number of entities. It tracks accurately which entities have been inserted. This is a //! specialized data structure which can use a lot of memory, so read the documentation before //! using it. //! - [`EntitySet`](struct.EntitySet.html) is used to represent a secondary set of entities. //! The set is implemented as a simple vector, so it does not keep track of which entities have //! been inserted into the primary map. Instead, any unknown entities are not in the set. //! - [`EntityList`](struct.EntityList.html) is a compact representation of lists of entity //! references allocated from an associated memory pool. It has a much smaller footprint than //! `Vec`. #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)] #![warn(unused_import_braces)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::option_map_unwrap_or, clippy::option_map_unwrap_or_else, clippy::print_stdout, clippy::unicode_not_nfc, clippy::use_self ) )] #![no_std] extern crate alloc; // Re-export core so that the macros works with both std and no_std crates #[doc(hidden)] pub extern crate core as __core; /// A type wrapping a small integer index should implement `EntityRef` so it can be used as the key /// of an `SecondaryMap` or `SparseMap`. pub trait EntityRef: Copy + Eq { /// Create a new entity reference from a small integer. /// This should crash if the requested index is not representable. fn new(_: usize) -> Self; /// Get the index that was used to create this entity reference. fn index(self) -> usize; } /// Macro which provides the common implementation of a 32-bit entity reference. #[macro_export] macro_rules! entity_impl { // Basic traits. 
($entity:ident) => { impl $crate::EntityRef for $entity { fn new(index: usize) -> Self { debug_assert!(index < ($crate::__core::u32::MAX as usize)); $entity(index as u32) } fn index(self) -> usize { self.0 as usize } } impl $crate::packed_option::ReservedValue for $entity { fn reserved_value() -> $entity { $entity($crate::__core::u32::MAX) } fn is_reserved_value(&self) -> bool { self.0 == $crate::__core::u32::MAX } } impl $entity { /// Return the underlying index value as a `u32`. #[allow(dead_code)] pub fn from_u32(x: u32) -> Self { debug_assert!(x < $crate::__core::u32::MAX); $entity(x) } /// Return the underlying index value as a `u32`. #[allow(dead_code)] pub fn as_u32(self) -> u32 { self.0 } } }; // Include basic `Display` impl using the given display prefix. // Display a `Block` reference as "block12". ($entity:ident, $display_prefix:expr) => { entity_impl!($entity); impl $crate::__core::fmt::Display for $entity { fn fmt(&self, f: &mut $crate::__core::fmt::Formatter) -> $crate::__core::fmt::Result { write!(f, concat!($display_prefix, "{}"), self.0) } } impl $crate::__core::fmt::Debug for $entity { fn fmt(&self, f: &mut $crate::__core::fmt::Formatter) -> $crate::__core::fmt::Result { (self as &dyn $crate::__core::fmt::Display).fmt(f) } } }; } pub mod packed_option; mod boxed_slice; mod iter; mod keys; mod list; mod map; mod primary; mod set; mod sparse; pub use self::boxed_slice::BoxedSlice; pub use self::iter::{Iter, IterMut}; pub use self::keys::Keys; pub use self::list::{EntityList, ListPool}; pub use self::map::SecondaryMap; pub use self::primary::PrimaryMap; pub use self::set::EntitySet; pub use self::sparse::{SparseMap, SparseMapValue, SparseSet};
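// Added illustrative sketch, not part of the original file: how the pieces documented
// above are typically combined, namely an entity reference declared with entity_impl!
// and then used as the key of a PrimaryMap and a SecondaryMap. The `new`, `push`, and
// indexing methods are assumed from the crate documentation rather than shown in this
// excerpt; the Inst type is made up.
#[cfg(test)]
mod entity_usage_sketch {
    use crate::{EntityRef, PrimaryMap, SecondaryMap};

    // A new-typed 32-bit entity reference, displayed as "inst0", "inst1", ...
    #[derive(Clone, Copy, PartialEq, Eq)]
    struct Inst(u32);
    entity_impl!(Inst, "inst");

    #[test]
    fn primary_and_secondary_maps() {
        // PrimaryMap hands out a fresh Inst key for every value pushed into it.
        let mut names: PrimaryMap<Inst, &str> = PrimaryMap::new();
        let add = names.push("add");
        let sub = names.push("sub");
        assert_eq!(names[add], "add");
        assert_eq!(add.index(), 0);

        // SecondaryMap attaches extra data to existing keys; keys that were never
        // written read back as the default value.
        let mut uses: SecondaryMap<Inst, u32> = SecondaryMap::new();
        uses[add] = 3;
        assert_eq!(uses[add], 3);
        assert_eq!(uses[sub], 0);
    }
}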
38.087838
99
0.641831
8ab0a7108fe2fbd8466ad38c8bf657ad5aa28120
917
extern crate crowbar; mod common; use anyhow::Result; use crowbar::config::aws::{AwsConfig, AWS_CONFIG_FILE, PROFILE_KEY}; use std::env; use std::io::Write; use tempfile::NamedTempFile; #[test] fn deletes_profile_key_from_file() -> Result<()> { let mut file = NamedTempFile::new()?; let location = file.path().to_path_buf(); let app_profile = common::short_app_profile_a(); let profile_name = &app_profile.name; writeln!(file, "{}", common::long_aws_profile())?; env::set_var(AWS_CONFIG_FILE, location); let config = AwsConfig::new()?; config.delete_profile(profile_name)?.write()?; let new_config = AwsConfig::new()?; assert_eq!( None, new_config .profiles .get_from(Some(format!("profile {}", profile_name)), PROFILE_KEY) ); assert_eq!(1, new_config.profiles.len()); env::remove_var(AWS_CONFIG_FILE); Ok(()) }
24.131579
77
0.651036
26ba4ca294b0f9060845360b0455711e69fb341e
7,247
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. //! This mod provides functions to remap a `JsError` based on a source map. use deno_core::error::JsError; use sourcemap::SourceMap; use std::collections::HashMap; use std::str; use std::sync::Arc; pub trait SourceMapGetter: Sync + Send { /// Returns the raw source map file. fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>>; fn get_source_line( &self, file_name: &str, line_number: usize, ) -> Option<String>; } /// Cached filename lookups. The key can be None if a previous lookup failed to /// find a SourceMap. pub type CachedMaps = HashMap<String, Option<SourceMap>>; /// Apply a source map to a `deno_core::JsError`, returning a `JsError` where /// file names and line/column numbers point to the location in the original /// source, rather than the transpiled source code. pub fn apply_source_map<G: SourceMapGetter>( js_error: &JsError, getter: Arc<G>, ) -> JsError { // Note that js_error.frames has already been source mapped in // prepareStackTrace(). let mut mappings_map: CachedMaps = HashMap::new(); let (script_resource_name, line_number, start_column, source_line) = get_maybe_orig_position( js_error.script_resource_name.clone(), js_error.line_number, // start_column is 0-based, we need 1-based here. js_error.start_column.map(|n| n + 1), js_error.source_line.clone(), &mut mappings_map, getter, ); let start_column = start_column.map(|n| n - 1); // It is better to just move end_column to be the same distance away from // start column because sometimes the code point is not available in the // source file map. let end_column = match js_error.end_column { Some(ec) => { if let Some(sc) = start_column { Some(ec - (js_error.start_column.unwrap() - sc)) } else { None } } _ => None, }; JsError { message: js_error.message.clone(), source_line, script_resource_name, line_number, start_column, end_column, frames: js_error.frames.clone(), stack: None, } } fn get_maybe_orig_position<G: SourceMapGetter>( file_name: Option<String>, line_number: Option<i64>, column_number: Option<i64>, source_line: Option<String>, mappings_map: &mut CachedMaps, getter: Arc<G>, ) -> (Option<String>, Option<i64>, Option<i64>, Option<String>) { match (file_name, line_number, column_number) { (Some(file_name_v), Some(line_v), Some(column_v)) => { let (file_name, line_number, column_number, source_line) = get_orig_position( file_name_v, line_v, column_v, source_line, mappings_map, getter, ); ( Some(file_name), Some(line_number), Some(column_number), source_line, ) } _ => (None, None, None, source_line), } } pub fn get_orig_position<G: SourceMapGetter>( file_name: String, line_number: i64, column_number: i64, source_line: Option<String>, mappings_map: &mut CachedMaps, getter: Arc<G>, ) -> (String, i64, i64, Option<String>) { let maybe_source_map = get_mappings(&file_name, mappings_map, getter.clone()); let default_pos = (file_name, line_number, column_number, source_line.clone()); // Lookup expects 0-based line and column numbers, but ours are 1-based. 
let line_number = line_number - 1; let column_number = column_number - 1; match maybe_source_map { None => default_pos, Some(source_map) => { match source_map.lookup_token(line_number as u32, column_number as u32) { None => default_pos, Some(token) => match token.get_source() { None => default_pos, Some(original) => { let maybe_source_line = if let Some(source_view) = token.get_source_view() { source_view.get_line(token.get_src_line()) } else { None }; let source_line = if let Some(source_line) = maybe_source_line { Some(source_line.to_string()) } else if let Some(source_line) = getter.get_source_line( original, // Getter expects 0-based line numbers, but ours are 1-based. token.get_src_line() as usize, ) { Some(source_line) } else { source_line }; ( original.to_string(), i64::from(token.get_src_line()) + 1, i64::from(token.get_src_col()) + 1, source_line, ) } }, } } } } fn get_mappings<'a, G: SourceMapGetter>( file_name: &str, mappings_map: &'a mut CachedMaps, getter: Arc<G>, ) -> &'a Option<SourceMap> { mappings_map .entry(file_name.to_string()) .or_insert_with(|| parse_map_string(file_name, getter)) } // TODO(kitsonk) parsed source maps should probably be cached in state in // the module meta data. fn parse_map_string<G: SourceMapGetter>( file_name: &str, getter: Arc<G>, ) -> Option<SourceMap> { getter .get_source_map(file_name) .and_then(|raw_source_map| SourceMap::from_slice(&raw_source_map).ok()) } #[cfg(test)] mod tests { use super::*; struct MockSourceMapGetter {} impl SourceMapGetter for MockSourceMapGetter { fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> { let s = match file_name { "foo_bar.ts" => { r#"{"sources": ["foo_bar.ts"], "mappings":";;;IAIA,OAAO,CAAC,GAAG,CAAC,qBAAqB,EAAE,EAAE,CAAC,OAAO,CAAC,CAAC;IAC/C,OAAO,CAAC,GAAG,CAAC,eAAe,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,WAAW,EAAE,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC"}"# } "bar_baz.ts" => { r#"{"sources": ["bar_baz.ts"], "mappings":";;;IAEA,CAAC,KAAK,IAAI,EAAE;QACV,MAAM,GAAG,GAAG,sDAAa,OAAO,2BAAC,CAAC;QAClC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACnB,CAAC,CAAC,EAAE,CAAC;IAEQ,QAAA,GAAG,GAAG,KAAK,CAAC;IAEzB,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC"}"# } _ => return None, }; Some(s.as_bytes().to_owned()) } fn get_source_line( &self, file_name: &str, line_number: usize, ) -> Option<String> { let s = match file_name { "foo_bar.ts" => vec![ "console.log('foo');", "console.log('foo');", "console.log('foo');", "console.log('foo');", "console.log('foo');", ], _ => return None, }; if s.len() > line_number { Some(s[line_number].to_string()) } else { None } } } #[test] fn apply_source_map_line() { let e = JsError { message: "TypeError: baz".to_string(), source_line: Some("foo".to_string()), script_resource_name: Some("foo_bar.ts".to_string()), line_number: Some(4), start_column: Some(16), end_column: None, frames: vec![], stack: None, }; let getter = Arc::new(MockSourceMapGetter {}); let actual = apply_source_map(&e, getter); assert_eq!(actual.source_line, Some("console.log('foo');".to_string())); } }
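// Added illustrative sketch, not part of the original file: a minimal SourceMapGetter
// backed by in-memory maps, illustrating the trait contract documented above (raw source
// map bytes keyed by file name, plus 0-based source line lookups). The struct and field
// names are illustrative only.
#[cfg(test)]
mod source_map_getter_sketch {
    use super::*;

    struct InMemorySourceMaps {
        maps: HashMap<String, Vec<u8>>,
        sources: HashMap<String, Vec<String>>,
    }

    impl SourceMapGetter for InMemorySourceMaps {
        fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
            self.maps.get(file_name).cloned()
        }

        fn get_source_line(&self, file_name: &str, line_number: usize) -> Option<String> {
            self.sources
                .get(file_name)
                .and_then(|lines| lines.get(line_number).cloned())
        }
    }

    #[test]
    fn unknown_files_yield_none() {
        let getter = InMemorySourceMaps {
            maps: HashMap::new(),
            sources: HashMap::new(),
        };
        assert!(getter.get_source_map("missing.ts").is_none());
        assert!(getter.get_source_line("missing.ts", 0).is_none());
    }
}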
30.195833
304
0.618601
1acae247f22b473062043b6f6354fab7b896d6b0
16,550
use crate::{http, Profile, Receiver, Target}; use api::destination_client::DestinationClient; use futures::{future, prelude::*, ready, select_biased}; use http_body::Body as HttpBody; use linkerd2_addr::Addr; use linkerd2_dns_name::Name; use linkerd2_error::{Error, Recover}; use linkerd2_proxy_api::destination as api; use linkerd2_proxy_api_resolve::pb as resolve; use pin_project::pin_project; use regex::Regex; use std::{ convert::TryInto, future::Future, pin::Pin, str::FromStr, sync::Arc, task::{Context, Poll}, time::Duration, }; use tokio::sync::watch; use tonic::{ self as grpc, body::{Body, BoxBody}, client::GrpcService, }; use tower::retry::budget::Budget; use tracing::{debug, debug_span, error, trace, warn}; use tracing_futures::Instrument; #[derive(Clone, Debug)] pub struct Client<S, R> { service: DestinationClient<S>, recover: R, context_token: String, } #[pin_project] pub struct ProfileFuture<S, R> where S: GrpcService<BoxBody>, R: Recover, { #[pin] inner: Option<Inner<S, R>>, } #[pin_project] struct Inner<S, R> where S: GrpcService<BoxBody>, R: Recover, { service: DestinationClient<S>, recover: R, #[pin] state: State<R::Backoff>, request: api::GetDestination, } #[pin_project(project = StateProj)] enum State<B> { Disconnected { backoff: Option<B>, }, Waiting { future: Pin< Box< dyn Future< Output = Result< tonic::Response<tonic::Streaming<api::DestinationProfile>>, grpc::Status, >, > + Send + 'static, >, >, backoff: Option<B>, }, Streaming(#[pin] grpc::Streaming<api::DestinationProfile>), Backoff(Option<B>), } // === impl Client === impl<S, R> Client<S, R> where // These bounds aren't *required* here, they just help detect the problem // earlier (as Client::new), instead of when trying to passing a `Client` // to something that wants `impl GetProfile`. S: GrpcService<BoxBody> + Clone + Send + 'static, S::ResponseBody: Send, <S::ResponseBody as Body>::Data: Send, <S::ResponseBody as HttpBody>::Error: Into<Box<dyn std::error::Error + Send + Sync + 'static>> + Send, S::Future: Send, R: Recover, R::Backoff: Unpin, { pub fn new(service: S, recover: R, context_token: String) -> Self { Self { service: DestinationClient::new(service), recover, context_token, } } } impl<T, S, R> tower::Service<T> for Client<S, R> where T: ToString, S: GrpcService<BoxBody> + Clone + Send + 'static, S::ResponseBody: Send, <S::ResponseBody as Body>::Data: Send, <S::ResponseBody as HttpBody>::Error: Into<Box<dyn std::error::Error + Send + Sync + 'static>> + Send, S::Future: Send, R: Recover + Send + Clone + 'static, R::Backoff: Unpin + Send, { type Response = Option<Receiver>; type Error = Error; type Future = ProfileFuture<S, R>; fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { // Tonic will internally drive the client service to readiness. 
Poll::Ready(Ok(())) } fn call(&mut self, t: T) -> Self::Future { let request = api::GetDestination { path: t.to_string(), context_token: self.context_token.clone(), ..Default::default() }; let inner = Inner { request, service: self.service.clone(), recover: self.recover.clone(), state: State::Disconnected { backoff: None }, }; ProfileFuture { inner: Some(inner) } } } impl<S, R> Future for ProfileFuture<S, R> where S: GrpcService<BoxBody> + Clone + Send + 'static, S::ResponseBody: Send, <S::ResponseBody as Body>::Data: Send, <S::ResponseBody as HttpBody>::Error: Into<Error> + Send, S::Future: Send, R: Recover + Send + 'static, R::Backoff: Unpin, R::Backoff: Send, { type Output = Result<Option<Receiver>, Error>; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let mut this = self.project(); let profile = match this .inner .as_mut() .as_pin_mut() .expect("polled after ready") .poll_profile(cx) { Poll::Pending => return Poll::Pending, Poll::Ready(Err(error)) => { trace!(%error, "failed to fetch profile"); return Poll::Ready(Err(error)); } Poll::Ready(Ok(profile)) => profile, }; trace!("daemonizing"); let (tx, rx) = watch::channel(profile); let inner = this.inner.take().expect("polled after ready"); let daemon = async move { tokio::pin!(inner); loop { select_biased! { _ = tx.closed().fuse() => { trace!("profile observation dropped"); return; }, profile = future::poll_fn(|cx| inner.as_mut().poll_profile(cx) ).fuse() => { match profile { Err(error) => { error!(%error, "profile client died"); return; } Ok(profile) => { trace!(?profile, "publishing"); if tx.send(profile).is_err() { trace!("failed to publish profile"); return; } } } } } } }; tokio::spawn(daemon.in_current_span()); Poll::Ready(Ok(Some(rx))) } } // === impl Inner === impl<S, R> Inner<S, R> where S: GrpcService<BoxBody> + Clone + Send + 'static, S::ResponseBody: Send, <S::ResponseBody as Body>::Data: Send + 'static, <S::ResponseBody as HttpBody>::Error: Into<Error> + Send, S::Future: Send, R: Recover, R::Backoff: Unpin, { fn poll_rx( rx: Pin<&mut grpc::Streaming<api::DestinationProfile>>, cx: &mut Context<'_>, ) -> Poll<Option<Result<Profile, grpc::Status>>> { trace!("poll"); let profile = ready!(rx.poll_next(cx)).map(|res| { res.map(|proto| { debug!("profile received: {:?}", proto); let name = Name::from_str(&proto.fully_qualified_name).ok(); let retry_budget = proto.retry_budget.and_then(convert_retry_budget); let http_routes = proto .routes .into_iter() .filter_map(move |orig| convert_route(orig, retry_budget.as_ref())) .collect(); let targets = proto .dst_overrides .into_iter() .filter_map(convert_dst_override) .collect(); let endpoint = proto.endpoint.and_then(|e| { let labels = std::collections::HashMap::new(); resolve::to_addr_meta(e, &labels) }); Profile { name, http_routes, targets, opaque_protocol: proto.opaque_protocol, endpoint, } }) }); Poll::Ready(profile) } fn poll_profile( mut self: Pin<&mut Self>, cx: &mut Context<'_>, ) -> Poll<Result<Profile, Error>> { let span = debug_span!("poll_profile"); let _enter = span.enter(); loop { let mut this = self.as_mut().project(); match this.state.as_mut().project() { StateProj::Disconnected { backoff } => { trace!("disconnected"); let mut svc = this.service.clone(); let req = this.request.clone(); let future = Box::pin(async move { svc.get_profile(grpc::Request::new(req)).await }); let backoff = backoff.take(); this.state.as_mut().set(State::Waiting { future, backoff }); } StateProj::Waiting { future, backoff } => { trace!("waiting"); match 
ready!(Pin::new(future).poll(cx)) { Ok(rsp) => this.state.set(State::Streaming(rsp.into_inner())), Err(e) => { let error = e.into(); warn!(%error, "Could not fetch profile"); let new_backoff = this.recover.recover(error)?; let backoff = Some(backoff.take().unwrap_or(new_backoff)); this.state.set(State::Disconnected { backoff }); } } } StateProj::Streaming(s) => { trace!("streaming"); let status = match ready!(Self::poll_rx(s, cx)) { Some(Ok(profile)) => return Poll::Ready(Ok(profile.into())), None => grpc::Status::new(grpc::Code::Ok, ""), Some(Err(status)) => status, }; trace!(?status); let backoff = this.recover.recover(status.into())?; this.state.set(State::Backoff(Some(backoff))); } StateProj::Backoff(ref mut backoff) => { trace!("backoff"); let backoff = match ready!(backoff.as_mut().unwrap().poll_next_unpin(cx)) { Some(()) => backoff.take(), None => None, }; this.state.set(State::Disconnected { backoff }); } }; } } } fn convert_route( orig: api::Route, retry_budget: Option<&Arc<Budget>>, ) -> Option<(http::RequestMatch, http::Route)> { let req_match = orig.condition.and_then(convert_req_match)?; let rsp_classes = orig .response_classes .into_iter() .filter_map(convert_rsp_class) .collect(); let mut route = http::Route::new(orig.metrics_labels.into_iter(), rsp_classes); if orig.is_retryable { set_route_retry(&mut route, retry_budget); } if let Some(timeout) = orig.timeout { set_route_timeout(&mut route, timeout.try_into()); } Some((req_match, route)) } fn convert_dst_override(orig: api::WeightedDst) -> Option<Target> { if orig.weight == 0 { return None; } let addr = Addr::from_str(orig.authority.as_str()).ok()?; Some(Target { addr, weight: orig.weight, }) } fn set_route_retry(route: &mut http::Route, retry_budget: Option<&Arc<Budget>>) { let budget = match retry_budget { Some(budget) => budget.clone(), None => { warn!("retry_budget is missing: {:?}", route); return; } }; route.set_retries(budget); } fn set_route_timeout(route: &mut http::Route, timeout: Result<Duration, Duration>) { match timeout { Ok(dur) => { route.set_timeout(dur); } Err(_) => { warn!("route timeout is negative: {:?}", route); } } } fn convert_req_match(orig: api::RequestMatch) -> Option<http::RequestMatch> { let m = match orig.r#match? { api::request_match::Match::All(ms) => { let ms = ms.matches.into_iter().filter_map(convert_req_match); http::RequestMatch::All(ms.collect()) } api::request_match::Match::Any(ms) => { let ms = ms.matches.into_iter().filter_map(convert_req_match); http::RequestMatch::Any(ms.collect()) } api::request_match::Match::Not(m) => { let m = convert_req_match(*m)?; http::RequestMatch::Not(Box::new(m)) } api::request_match::Match::Path(api::PathMatch { regex }) => { let regex = regex.trim(); let re = match (regex.starts_with('^'), regex.ends_with('$')) { (true, true) => Regex::new(regex).ok()?, (hd_anchor, tl_anchor) => { let hd = if hd_anchor { "" } else { "^" }; let tl = if tl_anchor { "" } else { "$" }; let re = format!("{}{}{}", hd, regex, tl); Regex::new(&re).ok()? } }; http::RequestMatch::Path(re) } api::request_match::Match::Method(mm) => { let m = mm.r#type.and_then(|m| (&m).try_into().ok())?; http::RequestMatch::Method(m) } }; Some(m) } fn convert_rsp_class(orig: api::ResponseClass) -> Option<http::ResponseClass> { let c = orig.condition.and_then(convert_rsp_match)?; Some(http::ResponseClass::new(orig.is_failure, c)) } fn convert_rsp_match(orig: api::ResponseMatch) -> Option<http::ResponseMatch> { let m = match orig.r#match? 
{ api::response_match::Match::All(ms) => { let ms = ms .matches .into_iter() .filter_map(convert_rsp_match) .collect::<Vec<_>>(); if ms.is_empty() { return None; } http::ResponseMatch::All(ms) } api::response_match::Match::Any(ms) => { let ms = ms .matches .into_iter() .filter_map(convert_rsp_match) .collect::<Vec<_>>(); if ms.is_empty() { return None; } http::ResponseMatch::Any(ms) } api::response_match::Match::Not(m) => { let m = convert_rsp_match(*m)?; http::ResponseMatch::Not(Box::new(m)) } api::response_match::Match::Status(range) => { let min = ::http::StatusCode::from_u16(range.min as u16).ok()?; let max = ::http::StatusCode::from_u16(range.max as u16).ok()?; http::ResponseMatch::Status { min, max } } }; Some(m) } fn convert_retry_budget(orig: api::RetryBudget) -> Option<Arc<Budget>> { let min_retries = if orig.min_retries_per_second <= ::std::i32::MAX as u32 { orig.min_retries_per_second } else { warn!( "retry_budget min_retries_per_second overflow: {:?}", orig.min_retries_per_second ); return None; }; let retry_ratio = orig.retry_ratio; if retry_ratio > 1000.0 || retry_ratio < 0.0 { warn!("retry_budget retry_ratio invalid: {:?}", retry_ratio); return None; } let ttl = match orig.ttl { Some(pb_dur) => match pb_dur.try_into() { Ok(dur) => { if dur > Duration::from_secs(60) || dur < Duration::from_secs(1) { warn!("retry_budget ttl invalid: {:?}", dur); return None; } dur } Err(negative) => { warn!("retry_budget ttl negative: {:?}", negative); return None; } }, None => { warn!("retry_budget ttl missing"); return None; } }; Some(Arc::new(Budget::new(ttl, min_retries, retry_ratio))) } #[cfg(test)] mod tests { use super::*; use quickcheck::*; quickcheck! { fn retry_budget_from_proto( min_retries_per_second: u32, retry_ratio: f32, seconds: i64, nanos: i32 ) -> bool { let proto = api::RetryBudget { min_retries_per_second, retry_ratio, ttl: Some(prost_types::Duration { seconds, nanos, }), }; convert_retry_budget(proto); // simply not panicking is good enough true } } }
32.135922
96
0.495468
dd18fccc0891a8960c44b6f6242272d538de9d6d
7,780
#![allow(non_camel_case_types)] #![allow(non_snake_case)] #![deny(unstable_features)] #![deny(unused_import_braces)] #![deny(unused_qualifications)] #![deny(warnings)] #[allow(unused_imports)] #[macro_use] extern crate log; #[macro_use] extern crate failure; extern crate libc; pub mod fsm; pub mod fmr; use std::ptr; use std::ffi::CString; use libc::{c_void, c_char, c_int}; type _lf_device = *const c_void; type _lf_function_index = u8; type _fmr_return = u32; pub const LF_VERSION: u16 = 0x0001; #[derive(Debug, Fail)] pub enum FlipperError { #[fail(display = "failed to attach to a Flipper device")] Attach, } type Result<T> = std::result::Result<T, FlipperError>; /// The libflipper native representation of a module. An `_lf_module` struct /// is the most important piece of information in FMR because it's name is a /// unique identifier, its index maps it to a specific memory location on the /// device, and the device pointer specifies which Flipper this module sends /// calls to. #[repr(C)] #[derive(Debug)] struct _lf_module { /// The name and unique identifier of this module. name: *const c_char, /// An optional description of what this module does. description: *const c_char, /// A version of this module. Important to update when APIs change so that /// FMR can keep track of how to execute functions correctly. version: u16, /// A checksum of the module. crc: u16, /// The index into Flipper's module store where the native code for this /// module is kept. Flipper loads modules sequentially, with the standard /// modules automatically pre-loaded from 0-X. index: u16, /// A pointer representing the specific Flipper device on which this module's /// native code lives and therefore where calls using this module will be /// executed. If `NULL`, calls will automatically go to the currently-selected /// Flipper, as tracked by libflipper. device: *const c_void, } /// Contains the information necessary to interact with a standard module within /// libflipper. This is a thin wrapper for the purpose of hiding the underlying /// representation. pub struct StandardModuleFFI { /// A pointer to a standard module within libflipper. module_meta: *const _lf_module, } /// Contains owned metadata about a user module in the proper format for /// interacting with libflipper. pub struct UserModuleFFI { /// Since we assign the name of the module before binding it with /// lf_bind, we use a CString to back the memory of the string. /// By storing the CString adjacent to the _lf_module in the same /// struct, we ensure that the string lives as long as /// the _lf_module. _name: CString, module_meta: _lf_module, } impl<'a> From<(&'a str, u16, u16, u16)> for UserModuleFFI { fn from(vals: (&'a str, u16, u16, u16)) -> Self { let (name, version, crc, index) = vals; let string = CString::new(name).unwrap(); let string_ref = string.as_ptr(); UserModuleFFI { _name: string, module_meta: _lf_module { name: string_ref, description: ptr::null(), version, crc, index, device: ptr::null(), }, } } } impl UserModuleFFI { /// User module metadata is owned by the rust bindings. In order to /// instantiate a user module, one creates an uninitialized owned /// representation of the user module with nothing but the name /// populated inside. This uninitialized but named metadata struct /// is then passed to `lf_bind`, where libflipper then initializes /// all of the module metadata. 
pub fn uninitialized(name: &str) -> UserModuleFFI { UserModuleFFI::from((name, 0, 0, 0)) } } /// A unified representation of standard and user modules for simplifying /// the interaction between module bindings and libflipper. pub enum ModuleFFI { Standard(StandardModuleFFI), User(UserModuleFFI), } impl ModuleFFI { fn as_ptr(&self) -> *const _lf_module { match *self { ModuleFFI::Standard(ref standard) => standard.module_meta, ModuleFFI::User(ref user) => &user.module_meta, } } } /// Standard Modules are modules that are built into Flipper and always /// available on the device. /// /// Standard Modules must implement `new` and `bind`. The `new` constructor /// gives an instance of the module that is attached to the "default" /// Flipper. `bind` gives an instance which is attached to a specific /// Flipper. pub trait StandardModule { fn new() -> Self; fn bind(flipper: &Flipper) -> Self; } /// User Modules are modules which are written in C by the user. /// /// When a user creates a module, they give it a name which is used by /// Flipper to load and bind to it. To use it from the Rust bindings, /// the user must specify the name so that rust can find the module. pub trait UserModule<'a>: From<UserModuleFFI> { const NAME: &'a str; fn new() -> Self; /// Binds an instance of a User Module to the given Flipper. /// /// ```rust,no_run /// use flipper::{Flipper, UserModule, ModuleFFI, UserModuleFFI}; /// /// struct MyModule { /// ffi: ModuleFFI, /// } /// /// impl<'a> UserModule<'a> for MyModule { /// const NAME: &'a str = "My module"; /// fn new() -> Self { /// MyModule { /// ffi: ModuleFFI::User(UserModuleFFI::uninitialized(Self::NAME)), /// } /// } /// } /// /// impl From<UserModuleFFI> for MyModule { /// fn from(module: UserModuleFFI) -> Self { /// MyModule { /// ffi: ModuleFFI::User(module), /// } /// } /// } /// /// impl MyModule { /// fn my_func(&self) { /// // Do FMR invocation /// } /// } /// /// let flipper = Flipper::attach().expect("should attach to Flipper"); /// /// // Any of the following will bind "MyModule" /// let my_module = MyModule::new(); // Attaches to default ("active") flipper /// let my_module = MyModule::bind(&flipper); // Attaches to specified flipper /// /// my_module.my_func(); /// ``` fn bind(flipper: &Flipper) -> Self { let mut module = UserModuleFFI::uninitialized(Self::NAME); unsafe { lf_bind(&mut module.module_meta, flipper.device); } Self::from(module) } } #[link(name = "flipper")] extern { fn flipper_attach() -> _lf_device; fn carbon_attach_hostname(hostname: *const c_char) -> _lf_device; fn carbon_select_u2_gpio(device: _lf_device); // TODO remove after loader improvements fn lf_bind(module: *mut _lf_module, device: *const c_void) -> c_int; } pub struct Flipper { /// A reference to an active Flipper profile in libflipper. This /// is used when communicating with libflipper to specify which /// device functions should be executed on. device: _lf_device, } impl Flipper { pub fn attach() -> Result<Flipper> { unsafe { let device = flipper_attach(); if device == ptr::null() { return Err(FlipperError::Attach); } Ok(Flipper { device }) } } pub fn attach_hostname(hostname: &str) -> Result<Flipper> { unsafe { let hostname_CString = CString::new(hostname).unwrap(); let device = carbon_attach_hostname(hostname_CString.as_ptr()); if device == ptr::null() { return Err(FlipperError::Attach); } Ok(Flipper { device }) } } pub fn select_u2_gpio(&self) { unsafe { carbon_select_u2_gpio(self.device) }; } }
32.966102
91
0.633419
8f0cc30bd4e188eb66d1eb3ef9e280cbe241a2ed
4,632
use ndarray::prelude::*; use rayon::prelude::*; use serde_json::json; use std::{collections::BTreeMap, path::Path}; use tangram_linear::Progress; use tangram_table::prelude::*; use tangram_zip::{pzip, zip}; fn main() { // Load the data. let csv_file_path_train = Path::new("data/higgs_train.csv"); let csv_file_path_test = Path::new("data/higgs_test.csv"); let target_column_index = 0; let options = tangram_table::FromCsvOptions { column_types: Some(BTreeMap::from([ ( "signal".to_owned(), TableColumnType::Enum { variants: vec!["false".to_owned(), "true".to_owned()], }, ), ("lepton_pt".to_owned(), TableColumnType::Number), ("lepton_eta".to_owned(), TableColumnType::Number), ("lepton_phi".to_owned(), TableColumnType::Number), ( "missing_energy_magnitude".to_owned(), TableColumnType::Number, ), ("missing_energy_phi".to_owned(), TableColumnType::Number), ("jet_1_pt".to_owned(), TableColumnType::Number), ("jet_1_eta".to_owned(), TableColumnType::Number), ("jet_1_phi".to_owned(), TableColumnType::Number), ("jet_1_b_tag".to_owned(), TableColumnType::Number), ("jet_2_pt".to_owned(), TableColumnType::Number), ("jet_2_eta".to_owned(), TableColumnType::Number), ("jet_2_phi".to_owned(), TableColumnType::Number), ("jet_2_b_tag".to_owned(), TableColumnType::Number), ("jet_3_pt".to_owned(), TableColumnType::Number), ("jet_3_eta".to_owned(), TableColumnType::Number), ("jet_3_phi".to_owned(), TableColumnType::Number), ("jet_3_b_tag".to_owned(), TableColumnType::Number), ("jet_4_pt".to_owned(), TableColumnType::Number), ("jet_4_eta".to_owned(), TableColumnType::Number), ("jet_4_phi".to_owned(), TableColumnType::Number), ("jet_4_b_tag".to_owned(), TableColumnType::Number), ("m_jj".to_owned(), TableColumnType::Number), ("m_jjj".to_owned(), TableColumnType::Number), ("m_lv".to_owned(), TableColumnType::Number), ("m_jlv".to_owned(), TableColumnType::Number), ("m_bb".to_owned(), TableColumnType::Number), ("m_wbb".to_owned(), TableColumnType::Number), ("m_wwbb".to_owned(), TableColumnType::Number), ])), ..Default::default() }; let mut features_train = Table::from_path(csv_file_path_train, options.clone(), &mut |_| {}).unwrap(); let labels_train = features_train.columns_mut().remove(target_column_index); let labels_train = labels_train.as_enum().unwrap(); let mut features_test = Table::from_path(csv_file_path_test, options.clone(), &mut |_| {}).unwrap(); let labels_test = features_test.columns_mut().remove(target_column_index); let labels_test = labels_test.as_enum().unwrap(); let feature_groups: Vec<tangram_features::FeatureGroup> = features_train .columns() .iter() .map(|column| match column { TableColumn::Number(column) => { let mean_variance = tangram_metrics::MeanVariance::compute( column.view().as_slice().iter().cloned(), ); tangram_features::FeatureGroup::Normalized( tangram_features::NormalizedFeatureGroup { source_column_name: column.name().clone().unwrap(), mean: mean_variance.mean, variance: mean_variance.variance, }, ) } _ => unreachable!(), }) .collect(); let features_train = tangram_features::compute_features_array_f32( &features_train.view(), feature_groups.as_slice(), &|| {}, ); let features_test = tangram_features::compute_features_array_f32( &features_test.view(), feature_groups.as_slice(), &|| {}, ); // Train the model. 
let train_output = tangram_linear::BinaryClassifier::train( features_train.view(), labels_train.view(), &tangram_linear::TrainOptions { learning_rate: 0.01, max_epochs: 1, n_examples_per_batch: 1000, ..Default::default() }, Progress { kill_chip: &tangram_kill_chip::KillChip::default(), handle_progress_event: &mut |_| {}, }, ); // Make predictions on the test data. let chunk_size = (features_test.nrows() + rayon::current_num_threads() - 1) / rayon::current_num_threads(); let mut probabilities = Array::zeros(features_test.nrows()); pzip!( features_test.axis_chunks_iter(Axis(0), chunk_size), probabilities.axis_chunks_iter_mut(Axis(0), chunk_size), ) .for_each(|(features_test_chunk, probabilities_chunk)| { train_output .model .predict(features_test_chunk, probabilities_chunk); }); // Compute metrics. let input = zip!(probabilities.iter(), labels_test.iter()) .map(|(probability, label)| (*probability, label.unwrap())) .collect(); let auc_roc = tangram_metrics::AucRoc::compute(input); let output = json!({ "auc_roc": auc_roc, }); println!("{}", output); }
34.311111
92
0.697755
90f7480cccbe0da0a7efdcb81b7533db9a6e27f3
2,469
extern crate clap;

use clap::{App, Arg, ArgGroup};
use std::process::Command;

fn main() {
    let args = App::new("cubic-backlight")
        .version("0.1.0")
        .author("Árni Dagur <[email protected]>")
        .arg(
            Arg::with_name("N")
                .short("N")
                .value_name("NUM")
                .help("Number of steps")
                .default_value("10")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("inc")
                .short("i")
                .long("inc")
                .help("Increase brightness"),
        )
        .arg(
            Arg::with_name("dec")
                .short("d")
                .long("dec")
                .help("Decrease brightness"),
        )
        .group(
            ArgGroup::with_name("operator")
                .arg("inc")
                .arg("dec")
                .required(true),
        )
        .arg(
            Arg::with_name("min")
                .short("m")
                .long("min")
                .default_value("0")
                .help("Minimum brightness percentage"),
        )
        .arg(
            Arg::with_name("max")
                .short("M")
                .long("max")
                .default_value("100")
                .help("Maximum brightness percentage"),
        )
        .get_matches();

    // Get values of arguments
    let n = args.value_of("N").unwrap().parse::<f32>().unwrap();
    let max = args.value_of("max").unwrap().parse::<f32>().unwrap();
    let min = args.value_of("min").unwrap().parse::<f32>().unwrap();

    // Get current brightness, assign it to b
    let output = Command::new("xbacklight")
        .output()
        .expect("failed to execute process")
        .stdout;
    let output = String::from_utf8(output).unwrap();
    let output = output.trim();
    let b = output.parse::<f32>().unwrap();

    // Calculate new brightness
    let cbrt_b = b.powf(1.0 / 3.0);
    let step = (max.powf(1.0 / 3.0) - min.powf(1.0 / 3.0)) / n;
    let mut nb;
    if args.is_present("dec") {
        nb = cbrt_b - step;
    } else {
        nb = cbrt_b + step;
    }
    nb = nb.powf(3.0);

    // Make sure min <= nb <= max
    if max < nb {
        nb = max;
    } else if nb < min {
        nb = min;
    }

    // Set new brightness
    Command::new("xbacklight")
        .arg("-set")
        .arg(&nb.to_string())
        .spawn()
        .expect("failed to execute process");
}
27.131868
68
0.452815
0374a6f33f1b58113eecf170cd0bd320f0afce44
596
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(managed_boxes)]

use std::gc::GC;

fn main() {
    let f; //~ ERROR cyclic type of infinite size
    f = box(GC) f;
}
31.368421
68
0.711409
75a4c930b4414aace98301fd2faeb93b7026a0a9
20,342
//! This module contains a dedicated thread pool for running "cpu //! intensive" workloads such as DataFusion plans #![deny(rustdoc::broken_intra_doc_links, rustdoc::bare_urls, rust_2018_idioms)] #![warn( missing_copy_implementations, missing_debug_implementations, missing_docs, clippy::explicit_iter_loop, clippy::future_not_send, clippy::use_self, clippy::clone_on_ref_ptr )] use parking_lot::Mutex; use pin_project::{pin_project, pinned_drop}; use std::{pin::Pin, sync::Arc}; use tokio::sync::oneshot::{error::RecvError, Receiver}; use tokio_util::sync::CancellationToken; use futures::{ future::{BoxFuture, Shared}, Future, FutureExt, TryFutureExt, }; use observability_deps::tracing::warn; /// Task that can be added to the executor-internal queue. /// /// Every task within the executor is represented by a [`Job`] that can be polled by the API user. struct Task { fut: Pin<Box<dyn Future<Output = ()> + Send>>, cancel: CancellationToken, #[allow(dead_code)] task_ref: Arc<()>, } impl Task { /// Run task. /// /// This runs the payload or cancels if the linked [`Job`] is dropped. async fn run(self) { tokio::select! { _ = self.cancel.cancelled() => (), _ = self.fut => (), } } } /// The type of error that is returned from tasks in this module pub type Error = tokio::sync::oneshot::error::RecvError; /// Job within the executor. /// /// Dropping the job will cancel its linked task. #[pin_project(PinnedDrop)] #[derive(Debug)] pub struct Job<T> { cancel: CancellationToken, detached: bool, #[pin] rx: Receiver<T>, } impl<T> Job<T> { /// Detached job so dropping it does not cancel it. /// /// You must ensure that this task eventually finishes, otherwise [`DedicatedExecutor::join`] may never return! pub fn detach(mut self) { // cannot destructure `Self` because we implement `Drop`, so we use a flag instead to prevent cancelation. self.detached = true; } } impl<T> Future for Job<T> { type Output = Result<T, Error>; fn poll( self: Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Self::Output> { let this = self.project(); this.rx.poll(cx) } } #[pinned_drop] impl<T> PinnedDrop for Job<T> { fn drop(self: Pin<&mut Self>) { if !self.detached { self.cancel.cancel(); } } } /// Runs futures (and any `tasks` that are `tokio::task::spawned` by /// them) on a separate tokio Executor #[derive(Clone)] pub struct DedicatedExecutor { state: Arc<Mutex<State>>, } /// Runs futures (and any `tasks` that are `tokio::task::spawned` by /// them) on a separate tokio Executor struct State { /// Channel for requests -- the dedicated executor takes requests /// from here and runs them. /// /// This is `None` if we triggered shutdown. requests: Option<std::sync::mpsc::Sender<Task>>, /// Receiver side indicating that shutdown is complete. completed_shutdown: Shared<BoxFuture<'static, Result<(), Arc<RecvError>>>>, /// Task counter (uses Arc strong count). task_refs: Arc<()>, /// The inner thread that can be used to join during drop. thread: Option<std::thread::JoinHandle<()>>, } // IMPORTANT: Implement `Drop` for `State`, NOT for `DedicatedExecutor`, because the executor can be cloned and clones // share their inner state. 
impl Drop for State { fn drop(&mut self) { if self.requests.is_some() { warn!("DedicatedExecutor dropped without calling shutdown()"); self.requests = None; } // do NOT poll the shared future if we are panicking due to https://github.com/rust-lang/futures-rs/issues/2575 if !std::thread::panicking() && self.completed_shutdown.clone().now_or_never().is_none() { warn!("DedicatedExecutor dropped without waiting for worker termination",); } // join thread but don't care about the results self.thread.take().expect("not dropped yet").join().ok(); } } /// The default worker priority (value passed to `libc::setpriority`); const WORKER_PRIORITY: i32 = 10; impl std::fmt::Debug for DedicatedExecutor { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // Avoid taking the mutex in debug formatting write!(f, "DedicatedExecutor") } } impl DedicatedExecutor { /// Creates a new `DedicatedExecutor` with a dedicated tokio /// executor that is separate from the threadpool created via /// `[tokio::main]` or similar. /// /// The worker thread priority is set to low so that such tasks do /// not starve other more important tasks (such as answering health checks) /// /// Follows the example from to stack overflow and spawns a new /// thread to install a Tokio runtime "context" /// <https://stackoverflow.com/questions/62536566> /// /// If you try to do this from a async context you see something like /// thread 'plan::stringset::tests::test_builder_plan' panicked at 'Cannot /// drop a runtime in a context where blocking is not allowed. This /// happens when a runtime is dropped from within an asynchronous /// context.', .../tokio-1.4.0/src/runtime/blocking/shutdown.rs:51:21 pub fn new(thread_name: &str, num_threads: usize) -> Self { let thread_name = thread_name.to_string(); let (tx_tasks, rx_tasks) = std::sync::mpsc::channel::<Task>(); let (tx_shutdown, rx_shutdown) = tokio::sync::oneshot::channel(); let thread = std::thread::spawn(move || { let runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() .thread_name(&thread_name) .worker_threads(num_threads) .on_thread_start(move || set_current_thread_priority(WORKER_PRIORITY)) .build() .expect("Creating tokio runtime"); runtime.block_on(async move { // Dropping the tokio runtime only waits for tasks to yield not to complete // // We therefore use a RwLock to wait for tasks to complete let join = Arc::new(tokio::sync::RwLock::new(())); while let Ok(task) = rx_tasks.recv() { let join = Arc::clone(&join); let handle = join.read_owned().await; tokio::task::spawn(async move { task.run().await; std::mem::drop(handle); }); } // Wait for all tasks to finish join.write().await; // signal shutdown, but it's OK if the other side is gone tx_shutdown.send(()).ok(); }) }); let state = State { requests: Some(tx_tasks), task_refs: Arc::new(()), completed_shutdown: rx_shutdown.map_err(Arc::new).boxed().shared(), thread: Some(thread), }; Self { state: Arc::new(Mutex::new(state)), } } /// Runs the specified Future (and any tasks it spawns) on the /// `DedicatedExecutor`. /// /// Currently all tasks are added to the tokio executor /// immediately and compete for the threadpool's resources. 
pub fn spawn<T>(&self, task: T) -> Job<T::Output> where T: Future + Send + 'static, T::Output: Send + 'static, { let (tx, rx) = tokio::sync::oneshot::channel(); let fut = Box::pin(async move { let task_output = task.await; if tx.send(task_output).is_err() { warn!("Spawned task output ignored: receiver dropped") } }); let cancel = CancellationToken::new(); let mut state = self.state.lock(); let task = Task { fut, cancel: cancel.clone(), task_ref: Arc::clone(&state.task_refs), }; if let Some(requests) = &mut state.requests { // would fail if someone has started shutdown requests.send(task).ok(); } else { warn!("tried to schedule task on an executor that was shutdown"); } Job { rx, cancel, detached: false, } } /// Number of currently active tasks. pub fn tasks(&self) -> usize { let state = self.state.lock(); // the strong count is always `1 + jobs` because of the Arc we hold within Self Arc::strong_count(&state.task_refs).saturating_sub(1) } /// signals shutdown of this executor and any Clones pub fn shutdown(&self) { // hang up the channel which will cause the dedicated thread // to quit let mut state = self.state.lock(); state.requests = None; } /// Stops all subsequent task executions, and waits for the worker /// thread to complete. Note this will shutdown all clones of this /// `DedicatedExecutor` as well. /// /// Only the first all to `join` will actually wait for the /// executing thread to complete. All other calls to join will /// complete immediately. /// /// # Panic / Drop /// [`DedicatedExecutor`] implements shutdown on [`Drop`]. You should just use this behavior and NOT call /// [`join`](Self::join) manually during [`Drop`] or panics because this might lead to another panic, see /// <https://github.com/rust-lang/futures-rs/issues/2575>. pub async fn join(&self) { self.shutdown(); // get handle mutex is held let handle = { let state = self.state.lock(); state.completed_shutdown.clone() }; // wait for completion while not holding the mutex to avoid // deadlocks handle.await.expect("Thread died?") } } #[cfg(unix)] fn set_current_thread_priority(prio: i32) { // on linux setpriority sets the current thread's priority // (as opposed to the current process). unsafe { libc::setpriority(0, 0, prio) }; } #[cfg(not(unix))] fn set_current_thread_priority(prio: i32) { warn!("Setting worker thread priority not supported on this platform"); } #[cfg(test)] mod tests { use super::*; use std::{ sync::{Arc, Barrier}, time::Duration, }; use tokio::sync::Barrier as AsyncBarrier; #[cfg(unix)] fn get_current_thread_priority() -> i32 { // on linux setpriority sets the current thread's priority // (as opposed to the current process). 
unsafe { libc::getpriority(0, 0) } } #[cfg(not(unix))] fn get_current_thread_priority() -> i32 { WORKER_PRIORITY } #[tokio::test] async fn basic() { let barrier = Arc::new(Barrier::new(2)); let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); let dedicated_task = exec.spawn(do_work(42, Arc::clone(&barrier))); // Note the dedicated task will never complete if it runs on // the main tokio thread (as this test is not using the // 'multithreaded' version of the executor and the call to // barrier.wait actually blocks the tokio thread) barrier.wait(); // should be able to get the result assert_eq!(dedicated_task.await.unwrap(), 42); exec.join().await; } #[tokio::test] async fn basic_clone() { let barrier = Arc::new(Barrier::new(2)); let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); // Run task on clone should work fine let dedicated_task = exec.clone().spawn(do_work(42, Arc::clone(&barrier))); barrier.wait(); assert_eq!(dedicated_task.await.unwrap(), 42); exec.join().await; } #[tokio::test] async fn drop_clone() { let barrier = Arc::new(Barrier::new(2)); let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); drop(exec.clone()); let task = exec.spawn(do_work(42, Arc::clone(&barrier))); barrier.wait(); assert_eq!(task.await.unwrap(), 42); exec.join().await; } #[tokio::test] #[should_panic(expected = "foo")] async fn just_panic() { struct S(DedicatedExecutor); impl Drop for S { fn drop(&mut self) { self.0.join().now_or_never(); } } let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); let _s = S(exec); // this must not lead to a double-panic and SIGILL panic!("foo") } #[tokio::test] async fn multi_task() { let barrier = Arc::new(Barrier::new(3)); // make an executor with two threads let exec = DedicatedExecutor::new("Test DedicatedExecutor", 2); let dedicated_task1 = exec.spawn(do_work(11, Arc::clone(&barrier))); let dedicated_task2 = exec.spawn(do_work(42, Arc::clone(&barrier))); // block main thread until completion of other two tasks barrier.wait(); // should be able to get the result assert_eq!(dedicated_task1.await.unwrap(), 11); assert_eq!(dedicated_task2.await.unwrap(), 42); exec.join().await; } #[tokio::test] async fn worker_priority() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 2); let dedicated_task = exec.spawn(async move { get_current_thread_priority() }); assert_eq!(dedicated_task.await.unwrap(), WORKER_PRIORITY); exec.join().await; } #[tokio::test] async fn tokio_spawn() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 2); // spawn a task that spawns to other tasks and ensure they run on the dedicated // executor let dedicated_task = exec.spawn(async move { // spawn separate tasks let t1 = tokio::task::spawn(async { assert_eq!( std::thread::current().name(), Some("Test DedicatedExecutor") ); 25usize }); t1.await.unwrap() }); // Validate the inner task ran to completion (aka it did not panic) assert_eq!(dedicated_task.await.unwrap(), 25); exec.join().await; } #[tokio::test] async fn panic_on_executor() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); let dedicated_task = exec.spawn(async move { if true { panic!("At the disco, on the dedicated task scheduler"); } else { 42 } }); // should not be able to get the result dedicated_task.await.unwrap_err(); exec.join().await; } #[tokio::test] async fn executor_shutdown_while_task_running() { let barrier = Arc::new(Barrier::new(2)); let captured = Arc::clone(&barrier); let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); let dedicated_task 
= exec.spawn(async move { tokio::time::sleep(tokio::time::Duration::from_millis(1)).await; do_work(42, captured).await }); exec.shutdown(); // block main thread until completion of the outstanding task barrier.wait(); // task should complete successfully assert_eq!(dedicated_task.await.unwrap(), 42); exec.join().await; } #[tokio::test] async fn executor_submit_task_after_shutdown() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); // Simulate trying to submit tasks once executor has shutdown exec.shutdown(); let dedicated_task = exec.spawn(async { 11 }); // task should complete, but return an error dedicated_task.await.unwrap_err(); exec.join().await; } #[tokio::test] async fn executor_submit_task_after_clone_shutdown() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); // shutdown the clone (but not the exec) exec.clone().join().await; // Simulate trying to submit tasks once executor has shutdown let dedicated_task = exec.spawn(async { 11 }); // task should complete, but return an error dedicated_task.await.unwrap_err(); exec.join().await; } #[tokio::test] async fn executor_join() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); // test it doesn't hang exec.join().await; } #[tokio::test] async fn executor_join2() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); // test it doesn't hang exec.join().await; exec.join().await; } #[tokio::test] #[allow(clippy::redundant_clone)] async fn executor_clone_join() { let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); // test it doesn't hang exec.clone().join().await; exec.clone().join().await; exec.join().await; } #[tokio::test] async fn drop_receiver() { // create empty executor let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); assert_eq!(exec.tasks(), 0); // create first blocked task let barrier1 = Arc::new(AsyncBarrier::new(2)); let dedicated_task1 = exec.spawn(do_work_async(11, Arc::clone(&barrier1))); assert_eq!(exec.tasks(), 1); // create second blocked task let barrier2 = Arc::new(AsyncBarrier::new(2)); let dedicated_task2 = exec.spawn(do_work_async(22, Arc::clone(&barrier2))); assert_eq!(exec.tasks(), 2); // cancel task drop(dedicated_task1); // cancelation might take a short while wait_for_tasks(&exec, 1).await; // unblock other task barrier2.wait().await; assert_eq!(dedicated_task2.await.unwrap(), 22); wait_for_tasks(&exec, 0).await; assert_eq!(exec.tasks(), 0); exec.join().await; } #[tokio::test] async fn detach_receiver() { // create empty executor let exec = DedicatedExecutor::new("Test DedicatedExecutor", 1); assert_eq!(exec.tasks(), 0); // create first task // `detach()` consumes the task but doesn't abort the task (in contrast to `drop`). We'll proof the that the // task is still running by linking it to a 2nd task using a barrier with size 3 (two tasks plus the main thread). 
let barrier = Arc::new(AsyncBarrier::new(3)); let dedicated_task = exec.spawn(do_work_async(11, Arc::clone(&barrier))); dedicated_task.detach(); assert_eq!(exec.tasks(), 1); // create second task let dedicated_task = exec.spawn(do_work_async(22, Arc::clone(&barrier))); assert_eq!(exec.tasks(), 2); // wait a bit just to make sure that our tasks doesn't get dropped tokio::time::sleep(Duration::from_millis(10)).await; assert_eq!(exec.tasks(), 2); // tasks should be unblocked because they both wait on the same barrier // unblock tasks barrier.wait().await; wait_for_tasks(&exec, 0).await; let result = dedicated_task.await.unwrap(); assert_eq!(result, 22); exec.join().await; } /// Wait for the barrier and then return `result` async fn do_work(result: usize, barrier: Arc<Barrier>) -> usize { barrier.wait(); result } /// Wait for the barrier and then return `result` async fn do_work_async(result: usize, barrier: Arc<AsyncBarrier>) -> usize { barrier.wait().await; result } // waits for up to 1 sec for the correct number of tasks async fn wait_for_tasks(exec: &DedicatedExecutor, num: usize) { tokio::time::timeout(Duration::from_secs(1), async { loop { if dbg!(exec.tasks()) == num { return; } tokio::time::sleep(Duration::from_millis(1)).await; } }) .await .expect("Did not find expected num tasks within a second") } }
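// [Editor's addition] A minimal usage sketch for the DedicatedExecutor defined
// above. The `#[tokio::main]` wrapper and the summation workload are
// illustrative assumptions, not part of the original file.
#[tokio::main]
async fn main() {
    // One dedicated worker thread, kept separate from the main tokio runtime.
    let exec = DedicatedExecutor::new("cpu-heavy", 1);

    // `spawn` returns a `Job<T>`; awaiting it yields `Result<T, Error>`.
    let job = exec.spawn(async { (0..1_000_000u64).sum::<u64>() });
    let sum = job.await.expect("executor dropped the result");
    assert_eq!(sum, 499_999_500_000);

    // Shut the executor down and wait for its worker thread to terminate.
    exec.join().await;
}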
32.085174
122
0.595025
ebe2125e502ecf59e8e69e6de934cc1f7a43465b
9,688
use anyhow::{Context, Result}; use serde::de::DeserializeOwned; use std::convert::TryFrom; use std::io::ErrorKind; use std::mem::size_of; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, net::TcpStream, }; #[derive(Debug)] pub enum ConnectionError { HelloFailed(&'static str), } impl std::fmt::Display for ConnectionError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ConnectionError::HelloFailed(error) => { write!(f, "Failed to connect to Criterion.rs benchmark:\n{}", error) } } } } impl std::error::Error for ConnectionError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { ConnectionError::HelloFailed(_) => None, } } } #[derive(Debug)] #[repr(u16)] enum ProtocolFormat { CBOR = 1, } impl ProtocolFormat { fn from_u16(format: u16) -> Result<Self, ConnectionError> { match format { 1 => Ok(ProtocolFormat::CBOR), _ => Err(ConnectionError::HelloFailed("Unknown format value sent by Criterion.rs benchmark; please update cargo-criterion.")), } } } const RUNNER_MAGIC_NUMBER: &str = "cargo-criterion"; const RUNNER_HELLO_SIZE: usize = RUNNER_MAGIC_NUMBER.len() // magic number + (size_of::<u8>() * 3); // version number const BENCHMARK_MAGIC_NUMBER: &str = "Criterion"; const BENCHMARK_HELLO_SIZE: usize = BENCHMARK_MAGIC_NUMBER.len() // magic number + (size_of::<u8>() * 3) // version number + size_of::<u16>() // protocol version + size_of::<u16>(); // protocol format /// This struct represents an open socket connection to a Criterion.rs benchmark. /// /// When the benchmark connects, a small handshake is performed to verify that we've connected to /// the right process and that the version of Criterion.rs on the other side is valid, etc. /// Afterwards, we exchange messages (currently using CBOR) with the benchmark. #[derive(Debug)] pub struct Connection { socket: TcpStream, receive_buffer: Vec<u8>, send_buffer: Vec<u8>, criterion_rs_version: [u8; 3], protocol_version: u16, protocol_format: ProtocolFormat, } impl Connection { /// Perform the connection handshake and wrap the TCP stream in a Connection object if successful. pub async fn new(mut socket: TcpStream) -> Result<Self> { // Send the runner-hello message. let mut hello_buf = [0u8; RUNNER_HELLO_SIZE]; hello_buf[0..RUNNER_MAGIC_NUMBER.len()].copy_from_slice(RUNNER_MAGIC_NUMBER.as_bytes()); let i = RUNNER_MAGIC_NUMBER.len(); hello_buf[i] = env!("CARGO_PKG_VERSION_MAJOR").parse().unwrap(); hello_buf[i + 1] = env!("CARGO_PKG_VERSION_MINOR").parse().unwrap(); hello_buf[i + 2] = env!("CARGO_PKG_VERSION_PATCH").parse().unwrap(); socket.write_all(&hello_buf).await?; // Read the benchmark hello message. 
let mut hello_buf = [0u8; BENCHMARK_HELLO_SIZE]; socket.read_exact(&mut hello_buf).await?; if &hello_buf[0..BENCHMARK_MAGIC_NUMBER.len()] != BENCHMARK_MAGIC_NUMBER.as_bytes() { return Err( ConnectionError::HelloFailed("Not connected to a Criterion.rs benchmark.").into(), ); } let mut i = BENCHMARK_MAGIC_NUMBER.len(); let criterion_rs_version = [hello_buf[i], hello_buf[i + 1], hello_buf[i + 2]]; i += 3; let protocol_version = u16::from_be_bytes([hello_buf[i], hello_buf[i + 1]]); i += 2; let protocol_format = u16::from_be_bytes([hello_buf[i], hello_buf[i + 1]]); let protocol_format = ProtocolFormat::from_u16(protocol_format)?; info!("Criterion.rs version: {:?}", criterion_rs_version); info!("Protocol version: {}", protocol_version); info!("Protocol Format: {:?}", protocol_format); Ok(Connection { socket, receive_buffer: vec![], send_buffer: vec![], criterion_rs_version, protocol_version, protocol_format, }) } /// Receive a message from the benchmark. If the benchmark has closed the connection, returns /// Ok(None). pub async fn recv<T: DeserializeOwned>(&mut self) -> Result<Option<T>> { let mut length_buf = [0u8; 4]; match self.socket.read_exact(&mut length_buf).await { Err(err) if err.kind() == ErrorKind::UnexpectedEof => return Ok(None), Err(err) => return Err(err.into()), Ok(val) => val, }; let length = u32::from_be_bytes(length_buf); self.receive_buffer.resize(length as usize, 0u8); self.socket .read_exact(&mut self.receive_buffer) .await .context("Failed to read message from Criterion.rs benchmark")?; let value: T = serde_cbor::from_slice(&self.receive_buffer) .context("Failed to parse message from Criterion.rs benchmark")?; Ok(Some(value)) } /// Send a message to the benchmark. pub async fn send(&mut self, message: &OutgoingMessage<'_>) -> Result<()> { self.send_buffer.truncate(0); serde_cbor::to_writer(&mut self.send_buffer, message) .with_context(|| format!("Failed to serialize message {:?}", message))?; let size = u32::try_from(self.send_buffer.len()).unwrap(); let length_buf = size.to_be_bytes(); self.socket .write_all(&length_buf) .await .context("Failed to send message header")?; self.socket .write_all(&self.send_buffer) .await .context("Failed to send message")?; Ok(()) } } // All of these structs are used to communicate with Criterion.rs. The benchmarks may be running // any version of Criterion.rs that supports cargo-criterion, so backwards compatibility is // important. 
#[derive(Debug, Deserialize)] pub enum IncomingMessage { // Benchmark lifecycle messages BeginningBenchmarkGroup { group: String, }, FinishedBenchmarkGroup { group: String, }, BeginningBenchmark { id: RawBenchmarkId, }, SkippingBenchmark { id: RawBenchmarkId, }, Warmup { nanos: f64, }, MeasurementStart { sample_count: u64, estimate_ns: f64, iter_count: u64, }, MeasurementComplete { iters: Vec<f64>, times: Vec<f64>, plot_config: PlotConfiguration, sampling_method: SamplingMethod, benchmark_config: BenchmarkConfig, }, // Value formatting responses FormattedValue { value: String, }, ScaledValues { scaled_values: Vec<f64>, unit: String, }, } #[allow(dead_code)] #[derive(Debug, Serialize)] pub enum OutgoingMessage<'a> { FormatValue { value: f64, }, FormatThroughput { value: f64, throughput: Throughput, }, ScaleValues { typical_value: f64, values: &'a [f64], }, ScaleThroughputs { typical_value: f64, values: &'a [f64], throughput: Throughput, }, ScaleForMachines { values: &'a [f64], }, Continue, } #[derive(Debug, Deserialize)] pub struct RawBenchmarkId { group_id: String, function_id: Option<String>, value_str: Option<String>, throughput: Vec<Throughput>, } impl From<RawBenchmarkId> for crate::report::BenchmarkId { fn from(other: RawBenchmarkId) -> Self { crate::report::BenchmarkId::new( other.group_id, other.function_id, other.value_str, other.throughput.first().cloned(), ) } } #[derive(Debug, Deserialize, Clone, Copy)] pub enum AxisScale { Linear, Logarithmic, } #[derive(Debug, Deserialize)] pub struct PlotConfiguration { pub summary_scale: AxisScale, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] pub enum Throughput { Bytes(u64), Elements(u64), } #[derive(Debug, Deserialize)] pub enum SamplingMethod { Linear, Flat, } impl SamplingMethod { pub fn is_linear(&self) -> bool { match self { SamplingMethod::Linear => true, _ => false, } } } #[derive(Debug, Deserialize)] struct Duration { secs: u64, nanos: u32, } #[derive(Debug, Deserialize)] pub struct BenchmarkConfig { confidence_level: f64, measurement_time: Duration, noise_threshold: f64, nresamples: usize, sample_size: usize, significance_level: f64, warm_up_time: Duration, } impl From<BenchmarkConfig> for crate::analysis::BenchmarkConfig { fn from(other: BenchmarkConfig) -> Self { crate::analysis::BenchmarkConfig { confidence_level: other.confidence_level, measurement_time: std::time::Duration::new( other.measurement_time.secs, other.measurement_time.nanos, ), noise_threshold: other.noise_threshold, nresamples: other.nresamples, sample_size: other.sample_size, significance_level: other.significance_level, warm_up_time: std::time::Duration::new( other.warm_up_time.secs, other.warm_up_time.nanos, ), } } }
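// [Editor's addition] Illustrative sketch of driving a `Connection`: bind a
// local listener, accept one benchmark, and log everything it sends. This is
// not the real cargo-criterion message loop; the address and the function
// name are assumptions made for the example.
async fn run_single_benchmark() -> Result<()> {
    let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await?;
    let (socket, _addr) = listener.accept().await?;

    // Performs the hello handshake described above and checks the magic numbers.
    let mut connection = Connection::new(socket).await?;

    // Read messages until the benchmark closes its end of the connection.
    while let Some(message) = connection.recv::<IncomingMessage>().await? {
        println!("benchmark sent: {:?}", message);
        // Replies (e.g. `connection.send(&OutgoingMessage::Continue).await?`)
        // would be chosen per message type in a real runner.
    }
    Ok(())
}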
31.557003
139
0.597543
21764d861300d9f071a0bc3fdb596a2badcf9c73
1,802
use std::rc::Rc;
use std::cell::RefCell;
use std::fmt::Debug;
use core::cmp::{PartialEq,Eq};

//Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<TreeNode>>>,
    pub right: Option<Rc<RefCell<TreeNode>>>,
}

impl TreeNode {
    #[inline]
    pub fn new(val: i32) -> Self {
        TreeNode { val, left: None, right: None }
    }
}

pub fn sum_root_to_leaf1(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
    fn helper(root: Option<Rc<RefCell<TreeNode>>>, val: i32) -> i32 {
        match root {
            Some(node) => match node.borrow_mut() {
                mut p => match p.right.is_none() && p.left.is_none() {
                    true => val | p.val,
                    _ => helper(p.left.take(), (val | p.val) << 1) + helper(p.right.take(), (val | p.val) << 1),
                },
            },
            _ => 0,
        }
    }
    helper(root, 0)
}

pub fn sum_root_to_leaf2(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
    fn helper(root: Option<Rc<RefCell<TreeNode>>>, val: i32) -> i32 {
        match root {
            Some(node) => {
                //let ans = val * 10 + node.borrow().val; //error
                let ans = val + node.borrow().val;
                if node.borrow().left.is_none() && node.borrow().right.is_none() {
                    ans
                } else if node.borrow().left.is_none() {
                    helper(node.borrow().right.clone(), ans)
                } else if node.borrow().right.is_none() {
                    helper(node.borrow().left.clone(), ans)
                } else {
                    //helper(node.borrow().left.clone(), ans) + helper(node.borrow().right.clone(), ans)
                    helper(node.borrow().left.clone(), 0) + helper(node.borrow().right.clone(), 0) + ans
                }
            }
            _ => 0,
        }
    }
    helper(root, 0)
}
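// [Editor's addition] Sanity check for sum_root_to_leaf1: the tree 1 -> (0, 1)
// encodes the binary paths 10 and 11, so the expected sum is 2 + 3 = 5. The
// function name and the example tree are illustrative assumptions.
fn demo_sum_root_to_leaf1() {
    let mut root = TreeNode::new(1);
    root.left = Some(Rc::new(RefCell::new(TreeNode::new(0))));
    root.right = Some(Rc::new(RefCell::new(TreeNode::new(1))));
    assert_eq!(sum_root_to_leaf1(Some(Rc::new(RefCell::new(root)))), 5);
}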
28.15625
105
0.529967
8793b1feca81758acd530463a7003f1952b37b7a
1,518
pub mod NewRelic {
    use crate::alert_sources::base::AlertSource;
    use crate::alert_sources::response::AlertList;
    /// This is used for interacting with NewRelic.
    /// Dev Note: All functions must be private since only implemented functions are to be used. Helper functions created must not be exposed
    use crate::alert_sources::AlertSourceInfo;
    use std::error::Error;

    pub struct NewRelicHandler {
        pub connect_url: String,
        pub identifier: String,
        connection_params: serde_json::Value,
        auth_key: String,
        auth_mechanism: String,
    }

    impl AlertSource for NewRelicHandler {
        fn new_from_object(obj: &AlertSourceInfo) -> Self {
            NewRelicHandler {
                auth_key: "".to_string(),
                auth_mechanism: obj.auth_type.to_string(),
                connect_url: obj.connect_url.to_string(),
                connection_params: obj.connection_params.clone(),
                identifier: obj.identifier.to_string(),
            }
        }

        fn get_source_name(&self) -> &str {
            "newrelic"
        }

        fn test_connection(&mut self) -> bool {
            true
        }

        fn process_webhook(&self) -> Result<AlertList, Box<dyn Error>> {
            return Ok(Vec::new());
        }

        fn acknowledge_alert(&self) -> bool {
            todo!()
        }

        fn get_active_alerts(&mut self) -> Result<AlertList, Box<dyn Error>> {
            return Ok(Vec::new());
        }
    }
}
30.979592
141
0.582345
7a1c07e2ed0bae1ef5dfbf7118b4413f6f019c02
1,253
//! Passes for the Calyx compiler.
mod clk_insertion;
mod collapse_control;
mod compile_control;
mod compile_empty;
mod compile_invoke;
mod component_interface;
mod dead_cell_removal;
mod externalize;
mod go_insertion;
mod infer_static_timing;
mod inliner;
mod math_utilities;
mod merge_assign;
mod minimize_regs;
mod papercut;
mod resource_sharing;
mod simplify_guards;
mod static_timing;
mod synthesis_papercut;
mod top_down_compile_control;
mod well_formed;

pub use clk_insertion::ClkInsertion;
pub use collapse_control::CollapseControl;
pub use compile_control::CompileControl;
pub use compile_empty::CompileEmpty;
pub use compile_invoke::CompileInvoke;
pub use component_interface::ComponentInterface;
pub use dead_cell_removal::DeadCellRemoval;
pub use externalize::Externalize;
pub use go_insertion::GoInsertion;
pub use infer_static_timing::InferStaticTiming;
pub use inliner::Inliner;
pub use merge_assign::MergeAssign;
pub use minimize_regs::MinimizeRegs;
pub use papercut::Papercut;
pub use resource_sharing::ResourceSharing;
pub use simplify_guards::SimplifyGuards;
pub use static_timing::StaticTiming;
pub use synthesis_papercut::SynthesisPapercut;
pub use top_down_compile_control::TopDownCompileControl;
pub use well_formed::WellFormed;
28.477273
56
0.845172
e5d233cdd042bd9ebfc21828c553cd28c5ac6422
20,852
//! Items for reading and opening file formats from file. use crate::Format; #[cfg(feature = "caf")] use caf::{self, CafError}; #[cfg(feature = "flac")] use claxon; #[cfg(feature = "wav")] use hound; #[cfg(feature = "ogg_vorbis")] use lewton; /// Types to which read samples may be converted via the `Reader::samples` method. pub trait Sample: sample::Sample + sample::FromSample<i8> + sample::FromSample<i16> + sample::FromSample<sample::I24> + sample::FromSample<i32> + sample::FromSample<f32> { } impl<T> Sample for T where T: sample::Sample + sample::FromSample<i8> + sample::FromSample<i16> + sample::FromSample<sample::I24> + sample::FromSample<i32> + sample::FromSample<f32> { } /// Returned by the `read` function, enumerates the various supported readers. pub enum Reader<R> where R: std::io::Read + std::io::Seek, { #[cfg(feature = "flac")] Flac(claxon::FlacReader<R>), #[cfg(feature = "ogg_vorbis")] OggVorbis(lewton::inside_ogg::OggStreamReader<R>), #[cfg(feature = "wav")] Wav(hound::WavReader<R>), #[cfg(feature = "caf_alac")] CafAlac(super::caf_alac::AlacReader<R>), } /// An iterator that reads samples from the underlying reader, converts them to the sample type `S` /// if not already in that format and yields them. pub struct Samples<'a, R, S> where R: 'a + std::io::Read + std::io::Seek, { format: FormatSamples<'a, R>, sample: std::marker::PhantomData<S>, } // The inner part of the `Samples` iterator, specific to the format of the `Reader` used to produce // the `Samples`. enum FormatSamples<'a, R> where R: 'a + std::io::Read + std::io::Seek, { #[cfg(feature = "flac")] Flac(claxon::FlacSamples<&'a mut claxon::input::BufferedReader<R>>), #[cfg(feature = "ogg_vorbis")] OggVorbis { reader: &'a mut lewton::inside_ogg::OggStreamReader<R>, index: usize, buffer: Vec<i16>, }, #[cfg(feature = "wav")] Wav(WavSamples<'a, R>), #[cfg(feature = "caf_alac")] CafAlac { reader: &'a mut super::caf_alac::AlacReader<R>, index: usize, buffer: Vec<i32>, }, } // The variants of hound's supported sample bit depths. #[cfg(feature = "wav")] enum WavSamples<'a, R: 'a> { I8(hound::WavSamples<'a, R, i8>), I16(hound::WavSamples<'a, R, i16>), I24(hound::WavSamples<'a, R, i32>), I32(hound::WavSamples<'a, R, i32>), F32(hound::WavSamples<'a, R, f32>), } /// An iterator that reads samples from the underlying reader, converts them to frames of type `F` /// and yields them. pub struct Frames<'a, R, F> where R: 'a + std::io::Read + std::io::Seek, F: sample::Frame, { samples: Samples<'a, R, F::Sample>, frame: std::marker::PhantomData<F>, } /// An alias for the buffered, file `Reader` type returned from the `open` function. pub type BufFileReader = Reader<std::io::BufReader<std::fs::File>>; /// A description of the audio format that was read from file. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Description { format: Format, channel_count: u32, sample_rate: u32, } /// Errors that might be returned from the `Reader::new` function. #[derive(Debug)] pub enum ReadError { Io(std::io::Error), Reader(FormatError), UnsupportedFormat, } /// Format-specific errors that might occur when opening or reading from an audio file. #[derive(Debug)] pub enum FormatError { #[cfg(feature = "flac")] Flac(claxon::Error), #[cfg(feature = "ogg_vorbis")] OggVorbis(lewton::VorbisError), #[cfg(feature = "wav")] Wav(hound::Error), #[cfg(feature = "caf")] Caf(caf::CafError), #[cfg(feature = "alac")] Alac(()), } /// Attempts to open an audio `Reader` from the file at the specified `Path`. 
/// /// The format is determined from the path's file extension. pub fn open<P>(file_path: P) -> Result<BufFileReader, ReadError> where P: AsRef<std::path::Path>, { BufFileReader::open(file_path) } impl Description { /// The format from which the audio will be read. pub fn format(&self) -> Format { self.format } /// The number of channels of audio. /// /// E.g. For audio stored in stereo this should return `2`. Mono audio will return `1`. pub fn channel_count(&self) -> u32 { self.channel_count } /// The rate in Hertz at which each channel of the stored audio is sampled. /// /// E.g. A `sample_rate` of 44_100 indicates that the audio is sampled 44_100 times per second /// per channel. pub fn sample_rate(&self) -> u32 { self.sample_rate } } impl BufFileReader { /// Attempts to open an audio `Reader` from the file at the specified `Path`. /// /// This function is a convenience wrapper around the `Reader::new` function. /// /// This function pays no attention to the `file_path`'s extension and instead attempts to read /// a supported `Format` via the file header. pub fn open<P>(file_path: P) -> Result<Self, ReadError> where P: AsRef<std::path::Path>, { let path = file_path.as_ref(); let file = std::fs::File::open(path)?; let reader = std::io::BufReader::new(file); Reader::new(reader) } } impl<R> Reader<R> where R: std::io::Read + std::io::Seek, { /// Attempts to read the format of the audio read by the given `reader` and returns the associated /// `Reader` variant. /// /// The format is determined by attempting to construct each specific format reader until one /// is successful. pub fn new(mut reader: R) -> Result<Self, ReadError> { #[cfg(feature = "wav")] { let is_wav = match hound::WavReader::new(&mut reader) { Err(hound::Error::FormatError(_)) => false, Err(err) => return Err(err.into()), Ok(_) => true, }; reader.seek(std::io::SeekFrom::Start(0))?; if is_wav { return Ok(Reader::Wav(hound::WavReader::new(reader)?)); } } #[cfg(feature = "flac")] { let is_flac = match claxon::FlacReader::new(&mut reader) { Err(claxon::Error::FormatError(_)) => false, Err(err) => return Err(err.into()), Ok(_) => true, }; reader.seek(std::io::SeekFrom::Start(0))?; if is_flac { return Ok(Reader::Flac(claxon::FlacReader::new(reader)?)); } } #[cfg(feature = "ogg_vorbis")] { let is_ogg_vorbis = match lewton::inside_ogg::OggStreamReader::new(&mut reader) { Err(lewton::VorbisError::OggError(_)) | Err(lewton::VorbisError::BadHeader( lewton::header::HeaderReadError::NotVorbisHeader, )) => false, Err(err) => return Err(err.into()), Ok(_) => true, }; reader.seek(std::io::SeekFrom::Start(0))?; if is_ogg_vorbis { return Ok(Reader::OggVorbis(lewton::inside_ogg::OggStreamReader::new( reader, )?)); } } #[cfg(feature = "caf_alac")] { let is_caf_alac = match super::caf_alac::AlacReader::new(&mut reader) { Err(FormatError::Caf(CafError::NotCaf)) => false, Err(err) => return Err(err.into()), // There is a CAF container, but no ALAC inside Ok(None) => false, // Everything is fine! Ok(Some(_)) => true, }; reader.seek(std::io::SeekFrom::Start(0))?; if is_caf_alac { return Ok(Reader::CafAlac( super::caf_alac::AlacReader::new(reader)?.unwrap(), )); } } Err(ReadError::UnsupportedFormat) } /// The format from which the audio will be read. 
pub fn format(&self) -> Format { match *self { #[cfg(feature = "flac")] Reader::Flac(_) => Format::Flac, #[cfg(feature = "ogg_vorbis")] Reader::OggVorbis(_) => Format::OggVorbis, #[cfg(feature = "wav")] Reader::Wav(_) => Format::Wav, #[cfg(feature = "caf_alac")] Reader::CafAlac(_) => Format::CafAlac, } } /// A basic description of the audio being read. pub fn description(&self) -> Description { match *self { #[cfg(feature = "flac")] Reader::Flac(ref reader) => { let info = reader.streaminfo(); Description { format: Format::Flac, channel_count: info.channels as u32, sample_rate: info.sample_rate, } } #[cfg(feature = "ogg_vorbis")] Reader::OggVorbis(ref reader) => Description { format: Format::OggVorbis, channel_count: u32::from(reader.ident_hdr.audio_channels), sample_rate: reader.ident_hdr.audio_sample_rate as u32, }, #[cfg(feature = "wav")] Reader::Wav(ref reader) => { let spec = reader.spec(); Description { format: Format::Wav, channel_count: u32::from(spec.channels), sample_rate: spec.sample_rate, } } #[cfg(feature = "caf_alac")] Reader::CafAlac(ref reader) => { let desc = &reader.caf_reader.audio_desc; Description { format: Format::CafAlac, channel_count: desc.channels_per_frame as u32, sample_rate: (1.0 / desc.sample_rate) as u32, } } } } /// Produce an iterator that reads samples from the underlying reader, converts them to the /// sample type `S` if not already in that format and yields them. /// /// When reading from multiple channels, samples are **interleaved**. pub fn samples<S>(&mut self) -> Samples<R, S> where S: Sample, { let format = match *self { #[cfg(feature = "flac")] Reader::Flac(ref mut reader) => FormatSamples::Flac(reader.samples()), #[cfg(feature = "ogg_vorbis")] Reader::OggVorbis(ref mut reader) => FormatSamples::OggVorbis { reader, index: 0, buffer: Vec::new(), }, #[cfg(feature = "wav")] Reader::Wav(ref mut reader) => { let spec = reader.spec(); match spec.sample_format { hound::SampleFormat::Int => match spec.bits_per_sample { 8 => FormatSamples::Wav(WavSamples::I8(reader.samples())), 16 => FormatSamples::Wav(WavSamples::I16(reader.samples())), 24 => FormatSamples::Wav(WavSamples::I24(reader.samples())), 32 => FormatSamples::Wav(WavSamples::I32(reader.samples())), // Should there be an error here? _ => FormatSamples::Wav(WavSamples::I32(reader.samples())), }, hound::SampleFormat::Float => { FormatSamples::Wav(WavSamples::F32(reader.samples())) } } } #[cfg(feature = "caf_alac")] Reader::CafAlac(ref mut reader) => FormatSamples::CafAlac { reader, index: 0, buffer: Vec::new(), }, }; Samples { format, sample: std::marker::PhantomData, } } /// Produce an iterator that yields read frames from the underlying `Reader`. /// /// This method currently expects that the frame type `F` has the same number of channels as /// stored in the underlying audio format. /// /// TODO: Should consider changing this behaviour to check the audio file's actual number of /// channels and automatically convert to `F`'s number of channels while reading. 
pub fn frames<F>(&mut self) -> Frames<R, F> where F: sample::Frame, F::Sample: Sample, { Frames { samples: self.samples(), frame: std::marker::PhantomData, } } } impl<'a, R, S> Iterator for Samples<'a, R, S> where R: std::io::Read + std::io::Seek, S: Sample, { type Item = Result<S, FormatError>; fn next(&mut self) -> Option<Self::Item> { match self.format { #[cfg(feature = "flac")] FormatSamples::Flac(ref mut flac_samples) => flac_samples.next().map(|sample| { sample .map_err(FormatError::Flac) .map(sample::Sample::to_sample) }), #[cfg(feature = "ogg_vorbis")] FormatSamples::OggVorbis { ref mut reader, ref mut index, ref mut buffer, } => loop { // Convert and return any pending samples. if *index < buffer.len() { let sample = sample::Sample::to_sample(buffer[*index]); *index += 1; return Some(Ok(sample)); } // If there are no samples left in the buffer, refill the buffer. match reader.read_dec_packet_itl() { Ok(Some(packet)) => { std::mem::replace(buffer, packet); *index = 0; } Ok(None) => return None, Err(err) => return Some(Err(err.into())), } }, #[cfg(feature = "wav")] FormatSamples::Wav(ref mut wav_samples) => { macro_rules! next_sample { ($samples:expr) => {{ $samples.next().map(|sample| { sample .map_err(FormatError::Wav) .map(sample::Sample::to_sample) }) }}; } match *wav_samples { WavSamples::I8(ref mut samples) => next_sample!(samples), WavSamples::I16(ref mut samples) => next_sample!(samples), WavSamples::I24(ref mut samples) => samples.next().map(|sample| { sample .map_err(FormatError::Wav) .map(sample::I24::new_unchecked) .map(sample::Sample::to_sample) }), WavSamples::I32(ref mut samples) => next_sample!(samples), WavSamples::F32(ref mut samples) => next_sample!(samples), } } #[cfg(feature = "caf_alac")] FormatSamples::CafAlac { ref mut reader, ref mut index, ref mut buffer, } => loop { // Convert and return any pending samples. if *index < buffer.len() { let sample = sample::Sample::to_sample(buffer[*index]); *index += 1; return Some(Ok(sample)); } // If there are no samples left in the buffer, refill the buffer. 
match reader.read_packet() { Ok(Some(packet)) => { std::mem::replace(buffer, packet); *index = 0; } Ok(None) => return None, Err(err) => return Some(Err(err)), } }, } } } impl<'a, R, F> Iterator for Frames<'a, R, F> where R: std::io::Read + std::io::Seek, F: sample::Frame, F::Sample: Sample, { type Item = Result<F, FormatError>; fn next(&mut self) -> Option<Self::Item> { enum FrameConstruction { NotEnoughSamples, Ok, Err(FormatError), } let mut result = FrameConstruction::Ok; let frame = F::from_fn(|_| match self.samples.next() { Some(Ok(sample)) => sample, Some(Err(error)) => { result = FrameConstruction::Err(error); <F::Sample as sample::Sample>::equilibrium() } None => { result = FrameConstruction::NotEnoughSamples; <F::Sample as sample::Sample>::equilibrium() } }); match result { FrameConstruction::Ok => Some(Ok(frame)), FrameConstruction::Err(error) => Some(Err(error)), FrameConstruction::NotEnoughSamples => None, } } } #[cfg(feature = "flac")] impl From<claxon::Error> for FormatError { fn from(err: claxon::Error) -> Self { FormatError::Flac(err) } } #[cfg(feature = "ogg_vorbis")] impl From<lewton::VorbisError> for FormatError { fn from(err: lewton::VorbisError) -> Self { FormatError::OggVorbis(err) } } #[cfg(feature = "wav")] impl From<hound::Error> for FormatError { fn from(err: hound::Error) -> Self { FormatError::Wav(err) } } #[cfg(feature = "caf")] impl From<CafError> for FormatError { fn from(err: CafError) -> Self { FormatError::Caf(err) } } impl<T> From<T> for ReadError where T: Into<FormatError>, { fn from(err: T) -> Self { ReadError::Reader(err.into()) } } impl From<std::io::Error> for ReadError { fn from(err: std::io::Error) -> Self { ReadError::Io(err) } } impl std::error::Error for FormatError { fn description(&self) -> &str { match *self { #[cfg(feature = "flac")] FormatError::Flac(ref err) => std::error::Error::description(err), #[cfg(feature = "ogg_vorbis")] FormatError::OggVorbis(ref err) => std::error::Error::description(err), #[cfg(feature = "wav")] FormatError::Wav(ref err) => std::error::Error::description(err), #[cfg(feature = "caf")] FormatError::Caf(ref err) => std::error::Error::description(err), #[cfg(feature = "alac")] FormatError::Alac(_) => "Alac decode error", } } fn cause(&self) -> Option<&dyn std::error::Error> { match *self { #[cfg(feature = "flac")] FormatError::Flac(ref err) => Some(err), #[cfg(feature = "ogg_vorbis")] FormatError::OggVorbis(ref err) => Some(err), #[cfg(feature = "wav")] FormatError::Wav(ref err) => Some(err), #[cfg(feature = "caf")] FormatError::Caf(ref err) => Some(err), #[cfg(feature = "alac")] FormatError::Alac(_) => None, } } } impl std::error::Error for ReadError { fn description(&self) -> &str { match *self { ReadError::Io(ref err) => std::error::Error::description(err), ReadError::Reader(ref err) => std::error::Error::description(err), ReadError::UnsupportedFormat => "no supported format was detected", } } fn cause(&self) -> Option<&dyn std::error::Error> { match *self { ReadError::Io(ref err) => Some(err), ReadError::Reader(ref err) => Some(err), ReadError::UnsupportedFormat => None, } } } impl std::fmt::Display for FormatError { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { match *self { #[cfg(feature = "flac")] FormatError::Flac(ref err) => err.fmt(f), #[cfg(feature = "ogg_vorbis")] FormatError::OggVorbis(ref err) => err.fmt(f), #[cfg(feature = "wav")] FormatError::Wav(ref err) => err.fmt(f), #[cfg(feature = "caf")] FormatError::Caf(ref err) => err.fmt(f), #[cfg(feature = "alac")] 
FormatError::Alac(_) => write!(f, "{}", std::error::Error::description(self)), } } } impl std::fmt::Display for ReadError { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { match *self { ReadError::Io(ref err) => err.fmt(f), ReadError::Reader(ref err) => err.fmt(f), ReadError::UnsupportedFormat => write!(f, "{}", std::error::Error::description(self)), } } }
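// The `Samples` and `Frames` iterators above yield decoded audio one `Result`
// at a time, so format-agnostic helpers can be written purely against that
// contract. The function below is a minimal illustrative sketch, not part of
// the original crate: it assumes only the `FormatError` type defined in this
// module and any iterator of `Result<f32, FormatError>` (which is what a
// `Samples` iterator over `f32` produces); how that iterator is constructed
// is outside this excerpt.
#[allow(dead_code)]
fn peak_amplitude<I>(samples: I) -> Result<f32, FormatError>
where
    I: Iterator<Item = Result<f32, FormatError>>,
{
    let mut peak = 0.0f32;
    for sample in samples {
        // Each decoded sample can carry a per-sample format error; propagate the first one.
        peak = peak.max(sample?.abs());
    }
    Ok(peak)
}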
32.734694
102
0.518847
678521be1433660e9c223979fbc99b3b69dcb617
9,044
use std::usize; use super::keyboard::{Key, KeyStates}; use super::mouse::{EditorMouseState, MouseKeys, MouseState, ViewportBounds}; use crate::message_prelude::*; use bitflags::bitflags; #[doc(inline)] pub use graphene::DocumentResponse; use serde::{Deserialize, Serialize}; #[impl_message(Message, InputPreprocessor)] #[derive(PartialEq, Clone, Debug, Serialize, Deserialize)] pub enum InputPreprocessorMessage { MouseDown(EditorMouseState, ModifierKeys), MouseUp(EditorMouseState, ModifierKeys), MouseMove(EditorMouseState, ModifierKeys), MouseScroll(EditorMouseState, ModifierKeys), KeyUp(Key, ModifierKeys), KeyDown(Key, ModifierKeys), BoundsOfViewports(Vec<ViewportBounds>), } bitflags! { #[derive(Default, Serialize, Deserialize)] #[repr(transparent)] pub struct ModifierKeys: u8 { const CONTROL = 0b0000_0001; const SHIFT = 0b0000_0010; const ALT = 0b0000_0100; } } #[derive(Debug, Default)] pub struct InputPreprocessor { pub keyboard: KeyStates, pub mouse: MouseState, pub viewport_bounds: ViewportBounds, } enum KeyPosition { Pressed, Released, } impl MessageHandler<InputPreprocessorMessage, ()> for InputPreprocessor { fn process_action(&mut self, message: InputPreprocessorMessage, _data: (), responses: &mut VecDeque<Message>) { match message { InputPreprocessorMessage::MouseMove(editor_mouse_state, modifier_keys) => { self.handle_modifier_keys(modifier_keys, responses); let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds); self.mouse.position = mouse_state.position; responses.push_back(InputMapperMessage::PointerMove.into()); } InputPreprocessorMessage::MouseDown(editor_mouse_state, modifier_keys) => { self.handle_modifier_keys(modifier_keys, responses); let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds); self.mouse.position = mouse_state.position; if let Some(message) = self.translate_mouse_event(mouse_state, KeyPosition::Pressed) { responses.push_back(message); } } InputPreprocessorMessage::MouseUp(editor_mouse_state, modifier_keys) => { self.handle_modifier_keys(modifier_keys, responses); let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds); self.mouse.position = mouse_state.position; if let Some(message) = self.translate_mouse_event(mouse_state, KeyPosition::Released) { responses.push_back(message); } } InputPreprocessorMessage::MouseScroll(editor_mouse_state, modifier_keys) => { self.handle_modifier_keys(modifier_keys, responses); let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds); self.mouse.position = mouse_state.position; self.mouse.scroll_delta = mouse_state.scroll_delta; responses.push_back(InputMapperMessage::MouseScroll.into()); } InputPreprocessorMessage::KeyDown(key, modifier_keys) => { self.handle_modifier_keys(modifier_keys, responses); self.keyboard.set(key as usize); responses.push_back(InputMapperMessage::KeyDown(key).into()); } InputPreprocessorMessage::KeyUp(key, modifier_keys) => { self.handle_modifier_keys(modifier_keys, responses); self.keyboard.unset(key as usize); responses.push_back(InputMapperMessage::KeyUp(key).into()); } InputPreprocessorMessage::BoundsOfViewports(bounds_of_viewports) => { assert_eq!(bounds_of_viewports.len(), 1, "Only one viewport is currently supported"); for bounds in bounds_of_viewports { let new_size = bounds.size(); let existing_size = self.viewport_bounds.size(); let translation = (new_size - existing_size) / 2.; // TODO: Extend this to multiple viewports instead of setting it to the value of this last loop iteration self.viewport_bounds = bounds; 
responses.push_back( graphene::Operation::TransformLayer { path: vec![], transform: glam::DAffine2::from_translation(translation).to_cols_array(), } .into(), ); responses.push_back( DocumentMessage::Overlay( graphene::Operation::TransformLayer { path: vec![], transform: glam::DAffine2::from_translation(translation).to_cols_array(), } .into(), ) .into(), ); } } }; } // clean user input and if possible reconstruct it // store the changes in the keyboard if it is a key event // transform canvas coordinates to document coordinates advertise_actions!(); } impl InputPreprocessor { fn translate_mouse_event(&mut self, new_state: MouseState, position: KeyPosition) -> Option<Message> { // Calculate the difference between the two key states (binary xor) let diff = self.mouse.mouse_keys ^ new_state.mouse_keys; self.mouse = new_state; let key = match diff { MouseKeys::LEFT => Key::Lmb, MouseKeys::RIGHT => Key::Rmb, MouseKeys::MIDDLE => Key::Mmb, MouseKeys::NONE => return None, // self.mouse.mouse_keys was invalid, e.g. when a drag began outside the client _ => { log::warn!("The number of buttons modified at the same time was greater than 1. Modification: {:#010b}", diff); Key::UnknownKey } }; Some(match position { KeyPosition::Pressed => InputMapperMessage::KeyDown(key).into(), KeyPosition::Released => InputMapperMessage::KeyUp(key).into(), }) } fn handle_modifier_keys(&mut self, modifier_keys: ModifierKeys, responses: &mut VecDeque<Message>) { self.handle_modifier_key(Key::KeyControl, modifier_keys.contains(ModifierKeys::CONTROL), responses); self.handle_modifier_key(Key::KeyShift, modifier_keys.contains(ModifierKeys::SHIFT), responses); self.handle_modifier_key(Key::KeyAlt, modifier_keys.contains(ModifierKeys::ALT), responses); } fn handle_modifier_key(&mut self, key: Key, key_is_down: bool, responses: &mut VecDeque<Message>) { let key_was_down = self.keyboard.get(key as usize); if key_was_down && !key_is_down { self.keyboard.unset(key as usize); responses.push_back(InputMapperMessage::KeyUp(key).into()); } else if !key_was_down && key_is_down { self.keyboard.set(key as usize); responses.push_back(InputMapperMessage::KeyDown(key).into()); } } } #[cfg(test)] mod test { use crate::input::mouse::ViewportPosition; use super::*; #[test] fn process_action_mouse_move_handle_modifier_keys() { let mut input_preprocessor = InputPreprocessor::default(); let mut editor_mouse_state = EditorMouseState::new(); editor_mouse_state.editor_position = ViewportPosition::new(4., 809.); let message = InputPreprocessorMessage::MouseMove(editor_mouse_state, ModifierKeys::ALT); let mut responses = VecDeque::new(); input_preprocessor.process_action(message, (), &mut responses); assert!(input_preprocessor.keyboard.get(Key::KeyAlt as usize)); assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyDown(Key::KeyAlt).into())); } #[test] fn process_action_mouse_down_handle_modifier_keys() { let mut input_preprocessor = InputPreprocessor::default(); let message = InputPreprocessorMessage::MouseDown(EditorMouseState::new(), ModifierKeys::CONTROL); let mut responses = VecDeque::new(); input_preprocessor.process_action(message, (), &mut responses); assert!(input_preprocessor.keyboard.get(Key::KeyControl as usize)); assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyDown(Key::KeyControl).into())); } #[test] fn process_action_mouse_up_handle_modifier_keys() { let mut input_preprocessor = InputPreprocessor::default(); let message = InputPreprocessorMessage::MouseUp(EditorMouseState::new(), ModifierKeys::SHIFT); let mut 
responses = VecDeque::new(); input_preprocessor.process_action(message, (), &mut responses); assert!(input_preprocessor.keyboard.get(Key::KeyShift as usize)); assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyDown(Key::KeyShift).into())); } #[test] fn process_action_key_down_handle_modifier_keys() { let mut input_preprocessor = InputPreprocessor::default(); input_preprocessor.keyboard.set(Key::KeyControl as usize); let message = InputPreprocessorMessage::KeyDown(Key::KeyA, ModifierKeys::empty()); let mut responses = VecDeque::new(); input_preprocessor.process_action(message, (), &mut responses); assert!(!input_preprocessor.keyboard.get(Key::KeyControl as usize)); assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyUp(Key::KeyControl).into())); } #[test] fn process_action_key_up_handle_modifier_keys() { let mut input_preprocessor = InputPreprocessor::default(); let message = InputPreprocessorMessage::KeyUp(Key::KeyS, ModifierKeys::CONTROL | ModifierKeys::SHIFT); let mut responses = VecDeque::new(); input_preprocessor.process_action(message, (), &mut responses); assert!(input_preprocessor.keyboard.get(Key::KeyControl as usize)); assert!(input_preprocessor.keyboard.get(Key::KeyShift as usize)); assert!(responses.contains(&InputMapperMessage::KeyDown(Key::KeyControl).into())); assert!(responses.contains(&InputMapperMessage::KeyDown(Key::KeyShift).into())); } }
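// A minimal additional test sketch, not part of the original suite: it reuses
// only APIs already exercised by the tests above (`KeyStates::set`/`get`,
// `process_action`, and the `InputMapperMessage` responses) and checks that a
// plain `KeyUp` without modifiers clears the corresponding key state again.
#[cfg(test)]
mod key_up_example_test {
    use super::*;

    #[test]
    fn process_action_key_up_clears_key_state() {
        let mut input_preprocessor = InputPreprocessor::default();
        input_preprocessor.keyboard.set(Key::KeyA as usize);

        let message = InputPreprocessorMessage::KeyUp(Key::KeyA, ModifierKeys::empty());
        let mut responses = VecDeque::new();
        input_preprocessor.process_action(message, (), &mut responses);

        // The key itself is released and the input mapper is notified.
        assert!(!input_preprocessor.keyboard.get(Key::KeyA as usize));
        assert!(responses.contains(&InputMapperMessage::KeyUp(Key::KeyA).into()));
    }
}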
36.914286
115
0.742371
3af87cc1c238e328a23d4efacd2ccdc4e8fea6c7
114,585
//! //! The Zinc compiler error. //! use colored::Colorize; use crate::file::error::Error as FileError; use crate::lexical::error::Error as LexicalError; use crate::lexical::token::lexeme::keyword::Keyword; use crate::lexical::token::location::Location; use crate::semantic::casting::error::Error as CastingError; use crate::semantic::element::constant::error::Error as ConstantError; use crate::semantic::element::constant::integer::error::Error as IntegerConstantError; use crate::semantic::element::error::Error as ElementError; use crate::semantic::element::place::error::Error as PlaceError; use crate::semantic::element::r#type::error::Error as TypeError; use crate::semantic::element::r#type::function::builtin::error::Error as BuiltInFunctionTypeError; use crate::semantic::element::r#type::function::error::Error as FunctionTypeError; use crate::semantic::element::r#type::function::stdlib::error::Error as StandardLibraryFunctionTypeError; use crate::semantic::element::r#type::structure::error::Error as StructureTypeError; use crate::semantic::element::value::array::error::Error as ArrayValueError; use crate::semantic::element::value::error::Error as ValueError; use crate::semantic::element::value::integer::error::Error as IntegerValueError; use crate::semantic::element::value::structure::error::Error as StructureValueError; use crate::semantic::element::value::tuple::error::Error as TupleValueError; use crate::semantic::error::Error as SemanticError; use crate::semantic::scope::error::Error as ScopeError; use crate::syntax::error::Error as SyntaxError; #[derive(Debug, PartialEq)] pub enum Error { File(FileError), Lexical(LexicalError), Syntax(SyntaxError), Semantic(SemanticError), } impl Error { pub fn format(self, context: &[&str]) -> String { match self { Self::File(inner) => inner.to_string(), Self::Lexical(LexicalError::UnterminatedBlockComment { start, end }) => { Self::format_range(context, "unterminated block comment", start, end, None) } Self::Lexical(LexicalError::UnterminatedDoubleQuoteString { start, end }) => { Self::format_range( context, "unterminated double quote string", start, end, None, ) } Self::Lexical(LexicalError::ExpectedOneOfBinary { location, expected, found, }) => Self::format_line( context, format!( "expected one of binary symbols {} or '_', found `{}`", expected, found ) .as_str(), location, None, ), Self::Lexical(LexicalError::ExpectedOneOfOctal { location, expected, found, }) => Self::format_line( context, format!( "expected one of octal symbols {} or '_', found `{}`", expected, found ) .as_str(), location, None, ), Self::Lexical(LexicalError::ExpectedOneOfDecimal { location, expected, found, }) => Self::format_line( context, format!( "expected one of decimal symbols {} or '_', found `{}`", expected, found ) .as_str(), location, None, ), Self::Lexical(LexicalError::ExpectedOneOfHexadecimal { location, expected, found, }) => Self::format_line( context, format!( "expected one of hexadecimal symbols {} or '_', found `{}`", expected, found ) .as_str(), location, None, ), Self::Lexical(LexicalError::InvalidCharacter { location, found }) => Self::format_line( context, format!("invalid character `{}`", found).as_str(), location, None, ), Self::Lexical(LexicalError::UnexpectedEnd { location }) => { Self::format_line(context, "unexpected end of input", location, None) } Self::Syntax(SyntaxError::ExpectedOneOf { location, expected, found, help, }) => Self::format_line( context, format!("expected one of {}, found `{}`", expected, found).as_str(), location, help, ), 
Self::Syntax(SyntaxError::ExpectedOneOfOrOperator { location, expected, found, help, }) => Self::format_line( context, format!( "expected one of {} or an operator, found `{}`", expected, found ) .as_str(), location, help, ), Self::Syntax(SyntaxError::ExpectedIdentifier { location, found, help, }) => Self::format_line( context, format!("expected identifier, found `{}`", found).as_str(), location, help, ), Self::Syntax(SyntaxError::ExpectedMutOrIdentifier { location, found, help, }) => Self::format_line( context, format!("expected `mut` or identifier, found `{}`", found).as_str(), location, help, ), Self::Syntax(SyntaxError::ExpectedFieldIdentifier { location, found, help, }) => Self::format_line( context, format!("expected field identifier, found `{}`", found).as_str(), location, help, ), Self::Syntax(SyntaxError::ExpectedType { location, found, help, }) => Self::format_line( context, format!("expected type, found `{}`", found).as_str(), location, help, ), Self::Syntax(SyntaxError::ExpectedTypeOrValue { location, found, help, }) => Self::format_line( context, format!( "expected `:` with type or `=` with value, found `{}`", found ) .as_str(), location, help, ), Self::Syntax(SyntaxError::ExpectedValue { location, found, help, }) => Self::format_line( context, format!("expected `=` with value, found `{}`", found).as_str(), location, help, ), Self::Syntax(SyntaxError::ExpectedExpressionOrOperand { location, found }) => { Self::format_line( context, format!("expected expression or operand, found `{}`", found).as_str(), location, None, ) } Self::Syntax(SyntaxError::ExpectedIntegerLiteral { location, found }) => { Self::format_line( context, format!("expected integer literal, found `{}`", found).as_str(), location, None, ) } Self::Syntax(SyntaxError::ExpectedBindingPattern { location, found }) => { Self::format_line( context, format!("expected identifier or `_`, found `{}`", found).as_str(), location, None, ) } Self::Syntax(SyntaxError::ExpectedMatchPattern { location, found }) => { Self::format_line( context, format!( "expected identifier, boolean or integer literal, path, or `_`, found `{}`", found ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment operator `=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment operator `=` expected a value as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseOrFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment bitwise OR operator `|=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseOrSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment bitwise OR operator `|=` expected a constant as the second operand, found `{}`", // TODO: constant -> value found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseXorFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the 
assignment bitwise XOR operator `^=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseXorSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment bitwise XOR operator `^=` expected a constant as the second operand, found `{}`", // TODO: constant -> value found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseAndFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment bitwise AND operator `&=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseAndSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment bitwise AND operator `&=` expected a constant as the second operand, found `{}`", // TODO: constant -> value found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseShiftLeftFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment bitwise shift left operator `<<=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseShiftLeftSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment bitwise shift left operator `<<=` expected a constant as the second operand, found `{}`", // TODO: constant -> value found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseShiftRightFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment bitwise shift right operator `>>=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentBitwiseShiftRightSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment bitwise shift right operator `>>=` expected a constant as the second operand, found `{}`", // TODO: constant -> value found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentAdditionFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment operator `+=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentAdditionSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment operator `+=` expected a value as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentSubtractionFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment operator `-=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentSubtractionSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the 
assignment operator `-=` expected a value as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentMultiplicationFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment operator `*=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentMultiplicationSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment operator `*=` expected a value as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentDivisionFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment operator `/=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentDivisionSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment operator `/=` expected a value as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentRemainderFirstOperandExpectedPlace{ found })) => { Self::format_line( context, format!( "the assignment operator `%=` expected a memory place as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAssignmentRemainderSecondOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the assignment operator `%=` expected a value as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorRangeInclusiveFirstOperandExpectedConstant{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorRangeInclusiveFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the inclusive range operator `..=` expected an integer constant as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorRangeInclusiveSecondOperandExpectedConstant{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorRangeInclusiveSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the inclusive range operator `..=` expected an integer constant as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorRangeFirstOperandExpectedConstant{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorRangeFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the range operator `..` expected an integer constant as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorRangeSecondOperandExpectedConstant{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorRangeSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the 
range operator `..` expected an integer constant as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorOrFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorOrFirstOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorOrFirstOperandExpectedBoolean{ found }))) => { Self::format_line( context, format!( "the OR operator `||` expected a boolean as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorOrSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorOrSecondOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorOrSecondOperandExpectedBoolean{ found }))) => { Self::format_line( context, format!( "the OR operator `||` expected a boolean as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorXorFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorXorFirstOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorXorFirstOperandExpectedBoolean{ found }))) => { Self::format_line( context, format!( "the XOR operator `^^` expected a boolean as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorXorSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorXorSecondOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorXorSecondOperandExpectedBoolean{ found }))) => { Self::format_line( context, format!( "the XOR operator `^^` expected a boolean as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAndFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorAndFirstOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorAndFirstOperandExpectedBoolean{ found }))) => { Self::format_line( context, format!( "the AND operator `&&` expected a boolean as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAndSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorAndSecondOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorAndSecondOperandExpectedBoolean{ found }))) => { Self::format_line( context, format!( "the AND operator `&&` expected a boolean as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorEqualsFirstOperandExpectedEvaluable{ found })) | 
Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorEqualsFirstOperandExpectedPrimitiveType{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorEqualsFirstOperandExpectedPrimitiveType{ found }))) => { Self::format_line( context, format!( "the equals operator `==` expected a unit, boolean or integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorEqualsSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorEqualsSecondOperandExpectedUnit{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorEqualsSecondOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorEqualsSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorEqualsSecondOperandExpectedUnit{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorEqualsSecondOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorEqualsSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the equals operator `==` expected a unit, boolean or integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorNotEqualsFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorNotEqualsFirstOperandExpectedPrimitiveType{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorNotEqualsFirstOperandExpectedPrimitiveType{ found }))) => { Self::format_line( context, format!( "the not equals operator `!=` expected a boolean or integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorNotEqualsSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorNotEqualsSecondOperandExpectedUnit{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorNotEqualsSecondOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorNotEqualsSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorNotEqualsSecondOperandExpectedUnit{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorNotEqualsSecondOperandExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorNotEqualsSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the not equals operator `!=` expected a boolean or integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorGreaterEqualsFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, 
ElementError::Value(ValueError::OperatorGreaterEqualsFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorGreaterEqualsFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the greater equals operator `>=` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorGreaterEqualsSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorGreaterEqualsSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorGreaterEqualsSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the greater equals operator `>=` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorLesserEqualsFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorLesserEqualsFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorLesserEqualsFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the lesser equals operator `<=` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorLesserEqualsSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorLesserEqualsSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorLesserEqualsSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the lesser equals operator `<=` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorGreaterFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorGreaterFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorGreaterFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the greater operator `>` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorGreaterSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorGreaterSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorGreaterSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the greater operator `>` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorLesserFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorLesserFirstOperandExpectedInteger{ found }))) | 
Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorLesserFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the lesser operator `<` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorLesserSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorLesserSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorLesserSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the lesser operator `<` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseOrFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseOrFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseOrFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise OR operator `|` expected an integer constant as the first operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseOrSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseOrSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseOrSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise OR operator `|` expected an integer constant as the second operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseXorFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseXorFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseXorFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise XOR operator `^` expected an integer constant as the first operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseXorSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseXorSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseXorSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise XOR operator `^` expected an integer constant as the second operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseAndFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseAndFirstOperandExpectedInteger{ found }))) | 
Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseAndFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise AND operator `&` expected an integer constant as the first operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseAndSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseAndSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseAndSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise AND operator `&` expected an integer constant as the second operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseShiftLeftFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseShiftLeftFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseShiftLeftFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise shift left operator `<<` expected an integer constant as the first operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseShiftLeftSecondOperandExpectedConstant{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseShiftLeftSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::OperatorBitwiseShiftLeftSecondOperatorExpectedUnsigned { found })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseShiftLeftSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OperatorBitwiseShiftLeftSecondOperatorExpectedUnsigned { found })))) => { Self::format_line( context, format!( "the bitwise shift left operator `<<` expected an unsigned integer constant as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseShiftRightFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseShiftRightFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseShiftRightFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise shift right operator `>>` expected an integer constant as the first operand, found `{}`", // TODO: constant -> '' found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseShiftRightSecondOperandExpectedConstant{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseShiftRightSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, 
ElementError::Value(ValueError::Integer(IntegerValueError::OperatorBitwiseShiftRightSecondOperatorExpectedUnsigned { found })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseShiftRightSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OperatorBitwiseShiftRightSecondOperatorExpectedUnsigned { found })))) => { Self::format_line( context, format!( "the bitwise shift right operator `>>` expected an unsigned integer constant as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAdditionFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorAdditionFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorAdditionFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the addition operator `+` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorAdditionSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorAdditionSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorAdditionSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the addition operator `+` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorSubtractionFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorSubtractionFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorSubtractionFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the subtraction operator `-` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorSubtractionSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorSubtractionSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorSubtractionSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the subtraction operator `-` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorMultiplicationFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorMultiplicationFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorMultiplicationFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the multiplication operator `*` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } 
Self::Semantic(SemanticError::Element(location, ElementError::OperatorMultiplicationSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorMultiplicationSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorMultiplicationSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the multiplication operator `*` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorDivisionFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorDivisionFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorDivisionFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the division operator `/` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorDivisionSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorDivisionSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorDivisionSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the division operator `/` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorRemainderFirstOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorRemainderFirstOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorRemainderFirstOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the remainder operator `%` expected an integer as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorRemainderSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorRemainderSecondOperandExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorRemainderSecondOperandExpectedInteger{ found }))) => { Self::format_line( context, format!( "the remainder operator `%` expected an integer as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorCastingFirstOperandExpectedEvaluable{ found })) => { Self::format_line( context, format!( "the casting operator `as` expected a value as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorCastingSecondOperandExpectedType{ found })) => { Self::format_line( context, format!( "the casting operator `as` expected a type as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Casting(CastingError::CastingFromInvalidType { 
from, to })))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Casting(CastingError::CastingToInvalidType { from, to })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Casting(CastingError::CastingFromInvalidType { from, to })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Casting(CastingError::CastingToInvalidType { from, to })))) => { Self::format_line( context, format!( "cannot cast from `{}` to `{}`", from, to, ) .as_str(), location, Some("only integer values can be cast to a greater or equal bitlength"), ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorNotExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorNotExpectedBoolean{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorNotExpectedBoolean{ found }))) => { Self::format_line( context, format!( "the NOT operator `!` expected a boolean, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorBitwiseNotExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorBitwiseNotExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorBitwiseNotExpectedInteger{ found }))) => { Self::format_line( context, format!( "the bitwise NOT operator `~` expected an integer, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorNegationExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorNegationExpectedInteger{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::OperatorNegationExpectedInteger{ found }))) => { Self::format_line( context, format!( "the negation operator `-` expected an integer, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorIndexFirstOperandExpectedPlaceOrEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::OperatorIndexFirstOperandExpectedArray{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorIndexFirstOperandExpectedArray{ found }))) => { Self::format_line( context, format!( "the index operator `[]` expected an array as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorIndexSecondOperandExpectedEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::OperatorIndexSecondOperandExpectedIntegerOrRange{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorIndexSecondOperandExpectedIntegerOrRange{ found }))) => { Self::format_line( context, format!( "the index operator `[]` expected an integer or range as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorFieldFirstOperandExpectedPlaceOrEvaluable{ found })) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::OperatorFieldFirstOperandExpectedTuple{ found }))) |
Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::OperatorFieldFirstOperandExpectedStructure{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorFieldFirstOperandExpectedTuple{ found }))) | Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::OperatorFieldFirstOperandExpectedStructure{ found }))) => { Self::format_line( context, format!( "the field access operator `.` expected a tuple or structure as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorFieldSecondOperandExpectedIdentifier { found })) => { Self::format_line( context, format!( "the field access operator `.` expected a tuple or structure field identifier as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorPathFirstOperandExpectedPath{ found })) => { Self::format_line( context, format!( "the path resolution operator `::` expected an item identifier as the first operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::OperatorPathSecondOperandExpectedIdentifier { found })) => { Self::format_line( context, format!( "the path resolution operator `::` expected an item identifier as the second operand, found `{}`", found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Array(ArrayValueError::PushingInvalidType { expected, found })))) => { Self::format_line( context, format!( "expected `{}`, found `{}`", expected, found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Array(ArrayValueError::SliceStartOutOfRange { start })))) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::ArraySliceStartOutOfRange { start }))) => { Self::format_line( context, format!( "left slice bound `{}` is negative", start, ) .as_str(), location, Some("slice range bounds must be within the array size"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Array(ArrayValueError::SliceEndOutOfRange { end, size })))) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::ArraySliceEndOutOfRange { end, size }))) => { Self::format_line( context, format!( "right slice bound `{}` is out of range of the array of size {}", end, size, ) .as_str(), location, Some("slice range bounds must be within the array size"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Array(ArrayValueError::SliceEndLesserThanStart { start, end })))) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::ArraySliceEndLesserThanStart { start, end }))) => { Self::format_line( context, format!( "left slice bound `{}` is greater than right slice bound `{}`", start, end, ) .as_str(), location, Some("left slice range bound must be lesser or equal to the right one"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Tuple(TupleValueError::FieldDoesNotExist { type_identifier, field_index })))) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::TupleFieldDoesNotExist { type_identifier, field_index }))) => { Self::format_line( context, format!( "tuple `{}` has no field with index `{}`", type_identifier, 
field_index, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Structure(StructureValueError::FieldDoesNotExist { type_identifier, field_name })))) | Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::StructureFieldDoesNotExist { type_identifier, field_name }))) => { Self::format_line( context, format!( "field `{}` does not exist in structure `{}`", field_name, type_identifier, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::MutatingWithDifferentType { expected, found }))) => { Self::format_line( context, format!("expected `{}`, found `{}`", expected, found).as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Place(PlaceError::MutatingImmutableMemory { name, reference }))) => { Self::format_line_with_reference( context, format!("cannot assign twice to immutable variable `{}`", name).as_str(), location, reference, Some(format!("make this variable mutable: `mut {}`", name).as_str()), ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Structure(StructureValueError::FieldExpected { type_identifier, position, expected, found })))) => { Self::format_line( context, format!( "structure `{}` expected field `{}` at position {}, found `{}`", type_identifier, expected, position, found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Structure(StructureValueError::FieldInvalidType { type_identifier, field_name, expected, found })))) => { Self::format_line( context, format!( "field `{}` of structure `{}` expected type `{}`, found `{}`", field_name, type_identifier, expected, found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Structure(StructureValueError::FieldOutOfRange { type_identifier, expected, found })))) => { Self::format_line( context, format!( "structure `{}` expected {} fields, found {}", type_identifier, expected, found, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchEquals{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchEquals{ first, second })))) => { Self::format_line( context, format!( "the equals operator `==` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchNotEquals{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchNotEquals{ first, second })))) => { Self::format_line( context, format!( "the not equals operator `!=` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchGreaterEquals{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchGreaterEquals{ first, second })))) => { Self::format_line( context, format!( "the greater equals operator `>=` expected two integers of the 
same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchLesserEquals{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchLesserEquals{ first, second })))) => { Self::format_line( context, format!( "the lesser equals operator `<=` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchGreater{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchGreater{ first, second })))) => { Self::format_line( context, format!( "the greater operator `>` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchLesser{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchLesser{ first, second })))) => { Self::format_line( context, format!( "the lesser operator `<` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchBitwiseOr{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchBitwiseOr{ first, second })))) => { Self::format_line( context, format!( "the bitwise OR operator `|` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchBitwiseXor{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchBitwiseXor{ first, second })))) => { Self::format_line( context, format!( "the bitwise XOR operator `^` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchBitwiseAnd{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchBitwiseAnd{ first, second })))) => { Self::format_line( context, format!( "the bitwise AND operator `&` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchAddition{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchAddition{ first, second })))) => { Self::format_line( context, format!( "the addition operator `+` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } 
Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchSubtraction{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchSubtraction{ first, second })))) => { Self::format_line( context, format!( "the subtraction operator `-` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchMultiplication{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchMultiplication{ first, second })))) => { Self::format_line( context, format!( "the multiplication operator `*` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchDivision{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchDivision{ first, second })))) => { Self::format_line( context, format!( "the division operator `/` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::TypesMismatchRemainder{ first, second })))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::TypesMismatchRemainder{ first, second })))) => { Self::format_line( context, format!( "the remainder operator `%` expected two integers of the same type, found `{}` and `{}`", first, second, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OverflowAddition { value, r#type })))) => { Self::format_line( context, format!( "the addition operator `+` overflow, as the value `{}` cannot be represented by type `{}`", value, r#type, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OverflowSubtraction { value, r#type })))) => { Self::format_line( context, format!( "the subtraction operator `-` overflow, as the value `{}` cannot be represented by type `{}`", value, r#type, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OverflowMultiplication { value, r#type })))) => { Self::format_line( context, format!( "the multiplication operator `*` overflow, as the value `{}` cannot be represented by type `{}`", value, r#type, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OverflowDivision { value, r#type })))) => { Self::format_line( context, format!( "the division operator `/` overflow, as the value `{}` cannot be represented by type `{}`", value, r#type, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OverflowRemainder { value, r#type })))) => { Self::format_line( context,
format!( "the remainder operator `%` overflow, as the value `{}` cannot be represeneted by type `{}`", value, r#type, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OverflowCasting { value, r#type })))) => { Self::format_line( context, format!( "the casting operator `as` overflow, as the value `{}` cannot be represeneted by type `{}`", value, r#type, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::OverflowNegation { value, r#type })))) => { Self::format_line( context, format!( "the negation operator `-` overflow, as the value `{}` cannot be represeneted by type `{}`", value, r#type, ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::ForbiddenFieldDivision)))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::ForbiddenFieldDivision)))) => { Self::format_line( context, "the division operator `/` is forbidden for the `field` type", location, Some("for inversion consider using `std::ff::invert`"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::ForbiddenFieldRemainder)))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::ForbiddenFieldRemainder)))) => { Self::format_line( context, "the remainder operator `%` is forbidden for the `field` type", location, Some("`field` type values cannot be used to get a remainder"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::ForbiddenFieldBitwise)))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::ForbiddenFieldBitwise)))) => { Self::format_line( context, "the bitwise operators are forbidden for the `field` type", location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Value(ValueError::Integer(IntegerValueError::ForbiddenFieldNegation)))) | Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::ForbiddenFieldNegation)))) => { Self::format_line( context, "the negation operator `-` is forbidden for the `field` type", location, Some("`field` type values cannot be negative"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::ZeroDivision)))) => { Self::format_line( context, "division by zero", location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::ZeroRemainder)))) => { Self::format_line( context, "remainder of division by zero", location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::IntegerTooLarge { value, bitlength })))) => { Self::format_line( context, format!("integer `{}` is larger than `{}` bits", value, bitlength).as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Constant(ConstantError::Integer(IntegerConstantError::UnsignedNegative { value, r#type })))) => { Self::format_line( context, format!("found a negative value `{}` of unsigned type `{}`", value, r#type).as_str(), location, 
None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::AliasDoesNotPointToType { found }))) => { Self::format_line( context, format!( "expected type, found `{}`", found ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::AliasDoesNotPointToStructure { found }))) => { Self::format_line( context, format!( "expected structure type, found `{}`", found ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Scope(ScopeError::ItemRedeclared { location, name, reference })) => { Self::format_line_with_reference( context, format!( "item `{}` already declared here", name ) .as_str(), location, reference, Some("consider giving the latter item another name"), ) } Self::Semantic(SemanticError::Scope(ScopeError::ItemUndeclared { location, name })) => { Self::format_line( context, format!( "cannot find item `{}` in this scope", name ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Scope(ScopeError::ItemIsNotNamespace { location, name })) => { Self::format_line( context, format!( "item `{}` is not a namespace", name ) .as_str(), location, Some("only modules, structures, and enumerations can contain items within their namespaces"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::ArgumentCount { function, expected, found })))) => { Self::format_line( context, format!( "function `{}` expected {} arguments, found {}", function, expected, found ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::ArgumentType { function, name, position, expected, found })))) => { Self::format_line( context, format!( "function `{}` expected type `{}` as the argument `{}` (#{}), found `{}`", function, expected, name, position, found ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::ArgumentConstantness { function, name, position, found })))) => { Self::format_line( context, format!( "function `{}` expected a constant as the argument `{}` (#{}), found a non-constant of type `{}`", function, name, position, found ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::ArgumentNotEvaluable { function, position, found })))) => { Self::format_line( context, format!( "function `{}` expected a value as the argument #{}, found `{}`", function, position, found ) .as_str(), location, None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::ReturnType { function, expected, found, reference })))) => { Self::format_line_with_reference( context, format!( "function `{}` must return a value of type `{}`, found `{}`", function, expected, found ) .as_str(), location, Some(reference), None, ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::NonCallable { name })))) => { Self::format_line( context, format!( "attempt to call a non-callable item `{}`", name ) .as_str(), location, Some("only functions may be called"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::FunctionMethodSelfNotFirst { function, position, reference })))) => { Self::format_line_with_reference( context, format!( "method `{}` expected the `{}` binding to be at the first 
position, but found at the position #`{}`", function, Keyword::SelfLowercase.to_string(), position, ) .as_str(), location, Some(reference), Some(format!("consider moving the `{}` binding to the first place", Keyword::SelfLowercase.to_string()).as_str()), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::BuiltIn(BuiltInFunctionTypeError::Unknown { function }))))) => { Self::format_line( context, format!( "attempt to call a non-builtin function `{}` with `!` specifier", function ) .as_str(), location, Some("only built-in functions require the `!` symbol after the function name"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::BuiltIn(BuiltInFunctionTypeError::SpecifierMissing { function }))))) => { Self::format_line( context, format!( "attempt to call a builtin function `{}` without `!` specifier", function ) .as_str(), location, Some("built-in functions require the `!` symbol after the function name"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::BuiltIn(BuiltInFunctionTypeError::DebugArgumentCount { expected, found }))))) => { Self::format_line( context, format!( "the `dbg!` function expected {} arguments, but got {}", expected, found, ) .as_str(), location, Some("the number of `dbg!` arguments after the format string must be equal to the number of placeholders, e.g. `dbg!(\"{}, {}\", a, b)`"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::StandardLibrary(StandardLibraryFunctionTypeError::ArrayTruncatingToBiggerSize { from, to }))))) => { Self::format_line( context, format!( "attempt to truncate an array from size `{}` to bigger size `{}`", from, to, ) .as_str(), location, Some("consider truncating the array to a smaller size"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::StandardLibrary(StandardLibraryFunctionTypeError::ArrayPaddingToLesserSize { from, to }))))) => { Self::format_line( context, format!( "attempt to pad an array from size `{}` to lesser size `{}`", from, to, ) .as_str(), location, Some("consider padding the array to a bigger size"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Function(FunctionTypeError::StandardLibrary(StandardLibraryFunctionTypeError::ArrayNewLengthInvalid { value }))))) => { Self::format_line( context, format!( "new array length `{}` cannot act as an index", value, ) .as_str(), location, Some("array indexes cannot be greater than maximum of `u64`"), ) } Self::Semantic(SemanticError::Element(location, ElementError::Type(TypeError::Structure(StructureTypeError::DuplicateField { type_identifier, field_name })))) => { Self::format_line( context, format!( "structure `{}` has a duplicate field `{}`", type_identifier, field_name, ) .as_str(), location, Some("consider giving the field a unique name"), ) } Self::Semantic(SemanticError::MatchScrutineeInvalidType { location, found }) => { Self::format_line( context, format!("match scrutinee expected a boolean or integer expression, found `{}`", found).as_str(), location, None, ) } Self::Semantic(SemanticError::MatchNotExhausted { location }) => { Self::format_line( context, "match expression must be exhaustive", location, Some("ensure that all possible cases are being handled, possibly by adding wildcards or more match arms"), ) } 
Self::Semantic(SemanticError::MatchLessThanTwoBranches { location }) => { Self::format_line( context, "match expression must have at least two branches", location, Some("consider adding some branches to make the expression useful"), ) } Self::Semantic(SemanticError::MatchBranchUnreachable { location }) => { Self::format_line( context, "match expression branch is unreachable", location, Some("consider removing the branch or moving it above the branch with a wildcard or irrefutable binding"), ) } Self::Semantic(SemanticError::MatchBranchPatternPathExpectedConstant { location, found }) => { Self::format_line( context, format!("expected path to a constant, found `{}`", found).as_str(), location, None, ) } Self::Semantic(SemanticError::MatchBranchPatternInvalidType { location, expected, found, reference }) => { Self::format_line_with_reference( context, format!("expected `{}`, found `{}`", expected, found).as_str(), location, Some(reference), Some("all branch patterns must be compatible with the type of the expression being matched"), ) } Self::Semantic(SemanticError::MatchBranchExpressionInvalidType { location, expected, found, reference }) => { Self::format_line_with_reference( context, format!("expected `{}`, found `{}`", expected, found).as_str(), location, Some(reference), Some("all branches must return the type returned by the first branch"), ) } Self::Semantic(SemanticError::MatchBranchDuplicate { location, reference }) => { Self::format_line_with_reference( context, "match expression contains a duplicate branch pattern", location, Some(reference), Some("each pattern may occur only once"), ) } Self::Semantic(SemanticError::LoopWhileExpectedBooleanCondition { location, found }) => { Self::format_line( context, format!("expected `bool`, found `{}`", found).as_str(), location, None, ) } Self::Semantic(SemanticError::LoopBoundsExpectedConstantRangeExpression { location, found }) => { Self::format_line( context, format!("expected a constant range expression, found `{}`", found).as_str(), location, Some("only constant ranges allowed, e.g. `for i in 0..42 { ... 
}`"), ) } Self::Semantic(SemanticError::ConditionalExpectedBooleanCondition { location, found }) => { Self::format_line( context, format!("expected `bool`, found `{}`", found).as_str(), location, None, ) } Self::Semantic(SemanticError::ConditionalBranchTypesMismatch { location, expected, found, reference }) => { Self::format_line_with_reference( context, format!("if and else branches return incompatible types `{}` and `{}`", expected, found).as_str(), location, Some(reference), None, ) } Self::Semantic(SemanticError::EntryPointMissing) => { Self::format_message( "function `main` is missing", Some("create the `main` function in the entry point file `main.zn`"), ) } Self::Semantic(SemanticError::ModuleNotFound { location, name }) => { Self::format_line( context, format!( "file not found for module `{}`", name ) .as_str(), location, Some(format!("create a file called `{}.zn` inside the `src` directory", name).as_str()), ) } Self::Semantic(SemanticError::UseExpectedPath { location, found }) => { Self::format_line( context, format!( "`use` expected an item path, but got `{}`", found ) .as_str(), location, Some("consider specifying a valid path to an item to import"), ) } Self::Semantic(SemanticError::ImplStatementExpectedStructureOrEnumeration { location, found }) => { Self::format_line( context, format!( "`impl` expected a type with namespace, found `{}`", found ) .as_str(), location, Some("only structures and enumerations can have an implementation"), ) } Self::Semantic(SemanticError::ConstantExpressionHasNonConstantElement { location, found }) => { Self::format_line( context, format!("attempt to use a non-constant value `{}` in a constant expression", found).as_str(), location, None, ) } } } fn format_message(message: &str, help: Option<&str>) -> String { let mut strings = Vec::with_capacity(8); strings.push(String::new()); strings.push(format!( "{}: {}", "error".bright_red(), message.bright_white() )); if let Some(help) = help { strings.push(format!("{}: {}", "help".bright_white(), help.bright_blue())); } strings.push(String::new()); strings.join("\n") } fn format_line( context: &[&str], message: &str, location: Location, help: Option<&str>, ) -> String { let line_number_length = location.line.to_string().len(); let mut strings = Vec::with_capacity(8); strings.push(String::new()); strings.push(format!( "{}: {}", "error".bright_red(), message.bright_white() )); strings.push(format!(" {} {}", "-->".bright_cyan(), location)); strings.push(format!( "{}{}", " ".repeat(line_number_length + 1), "|".bright_cyan() )); if let Some(line) = context.get(location.line - 1) { strings.push(format!( "{}{}", (location.line.to_string() + " | ").bright_cyan(), line )); } strings.push(format!( "{}{} {}{}", " ".repeat(line_number_length + 1), "|".bright_cyan(), "_".repeat(location.column - 1).bright_red(), "^".bright_red() )); if let Some(help) = help { strings.push(format!("{}: {}", "help".bright_white(), help.bright_blue())); } strings.push(String::new()); strings.join("\n") } fn format_line_with_reference( context: &[&str], message: &str, location: Location, reference: Option<Location>, help: Option<&str>, ) -> String { let line_number_length = location.line.to_string().len(); let mut strings = Vec::with_capacity(11); strings.push(String::new()); strings.push(format!( "{}: {}", "error".bright_red(), message.bright_white() )); if let Some(reference) = reference { let line_number_length = reference.line.to_string().len(); strings.push(format!( "{}{}", " ".repeat(line_number_length + 1), "|".bright_cyan() )); if 
let Some(line) = context.get(reference.line - 1) { strings.push(format!( "{}{}", (reference.line.to_string() + " | ").bright_cyan(), line )); } strings.push(format!( "{}{} {}{}", " ".repeat(line_number_length + 1), "|".bright_cyan(), "_".repeat(reference.column - 1).bright_red(), "^".bright_red() )); } strings.push(format!(" {} {}", "-->".bright_cyan(), location)); strings.push(format!( "{}{}", " ".repeat(line_number_length + 1), "|".bright_cyan() )); if let Some(line) = context.get(location.line - 1) { strings.push(format!( "{}{}", (location.line.to_string() + " | ").bright_cyan(), line )); } strings.push(format!( "{}{} {}{}", " ".repeat(line_number_length + 1), "|".bright_cyan(), "_".repeat(location.column - 1).bright_red(), "^".bright_red() )); if let Some(help) = help { strings.push(format!("{}: {}", "help".bright_white(), help.bright_blue())); } strings.push(String::new()); strings.join("\n") } fn format_range( context: &[&str], message: &'static str, start: Location, end: Location, help: Option<&str>, ) -> String { let line_number_length = end.line.to_string().len(); let mut strings = Vec::with_capacity(8 + end.line - start.line); strings.push(String::new()); strings.push(format!( "{}: {}", "error".bright_red(), message.bright_white() )); strings.push(format!(" {} {}", "-->".bright_cyan(), start)); strings.push(format!( "{}{}", " ".repeat(line_number_length + 1), "|".bright_cyan() )); for line_number in start.line..=end.line { if let Some(line) = context.get(line_number - 1) { strings.push(format!( "{}{}", (line_number.to_string() + " | ").bright_cyan(), line )); } } strings.push(format!( "{}{} {}{}", " ".repeat(line_number_length + 1), "|".bright_cyan(), "_".repeat(end.column - 1).bright_red(), "^".bright_red() )); if let Some(help) = help { strings.push(format!("{}: {}", "help".bright_white(), help.bright_blue())); } strings.push(String::new()); strings.join("\n") } } impl From<FileError> for Error { fn from(error: FileError) -> Self { Self::File(error) } } impl From<LexicalError> for Error { fn from(error: LexicalError) -> Self { Self::Lexical(error) } } impl From<SyntaxError> for Error { fn from(error: SyntaxError) -> Self { Self::Syntax(error) } } impl From<SemanticError> for Error { fn from(error: SemanticError) -> Self { Self::Semantic(error) } }
49.347545
218
0.497657
deccef2e9d312eb954858222d31a8fb2e2b9880a
2,144
use crate::github::Repository; pub fn try_parse_repository(src: &str) -> Result<Repository, String> { if src.is_empty() { return Err("Invalid repository. Cannot be empty".to_string()); } if !src.contains('/') { return Err("Invalid repository. Use {owner}/{repo} format".to_string()); } let parts = src .split('/') .filter(|x| !x.is_empty()) .collect::<Vec<&str>>(); if parts.len() < 2 { return Err("Invalid repository. Missing owner or repo".to_string()); } Ok(Repository { owner: parts[0].to_string(), repo: parts[1].to_string(), }) } #[cfg(test)] mod tests { use super::*; #[test] fn valid_repository() { let input = "foo/bar"; let result = try_parse_repository(input); assert_eq!( Ok(Repository { owner: "foo".to_string(), repo: "bar".to_string() }), result ); } #[test] fn missing_owner() { let input = "/bar"; let result = try_parse_repository(input); assert_error(|e| assert_contains("Missing owner or repo", e), result); } #[test] fn missing_repo() { let input = "foo/"; let result = try_parse_repository(input); assert_error(|e| assert_contains("Missing owner or repo", e), result); } #[test] fn empty_repository() { let input = ""; let result = try_parse_repository(input); assert_error(|e| assert_contains("Cannot be empty", e), result); } fn assert_error<F>(assert: F, actual: Result<Repository, String>) where F: FnOnce(&str), { if actual.is_ok() { panic!("actual is ok: {:#?}", actual.unwrap()) } let error = actual.err().unwrap(); assert(&error); } fn assert_contains(expected: &str, actual: &str) { if !actual.contains(expected) { panic!( "'{actual}' does not contain '{expected}'", expected = expected, actual = actual ) } } }
23.56044
80
0.522388
bbeb193dba32b4c8970d8bc18160d4d2dd5563c7
491
use std::hash::BuildHasherDefault; pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; pub type FxIndexMap<K, V> = indexmap::IndexMap<K, V, BuildHasherDefault<FxHasher>>; pub type FxIndexSet<V> = indexmap::IndexSet<V, BuildHasherDefault<FxHasher>>; #[macro_export] macro_rules! define_id_collections { ($map_name:ident, $set_name:ident, $key:ty) => { pub type $map_name<T> = $crate::fx::FxHashMap<$key, T>; pub type $set_name = $crate::fx::FxHashSet<$key>; }; }
32.733333
83
0.690428
d6a6f9be639de0f65a68c294639f2ed3b28c8eee
253
use opennode::error::RequestError; #[derive(Debug)] pub enum Error { /// An error reported by Opennode in the response body. Opennode(RequestError), /// A networking error communicating with the Opennode server. Http(reqwest::Error), }
25.3
66
0.70751
2fb846c108d808d37d88c284dcece02b72a112aa
5,660
extern crate int; extern crate bitrw; extern crate tbe; extern crate num_traits; pub trait UniversalSet { type T: int::UInt; fn size(self) -> Self::T; } /// pub trait OrderedSet { type T: int::UInt; /// 0 <= value_size() fn value_size(&self) -> Self::T; /// 0 <= size() <= value_size() fn size(&self) -> Self::T; /// 0 <= i < size() /// 0 <= get(i) < value_size() fn get(&self, i: Self::T) -> Self::T; } /// pub trait OrderedSetBuilder { type T: int::UInt; fn add(&mut self, i: Self::T, value: Self::T); } pub trait CreateOrderedSet: Copy { type T: int::UInt; type B: OrderedSetBuilder<T = Self::T>; type S: OrderedSet<T = Self::T>; fn value_size(self) -> Self::T; fn new(self, size: Self::T, f: &mut FnMut(&mut Self::B) -> std::io::Result<()>) -> std::io::Result<Self::S>; } struct Range2D<T: int::UInt> { pub offset: T, pub size: T, pub value_offset: T, pub value_size: T, } impl<T: int::UInt> Range2D<T> { fn next_i(&self) -> T { self.size >> 1u8 } fn split(self, i: T, value_i: T) -> (Self, Self) { let j = i + T::_1; let value_j = value_i + T::_1; ( Range2D { offset: self.offset, size: i, value_offset: self.value_offset, value_size: value_i }, Range2D { offset: self.offset + j, size: self.size - j, value_offset: self.value_offset + value_j, value_size: self.value_size - value_j } ) } fn tbe(&self) -> tbe::TbeStruct<T> { use tbe::Tbe; (self.value_size - self.size + T::_1).tbe() } fn new(size: T, value_size: T) -> Self { Self { offset: T::_0, size: size, value_offset: T::_0, value_size: value_size } } } pub trait WriteSet { fn ordered_set_write<S: OrderedSet>(self, s: &S) -> std::io::Result<()>; } struct WriteFrame<'t, 'b, S: OrderedSet> { set: &'t S, w: &'t mut bitrw::BitWrite<'b>, } impl<S: OrderedSet> WriteFrame<'_, '_, S> { fn subset_write(&mut self, range: Range2D<S::T>) -> std::io::Result<()> { use int::UInt; if S::T::_0 < range.size && range.size < range.value_size { use tbe::TbeWrite; use num_traits::cast::AsPrimitive; let i = range.next_i(); let value_i = self.set.get(range.offset + i) - range.value_offset; self.w.write_tbe(range.tbe(), value_i - i)?; let (left, right) = range.split(i, value_i); self.subset_write(left)?; self.subset_write(right)?; } Ok(()) } } impl WriteSet for &mut bitrw::BitWrite<'_> { fn ordered_set_write<S: OrderedSet>(self, s: &S) -> std::io::Result<()> { use tbe::Tbe; use tbe::TbeWrite; let size = s.size(); let value_size = s.value_size(); self.write_tbe(value_size.tbe(), size)?; let mut x = WriteFrame { set: s, w: self }; x.subset_write(Range2D::new(size, value_size))?; Ok(()) } } pub trait ReadSet { fn ordered_set_read<C: CreateOrderedSet>(&mut self, c: C) -> std::io::Result<C::S>; } struct ReadFrame<'t, 'b, B: OrderedSetBuilder> { builder: &'t mut B, r: &'t mut bitrw::BitRead<'b>, } impl<B: OrderedSetBuilder> ReadFrame<'_, '_, B> { fn subset_read(&mut self, range: Range2D<B::T>) -> std::io::Result<()> { use int::UInt; if B::T::_0 < range.size && range.size <= range.value_size { use tbe::TbeRead; let i = range.next_i(); let value_i = self.r.read_tbe(range.tbe())? 
+ i; self.builder.add(range.offset + i, range.value_offset + value_i); let (left, right) = range.split(i, value_i); self.subset_read(left)?; self.subset_read(right)?; } Ok(()) } } impl ReadSet for bitrw::BitRead<'_> { fn ordered_set_read<C: CreateOrderedSet>(&mut self, c: C) -> std::io::Result<C::S> { use tbe::TbeRead; use tbe::Tbe; let value_size = c.value_size(); let size = self.read_tbe(value_size.tbe())?; c.new(size, &mut |b| { ReadFrame { builder: b, r: self }.subset_read(Range2D::new(size, value_size)) }) } } #[derive(Debug)] pub struct ByteSet { pub data: Vec<u8> } impl ByteSet { pub fn get_data(&self) -> &Vec<u8> { &self.data } } fn byte_set_value_size() -> u16 { use int::UInt; (u8::MAX_VALUE as u16) + 1 } impl OrderedSet for ByteSet { type T = u16; /// 0 <= value_size() fn value_size(&self) -> Self::T { byte_set_value_size() } /// 0 <= size() <= value_size() fn size(&self) -> Self::T { self.data.len() as u16 } /// 0 <= i < size() /// 0 <= get(i) < value_size() fn get(&self, i: Self::T) -> Self::T { self.data[i as usize] as u16 } } impl OrderedSetBuilder for ByteSet { type T = u16; fn add(&mut self, i: Self::T, value: Self::T) { self.data[i as usize] = value as u8; } } pub struct CreateByteSet { } impl Clone for CreateByteSet { fn clone(&self) -> Self { CreateByteSet {} } } impl Copy for CreateByteSet {} impl CreateOrderedSet for CreateByteSet { type T = u16; type B = ByteSet; type S = ByteSet; fn value_size(self) -> Self::T { byte_set_value_size() } fn new(self, size: Self::T, f: &mut FnMut(&mut Self::B) -> std::io::Result<()>) -> std::io::Result<Self::S> { let mut result = ByteSet { data: vec![0; size as usize] }; f(&mut result)?; Ok(result) } }
27.475728
113
0.541519
e5c81556e26b327b8390a1ddb8b9fee0ec80a136
244
// Test a default that references `Self` which is then used in an object type. // Issue #18956. trait Foo<T=Self> { fn method(&self); } fn foo(x: &dyn Foo) { } //~^ ERROR the type parameter `T` must be explicitly specified fn main() { }
20.333333
78
0.651639
4b3ad0d4a933b9e374bf22f4e45903207e4f896a
25,974
use std::{ collections::BTreeMap, convert::{TryFrom, TryInto}, sync::Arc, }; use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH}; use crate::{database::DatabaseGuard, pdu::PduBuilder, utils, ConduitResult, Error, Ruma}; use ruma::{ api::client::{ error::ErrorKind, r0::{ account::{ change_password, deactivate, get_username_availability, register, whoami, ThirdPartyIdRemovalStatus, }, contact::get_contacts, uiaa::{AuthFlow, AuthType, UiaaInfo}, }, }, events::{ room::{ canonical_alias::RoomCanonicalAliasEventContent, create::RoomCreateEventContent, guest_access::{GuestAccess, RoomGuestAccessEventContent}, history_visibility::{HistoryVisibility, RoomHistoryVisibilityEventContent}, join_rules::{JoinRule, RoomJoinRulesEventContent}, member::{MembershipState, RoomMemberEventContent}, message::RoomMessageEventContent, name::RoomNameEventContent, power_levels::RoomPowerLevelsEventContent, topic::RoomTopicEventContent, }, EventType, }, identifiers::RoomName, push, RoomAliasId, RoomId, RoomVersionId, UserId, }; use serde_json::value::to_raw_value; use tracing::info; use register::RegistrationKind; #[cfg(feature = "conduit_bin")] use rocket::{get, post}; const GUEST_NAME_LENGTH: usize = 10; /// # `GET /_matrix/client/r0/register/available` /// /// Checks if a username is valid and available on this server. /// /// Conditions for returning true: /// - The user id is not historical /// - The server name of the user id matches this server /// - No user or appservice on this server already claimed this username /// /// Note: This will not reserve the username, so the username might become invalid when trying to register #[cfg_attr( feature = "conduit_bin", get("/_matrix/client/r0/register/available", data = "<body>") )] #[tracing::instrument(skip(db, body))] pub async fn get_register_available_route( db: DatabaseGuard, body: Ruma<get_username_availability::Request<'_>>, ) -> ConduitResult<get_username_availability::Response> { // Validate user id let user_id = UserId::parse_with_server_name(body.username.clone(), db.globals.server_name()) .ok() .filter(|user_id| { !user_id.is_historical() && user_id.server_name() == db.globals.server_name() }) .ok_or(Error::BadRequest( ErrorKind::InvalidUsername, "Username is invalid.", ))?; // Check if username is creative enough if db.users.exists(&user_id)? { return Err(Error::BadRequest( ErrorKind::UserInUse, "Desired user ID is already taken.", )); } // TODO add check for appservice namespaces // If none of the checks above failed, we have a username that's available to be used. Ok(get_username_availability::Response { available: true }.into()) } /// # `POST /_matrix/client/r0/register` /// /// Register an account on this homeserver. /// /// You can use [`GET /_matrix/client/r0/register/available`](fn.get_register_available_route.html) /// to check if the user id is valid and available.
/// /// - Only works if registration is enabled /// - If type is guest: ignores all parameters except initial_device_display_name /// - If sender is not appservice: Requires UIAA (but we only use a dummy stage) /// - If type is not guest and no username is given: Always fails after UIAA check /// - Creates a new account and populates it with default account data /// - If `inhibit_login` is false: Creates a device and returns device id and access_token #[cfg_attr( feature = "conduit_bin", post("/_matrix/client/r0/register", data = "<body>") )] #[tracing::instrument(skip(db, body))] pub async fn register_route( db: DatabaseGuard, body: Ruma<register::Request<'_>>, ) -> ConduitResult<register::Response> { if !db.globals.allow_registration() && !body.from_appservice { return Err(Error::BadRequest( ErrorKind::Forbidden, "Registration has been disabled.", )); } let is_guest = body.kind == RegistrationKind::Guest; let mut missing_username = false; // Validate user id let user_id = UserId::parse_with_server_name( if is_guest { utils::random_string(GUEST_NAME_LENGTH) } else { body.username.clone().unwrap_or_else(|| { // If the user didn't send a username field, that means the client is just trying // to get a UIAA error to see available flows missing_username = true; // Just give the user a random name. He won't be able to register with it anyway. utils::random_string(GUEST_NAME_LENGTH) }) } .to_lowercase(), db.globals.server_name(), ) .ok() .filter(|user_id| !user_id.is_historical() && user_id.server_name() == db.globals.server_name()) .ok_or(Error::BadRequest( ErrorKind::InvalidUsername, "Username is invalid.", ))?; // Check if username is creative enough if db.users.exists(&user_id)? { return Err(Error::BadRequest( ErrorKind::UserInUse, "Desired user ID is already taken.", )); } // UIAA let mut uiaainfo = UiaaInfo { flows: vec![AuthFlow { stages: vec![AuthType::Dummy], }], completed: Vec::new(), params: Default::default(), session: None, auth_error: None, }; if !body.from_appservice { if let Some(auth) = &body.auth { let (worked, uiaainfo) = db.uiaa.try_auth( &UserId::parse_with_server_name("", db.globals.server_name()) .expect("we know this is valid"), "".into(), auth, &uiaainfo, &db.users, &db.globals, )?; if !worked { return Err(Error::Uiaa(uiaainfo)); } // Success!
} else if let Some(json) = body.json_body { uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH)); db.uiaa.create( &UserId::parse_with_server_name("", db.globals.server_name()) .expect("we know this is valid"), "".into(), &uiaainfo, &json, )?; return Err(Error::Uiaa(uiaainfo)); } else { return Err(Error::BadRequest(ErrorKind::NotJson, "Not json.")); } } if missing_username { return Err(Error::BadRequest( ErrorKind::MissingParam, "Missing username field.", )); } let password = if is_guest { None } else { body.password.as_deref() }; // Create user db.users.create(&user_id, password)?; // Default to pretty displayname let displayname = format!("{} ⚡️", user_id.localpart()); db.users .set_displayname(&user_id, Some(displayname.clone()))?; // Initial account data db.account_data.update( None, &user_id, EventType::PushRules, &ruma::events::push_rules::PushRulesEvent { content: ruma::events::push_rules::PushRulesEventContent { global: push::Ruleset::server_default(&user_id), }, }, &db.globals, )?; // Inhibit login does not work for guests if !is_guest && body.inhibit_login { return Ok(register::Response { access_token: None, user_id, device_id: None, } .into()); } // Generate new device id if the user didn't specify one let device_id = if is_guest { None } else { body.device_id.clone() } .unwrap_or_else(|| utils::random_string(DEVICE_ID_LENGTH).into()); // Generate new token for the device let token = utils::random_string(TOKEN_LENGTH); // Create device for this account db.users.create_device( &user_id, &device_id, &token, body.initial_device_display_name.clone(), )?; // If this is the first user on this server, create the admin room if db.users.count()? == 1 { // Create a user for the server let conduit_user = UserId::parse_with_server_name("conduit", db.globals.server_name()) .expect("@conduit:server_name is valid"); db.users.create(&conduit_user, None)?; let room_id = RoomId::new(db.globals.server_name()); db.rooms.get_or_create_shortroomid(&room_id, &db.globals)?; let mutex_state = Arc::clone( db.globals .roomid_mutex_state .write() .unwrap() .entry(room_id.clone()) .or_default(), ); let state_lock = mutex_state.lock().await; let mut content = RoomCreateEventContent::new(conduit_user.clone()); content.federate = true; content.predecessor = None; content.room_version = RoomVersionId::Version6; // 1. The room create event db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomCreate, content: to_raw_value(&content).expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; // 2. Make conduit bot join db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomMember, content: to_raw_value(&RoomMemberEventContent { membership: MembershipState::Join, displayname: None, avatar_url: None, is_direct: None, third_party_invite: None, blurhash: None, reason: None, }) .expect("event is valid, we just created it"), unsigned: None, state_key: Some(conduit_user.to_string()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; // 3. 
Power levels let mut users = BTreeMap::new(); users.insert(conduit_user.clone(), 100.into()); users.insert(user_id.clone(), 100.into()); db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomPowerLevels, content: to_raw_value(&RoomPowerLevelsEventContent { users, ..Default::default() }) .expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; // 4.1 Join Rules db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomJoinRules, content: to_raw_value(&RoomJoinRulesEventContent::new(JoinRule::Invite)) .expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; // 4.2 History Visibility db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomHistoryVisibility, content: to_raw_value(&RoomHistoryVisibilityEventContent::new( HistoryVisibility::Shared, )) .expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; // 4.3 Guest Access db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomGuestAccess, content: to_raw_value(&RoomGuestAccessEventContent::new(GuestAccess::Forbidden)) .expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; // 6. Events implied by name and topic let room_name = Box::<RoomName>::try_from(format!("{} Admin Room", db.globals.server_name())) .expect("Room name is valid"); db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomName, content: to_raw_value(&RoomNameEventContent::new(Some(room_name))) .expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomTopic, content: to_raw_value(&RoomTopicEventContent { topic: format!("Manage {}", db.globals.server_name()), }) .expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; // Room alias let alias: RoomAliasId = format!("#admins:{}", db.globals.server_name()) .try_into() .expect("#admins:server_name is a valid alias name"); db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomCanonicalAlias, content: to_raw_value(&RoomCanonicalAliasEventContent { alias: Some(alias.clone()), alt_aliases: Vec::new(), }) .expect("event is valid, we just created it"), unsigned: None, state_key: Some("".to_owned()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; db.rooms.set_alias(&alias, Some(&room_id), &db.globals)?; // Invite and join the real user db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomMember, content: to_raw_value(&RoomMemberEventContent { membership: MembershipState::Invite, displayname: None, avatar_url: None, is_direct: None, third_party_invite: None, blurhash: None, reason: None, }) .expect("event is valid, we just created it"), unsigned: None, state_key: Some(user_id.to_string()), redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomMember, content: to_raw_value(&RoomMemberEventContent { membership: MembershipState::Join, displayname: 
Some(displayname), avatar_url: None, is_direct: None, third_party_invite: None, blurhash: None, reason: None, }) .expect("event is valid, we just created it"), unsigned: None, state_key: Some(user_id.to_string()), redacts: None, }, &user_id, &room_id, &db, &state_lock, )?; // Send welcome message db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomMessage, content: to_raw_value(&RoomMessageEventContent::text_html( "## Thank you for trying out Conduit!\n\nConduit is currently in Beta. This means you can join and participate in most Matrix rooms, but not all features are supported and you might run into bugs from time to time.\n\nHelpful links:\n> Website: https://conduit.rs\n> Git and Documentation: https://gitlab.com/famedly/conduit\n> Report issues: https://gitlab.com/famedly/conduit/-/issues\n\nHere are some rooms you can join (by typing the command):\n\nConduit room (Ask questions and get notified on updates):\n`/join #conduit:fachschaften.org`\n\nConduit lounge (Off-topic, only Conduit users are allowed to join)\n`/join #conduit-lounge:conduit.rs`".to_owned(), "<h2>Thank you for trying out Conduit!</h2>\n<p>Conduit is currently in Beta. This means you can join and participate in most Matrix rooms, but not all features are supported and you might run into bugs from time to time.</p>\n<p>Helpful links:</p>\n<blockquote>\n<p>Website: https://conduit.rs<br>Git and Documentation: https://gitlab.com/famedly/conduit<br>Report issues: https://gitlab.com/famedly/conduit/-/issues</p>\n</blockquote>\n<p>Here are some rooms you can join (by typing the command):</p>\n<p>Conduit room (Ask questions and get notified on updates):<br><code>/join #conduit:fachschaften.org</code></p>\n<p>Conduit lounge (Off-topic, only Conduit users are allowed to join)<br><code>/join #conduit-lounge:conduit.rs</code></p>\n".to_owned(), )) .expect("event is valid, we just created it"), unsigned: None, state_key: None, redacts: None, }, &conduit_user, &room_id, &db, &state_lock, )?; } info!("{} registered on this server", user_id); db.flush()?; Ok(register::Response { access_token: Some(token), user_id, device_id: Some(device_id), } .into()) } /// # `POST /_matrix/client/r0/account/password` /// /// Changes the password of this account. 
/// /// - Requires UIAA to verify user password /// - Changes the password of the sender user /// - The password hash is calculated using argon2 with 32 character salt, the plain password is /// not saved /// /// If logout_devices is true it does the following for each device except the sender device: /// - Invalidates access token /// - Deletes device metadata (device id, device display name, last seen ip, last seen ts) /// - Forgets to-device events /// - Triggers device list updates #[cfg_attr( feature = "conduit_bin", post("/_matrix/client/r0/account/password", data = "<body>") )] #[tracing::instrument(skip(db, body))] pub async fn change_password_route( db: DatabaseGuard, body: Ruma<change_password::Request<'_>>, ) -> ConduitResult<change_password::Response> { let sender_user = body.sender_user.as_ref().expect("user is authenticated"); let sender_device = body.sender_device.as_ref().expect("user is authenticated"); let mut uiaainfo = UiaaInfo { flows: vec![AuthFlow { stages: vec![AuthType::Password], }], completed: Vec::new(), params: Default::default(), session: None, auth_error: None, }; if let Some(auth) = &body.auth { let (worked, uiaainfo) = db.uiaa.try_auth( sender_user, sender_device, auth, &uiaainfo, &db.users, &db.globals, )?; if !worked { return Err(Error::Uiaa(uiaainfo)); } // Success! } else if let Some(json) = body.json_body { uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH)); db.uiaa .create(sender_user, sender_device, &uiaainfo, &json)?; return Err(Error::Uiaa(uiaainfo)); } else { return Err(Error::BadRequest(ErrorKind::NotJson, "Not json.")); } db.users .set_password(sender_user, Some(&body.new_password))?; if body.logout_devices { // Logout all devices except the current one for id in db .users .all_device_ids(sender_user) .filter_map(|id| id.ok()) .filter(|id| id != sender_device) { db.users.remove_device(sender_user, &id)?; } } db.flush()?; Ok(change_password::Response {}.into()) } /// # `GET _matrix/client/r0/account/whoami` /// /// Get user_id of the sender user. /// /// Note: Also works for Application Services #[cfg_attr( feature = "conduit_bin", get("/_matrix/client/r0/account/whoami", data = "<body>") )] #[tracing::instrument(skip(body))] pub async fn whoami_route(body: Ruma<whoami::Request>) -> ConduitResult<whoami::Response> { let sender_user = body.sender_user.as_ref().expect("user is authenticated"); Ok(whoami::Response { user_id: sender_user.clone(), } .into()) } /// # `POST /_matrix/client/r0/account/deactivate` /// /// Deactivate sender user account. 
/// /// - Leaves all rooms and rejects all invitations /// - Invalidates all access tokens /// - Deletes all device metadata (device id, device display name, last seen ip, last seen ts) /// - Forgets all to-device events /// - Triggers device list updates /// - Removes ability to log in again #[cfg_attr( feature = "conduit_bin", post("/_matrix/client/r0/account/deactivate", data = "<body>") )] #[tracing::instrument(skip(db, body))] pub async fn deactivate_route( db: DatabaseGuard, body: Ruma<deactivate::Request<'_>>, ) -> ConduitResult<deactivate::Response> { let sender_user = body.sender_user.as_ref().expect("user is authenticated"); let sender_device = body.sender_device.as_ref().expect("user is authenticated"); let mut uiaainfo = UiaaInfo { flows: vec![AuthFlow { stages: vec![AuthType::Password], }], completed: Vec::new(), params: Default::default(), session: None, auth_error: None, }; if let Some(auth) = &body.auth { let (worked, uiaainfo) = db.uiaa.try_auth( sender_user, sender_device, auth, &uiaainfo, &db.users, &db.globals, )?; if !worked { return Err(Error::Uiaa(uiaainfo)); } // Success! } else if let Some(json) = body.json_body { uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH)); db.uiaa .create(sender_user, sender_device, &uiaainfo, &json)?; return Err(Error::Uiaa(uiaainfo)); } else { return Err(Error::BadRequest(ErrorKind::NotJson, "Not json.")); } // Leave all joined rooms and reject all invitations // TODO: work over federation invites let all_rooms = db .rooms .rooms_joined(sender_user) .chain( db.rooms .rooms_invited(sender_user) .map(|t| t.map(|(r, _)| r)), ) .collect::<Vec<_>>(); for room_id in all_rooms { let room_id = room_id?; let event = RoomMemberEventContent { membership: MembershipState::Leave, displayname: None, avatar_url: None, is_direct: None, third_party_invite: None, blurhash: None, reason: None, }; let mutex_state = Arc::clone( db.globals .roomid_mutex_state .write() .unwrap() .entry(room_id.clone()) .or_default(), ); let state_lock = mutex_state.lock().await; db.rooms.build_and_append_pdu( PduBuilder { event_type: EventType::RoomMember, content: to_raw_value(&event).expect("event is valid, we just created it"), unsigned: None, state_key: Some(sender_user.to_string()), redacts: None, }, sender_user, &room_id, &db, &state_lock, )?; } // Remove devices and mark account as deactivated db.users.deactivate_account(sender_user)?; info!("{} deactivated their account", sender_user); db.flush()?; Ok(deactivate::Response { id_server_unbind_result: ThirdPartyIdRemovalStatus::NoSupport, } .into()) } /// # `GET _matrix/client/r0/account/3pid` /// /// Get a list of third party identifiers associated with this account. /// /// - Currently always returns empty list #[cfg_attr( feature = "conduit_bin", get("/_matrix/client/r0/account/3pid", data = "<body>") )] pub async fn third_party_route( body: Ruma<get_contacts::Request>, ) -> ConduitResult<get_contacts::Response> { let _sender_user = body.sender_user.as_ref().expect("user is authenticated"); Ok(get_contacts::Response::new(Vec::new()).into()) }
33.908616
779
0.561677
9c0cd6da69439efe686a1b448b9ad61cb3850052
990
use std::net::SocketAddr; use apikit::{ auth::UserIdentity, reject::{InternalServerError, NotFound}, }; use warp::{reply, Reply}; use crate::network::get_storage_node_address; use crate::server::Context; #[tracing::instrument(skip(context, addr))] pub async fn get( _user: UserIdentity, context: Context, addr: Option<SocketAddr>, blob_id: String, ) -> Result<reply::Response, warp::Rejection> { let socket_addr = addr.ok_or_else(|| InternalServerError::from("missing socket address"))?; let storage_node = context .node .indexer() .get_blob_storage_node(&blob_id) .await .map_err(InternalServerError::from)? .ok_or(NotFound)?; let node_address = get_storage_node_address( socket_addr.ip(), storage_node, &context.config, &format!("blob/{}", &blob_id), ) .map_err(InternalServerError::from)?; Ok(warp::redirect::temporary(node_address).into_response()) }
24.75
95
0.653535
bb54c8891f6b504b45eed1e5bebecdbdcbff6559
6,517
///! Inline of `https://github.com/bltavares/atomic-shim`
#[cfg(not(any(
    target_arch = "mips",
    target_arch = "powerpc",
    feature = "mutex"
)))]
pub use std::sync::atomic::{AtomicI64, AtomicU64};

#[cfg(any(target_arch = "mips", target_arch = "powerpc", feature = "mutex"))]
mod shim {
    use parking_lot::{const_rwlock, RwLock};
    use std::sync::atomic::Ordering;

    #[derive(Debug, Default)]
    pub struct AtomicU64 {
        value: RwLock<u64>,
    }

    impl AtomicU64 {
        pub const fn new(v: u64) -> Self {
            Self { value: const_rwlock(v) }
        }

        #[allow(dead_code)]
        pub fn load(&self, _: Ordering) -> u64 {
            *self.value.read()
        }

        #[allow(dead_code)]
        pub fn store(&self, value: u64, _: Ordering) {
            let mut lock = self.value.write();
            *lock = value;
        }

        #[allow(dead_code)]
        pub fn swap(&self, value: u64, _: Ordering) -> u64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = value;
            prev
        }

        #[allow(dead_code)]
        pub fn compare_exchange(
            &self,
            current: u64,
            new: u64,
            _: Ordering,
            _: Ordering,
        ) -> Result<u64, u64> {
            let mut lock = self.value.write();
            let prev = *lock;
            if prev == current {
                *lock = new;
                Ok(current)
            } else {
                Err(prev)
            }
        }

        #[allow(dead_code)]
        pub fn compare_exchange_weak(
            &self,
            current: u64,
            new: u64,
            success: Ordering,
            failure: Ordering,
        ) -> Result<u64, u64> {
            self.compare_exchange(current, new, success, failure)
        }

        #[allow(dead_code)]
        pub fn fetch_add(&self, val: u64, _: Ordering) -> u64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev.wrapping_add(val);
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_sub(&self, val: u64, _: Ordering) -> u64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev.wrapping_sub(val);
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_and(&self, val: u64, _: Ordering) -> u64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev & val;
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_nand(&self, val: u64, _: Ordering) -> u64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = !(prev & val);
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_or(&self, val: u64, _: Ordering) -> u64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev | val;
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_xor(&self, val: u64, _: Ordering) -> u64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev ^ val;
            prev
        }
    }

    impl From<u64> for AtomicU64 {
        fn from(value: u64) -> Self {
            AtomicU64::new(value)
        }
    }

    #[derive(Debug, Default)]
    pub struct AtomicI64 {
        value: RwLock<i64>,
    }

    impl AtomicI64 {
        pub const fn new(v: i64) -> Self {
            Self { value: const_rwlock(v) }
        }

        #[allow(dead_code)]
        pub fn load(&self, _: Ordering) -> i64 {
            *self.value.read()
        }

        #[allow(dead_code)]
        pub fn store(&self, value: i64, _: Ordering) {
            let mut lock = self.value.write();
            *lock = value;
        }

        #[allow(dead_code)]
        pub fn swap(&self, value: i64, _: Ordering) -> i64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = value;
            prev
        }

        #[allow(dead_code)]
        pub fn compare_exchange(
            &self,
            current: i64,
            new: i64,
            _: Ordering,
            _: Ordering,
        ) -> Result<i64, i64> {
            let mut lock = self.value.write();
            let prev = *lock;
            if prev == current {
                *lock = new;
                Ok(current)
            } else {
                Err(prev)
            }
        }

        #[allow(dead_code)]
        pub fn compare_exchange_weak(
            &self,
            current: i64,
            new: i64,
            success: Ordering,
            failure: Ordering,
        ) -> Result<i64, i64> {
            self.compare_exchange(current, new, success, failure)
        }

        #[allow(dead_code)]
        pub fn fetch_add(&self, val: i64, _: Ordering) -> i64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev.wrapping_add(val);
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_sub(&self, val: i64, _: Ordering) -> i64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev.wrapping_sub(val);
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_and(&self, val: i64, _: Ordering) -> i64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev & val;
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_nand(&self, val: i64, _: Ordering) -> i64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = !(prev & val);
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_or(&self, val: i64, _: Ordering) -> i64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev | val;
            prev
        }

        #[allow(dead_code)]
        pub fn fetch_xor(&self, val: i64, _: Ordering) -> i64 {
            let mut lock = self.value.write();
            let prev = *lock;
            *lock = prev ^ val;
            prev
        }
    }

    impl From<i64> for AtomicI64 {
        fn from(value: i64) -> Self {
            AtomicI64::new(value)
        }
    }
}

#[cfg(any(
    target_arch = "mips",
    target_arch = "powerpc",
    feature = "mutex"
))]
pub use shim::{AtomicI64, AtomicU64};
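
// A minimal usage sketch, assuming only the cfg re-exports above: it exercises
// the API surface that the RwLock-based shim shares with the std atomics it
// stands in for (load/store/swap/fetch_add/compare_exchange). The `shim_usage`
// module name is illustrative and is not part of the original atomic-shim inline.
#[cfg(test)]
mod shim_usage {
    use super::AtomicU64;
    use std::sync::atomic::Ordering;

    #[test]
    fn behaves_like_a_std_atomic() {
        let counter = AtomicU64::new(5);
        // fetch_add returns the previous value.
        assert_eq!(counter.fetch_add(3, Ordering::SeqCst), 5);
        assert_eq!(counter.load(Ordering::SeqCst), 8);
        // compare_exchange succeeds only when the stored value matches `current`.
        assert_eq!(
            counter.compare_exchange(8, 42, Ordering::SeqCst, Ordering::SeqCst),
            Ok(8)
        );
        assert_eq!(
            counter.compare_exchange(8, 0, Ordering::SeqCst, Ordering::SeqCst),
            Err(42)
        );
        counter.store(0, Ordering::SeqCst);
        // swap also returns the previous value.
        assert_eq!(counter.swap(7, Ordering::SeqCst), 0);
    }
}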
26.384615
77
0.454657
2961111f80ef60257b70de0bb64d8f6b7aaa11f4
6,998
use crate::*;
use std::str::FromStr;

#[test]
fn loglevels() {
    macro_rules! test_conv {
        ($level:ident, $str1:literal, $str2:literal, $str3:literal) => {
            assert_eq!(Ok(LogLevel::$level), LogLevel::from_str($str1));
            assert_eq!(Ok(LogLevel::$level), LogLevel::from_str($str2));
            assert_eq!(Ok(LogLevel::$level), LogLevel::from_str($str3));
            assert_eq!(format!("{}", LogLevel::$level), $str2);
            assert_eq!(LogLevel::$level.name(), $str2);
        };
    }
    test_conv!(Trace, "trace", "TRACE", "Trace");
    test_conv!(Debug, "debug", "DEBUG", "Debug");
    test_conv!(Info, "info", "INFO", "Info");
    test_conv!(Warn, "warn", "WARN", "Warn");
    test_conv!(Error, "error", "ERROR", "Error");
    test_conv!(Off, "off", "OFF", "Off");
    test_conv!(Audit, "audit", "AUDIT", "Audit");
    test_conv!(Open, "open", "OPEN", "Open");
    test_conv!(Close, "close", "CLOSE", "Close");
    assert_eq!(Err(LogLevelError), LogLevel::from_str("zzzzz"));
    assert_eq!(format!("{}", LogLevelError), "invalid logging level");
    for level in LogLevel::all_levels() {
        assert_eq!(LogLevel::from_str(level.name()), Ok(*level));
    }
}

#[test]
fn logfilter() {
    macro_rules! test_filter {
        ($level:ident, $expect:literal) => {
            assert_eq!(format!("{}", LogFilter::from(LogLevel::$level)), $expect);
        };
    }
    test_filter!(Trace, "LogFilter(TRACE,DEBUG,INFO,WARN,ERROR)");
    test_filter!(Debug, "LogFilter(DEBUG,INFO,WARN,ERROR)");
    test_filter!(Info, "LogFilter(INFO,WARN,ERROR)");
    test_filter!(Warn, "LogFilter(WARN,ERROR)");
    test_filter!(Error, "LogFilter(ERROR)");
    test_filter!(Off, "LogFilter()");
    test_filter!(Audit, "LogFilter(AUDIT)");
    test_filter!(Open, "LogFilter(OPEN,CLOSE)");
    test_filter!(Close, "LogFilter(OPEN,CLOSE)");

    macro_rules! test_fromstr {
        ($in:literal, $expect:literal) => {
            assert_eq!(format!("{}", LogFilter::from_str($in).unwrap()), $expect);
        };
    }
    test_fromstr!("warn", "LogFilter(WARN,ERROR)");
    test_fromstr!("info,audit", "LogFilter(INFO,WARN,ERROR,AUDIT)");
    test_fromstr!("error,open,audit", "LogFilter(ERROR,AUDIT,OPEN,CLOSE)");

    macro_rules! test_all {
        ($all:expr, $expect:literal) => {
            assert_eq!(format!("{}", LogFilter::all(&$all)), $expect);
        };
    }
    test_all!([LogLevel::Warn], "LogFilter(WARN,ERROR)");
    test_all!(
        [LogLevel::Info, LogLevel::Audit],
        "LogFilter(INFO,WARN,ERROR,AUDIT)"
    );
    test_all!(
        [LogLevel::Error, LogLevel::Open, LogLevel::Audit],
        "LogFilter(ERROR,AUDIT,OPEN,CLOSE)"
    );

    assert_eq!(
        LogFilter::new() | LogFilter::from(LogLevel::Audit) | LogFilter::from(LogLevel::Error),
        LogFilter::from_str("audit,error").unwrap()
    );
    assert_eq!(true, LogFilter::new().is_empty());
    assert_eq!(false, LogFilter::from(LogLevel::Audit).is_empty());
}

#[cfg(feature = "logger")]
#[test]
fn logger() {
    use std::fmt::Arguments;
    use std::time::Instant;

    let now = Instant::now();
    let mut stakker = Stakker::new(now);
    let s = &mut stakker;

    struct A;
    impl A {
        fn init(_: CX![]) -> Option<Self> {
            Some(Self)
        }
        fn warn(&self, cx: CX![]) {
            let id = cx.id();
            cx.log(
                id,
                LogLevel::Warn,
                "target",
                format_args!("Warning"),
                |out| out.kv_i64(Some("num"), 1234),
            );
        }
        fn fail(&self, cx: CX![]) {
            fail!(cx, "Called A::fail");
        }
    }

    // Don't test the LogVisitor interface fully here. Leave that for
    // a logging crate.
    #[derive(Default)]
    struct CheckVisitor {
        found_num_i64: bool,
        found_failed_null: bool,
    }
    impl LogVisitor for CheckVisitor {
        fn kv_u64(&mut self, key: Option<&str>, _val: u64) {
            panic!("unexpected kv_u64: {:?}", key);
        }
        fn kv_i64(&mut self, key: Option<&str>, val: i64) {
            assert_eq!(key, Some("num"));
            assert_eq!(val, 1234);
            self.found_num_i64 = true;
        }
        fn kv_f64(&mut self, key: Option<&str>, _val: f64) {
            panic!("unexpected kv_f64: {:?}", key);
        }
        fn kv_bool(&mut self, key: Option<&str>, _val: bool) {
            panic!("unexpected kv_bool: {:?}", key);
        }
        fn kv_null(&mut self, key: Option<&str>) {
            assert_eq!(key, Some("failed"));
            self.found_failed_null = true;
        }
        fn kv_str(&mut self, key: Option<&str>, _val: &str) {
            panic!("unexpected kv_str: {:?}", key);
        }
        fn kv_fmt(&mut self, key: Option<&str>, _val: &Arguments<'_>) {
            panic!("unexpected kv_fmt: {:?}", key);
        }
        fn kv_map(&mut self, key: Option<&str>) {
            panic!("unexpected kv_map: {:?}", key);
        }
        fn kv_mapend(&mut self, key: Option<&str>) {
            panic!("unexpected kv_mapend: {:?}", key);
        }
        fn kv_arr(&mut self, key: Option<&str>) {
            panic!("unexpected kv_arr: {:?}", key);
        }
        fn kv_arrend(&mut self, key: Option<&str>) {
            panic!("unexpected kv_arrend: {:?}", key);
        }
    }

    let mut expect = 0;
    s.set_logger(
        LogFilter::all(&[LogLevel::Warn, LogLevel::Audit, LogLevel::Open]),
        move |core, r| {
            assert_eq!(r.id, 1);
            expect += 1;
            match expect {
                1 => {
                    assert_eq!(r.level, LogLevel::Open);
                    assert_eq!(r.target, "");
                    assert_eq!(format!("{}", r.fmt), "stakker::test::log::logger::A");
                }
                2 => {
                    assert_eq!(r.level, LogLevel::Warn);
                    assert_eq!(r.target, "target");
                    assert_eq!(format!("{}", r.fmt), "Warning");
                    let mut visitor = CheckVisitor::default();
                    (r.kvscan)(&mut visitor);
                    assert_eq!(true, visitor.found_num_i64);
                    assert_eq!(false, visitor.found_failed_null);
                }
                _ => {
                    assert_eq!(r.level, LogLevel::Close);
                    assert_eq!(r.target, "");
                    assert_eq!(format!("{}", r.fmt), "Called A::fail");
                    let mut visitor = CheckVisitor::default();
                    (r.kvscan)(&mut visitor);
                    assert_eq!(false, visitor.found_num_i64);
                    assert_eq!(true, visitor.found_failed_null);
                    core.shutdown(StopCause::Stopped);
                }
            }
        },
    );

    let a = actor!(s, A::init(), ret_nop!());
    call!([a], warn());
    call!([a], fail());
    s.run(now, false);
    assert!(matches!(s.shutdown_reason(), Some(StopCause::Stopped)));
}
34.303922
95
0.516576