hexsha stringlengths 40..40 | size int64 4..1.05M | content stringlengths 4..1.05M | avg_line_length float64 1.33..100 | max_line_length int64 1..1k | alphanum_fraction float64 0.25..1 |
---|---|---|---|---|---|
de99f27067dbcdb352f6fe295106568f507f7dfe | 3,468 | use super::*;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct MacroInput {
pub ident: Ident,
pub vis: Visibility,
pub attrs: Vec<Attribute>,
pub generics: Generics,
pub body: Body,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Body {
Enum(Vec<Variant>),
Struct(VariantData),
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use Generics;
use attr::parsing::outer_attr;
use data::parsing::{visibility, struct_body, enum_body};
use generics::parsing::generics;
use ident::parsing::ident;
named!(pub macro_input -> MacroInput, do_parse!(
attrs: many0!(outer_attr) >>
vis: visibility >>
which: alt!(keyword!("struct") | keyword!("enum")) >>
id: ident >>
generics: generics >>
item: switch!(value!(which),
"struct" => map!(struct_body, move |(wh, body)| MacroInput {
ident: id,
vis: vis,
attrs: attrs,
generics: Generics {
where_clause: wh,
.. generics
},
body: Body::Struct(body),
})
|
"enum" => map!(enum_body, move |(wh, body)| MacroInput {
ident: id,
vis: vis,
attrs: attrs,
generics: Generics {
where_clause: wh,
.. generics
},
body: Body::Enum(body),
})
) >>
(item)
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use attr::FilterAttrs;
use data::VariantData;
use quote::{Tokens, ToTokens};
impl ToTokens for MacroInput {
fn to_tokens(&self, tokens: &mut Tokens) {
for attr in self.attrs.outer() {
attr.to_tokens(tokens);
}
self.vis.to_tokens(tokens);
match self.body {
Body::Enum(_) => tokens.append("enum"),
Body::Struct(_) => tokens.append("struct"),
}
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
match self.body {
Body::Enum(ref variants) => {
self.generics.where_clause.to_tokens(tokens);
tokens.append("{");
for variant in variants {
variant.to_tokens(tokens);
tokens.append(",");
}
tokens.append("}");
}
Body::Struct(ref variant_data) => {
match *variant_data {
VariantData::Struct(_) => {
self.generics.where_clause.to_tokens(tokens);
variant_data.to_tokens(tokens);
// no semicolon
}
VariantData::Tuple(_) => {
variant_data.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
tokens.append(";");
}
VariantData::Unit => {
self.generics.where_clause.to_tokens(tokens);
tokens.append(";");
}
}
}
}
}
}
}
| 31.243243 | 73 | 0.442042 |
e9f9daa432bca970f0850538662a56729dcaec6c | 109 | pub use crate::{
tx::*,
util::*,
api::*,
hash::*,
signature::*,
util::serialize::*
}; | 13.625 | 22 | 0.422018 |
e5aad2b5b5d2cfb33d9bcaeb7352034b796f6523 | 7,005 | // This file is part of Substrate.
// Copyright (C) 2020-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::helper;
use syn::spanned::Spanned;
use quote::ToTokens;
use frame_support_procedural_tools::clean_type_string;
/// List of additional tokens to be used for parsing.
mod keyword {
syn::custom_keyword!(metadata);
syn::custom_keyword!(Event);
syn::custom_keyword!(pallet);
syn::custom_keyword!(generate_deposit);
syn::custom_keyword!(deposit_event);
}
/// Definition for pallet event enum.
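///
/// The item being parsed is typically an enum like the following sketch
/// (the variant and field types here are illustrative only, not taken from
/// this file):
///
/// ```ignore
/// #[pallet::event]
/// #[pallet::generate_deposit(pub(super) fn deposit_event)]
/// pub enum Event<T: Config> {
///     SomethingStored(u32, T::AccountId),
/// }
/// ```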
pub struct EventDef {
/// The index of the event item in the pallet module.
pub index: usize,
/// The keyword Event used (contains span).
pub event: keyword::Event,
/// Event metadata entries: `(name, args, docs)`.
pub metadata: Vec<(syn::Ident, Vec<String>, Vec<syn::Lit>)>,
/// A set of instance usages; must be checked for consistency with the trait.
pub instances: Vec<helper::InstanceUsage>,
/// The kind of generic the type `Event` has.
pub gen_kind: super::GenericKind,
/// Whether the function `deposit_event` must be generated.
pub deposit_event: Option<(syn::Visibility, proc_macro2::Span)>,
/// Where clause used in event definition.
pub where_clause: Option<syn::WhereClause>,
/// The span of the pallet::event attribute.
pub attr_span: proc_macro2::Span,
}
/// Attributes for Event: define the metadata names to use and whether to generate `deposit_event`.
///
/// Syntax is:
/// * `#[pallet::metadata(SomeType = MetadataName, ...)]`
/// * `#[pallet::generate_deposit($vis fn deposit_event)]`
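///
/// For example (a sketch; `BalanceOf<T>` and the quoted names are placeholders,
/// not taken from this file):
/// * `#[pallet::metadata(BalanceOf<T> = "Balance", u32 = "u32")]`
/// * `#[pallet::generate_deposit(pub(super) fn deposit_event)]`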
enum PalletEventAttr {
Metadata {
metadata: Vec<(syn::Type, String)>,
// Span of the attribute
span: proc_macro2::Span,
},
DepositEvent {
fn_vis: syn::Visibility,
// Span for the keyword deposit_event
fn_span: proc_macro2::Span,
// Span of the attribute
span: proc_macro2::Span,
},
}
impl PalletEventAttr {
fn span(&self) -> proc_macro2::Span {
match self {
Self::Metadata { span, .. } => *span,
Self::DepositEvent { span, .. } => *span,
}
}
}
/// Parse for syntax `$Type = "$SomeString"`.
fn parse_event_metadata_element(
input: syn::parse::ParseStream
) -> syn::Result<(syn::Type, String)> {
let typ = input.parse::<syn::Type>()?;
input.parse::<syn::Token![=]>()?;
let ident = input.parse::<syn::LitStr>()?;
Ok((typ, ident.value()))
}
impl syn::parse::Parse for PalletEventAttr {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
input.parse::<syn::Token![#]>()?;
let content;
syn::bracketed!(content in input);
content.parse::<keyword::pallet>()?;
content.parse::<syn::Token![::]>()?;
let lookahead = content.lookahead1();
if lookahead.peek(keyword::metadata) {
let span = content.parse::<keyword::metadata>()?.span();
let metadata_content;
syn::parenthesized!(metadata_content in content);
let metadata = metadata_content
.parse_terminated::<_, syn::Token![,]>(parse_event_metadata_element)?
.into_pairs()
.map(syn::punctuated::Pair::into_value)
.collect();
Ok(PalletEventAttr::Metadata { metadata, span })
} else if lookahead.peek(keyword::generate_deposit) {
let span = content.parse::<keyword::generate_deposit>()?.span();
let generate_content;
syn::parenthesized!(generate_content in content);
let fn_vis = generate_content.parse::<syn::Visibility>()?;
generate_content.parse::<syn::Token![fn]>()?;
let fn_span = generate_content.parse::<keyword::deposit_event>()?.span();
Ok(PalletEventAttr::DepositEvent { fn_vis, span, fn_span })
} else {
Err(lookahead.error())
}
}
}
struct PalletEventAttrInfo {
metadata: Option<Vec<(syn::Type, String)>>,
deposit_event: Option<(syn::Visibility, proc_macro2::Span)>,
}
impl PalletEventAttrInfo {
fn from_attrs(attrs: Vec<PalletEventAttr>) -> syn::Result<Self> {
let mut metadata = None;
let mut deposit_event = None;
for attr in attrs {
match attr {
PalletEventAttr::Metadata { metadata: m, .. } if metadata.is_none() =>
metadata = Some(m),
PalletEventAttr::DepositEvent { fn_vis, fn_span, .. } if deposit_event.is_none() =>
deposit_event = Some((fn_vis, fn_span)),
attr => {
return Err(syn::Error::new(attr.span(), "Duplicate attribute"));
}
}
}
Ok(PalletEventAttrInfo { metadata, deposit_event })
}
}
impl EventDef {
pub fn try_from(
attr_span: proc_macro2::Span,
index: usize,
item: &mut syn::Item,
) -> syn::Result<Self> {
let item = if let syn::Item::Enum(item) = item {
item
} else {
return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected item enum"))
};
let event_attrs: Vec<PalletEventAttr> = helper::take_item_pallet_attrs(&mut item.attrs)?;
let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?;
let metadata = attr_info.metadata.unwrap_or_else(Vec::new);
let deposit_event = attr_info.deposit_event;
if !matches!(item.vis, syn::Visibility::Public(_)) {
let msg = "Invalid pallet::event, `Error` must be public";
return Err(syn::Error::new(item.span(), msg));
}
let where_clause = item.generics.where_clause.clone();
let mut instances = vec![];
// NOTE: Event is not allowed to be only generic on I because it is not supported
// by construct_runtime.
if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? {
instances.push(u);
} else {
// construct_runtime only allows non-generic events for non-instantiable pallets.
instances.push(helper::InstanceUsage {
has_instance: false,
span: item.ident.span(),
})
}
let has_instance = item.generics.type_params().any(|t| t.ident == "I");
let has_config = item.generics.type_params().any(|t| t.ident == "T");
let gen_kind = super::GenericKind::from_gens(has_config, has_instance)
.expect("Checked by `helper::check_type_def_optional_gen` above");
let event = syn::parse2::<keyword::Event>(item.ident.to_token_stream())?;
let metadata = item.variants.iter()
.map(|variant| {
let name = variant.ident.clone();
let docs = helper::get_doc_literals(&variant.attrs);
let args = variant.fields.iter()
.map(|field| {
metadata.iter().find(|m| m.0 == field.ty)
.map(|m| m.1.clone())
.unwrap_or_else(|| {
clean_type_string(&field.ty.to_token_stream().to_string())
})
})
.collect();
(name, args, docs)
})
.collect();
Ok(EventDef {
attr_span,
index,
metadata,
instances,
deposit_event,
event,
gen_kind,
where_clause,
})
}
}
| 30.723684 | 92 | 0.681513 |
2fc545a35a3f8bd9d6c169262280950002242fda | 1,628 | use druid::{
BoxConstraints, Env, Event, EventCtx, LayoutCtx, LifeCycle, LifeCycleCtx,
PaintCtx, Size, UpdateCtx, Widget, WidgetPod,
};
use lapce_data::data::LapceTabData;
use crate::editor::container::LapceEditorContainer;
pub struct EditorDiffSplit {
left: WidgetPod<LapceTabData, LapceEditorContainer>,
right: WidgetPod<LapceTabData, LapceEditorContainer>,
}
impl Widget<LapceTabData> for EditorDiffSplit {
fn event(
&mut self,
ctx: &mut EventCtx,
event: &Event,
data: &mut LapceTabData,
env: &Env,
) {
self.left.event(ctx, event, data, env);
self.right.event(ctx, event, data, env);
}
fn lifecycle(
&mut self,
ctx: &mut LifeCycleCtx,
event: &LifeCycle,
data: &LapceTabData,
env: &Env,
) {
self.left.lifecycle(ctx, event, data, env);
self.right.lifecycle(ctx, event, data, env);
}
fn update(
&mut self,
ctx: &mut UpdateCtx,
_old_data: &LapceTabData,
data: &LapceTabData,
env: &Env,
) {
self.left.update(ctx, data, env);
self.right.update(ctx, data, env);
}
fn layout(
&mut self,
ctx: &mut LayoutCtx,
bc: &BoxConstraints,
data: &LapceTabData,
env: &Env,
) -> Size {
self.left.layout(ctx, bc, data, env);
self.right.layout(ctx, bc, data, env);
bc.max()
}
fn paint(&mut self, ctx: &mut PaintCtx, data: &LapceTabData, env: &Env) {
self.left.paint(ctx, data, env);
self.right.paint(ctx, data, env);
}
}
| 25.046154 | 77 | 0.575553 |
fe4441a67943777ad8465c9ea91ea1e43a327ebb | 707 | #![allow(dead_code)]
use std::env;
use std::path::Path;
use std::fs::File;
use std::io::Read;
mod lib;
// Find a file {{f}} in directory {{d}} and print its contents:
// cargo run {{d}} {{f}}
fn main() {
let args: Vec<String> = env::args().collect();
match lib::find(Path::new(&args[1]), &args[2], &|d| {
println!("File: {:?} matched!", d.file_name().into_string().unwrap());
let mut f = File::open(d.path()).unwrap();
let mut s = String::new();
match f.read_to_string(&mut s) {
Ok(_) => {
println!("{:?}", s);
},
_ => (),
}
}) {
Ok(_) => println!("done"),
Err(e) => panic!("{}", e),
}
}
| 24.37931 | 78 | 0.468175 |
f4f0a69d5882b57b8791c5585991d0aaf4a9344d | 4,624 | use crate::prover::ProverProof;
use crate::verifier::batch_verify;
use crate::{
circuits::{
gate::{CircuitGate, GateType},
polynomials::endosclmul,
wires::*,
},
prover_index::testing::new_index_for_test,
};
use ark_ec::{AffineCurve, ProjectiveCurve};
use ark_ff::{BigInteger, BitIteratorLE, Field, One, PrimeField, UniformRand, Zero};
use array_init::array_init;
use colored::Colorize;
use commitment_dlog::{commitment::CommitmentCurve, srs::endos};
use groupmap::GroupMap;
use mina_curves::pasta::{
fp::Fp as F,
pallas::Affine as Other,
vesta::{Affine, VestaParameters},
};
use oracle::{
poseidon::PlonkSpongeConstantsKimchi,
sponge::{DefaultFqSponge, DefaultFrSponge, ScalarChallenge},
};
use rand::{rngs::StdRng, SeedableRng};
use std::time::Instant;
const PUBLIC: usize = 0;
type SpongeParams = PlonkSpongeConstantsKimchi;
type BaseSponge = DefaultFqSponge<VestaParameters, SpongeParams>;
type ScalarSponge = DefaultFrSponge<F, SpongeParams>;
#[test]
fn endomul_test() {
let bits_per_chunk = 4;
let num_bits = 128;
let chunks = num_bits / bits_per_chunk;
let num_scalars = 100;
assert_eq!(num_bits % bits_per_chunk, 0);
let mut gates = vec![];
let rows_per_scalar = 1 + chunks;
for s in 0..num_scalars {
for i in 0..chunks {
let row = rows_per_scalar * s + i;
gates.push(CircuitGate {
typ: GateType::EndoMul,
wires: Wire::new(row),
coeffs: vec![],
});
}
let row = rows_per_scalar * s + chunks;
gates.push(CircuitGate {
typ: GateType::Zero,
wires: Wire::new(row),
coeffs: vec![],
});
}
let (endo_q, endo_r) = endos::<Other>();
let index = new_index_for_test(gates, PUBLIC);
let mut witness: [Vec<F>; COLUMNS] =
array_init(|_| vec![F::zero(); rows_per_scalar * num_scalars]);
let verifier_index = index.verifier_index();
let group_map = <Affine as CommitmentCurve>::Map::setup();
let rng = &mut StdRng::from_seed([0; 32]);
// let start = Instant::now();
for i in 0..num_scalars {
let bits_lsb: Vec<_> = BitIteratorLE::new(F::rand(rng).into_repr())
.take(num_bits)
.collect();
let x = <Other as AffineCurve>::ScalarField::from_repr(
<F as PrimeField>::BigInt::from_bits_le(&bits_lsb[..]),
)
.unwrap();
let x_scalar = ScalarChallenge(x).to_field(&endo_r);
let base = Other::prime_subgroup_generator();
// let g = Other::prime_subgroup_generator().into_projective();
let acc0 = {
let t = Other::new(endo_q * base.x, base.y, false);
let p = t + base;
let acc = p + p;
(acc.x, acc.y)
};
let bits_msb: Vec<_> = bits_lsb.iter().take(num_bits).copied().rev().collect();
let res = endosclmul::gen_witness(
&mut witness,
i * rows_per_scalar,
endo_q,
(base.x, base.y),
&bits_msb,
acc0,
);
let expected = {
let t = Other::prime_subgroup_generator();
let mut acc = Other::new(acc0.0, acc0.1, false);
for i in (0..(num_bits / 2)).rev() {
let b2i = F::from(bits_lsb[2 * i] as u64);
let b2i1 = F::from(bits_lsb[2 * i + 1] as u64);
let xq = (F::one() + ((endo_q - F::one()) * b2i1)) * t.x;
let yq = (b2i.double() - F::one()) * t.y;
acc = acc + (acc + Other::new(xq, yq, false));
}
acc
};
assert_eq!(
expected,
Other::prime_subgroup_generator()
.into_projective()
.mul(x_scalar.into_repr())
.into_affine()
);
assert_eq!((expected.x, expected.y), res.acc);
assert_eq!(x.into_repr(), res.n.into_repr());
}
let start = Instant::now();
let proof =
ProverProof::create::<BaseSponge, ScalarSponge>(&group_map, witness, &index, vec![])
.unwrap();
println!("{}{:?}", "Prover time: ".yellow(), start.elapsed());
let batch: Vec<_> = vec![(&verifier_index, &proof)];
let start = Instant::now();
match batch_verify::<Affine, BaseSponge, ScalarSponge>(&group_map, &batch) {
Err(error) => panic!("Failure verifying the prover's proofs in batch: {}", error),
Ok(_) => {
println!("{}{:?}", "Verifier time: ".yellow(), start.elapsed());
}
}
}
| 31.033557 | 92 | 0.557526 |
03f57c88f3e98c6794c94eeb03d641b5e3ce8560 | 885 | use amethyst::{
core::{timing::Time, transform::Transform},
ecs::SystemBuilder,
prelude::*,
};
use crate::pong::Ball;
pub struct BallSystem;
impl System for BallSystem {
fn build(self) -> Box<dyn ParallelRunnable> {
Box::new(
SystemBuilder::new("MoveBallsSystem")
.with_query(<(&Ball, &mut Transform)>::query())
.read_resource::<Time>()
.read_component::<Ball>()
.write_component::<Transform>()
.build(move |_commands, world, time, query_balls| {
for (ball, local) in query_balls.iter_mut(world) {
local.prepend_translation_x(ball.velocity[0] * time.delta_seconds());
local.prepend_translation_y(ball.velocity[1] * time.delta_seconds());
}
}),
)
}
}
| 31.607143 | 93 | 0.533333 |
211964cc4ecb98ad73eeee6622dbd79dc2cbfac1 | 31,051 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
#[derive(Clone)]
pub struct Client {
endpoint: String,
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
scopes: Vec<String>,
pipeline: azure_core::pipeline::Pipeline,
}
#[derive(Clone)]
pub struct ClientBuilder {
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
endpoint: Option<String>,
scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
pub(crate) fn endpoint(&self) -> &str {
self.endpoint.as_str()
}
pub(crate) fn token_credential(&self) -> &dyn azure_core::TokenCredential {
self.credential.as_ref()
}
pub(crate) fn scopes(&self) -> Vec<&str> {
self.scopes.iter().map(String::as_str).collect()
}
pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
let mut context = azure_core::Context::default();
let mut request = request.into();
self.pipeline.send(&mut context, &mut request).await
}
pub fn new(endpoint: impl Into<String>, credential: std::sync::Arc<dyn azure_core::TokenCredential>, scopes: Vec<String>) -> Self {
let endpoint = endpoint.into();
let pipeline = azure_core::pipeline::Pipeline::new(
option_env!("CARGO_PKG_NAME"),
option_env!("CARGO_PKG_VERSION"),
azure_core::ClientOptions::default(),
Vec::new(),
Vec::new(),
);
Self {
endpoint,
credential,
scopes,
pipeline,
}
}
pub fn component_current_pricing_plan(&self) -> component_current_pricing_plan::Client {
component_current_pricing_plan::Client(self.clone())
}
pub fn ea_subscription_list_migration_date(&self) -> ea_subscription_list_migration_date::Client {
ea_subscription_list_migration_date::Client(self.clone())
}
pub fn ea_subscription_migrate_to_new_pricing_model(&self) -> ea_subscription_migrate_to_new_pricing_model::Client {
ea_subscription_migrate_to_new_pricing_model::Client(self.clone())
}
pub fn ea_subscription_rollback_to_legacy_pricing_model(&self) -> ea_subscription_rollback_to_legacy_pricing_model::Client {
ea_subscription_rollback_to_legacy_pricing_model::Client(self.clone())
}
}
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
EaSubscriptionMigrateToNewPricingModel_Post(#[from] ea_subscription_migrate_to_new_pricing_model::post::Error),
#[error(transparent)]
EaSubscriptionRollbackToLegacyPricingModel_Post(#[from] ea_subscription_rollback_to_legacy_pricing_model::post::Error),
#[error(transparent)]
EaSubscriptionListMigrationDate_Post(#[from] ea_subscription_list_migration_date::post::Error),
#[error(transparent)]
ComponentCurrentPricingPlan_Get(#[from] component_current_pricing_plan::get::Error),
#[error(transparent)]
ComponentCurrentPricingPlan_CreateAndUpdate(#[from] component_current_pricing_plan::create_and_update::Error),
#[error(transparent)]
ComponentCurrentPricingPlan_Update(#[from] component_current_pricing_plan::update::Error),
}
pub mod ea_subscription_migrate_to_new_pricing_model {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn post(&self, subscription_id: impl Into<String>) -> post::Builder {
post::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
}
pub mod post {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/microsoft.insights/migrateToNewPricingModel",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod ea_subscription_rollback_to_legacy_pricing_model {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn post(&self, subscription_id: impl Into<String>) -> post::Builder {
post::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
}
pub mod post {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/microsoft.insights/rollbackToLegacyPricingModel",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod ea_subscription_list_migration_date {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn post(&self, subscription_id: impl Into<String>) -> post::Builder {
post::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
}
pub mod post {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::EaSubscriptionMigrationDate, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/microsoft.insights/listMigrationdate",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EaSubscriptionMigrationDate =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
pub mod component_current_pricing_plan {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get(
&self,
resource_group_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
subscription_id: subscription_id.into(),
resource_name: resource_name.into(),
}
}
pub fn create_and_update(
&self,
resource_group_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_name: impl Into<String>,
pricing_plan_properties: impl Into<models::ApplicationInsightsComponentPricingPlan>,
) -> create_and_update::Builder {
create_and_update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
subscription_id: subscription_id.into(),
resource_name: resource_name.into(),
pricing_plan_properties: pricing_plan_properties.into(),
}
}
pub fn update(
&self,
resource_group_name: impl Into<String>,
subscription_id: impl Into<String>,
resource_name: impl Into<String>,
pricing_plan_properties: impl Into<models::ApplicationInsightsComponentPricingPlan>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
subscription_id: subscription_id.into(),
resource_name: resource_name.into(),
pricing_plan_properties: pricing_plan_properties.into(),
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ApplicationInsightsComponentPricingPlan, Error>>
{
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/components/{}/pricingPlans/current",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ApplicationInsightsComponentPricingPlan =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod create_and_update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_name: String,
pub(crate) pricing_plan_properties: models::ApplicationInsightsComponentPricingPlan,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ApplicationInsightsComponentPricingPlan, Error>>
{
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/components/{}/pricingPlans/current",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.pricing_plan_properties).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ApplicationInsightsComponentPricingPlan =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) subscription_id: String,
pub(crate) resource_name: String,
pub(crate) pricing_plan_properties: models::ApplicationInsightsComponentPricingPlan,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ApplicationInsightsComponentPricingPlan, Error>>
{
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/components/{}/pricingPlans/current",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.pricing_plan_properties).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ApplicationInsightsComponentPricingPlan =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
| 49.602236 | 135 | 0.540949 |
62941c11b4980eed02cf835cf79b7e452d96ee97 | 931 | //! Sweetest = Streamlined Holochain test utils with lots of added sugar
//!
//! A wrapper around ConductorHandle which provides useful methods for setup
//! and zome calling, as well as some helpful references to Cells and Zomes
//! which make zome interaction much less verbose
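//!
//! A rough usage sketch. The constructor and method names below
//! (`SweetConductor::from_standard_config`, `SweetDnaFile::unique_from_test_wasms`,
//! `setup_app`) are assumptions that may differ between holochain versions, and
//! `TestWasm::Foo`/`hash` are placeholders, so treat this as illustrative only:
//!
//! ```ignore
//! let mut conductor = SweetConductor::from_standard_config().await;
//! let (dna, _) = SweetDnaFile::unique_from_test_wasms(vec![TestWasm::Foo]).await.unwrap();
//! let app = conductor.setup_app("app", &[dna]).await.unwrap();
//! let (cell,) = app.into_tuple();
//! let element: Option<Element> = conductor.call(&cell.zome("foo"), "get_thing", hash).await;
//! ```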
mod sweet_agents;
mod sweet_app;
mod sweet_cell;
mod sweet_conductor;
mod sweet_dna;
mod sweet_network;
mod sweet_zome;
pub use sweet_agents::*;
pub use sweet_app::*;
pub use sweet_cell::*;
pub use sweet_conductor::*;
pub use sweet_dna::*;
pub use sweet_network::*;
pub use sweet_zome::*;
use hdk3::prelude::Element;
use holochain_serialized_bytes::prelude::*;
/// Necessary for parsing the output of a simple "get entry"
// TODO: remove once host fns remove SerializedBytes constraint
#[derive(serde::Serialize, serde::Deserialize, Debug, SerializedBytes)]
#[serde(transparent)]
#[repr(transparent)]
pub struct MaybeElement(pub Option<Element>);
| 29.09375 | 76 | 0.766917 |
5dfd444133a37ffb8b17d857be35936e23fe914b | 18,115 | //! Orphan checker: every impl either implements a trait defined in this
//! crate or pertains to a type defined in this crate.
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::struct_span_err;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_index::bit_set::GrowableBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::ty::subst::{GenericArg, InternalSubsts};
use rustc_middle::ty::{self, ImplPolarity, Ty, TyCtxt, TypeFoldable, TypeVisitor};
use rustc_session::lint;
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::Span;
use rustc_trait_selection::traits;
use std::ops::ControlFlow;
pub(super) fn orphan_check_crate(tcx: TyCtxt<'_>, (): ()) -> &[LocalDefId] {
let mut errors = Vec::new();
for (&trait_def_id, impls_of_trait) in tcx.all_local_trait_impls(()) {
for &impl_of_trait in impls_of_trait {
match orphan_check_impl(tcx, impl_of_trait) {
Ok(()) => {}
Err(ErrorGuaranteed) => errors.push(impl_of_trait),
}
}
if tcx.trait_is_auto(trait_def_id) {
lint_auto_trait_impls(tcx, trait_def_id, impls_of_trait);
}
}
tcx.arena.alloc_slice(&errors)
}
#[instrument(skip(tcx), level = "debug")]
fn orphan_check_impl(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(), ErrorGuaranteed> {
let trait_ref = tcx.impl_trait_ref(def_id).unwrap();
let trait_def_id = trait_ref.def_id;
let item = tcx.hir().item(hir::ItemId { def_id });
let hir::ItemKind::Impl(ref impl_) = item.kind else {
bug!("{:?} is not an impl: {:?}", def_id, item);
};
let sp = tcx.sess.source_map().guess_head_span(item.span);
let tr = impl_.of_trait.as_ref().unwrap();
match traits::orphan_check(tcx, item.def_id.to_def_id()) {
Ok(()) => {}
Err(err) => emit_orphan_check_error(
tcx,
sp,
tr.path.span,
impl_.self_ty.span,
&impl_.generics,
err,
)?,
}
// In addition to the above rules, we restrict impls of auto traits
// so that they can only be implemented on nominal types, such as structs,
// enums or foreign types. To see why this restriction exists, consider the
// following example (#22978). Imagine that crate A defines an auto trait
// `Foo` and a fn that operates on pairs of types:
//
// ```
// // Crate A
// auto trait Foo { }
// fn two_foos<A:Foo,B:Foo>(..) {
// one_foo::<(A,B)>(..)
// }
// fn one_foo<T:Foo>(..) { .. }
// ```
//
// This type-checks fine; in particular the fn
// `two_foos` is able to conclude that `(A,B):Foo`
// because `A:Foo` and `B:Foo`.
//
// Now imagine that crate B comes along and does the following:
//
// ```
// struct A { }
// struct B { }
// impl Foo for A { }
// impl Foo for B { }
// impl !Send for (A, B) { }
// ```
//
// This final impl is legal according to the orphan
// rules, but it invalidates the reasoning from
// `two_foos` above.
debug!(
"trait_ref={:?} trait_def_id={:?} trait_is_auto={}",
trait_ref,
trait_def_id,
tcx.trait_is_auto(trait_def_id)
);
if tcx.trait_is_auto(trait_def_id) && !trait_def_id.is_local() {
let self_ty = trait_ref.self_ty();
let opt_self_def_id = match *self_ty.kind() {
ty::Adt(self_def, _) => Some(self_def.did()),
ty::Foreign(did) => Some(did),
_ => None,
};
let msg = match opt_self_def_id {
// We only want to permit nominal types, but not *all* nominal types.
// They must be local to the current crate, so that people
// can't do `unsafe impl Send for Rc<SomethingLocal>` or
// `impl !Send for Box<SomethingLocalAndSend>`.
Some(self_def_id) => {
if self_def_id.is_local() {
None
} else {
Some((
format!(
"cross-crate traits with a default impl, like `{}`, \
can only be implemented for a struct/enum type \
defined in the current crate",
tcx.def_path_str(trait_def_id)
),
"can't implement cross-crate trait for type in another crate",
))
}
}
_ => Some((
format!(
"cross-crate traits with a default impl, like `{}`, can \
only be implemented for a struct/enum type, not `{}`",
tcx.def_path_str(trait_def_id),
self_ty
),
"can't implement cross-crate trait with a default impl for \
non-struct/enum type",
)),
};
if let Some((msg, label)) = msg {
struct_span_err!(tcx.sess, sp, E0321, "{}", msg).span_label(sp, label).emit();
return Err(ErrorGuaranteed);
}
}
if let ty::Opaque(def_id, _) = *trait_ref.self_ty().kind() {
tcx.sess
.struct_span_err(sp, "cannot implement trait on type alias impl trait")
.span_note(tcx.def_span(def_id), "type alias impl trait defined here")
.emit();
return Err(ErrorGuaranteed);
}
Ok(())
}
fn emit_orphan_check_error<'tcx>(
tcx: TyCtxt<'tcx>,
sp: Span,
trait_span: Span,
self_ty_span: Span,
generics: &hir::Generics<'tcx>,
err: traits::OrphanCheckErr<'tcx>,
) -> Result<!, ErrorGuaranteed> {
Err(match err {
traits::OrphanCheckErr::NonLocalInputType(tys) => {
let mut err = struct_span_err!(
tcx.sess,
sp,
E0117,
"only traits defined in the current crate can be implemented for \
arbitrary types"
);
err.span_label(sp, "impl doesn't use only types from inside the current crate");
for (ty, is_target_ty) in &tys {
let mut ty = *ty;
tcx.infer_ctxt().enter(|infcx| {
// Remove the lifetimes unnecessary for this error.
ty = infcx.freshen(ty);
});
ty = match ty.kind() {
// Remove the type arguments from the output, as they are not relevant.
// You can think of this as the reverse of `resolve_vars_if_possible`.
// That way if we had `Vec<MyType>`, we will properly attribute the
// problem to `Vec<T>` and avoid confusing the user if they were to see
// `MyType` in the error.
ty::Adt(def, _) => tcx.mk_adt(*def, ty::List::empty()),
_ => ty,
};
let this = "this".to_string();
let (ty, postfix) = match &ty.kind() {
ty::Slice(_) => (this, " because slices are always foreign"),
ty::Array(..) => (this, " because arrays are always foreign"),
ty::Tuple(..) => (this, " because tuples are always foreign"),
_ => (format!("`{}`", ty), ""),
};
let msg = format!("{} is not defined in the current crate{}", ty, postfix);
if *is_target_ty {
// Point at `D<A>` in `impl<A, B> for C<B> in D<A>`
err.span_label(self_ty_span, &msg);
} else {
// Point at `C<B>` in `impl<A, B> for C<B> in D<A>`
err.span_label(trait_span, &msg);
}
}
err.note("define and implement a trait or new type instead");
err.emit()
}
traits::OrphanCheckErr::UncoveredTy(param_ty, local_type) => {
let mut sp = sp;
for param in generics.params {
if param.name.ident().to_string() == param_ty.to_string() {
sp = param.span;
}
}
match local_type {
Some(local_type) => struct_span_err!(
tcx.sess,
sp,
E0210,
"type parameter `{}` must be covered by another type \
when it appears before the first local type (`{}`)",
param_ty,
local_type
)
.span_label(
sp,
format!(
"type parameter `{}` must be covered by another type \
when it appears before the first local type (`{}`)",
param_ty, local_type
),
)
.note(
"implementing a foreign trait is only possible if at \
least one of the types for which it is implemented is local, \
and no uncovered type parameters appear before that first \
local type",
)
.note(
"in this case, 'before' refers to the following order: \
`impl<..> ForeignTrait<T1, ..., Tn> for T0`, \
where `T0` is the first and `Tn` is the last",
)
.emit(),
None => struct_span_err!(
tcx.sess,
sp,
E0210,
"type parameter `{}` must be used as the type parameter for some \
local type (e.g., `MyStruct<{}>`)",
param_ty,
param_ty
)
.span_label(
sp,
format!(
"type parameter `{}` must be used as the type parameter for some \
local type",
param_ty,
),
)
.note(
"implementing a foreign trait is only possible if at \
least one of the types for which it is implemented is local",
)
.note(
"only traits defined in the current crate can be \
implemented for a type parameter",
)
.emit(),
}
}
})
}
#[derive(Default)]
struct AreUniqueParamsVisitor {
seen: GrowableBitSet<u32>,
}
#[derive(Copy, Clone)]
enum NotUniqueParam<'tcx> {
DuplicateParam(GenericArg<'tcx>),
NotParam(GenericArg<'tcx>),
}
impl<'tcx> TypeVisitor<'tcx> for AreUniqueParamsVisitor {
type BreakTy = NotUniqueParam<'tcx>;
fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
match t.kind() {
ty::Param(p) => {
if self.seen.insert(p.index) {
ControlFlow::CONTINUE
} else {
ControlFlow::Break(NotUniqueParam::DuplicateParam(t.into()))
}
}
_ => ControlFlow::Break(NotUniqueParam::NotParam(t.into())),
}
}
fn visit_region(&mut self, _: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
// We don't drop candidates during candidate assembly because of region
// constraints, so the behavior for impls only constrained by regions
// will not change.
ControlFlow::CONTINUE
}
fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
match c.val() {
ty::ConstKind::Param(p) => {
if self.seen.insert(p.index) {
ControlFlow::CONTINUE
} else {
ControlFlow::Break(NotUniqueParam::DuplicateParam(c.into()))
}
}
_ => ControlFlow::Break(NotUniqueParam::NotParam(c.into())),
}
}
}
/// Lint impls of auto traits if they are likely to have
/// unsound or surprising effects on auto impls.
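///
/// For example (an illustrative sketch, not taken from this crate):
///
/// ```ignore
/// pub struct Foo<T>(T);
/// // Covers only `Foo<u8>`; the structural auto impl for `Foo<_>` is no longer
/// // generated, so e.g. `Foo<String>` silently stops being `Send`.
/// unsafe impl Send for Foo<u8> {}
/// ```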
fn lint_auto_trait_impls(tcx: TyCtxt<'_>, trait_def_id: DefId, impls: &[LocalDefId]) {
let mut non_covering_impls = Vec::new();
for &impl_def_id in impls {
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
if trait_ref.references_error() {
return;
}
if tcx.impl_polarity(impl_def_id) != ImplPolarity::Positive {
return;
}
assert_eq!(trait_ref.substs.len(), 1);
let self_ty = trait_ref.self_ty();
let (self_type_did, substs) = match self_ty.kind() {
ty::Adt(def, substs) => (def.did(), substs),
_ => {
// FIXME: should also lint for stuff like `&i32` but
// considering that auto traits are unstable, that
// isn't too important for now as this only affects
// crates using `nightly`, and std.
continue;
}
};
// Impls which completely cover a given root type are fine as they
// disable auto impls entirely. So only lint if the substs
// are not a permutation of the identity substs.
match substs.visit_with(&mut AreUniqueParamsVisitor::default()) {
ControlFlow::Continue(()) => {} // ok
ControlFlow::Break(arg) => {
// Ideally:
//
// - compute the requirements for the auto impl candidate
// - check whether these are implied by the non covering impls
// - if not, emit the lint
//
// What we do here is a bit simpler:
//
// - badly check if an auto impl candidate definitely does not apply
// for the given simplified type
// - if so, do not lint
if fast_reject_auto_impl(tcx, trait_def_id, self_ty) {
// ok
} else {
non_covering_impls.push((impl_def_id, self_type_did, arg));
}
}
}
}
for &(impl_def_id, self_type_did, arg) in &non_covering_impls {
tcx.struct_span_lint_hir(
lint::builtin::SUSPICIOUS_AUTO_TRAIT_IMPLS,
tcx.hir().local_def_id_to_hir_id(impl_def_id),
tcx.def_span(impl_def_id),
|err| {
let mut err = err.build(&format!(
"cross-crate traits with a default impl, like `{}`, \
should not be specialized",
tcx.def_path_str(trait_def_id),
));
let item_span = tcx.def_span(self_type_did);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did);
err.span_note(
item_span,
&format!(
"try using the same sequence of generic parameters as the {} definition",
self_descr,
),
);
match arg {
NotUniqueParam::DuplicateParam(arg) => {
err.note(&format!("`{}` is mentioned multiple times", arg));
}
NotUniqueParam::NotParam(arg) => {
err.note(&format!("`{}` is not a generic parameter", arg));
}
}
err.emit();
},
);
}
}
fn fast_reject_auto_impl<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId, self_ty: Ty<'tcx>) -> bool {
struct DisableAutoTraitVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
self_ty_root: Ty<'tcx>,
seen: FxHashSet<DefId>,
}
impl<'tcx> TypeVisitor<'tcx> for DisableAutoTraitVisitor<'tcx> {
type BreakTy = ();
fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
let tcx = self.tcx;
if t != self.self_ty_root {
for impl_def_id in tcx.non_blanket_impls_for_ty(self.trait_def_id, t) {
match tcx.impl_polarity(impl_def_id) {
ImplPolarity::Negative => return ControlFlow::BREAK,
ImplPolarity::Reservation => {}
// FIXME(@lcnr): That's probably not good enough, idk
//
// We might just want to take the rustdoc code and somehow avoid
// explicit impls for `Self`.
ImplPolarity::Positive => return ControlFlow::CONTINUE,
}
}
}
match t.kind() {
ty::Adt(def, substs) if def.is_phantom_data() => substs.super_visit_with(self),
ty::Adt(def, substs) => {
// @lcnr: This is the only place where cycles can happen. We avoid this
// by only visiting each `DefId` once.
//
// This will be incorrect in subtle cases, but I don't care :)
if self.seen.insert(def.did()) {
for ty in def.all_fields().map(|field| field.ty(tcx, substs)) {
ty.visit_with(self)?;
}
}
ControlFlow::CONTINUE
}
_ => t.super_visit_with(self),
}
}
}
let self_ty_root = match self_ty.kind() {
ty::Adt(def, _) => tcx.mk_adt(*def, InternalSubsts::identity_for_item(tcx, def.did())),
_ => unimplemented!("unexpected self ty {:?}", self_ty),
};
self_ty_root
.visit_with(&mut DisableAutoTraitVisitor {
tcx,
self_ty_root,
trait_def_id,
seen: FxHashSet::default(),
})
.is_break()
}
| 38.79015 | 99 | 0.497323 |
f95d1c8e58c7f85bed6e088aa1e95d294d0f0ec4 | 1,857 | use parity_codec::{Decode, Encode};
use chain_core::tx::data::TxId;
use client_common::{Result, Storage, Transaction};
const KEYSPACE: &str = "index_transaction";
/// Exposes functionality for managing transactions
///
/// Stores `transaction_id -> transaction` mapping
#[derive(Default, Clone)]
pub struct TransactionService<S: Storage> {
storage: S,
}
impl<S> TransactionService<S>
where
S: Storage,
{
/// Creates a new instance of transaction service
pub fn new(storage: S) -> Self {
Self { storage }
}
/// Retrieves transaction with given id
pub fn get(&self, id: &TxId) -> Result<Option<Transaction>> {
let transaction = self
.storage
.get(KEYSPACE, id)?
.and_then(|bytes| Transaction::decode(&mut bytes.as_slice()));
Ok(transaction)
}
/// Sets transaction with given id and value
pub fn set(&self, id: &TxId, transaction: &Transaction) -> Result<()> {
self.storage.set(KEYSPACE, id, transaction.encode())?;
Ok(())
}
/// Clears all storage
pub fn clear(&self) -> Result<()> {
self.storage.clear(KEYSPACE)
}
}
#[cfg(test)]
mod tests {
use super::*;
use chain_core::tx::data::Tx;
use client_common::storage::MemoryStorage;
#[test]
fn check_flow() {
let transaction_service = TransactionService::new(MemoryStorage::default());
let id = [0u8; 32];
let transaction = Transaction::TransferTransaction(Tx::default());
assert_eq!(None, transaction_service.get(&id).unwrap());
assert!(transaction_service.set(&id, &transaction).is_ok());
assert_eq!(transaction, transaction_service.get(&id).unwrap().unwrap());
assert!(transaction_service.clear().is_ok());
assert_eq!(None, transaction_service.get(&id).unwrap());
}
}
| 27.308824 | 84 | 0.630587 |
29821d3ea8b041a5d5bfa9e87a860d64e911026a | 721 | use radius2::{Radius, RadiusOption};
fn main() {
let options = vec![RadiusOption::Sims(false)];
let mut radius = Radius::new_with_options(Some("tests/ais3"), &options);
//let verify = radius.get_address("sym.verify").unwrap();
let mut state = radius.call_state(0x004005f6);
let addr: u64 = 0xfff00000;
let flag_val = state.symbolic_value("flag", 24 * 8);
state.memory.write_value(addr, &flag_val, 24);
state.registers.set("rax", state.concrete_value(addr, 64));
radius.breakpoint(0x00400602);
radius.avoid(&[0x0040060e]);
let mut new_state = radius.run(state, 1).unwrap();
let flag = new_state.evaluate_string_value(&flag_val).unwrap();
println!("FLAG: {}", flag);
}
| 37.947368 | 76 | 0.675451 |
64679b3155905ba275cafad96334b8e1218089a3 | 21,068 | //! Quality of service for block producer.
//! Provides logic and functions to allow a Leader to prioritize
//! how transactions are included in blocks, and optimize those blocks.
//!
use {
crate::banking_stage::BatchedTransactionCostDetails,
crossbeam_channel::{unbounded, Receiver, Sender},
solana_measure::measure::Measure,
solana_runtime::{
bank::Bank,
cost_model::{CostModel, TransactionCost},
cost_tracker::CostTrackerError,
},
solana_sdk::{
clock::Slot,
transaction::{self, SanitizedTransaction, TransactionError},
},
std::{
sync::{
atomic::{AtomicBool, AtomicU64, Ordering},
Arc, RwLock,
},
thread::{self, Builder, JoinHandle},
time::Duration,
},
};
pub enum QosMetrics {
BlockBatchUpdate { bank: Arc<Bank> },
}
// QosService is local to each banking thread, each instance of QosService provides services to
// one banking thread.
// It hosts a private thread for async metrics reporting, tagged with the banking thread's ID.
// The banking thread calls `report_metrics(&bank)` at the end of `process_and_record_transaction()`,
// or any time it wants; QosService sends `&bank` to the reporting thread via a channel, signalling
// stats to be reported if the bank slot has changed.
//
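// A rough usage sketch, using only the methods defined below (construction of the
// shared `CostModel` and the `Bank` is elided, and the thread id `0` is arbitrary):
//
//   let qos_service = QosService::new(cost_model, 0);
//   let tx_costs = qos_service.compute_transaction_costs(txs.iter());
//   let (results, num_included) =
//       qos_service.select_transactions_per_cost(txs.iter(), tx_costs.iter(), &bank);
//   qos_service.report_metrics(bank.clone());
//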
pub struct QosService {
// cost_model instance is owned by validator, shared between replay_stage and
// banking_stage. replay_stage writes the latest on-chain program timings to
// it; banking_stage's qos_service reads that information to calculate
// transaction cost, hence RwLock wrapped.
cost_model: Arc<RwLock<CostModel>>,
// QosService hosts metrics object and a private reporting thread, as well as sender to
// communicate with thread.
report_sender: Sender<QosMetrics>,
metrics: Arc<QosServiceMetrics>,
// metrics reporting runs on a private thread
reporting_thread: Option<JoinHandle<()>>,
running_flag: Arc<AtomicBool>,
}
impl Drop for QosService {
fn drop(&mut self) {
self.running_flag.store(false, Ordering::Relaxed);
self.reporting_thread
.take()
.unwrap()
.join()
.expect("qos service metrics reporting thread failed to join");
}
}
impl QosService {
pub fn new(cost_model: Arc<RwLock<CostModel>>, id: u32) -> Self {
let (report_sender, report_receiver) = unbounded();
let running_flag = Arc::new(AtomicBool::new(true));
let metrics = Arc::new(QosServiceMetrics::new(id));
let running_flag_clone = running_flag.clone();
let metrics_clone = metrics.clone();
let reporting_thread = Some(
Builder::new()
.name("solana-qos-service-metrics-repoting".to_string())
.spawn(move || {
Self::reporting_loop(running_flag_clone, metrics_clone, report_receiver);
})
.unwrap(),
);
Self {
cost_model,
metrics,
reporting_thread,
running_flag,
report_sender,
}
}
// invoke cost_model to calculate cost for the given list of transactions
pub fn compute_transaction_costs<'a>(
&self,
transactions: impl Iterator<Item = &'a SanitizedTransaction>,
) -> Vec<TransactionCost> {
let mut compute_cost_time = Measure::start("compute_cost_time");
let cost_model = self.cost_model.read().unwrap();
let txs_costs: Vec<_> = transactions
.map(|tx| {
let cost = cost_model.calculate_cost(tx);
debug!(
"transaction {:?}, cost {:?}, cost sum {}",
tx,
cost,
cost.sum()
);
cost
})
.collect();
compute_cost_time.stop();
self.metrics
.compute_cost_time
.fetch_add(compute_cost_time.as_us(), Ordering::Relaxed);
self.metrics
.compute_cost_count
.fetch_add(txs_costs.len() as u64, Ordering::Relaxed);
txs_costs
}
    // Given a list of transactions and their costs, this function returns a corresponding
    // list of Results that indicate whether each transaction is selected to be included in the current block.
pub fn select_transactions_per_cost<'a>(
&self,
transactions: impl Iterator<Item = &'a SanitizedTransaction>,
transactions_costs: impl Iterator<Item = &'a TransactionCost>,
bank: &Arc<Bank>,
) -> (Vec<transaction::Result<()>>, usize) {
let mut cost_tracking_time = Measure::start("cost_tracking_time");
let mut cost_tracker = bank.write_cost_tracker().unwrap();
let mut num_included = 0;
let select_results = transactions
.zip(transactions_costs)
.map(|(tx, cost)| match cost_tracker.try_add(tx, cost) {
Ok(current_block_cost) => {
debug!("slot {:?}, transaction {:?}, cost {:?}, fit into current block, current block cost {}", bank.slot(), tx, cost, current_block_cost);
self.metrics.selected_txs_count.fetch_add(1, Ordering::Relaxed);
num_included += 1;
Ok(())
},
Err(e) => {
debug!("slot {:?}, transaction {:?}, cost {:?}, not fit into current block, '{:?}'", bank.slot(), tx, cost, e);
match e {
CostTrackerError::WouldExceedBlockMaxLimit => {
self.metrics.retried_txs_per_block_limit_count.fetch_add(1, Ordering::Relaxed);
Err(TransactionError::WouldExceedMaxBlockCostLimit)
}
CostTrackerError::WouldExceedVoteMaxLimit => {
self.metrics.retried_txs_per_vote_limit_count.fetch_add(1, Ordering::Relaxed);
Err(TransactionError::WouldExceedMaxVoteCostLimit)
}
CostTrackerError::WouldExceedAccountMaxLimit => {
self.metrics.retried_txs_per_account_limit_count.fetch_add(1, Ordering::Relaxed);
Err(TransactionError::WouldExceedMaxAccountCostLimit)
}
CostTrackerError::WouldExceedAccountDataMaxLimit => {
self.metrics.retried_txs_per_account_data_limit_count.fetch_add(1, Ordering::Relaxed);
Err(TransactionError::WouldExceedMaxAccountDataCostLimit)
}
}
}
})
.collect();
cost_tracking_time.stop();
self.metrics
.cost_tracking_time
.fetch_add(cost_tracking_time.as_us(), Ordering::Relaxed);
(select_results, num_included)
}
// metrics are reported by bank slot
pub fn report_metrics(&self, bank: Arc<Bank>) {
self.report_sender
.send(QosMetrics::BlockBatchUpdate { bank })
.unwrap_or_else(|err| warn!("qos service report metrics failed: {:?}", err));
}
// metrics accumulating apis
pub fn accumulate_tpu_ingested_packets_count(&self, count: u64) {
self.metrics
.tpu_ingested_packets_count
.fetch_add(count, Ordering::Relaxed);
}
pub fn accumulate_tpu_buffered_packets_count(&self, count: u64) {
self.metrics
.tpu_buffered_packets_count
.fetch_add(count, Ordering::Relaxed);
}
pub fn accumulated_verified_txs_count(&self, count: u64) {
self.metrics
.verified_txs_count
.fetch_add(count, Ordering::Relaxed);
}
pub fn accumulated_processed_txs_count(&self, count: u64) {
self.metrics
.processed_txs_count
.fetch_add(count, Ordering::Relaxed);
}
pub fn accumulated_retryable_txs_count(&self, count: u64) {
self.metrics
.retryable_txs_count
.fetch_add(count, Ordering::Relaxed);
}
pub fn accumulate_estimated_transaction_costs(
&self,
cost_details: &BatchedTransactionCostDetails,
) {
self.metrics
.estimated_signature_cu
.fetch_add(cost_details.batched_signature_cost, Ordering::Relaxed);
self.metrics
.estimated_write_lock_cu
.fetch_add(cost_details.batched_write_lock_cost, Ordering::Relaxed);
self.metrics
.estimated_data_bytes_cu
.fetch_add(cost_details.batched_data_bytes_cost, Ordering::Relaxed);
self.metrics
.estimated_execute_cu
.fetch_add(cost_details.batched_execute_cost, Ordering::Relaxed);
}
pub fn accumulate_actual_execute_cu(&self, units: u64) {
self.metrics
.actual_execute_cu
.fetch_add(units, Ordering::Relaxed);
}
pub fn accumulate_actual_execute_time(&self, micro_sec: u64) {
self.metrics
.actual_execute_time_us
.fetch_add(micro_sec, Ordering::Relaxed);
}
fn reporting_loop(
running_flag: Arc<AtomicBool>,
metrics: Arc<QosServiceMetrics>,
report_receiver: Receiver<QosMetrics>,
) {
while running_flag.load(Ordering::Relaxed) {
for qos_metrics in report_receiver.try_iter() {
match qos_metrics {
QosMetrics::BlockBatchUpdate { bank } => {
metrics.report(bank.slot());
}
}
}
thread::sleep(Duration::from_millis(100));
}
}
}
#[derive(Default)]
struct QosServiceMetrics {
    // banking_stage creates one QosService instance per working thread, each uniquely
    // identified by id. This field allows metrics to be categorized for gossip votes, TPU votes
    // and other transactions.
id: u32,
// aggregate metrics per slot
slot: AtomicU64,
// accumulated number of live packets TPU received from verified receiver for processing.
tpu_ingested_packets_count: AtomicU64,
// accumulated number of live packets TPU put into buffer due to no active bank.
tpu_buffered_packets_count: AtomicU64,
    // accumulated number of verified txs from ingested packets, which excludes unsanitized
    // transactions and, when in vote-only mode, non-vote transactions
verified_txs_count: AtomicU64,
    // accumulated number of transactions that have been processed; includes those landed and those to be
    // returned (due to AccountInUse and other QoS-related reasons)
processed_txs_count: AtomicU64,
// accumulated number of transactions buffered for retry, often due to AccountInUse and QoS
// reasons, includes retried_txs_per_block_limit_count and retried_txs_per_account_limit_count
retryable_txs_count: AtomicU64,
// accumulated time in micro-sec spent in computing transaction cost. It is the main performance
// overhead introduced by cost_model
compute_cost_time: AtomicU64,
    // total number of transactions in the reporting period whose cost is computed. It is
    // usually the number of sanitized transactions the leader receives.
compute_cost_count: AtomicU64,
    // accumulated time in micro-sec spent in tracking each bank's cost. It is the second part of
    // the overhead introduced by cost_model
cost_tracking_time: AtomicU64,
// number of transactions to be included in blocks
selected_txs_count: AtomicU64,
// number of transactions to be queued for retry due to its potential to breach block limit
retried_txs_per_block_limit_count: AtomicU64,
// number of transactions to be queued for retry due to its potential to breach vote limit
retried_txs_per_vote_limit_count: AtomicU64,
// number of transactions to be queued for retry due to its potential to breach writable
// account limit
retried_txs_per_account_limit_count: AtomicU64,
// number of transactions to be queued for retry due to its account data limits
retried_txs_per_account_data_limit_count: AtomicU64,
    // accumulated estimated signature Compute Units to be packed into block
estimated_signature_cu: AtomicU64,
// accumulated estimated write locks Compute Units to be packed into block
estimated_write_lock_cu: AtomicU64,
    // accumulated estimated instruction data Compute Units to be packed into block
estimated_data_bytes_cu: AtomicU64,
// accumulated estimated program Compute Units to be packed into block
estimated_execute_cu: AtomicU64,
// accumulated actual program Compute Units that have been packed into block
actual_execute_cu: AtomicU64,
    // accumulated actual program execution time in micro-sec for transactions packed into the block
actual_execute_time_us: AtomicU64,
}
impl QosServiceMetrics {
pub fn new(id: u32) -> Self {
QosServiceMetrics {
id,
..QosServiceMetrics::default()
}
}
pub fn report(&self, bank_slot: Slot) {
if bank_slot != self.slot.load(Ordering::Relaxed) {
datapoint_info!(
"qos-service-stats",
("id", self.id as i64, i64),
("bank_slot", bank_slot as i64, i64),
(
"tpu_ingested_packets_count",
self.tpu_ingested_packets_count.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"tpu_buffered_packets_count",
self.tpu_buffered_packets_count.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"verified_txs_count",
self.verified_txs_count.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"processed_txs_count",
self.processed_txs_count.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"retryable_txs_count",
self.retryable_txs_count.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"compute_cost_time",
self.compute_cost_time.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"compute_cost_count",
self.compute_cost_count.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"cost_tracking_time",
self.cost_tracking_time.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"selected_txs_count",
self.selected_txs_count.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"retried_txs_per_block_limit_count",
self.retried_txs_per_block_limit_count
.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"retried_txs_per_vote_limit_count",
self.retried_txs_per_vote_limit_count
.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"retried_txs_per_account_limit_count",
self.retried_txs_per_account_limit_count
.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"retried_txs_per_account_data_limit_count",
self.retried_txs_per_account_data_limit_count
.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"estimated_signature_cu",
self.estimated_signature_cu.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"estimated_write_lock_cu",
self.estimated_write_lock_cu.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"estimated_data_bytes_cu",
self.estimated_data_bytes_cu.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"estimated_execute_cu",
self.estimated_execute_cu.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"actual_execute_cu",
self.actual_execute_cu.swap(0, Ordering::Relaxed) as i64,
i64
),
(
"actual_execute_time_us",
self.actual_execute_time_us.swap(0, Ordering::Relaxed) as i64,
i64
),
);
self.slot.store(bank_slot, Ordering::Relaxed);
}
}
}
#[cfg(test)]
mod tests {
use {
super::*,
itertools::Itertools,
solana_runtime::{
bank::Bank,
genesis_utils::{create_genesis_config, GenesisConfigInfo},
},
solana_sdk::{
hash::Hash,
signature::{Keypair, Signer},
system_transaction,
},
solana_vote_program::vote_transaction,
};
#[test]
fn test_compute_transaction_costs() {
solana_logger::setup();
// make a vec of txs
let keypair = Keypair::new();
let transfer_tx = SanitizedTransaction::from_transaction_for_tests(
system_transaction::transfer(&keypair, &keypair.pubkey(), 1, Hash::default()),
);
let vote_tx = SanitizedTransaction::from_transaction_for_tests(
vote_transaction::new_vote_transaction(
vec![42],
Hash::default(),
Hash::default(),
&keypair,
&keypair,
&keypair,
None,
),
);
let txs = vec![transfer_tx.clone(), vote_tx.clone(), vote_tx, transfer_tx];
let cost_model = Arc::new(RwLock::new(CostModel::default()));
let qos_service = QosService::new(cost_model.clone(), 1);
let txs_costs = qos_service.compute_transaction_costs(txs.iter());
// verify the size of txs_costs and its contents
assert_eq!(txs_costs.len(), txs.len());
txs_costs
.iter()
.enumerate()
.map(|(index, cost)| {
assert_eq!(
cost.sum(),
cost_model.read().unwrap().calculate_cost(&txs[index]).sum()
);
})
.collect_vec();
}
#[test]
fn test_select_transactions_per_cost() {
solana_logger::setup();
let GenesisConfigInfo { genesis_config, .. } = create_genesis_config(10);
let bank = Arc::new(Bank::new_for_tests(&genesis_config));
let cost_model = Arc::new(RwLock::new(CostModel::default()));
let keypair = Keypair::new();
let transfer_tx = SanitizedTransaction::from_transaction_for_tests(
system_transaction::transfer(&keypair, &keypair.pubkey(), 1, Hash::default()),
);
let vote_tx = SanitizedTransaction::from_transaction_for_tests(
vote_transaction::new_vote_transaction(
vec![42],
Hash::default(),
Hash::default(),
&keypair,
&keypair,
&keypair,
None,
),
);
let transfer_tx_cost = cost_model
.read()
.unwrap()
.calculate_cost(&transfer_tx)
.sum();
let vote_tx_cost = cost_model.read().unwrap().calculate_cost(&vote_tx).sum();
// make a vec of txs
let txs = vec![transfer_tx.clone(), vote_tx.clone(), transfer_tx, vote_tx];
let qos_service = QosService::new(cost_model, 1);
let txs_costs = qos_service.compute_transaction_costs(txs.iter());
// set cost tracker limit to fit 1 transfer tx and 1 vote tx
let cost_limit = transfer_tx_cost + vote_tx_cost;
bank.write_cost_tracker()
.unwrap()
.set_limits(cost_limit, cost_limit, cost_limit);
let (results, num_selected) =
qos_service.select_transactions_per_cost(txs.iter(), txs_costs.iter(), &bank);
assert_eq!(num_selected, 2);
// verify that first transfer tx and first vote are allowed
assert_eq!(results.len(), txs.len());
assert!(results[0].is_ok());
assert!(results[1].is_ok());
assert!(results[2].is_err());
assert!(results[3].is_err());
}
}
| 37.68873 | 159 | 0.574568 |
e5b8218eb1c6988898daeda78f9077fd19dd1e18 | 8,661 | // Generated from definition io.k8s.api.flowcontrol.v1beta2.LimitedPriorityLevelConfiguration
/// LimitedPriorityLevelConfiguration specifies how to handle requests that are subject to limits. It addresses two issues:
/// * How are requests for this priority level limited?
/// * What should be done with requests that exceed the limit?
#[derive(Clone, Debug, Default, PartialEq)]
pub struct LimitedPriorityLevelConfiguration {
    /// `assuredConcurrencyShares` (ACS) configures the execution limit, which is a limit on the number of requests of this priority level that may be executing at a given time. ACS must be a positive number. The server's concurrency limit (SCL) is divided among the concurrency-controlled priority levels in proportion to their assured concurrency shares. This produces the assured concurrency value (ACV) --- the number of requests that may be executing at a time --- for each such priority level:
///
/// ACV(l) = ceil( SCL * ACS(l) / ( sum\[priority levels k\] ACS(k) ) )
///
/// bigger numbers of ACS mean more reserved concurrent requests (at the expense of every other PL). This field has a default value of 30.
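    ///
    /// As an illustrative example (numbers chosen here for explanation only, not taken from the API):
    /// with SCL = 600 and three limited priority levels whose ACS values are 30, 40 and 50, the first
    /// level gets ACV = ceil( 600 * 30 / (30 + 40 + 50) ) = 150 concurrent requests.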
pub assured_concurrency_shares: Option<i32>,
/// `limitResponse` indicates what to do with requests that can not be executed right now
pub limit_response: Option<crate::api::flowcontrol::v1beta2::LimitResponse>,
}
impl<'de> crate::serde::Deserialize<'de> for LimitedPriorityLevelConfiguration {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_assured_concurrency_shares,
Key_limit_response,
Other,
}
impl<'de> crate::serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
Ok(match v {
"assuredConcurrencyShares" => Field::Key_assured_concurrency_shares,
"limitResponse" => Field::Key_limit_response,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = LimitedPriorityLevelConfiguration;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("LimitedPriorityLevelConfiguration")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
let mut value_assured_concurrency_shares: Option<i32> = None;
let mut value_limit_response: Option<crate::api::flowcontrol::v1beta2::LimitResponse> = None;
while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_assured_concurrency_shares => value_assured_concurrency_shares = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_limit_response => value_limit_response = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(LimitedPriorityLevelConfiguration {
assured_concurrency_shares: value_assured_concurrency_shares,
limit_response: value_limit_response,
})
}
}
deserializer.deserialize_struct(
"LimitedPriorityLevelConfiguration",
&[
"assuredConcurrencyShares",
"limitResponse",
],
Visitor,
)
}
}
impl crate::serde::Serialize for LimitedPriorityLevelConfiguration {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
let mut state = serializer.serialize_struct(
"LimitedPriorityLevelConfiguration",
self.assured_concurrency_shares.as_ref().map_or(0, |_| 1) +
self.limit_response.as_ref().map_or(0, |_| 1),
)?;
if let Some(value) = &self.assured_concurrency_shares {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "assuredConcurrencyShares", value)?;
}
if let Some(value) = &self.limit_response {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "limitResponse", value)?;
}
crate::serde::ser::SerializeStruct::end(state)
}
}
#[cfg(feature = "schemars")]
impl crate::schemars::JsonSchema for LimitedPriorityLevelConfiguration {
fn schema_name() -> String {
"io.k8s.api.flowcontrol.v1beta2.LimitedPriorityLevelConfiguration".to_owned()
}
fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema {
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("LimitedPriorityLevelConfiguration specifies how to handle requests that are subject to limits. It addresses two issues:\n * How are requests for this priority level limited?\n * What should be done with requests that exceed the limit?".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))),
object: Some(Box::new(crate::schemars::schema::ObjectValidation {
properties: [
(
"assuredConcurrencyShares".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("`assuredConcurrencyShares` (ACS) configures the execution limit, which is a limit on the number of requests of this priority level that may be exeucting at a given time. ACS must be a positive number. The server's concurrency limit (SCL) is divided among the concurrency-controlled priority levels in proportion to their assured concurrency shares. This produces the assured concurrency value (ACV) --- the number of requests that may be executing at a time --- for each such priority level:\n\n ACV(l) = ceil( SCL * ACS(l) / ( sum[priority levels k] ACS(k) ) )\n\nbigger numbers of ACS mean more reserved concurrent requests (at the expense of every other PL). This field has a default value of 30.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Integer))),
format: Some("int32".to_owned()),
..Default::default()
}),
),
(
"limitResponse".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::api::flowcontrol::v1beta2::LimitResponse>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("`limitResponse` indicates what to do with requests that can not be executed right now".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
].into(),
..Default::default()
})),
..Default::default()
})
}
}
| 56.607843 | 776 | 0.593349 |
2f07b207271d7f320701ce4afb14d996058cf0c2 | 1,363 | use super::Button;
/// A mouse event.
///
/// _**Note:** This type is largely incomplete! If you need to track
/// additional events, feel free to [open an issue] and share your use case!_
///
/// [open an issue]: https://github.com/hecrj/iced/issues
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Event {
/// The mouse cursor entered the window.
CursorEntered,
/// The mouse cursor left the window.
CursorLeft,
/// The mouse cursor was moved
CursorMoved {
/// The X coordinate of the mouse position
x: f32,
/// The Y coordinate of the mouse position
y: f32,
},
/// A mouse button was pressed.
ButtonPressed(Button),
/// A mouse button was released.
ButtonReleased(Button),
/// The mouse wheel was scrolled.
WheelScrolled {
/// The scroll movement.
delta: ScrollDelta,
},
}
/// A scroll movement.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ScrollDelta {
/// A line-based scroll movement
Lines {
/// The number of horizontal lines scrolled
x: f32,
/// The number of vertical lines scrolled
y: f32,
},
/// A pixel-based scroll movement
Pixels {
/// The number of horizontal pixels scrolled
x: f32,
/// The number of vertical pixels scrolled
y: f32,
},
}
| 23.5 | 77 | 0.603081 |
d7f34a39c3789cb232e649d4095940822de8bd4b | 5,179 | use crate::GameState;
use bevy::prelude::*;
pub struct ActionsPlugin;
// This plugin listens for keyboard input and converts the input into Actions
// Actions can then be used as a resource in other systems to act on the player input.
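// For example (illustrative only; the `Player` component and the speed constant below are
// assumptions, not defined in this file), a consuming system could look like:
//
//     fn move_player(
//         time: Res<Time>,
//         actions: Res<Actions>,
//         mut player_query: Query<&mut Transform, With<Player>>,
//     ) {
//         if let Some(movement) = actions.player_movement {
//             let speed = 150.;
//             for mut transform in player_query.iter_mut() {
//                 transform.translation += movement.extend(0.) * speed * time.delta_seconds();
//             }
//         }
//     }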
impl Plugin for ActionsPlugin {
fn build(&self, app: &mut App) {
app.init_resource::<Actions>().add_system_set(
SystemSet::on_update(GameState::Playing).with_system(set_movement_actions),
);
}
}
#[derive(Default)]
pub struct Actions {
pub player_movement: Option<Vec2>,
}
fn set_movement_actions(mut actions: ResMut<Actions>, keyboard_input: Res<Input<KeyCode>>) {
if GameControl::Up.just_released(&keyboard_input)
|| GameControl::Up.pressed(&keyboard_input)
|| GameControl::Left.just_released(&keyboard_input)
|| GameControl::Left.pressed(&keyboard_input)
|| GameControl::Down.just_released(&keyboard_input)
|| GameControl::Down.pressed(&keyboard_input)
|| GameControl::Right.just_released(&keyboard_input)
|| GameControl::Right.pressed(&keyboard_input)
{
let mut player_movement = Vec2::ZERO;
if GameControl::Up.just_released(&keyboard_input)
|| GameControl::Down.just_released(&keyboard_input)
{
if GameControl::Up.pressed(&keyboard_input) {
player_movement.y = 1.;
} else if GameControl::Down.pressed(&keyboard_input) {
player_movement.y = -1.;
} else {
player_movement.y = 0.;
}
} else if GameControl::Up.just_pressed(&keyboard_input) {
player_movement.y = 1.;
} else if GameControl::Down.just_pressed(&keyboard_input) {
player_movement.y = -1.;
} else {
player_movement.y = actions.player_movement.unwrap_or(Vec2::ZERO).y;
}
if GameControl::Right.just_released(&keyboard_input)
|| GameControl::Left.just_released(&keyboard_input)
{
if GameControl::Right.pressed(&keyboard_input) {
player_movement.x = 1.;
} else if GameControl::Left.pressed(&keyboard_input) {
player_movement.x = -1.;
} else {
player_movement.x = 0.;
}
} else if GameControl::Right.just_pressed(&keyboard_input) {
player_movement.x = 1.;
} else if GameControl::Left.just_pressed(&keyboard_input) {
player_movement.x = -1.;
} else {
player_movement.x = actions.player_movement.unwrap_or(Vec2::ZERO).x;
}
if player_movement != Vec2::ZERO {
player_movement = player_movement.normalize();
actions.player_movement = Some(player_movement);
}
} else {
actions.player_movement = None;
}
}
enum GameControl {
Up,
Down,
Left,
Right,
}
impl GameControl {
fn just_released(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool {
match self {
GameControl::Up => {
keyboard_input.just_released(KeyCode::W)
|| keyboard_input.just_released(KeyCode::Up)
}
GameControl::Down => {
keyboard_input.just_released(KeyCode::S)
|| keyboard_input.just_released(KeyCode::Down)
}
GameControl::Left => {
keyboard_input.just_released(KeyCode::A)
|| keyboard_input.just_released(KeyCode::Left)
}
GameControl::Right => {
keyboard_input.just_released(KeyCode::D)
|| keyboard_input.just_released(KeyCode::Right)
}
}
}
fn pressed(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool {
match self {
GameControl::Up => {
keyboard_input.pressed(KeyCode::W) || keyboard_input.pressed(KeyCode::Up)
}
GameControl::Down => {
keyboard_input.pressed(KeyCode::S) || keyboard_input.pressed(KeyCode::Down)
}
GameControl::Left => {
keyboard_input.pressed(KeyCode::A) || keyboard_input.pressed(KeyCode::Left)
}
GameControl::Right => {
keyboard_input.pressed(KeyCode::D) || keyboard_input.pressed(KeyCode::Right)
}
}
}
fn just_pressed(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool {
match self {
GameControl::Up => {
keyboard_input.just_pressed(KeyCode::W) || keyboard_input.just_pressed(KeyCode::Up)
}
GameControl::Down => {
keyboard_input.just_pressed(KeyCode::S)
|| keyboard_input.just_pressed(KeyCode::Down)
}
GameControl::Left => {
keyboard_input.just_pressed(KeyCode::A)
|| keyboard_input.just_pressed(KeyCode::Left)
}
GameControl::Right => {
keyboard_input.just_pressed(KeyCode::D)
|| keyboard_input.just_pressed(KeyCode::Right)
}
}
}
}
| 35.965278 | 99 | 0.572891 |
fbd148a174e92a68f21afbca78c8dc7249f0f30e | 415 | extern crate clap;
extern crate libc;
extern crate ncurses;
extern crate unicode_width;
mod app;
mod commander;
mod config;
mod coordinator;
mod item;
mod item_list;
mod key;
mod libc_aux;
mod line;
mod line_storage;
mod match_info_cache;
mod pattern;
mod query;
mod reader;
mod screen;
mod screen_data;
mod search;
mod searcher;
mod state;
mod thread_util;
mod window;
fn main() {
app::App::new().start();
}
| 13.387097 | 28 | 0.749398 |
26e84dbbb473d736909f2e17b619b3565c37f856 | 4,716 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::DMACR {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R { bits: self.register.get() }
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct TXDMAER {
bits: bool,
}
impl TXDMAER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct RXDMAER {
bits: bool,
}
impl RXDMAER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _TXDMAEW<'a> {
w: &'a mut W,
}
impl<'a> _TXDMAEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _RXDMAEW<'a> {
w: &'a mut W,
}
impl<'a> _RXDMAEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 1 - Transmit DMA enable. If this bit is set to 1, DMA for the transmit FIFO is enabled."]
#[inline]
pub fn txdmae(&self) -> TXDMAER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
TXDMAER { bits }
}
#[doc = "Bit 0 - Receive DMA enable. If this bit is set to 1, DMA for the receive FIFO is enabled."]
#[inline]
pub fn rxdmae(&self) -> RXDMAER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
RXDMAER { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 1 - Transmit DMA enable. If this bit is set to 1, DMA for the transmit FIFO is enabled."]
#[inline]
pub fn txdmae(&mut self) -> _TXDMAEW {
_TXDMAEW { w: self }
}
#[doc = "Bit 0 - Receive DMA enable. If this bit is set to 1, DMA for the receive FIFO is enabled."]
#[inline]
pub fn rxdmae(&mut self) -> _RXDMAEW {
_RXDMAEW { w: self }
}
}
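// Illustrative usage (not part of the generated API; `periph` stands in for whatever peripheral
// instance owns this DMACR register):
//
//     periph.DMACR.modify(|_, w| w.txdmae().set_bit().rxdmae().set_bit());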
| 26.055249 | 106 | 0.50933 |
1e1ff5b991033df66adf3f16f03ca9c747fbc86f | 2,600 | use ron::value::{Map, Number, Value};
use serde::Serialize;
#[test]
fn bool() {
assert_eq!("true".parse(), Ok(Value::Bool(true)));
assert_eq!("false".parse(), Ok(Value::Bool(false)));
}
#[test]
fn char() {
assert_eq!("'a'".parse(), Ok(Value::Char('a')));
}
#[test]
fn map() {
let mut map = Map::new();
map.insert(Value::Char('a'), Value::Number(Number::new(1)));
map.insert(Value::Char('b'), Value::Number(Number::new(2f64)));
assert_eq!("{ 'a': 1, 'b': 2.0 }".parse(), Ok(Value::Map(map)));
}
#[test]
fn number() {
assert_eq!("42".parse(), Ok(Value::Number(Number::new(42))));
assert_eq!("3.1415".parse(), Ok(Value::Number(Number::new(3.1415f64))));
}
#[test]
fn option() {
let opt = Some(Box::new(Value::Char('c')));
assert_eq!("Some('c')".parse(), Ok(Value::Option(opt)));
}
#[test]
fn string() {
let normal = "\"String\"";
assert_eq!(normal.parse(), Ok(Value::String("String".into())));
let raw = "r\"Raw String\"";
assert_eq!(raw.parse(), Ok(Value::String("Raw String".into())));
let raw_hashes = "r#\"Raw String\"#";
assert_eq!(raw_hashes.parse(), Ok(Value::String("Raw String".into())));
let raw_escaped = "r##\"Contains \"#\"##";
assert_eq!(
raw_escaped.parse(),
Ok(Value::String("Contains \"#".into()))
);
let raw_multi_line = "r\"Multi\nLine\"";
assert_eq!(
raw_multi_line.parse(),
Ok(Value::String("Multi\nLine".into()))
);
}
#[test]
fn seq() {
let seq = vec![
Value::Number(Number::new(1)),
Value::Number(Number::new(2f64)),
];
assert_eq!("[1, 2.0]".parse(), Ok(Value::Seq(seq)));
}
#[test]
fn unit() {
use ron::error::{Error, ErrorCode, Position};
assert_eq!("()".parse(), Ok(Value::Unit));
assert_eq!("Foo".parse(), Ok(Value::Unit));
assert_eq!(
"".parse::<Value>(),
Err(Error {
code: ErrorCode::Eof,
position: Position { col: 1, line: 1 }
})
);
}
#[derive(Serialize)]
struct Scene(Option<(u32, u32)>);
#[derive(Serialize)]
struct Scene2 {
foo: Option<(u32, u32)>,
}
#[test]
fn roundtrip() {
use ron::{de::from_str, ser::to_string};
{
let s = to_string(&Scene2 {
foo: Some((122, 13)),
})
.unwrap();
println!("{}", s);
let scene: Value = from_str(&s).unwrap();
println!("{:?}", scene);
}
{
let s = to_string(&Scene(Some((13, 122)))).unwrap();
println!("{}", s);
let scene: Value = from_str(&s).unwrap();
println!("{:?}", scene);
}
}
| 23.214286 | 76 | 0.528462 |
11830eb5ba0348b3d32e91963ea1c9646034e416 | 5,444 | // Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
extern crate minisign;
use base64::{decode, encode};
use minisign::{sign, KeyPair as KP, SecretKeyBox};
use std::{
env::var_os,
fs::{self, File, OpenOptions},
io::{BufReader, Write},
path::{Path, PathBuf},
str,
time::{SystemTime, UNIX_EPOCH},
};
use tauri_bundler::bundle::common::create_file;
/// A key pair (`PublicKey` and `SecretKey`).
#[derive(Clone, Debug)]
pub struct KeyPair {
pub pk: String,
pub sk: String,
}
/// Generate base64 encoded keypair
pub fn generate_key(password: Option<String>) -> crate::Result<KeyPair> {
let KP { pk, sk } = KP::generate_encrypted_keypair(password).unwrap();
let pk_box_str = pk.to_box().unwrap().to_string();
let sk_box_str = sk.to_box(None).unwrap().to_string();
let encoded_pk = encode(&pk_box_str);
let encoded_sk = encode(&sk_box_str);
Ok(KeyPair {
pk: encoded_pk,
sk: encoded_sk,
})
}
/// Transform a base64 String to readable string for the main signer
pub fn decode_key(base64_key: String) -> crate::Result<String> {
let decoded_str = &decode(&base64_key)?[..];
Ok(String::from(str::from_utf8(decoded_str)?))
}
/// Save KeyPair to disk
pub fn save_keypair<P>(
force: bool,
sk_path: P,
key: &str,
pubkey: &str,
) -> crate::Result<(PathBuf, PathBuf)>
where
P: AsRef<Path>,
{
let sk_path = sk_path.as_ref();
let pubkey_path = format!("{}.pub", sk_path.display());
let pk_path = Path::new(&pubkey_path);
if sk_path.exists() {
if !force {
return Err(anyhow::anyhow!(
"Key generation aborted:\n{} already exists\nIf you really want to overwrite the existing key pair, add the --force switch to force this operation.",
sk_path.display()
));
} else {
std::fs::remove_file(&sk_path)?;
}
}
if pk_path.exists() {
std::fs::remove_file(&pk_path)?;
}
let mut sk_writer = create_file(&sk_path)?;
write!(sk_writer, "{:}", key)?;
sk_writer.flush()?;
let mut pk_writer = create_file(&pk_path)?;
write!(pk_writer, "{:}", pubkey)?;
pk_writer.flush()?;
Ok((fs::canonicalize(&sk_path)?, fs::canonicalize(&pk_path)?))
}
/// Read key from file
pub fn read_key_from_file<P>(sk_path: P) -> crate::Result<String>
where
P: AsRef<Path>,
{
Ok(fs::read_to_string(sk_path)?)
}
/// Sign files
pub fn sign_file<P>(
private_key: String,
password: String,
bin_path: P,
prehashed: bool,
) -> crate::Result<(PathBuf, String)>
where
P: AsRef<Path>,
{
let decoded_secret = decode_key(private_key)?;
let sk_box = SecretKeyBox::from_string(&decoded_secret).unwrap();
let sk = sk_box.into_secret_key(Some(password)).unwrap();
  // We need to append .sig at the end; that's where the signature will be stored
let signature_path_string = format!("{}.sig", bin_path.as_ref().display());
let signature_path = Path::new(&signature_path_string);
let mut signature_box_writer = create_file(&signature_path)?;
let trusted_comment = format!(
"timestamp:{}\tfile:{}",
unix_timestamp(),
bin_path.as_ref().display()
);
let (data_reader, should_be_prehashed) = open_data_file(bin_path)?;
let signature_box = sign(
None,
&sk,
data_reader,
prehashed | should_be_prehashed,
Some(trusted_comment.as_str()),
Some("signature from tauri secret key"),
)?;
let encoded_signature = encode(&signature_box.to_string());
signature_box_writer.write_all(&encoded_signature.as_bytes())?;
signature_box_writer.flush()?;
Ok((fs::canonicalize(&signature_path)?, encoded_signature))
}
/// Sign files using the TAURI_KEY_PASSWORD and TAURI_PRIVATE_KEY environment variables
pub fn sign_file_from_env_variables<P>(path_to_sign: P) -> crate::Result<(PathBuf, String)>
where
P: AsRef<Path>,
{
  // if no password is provided we use an empty string
let password_string = match var_os("TAURI_KEY_PASSWORD") {
Some(value) => String::from(value.to_str().unwrap()),
None => "".into(),
};
// get the private key
if let Some(private_key) = var_os("TAURI_PRIVATE_KEY") {
    // check if this file exists
let mut private_key_string = String::from(private_key.to_str().unwrap());
let pk_dir = Path::new(&private_key_string);
// Check if user provided a path or a key
    // We validate whether the path exists or not.
if pk_dir.exists() {
// read file content as use it as private key
private_key_string = read_key_from_file(pk_dir)?;
}
// sign our file
return sign_file(private_key_string, password_string, path_to_sign, false);
}
// reject if we don't have the private key
Err(anyhow::anyhow!("A public key has been found, but no private key. Make sure to set `TAURI_PRIVATE_KEY` environment variable."))
}
fn unix_timestamp() -> u64 {
let start = SystemTime::now();
let since_the_epoch = start
.duration_since(UNIX_EPOCH)
.expect("system clock is incorrect");
since_the_epoch.as_secs()
}
fn open_data_file<P>(data_path: P) -> crate::Result<(BufReader<File>, bool)>
where
P: AsRef<Path>,
{
let data_path = data_path.as_ref();
let file = OpenOptions::new()
.read(true)
.open(data_path)
.map_err(|e| minisign::PError::new(minisign::ErrorKind::Io, e))?;
let should_be_hashed = match file.metadata() {
Ok(metadata) => metadata.len() > (1u64 << 30),
Err(_) => true,
};
Ok((BufReader::new(file), should_be_hashed))
}
| 28.502618 | 157 | 0.680566 |
1e6c5a31a3e753cf08548e597767c5ee42f5d555 | 4,817 | #![allow(non_snake_case, non_upper_case_globals)]
#![allow(non_camel_case_types)]
//! SysTick timer
//!
//! Used by: stm32g431, stm32g441, stm32g471, stm32g473, stm32g474, stm32g483, stm32g484, stm32g491, stm32g4a1
use crate::RWRegister;
#[cfg(not(feature = "nosync"))]
use core::marker::PhantomData;
/// SysTick control and status register
pub mod CTRL {
/// Counter enable
pub mod ENABLE {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// SysTick exception request enable
pub mod TICKINT {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Clock source selection
pub mod CLKSOURCE {
/// Offset (2 bits)
pub const offset: u32 = 2;
/// Mask (1 bit: 1 << 2)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// COUNTFLAG
pub mod COUNTFLAG {
/// Offset (16 bits)
pub const offset: u32 = 16;
/// Mask (1 bit: 1 << 16)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
/// SysTick reload value register
pub mod LOAD {
/// RELOAD value
pub mod RELOAD {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (24 bits: 0xffffff << 0)
pub const mask: u32 = 0xffffff << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
/// SysTick current value register
pub mod VAL {
/// Current counter value
pub mod CURRENT {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (24 bits: 0xffffff << 0)
pub const mask: u32 = 0xffffff << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
/// SysTick calibration value register
pub mod CALIB {
/// Calibration value
pub mod TENMS {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (24 bits: 0xffffff << 0)
pub const mask: u32 = 0xffffff << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// SKEW flag: Indicates whether the TENMS value is exact
pub mod SKEW {
/// Offset (30 bits)
pub const offset: u32 = 30;
/// Mask (1 bit: 1 << 30)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// NOREF flag. Reads as zero
pub mod NOREF {
/// Offset (31 bits)
pub const offset: u32 = 31;
/// Mask (1 bit: 1 << 31)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
#[repr(C)]
pub struct RegisterBlock {
/// SysTick control and status register
pub CTRL: RWRegister<u32>,
/// SysTick reload value register
pub LOAD: RWRegister<u32>,
/// SysTick current value register
pub VAL: RWRegister<u32>,
/// SysTick calibration value register
pub CALIB: RWRegister<u32>,
}
pub struct ResetValues {
pub CTRL: u32,
pub LOAD: u32,
pub VAL: u32,
pub CALIB: u32,
}
#[cfg(not(feature = "nosync"))]
pub struct Instance {
pub(crate) addr: u32,
pub(crate) _marker: PhantomData<*const RegisterBlock>,
}
#[cfg(not(feature = "nosync"))]
impl ::core::ops::Deref for Instance {
type Target = RegisterBlock;
#[inline(always)]
fn deref(&self) -> &RegisterBlock {
unsafe { &*(self.addr as *const _) }
}
}
#[cfg(feature = "rtic")]
unsafe impl Send for Instance {}
| 25.759358 | 110 | 0.52979 |
39d280da6442919937bfe5a51852ec9b9cad7288 | 1,210 | //! # Web log analytics
//!
//! This example
//!
//! The download, parsing and analysis is farmed out to a thread pool
//! leveraging Amadeus, the distributed data processing library for Rust.
//!
//! ## Usage
//!
//! ```bash
//! cargo run --example cloudfront_logs --release
//! ```
use amadeus::prelude::*;
#[allow(unreachable_code)]
#[tokio::main]
async fn main() {
let pool = &ThreadPool::new(None, None).unwrap();
let rows = Cloudfront::new_with(
AwsRegion::UsEast1,
"us-east-1.data-analytics",
"cflogworkshop/raw/cf-accesslogs/",
AwsCredentials::Anonymous,
)
.await
.unwrap();
let (sample, histogram) = rows
.par_stream()
.map(Result::unwrap)
.fork(
pool,
Identity.sample_unstable(10),
Identity
.map(|row: &CloudfrontRow| (row.time.truncate_minutes(60), ()))
.group_by(Identity.count()),
)
.await;
let mut histogram = histogram.into_iter().collect::<Vec<_>>();
histogram.sort();
assert_eq!(histogram.iter().map(|(_, c)| c).sum::<usize>(), 207_928);
println!("sample: {:#?}", sample);
println!(
"histogram:\n {}",
histogram
.into_iter()
.map(|(time, count)| format!("{}: {}", time, count))
.collect::<Vec<_>>()
.join("\n ")
);
}
| 21.22807 | 73 | 0.62314 |
9b2dfd18385737e1725f77da4b0878d18372fa60 | 2,028 | use crate::requests::session::SessionID;
use crate::responses;
use crate::responses::error::Error::UnauthorizedError;
use async_redis_session::RedisSessionStore;
use async_session::SessionStore;
use async_trait::async_trait;
use axum::extract::{Extension, FromRequest, RequestParts};
use isucondition_core::models::user::UserID;
pub enum CurrentUserID {
Some(UserID),
None,
}
impl CurrentUserID {
pub fn is_none(&self) -> bool {
match self {
CurrentUserID::Some(_) => false,
CurrentUserID::None => true,
}
}
pub fn try_unwrap(&self) -> Result<UserID, responses::error::Error> {
match self {
CurrentUserID::Some(user_id) => Ok(user_id.clone()),
CurrentUserID::None => Err(UnauthorizedError()),
}
}
}
pub const SESSION_USER_ID: &str = "jia_user_id";
#[async_trait]
impl<B> FromRequest<B> for CurrentUserID
where
B: Send,
{
type Rejection = ();
async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
let Extension(store) = Extension::<RedisSessionStore>::from_request(req)
.await
.expect("session store not found");
let session_id = SessionID::from_request(req).await?;
if session_id.is_none() {
return Ok(Self::None);
}
let session_id = session_id.unwrap();
let session = store.load_session(session_id).await;
return match session {
Err(_) => {
// TODO: it should be Err
Ok(Self::None)
}
Ok(session) => {
if session.is_none() {
return Ok(Self::None);
}
let current_user_id = session.unwrap().get::<String>(SESSION_USER_ID);
match current_user_id {
None => Ok(CurrentUserID::None),
Some(user_id) => Ok(Self::Some(UserID::new(user_id.clone()))),
}
}
};
}
}
| 29.823529 | 87 | 0.570513 |
011c889ce6a341a5fdfb516f32355db902164115 | 910 | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use exonum_build::{ProtoSources, ProtobufGenerator};
fn main() {
ProtobufGenerator::with_mod_name("protobuf_mod.rs")
.with_input_dir("src/proto")
.with_includes(&[
"src/proto".into(),
ProtoSources::Exonum,
ProtoSources::Crypto,
])
.generate();
}
| 33.703704 | 75 | 0.689011 |
de3499814025c9b086e25742d3b7f9ec809d43b9 | 1,061 | use joker::token::{StringLiteral, NumberLiteral, NumberSource};
pub trait IntoStringLiteral {
fn into_string_literal(self) -> StringLiteral;
}
impl IntoStringLiteral for String {
fn into_string_literal(self) -> StringLiteral {
StringLiteral {
source: None,
value: self
}
}
}
pub trait IntoNumberLiteral {
fn into_number_literal(self) -> NumberLiteral;
}
impl IntoNumberLiteral for i64 {
fn into_number_literal(self) -> NumberLiteral {
NumberLiteral {
source: Some(NumberSource::DecimalInt(self.to_string(), None)),
value: self as f64
}
}
}
impl IntoNumberLiteral for u64 {
fn into_number_literal(self) -> NumberLiteral {
NumberLiteral {
source: Some(NumberSource::DecimalInt(self.to_string(), None)),
value: self as f64
}
}
}
impl IntoNumberLiteral for f64 {
fn into_number_literal(self) -> NumberLiteral {
NumberLiteral {
source: None,
value: self
}
}
}
| 23.065217 | 75 | 0.618285 |
acfa41d86dc245e12018eb4998c7a2e068c1eef4 | 364 | // Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2
pub use self::gen_client::Client as MinerClient;
use jsonrpc_core::Result;
use jsonrpc_derive::rpc;
#[rpc]
pub trait MinerApi {
/// submit mining seal
#[rpc(name = "mining.submit")]
fn submit(&self, minting_blob: String, nonce: u32, extra: String) -> Result<()>;
}
| 26 | 84 | 0.697802 |
641c68528c6dea53c93fc56c1890e569eccc19fb | 166 |
#[inline]
pub fn add(a: i32, b: i32) -> i32 {
let c: i32;
unsafe {
llvm_asm!("add $0, $1, $2" : "=r"(c): "r"(a), "r"(b) :: "volatile")
}
c
}
| 16.6 | 75 | 0.403614 |
56930a922dd4bf2b212c8489bcfebcfd5e200d70 | 1,517 | // Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::prelude::*;
#[derive(Clone)]
pub struct Null {}
impl Codec for Null {
fn name(&self) -> &str {
"null"
}
fn decode<'input>(
&mut self,
_data: &'input mut [u8],
_ingest_ns: u64,
) -> Result<Option<Value<'input>>> {
Ok(Some(Value::null()))
}
fn encode(&self, _data: &Value) -> Result<Vec<u8>> {
Ok(vec![])
}
fn boxed_clone(&self) -> Box<dyn Codec> {
Box::new(self.clone())
}
}
#[cfg(test)]
mod test {
use super::*;
use simd_json::OwnedValue;
#[test]
fn test_null_codec() -> Result<()> {
let seed: OwnedValue = OwnedValue::null();
let seed: Value = seed.into();
let mut codec = Null {};
let mut as_raw = codec.encode(&seed)?;
let as_json = codec.decode(as_raw.as_mut_slice(), 0);
assert!(as_json.is_ok());
as_json?;
Ok(())
}
}
| 25.283333 | 75 | 0.603164 |
3979bd159457a627d099bb86d5588e1cfdd34e18 | 721 | /* automatically generated by rust-bindgen */
pub type __uint8_t = crate::libc::c_uchar;
pub type __uint32_t = crate::libc::c_uint;
pub type __uint64_t = crate::libc::c_ulong;
pub type FStar_UInt128_t = [u64; 2];
extern "C" {
pub fn Hacl_Policies_declassify_u8(x: u8) -> u8;
}
extern "C" {
pub fn Hacl_Policies_declassify_u32(x: u32) -> u32;
}
extern "C" {
pub fn Hacl_Policies_declassify_u64(x: u64) -> u64;
}
extern "C" {
pub fn Hacl_Policies_declassify_u128(x: FStar_UInt128_t) -> FStar_UInt128_t;
}
extern "C" {
pub fn Hacl_Policies_cmp_bytes_(b1: *mut u8, b2: *mut u8, len: u32, tmp: *mut u8) -> u8;
}
extern "C" {
pub fn Hacl_Policies_cmp_bytes(b1: *mut u8, b2: *mut u8, len: u32) -> u8;
}
| 28.84 | 92 | 0.680999 |
db789a092f6069bc385b2258ec1a976ac4d9213c | 3,546 | // This file is auto generated by `cg` <https://github.com/teloxide/cg> (be02d84).
// **DO NOT EDIT THIS FILE**,
// edit `cg` instead.
use serde::Serialize;
use crate::types::{ChatId, InputFile, Message, ParseMode, ReplyMarkup};
impl_payload! {
/// Use this method to send video files, Telegram clients support mp4 videos (other formats may be sent as [`Document`]). On success, the sent [`Message`] is returned. Bots can currently send video files of up to 50 MB in size, this limit may be changed in the future.
///
/// [`Document`]: crate::types::Document
/// [`Message`]: crate::types::Message
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize)]
pub SendVideo (SendVideoSetters) => Message {
required {
/// Unique identifier for the target chat or username of the target channel (in the format `@channelusername`)
pub chat_id: ChatId [into],
/// Video to send. Pass a file_id as String to send a video that exists on the Telegram servers (recommended), pass an HTTP URL as a String for Telegram to get a video from the Internet, or upload a new video using multipart/form-data. [More info on Sending Files »]
///
/// [More info on Sending Files »]: crate::types::InputFile
pub video: InputFile,
}
optional {
/// Duration of the video in seconds
pub duration: u32,
/// Video width
pub width: u32,
/// Video height
pub height: u32,
/// Thumbnail of the file sent; can be ignored if thumbnail generation for the file is supported server-side. The thumbnail should be in JPEG format and less than 200 kB in size. A thumbnail's width and height should not exceed 320. Ignored if the file is not uploaded using multipart/form-data. Thumbnails can't be reused and can be only uploaded as a new file, so you can pass “attach://<file_attach_name>” if the thumbnail was uploaded using multipart/form-data under <file_attach_name>. [More info on Sending Files »]
///
/// [More info on Sending Files »]: crate::types::InputFile
pub thumb: InputFile,
/// Video caption (may also be used when resending videos by _file\_id_), 0-1024 characters after entities parsing
pub caption: String [into],
/// Mode for parsing entities in the video caption. See [formatting options] for more details.
///
/// [formatting options]: https://core.telegram.org/bots/api#formatting-options
pub parse_mode: ParseMode,
/// Pass _True_, if the uploaded video is suitable for streaming
pub supports_streaming: bool,
/// Sends the message [silently]. Users will receive a notification with no sound.
///
/// [silently]: https://telegram.org/blog/channels-2-0#silent-messages
pub disable_notification: bool,
/// If the message is a reply, ID of the original message
pub reply_to_message_id: i32,
/// Additional interface options. A JSON-serialized object for an [inline keyboard], [custom reply keyboard], instructions to remove reply keyboard or to force a reply from the user.
///
/// [inline keyboard]: https://core.telegram.org/bots#inline-keyboards-and-on-the-fly-updating
/// [custom reply keyboard]: https://core.telegram.org/bots#keyboards
pub reply_markup: ReplyMarkup [into],
}
}
}
| 63.321429 | 533 | 0.644106 |
11c97f665aa3af1576c65d2802802e4a310db9e5 | 13,117 | use serde::de;
use serde_derive::Deserialize;
use std::collections::HashMap;
use std::fmt;
mod common;
use crate::common::{
deserializes_to, deserializes_to_nan_f32, deserializes_to_nan_f64, deserializes_with_error,
};
#[test]
fn deserializes_bool() {
deserializes_to("true", true);
deserializes_to("false", false);
}
#[test]
fn deserializes_i8() {
let x: i8 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
deserializes_to("-42", -x);
deserializes_to("-42.", -x);
deserializes_to("-42.0", -x);
deserializes_to("-42e0", -x);
deserializes_to("-4.2e1", -x);
deserializes_to("-.42e2", -x);
deserializes_to("-0.42e2", -x);
}
#[test]
fn deserializes_u8() {
let x: u8 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
}
#[test]
fn deserializes_i16() {
let x: i16 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
deserializes_to("-42", -x);
deserializes_to("-42.", -x);
deserializes_to("-42.0", -x);
deserializes_to("-42e0", -x);
deserializes_to("-4.2e1", -x);
deserializes_to("-.42e2", -x);
deserializes_to("-0.42e2", -x);
}
#[test]
fn deserializes_u16() {
let x: u16 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
}
#[test]
fn deserializes_i32() {
let x: i32 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
deserializes_to("-42", -x);
deserializes_to("-42.", -x);
deserializes_to("-42.0", -x);
deserializes_to("-42e0", -x);
deserializes_to("-4.2e1", -x);
deserializes_to("-.42e2", -x);
deserializes_to("-0.42e2", -x);
}
#[test]
fn deserializes_u32() {
let x: u32 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
}
#[test]
fn deserializes_i64() {
let x: i64 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
deserializes_to("-42", -x);
deserializes_to("-42.", -x);
deserializes_to("-42.0", -x);
deserializes_to("-42e0", -x);
deserializes_to("-4.2e1", -x);
deserializes_to("-.42e2", -x);
deserializes_to("-0.42e2", -x);
}
#[test]
fn deserializes_u64() {
let x: u64 = 42;
deserializes_to("0x2A", x);
deserializes_to("0x2a", x);
deserializes_to("0X2A", x);
deserializes_to("0X2a", x);
deserializes_to("0x00002A", x);
deserializes_to("42", x);
deserializes_to("42.", x);
deserializes_to("42.0", x);
deserializes_to("42e0", x);
deserializes_to("4.2e1", x);
deserializes_to(".42e2", x);
deserializes_to("0.42e2", x);
}
#[test]
fn deserializes_f32() {
let x: f32 = 42.42;
deserializes_to("42.42", x);
deserializes_to("42.42e0", x);
deserializes_to("4.242e1", x);
deserializes_to(".4242e2", x);
deserializes_to("0.4242e2", x);
deserializes_to("-42.42", -x);
deserializes_to("-42.42", -x);
deserializes_to("-42.42", -x);
deserializes_to("-42.42e0", -x);
deserializes_to("-4.242e1", -x);
deserializes_to("-.4242e2", -x);
deserializes_to("-0.4242e2", -x);
deserializes_to("Infinity", std::f32::INFINITY);
deserializes_to("-Infinity", std::f32::NEG_INFINITY);
deserializes_to_nan_f32("NaN");
deserializes_to_nan_f32("-NaN");
}
#[test]
fn deserializes_f64() {
let x: f64 = 42.42;
deserializes_to("42.42", x);
deserializes_to("42.42e0", x);
deserializes_to("4.242e1", x);
deserializes_to(".4242e2", x);
deserializes_to("0.4242e2", x);
deserializes_to("-42.42", -x);
deserializes_to("-42.42", -x);
deserializes_to("-42.42", -x);
deserializes_to("-42.42e0", -x);
deserializes_to("-4.242e1", -x);
deserializes_to("-.4242e2", -x);
deserializes_to("-0.4242e2", -x);
deserializes_to("Infinity", std::f64::INFINITY);
deserializes_to("-Infinity", std::f64::NEG_INFINITY);
deserializes_to_nan_f64("NaN");
deserializes_to_nan_f64("-NaN");
}
#[test]
fn deserializes_char() {
deserializes_to("'x'", 'x');
deserializes_to("\"자\"", '자');
deserializes_to(r#""\"""#, '"');
deserializes_to(r#""\r""#, '\r');
deserializes_to(r#""\n""#, '\n');
deserializes_to(r#""\t""#, '\t');
deserializes_to(r#""\\""#, '\\');
deserializes_to(r#""\/""#, '/');
deserializes_to(r#""\b""#, '\u{0008}');
deserializes_to(r#""\f""#, '\u{000c}');
}
#[test]
#[ignore] // TODO currently unsupported
fn deserializes_str() {
deserializes_to("'Hello!'", "Hello!");
deserializes_to("\"안녕하세요\"", "안녕하세요");
}
#[test]
fn deserializes_string() {
deserializes_to("'Hello!'", "Hello!".to_owned());
deserializes_to("\"안녕하세요\"", "안녕하세요".to_owned());
}
#[test]
#[ignore] // TODO currently unsupported
fn deserializes_bytes() {}
#[test]
#[ignore] // TODO currently unsupported
fn deserializes_byte_buf() {}
#[test]
fn deserializes_option() {
deserializes_to::<Option<i32>>("null", None);
deserializes_to("42", Some(42));
deserializes_to("42", Some(Some(42)));
}
#[test]
fn deserializes_unit() {
deserializes_to("null", ());
}
#[test]
fn deserializes_unit_struct() {
#[derive(Deserialize, PartialEq, Debug)]
struct A;
deserializes_to("null", A);
}
#[test]
fn deserializes_newtype_struct() {
#[derive(Deserialize, PartialEq, Debug)]
struct A(i32);
#[derive(Deserialize, PartialEq, Debug)]
struct B(f64);
deserializes_to("42", A(42));
deserializes_to("42", B(42.));
}
#[test]
fn deserializes_seq() {
#[derive(Deserialize, PartialEq, Debug)]
#[serde(untagged)]
enum Val {
Number(f64),
Bool(bool),
String(String),
}
deserializes_to("[1, 2, 3]", vec![1, 2, 3]);
deserializes_to(
"[42, true, 'hello']",
vec![
Val::Number(42.),
Val::Bool(true),
Val::String("hello".to_owned()),
],
);
}
#[test]
fn deserializes_seq_size_hint() {
#[derive(Debug, PartialEq)]
struct Size(usize);
impl<'de> de::Deserialize<'de> for Size {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
type Value = Size;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("array")
}
fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
where
A: serde::de::SeqAccess<'de>,
{
Ok(Size(seq.size_hint().unwrap()))
}
}
deserializer.deserialize_seq(Visitor)
}
}
deserializes_to("[]", Size(0));
deserializes_to("[42, true, 'hello']", Size(3));
deserializes_to("[42, true, [1, 2]]", Size(3));
}
#[test]
fn deserializes_tuple() {
deserializes_to("[1, 2, 3]", (1, 2, 3));
}
#[test]
fn deserializes_tuple_struct() {
#[derive(Deserialize, PartialEq, Debug)]
struct A(i32, f64);
#[derive(Deserialize, PartialEq, Debug)]
struct B(f64, i32);
deserializes_to("[1, 2]", A(1, 2.));
deserializes_to("[1, 2]", B(1., 2));
}
#[test]
fn deserializes_map() {
let mut m = HashMap::new();
m.insert("a".to_owned(), 1);
m.insert("b".to_owned(), 2);
m.insert("c".to_owned(), 3);
deserializes_to("{ a: 1, 'b': 2, \"c\": 3 }", m);
}
#[test]
fn deserializes_map_size_hint() {
#[derive(Debug, PartialEq)]
struct Size(usize);
impl<'de> de::Deserialize<'de> for Size {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
type Value = Size;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("array")
}
fn visit_map<A>(self, map: A) -> Result<Self::Value, A::Error>
where
A: serde::de::MapAccess<'de>,
{
Ok(Size(map.size_hint().unwrap()))
}
}
deserializer.deserialize_map(Visitor)
}
}
deserializes_to("{}", Size(0));
deserializes_to("{ a: 1, 'b': 2, \"c\": 3 }", Size(3));
deserializes_to("{ a: 1, 'b': 2, \"c\": [1, 2] }", Size(3));
}
#[test]
fn deserializes_struct() {
#[derive(Deserialize, PartialEq, Debug)]
struct S {
a: i32,
b: i32,
c: i32,
}
deserializes_to("{ a: 1, 'b': 2, \"c\": 3 }", S { a: 1, b: 2, c: 3 });
}
#[test]
fn deserializes_enum() {
#[derive(Deserialize, PartialEq, Debug)]
enum E {
A,
B(i32),
C(i32, i32),
D { a: i32, b: i32 },
E {},
F(),
}
deserializes_to("'A'", E::A);
deserializes_to("{ B: 2 }", E::B(2));
deserializes_to("{ C: [3, 5] }", E::C(3, 5));
deserializes_to("{ D: { a: 7, b: 11 } }", E::D { a: 7, b: 11 });
deserializes_to("{ E: {} }", E::E {});
deserializes_to("{ F: [] }", E::F());
}
#[test]
fn deserializes_enum_with_error() {
#[derive(Deserialize, PartialEq, Debug)]
enum E {
A {},
B(),
}
#[derive(Deserialize, PartialEq, Debug)]
struct S {
e: E,
}
deserializes_with_error("{ e: 'A' }", S { e: E::A {} }, "expected an object");
deserializes_with_error("{ e: 'B' }", S { e: E::B() }, "expected an array");
}
#[test]
fn deserializes_ignored() {
#[derive(Deserialize, PartialEq, Debug)]
struct S {
a: i32,
b: i32,
}
deserializes_to("{ a: 1, ignored: 42, b: 2 }", S { a: 1, b: 2 });
}
#[test]
fn deserializes_json_values() {
// As int if json uses int type.
deserializes_to("0x2a", serde_json::json!(42));
deserializes_to("0x2A", serde_json::json!(42));
deserializes_to("0X2A", serde_json::json!(42));
deserializes_to("42", serde_json::json!(42));
// As float if json calls for explicit float type.
deserializes_to("42.", serde_json::json!(42.));
deserializes_to("42e0", serde_json::json!(42.));
deserializes_to("4e2", serde_json::json!(400.));
deserializes_to("4e2", serde_json::json!(4e2));
}
#[test]
fn deserialize_error_messages() {
#[derive(Deserialize, PartialEq, Debug)]
enum E {
A,
}
deserializes_with_error("'B'", E::A, "unknown variant `B`, expected `A`");
deserializes_with_error("0xffffffffff", 42, "error parsing hex");
let mut over_i64 = i64::max_value().to_string();
over_i64.push_str("0");
deserializes_with_error(
over_i64.as_str(),
serde_json::json!(42),
"error parsing integer",
);
deserializes_with_error("1e309", 42, "error parsing number: too large");
}
| 26.129482 | 95 | 0.570176 |
397f93cb162b5f102a4976b0c34b95f181582b00 | 1,989 | use crate::{gm_artifacts, RunOptions};
use std::{path::Path, process::Child};
pub fn invoke_run(
macros: &gm_artifacts::GmMacros,
build_bff: &Path,
sub_command: &RunOptions,
) -> Child {
let mut igor = std::process::Command::new(gm_artifacts::MONO_LOCATION);
igor.arg(macros.igor_path.clone())
.arg("-j=8")
.arg(format!("-options={}", build_bff.display()));
// add the verbosity
if sub_command.task.verbosity > 1 {
igor.arg("-v");
}
// add the platform
igor.arg("--")
.arg(gm_artifacts::PLATFORM_KIND.to_string())
.arg("Run")
.stdout(std::process::Stdio::piped())
.spawn()
.unwrap()
}
pub fn invoke_release(
macros: &gm_artifacts::GmMacros,
build_bff: &Path,
sub_command: &RunOptions,
) -> Child {
let mut igor = std::process::Command::new(gm_artifacts::MONO_LOCATION);
igor.arg(macros.igor_path.clone())
.arg("-j=8")
.arg(format!("-options={}", build_bff.display()));
// add the verbosity
if sub_command.task.verbosity > 1 {
igor.arg("-v");
}
igor.arg(format!(
"--lf={}",
macros.user_directory.join("licence.plist").display()
));
// add the platform
igor.arg("--")
.arg(gm_artifacts::PLATFORM_KIND.to_string())
.arg("PackageZip");
igor.stdout(std::process::Stdio::piped()).spawn().unwrap()
}
pub fn invoke_rerun(gm_build: &gm_artifacts::GmBuild) -> Child {
std::process::Command::new(
gm_build
.runtime_location
.join("mac/YoYo Runner.app/Contents/MacOS/Mac_Runner"),
)
.arg("-game")
.arg(gm_build.output_folder.join("GameAssetsMac.zip"))
.arg("-debugoutput")
.arg(gm_build.output_folder.join("debug.log"))
.arg("-output")
.arg(gm_build.output_folder.join("debug.log"))
.arg("-runTest")
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::null())
.spawn()
.unwrap()
}
| 26.878378 | 75 | 0.594771 |
d639ff227220a369d0e9ad456a0310d53a3c7b55 | 3,635 | use super::*;
/// A builder pattern struct for listing comments.
///
/// Created by [`PullRequestHandler::list_comments`].
///
/// [`PullRequestHandler::list_comments`]: ./struct.PullRequestHandler.html#method.list_comments
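///
/// A minimal usage sketch (illustrative only: the owner/repo names and pull
/// request number are placeholders, and the call chain mirrors the
/// serialization test at the bottom of this file):
///
/// ```ignore
/// let octocrab = octocrab::Octocrab::default();
/// let comments = octocrab
///     .pulls("owner", "repo")
///     .list_comments(Some(1))
///     .sort(octocrab::params::pulls::comments::Sort::Updated)
///     .per_page(50)
///     .send()
///     .await?;
/// ```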
#[derive(serde::Serialize)]
pub struct ListCommentsBuilder<'octo, 'b> {
#[serde(skip)]
handler: &'b PullRequestHandler<'octo>,
#[serde(skip)]
pr: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
sort: Option<crate::params::pulls::comments::Sort>,
#[serde(skip_serializing_if = "Option::is_none")]
direction: Option<crate::params::Direction>,
#[serde(skip_serializing_if = "Option::is_none")]
per_page: Option<u8>,
#[serde(skip_serializing_if = "Option::is_none")]
page: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
since: Option<chrono::DateTime<chrono::Utc>>,
}
impl<'octo, 'b> ListCommentsBuilder<'octo, 'b> {
pub(crate) fn new(handler: &'b PullRequestHandler<'octo>, pr: Option<u64>) -> Self {
Self {
handler,
pr,
sort: None,
direction: None,
per_page: None,
page: None,
since: None,
}
}
    /// What to sort results by. Can be either `created` or `updated`.
pub fn sort(mut self, sort: impl Into<crate::params::pulls::comments::Sort>) -> Self {
self.sort = Some(sort.into());
self
}
/// The direction of the sort. Can be either ascending or descending.
/// Default: descending when sort is `created` or sort is not specified,
/// otherwise ascending sort.
pub fn direction(mut self, direction: impl Into<crate::params::Direction>) -> Self {
self.direction = Some(direction.into());
self
}
/// Results per page (max 100).
pub fn per_page(mut self, per_page: impl Into<u8>) -> Self {
self.per_page = Some(per_page.into());
self
}
/// Page number of the results to fetch.
pub fn page(mut self, page: impl Into<u32>) -> Self {
self.page = Some(page.into());
self
}
/// Only show notifications updated after the given time.
pub fn since(mut self, since: impl Into<chrono::DateTime<chrono::Utc>>) -> Self {
self.since = Some(since.into());
self
}
/// Sends the actual request.
pub async fn send(self) -> crate::Result<Page<crate::models::pulls::Comment>> {
let url = format!(
"repos/{owner}/{repo}/pulls/{pr}comments",
owner = self.handler.owner,
repo = self.handler.repo,
pr = if let Some(pr) = self.pr {
pr.to_string() + "/"
} else {
"".into()
},
);
self.handler.http_get(url, Some(&self)).await
}
}
#[cfg(test)]
mod tests {
#[tokio::test]
async fn serialize() {
let octocrab = crate::Octocrab::default();
let handler = octocrab.pulls("rust-lang", "rust");
let yesterday = chrono::Utc::now() - chrono::Duration::days(1);
let list = handler
.list_comments(Some(1))
.sort(crate::params::pulls::comments::Sort::Updated)
.direction(crate::params::Direction::Ascending)
.since(yesterday)
.per_page(100)
.page(1u8);
assert_eq!(
serde_json::to_value(list).unwrap(),
serde_json::json!({
"sort": "updated",
"direction": "asc",
"per_page": 100,
"page": 1,
"since": yesterday
})
)
}
}
| 31.885965 | 96 | 0.561761 |
72460a235add2adfa1dba7d3f2669329715054bc | 52,706 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn parse_http_generic_error(
response: &http::Response<bytes::Bytes>,
) -> Result<aws_smithy_types::Error, aws_smithy_json::deserialize::Error> {
crate::json_errors::parse_generic_error(response.body(), response.headers())
}
pub fn deser_structure_crate_error_access_denied_exception_json_err(
value: &[u8],
mut builder: crate::error::access_denied_exception::Builder,
) -> Result<crate::error::access_denied_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_concurrent_modification_exception_json_err(
value: &[u8],
mut builder: crate::error::concurrent_modification_exception::Builder,
) -> Result<
crate::error::concurrent_modification_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_configuration_exception_json_err(
value: &[u8],
mut builder: crate::error::configuration_exception::Builder,
) -> Result<crate::error::configuration_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_limit_exceeded_exception_json_err(
value: &[u8],
mut builder: crate::error::limit_exceeded_exception::Builder,
) -> Result<crate::error::limit_exceeded_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_resource_already_exists_exception_json_err(
value: &[u8],
mut builder: crate::error::resource_already_exists_exception::Builder,
) -> Result<
crate::error::resource_already_exists_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_validation_exception_json_err(
value: &[u8],
mut builder: crate::error::validation_exception::Builder,
) -> Result<crate::error::validation_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_create_notification_rule(
value: &[u8],
mut builder: crate::output::create_notification_rule_output::Builder,
) -> Result<
crate::output::create_notification_rule_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_delete_notification_rule(
value: &[u8],
mut builder: crate::output::delete_notification_rule_output::Builder,
) -> Result<
crate::output::delete_notification_rule_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_resource_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::resource_not_found_exception::Builder,
) -> Result<crate::error::resource_not_found_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_describe_notification_rule(
value: &[u8],
mut builder: crate::output::describe_notification_rule_output::Builder,
) -> Result<
crate::output::describe_notification_rule_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"CreatedBy" => {
builder = builder.set_created_by(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"CreatedTimestamp" => {
builder = builder.set_created_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::date_time::Format::EpochSeconds,
)?,
);
}
"DetailType" => {
builder = builder.set_detail_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::DetailType::from(u.as_ref()))
})
.transpose()?,
);
}
"EventTypes" => {
builder = builder.set_event_types(
crate::json_deser::deser_list_com_amazonaws_codestarnotifications_event_type_batch(tokens)?
);
}
"LastModifiedTimestamp" => {
builder = builder.set_last_modified_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::date_time::Format::EpochSeconds,
)?,
);
}
"Name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Resource" => {
builder = builder.set_resource(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Status" => {
builder = builder.set_status(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::NotificationRuleStatus::from(u.as_ref()))
})
.transpose()?,
);
}
"Tags" => {
builder = builder.set_tags(
crate::json_deser::deser_map_com_amazonaws_codestarnotifications_tags(
tokens,
)?,
);
}
"Targets" => {
builder = builder.set_targets(
crate::json_deser::deser_list_com_amazonaws_codestarnotifications_targets_batch(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_invalid_next_token_exception_json_err(
value: &[u8],
mut builder: crate::error::invalid_next_token_exception::Builder,
) -> Result<crate::error::invalid_next_token_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_event_types(
value: &[u8],
mut builder: crate::output::list_event_types_output::Builder,
) -> Result<crate::output::list_event_types_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"EventTypes" => {
builder = builder.set_event_types(
crate::json_deser::deser_list_com_amazonaws_codestarnotifications_event_type_batch(tokens)?
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_notification_rules(
value: &[u8],
mut builder: crate::output::list_notification_rules_output::Builder,
) -> Result<
crate::output::list_notification_rules_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"NotificationRules" => {
builder = builder.set_notification_rules(
crate::json_deser::deser_list_com_amazonaws_codestarnotifications_notification_rule_batch(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_tags_for_resource(
value: &[u8],
mut builder: crate::output::list_tags_for_resource_output::Builder,
) -> Result<
crate::output::list_tags_for_resource_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Tags" => {
builder = builder.set_tags(
crate::json_deser::deser_map_com_amazonaws_codestarnotifications_tags(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_targets(
value: &[u8],
mut builder: crate::output::list_targets_output::Builder,
) -> Result<crate::output::list_targets_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Targets" => {
builder = builder.set_targets(
crate::json_deser::deser_list_com_amazonaws_codestarnotifications_targets_batch(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_subscribe(
value: &[u8],
mut builder: crate::output::subscribe_output::Builder,
) -> Result<crate::output::subscribe_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_tag_resource(
value: &[u8],
mut builder: crate::output::tag_resource_output::Builder,
) -> Result<crate::output::tag_resource_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Tags" => {
builder = builder.set_tags(
crate::json_deser::deser_map_com_amazonaws_codestarnotifications_tags(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_unsubscribe(
value: &[u8],
mut builder: crate::output::unsubscribe_output::Builder,
) -> Result<crate::output::unsubscribe_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn or_empty_doc(data: &[u8]) -> &[u8] {
if data.is_empty() {
b"{}"
} else {
data
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_codestarnotifications_event_type_batch<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::EventTypeSummary>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_event_type_summary(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map_com_amazonaws_codestarnotifications_tags<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::collections::HashMap<std::string::String, std::string::String>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
let mut map = std::collections::HashMap::new();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
let key = key.to_unescaped().map(|u| u.into_owned())?;
let value = aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
map.insert(key, value);
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(map))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_codestarnotifications_targets_batch<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::TargetSummary>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_target_summary(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_codestarnotifications_notification_rule_batch<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::NotificationRuleSummary>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_notification_rule_summary(tokens)?
;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
pub fn deser_structure_crate_model_event_type_summary<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EventTypeSummary>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::EventTypeSummary::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"EventTypeId" => {
builder = builder.set_event_type_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"ServiceName" => {
builder = builder.set_service_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"EventTypeName" => {
builder = builder.set_event_type_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"ResourceType" => {
builder = builder.set_resource_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_target_summary<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::TargetSummary>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::TargetSummary::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"TargetAddress" => {
builder = builder.set_target_address(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"TargetType" => {
builder = builder.set_target_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"TargetStatus" => {
builder = builder.set_target_status(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::TargetStatus::from(u.as_ref()))
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_notification_rule_summary<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::NotificationRuleSummary>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::NotificationRuleSummary::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
| 42.266239 | 126 | 0.482393 |
714fe4c76f05fd017cb8c644512779497d9f9a6c | 6,592 | //! `types` module contains types necessary for Fluent runtime
//! value handling.
//! The core struct is [`FluentValue`], a type that can be passed
//! to [`FluentBundle::format_pattern`](crate::bundle::FluentBundle) as an argument; it can be passed
//! to any Fluent Function, and any function may return it.
//!
//! This part of functionality is not fully hashed out yet, since we're waiting
//! for the internationalization APIs to mature, at which point all number
//! formatting operations will be moved out of Fluent.
//!
//! For now, [`FluentValue`] can be a string, a number, or a custom [`FluentType`]
//! which allows users of the library to implement their own types of values,
//! such as dates, or more complex structures needed for their bindings.
mod number;
mod plural;
pub use number::*;
use plural::PluralRules;
use std::any::Any;
use std::borrow::{Borrow, Cow};
use std::fmt;
use std::str::FromStr;
use intl_pluralrules::{PluralCategory, PluralRuleType};
use crate::memoizer::MemoizerKind;
use crate::resolver::Scope;
use crate::resource::FluentResource;
pub trait FluentType: fmt::Debug + AnyEq + 'static {
fn duplicate(&self) -> Box<dyn FluentType + Send>;
fn as_string(&self, intls: &intl_memoizer::IntlLangMemoizer) -> Cow<'static, str>;
fn as_string_threadsafe(
&self,
intls: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Cow<'static, str>;
}
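
// Illustrative sketch (not part of the original source): a user-defined value
// type can be exposed to Fluent by implementing the `FluentType` trait above.
// The `DateTimeStub` type below is hypothetical; the method signatures mirror
// the trait definition, and the blanket `AnyEq` impl below covers any
// `PartialEq + 'static` type.
//
// #[derive(Debug, Clone, PartialEq)]
// struct DateTimeStub(i64);
//
// impl FluentType for DateTimeStub {
//     fn duplicate(&self) -> Box<dyn FluentType + Send> {
//         Box::new(self.clone())
//     }
//     fn as_string(&self, _intls: &intl_memoizer::IntlLangMemoizer) -> Cow<'static, str> {
//         self.0.to_string().into()
//     }
//     fn as_string_threadsafe(
//         &self,
//         _intls: &intl_memoizer::concurrent::IntlLangMemoizer,
//     ) -> Cow<'static, str> {
//         self.0.to_string().into()
//     }
// }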
impl PartialEq for dyn FluentType + Send {
fn eq(&self, other: &Self) -> bool {
self.equals(other.as_any())
}
}
pub trait AnyEq: Any + 'static {
fn equals(&self, other: &dyn Any) -> bool;
fn as_any(&self) -> &dyn Any;
}
impl<T: Any + PartialEq> AnyEq for T {
fn equals(&self, other: &dyn Any) -> bool {
other
.downcast_ref::<Self>()
.map_or(false, |that| self == that)
}
fn as_any(&self) -> &dyn Any {
self
}
}
/// The `FluentValue` enum represents values which can be formatted to a String.
///
/// Those values are either passed as arguments to [`FluentBundle::format_pattern`][] or
/// produced by functions, or generated in the process of pattern resolution.
///
/// [`FluentBundle::format_pattern`]: ../bundle/struct.FluentBundle.html#method.format_pattern
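///
/// A minimal conversion sketch (illustrative; it relies only on the `From`
/// impls and the `try_number` helper defined in this module):
///
/// ```ignore
/// let text: FluentValue = "foo".into();           // FluentValue::String
/// let count = FluentValue::try_number("5");       // parses into FluentValue::Number
/// let fallback = FluentValue::try_number("x");    // falls back to FluentValue::String
/// assert_ne!(text, count);
/// ```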
#[derive(Debug)]
pub enum FluentValue<'source> {
String(Cow<'source, str>),
Number(FluentNumber),
Custom(Box<dyn FluentType + Send>),
None,
Error,
}
impl<'s> PartialEq for FluentValue<'s> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(FluentValue::String(s), FluentValue::String(s2)) => s == s2,
(FluentValue::Number(s), FluentValue::Number(s2)) => s == s2,
(FluentValue::Custom(s), FluentValue::Custom(s2)) => s == s2,
_ => false,
}
}
}
impl<'s> Clone for FluentValue<'s> {
fn clone(&self) -> Self {
match self {
FluentValue::String(s) => FluentValue::String(s.clone()),
FluentValue::Number(s) => FluentValue::Number(s.clone()),
FluentValue::Custom(s) => {
let new_value: Box<dyn FluentType + Send> = s.duplicate();
FluentValue::Custom(new_value)
}
FluentValue::Error => FluentValue::Error,
FluentValue::None => FluentValue::None,
}
}
}
impl<'source> FluentValue<'source> {
pub fn try_number<S: ToString>(v: S) -> Self {
let s = v.to_string();
if let Ok(num) = FluentNumber::from_str(&s) {
num.into()
} else {
s.into()
}
}
pub fn matches<R: Borrow<FluentResource>, M>(
&self,
other: &FluentValue,
scope: &Scope<R, M>,
) -> bool
where
M: MemoizerKind,
{
match (self, other) {
(&FluentValue::String(ref a), &FluentValue::String(ref b)) => a == b,
(&FluentValue::Number(ref a), &FluentValue::Number(ref b)) => a == b,
(&FluentValue::String(ref a), &FluentValue::Number(ref b)) => {
let cat = match a.as_ref() {
"zero" => PluralCategory::ZERO,
"one" => PluralCategory::ONE,
"two" => PluralCategory::TWO,
"few" => PluralCategory::FEW,
"many" => PluralCategory::MANY,
"other" => PluralCategory::OTHER,
_ => return false,
};
scope
.bundle
.intls
.with_try_get_threadsafe::<PluralRules, _, _>(
(PluralRuleType::CARDINAL,),
|pr| pr.0.select(b) == Ok(cat),
)
.unwrap()
}
_ => false,
}
}
pub fn write<W, R, M>(&self, w: &mut W, scope: &Scope<R, M>) -> fmt::Result
where
W: fmt::Write,
R: Borrow<FluentResource>,
M: MemoizerKind,
{
if let Some(formatter) = &scope.bundle.formatter {
if let Some(val) = formatter(self, &scope.bundle.intls) {
return w.write_str(&val);
}
}
match self {
FluentValue::String(s) => w.write_str(s),
FluentValue::Number(n) => w.write_str(&n.as_string()),
FluentValue::Custom(s) => w.write_str(&scope.bundle.intls.stringify_value(&**s)),
FluentValue::Error => Ok(()),
FluentValue::None => Ok(()),
}
}
pub fn as_string<R: Borrow<FluentResource>, M>(&self, scope: &Scope<R, M>) -> Cow<'source, str>
where
M: MemoizerKind,
{
if let Some(formatter) = &scope.bundle.formatter {
if let Some(val) = formatter(self, &scope.bundle.intls) {
return val.into();
}
}
match self {
FluentValue::String(s) => s.clone(),
FluentValue::Number(n) => n.as_string(),
FluentValue::Custom(s) => scope.bundle.intls.stringify_value(&**s),
FluentValue::Error => "".into(),
FluentValue::None => "".into(),
}
}
}
impl<'source> From<String> for FluentValue<'source> {
fn from(s: String) -> Self {
FluentValue::String(s.into())
}
}
impl<'source> From<&'source str> for FluentValue<'source> {
fn from(s: &'source str) -> Self {
FluentValue::String(s.into())
}
}
impl<'source> From<Cow<'source, str>> for FluentValue<'source> {
fn from(s: Cow<'source, str>) -> Self {
FluentValue::String(s)
}
}
| 32.472906 | 105 | 0.556129 |
7132bea66ea8afa0691025fb1506672a591954e8 | 358 | use aoc::Result;
use aoc_2016_day_18::*;
#[test]
fn part_one_answer() -> Result<()> {
let input = include_str!("../input/input.txt");
assert_eq!(part_one(input)?, input.len());
Ok(())
}
#[test]
fn part_two_answer() -> Result<()> {
let input = include_str!("../input/input.txt");
assert_eq!(part_two(input)?, input.len());
Ok(())
}
| 19.888889 | 51 | 0.592179 |
d7c8a07103e3ce35ae612a269da9ea2daafdbb70 | 22,881 | //! See docs in build/expr/mod.rs
use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::thir::*;
use rustc_ast::InlineAsmOptions;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir as hir;
use rustc_index::vec::Idx;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, CanonicalUserTypeAnnotation};
use std::iter;
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, storing the result into `destination`, which
/// is assumed to be uninitialized.
crate fn expr_into_dest(
&mut self,
destination: Place<'tcx>,
mut block: BasicBlock,
expr: &Expr<'tcx>,
) -> BlockAnd<()> {
debug!("expr_into_dest(destination={:?}, block={:?}, expr={:?})", destination, block, expr);
// since we frequently have to reference `self` from within a
// closure, where `self` would be shadowed, it's easier to
// just use the name `this` uniformly
let this = self;
let expr_span = expr.span;
let source_info = this.source_info(expr_span);
let expr_is_block_or_scope =
matches!(expr.kind, ExprKind::Block { .. } | ExprKind::Scope { .. });
if !expr_is_block_or_scope {
this.block_context.push(BlockFrame::SubExpr);
}
let block_and = match expr.kind {
ExprKind::Scope { region_scope, lint_level, value } => {
let region_scope = (region_scope, source_info);
ensure_sufficient_stack(|| {
this.in_scope(region_scope, lint_level, |this| {
this.expr_into_dest(destination, block, &this.thir[value])
})
})
}
ExprKind::Block { body: ref ast_block } => {
this.ast_block(destination, block, ast_block, source_info)
}
ExprKind::Match { scrutinee, ref arms } => {
this.match_expr(destination, expr_span, block, &this.thir[scrutinee], arms)
}
ExprKind::If { cond, then, else_opt } => {
let place = unpack!(
block = this.as_temp(
block,
Some(this.local_scope()),
&this.thir[cond],
Mutability::Mut
)
);
let operand = Operand::Move(Place::from(place));
let mut then_block = this.cfg.start_new_block();
let mut else_block = this.cfg.start_new_block();
let term = TerminatorKind::if_(this.tcx, operand, then_block, else_block);
this.cfg.terminate(block, source_info, term);
unpack!(
then_block = this.expr_into_dest(destination, then_block, &this.thir[then])
);
else_block = if let Some(else_opt) = else_opt {
unpack!(this.expr_into_dest(destination, else_block, &this.thir[else_opt]))
} else {
                    // The body of an `if` expression without an `else` clause must return `()`, thus
                    // we implicitly generate an `else {}` if it is not specified.
let correct_si = this.source_info(expr_span.shrink_to_hi());
this.cfg.push_assign_unit(else_block, correct_si, destination, this.tcx);
else_block
};
let join_block = this.cfg.start_new_block();
this.cfg.terminate(
then_block,
source_info,
TerminatorKind::Goto { target: join_block },
);
this.cfg.terminate(
else_block,
source_info,
TerminatorKind::Goto { target: join_block },
);
join_block.unit()
}
ExprKind::NeverToAny { source } => {
let source = &this.thir[source];
let is_call =
matches!(source.kind, ExprKind::Call { .. } | ExprKind::InlineAsm { .. });
// (#66975) Source could be a const of type `!`, so has to
// exist in the generated MIR.
unpack!(
block = this.as_temp(block, Some(this.local_scope()), source, Mutability::Mut,)
);
// This is an optimization. If the expression was a call then we already have an
// unreachable block. Don't bother to terminate it and create a new one.
if is_call {
block.unit()
} else {
this.cfg.terminate(block, source_info, TerminatorKind::Unreachable);
let end_block = this.cfg.start_new_block();
end_block.unit()
}
}
ExprKind::LogicalOp { op, lhs, rhs } => {
// And:
//
// [block: If(lhs)] -true-> [else_block: dest = (rhs)]
// | (false)
                //  [shortcircuit_block: dest = false]
//
// Or:
//
// [block: If(lhs)] -false-> [else_block: dest = (rhs)]
// | (true)
                //  [shortcircuit_block: dest = true]
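                //
                // Illustrative example (not from the original source): for
                // `dest = a && b`, `block` tests `a`; `true` continues to
                // `else_block`, which evaluates `b` into `dest`, while `false`
                // jumps to `shortcircuit_block`, which assigns the constant
                // `false` to `dest`. For `||` the two successors are swapped
                // and the shortcircuit constant becomes `true`.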
let (shortcircuit_block, mut else_block, join_block) = (
this.cfg.start_new_block(),
this.cfg.start_new_block(),
this.cfg.start_new_block(),
);
let lhs = unpack!(block = this.as_local_operand(block, &this.thir[lhs]));
let blocks = match op {
LogicalOp::And => (else_block, shortcircuit_block),
LogicalOp::Or => (shortcircuit_block, else_block),
};
let term = TerminatorKind::if_(this.tcx, lhs, blocks.0, blocks.1);
this.cfg.terminate(block, source_info, term);
this.cfg.push_assign_constant(
shortcircuit_block,
source_info,
destination,
Constant {
span: expr_span,
user_ty: None,
literal: match op {
LogicalOp::And => ty::Const::from_bool(this.tcx, false).into(),
LogicalOp::Or => ty::Const::from_bool(this.tcx, true).into(),
},
},
);
this.cfg.goto(shortcircuit_block, source_info, join_block);
let rhs = unpack!(else_block = this.as_local_operand(else_block, &this.thir[rhs]));
this.cfg.push_assign(else_block, source_info, destination, Rvalue::Use(rhs));
this.cfg.goto(else_block, source_info, join_block);
join_block.unit()
}
ExprKind::Loop { body } => {
// [block]
// |
// [loop_block] -> [body_block] -/eval. body/-> [body_block_end]
// | ^ |
// false link | |
// | +-----------------------------------------+
// +-> [diverge_cleanup]
// The false link is required to make sure borrowck considers unwinds through the
// body, even when the exact code in the body cannot unwind
let loop_block = this.cfg.start_new_block();
// Start the loop.
this.cfg.goto(block, source_info, loop_block);
this.in_breakable_scope(Some(loop_block), destination, expr_span, move |this| {
// conduct the test, if necessary
let body_block = this.cfg.start_new_block();
this.cfg.terminate(
loop_block,
source_info,
TerminatorKind::FalseUnwind { real_target: body_block, unwind: None },
);
this.diverge_from(loop_block);
                    // The “return” value of the loop body must always be a unit. We therefore
// introduce a unit temporary as the destination for the loop body.
let tmp = this.get_unit_temp();
// Execute the body, branching back to the test.
let body_block_end =
unpack!(this.expr_into_dest(tmp, body_block, &this.thir[body]));
this.cfg.goto(body_block_end, source_info, loop_block);
// Loops are only exited by `break` expressions.
None
})
}
ExprKind::Call { ty: _, fun, ref args, from_hir_call, fn_span } => {
let fun = unpack!(block = this.as_local_operand(block, &this.thir[fun]));
let args: Vec<_> = args
.into_iter()
.copied()
.map(|arg| unpack!(block = this.as_local_call_operand(block, &this.thir[arg])))
.collect();
let success = this.cfg.start_new_block();
this.record_operands_moved(&args);
debug!("expr_into_dest: fn_span={:?}", fn_span);
this.cfg.terminate(
block,
source_info,
TerminatorKind::Call {
func: fun,
args,
cleanup: None,
// FIXME(varkor): replace this with an uninhabitedness-based check.
// This requires getting access to the current module to call
// `tcx.is_ty_uninhabited_from`, which is currently tricky to do.
destination: if expr.ty.is_never() {
None
} else {
Some((destination, success))
},
from_hir_call,
fn_span,
},
);
this.diverge_from(block);
success.unit()
}
ExprKind::Use { source } => this.expr_into_dest(destination, block, &this.thir[source]),
ExprKind::Borrow { arg, borrow_kind } => {
let arg = &this.thir[arg];
// We don't do this in `as_rvalue` because we use `as_place`
// for borrow expressions, so we cannot create an `RValue` that
// remains valid across user code. `as_rvalue` is usually called
// by this method anyway, so this shouldn't cause too many
// unnecessary temporaries.
let arg_place = match borrow_kind {
BorrowKind::Shared => unpack!(block = this.as_read_only_place(block, arg)),
_ => unpack!(block = this.as_place(block, arg)),
};
let borrow = Rvalue::Ref(this.tcx.lifetimes.re_erased, borrow_kind, arg_place);
this.cfg.push_assign(block, source_info, destination, borrow);
block.unit()
}
ExprKind::AddressOf { mutability, arg } => {
let arg = &this.thir[arg];
let place = match mutability {
hir::Mutability::Not => this.as_read_only_place(block, arg),
hir::Mutability::Mut => this.as_place(block, arg),
};
let address_of = Rvalue::AddressOf(mutability, unpack!(block = place));
this.cfg.push_assign(block, source_info, destination, address_of);
block.unit()
}
ExprKind::Adt { adt_def, variant_index, substs, user_ty, ref fields, ref base } => {
// See the notes for `ExprKind::Array` in `as_rvalue` and for
// `ExprKind::Borrow` above.
let is_union = adt_def.is_union();
let active_field_index = if is_union { Some(fields[0].name.index()) } else { None };
let scope = this.local_scope();
// first process the set of fields that were provided
// (evaluating them in order given by user)
let fields_map: FxHashMap<_, _> = fields
.into_iter()
.map(|f| {
(
f.name,
unpack!(
block = this.as_operand(block, Some(scope), &this.thir[f.expr])
),
)
})
.collect();
let field_names: Vec<_> =
(0..adt_def.variants[variant_index].fields.len()).map(Field::new).collect();
let fields: Vec<_> = if let Some(FruInfo { base, field_types }) = base {
let place_builder =
unpack!(block = this.as_place_builder(block, &this.thir[*base]));
// MIR does not natively support FRU, so for each
// base-supplied field, generate an operand that
// reads it from the base.
iter::zip(field_names, &**field_types)
.map(|(n, ty)| match fields_map.get(&n) {
Some(v) => v.clone(),
None => {
let place_builder = place_builder.clone();
this.consume_by_copy_or_move(
place_builder
.field(n, ty)
.into_place(this.tcx, this.typeck_results),
)
}
})
.collect()
} else {
field_names.iter().filter_map(|n| fields_map.get(n).cloned()).collect()
};
let inferred_ty = expr.ty;
let user_ty = user_ty.map(|ty| {
this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
span: source_info.span,
user_ty: ty,
inferred_ty,
})
});
let adt = box AggregateKind::Adt(
adt_def,
variant_index,
substs,
user_ty,
active_field_index,
);
this.cfg.push_assign(
block,
source_info,
destination,
Rvalue::Aggregate(adt, fields),
);
block.unit()
}
ExprKind::InlineAsm { template, ref operands, options, line_spans } => {
use crate::thir;
use rustc_middle::mir;
let operands = operands
.into_iter()
.map(|op| match *op {
thir::InlineAsmOperand::In { reg, expr } => mir::InlineAsmOperand::In {
reg,
value: unpack!(block = this.as_local_operand(block, &this.thir[expr])),
},
thir::InlineAsmOperand::Out { reg, late, expr } => {
mir::InlineAsmOperand::Out {
reg,
late,
place: expr.map(|expr| {
unpack!(block = this.as_place(block, &this.thir[expr]))
}),
}
}
thir::InlineAsmOperand::InOut { reg, late, expr } => {
let place = unpack!(block = this.as_place(block, &this.thir[expr]));
mir::InlineAsmOperand::InOut {
reg,
late,
// This works because asm operands must be Copy
in_value: Operand::Copy(place),
out_place: Some(place),
}
}
thir::InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
mir::InlineAsmOperand::InOut {
reg,
late,
in_value: unpack!(
block = this.as_local_operand(block, &this.thir[in_expr])
),
out_place: out_expr.map(|out_expr| {
unpack!(block = this.as_place(block, &this.thir[out_expr]))
}),
}
}
thir::InlineAsmOperand::Const { value, span } => {
mir::InlineAsmOperand::Const {
value: box Constant { span, user_ty: None, literal: value.into() },
}
}
thir::InlineAsmOperand::SymFn { expr } => mir::InlineAsmOperand::SymFn {
value: box this.as_constant(&this.thir[expr]),
},
thir::InlineAsmOperand::SymStatic { def_id } => {
mir::InlineAsmOperand::SymStatic { def_id }
}
})
.collect();
let destination = this.cfg.start_new_block();
this.cfg.terminate(
block,
source_info,
TerminatorKind::InlineAsm {
template,
operands,
options,
line_spans,
destination: if options.contains(InlineAsmOptions::NORETURN) {
None
} else {
Some(destination)
},
},
);
destination.unit()
}
// These cases don't actually need a destination
ExprKind::Assign { .. }
| ExprKind::AssignOp { .. }
| ExprKind::LlvmInlineAsm { .. } => {
unpack!(block = this.stmt_expr(block, expr, None));
this.cfg.push_assign_unit(block, source_info, destination, this.tcx);
block.unit()
}
ExprKind::Continue { .. } | ExprKind::Break { .. } | ExprKind::Return { .. } => {
unpack!(block = this.stmt_expr(block, expr, None));
// No assign, as these have type `!`.
block.unit()
}
// Avoid creating a temporary
ExprKind::VarRef { .. }
| ExprKind::UpvarRef { .. }
| ExprKind::PlaceTypeAscription { .. }
| ExprKind::ValueTypeAscription { .. } => {
debug_assert!(Category::of(&expr.kind) == Some(Category::Place));
let place = unpack!(block = this.as_place(block, expr));
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg.push_assign(block, source_info, destination, rvalue);
block.unit()
}
ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => {
debug_assert_eq!(Category::of(&expr.kind), Some(Category::Place));
// Create a "fake" temporary variable so that we check that the
// value is Sized. Usually, this is caught in type checking, but
// in the case of box expr there is no such check.
if !destination.projection.is_empty() {
this.local_decls.push(LocalDecl::new(expr.ty, expr.span));
}
let place = unpack!(block = this.as_place(block, expr));
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg.push_assign(block, source_info, destination, rvalue);
block.unit()
}
ExprKind::Yield { value } => {
let scope = this.local_scope();
let value = unpack!(block = this.as_operand(block, Some(scope), &this.thir[value]));
let resume = this.cfg.start_new_block();
this.cfg.terminate(
block,
source_info,
TerminatorKind::Yield { value, resume, resume_arg: destination, drop: None },
);
this.generator_drop_cleanup(block);
resume.unit()
}
// these are the cases that are more naturally handled by some other mode
ExprKind::Unary { .. }
| ExprKind::Binary { .. }
| ExprKind::Box { .. }
| ExprKind::Cast { .. }
| ExprKind::Pointer { .. }
| ExprKind::Repeat { .. }
| ExprKind::Array { .. }
| ExprKind::Tuple { .. }
| ExprKind::Closure { .. }
| ExprKind::ConstBlock { .. }
| ExprKind::Literal { .. }
| ExprKind::ThreadLocalRef(_)
| ExprKind::StaticRef { .. } => {
debug_assert!(match Category::of(&expr.kind).unwrap() {
// should be handled above
Category::Rvalue(RvalueFunc::Into) => false,
// must be handled above or else we get an
// infinite loop in the builder; see
// e.g., `ExprKind::VarRef` above
Category::Place => false,
_ => true,
});
let rvalue = unpack!(block = this.as_local_rvalue(block, expr));
this.cfg.push_assign(block, source_info, destination, rvalue);
block.unit()
}
};
if !expr_is_block_or_scope {
let popped = this.block_context.pop();
assert!(popped.is_some());
}
block_and
}
}
| 44.864706 | 100 | 0.450068 |
e98bda89193e952588eb491675c44527f3b3d6da | 2,775 | // Copyright 2018 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate exonum;
extern crate exonum_testkit;
#[macro_use]
extern crate pretty_assertions;
use exonum::{
api::node::{
private::NodeInfo,
public::system::{ConnectivityStatus, ConsensusStatus, HealthCheckInfo},
},
helpers::user_agent,
messages::PROTOCOL_MAJOR_VERSION,
};
use exonum_testkit::{ApiKind, TestKitBuilder};
#[test]
fn healthcheck() {
    // This test checks that the endpoint returns the expected result and that it
    // serializes correctly.
// Expected:
// consensus - enabled
    //  connectivity - not connected, because the testkit is unable to emulate nodes properly.
let testkit = TestKitBuilder::validator().with_validators(2).create();
let api = testkit.api();
let info: HealthCheckInfo = api.public(ApiKind::System).get("v1/healthcheck").unwrap();
let expected = HealthCheckInfo {
consensus_status: ConsensusStatus::Enabled,
connectivity: ConnectivityStatus::NotConnected,
};
assert_eq!(info, expected);
}
#[test]
fn user_agent_info() {
let testkit = TestKitBuilder::validator().with_validators(2).create();
let api = testkit.api();
let info: String = api.public(ApiKind::System).get("v1/user_agent").unwrap();
let expected = user_agent::get();
assert_eq!(info, expected);
}
#[test]
fn network() {
let testkit = TestKitBuilder::validator().with_validators(2).create();
let api = testkit.api();
let info: NodeInfo = api.private(ApiKind::System).get("v1/network").unwrap();
assert!(info.core_version.is_some());
assert_eq!(info.protocol_version, PROTOCOL_MAJOR_VERSION);
assert!(info.services.is_empty());
}
#[test]
fn shutdown() {
let testkit = TestKitBuilder::validator().with_validators(2).create();
let api = testkit.api();
assert_eq!(
api.private(ApiKind::System)
.post::<()>("v1/shutdown")
.unwrap(),
()
);
}
#[test]
fn rebroadcast() {
let testkit = TestKitBuilder::validator().with_validators(2).create();
let api = testkit.api();
assert_eq!(
api.private(ApiKind::System)
.post::<()>("v1/rebroadcast")
.unwrap(),
()
)
}
| 29.521277 | 91 | 0.671351 |
1489daa43aaae69c529e162f6c50f5e8f6b51010 | 3,162 | use super::stream;
use crate::{loose, zlib};
use git_object as object;
use miniz_oxide::inflate::decompress_to_vec_zlib;
use object::borrowed;
use quick_error::quick_error;
use smallvec::SmallVec;
use std::{io::Read, path::PathBuf};
quick_error! {
#[derive(Debug)]
pub enum Error {
Decompress(err: zlib::Error) {
display("decompression of object data failed")
from()
source(err)
}
Parse(err: borrowed::Error) {
display("Could not parse object object")
from()
source(err)
}
Io(err: std::io::Error, action: &'static str, path: PathBuf) {
display("Could not {} data at '{}'", action, path.display())
source(err)
}
}
}
impl loose::Object {
/// **Note**: Blobs are loaded into memory and are made available that way.
/// Consider using `stream()` if large Blobs are expected.
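    ///
    /// A minimal usage sketch (illustrative names, assuming `obj` is a `loose::Object`
    /// that was located in an object database):
    ///
    /// ```ignore
    /// let parsed = obj.decode()?;      // decompresses the whole object into memory
    /// // or, for large blobs, read incrementally via `std::io::Read`:
    /// let mut reader = obj.stream()?;
    /// ```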
pub fn decode(&mut self) -> Result<borrowed::Object, Error> {
self.decompress_all()?;
let bytes = &self.decompressed_data[self.header_size..];
Ok(borrowed::Object::from_bytes(self.kind, bytes)?)
}
pub fn stream(&mut self) -> Result<stream::Reader, Error> {
match &self.path {
Some(path) => Ok(stream::Reader::from_read(
self.header_size,
std::fs::File::open(path).map_err(|e| Error::Io(e, "open", path.to_owned()))?,
)),
None => {
self.decompress_all()?;
Ok(stream::Reader::from_data(
self.header_size,
&self.decompressed_data.as_slice(),
))
}
}
}
pub fn decompress_all(&mut self) -> Result<(), Error> {
if self.decompression_complete {
debug_assert!(
self.size + self.header_size == self.decompressed_data.len(),
"when decompression is done, we have stored everything in memory"
);
return Ok(());
}
let total_size = self.header_size + self.size;
if let Some(path) = self.path.take() {
            // NOTE: For now we just re-read everything from the beginning without seeking; our buffer
            // is small, so seeking might be more expensive than just reading everything.
let mut file = std::fs::File::open(&path).map_err(|e| Error::Io(e, "open", path.clone()))?;
let file_size = file
.metadata()
.map_err(|e| Error::Io(e, "read metadata", path.clone()))?
.len() as usize;
let mut buf = Vec::with_capacity(file_size);
file.read_to_end(&mut buf).map_err(|e| Error::Io(e, "read", path))?;
self.compressed_data = SmallVec::from(buf);
}
self.decompressed_data = SmallVec::from(decompress_to_vec_zlib(&self.compressed_data[..]).unwrap());
self.compressed_data = Default::default();
self.decompressed_data.shrink_to_fit();
assert!(self.decompressed_data.len() == total_size);
self.decompression_complete = true;
Ok(())
}
}
| 37.642857 | 108 | 0.560405 |
89e7da3c9ff36ce75513edc63373c9aeeeb0ebe6 | 220 | //!
//! Effects applied to each pixel, for example fog or anti-aliasing.
//!
mod image_effect;
#[doc(inline)]
pub use image_effect::*;
mod fog;
#[doc(inline)]
pub use fog::*;
mod fxaa;
#[doc(inline)]
pub use fxaa::*;
| 13.75 | 68 | 0.654545 |
cc1acf789ac727ae653ed3b86a9ec73e139761a2 | 9,645 | #[doc = "Register `INTPEND` reader"]
pub struct R(crate::R<INTPEND_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<INTPEND_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<INTPEND_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<INTPEND_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Read pending status of interrupt for event HFCLKSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HFCLKSTARTED_A {
#[doc = "0: Read: Not pending"]
NOTPENDING = 0,
#[doc = "1: Read: Pending"]
PENDING = 1,
}
impl From<HFCLKSTARTED_A> for bool {
#[inline(always)]
fn from(variant: HFCLKSTARTED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `HFCLKSTARTED` reader - Read pending status of interrupt for event HFCLKSTARTED"]
pub struct HFCLKSTARTED_R(crate::FieldReader<bool, HFCLKSTARTED_A>);
impl HFCLKSTARTED_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
HFCLKSTARTED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> HFCLKSTARTED_A {
match self.bits {
false => HFCLKSTARTED_A::NOTPENDING,
true => HFCLKSTARTED_A::PENDING,
}
}
#[doc = "Checks if the value of the field is `NOTPENDING`"]
#[inline(always)]
pub fn is_not_pending(&self) -> bool {
**self == HFCLKSTARTED_A::NOTPENDING
}
#[doc = "Checks if the value of the field is `PENDING`"]
#[inline(always)]
pub fn is_pending(&self) -> bool {
**self == HFCLKSTARTED_A::PENDING
}
}
impl core::ops::Deref for HFCLKSTARTED_R {
type Target = crate::FieldReader<bool, HFCLKSTARTED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Read pending status of interrupt for event LFCLKSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LFCLKSTARTED_A {
#[doc = "0: Read: Not pending"]
NOTPENDING = 0,
#[doc = "1: Read: Pending"]
PENDING = 1,
}
impl From<LFCLKSTARTED_A> for bool {
#[inline(always)]
fn from(variant: LFCLKSTARTED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LFCLKSTARTED` reader - Read pending status of interrupt for event LFCLKSTARTED"]
pub struct LFCLKSTARTED_R(crate::FieldReader<bool, LFCLKSTARTED_A>);
impl LFCLKSTARTED_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
LFCLKSTARTED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LFCLKSTARTED_A {
match self.bits {
false => LFCLKSTARTED_A::NOTPENDING,
true => LFCLKSTARTED_A::PENDING,
}
}
#[doc = "Checks if the value of the field is `NOTPENDING`"]
#[inline(always)]
pub fn is_not_pending(&self) -> bool {
**self == LFCLKSTARTED_A::NOTPENDING
}
#[doc = "Checks if the value of the field is `PENDING`"]
#[inline(always)]
pub fn is_pending(&self) -> bool {
**self == LFCLKSTARTED_A::PENDING
}
}
impl core::ops::Deref for LFCLKSTARTED_R {
type Target = crate::FieldReader<bool, LFCLKSTARTED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Read pending status of interrupt for event DONE\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DONE_A {
#[doc = "0: Read: Not pending"]
NOTPENDING = 0,
#[doc = "1: Read: Pending"]
PENDING = 1,
}
impl From<DONE_A> for bool {
#[inline(always)]
fn from(variant: DONE_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `DONE` reader - Read pending status of interrupt for event DONE"]
pub struct DONE_R(crate::FieldReader<bool, DONE_A>);
impl DONE_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
DONE_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DONE_A {
match self.bits {
false => DONE_A::NOTPENDING,
true => DONE_A::PENDING,
}
}
#[doc = "Checks if the value of the field is `NOTPENDING`"]
#[inline(always)]
pub fn is_not_pending(&self) -> bool {
**self == DONE_A::NOTPENDING
}
#[doc = "Checks if the value of the field is `PENDING`"]
#[inline(always)]
pub fn is_pending(&self) -> bool {
**self == DONE_A::PENDING
}
}
impl core::ops::Deref for DONE_R {
type Target = crate::FieldReader<bool, DONE_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Read pending status of interrupt for event HFCLKAUDIOSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HFCLKAUDIOSTARTED_A {
#[doc = "0: Read: Not pending"]
NOTPENDING = 0,
#[doc = "1: Read: Pending"]
PENDING = 1,
}
impl From<HFCLKAUDIOSTARTED_A> for bool {
#[inline(always)]
fn from(variant: HFCLKAUDIOSTARTED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `HFCLKAUDIOSTARTED` reader - Read pending status of interrupt for event HFCLKAUDIOSTARTED"]
pub struct HFCLKAUDIOSTARTED_R(crate::FieldReader<bool, HFCLKAUDIOSTARTED_A>);
impl HFCLKAUDIOSTARTED_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
HFCLKAUDIOSTARTED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> HFCLKAUDIOSTARTED_A {
match self.bits {
false => HFCLKAUDIOSTARTED_A::NOTPENDING,
true => HFCLKAUDIOSTARTED_A::PENDING,
}
}
#[doc = "Checks if the value of the field is `NOTPENDING`"]
#[inline(always)]
pub fn is_not_pending(&self) -> bool {
**self == HFCLKAUDIOSTARTED_A::NOTPENDING
}
#[doc = "Checks if the value of the field is `PENDING`"]
#[inline(always)]
pub fn is_pending(&self) -> bool {
**self == HFCLKAUDIOSTARTED_A::PENDING
}
}
impl core::ops::Deref for HFCLKAUDIOSTARTED_R {
type Target = crate::FieldReader<bool, HFCLKAUDIOSTARTED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Read pending status of interrupt for event HFCLK192MSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HFCLK192MSTARTED_A {
#[doc = "0: Read: Not pending"]
NOTPENDING = 0,
#[doc = "1: Read: Pending"]
PENDING = 1,
}
impl From<HFCLK192MSTARTED_A> for bool {
#[inline(always)]
fn from(variant: HFCLK192MSTARTED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `HFCLK192MSTARTED` reader - Read pending status of interrupt for event HFCLK192MSTARTED"]
pub struct HFCLK192MSTARTED_R(crate::FieldReader<bool, HFCLK192MSTARTED_A>);
impl HFCLK192MSTARTED_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
HFCLK192MSTARTED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> HFCLK192MSTARTED_A {
match self.bits {
false => HFCLK192MSTARTED_A::NOTPENDING,
true => HFCLK192MSTARTED_A::PENDING,
}
}
#[doc = "Checks if the value of the field is `NOTPENDING`"]
#[inline(always)]
pub fn is_not_pending(&self) -> bool {
**self == HFCLK192MSTARTED_A::NOTPENDING
}
#[doc = "Checks if the value of the field is `PENDING`"]
#[inline(always)]
pub fn is_pending(&self) -> bool {
**self == HFCLK192MSTARTED_A::PENDING
}
}
impl core::ops::Deref for HFCLK192MSTARTED_R {
type Target = crate::FieldReader<bool, HFCLK192MSTARTED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl R {
#[doc = "Bit 0 - Read pending status of interrupt for event HFCLKSTARTED"]
#[inline(always)]
pub fn hfclkstarted(&self) -> HFCLKSTARTED_R {
HFCLKSTARTED_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Read pending status of interrupt for event LFCLKSTARTED"]
#[inline(always)]
pub fn lfclkstarted(&self) -> LFCLKSTARTED_R {
LFCLKSTARTED_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 7 - Read pending status of interrupt for event DONE"]
#[inline(always)]
pub fn done(&self) -> DONE_R {
DONE_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 8 - Read pending status of interrupt for event HFCLKAUDIOSTARTED"]
#[inline(always)]
pub fn hfclkaudiostarted(&self) -> HFCLKAUDIOSTARTED_R {
HFCLKAUDIOSTARTED_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - Read pending status of interrupt for event HFCLK192MSTARTED"]
#[inline(always)]
pub fn hfclk192mstarted(&self) -> HFCLK192MSTARTED_R {
HFCLK192MSTARTED_R::new(((self.bits >> 9) & 0x01) != 0)
}
}
#[doc = "Pending interrupts\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intpend](index.html) module"]
pub struct INTPEND_SPEC;
impl crate::RegisterSpec for INTPEND_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [intpend::R](R) reader structure"]
impl crate::Readable for INTPEND_SPEC {
type Reader = R;
}
#[doc = "`reset()` method sets INTPEND to value 0"]
impl crate::Resettable for INTPEND_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
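// A minimal read sketch (illustrative; `clock` stands for the owning peripheral instance
// generated by svd2rust):
//
//     let hfclk_started = clock.intpend.read().hfclkstarted().is_pending();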
| 32.806122 | 228 | 0.622602 |
0aa2d7ccfb9fecdce5ed17c89946865d99c5ff35 | 260 | use actix_identity::Identity;
use actix_web::Responder;
pub async fn root(id: Identity) -> impl Responder {
match id.identity() {
Some(id) => crate::list_events::render_events(id, None).await,
None => crate::signin::render_page(),
}
}
| 26 | 70 | 0.653846 |
61ac4e3cbe4164818923778c170cae230a1921f3 | 1,709 | //! Power control
use crate::{
rcc::{Enable, Rcc},
stm32::PWR,
};
pub enum LowPowerMode {
StopMode1 = 0b000,
StopMode2 = 0b001,
Standby = 0b011,
Shutdown = 0b111,
}
pub enum PowerMode {
Run,
LowPower(LowPowerMode),
UltraLowPower(LowPowerMode),
}
pub struct Power {
rb: PWR,
}
impl Power {
pub fn new(pwr: PWR, rcc: &mut Rcc) -> Self {
PWR::enable(rcc);
Self { rb: pwr }
}
pub fn set_mode(&mut self, mode: PowerMode) {
match mode {
PowerMode::Run => {
self.rb.cr1.modify(|_, w| w.lpr().clear_bit());
while !self.rb.sr2.read().reglpf().bit_is_clear() {}
}
PowerMode::LowPower(sm) => {
self.rb.cr3.modify(|_, w| w.ulpen().clear_bit());
self.rb
.cr1
.modify(|_, w| unsafe { w.lpr().set_bit().lpms().bits(sm as u8) });
while !self.rb.sr2.read().reglps().bit_is_set()
|| !self.rb.sr2.read().reglpf().bit_is_set()
{}
}
PowerMode::UltraLowPower(sm) => {
self.rb.cr3.modify(|_, w| w.ulpen().set_bit());
self.rb
.cr1
.modify(|_, w| unsafe { w.lpr().set_bit().lpms().bits(sm as u8) });
while !self.rb.sr2.read().reglps().bit_is_set()
|| !self.rb.sr2.read().reglpf().bit_is_set()
{}
}
}
}
}
pub trait PowerExt {
fn constrain(self, rcc: &mut Rcc) -> Power;
}
impl PowerExt for PWR {
fn constrain(self, rcc: &mut Rcc) -> Power {
Power::new(self, rcc)
}
}
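// A minimal usage sketch (illustrative names, assuming `dp.PWR` is the PWR peripheral and
// `rcc` is the already-constrained clock configuration):
//
//     let mut power = dp.PWR.constrain(&mut rcc);
//     power.set_mode(PowerMode::LowPower(LowPowerMode::StopMode1));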
| 25.132353 | 87 | 0.471621 |
8a3633870cfc2f69b48870cf7b8f3da961edcb31 | 4,426 | use log4rs::{
append::{
console,
rolling_file::{self, policy},
},
config,
encode::pattern::PatternEncoder,
};
use crate::{cmd::Config, error::Result};
const MB_SIZE: u64 = 1024 * 1024; // 1 MB
#[macro_export]
macro_rules! error {
(target: $target:expr, $($arg:tt)+) => (
log::error!(target: $target, "{}", format!($($arg)*));
);
($($arg:tt)*) => (
log::error!(target: "relay", "{}", format!($($arg)*));
)
}
#[macro_export]
macro_rules! warn {
(target: $target:expr, $($arg:tt)+) => (
log::warn!(target: $target, "{}", format!($($arg)*));
);
($($arg:tt)*) => (
log::warn!(target: "relay", "{}", format!($($arg)*));
)
}
#[macro_export]
macro_rules! info {
(target: $target:expr, $($arg:tt)+) => (
log::info!(target: $target, "{}", format!($($arg)*));
);
($($arg:tt)*) => (
log::info!(target: "relay", "{}", format!($($arg)*));
)
}
#[macro_export]
macro_rules! debug {
(target: $target:expr, $($arg:tt)+) => (
log::debug!(target: $target, "{}", format!($($arg)*));
);
($($arg:tt)*) => (
log::debug!(target: "relay", "{}", format!($($arg)*));
)
}
#[macro_export]
macro_rules! trace {
(target: $target:expr, $($arg:tt)+) => (
log::trace!(target: $target, "{}", format!($($arg)*));
);
($($arg:tt)*) => (
log::trace!(target: "relay", "{}", format!($($arg)*));
)
}
pub fn init(conf: &Config) -> Result<()> {
init_log4rs_with_config(&conf)?;
version_info();
info!(
"Log: [level: {}, path: {:?}, roll size: {} MB, roll count: {}]",
conf.log_level, conf.log_path, conf.log_roll_size, conf.log_roll_count
);
// info!(
// "Bitcoin: [{}://{}:{}]",
// conf.btc_url.scheme(),
// conf.btc_url.host_str().unwrap(),
// conf.btc_url.port().unwrap()
// );
// info!("Realyer: [{}]", conf.btc_url);
info!(
"Bitcoin Block Waiting Interval: [{} seconds]",
conf.btc_block_interval
);
info!("Realyer: [{}]", conf.patra_url);
info!("Realyer Push Header Only: [{}]", conf.only_header);
Ok(())
}
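// A minimal call sketch (illustrative; `conf` is the parsed `Config`, and the crate-level
// `info!` macro defined above is in scope):
//
//     init(&conf)?;
//     info!("relayer started");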
fn init_log4rs_with_config(conf: &Config) -> Result<()> {
let pattern = "{d(%Y-%m-%d %H:%M:%S)} {h({l})} - {m}\n";
let console = console::ConsoleAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern)))
.build();
let trigger = policy::compound::trigger::size::SizeTrigger::new(conf.log_roll_size * MB_SIZE);
let roll_pattern = format!("{}.{{}}.gz", conf.log_path.display());
let roll = policy::compound::roll::fixed_window::FixedWindowRoller::builder()
.build(roll_pattern.as_str(), conf.log_roll_count)
.expect("Building fixed window roller shouldn't be fail");
let policy = policy::compound::CompoundPolicy::new(Box::new(trigger), Box::new(roll));
let roll_file = rolling_file::RollingFileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern)))
.build(&conf.log_path, Box::new(policy))?;
let log_config_builder = config::Config::builder()
.appender(config::Appender::builder().build("console", Box::new(console)))
.appender(config::Appender::builder().build("roll", Box::new(roll_file)))
.logger(
config::Logger::builder()
.appender("console")
.appender("roll")
.build("relay", conf.log_level),
);
let root = config::Root::builder().build(conf.log_level);
let log_config = log_config_builder
.build(root)
.expect("Building log config shouldn't be fail");
log4rs::init_config(log_config).expect("Initializing log config shouldn't be fail");
Ok(())
}
fn version_info() {
info!("================================================================================");
info!(
"Release Version: {}",
option_env!("CARGO_PKG_VERSION").unwrap_or("Unknown")
);
info!(
"Git Commit Hash: {}",
option_env!("BUILD_GIT_HASH").unwrap_or("Unknown")
);
info!(
"Git Commit Branch: {}",
option_env!("BUILD_GIT_BRANCH").unwrap_or("Unknown")
);
info!(
"Rust Version: {}",
option_env!("BUILD_RUSTC_VERSION").unwrap_or("Unknown")
);
info!("================================================================================");
}
| 30.951049 | 98 | 0.523498 |
3a7f47a2aaca915f66977ed206701d5c087b9361 | 7,998 | use super::{InterpCx, Machine, MemoryKind, FnVal};
use rustc::ty::{self, Ty, Instance, TypeFoldable};
use rustc::ty::layout::{Size, Align, LayoutOf, HasDataLayout};
use rustc::mir::interpret::{Scalar, Pointer, InterpResult, PointerArithmetic,};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Creates a dynamic vtable for the given type and vtable origin. This is used only for
/// objects.
///
/// The `trait_ref` encodes the erased self type. Hence, if we are
/// making an object `Foo<Trait>` from a value of type `Foo<T>`, then
/// `trait_ref` would map `T: Trait`.
pub fn get_vtable(
&mut self,
ty: Ty<'tcx>,
poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
trace!("get_vtable(trait_ref={:?})", poly_trait_ref);
let (ty, poly_trait_ref) = self.tcx.erase_regions(&(ty, poly_trait_ref));
// All vtables must be monomorphic, bail out otherwise.
if ty.needs_subst() || poly_trait_ref.needs_subst() {
throw_inval!(TooGeneric);
}
if let Some(&vtable) = self.vtables.get(&(ty, poly_trait_ref)) {
// This means we guarantee that there are no duplicate vtables, we will
// always use the same vtable for the same (Type, Trait) combination.
// That's not what happens in rustc, but emulating per-crate deduplication
// does not sound like it actually makes anything any better.
return Ok(vtable);
}
let methods = if let Some(poly_trait_ref) = poly_trait_ref {
let trait_ref = poly_trait_ref.with_self_ty(*self.tcx, ty);
let trait_ref = self.tcx.erase_regions(&trait_ref);
self.tcx.vtable_methods(trait_ref)
} else {
&[]
};
let layout = self.layout_of(ty)?;
assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
let size = layout.size.bytes();
let align = layout.align.abi.bytes();
let ptr_size = self.pointer_size();
let ptr_align = self.tcx.data_layout.pointer_align.abi;
// /////////////////////////////////////////////////////////////////////////////////////////
// If you touch this code, be sure to also make the corresponding changes to
// `get_vtable` in `rust_codegen_llvm/meth.rs`.
// /////////////////////////////////////////////////////////////////////////////////////////
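        // The resulting vtable is laid out as one pointer-sized slot per entry:
        //   [ drop_in_place | size | align | method[0] | method[1] | ... ]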
let vtable = self.memory.allocate(
ptr_size * (3 + methods.len() as u64),
ptr_align,
MemoryKind::Vtable,
);
let tcx = &*self.tcx;
let drop = Instance::resolve_drop_in_place(*tcx, ty);
let drop = self.memory.create_fn_alloc(FnVal::Instance(drop));
// No need to do any alignment checks on the memory accesses below, because we know the
// allocation is correctly aligned as we created it above. Also we're only offsetting by
// multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
let vtable_alloc = self.memory.get_raw_mut(vtable.alloc_id)?;
vtable_alloc.write_ptr_sized(tcx, vtable, Scalar::Ptr(drop).into())?;
let size_ptr = vtable.offset(ptr_size, tcx)?;
vtable_alloc.write_ptr_sized(tcx, size_ptr, Scalar::from_uint(size, ptr_size).into())?;
let align_ptr = vtable.offset(ptr_size * 2, tcx)?;
vtable_alloc.write_ptr_sized(tcx, align_ptr, Scalar::from_uint(align, ptr_size).into())?;
for (i, method) in methods.iter().enumerate() {
if let Some((def_id, substs)) = *method {
// resolve for vtable: insert shims where needed
let instance = ty::Instance::resolve_for_vtable(
*tcx,
self.param_env,
def_id,
substs,
).ok_or_else(|| err_inval!(TooGeneric))?;
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
                // We cannot use `vtable_alloc` as we are creating fn ptrs in this loop.
let method_ptr = vtable.offset(ptr_size * (3 + i as u64), tcx)?;
self.memory.get_raw_mut(vtable.alloc_id)?
.write_ptr_sized(tcx, method_ptr, Scalar::Ptr(fn_ptr).into())?;
}
}
self.memory.mark_immutable(vtable.alloc_id)?;
assert!(self.vtables.insert((ty, poly_trait_ref), vtable).is_none());
Ok(vtable)
}
/// Resolves the function at the specified slot in the provided
/// vtable. An index of '0' corresponds to the first method
/// declared in the trait of the provided vtable.
pub fn get_vtable_slot(
&self,
vtable: Scalar<M::PointerTag>,
idx: usize
) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
let ptr_size = self.pointer_size();
// Skip over the 'drop_ptr', 'size', and 'align' fields.
let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
let vtable_slot = self.memory.check_ptr_access(
vtable_slot,
ptr_size,
self.tcx.data_layout.pointer_align.abi,
)?.expect("cannot be a ZST");
let fn_ptr = self.memory.get_raw(vtable_slot.alloc_id)?
.read_ptr_sized(self, vtable_slot)?.not_undef()?;
Ok(self.memory.get_fn(fn_ptr)?)
}
/// Returns the drop fn instance as well as the actual dynamic type.
pub fn read_drop_type_from_vtable(
&self,
vtable: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, (ty::Instance<'tcx>, Ty<'tcx>)> {
// We don't care about the pointee type; we just want a pointer.
let vtable = self.memory.check_ptr_access(
vtable,
self.tcx.data_layout.pointer_size,
self.tcx.data_layout.pointer_align.abi,
)?.expect("cannot be a ZST");
let drop_fn = self.memory
.get_raw(vtable.alloc_id)?
.read_ptr_sized(self, vtable)?
.not_undef()?;
// We *need* an instance here, no other kind of function value, to be able
// to determine the type.
let drop_instance = self.memory.get_fn(drop_fn)?.as_instance()?;
trace!("Found drop fn: {:?}", drop_instance);
let fn_sig = drop_instance.ty(*self.tcx).fn_sig(*self.tcx);
let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.param_env, &fn_sig);
// The drop function takes `*mut T` where `T` is the type being dropped, so get that.
let ty = fn_sig.inputs()[0].builtin_deref(true).unwrap().ty;
Ok((drop_instance, ty))
}
pub fn read_size_and_align_from_vtable(
&self,
vtable: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, (Size, Align)> {
let pointer_size = self.pointer_size();
// We check for `size = 3 * ptr_size`, which covers the drop fn (unused here),
// the size, and the align (which we read below).
let vtable = self.memory.check_ptr_access(
vtable,
3*pointer_size,
self.tcx.data_layout.pointer_align.abi,
)?.expect("cannot be a ZST");
let alloc = self.memory.get_raw(vtable.alloc_id)?;
let size = alloc.read_ptr_sized(
self,
vtable.offset(pointer_size, self)?
)?.not_undef()?;
let size = self.force_bits(size, pointer_size)? as u64;
let align = alloc.read_ptr_sized(
self,
vtable.offset(pointer_size * 2, self)?,
)?.not_undef()?;
let align = self.force_bits(align, pointer_size)? as u64;
if size >= self.tcx.data_layout().obj_size_bound() {
throw_ub_format!("invalid vtable: \
size is bigger than largest supported object");
}
Ok((Size::from_bytes(size), Align::from_bytes(align).unwrap()))
}
}
| 44.932584 | 100 | 0.590398 |
e5b0f5eb319b04b401ccb0c7f98005e8e78c2cac | 2,104 | use crate::bom::Manufacturer;
use crate::capacitors::{
make_mlcc, map_pf_to_label, map_three_digit_cap_to_pf, CapacitorTolerance, DielectricCode,
};
use crate::circuit::{CircuitNode};
use crate::smd::SizeCode;
fn map_part_number_to_size(part: &str) -> SizeCode {
(&part[0..=3]).parse().unwrap()
}
fn map_part_number_to_voltage(part: &str) -> f64 {
match &part[4..5] {
"4" => 4.0,
"6" => 6.3,
"Z" => 10.0,
"Y" => 16.0,
"3" => 25.0,
"5" => 50.0,
"1" => 100.0,
"2" => 200.0,
"7" => 500.0,
_ => panic!("No working voltage for {}", part),
}
}
fn map_part_number_to_dielectric(part: &str) -> DielectricCode {
match &part[5..6] {
"C" => DielectricCode::X7R,
_ => panic!("Unknown dielectric code for AVX {}", part),
}
}
fn map_part_number_to_pf(part: &str) -> f64 {
map_three_digit_cap_to_pf(&part[6..9])
}
fn map_part_number_to_tolerance(part: &str) -> CapacitorTolerance {
match &part[9..10] {
"J" => CapacitorTolerance::FivePercent,
"K" => CapacitorTolerance::TenPercent,
"M" => CapacitorTolerance::TwentyPercent,
_ => panic!("Unknon capacitor tolerance indicator {}", part),
}
}
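/// Builds a capacitor `CircuitNode` from an AVX MLCC part number.
///
/// Hypothetical decode for an illustrative part number such as `06035C104K...`:
/// `0603` is the size code, `5` maps to 50 V, `C` to X7R, `104` to 100 nF, and `K` to
/// a 10% tolerance.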
pub fn make_avx_capacitor(part_number: &str) -> CircuitNode {
let size = map_part_number_to_size(part_number);
let tolerance = map_part_number_to_tolerance(part_number);
let value_pf = map_part_number_to_pf(part_number);
let value = map_pf_to_label(value_pf);
let dielectric = map_part_number_to_dielectric(part_number);
let voltage = map_part_number_to_voltage(part_number);
let label = format!("{} {} {}V {}", value, tolerance, voltage, dielectric);
let manufacturer = Manufacturer {
name: "AVX".to_string(),
part_number: part_number.to_owned(),
};
let description = format!("AVX X7R Series MLCC Capacitor SMD {} {}", size, label);
make_mlcc(
label,
manufacturer,
description,
size,
value_pf,
dielectric,
voltage,
tolerance,
)
}
| 29.633803 | 94 | 0.611692 |
39db543c6a57588ebfb04b59cac743cdabb37799 | 3,312 | extern crate core;
use core::exec::call::Call;
use core::banana::observer::ICallObserver;
use core::state::state::StateInfo;
pub trait IBananaFeedback {
fn add_node(&mut self, node: &[u8]);
}
use std::{
// rc::Rc,
sync::{
// RwLock,
RwLockWriteGuard,
},
};
#[allow(improper_ctypes)]
extern "C" {
fn banana_feedback<'a>() -> RwLockWriteGuard<'a, Vec<Vec<u8>>>;
}
struct Bijon {
// feedback: Rc<RwLock<dyn IBananaFeedback>>,
}
static mut HITC: u32 = 0;
impl Bijon {
fn new(
// feedback: Rc<RwLock<dyn IBananaFeedback>>
) -> Self {
unsafe { HITC = 0 }
unsafe { banana_feedback().push(vec![0x42]) }
unsafe { banana_feedback().push(vec![0x42]) }
        // ensure there is at least one input in the corpus
Self { } // Bijon { feedback }
}
fn feedback(&self, state: &StateInfo, call: &mut Call) -> Vec<u8> {
let mut node = vec![];
if 0x11u64 != call.id().into()
&& 0x100u64 == state.id.into()
        { return node } // for mario we only track moves
node.extend(u64::from(state.id).to_le_bytes().to_vec());
if !call.ok() {
return node
        } // those with success get additional feedback
if 0x100u64 == state.id.into() {
return node
        } // for mario alone, this is where we stop giving more feedback
        node.extend(state.fd.data()); // normally you don't want to do this: it is runtime data that differs every turn
if 0x11u64 != call.id().into() {
return node
        } // position is logged only for successful moves -> most feedback granted
let pos = call.args_view(1).data_const_unsafe::<u32>();
let pos = if 0x100u64 == state.id.into() {
            pos / 25 // for mario we log only a few big steps achieved
} else { pos / 5 };
node.extend(pos.to_le_bytes());
return node
}
#[allow(dead_code)]
fn pos_feedback(&self, state: &StateInfo, call: &mut Call) -> Vec<u8> {
        // position feedback: opt-out, just testing for feedback
if 0x11u64 != call.id().into() {
return vec![]
        } // position is logged only for successful moves -> most feedback granted
let pos = call.args_view(1).data_const_unsafe::<u32>();
if 0x100u64 == state.id.into() {
pos / 25
} else {
pos / 5
}.to_le_bytes().to_vec()
}
#[allow(dead_code)]
fn lop_feedback(&self) -> Vec<u8> {
        // length-of-poc feedback: opt-out, just testing for feedback
unsafe{
HITC += 1;
HITC.to_le_bytes().to_vec()
}
}
}
impl ICallObserver for Bijon {
fn notify(&self, _: &StateInfo, _: &mut Call) -> bool {
true
}
fn aftermath(&self, state: &StateInfo, call: &mut Call) {
//println!("[B-IJON] pos_x, pos_y {:?}", call.args_view(1).data_const_unsafe::<(u32, u32)>());
let node = self.feedback(state, call);
// let node = self.pos_feedback(state, call);
// let node = self.lop_feedback();
if 0 != node.len() {
unsafe { banana_feedback().push(node) }
}
}
}
pub fn observer(
// feedback: Rc<RwLock<dyn IBananaFeedback>>
) -> Box<dyn ICallObserver>
{
Box::new(Bijon::new())//feedback))
}
| 26.285714 | 102 | 0.56099 |
de616986d0832fb31070a7da6176fbeb2b7f03d1 | 4,399 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::PMD2INV {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct CMP1MDR {
bits: bool,
}
impl CMP1MDR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct CMP2MDR {
bits: bool,
}
impl CMP2MDR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _CMP1MDW<'a> {
w: &'a mut W,
}
impl<'a> _CMP1MDW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CMP2MDW<'a> {
w: &'a mut W,
}
impl<'a> _CMP2MDW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0"]
#[inline]
pub fn cmp1md(&self) -> CMP1MDR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CMP1MDR { bits }
}
#[doc = "Bit 1"]
#[inline]
pub fn cmp2md(&self) -> CMP2MDR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CMP2MDR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0"]
#[inline]
pub fn cmp1md(&mut self) -> _CMP1MDW {
_CMP1MDW { w: self }
}
#[doc = "Bit 1"]
#[inline]
pub fn cmp2md(&mut self) -> _CMP2MDW {
_CMP2MDW { w: self }
}
}
| 24.038251 | 59 | 0.489202 |
abf33a703ffc112b334ddebef0e72c92a3ef5c6a | 9,664 | //! AWS KMS-based Signer
use ethers_core::{
k256::ecdsa::{Error as K256Error, Signature as KSig, VerifyingKey},
types::{
transaction::{eip2718::TypedTransaction, eip712::Eip712},
Address, Signature as EthSig, H256,
},
utils::hash_message,
};
use rusoto_core::RusotoError;
use rusoto_kms::{
GetPublicKeyError, GetPublicKeyRequest, Kms, KmsClient, SignError, SignRequest, SignResponse,
};
use tracing::{debug, instrument, trace};
mod utils;
use utils::{apply_eip155, rsig_to_ethsig, verifying_key_to_address};
/// An ethers Signer that uses keys held in Amazon AWS KMS.
///
/// The AWS Signer passes signing requests to the cloud service. AWS KMS keys
/// are identified by a UUID, the `key_id`.
///
/// Because the public key is unknown, we retrieve it on instantiation of the
/// signer. This means that the new function is `async` and must be called
/// within some runtime.
///
/// ```compile_fail
/// use rusoto_core::Client;
/// use rusoto_kms::{Kms, KmsClient};
///
/// use ethers_signers::Signer;
///
/// let client = Client::new_with(
/// EnvironmentProvider::default(),
/// HttpClient::new().unwrap()
/// );
/// let kms_client = KmsClient::new_with_client(client, Region::UsWest1);
/// let key_id = "...";
/// let chain_id = 1;
///
/// let signer = AwsSigner::new(kms_client, key_id, chain_id).await?;
/// let sig = signer.sign_message(H256::zero()).await?;
/// ```
#[derive(Clone)]
pub struct AwsSigner<'a> {
kms: &'a rusoto_kms::KmsClient,
chain_id: u64,
key_id: String,
pubkey: VerifyingKey,
address: Address,
}
impl<'a> std::fmt::Debug for AwsSigner<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("AwsSigner")
.field("key_id", &self.key_id)
.field("chain_id", &self.chain_id)
.field("pubkey", &hex::encode(self.pubkey.to_bytes()))
.field("address", &self.address)
.finish()
}
}
impl<'a> std::fmt::Display for AwsSigner<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"AwsSigner {{ address: {}, chain_id: {}, key_id: {} }}",
self.address, self.chain_id, self.key_id
)
}
}
/// Errors produced by the AwsSigner
#[derive(thiserror::Error, Debug)]
pub enum AwsSignerError {
#[error("{0}")]
SignError(#[from] RusotoError<SignError>),
#[error("{0}")]
GetPublicKeyError(#[from] RusotoError<GetPublicKeyError>),
#[error("{0}")]
K256(#[from] K256Error),
#[error("{0}")]
Spki(spki::Error),
#[error("{0}")]
Other(String),
#[error(transparent)]
/// Error when converting from a hex string
HexError(#[from] hex::FromHexError),
/// Error type from Eip712Error message
#[error("error encoding eip712 struct: {0:?}")]
Eip712Error(String),
}
impl From<String> for AwsSignerError {
fn from(s: String) -> Self {
Self::Other(s)
}
}
impl From<spki::Error> for AwsSignerError {
fn from(e: spki::Error) -> Self {
Self::Spki(e)
}
}
#[instrument(err, skip(kms, key_id), fields(key_id = %key_id.as_ref()))]
async fn request_get_pubkey<T>(
kms: &KmsClient,
key_id: T,
) -> Result<rusoto_kms::GetPublicKeyResponse, RusotoError<GetPublicKeyError>>
where
T: AsRef<str>,
{
debug!("Dispatching get_public_key");
let req = GetPublicKeyRequest { grant_tokens: None, key_id: key_id.as_ref().to_owned() };
trace!("{:?}", &req);
let resp = kms.get_public_key(req).await;
trace!("{:?}", &resp);
resp
}
#[instrument(err, skip(kms, digest, key_id), fields(digest = %hex::encode(&digest), key_id = %key_id.as_ref()))]
async fn request_sign_digest<T>(
kms: &KmsClient,
key_id: T,
digest: [u8; 32],
) -> Result<SignResponse, RusotoError<SignError>>
where
T: AsRef<str>,
{
debug!("Dispatching sign");
let req = SignRequest {
grant_tokens: None,
key_id: key_id.as_ref().to_owned(),
message: digest.to_vec().into(),
message_type: Some("DIGEST".to_owned()),
signing_algorithm: "ECDSA_SHA_256".to_owned(),
};
trace!("{:?}", &req);
let resp = kms.sign(req).await;
trace!("{:?}", &resp);
resp
}
impl<'a> AwsSigner<'a> {
/// Instantiate a new signer from an existing `KmsClient` and Key ID.
///
/// This function retrieves the public key from AWS and calculates the
    /// Ethereum address. It is therefore `async`.
#[instrument(err, skip(kms, key_id, chain_id), fields(key_id = %key_id.as_ref()))]
pub async fn new<T>(
kms: &'a KmsClient,
key_id: T,
chain_id: u64,
) -> Result<AwsSigner<'a>, AwsSignerError>
where
T: AsRef<str>,
{
let pubkey = request_get_pubkey(kms, &key_id).await.map(utils::decode_pubkey)??;
let address = verifying_key_to_address(&pubkey);
debug!(
"Instantiated AWS signer with pubkey 0x{} and address 0x{}",
hex::encode(&pubkey.to_bytes()),
hex::encode(&address)
);
Ok(Self { kms, chain_id, key_id: key_id.as_ref().to_owned(), pubkey, address })
}
/// Fetch the pubkey associated with a key id
pub async fn get_pubkey_for_key<T>(&self, key_id: T) -> Result<VerifyingKey, AwsSignerError>
where
T: AsRef<str>,
{
Ok(request_get_pubkey(self.kms, key_id).await.map(utils::decode_pubkey)??)
}
/// Fetch the pubkey associated with this signer's key ID
pub async fn get_pubkey(&self) -> Result<VerifyingKey, AwsSignerError> {
self.get_pubkey_for_key(&self.key_id).await
}
/// Sign a digest with the key associated with a key id
pub async fn sign_digest_with_key<T>(
&self,
key_id: T,
digest: [u8; 32],
) -> Result<KSig, AwsSignerError>
where
T: AsRef<str>,
{
Ok(request_sign_digest(self.kms, key_id, digest).await.map(utils::decode_signature)??)
}
/// Sign a digest with this signer's key
pub async fn sign_digest(&self, digest: [u8; 32]) -> Result<KSig, AwsSignerError> {
self.sign_digest_with_key(self.key_id.clone(), digest).await
}
/// Sign a digest with this signer's key and add the eip155 `v` value
/// corresponding to this signer's chain_id
#[instrument(err, skip(digest), fields(digest = %hex::encode(&digest)))]
async fn sign_digest_with_eip155(&self, digest: H256) -> Result<EthSig, AwsSignerError> {
let sig = self.sign_digest(digest.into()).await?;
let sig = utils::rsig_from_digest_bytes_trial_recovery(&sig, digest.into(), &self.pubkey);
let mut sig = rsig_to_ethsig(&sig);
apply_eip155(&mut sig, self.chain_id);
Ok(sig)
}
}
#[async_trait::async_trait]
impl<'a> super::Signer for AwsSigner<'a> {
type Error = AwsSignerError;
#[instrument(err, skip(message))]
async fn sign_message<S: Send + Sync + AsRef<[u8]>>(
&self,
message: S,
) -> Result<EthSig, Self::Error> {
let message = message.as_ref();
let message_hash = hash_message(message);
trace!("{:?}", message_hash);
trace!("{:?}", message);
self.sign_digest_with_eip155(message_hash).await
}
#[instrument(err)]
async fn sign_transaction(&self, tx: &TypedTransaction) -> Result<EthSig, Self::Error> {
let sighash = tx.sighash(self.chain_id);
self.sign_digest_with_eip155(sighash).await
}
async fn sign_typed_data<T: Eip712 + Send + Sync>(
&self,
payload: &T,
) -> Result<EthSig, Self::Error> {
let hash = payload.encode_eip712().map_err(|e| Self::Error::Eip712Error(e.to_string()))?;
let digest = self.sign_digest_with_eip155(hash.into()).await?;
Ok(digest)
}
fn address(&self) -> Address {
self.address
}
/// Returns the signer's chain id
fn chain_id(&self) -> u64 {
self.chain_id
}
/// Sets the signer's chain id
fn with_chain_id<T: Into<u64>>(mut self, chain_id: T) -> Self {
self.chain_id = chain_id.into();
self
}
}
#[cfg(test)]
mod tests {
use rusoto_core::{
credential::{EnvironmentProvider, StaticProvider},
Client, HttpClient, Region,
};
use tracing::metadata::LevelFilter;
use super::*;
use crate::Signer;
#[allow(dead_code)]
fn setup_tracing() {
tracing_subscriber::fmt().with_max_level(LevelFilter::DEBUG).try_init().unwrap();
}
#[allow(dead_code)]
fn static_client() -> KmsClient {
let access_key = "".to_owned();
let secret_access_key = "".to_owned();
let client = Client::new_with(
StaticProvider::new(access_key, secret_access_key, None, None),
HttpClient::new().unwrap(),
);
KmsClient::new_with_client(client, Region::UsWest1)
}
#[allow(dead_code)]
fn env_client() -> KmsClient {
let client = Client::new_with(EnvironmentProvider::default(), HttpClient::new().unwrap());
KmsClient::new_with_client(client, Region::UsWest1)
}
#[tokio::test]
async fn it_signs_messages() {
let chain_id = 1;
let key_id = match std::env::var("AWS_KEY_ID") {
Ok(id) => id,
_ => return,
};
setup_tracing();
let client = env_client();
let signer = AwsSigner::new(&client, key_id, chain_id).await.unwrap();
let message = vec![0, 1, 2, 3];
let sig = signer.sign_message(&message).await.unwrap();
sig.verify(message, signer.address).expect("valid sig");
}
}
| 30.2 | 112 | 0.612997 |
b9695e388b45e15c2b926a8b1c2dae24d629f436 | 5,483 | // Copyright 2020-2021 The Datafuse Authors.
//
// SPDX-License-Identifier: Apache-2.0.
use std::fmt::Debug;
use common_exception::ErrorCode;
use common_exception::Result;
use crate::*;
// Read more:
// https://www.cockroachlabs.com/blog/vectorized-hash-joiner/
// http://myeyesareblind.com/2017/02/06/Combine-hash-values/
pub trait GroupHash: Debug {
/// Compute the hash for all values in the array.
fn group_hash(&self, _ptr: usize, _step: usize) -> Result<()> {
Err(ErrorCode::BadDataValueType(format!(
"Unsupported apply fn group_hash operation for {:?}",
self,
)))
}
}
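// A minimal usage sketch (illustrative; the caller lays out one fixed-width key per row and
// lets each column write its bytes at its own offset, stepping by the full key width):
//
//     let mut keys = vec![0u8; rows * key_width];
//     col_a.group_hash(keys.as_mut_ptr() as usize, key_width)?;
//     col_b.group_hash(keys.as_mut_ptr() as usize + col_a_width, key_width)?;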
impl GroupHash for DFUInt8Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const u8, ptr as *mut u8, 1);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFInt8Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const i8 as *const u8, ptr as *mut u8, 1);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFUInt16Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const u16 as *const u8, ptr as *mut u8, 2);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFInt16Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const i16 as *const u8, ptr as *mut u8, 2);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFInt32Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const i32 as *const u8, ptr as *mut u8, 4);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFUInt32Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const u32 as *const u8, ptr as *mut u8, 4);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFBooleanArray {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
let rows = self.len();
unsafe {
for i in 0..rows {
let value = array.value_unchecked(i) as u8;
std::ptr::copy_nonoverlapping(&value as *const u8, ptr as *mut u8, 1);
ptr += step;
}
}
Ok(())
}
}
impl GroupHash for DFFloat32Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
let bits = value.to_bits();
std::ptr::copy_nonoverlapping(&bits as *const u32 as *const u8, ptr as *mut u8, 4);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFFloat64Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
let bits = value.to_bits();
std::ptr::copy_nonoverlapping(&bits as *const u64 as *mut u8, ptr as *mut u8, 8);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFUInt64Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const u64 as *mut u8, ptr as *mut u8, 8);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFInt64Array {
fn group_hash(&self, ptr: usize, step: usize) -> Result<()> {
let mut ptr = ptr;
let array = self.downcast_ref();
for value in array.values().as_slice() {
unsafe {
std::ptr::copy_nonoverlapping(value as *const i64 as *mut u8, ptr as *mut u8, 8);
}
ptr += step;
}
Ok(())
}
}
impl GroupHash for DFListArray {}
impl GroupHash for DFUtf8Array {}
impl GroupHash for DFBinaryArray {}
impl GroupHash for DFNullArray {}
impl GroupHash for DFStructArray {}
| 26.616505 | 99 | 0.52526 |
1cf189395af6a869d6b1d843f42cba9f39135ba7 | 1,377 | use std::path::PathBuf;
use std::env;
use cmake;
use bindgen;
fn compile_lib() {
let dst = cmake::Config::new("cmake")
.build_target("")
.define("SECP256K1_BUILD_TEST", "OFF")
.define("SECP256K1_ENABLE_MODULE_ECDH", "ON")
.build();
println!("cargo:rustc-link-search=native={}", dst.join("build/secp256k1/").display());
println!("cargo:rustc-link-lib=static=secp256k1");
}
fn generate_bindings(out_path: &PathBuf) {
let headers = [
"cmake/secp256k1/include/secp256k1_ecdh.h",
"cmake/secp256k1/include/secp256k1_multiset.h",
"cmake/secp256k1/include/secp256k1_recovery.h",
"cmake/secp256k1/include/secp256k1_schnorr.h",
"cmake/secp256k1/include/secp256k1.h",
];
let bindings = headers.iter()
.fold(bindgen::Builder::default(), |b, h| {
b.header(h.to_string())
})
.opaque_type("secp256k1_context_struct")
.opaque_type("secp256k1_pubkey")
.opaque_type("secp256k1_ecdsa_signature")
.opaque_type("secp256k1_multiset")
.opaque_type("secp256k1_ecdsa_recoverable_signature")
.generate()
.unwrap();
bindings.write_to_file(out_path.join("bindings.rs")).unwrap();
}
fn main() {
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
compile_lib();
generate_bindings(&out_path);
}
| 28.6875 | 90 | 0.639797 |
ff0e549a0847486411f8dec59ab9a58f9a490772 | 14,868 | //! Periodic Interrupt Timer (PIT)
use crate::ccm::{perclk, ticks, Divider, Frequency, TicksError};
use crate::ral;
use core::marker::PhantomData;
use embedded_hal::timer::{CountDown, Periodic};
/// An unclocked periodic interrupt timer module
///
/// In order to activate the PIT, we must pass in the
/// configured object returned from the CCM's perclk module.
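///
/// A minimal activation sketch (illustrative only; how the `perclk::Configured`
/// handle and the `unclocked_pit` value are obtained from the rest of the HAL
/// is elided and assumed):
///
/// ```ignore
/// let (mut pit0, pit1, pit2, pit3) = unclocked_pit.clock(configured);
/// ```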
pub struct UnclockedPIT(ral::pit::Instance);
impl UnclockedPIT {
pub(crate) fn new(base: ral::pit::Instance) -> Self {
UnclockedPIT(base)
}
/// Activate the PIT module after enabling the clock for the
/// module.
pub fn clock(
self,
configured: perclk::Configured,
) -> (
PIT<channel::_0>,
PIT<channel::_1>,
PIT<channel::_2>,
PIT<channel::_3>,
) {
let (clock_hz, divider) = configured.enable();
ral::write_reg!(ral::pit, self.0, MCR, MDIS: MDIS_0);
// Intentionally dropping the ral::pit::Instance. We will give consumers
// the appearance that we own it so that they cannot subsequently take it.
(
PIT::new(clock_hz, divider),
PIT::new(clock_hz, divider),
PIT::new(clock_hz, divider),
PIT::new(clock_hz, divider),
)
}
}
pub mod channel {
use crate::ral;
/// Dummy channel for describing channel chaining.
///
/// Timer 0 cannot be chained. This is the only "valid" chainable
/// channel, but it does not exist.
#[doc(hidden)]
pub struct _X;
/// PIT channel 0
pub struct _0;
/// PIT channel 1
pub struct _1;
/// PIT channel 2
pub struct _2;
/// PIT channel 3
pub struct _3;
#[doc(hidden)]
pub trait Channel {
type ChainedTo: Channel;
fn enabled() -> bool;
fn set_enabled(enable: bool);
fn set_ldval(val: u32);
fn ldval() -> u32;
fn cval() -> u32;
fn tif() -> bool;
fn clear_tif();
fn set_interrupt_enable(interrupt: bool);
fn interrupt_enable() -> bool;
fn enable_chain();
}
macro_rules! _impl_channel {
($chan:ty, $chain:ty, $tctrl:ident, $ldval:ident, $tflg:ident, $cval:ident) => {
impl Channel for $chan {
type ChainedTo = $chain;
#[inline(always)]
fn enabled() -> bool {
unsafe { ral::read_reg!(ral::pit, ral::pit::PIT, $tctrl, TEN == TEN_1) }
}
#[inline(always)]
fn set_enabled(enable: bool) {
unsafe {
ral::modify_reg!(ral::pit, ral::pit::PIT, $tctrl, TEN: u32::from(enable));
}
}
#[inline(always)]
fn set_ldval(val: u32) {
unsafe {
ral::write_reg!(ral::pit, ral::pit::PIT, $ldval, TSV: val);
}
}
#[inline(always)]
fn ldval() -> u32 {
unsafe { ral::read_reg!(ral::pit, ral::pit::PIT, $ldval, TSV) }
}
#[inline(always)]
fn cval() -> u32 {
unsafe { ral::read_reg!(ral::pit, ral::pit::PIT, $cval, TVL) }
}
#[inline(always)]
fn tif() -> bool {
unsafe { ral::read_reg!(ral::pit, ral::pit::PIT, $tflg, TIF == TIF_1) }
}
#[inline(always)]
fn clear_tif() {
unsafe {
ral::write_reg!(ral::pit, ral::pit::PIT, $tflg, TIF: TIF_1);
}
}
#[inline(always)]
fn set_interrupt_enable(interrupt: bool) {
unsafe {
ral::modify_reg!(
ral::pit,
ral::pit::PIT,
$tctrl,
TIE: u32::from(interrupt)
);
}
}
#[inline(always)]
fn interrupt_enable() -> bool {
unsafe { ral::read_reg!(ral::pit, ral::pit::PIT, $tctrl, TIE == TIE_1) }
}
#[inline(always)]
fn enable_chain() {
unsafe {
ral::modify_reg!(ral::pit, ral::pit::PIT, $tctrl, CHN: CHN_1);
}
}
}
};
}
/// Dummy channel for describing channel chaining.
///
/// Timer 0 cannot be chained. This is the only "valid" chainable
/// channel, but it does not exist.
///
/// All methods are unreachable, because we cannot call them.
impl Channel for _X {
type ChainedTo = _X;
fn enabled() -> bool {
unreachable!()
}
fn set_enabled(_: bool) {
unreachable!()
}
fn set_ldval(_: u32) {
unreachable!()
}
fn ldval() -> u32 {
unreachable!()
}
fn cval() -> u32 {
unreachable!()
}
fn tif() -> bool {
unreachable!()
}
fn clear_tif() {
unreachable!()
}
fn set_interrupt_enable(_: bool) {
unreachable!()
}
fn interrupt_enable() -> bool {
unreachable!()
}
fn enable_chain() {
unreachable!()
}
}
_impl_channel!(_0, _X, TCTRL0, LDVAL0, TFLG0, CVAL0);
_impl_channel!(_1, _0, TCTRL1, LDVAL1, TFLG1, CVAL1);
_impl_channel!(_2, _1, TCTRL2, LDVAL2, TFLG2, CVAL2);
_impl_channel!(_3, _2, TCTRL3, LDVAL3, TFLG3, CVAL3);
}
/// A periodic interrupt timer (PIT)
pub struct PIT<Chan> {
clock_hz: Frequency,
divider: Divider,
_chan: PhantomData<Chan>,
}
impl<Chan: channel::Channel> PIT<Chan> {
fn new(clock_hz: Frequency, divider: Divider) -> PIT<Chan> {
PIT {
clock_hz,
divider,
_chan: PhantomData,
}
}
#[inline(always)]
fn disabled<F: FnMut(&Self) -> R, R>(&self, mut act: F) -> R {
let enabled = Chan::enabled();
Chan::set_enabled(false);
let tsv = Chan::ldval();
let res = act(self);
self.ldval(tsv);
Chan::set_enabled(enabled);
res
}
fn ldval(&self, val: u32) {
Chan::set_ldval(val);
}
fn tif(&self) -> bool {
Chan::tif()
}
fn clear_tif(&self) {
Chan::clear_tif();
}
/// Returns the period of the clock ticks. This is the inverse
/// of the clock frequency
pub fn clock_period(&self) -> core::time::Duration {
(self.clock_hz / self.divider).into()
}
/// Measure the execution duration of `act` with this timer. Returns the duration
/// of the action, or `None` if the timer expired before the action completed.
///
/// `time` will measure the difference of counts in a 32 bit register. The counter
/// changes every clock period. The clock accuracy is based on our ability to round
    /// integers. Consider choosing the input clock frequency and prescalers to define
/// a clock that can accurately measure your workloads.
///
/// The method will disable any interrupts that this timer has enabled. It will also
/// reset the timer to execute this measurement.
///
/// If you need a 64 bit timer, use the `chain` function to combine timer 0 and
    /// timer 1. The two can create the 'lifetime' timer, which is capable of measuring
/// larger intervals.
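    ///
    /// A minimal sketch (illustrative; `do_work` is a placeholder closure):
    ///
    /// ```ignore
    /// let (result, elapsed) = pit.time(|| do_work());
    /// // `elapsed` is `None` if the 32-bit counter expired before `do_work` returned.
    /// ```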
pub fn time<F: FnMut() -> R, R>(&mut self, mut act: F) -> (R, Option<core::time::Duration>) {
const STARTING_LDVAL: u32 = u32::max_value();
self.with_interrupts_disabled(|this| {
this.disabled(|this| {
this.clear_tif();
this.ldval(STARTING_LDVAL);
Chan::set_enabled(true);
let res = act();
let counter = Chan::cval();
if this.tif() {
// Action took too long and the timer expired.
// The counter value is meaningless
(res, None)
} else {
let ticks = STARTING_LDVAL - counter;
let clock_period: core::time::Duration = (this.clock_hz / this.divider).into();
(res, Some(ticks * clock_period))
}
})
})
}
/// Enable the timer to trigger an interrupt when the timer expires
pub fn set_interrupt_enable(&mut self, interrupt: bool) {
Chan::set_interrupt_enable(interrupt);
}
/// Returns `true` if the timer will trigger an interrupt when
/// it expires.
pub fn interrupt_enable(&self) -> bool {
Chan::interrupt_enable()
}
#[inline(always)]
fn with_interrupts_disabled<F: FnMut(&Self) -> R, R>(&self, mut act: F) -> R {
let interrupt_enabled = self.interrupt_enable();
Chan::set_interrupt_enable(false);
let res = act(self);
Chan::set_interrupt_enable(interrupt_enabled);
res
}
}
impl<Chan: channel::Channel> CountDown for PIT<Chan> {
type Time = core::time::Duration;
fn start<T: Into<Self::Time>>(&mut self, ms: T) {
let ticks: u32 = match ticks(ms.into(), self.clock_hz.0, self.divider.0) {
Ok(ticks) => ticks,
// Saturate the load value
Err(TicksError::TicksOverflow) | Err(TicksError::DurationOverflow) => u32::max_value(),
// Ratio of freq / div was zero, or divider was zero
Err(TicksError::DivideByZero) => 0,
};
Chan::set_enabled(false);
self.clear_tif();
self.ldval(ticks);
Chan::set_enabled(true);
}
fn wait(&mut self) -> nb::Result<(), void::Void> {
if self.tif() {
self.clear_tif();
Ok(())
} else {
Err(nb::Error::WouldBlock)
}
}
}
impl<Chan: channel::Channel> Periodic for PIT<Chan> {}
/// Two PIT timers chained together
pub struct ChainedPIT<C0, C1> {
lower: PIT<C0>,
upper: PIT<C1>,
}
impl<C0, C1> ChainedPIT<C0, C1>
where
C1: channel::Channel,
{
/// Control interrupt generation for this chained PIT timer
pub fn set_interrupt_enable(&mut self, interrupt: bool) {
self.upper.set_interrupt_enable(interrupt);
}
/// Returns `true` if interrupts are enabled, else `false`
/// if interrupts are disabled.
pub fn interrupt_enable(&self) -> bool {
self.upper.interrupt_enable()
}
}
/// Chain two timers together, returning a `ChainedPIT` timer that can
/// count twice as many ticks.
///
/// The API enforces that channel 1 is chained to channel 0, or channel 2 is
/// chained to channel 1, or channel 3 is chained to channel 2. Any other
/// combination of chaining is prevented by the compiler.
///
/// We do not support chaining more than two timers.
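///
/// For example (illustrative), chaining channel 0 into channel 1 yields the
/// 64-bit "lifetime" timer:
///
/// ```ignore
/// let mut lifetime = chain(pit0, pit1);
/// let (result, elapsed) = lifetime.time(|| do_work());
/// ```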
pub fn chain<C1: channel::Channel>(
mut lower: PIT<<C1 as channel::Channel>::ChainedTo>,
upper: PIT<C1>,
) -> ChainedPIT<<C1 as channel::Channel>::ChainedTo, C1> {
// We can only enable the interrupt for the upper timer.
// Otherwise, we'll interrupt early.
lower.set_interrupt_enable(false);
ChainedPIT { lower, upper }
}
impl<C0, C1> CountDown for ChainedPIT<C0, C1>
where
C0: channel::Channel,
C1: channel::Channel,
{
type Time = core::time::Duration;
fn start<T: Into<Self::Time>>(&mut self, time: T) {
// clock_hz and divider are equal across all PITs
let ticks: u64 = match ticks(time.into(), self.lower.clock_hz.0, self.lower.divider.0) {
Ok(ticks) => ticks,
// Saturate the load value
Err(TicksError::TicksOverflow) | Err(TicksError::DurationOverflow) => u64::max_value(),
// Ratio of freq / div was zero, or divider was zero
Err(TicksError::DivideByZero) => 0,
};
C0::set_enabled(false);
C1::set_enabled(false);
self.upper.clear_tif();
C1::enable_chain();
self.upper.ldval((ticks >> 32) as u32);
self.lower.ldval((ticks & 0xFFFF_FFFF) as u32);
C0::set_enabled(true);
C1::set_enabled(true);
}
fn wait(&mut self) -> nb::Result<(), void::Void> {
if self.upper.tif() {
self.upper.clear_tif();
Ok(())
} else {
Err(nb::Error::WouldBlock)
}
}
}
/// The lifetime timer is PIT0 chained to PIT1.
/// It allows us to time over 64 bits with no
/// carry.
impl ChainedPIT<channel::_0, channel::_1> {
/// Time the execution duration of `act`. Returns the time it took to run `act`,
/// or `None` if the timer expired.
///
/// See the notes on `PIT::time`. Unlike `PIT::time`, this `time` method uses
/// a 64 bit register, which can help measure larger intervals. As with `PIT::time`,
/// this function will temporarily disable interrupts and reset any currently-running
/// timer.
///
/// This method is only available when chaining timer 0 to timer 1.
pub fn time<F: FnMut() -> R, R>(&mut self, mut act: F) -> (R, Option<core::time::Duration>) {
const STARTING_LDVAL: u32 = u32::max_value();
self.upper.with_interrupts_disabled(|upper| {
self.lower.disabled(|lower| {
upper.disabled(|upper| {
upper.clear_tif();
upper.ldval(STARTING_LDVAL);
lower.ldval(STARTING_LDVAL);
use channel::Channel;
channel::_1::enable_chain();
channel::_1::set_enabled(true);
channel::_0::set_enabled(true);
let res = act();
let lifetime = unsafe {
let lifetime =
u64::from(ral::read_reg!(ral::pit, ral::pit::PIT, LTMR64H)) << 32;
lifetime | u64::from(ral::read_reg!(ral::pit, ral::pit::PIT, LTMR64L))
};
if upper.tif() {
(res, None)
} else {
let ticks = u64::max_value() - lifetime;
// Betting that this isn't lossy...
let clock_period: u64 =
core::time::Duration::from(upper.clock_hz / upper.divider).as_nanos()
as u64;
(
res,
Some(core::time::Duration::from_nanos(ticks * clock_period)),
)
}
})
})
})
}
}
| 32.676923 | 99 | 0.521052 |
cc17d8f8e3962e59f2d3782367c7eb6f63603132 | 269 | struct S;
impl S {
fn f(self: _) {} //~ERROR the type placeholder `_` is not allowed within types on item signatures for functions
fn g(self: &_) {} //~ERROR the type placeholder `_` is not allowed within types on item signatures for functions
}
fn main() {}
| 29.888889 | 116 | 0.684015 |
64122e08e4b6d0d1929abd4fb7378502965db152 | 528 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use anyhow::Error;
use argh::from_env;
use fuchsia_async as fasync;
use fuchsia_syslog as syslog;
use setui_client_lib::*;
#[fasync::run_singlethreaded]
async fn main() -> Result<(), Error> {
syslog::init_with_tags(&["setui-client"]).expect("Can't init logger");
let command = from_env::<SettingClient>();
run_command(command).await?;
Ok(())
}
| 26.4 | 74 | 0.708333 |
505348ebf56f561b00d972274f65131a67014d9e | 1,040 | use cfg_rs::*;
use env_logger::{Builder, Logger, Target};
use log::LevelFilter;
#[derive(FromConfig)]
#[config(prefix = "log")]
struct LogEnv {
#[config(default = "out")]
target: LogTarget,
#[config(default = "info")]
level: LevelFilter,
}
struct LogTarget(Target);
impl_enum!( LogTarget {
"stdout" | "out" => LogTarget(Target::Stdout)
"stderr" | "err" => LogTarget(Target::Stderr)
});
impl From<LogEnv> for Logger {
fn from(le: LogEnv) -> Self {
Builder::new()
.target(le.target.0)
.filter_level(le.level)
.build()
}
}
fn main() -> Result<(), ConfigError> {
let config = Configuration::with_predefined()?;
let env = config.get_predefined::<LogEnv>()?;
log::set_max_level(env.level);
log::set_boxed_logger(Box::new(Logger::from(env)))?;
let mut i = 0;
for name in config.source_names() {
i += 1;
log::info!("{}: {}", i, name);
}
log::info!("hello {}", config.get::<String>("hello.toml").unwrap());
Ok(())
}
| 24.186047 | 72 | 0.577885 |
897b367548a870583ed802868a731b77a7acc9b3 | 15,560 | use super::xid::*;
use std::collections::BTreeMap;
#[derive(Eq, PartialEq, Debug, thiserror::Error)]
pub enum ParseError {
#[error("syntax error, {msg}")]
SyntaxError {
query: String,
msg: String,
span: (usize, usize),
},
}
#[derive(Eq, PartialEq, Debug)]
pub enum Binop {
And,
Or,
}
#[derive(Eq, PartialEq, Debug)]
pub enum Unop {
Not,
}
#[derive(Eq, PartialEq, Debug)]
pub enum AgeAssertion {
OlderThan,
NewerThan,
}
#[derive(Eq, PartialEq, Debug)]
pub enum Query {
Glob {
tag: String,
pattern: glob::Pattern,
span: (usize, usize),
},
Unop {
op: Unop,
span: (usize, usize),
query: Box<Query>,
},
Binop {
op: Binop,
span: (usize, usize),
left: Box<Query>,
right: Box<Query>,
},
AgeAssertion {
op: AgeAssertion,
span: (usize, usize),
duration: std::time::Duration,
},
}
fn is_tag_char(c: char) -> bool {
('a'..='z').contains(&c)
|| ('A'..='Z').contains(&c)
|| ('0'..='9').contains(&c)
|| c == '-'
|| c == '_'
}
macro_rules! impl_binop {
($name:ident, $opi:ident, $ops:literal , $sub:ident) => {
fn $name(&mut self) -> Result<Query, ParseError> {
let op = $ops;
let mut l: Query;
let (_, start_pos) = self.peek();
l = self.$sub()?;
loop {
if !self.lookahead_sep(op) {
return Ok(l);
}
self.advance(op.chars().count() + 1);
if self.is_eof() {
return Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: format!("operator '{}' expects a value", op),
span: (self.offset - 1, self.offset - 1),
});
}
let r = self.$sub()?;
let (_, end_pos) = self.peek();
l = Query::Binop {
op: Binop::$opi,
span: (start_pos, end_pos),
left: Box::new(l),
right: Box::new(r),
}
}
}
};
}
struct Parser {
query_chars: Vec<char>,
offset: usize,
}
impl Parser {
fn peek(&mut self) -> (char, usize) {
match self.query_chars.get(self.offset) {
Some(c) => (*c, self.offset),
None => ('•', self.offset),
}
}
fn advance(&mut self, count: usize) {
self.offset += count;
if self.offset > self.query_chars.len() {
self.offset = self.query_chars.len();
}
}
fn get(&mut self) -> (char, usize) {
let offset = self.offset;
self.advance(1);
match self.query_chars.get(offset) {
Some(c) => (*c, offset),
None => ('•', offset),
}
}
fn is_eof(&self) -> bool {
self.offset == self.query_chars.len()
}
fn lookahead(&mut self, lookahead: &str) -> bool {
for (i, c) in lookahead.chars().enumerate() {
match self.query_chars.get(self.offset + i) {
Some(gotc) if c == *gotc => (),
_ => return false,
}
}
true
}
fn lookahead_sep(&mut self, op: &str) -> bool {
if !self.lookahead(op) {
return false;
}
match self.query_chars.get(self.offset + op.chars().count()) {
Some('•') => true,
None => true,
Some(_) => false,
}
}
fn consume_if_matches(&mut self, s: &str) -> bool {
if self.lookahead(s) {
self.advance(s.chars().count());
true
} else {
false
}
}
fn expect(&mut self, expected: &str) -> Result<(), ParseError> {
if !self.lookahead(expected) {
Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: format!("expected '{}'", expected),
span: (self.offset, self.offset),
})
} else {
self.advance(expected.chars().count());
Ok(())
}
}
fn parse(&mut self) -> Result<Query, ParseError> {
let v = self.parse_expr()?;
if self.offset != self.query_chars.len() {
return Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: "unexpected input at end of query'".to_owned(),
span: (self.offset, self.offset),
});
}
Ok(v)
}
fn parse_expr(&mut self) -> Result<Query, ParseError> {
self.parse_and()
}
impl_binop!(parse_and, And, "and", parse_or);
impl_binop!(parse_or, Or, "or", parse_base);
fn parse_base(&mut self) -> Result<Query, ParseError> {
let (c, _) = self.peek();
if c == '[' {
self.expect("[")?;
self.consume_if_matches("•");
let v = self.parse_expr()?;
self.expect("]")?;
self.consume_if_matches("•");
Ok(v)
} else if c == '~' {
self.parse_unop()
} else if self.lookahead("older-than•") || self.lookahead("newer-than•") {
self.parse_age_assertion()
} else {
self.parse_eq()
}
}
fn parse_age_assertion(&mut self) -> Result<Query, ParseError> {
let (_, start_pos) = self.peek();
let op = if self.consume_if_matches("older-than•") {
AgeAssertion::OlderThan
} else if self.consume_if_matches("newer-than•") {
AgeAssertion::NewerThan
} else {
unreachable!()
};
let (_, duration_start_pos) = self.peek();
let mut d = String::new();
loop {
match self.peek() {
(c, _) if c != '•' => {
self.advance(1);
d.push(c);
}
_ => break,
}
}
let (_, end_pos) = self.peek();
self.consume_if_matches("•");
match humantime::parse_duration(&d) {
Ok(duration) => Ok(Query::AgeAssertion {
op,
duration,
span: (start_pos, end_pos),
}),
Err(err) => Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: format!("error parsing duration: {}", err),
span: (duration_start_pos, end_pos),
}),
}
}
fn parse_tag(&mut self) -> Result<String, ParseError> {
let (c, pos) = self.peek();
if !is_tag_char(c) {
return Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: "expected a tag character".to_string(),
span: (pos, pos),
});
}
self.get();
let mut v = String::new();
v.push(c);
loop {
match self.peek() {
(c, _) if is_tag_char(c) => {
self.advance(1);
v.push(c);
}
_ => break,
}
}
Ok(v)
}
fn parse_value(&mut self) -> String {
let (c, _) = self.peek();
if c == '•' {
self.advance(1);
return "".to_string();
}
self.get();
let mut v = String::new();
v.push(c);
loop {
match self.peek() {
(c, _) if c != '•' => {
self.advance(1);
v.push(c);
}
_ => break,
}
}
self.consume_if_matches("•");
v
}
fn parse_eq(&mut self) -> Result<Query, ParseError> {
let (_, tag_pos) = self.peek();
let tag = self.parse_tag()?;
let (_, tag_end_pos) = self.peek();
let escape: bool;
if self.consume_if_matches("==") {
escape = true;
} else if self.consume_if_matches("=") {
escape = false;
} else {
return Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: "expected '=' after tag".to_string(),
span: (tag_pos, tag_end_pos),
});
}
let raw_pattern = self.parse_value();
let (_, end_pos) = self.peek();
let pattern = if escape {
glob::Pattern::escape(&raw_pattern)
} else {
raw_pattern
};
let pattern = match glob::Pattern::new(&pattern) {
Ok(pattern) => pattern,
Err(err) => {
return Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: format!("invalid glob pattern: {}", err),
span: (tag_pos, end_pos),
})
}
};
self.consume_if_matches("•");
Ok(Query::Glob {
tag,
pattern,
span: (tag_pos, end_pos),
})
}
fn parse_unop(&mut self) -> Result<Query, ParseError> {
let (op, op_pos) = self.peek();
let op = if self.consume_if_matches("~") {
self.consume_if_matches("•");
Unop::Not
} else {
return Err(ParseError::SyntaxError {
query: self.query_chars.iter().collect(),
msg: format!("unknown unary operator: {}", op),
span: (op_pos, op_pos),
});
};
let query = Box::new(self.parse_base()?);
let (_, end_pos) = self.peek();
Ok(Query::Unop {
op,
query,
span: (op_pos, end_pos),
})
}
}
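/// Parse a query string into a `Query` tree.
///
/// A small usage sketch (illustrative; the query mirrors the unit tests below,
/// and '•' is the separator character used throughout this module):
///
/// ```ignore
/// match parse("foo=123•and•~bar=x") {
///     Ok(query) => { /* evaluate it with query_matches(&query, &ctx) */ }
///     Err(err) => report_parse_error(err),
/// }
/// ```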
pub fn parse(s: &str) -> Result<Query, ParseError> {
let mut query_chars: Vec<char> = s.chars().collect();
// Ensure the query always ends with a separator character,
// this makes things more consistent.
query_chars.push('•');
let mut p = Parser {
query_chars,
offset: 0,
};
p.parse()
}
pub fn report_parse_error(e: ParseError) {
match e {
ParseError::SyntaxError { query, msg, span } => {
let mut codemap = codemap::CodeMap::new();
let indices: Vec<(usize, char)> = query.char_indices().collect();
let query_span = codemap.add_file("<query>".to_owned(), query).span;
let err_span = query_span.subspan(
indices.get(span.0).unwrap_or(&indices[indices.len() - 1]).0 as u64,
indices.get(span.1).unwrap_or(&indices[indices.len() - 1]).0 as u64,
);
let label = codemap_diagnostic::SpanLabel {
span: err_span,
style: codemap_diagnostic::SpanStyle::Primary,
label: None,
};
let d = codemap_diagnostic::Diagnostic {
level: codemap_diagnostic::Level::Error,
message: msg,
code: None,
spans: vec![label],
};
let mut emitter = codemap_diagnostic::Emitter::stderr(
codemap_diagnostic::ColorConfig::Always,
Some(&codemap),
);
emitter.emit(&[d]);
}
}
}
pub struct QueryContext<'a> {
pub age: std::time::Duration,
pub tagset: &'a BTreeMap<String, String>,
}
pub fn query_matches(q: &Query, ctx: &QueryContext) -> bool {
match q {
Query::Glob { tag, pattern, .. } => match ctx.tagset.get(tag) {
Some(v) => pattern.matches(v),
None => false,
},
Query::Binop {
op, left, right, ..
} => match op {
Binop::And => query_matches(left, ctx) && query_matches(right, ctx),
Binop::Or => query_matches(left, ctx) || query_matches(right, ctx),
},
Query::Unop { op, query, .. } => match op {
Unop::Not => !query_matches(query, ctx),
},
Query::AgeAssertion { op, duration, .. } => match op {
AgeAssertion::OlderThan => ctx.age > *duration,
AgeAssertion::NewerThan => ctx.age < *duration,
},
}
}
pub struct QueryEncryptedContext<'a> {
pub tagset: &'a BTreeMap<String, String>,
}
pub fn query_matches_encrypted(q: &Query, ctx: &QueryEncryptedContext) -> bool {
match q {
Query::Glob { tag, pattern, .. } => match ctx.tagset.get(tag) {
Some(v) => pattern.matches(v),
None => false,
},
Query::Binop {
op, left, right, ..
} => match op {
Binop::And => query_matches_encrypted(left, ctx) && query_matches_encrypted(right, ctx),
Binop::Or => query_matches_encrypted(left, ctx) || query_matches_encrypted(right, ctx),
},
Query::Unop { op, query, .. } => match op {
Unop::Not => !query_matches_encrypted(query, ctx),
},
Query::AgeAssertion { .. } => false,
}
}
pub fn get_id_query(q: &Query) -> Option<Xid> {
match q {
Query::Glob { tag, pattern, .. }
if tag == "id" && pattern.as_str().chars().all(char::is_alphanumeric) =>
{
if let Ok(xid) = Xid::parse(pattern.as_str()) {
Some(xid)
} else {
None
}
}
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_id_query() {
assert_eq!(
get_id_query(&parse("id=11223344556677881122334455667788").unwrap()),
Some(Xid::parse(&"11223344556677881122334455667788").unwrap())
);
assert_eq!(get_id_query(&parse("foo=123").unwrap()), None);
}
#[test]
fn test_query_match() {
let mut tagset = BTreeMap::<String, String>::new();
tagset.insert("foo".to_string(), "123".to_string());
tagset.insert("bar".to_string(), "".to_string());
let ctx = QueryContext {
age: std::time::Duration::new(5, 0),
tagset: &tagset,
};
let ectx = QueryEncryptedContext { tagset: &tagset };
assert!(query_matches(&parse("foo=123•and•bar=").unwrap(), &ctx));
assert!(query_matches(&parse("foo=12*").unwrap(), &ctx));
assert!(query_matches(&parse("foo=12?").unwrap(), &ctx));
assert!(query_matches(&parse("~foo=xxx").unwrap(), &ctx));
assert!(query_matches(&parse("older-than•2s").unwrap(), &ctx));
assert!(query_matches(&parse("newer-than•6s").unwrap(), &ctx));
assert!(!query_matches(&parse("older-than•6s").unwrap(), &ctx));
assert!(!query_matches(&parse("newer-than•2s").unwrap(), &ctx));
assert!(!query_matches(&parse("~•[•foo==123•]").unwrap(), &ctx));
assert!(query_matches_encrypted(
&parse("foo=123•and•bar=").unwrap(),
&ectx
));
assert!(query_matches_encrypted(&parse("foo=12*").unwrap(), &ectx));
assert!(query_matches_encrypted(&parse("foo=12?").unwrap(), &ectx));
assert!(query_matches_encrypted(&parse("~foo=xxx").unwrap(), &ectx));
assert!(!query_matches_encrypted(
&parse("older-than•2s").unwrap(),
&ectx
));
assert!(!query_matches_encrypted(
&parse("newer-than•6s").unwrap(),
&ectx
));
}
}
| 28.975791 | 100 | 0.477185 |
0e528fe9ed75211689275f2fbb24a4e2dce53e46 | 3,927 |
// Carry-less (GF(2)) multiplication of two 64-bit operands; the 128-bit
// product is written into `dst`.
#[inline]
fn cl_mul(a: u64, b: u64, dst: &mut [u64; 2]) {
dst[0] = 0;
dst[1] = 0;
for i in 0u64..64 {
if (b & (1u64 << i)) != 0 {
dst[1] ^= a;
}
// Shift the result
dst[0] >>= 1;
if (dst[1] & (1u64 << 0)) != 0 {
dst[0] ^= 1u64 << 63;
}
dst[1] >>= 1;
}
}
#[derive(Clone)]
pub struct Polyval {
key: [u8; Self::KEY_LEN],
h: [u8; Self::BLOCK_LEN],
}
impl Polyval {
pub const KEY_LEN: usize = 16;
pub const BLOCK_LEN: usize = 16;
pub const TAG_LEN: usize = 16;
pub fn new(k: &[u8]) -> Self {
assert_eq!(k.len(), Self::KEY_LEN);
let h = [0u8; Self::TAG_LEN];
let mut key = [0u8; Self::KEY_LEN];
key.copy_from_slice(&k[..Self::KEY_LEN]);
Self { key, h }
}
#[inline]
fn gf_mul(&mut self) {
        // Multiplies the accumulator by the key in the POLYVAL field: four
        // 64x64 carry-less partial products are combined, then the double-width
        // result is reduced back to 128 bits below.
        // a: h (the running accumulator)
        // b: key (the authentication key)
let a = [
u64::from_le_bytes([
self.h[0], self.h[1], self.h[2], self.h[3],
self.h[4], self.h[5], self.h[6], self.h[7],
]),
u64::from_le_bytes([
self.h[ 8], self.h[ 9], self.h[10], self.h[11],
self.h[12], self.h[13], self.h[14], self.h[15],
]),
];
let b = [
u64::from_le_bytes([
self.key[0], self.key[1], self.key[2], self.key[3],
self.key[4], self.key[5], self.key[6], self.key[7],
]),
u64::from_le_bytes([
self.key[ 8], self.key[ 9], self.key[10], self.key[11],
self.key[12], self.key[13], self.key[14], self.key[15],
]),
];
let mut tmp1 = [0u64; 2];
let mut tmp2 = [0u64; 2];
let mut tmp3 = [0u64; 2];
let mut tmp4 = [0u64; 2];
cl_mul(a[0], b[0], &mut tmp1); // 0x00
cl_mul(a[1], b[0], &mut tmp2); // 0x01
cl_mul(a[0], b[1], &mut tmp3); // 0x10
cl_mul(a[1], b[1], &mut tmp4); // 0x11
tmp2[0] ^= tmp3[0];
tmp2[1] ^= tmp3[1];
tmp3[0] = 0;
tmp3[1] = tmp2[0];
tmp2[0] = tmp2[1];
tmp2[1] = 0;
tmp1[0] ^= tmp3[0];
tmp1[1] ^= tmp3[1];
tmp4[0] ^= tmp2[0];
tmp4[1] ^= tmp2[1];
const XMMMASK: [u64; 2] = [0x1u64, 0xc200000000000000];
cl_mul(XMMMASK[1], tmp1[0], &mut tmp2); // 0x01
unsafe {
let tmp33: &mut [u32; 4] = core::mem::transmute::<&mut [u64; 2], &mut [u32; 4]>(&mut tmp3);
let tmp11: &mut [u32; 4] = core::mem::transmute::<&mut [u64; 2], &mut [u32; 4]>(&mut tmp1);
tmp33[0] = tmp11[2];
tmp33[1] = tmp11[3];
tmp33[2] = tmp11[0];
tmp33[3] = tmp11[1];
}
tmp1[0] = tmp2[0] ^ tmp3[0];
tmp1[1] = tmp2[1] ^ tmp3[1];
cl_mul(XMMMASK[1], tmp1[0], &mut tmp2); // 0x01
unsafe {
let tmp33: &mut [u32; 4] = core::mem::transmute::<&mut [u64; 2], &mut [u32; 4]>(&mut tmp3);
let tmp11: &mut [u32; 4] = core::mem::transmute::<&mut [u64; 2], &mut [u32; 4]>(&mut tmp1);
tmp33[0] = tmp11[2];
tmp33[1] = tmp11[3];
tmp33[2] = tmp11[0];
tmp33[3] = tmp11[1];
}
tmp1[0] = tmp2[0] ^ tmp3[0];
tmp1[1] = tmp2[1] ^ tmp3[1];
tmp4[0] ^= tmp1[0];
tmp4[1] ^= tmp1[1];
self.h[0.. 8].copy_from_slice(&tmp4[0].to_le_bytes());
self.h[8..16].copy_from_slice(&tmp4[1].to_le_bytes());
}
pub fn update(&mut self, m: &[u8]) {
for chunk in m.chunks(Self::BLOCK_LEN) {
for i in 0..chunk.len() {
self.h[i] ^= chunk[i];
}
self.gf_mul();
}
}
pub fn finalize(self) -> [u8; Self::TAG_LEN] {
self.h
}
}
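// A minimal smoke-test sketch for the API above. It only exercises
// new/update/finalize end to end; no official POLYVAL test vectors are
// asserted here.
#[cfg(test)]
mod tests {
    use super::Polyval;

    #[test]
    fn polyval_api_smoke() {
        let key = [0x42u8; Polyval::KEY_LEN];
        let mut mac = Polyval::new(&key);
        // One full block followed by a short trailing chunk.
        mac.update(&[0u8; Polyval::BLOCK_LEN]);
        mac.update(b"hello polyval");
        let tag = mac.finalize();
        assert_eq!(tag.len(), Polyval::TAG_LEN);
    }
}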
| 26.006623 | 103 | 0.422715 |
e94f0956251153b1c19b54c26fc497a79f9cf0e3 | 400,753 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_firewall_rule_group_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateFirewallRuleGroupOutput,
crate::error::AssociateFirewallRuleGroupError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::AssociateFirewallRuleGroupError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::AssociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::AssociateFirewallRuleGroupErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConflictException" => {
crate::error::AssociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::AssociateFirewallRuleGroupErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServiceErrorException" => crate::error::AssociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::AssociateFirewallRuleGroupErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"LimitExceededException" => crate::error::AssociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::AssociateFirewallRuleGroupErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::AssociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::AssociateFirewallRuleGroupErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::AssociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::AssociateFirewallRuleGroupErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::AssociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::AssociateFirewallRuleGroupErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::AssociateFirewallRuleGroupError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_firewall_rule_group_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateFirewallRuleGroupOutput,
crate::error::AssociateFirewallRuleGroupError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::associate_firewall_rule_group_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_associate_firewall_rule_group(
response.body().as_ref(),
output,
)
.map_err(crate::error::AssociateFirewallRuleGroupError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_resolver_endpoint_ip_address_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateResolverEndpointIpAddressOutput,
crate::error::AssociateResolverEndpointIpAddressError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::AssociateResolverEndpointIpAddressError::unhandled(generic))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::AssociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::AssociateResolverEndpointIpAddressErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::AssociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::AssociateResolverEndpointIpAddressErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::AssociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::AssociateResolverEndpointIpAddressErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"LimitExceededException" => crate::error::AssociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::AssociateResolverEndpointIpAddressErrorKind::LimitExceededException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceExistsException" => crate::error::AssociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::AssociateResolverEndpointIpAddressErrorKind::ResourceExistsException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_exists_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_exists_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::AssociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::AssociateResolverEndpointIpAddressErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::AssociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::AssociateResolverEndpointIpAddressErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::AssociateResolverEndpointIpAddressError::generic(generic)
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_resolver_endpoint_ip_address_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateResolverEndpointIpAddressOutput,
crate::error::AssociateResolverEndpointIpAddressError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::associate_resolver_endpoint_ip_address_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_associate_resolver_endpoint_ip_address(response.body().as_ref(), output).map_err(crate::error::AssociateResolverEndpointIpAddressError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_resolver_query_log_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateResolverQueryLogConfigOutput,
crate::error::AssociateResolverQueryLogConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::AssociateResolverQueryLogConfigError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"LimitExceededException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::LimitExceededException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceExistsException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::ResourceExistsException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_exists_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_exists_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::AssociateResolverQueryLogConfigError { meta: generic, kind: crate::error::AssociateResolverQueryLogConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::AssociateResolverQueryLogConfigError::generic(generic)
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_resolver_query_log_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateResolverQueryLogConfigOutput,
crate::error::AssociateResolverQueryLogConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::associate_resolver_query_log_config_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_associate_resolver_query_log_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::AssociateResolverQueryLogConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_resolver_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateResolverRuleOutput,
crate::error::AssociateResolverRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::AssociateResolverRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::AssociateResolverRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceExistsException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::ResourceExistsException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_exists_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_exists_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceUnavailableException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::ResourceUnavailableException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::resource_unavailable_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_unavailable_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::AssociateResolverRuleError {
meta: generic,
kind: crate::error::AssociateResolverRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::AssociateResolverRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_associate_resolver_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::AssociateResolverRuleOutput,
crate::error::AssociateResolverRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::associate_resolver_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_associate_resolver_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::AssociateResolverRuleError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_firewall_domain_list_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateFirewallDomainListOutput,
crate::error::CreateFirewallDomainListError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateFirewallDomainListError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::CreateFirewallDomainListError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::CreateFirewallDomainListError {
meta: generic,
kind: crate::error::CreateFirewallDomainListErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::CreateFirewallDomainListError {
meta: generic,
kind: crate::error::CreateFirewallDomainListErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::CreateFirewallDomainListError {
meta: generic,
kind: crate::error::CreateFirewallDomainListErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::CreateFirewallDomainListError {
meta: generic,
kind: crate::error::CreateFirewallDomainListErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::CreateFirewallDomainListError {
meta: generic,
kind: crate::error::CreateFirewallDomainListErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateFirewallDomainListError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_firewall_domain_list_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateFirewallDomainListOutput,
crate::error::CreateFirewallDomainListError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_firewall_domain_list_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_create_firewall_domain_list(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateFirewallDomainListError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_firewall_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateFirewallRuleOutput,
crate::error::CreateFirewallRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateFirewallRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::CreateFirewallRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::CreateFirewallRuleError {
meta: generic,
kind: crate::error::CreateFirewallRuleErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::CreateFirewallRuleError {
meta: generic,
kind: crate::error::CreateFirewallRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::CreateFirewallRuleError {
meta: generic,
kind: crate::error::CreateFirewallRuleErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::CreateFirewallRuleError {
meta: generic,
kind: crate::error::CreateFirewallRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::CreateFirewallRuleError {
meta: generic,
kind: crate::error::CreateFirewallRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::CreateFirewallRuleError {
meta: generic,
kind: crate::error::CreateFirewallRuleErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateFirewallRuleError::generic(generic),
})
}
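// Illustrative sketch (not generated code): one way these parsers could be exercised
// directly against a hand-built HTTP response. The #[cfg(test)] module name, the JSON
// payload, and the `x-amzn-errortype` header value are assumptions for illustration;
// the actual error-code extraction is delegated to crate::json_deser::parse_http_generic_error.
#[cfg(test)]
mod parse_error_sketch {
    #[test]
    fn access_denied_maps_to_typed_error_kind() {
        // A minimal error response in the shape the dispatcher expects: the error code
        // is carried in both the header and the `__type` body field, and the message
        // should end up on the typed error (via the structured body or the generic fallback).
        let body = bytes::Bytes::from_static(
            br#"{"__type":"AccessDeniedException","message":"denied"}"#,
        );
        let response = http::Response::builder()
            .status(400)
            .header("x-amzn-errortype", "AccessDeniedException")
            .body(body)
            .expect("valid response");
        let err = super::parse_create_firewall_rule_error(&response)
            .expect_err("an error response should not parse as success");
        // With the payload above, the dispatcher is expected to land in the
        // AccessDeniedException arm; any other kind fails the sketch.
        match err.kind {
            crate::error::CreateFirewallRuleErrorKind::AccessDeniedException(_) => {}
            other => panic!("unexpected error kind: {:?}", other),
        }
    }
}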
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_firewall_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateFirewallRuleOutput,
crate::error::CreateFirewallRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_firewall_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_create_firewall_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateFirewallRuleError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_firewall_rule_group_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateFirewallRuleGroupOutput,
crate::error::CreateFirewallRuleGroupError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateFirewallRuleGroupError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::CreateFirewallRuleGroupError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::CreateFirewallRuleGroupError {
meta: generic,
kind: crate::error::CreateFirewallRuleGroupErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::CreateFirewallRuleGroupError {
meta: generic,
kind: crate::error::CreateFirewallRuleGroupErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::CreateFirewallRuleGroupError {
meta: generic,
kind: crate::error::CreateFirewallRuleGroupErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::CreateFirewallRuleGroupError {
meta: generic,
kind: crate::error::CreateFirewallRuleGroupErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::CreateFirewallRuleGroupError {
meta: generic,
kind: crate::error::CreateFirewallRuleGroupErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateFirewallRuleGroupError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_firewall_rule_group_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateFirewallRuleGroupOutput,
crate::error::CreateFirewallRuleGroupError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_firewall_rule_group_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_create_firewall_rule_group(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateFirewallRuleGroupError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_resolver_endpoint_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateResolverEndpointOutput,
crate::error::CreateResolverEndpointError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateResolverEndpointError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::CreateResolverEndpointError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::CreateResolverEndpointError {
meta: generic,
kind: crate::error::CreateResolverEndpointErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::CreateResolverEndpointError {
meta: generic,
kind: crate::error::CreateResolverEndpointErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::CreateResolverEndpointError {
meta: generic,
kind: crate::error::CreateResolverEndpointErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::CreateResolverEndpointError {
meta: generic,
kind: crate::error::CreateResolverEndpointErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceExistsException" => crate::error::CreateResolverEndpointError {
meta: generic,
kind: crate::error::CreateResolverEndpointErrorKind::ResourceExistsException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_exists_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_exists_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::CreateResolverEndpointError {
meta: generic,
kind: crate::error::CreateResolverEndpointErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::CreateResolverEndpointError {
meta: generic,
kind: crate::error::CreateResolverEndpointErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateResolverEndpointError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_resolver_endpoint_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateResolverEndpointOutput,
crate::error::CreateResolverEndpointError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_resolver_endpoint_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_create_resolver_endpoint(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateResolverEndpointError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_resolver_query_log_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateResolverQueryLogConfigOutput,
crate::error::CreateResolverQueryLogConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::CreateResolverQueryLogConfigError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind: crate::error::CreateResolverQueryLogConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind:
crate::error::CreateResolverQueryLogConfigErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidParameterException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind: crate::error::CreateResolverQueryLogConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind: crate::error::CreateResolverQueryLogConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind: crate::error::CreateResolverQueryLogConfigErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceExistsException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind: crate::error::CreateResolverQueryLogConfigErrorKind::ResourceExistsException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_exists_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_exists_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind: crate::error::CreateResolverQueryLogConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::CreateResolverQueryLogConfigError {
meta: generic,
kind: crate::error::CreateResolverQueryLogConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateResolverQueryLogConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_resolver_query_log_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateResolverQueryLogConfigOutput,
crate::error::CreateResolverQueryLogConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_resolver_query_log_config_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_create_resolver_query_log_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateResolverQueryLogConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_resolver_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateResolverRuleOutput,
crate::error::CreateResolverRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateResolverRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::CreateResolverRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceExistsException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::ResourceExistsException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_exists_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_exists_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceUnavailableException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::ResourceUnavailableException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::resource_unavailable_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_unavailable_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::CreateResolverRuleError {
meta: generic,
kind: crate::error::CreateResolverRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateResolverRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_resolver_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateResolverRuleOutput,
crate::error::CreateResolverRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_resolver_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_create_resolver_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateResolverRuleError::unhandled)?;
output.build()
})
}
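// The Delete* and Disassociate* error parsers below add arms for conflict-style errors
// (ConflictException, ResourceInUseException) where the operation defines them, on top
// of the shared access/validation/throttling arms used by the Create* parsers above.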
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_firewall_domain_list_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteFirewallDomainListOutput,
crate::error::DeleteFirewallDomainListError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteFirewallDomainListError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DeleteFirewallDomainListError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::DeleteFirewallDomainListError {
meta: generic,
kind: crate::error::DeleteFirewallDomainListErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConflictException" => {
crate::error::DeleteFirewallDomainListError {
meta: generic,
kind: crate::error::DeleteFirewallDomainListErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServiceErrorException" => crate::error::DeleteFirewallDomainListError {
meta: generic,
kind: crate::error::DeleteFirewallDomainListErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteFirewallDomainListError {
meta: generic,
kind: crate::error::DeleteFirewallDomainListErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::DeleteFirewallDomainListError {
meta: generic,
kind: crate::error::DeleteFirewallDomainListErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteFirewallDomainListError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_firewall_domain_list_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteFirewallDomainListOutput,
crate::error::DeleteFirewallDomainListError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_firewall_domain_list_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_delete_firewall_domain_list(
response.body().as_ref(),
output,
)
.map_err(crate::error::DeleteFirewallDomainListError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_firewall_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteFirewallRuleOutput,
crate::error::DeleteFirewallRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteFirewallRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::DeleteFirewallRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::DeleteFirewallRuleError {
meta: generic,
kind: crate::error::DeleteFirewallRuleErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::DeleteFirewallRuleError {
meta: generic,
kind: crate::error::DeleteFirewallRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteFirewallRuleError {
meta: generic,
kind: crate::error::DeleteFirewallRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::DeleteFirewallRuleError {
meta: generic,
kind: crate::error::DeleteFirewallRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteFirewallRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_firewall_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteFirewallRuleOutput,
crate::error::DeleteFirewallRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_firewall_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_delete_firewall_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::DeleteFirewallRuleError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_firewall_rule_group_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteFirewallRuleGroupOutput,
crate::error::DeleteFirewallRuleGroupError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DeleteFirewallRuleGroupError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::DeleteFirewallRuleGroupError {
meta: generic,
kind: crate::error::DeleteFirewallRuleGroupErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConflictException" => {
crate::error::DeleteFirewallRuleGroupError {
meta: generic,
kind: crate::error::DeleteFirewallRuleGroupErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServiceErrorException" => crate::error::DeleteFirewallRuleGroupError {
meta: generic,
kind: crate::error::DeleteFirewallRuleGroupErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteFirewallRuleGroupError {
meta: generic,
kind: crate::error::DeleteFirewallRuleGroupErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::DeleteFirewallRuleGroupError {
meta: generic,
kind: crate::error::DeleteFirewallRuleGroupErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::DeleteFirewallRuleGroupError {
meta: generic,
kind: crate::error::DeleteFirewallRuleGroupErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteFirewallRuleGroupError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_firewall_rule_group_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteFirewallRuleGroupOutput,
crate::error::DeleteFirewallRuleGroupError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_firewall_rule_group_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_delete_firewall_rule_group(
response.body().as_ref(),
output,
)
.map_err(crate::error::DeleteFirewallRuleGroupError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_resolver_endpoint_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteResolverEndpointOutput,
crate::error::DeleteResolverEndpointError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteResolverEndpointError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DeleteResolverEndpointError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::DeleteResolverEndpointError {
meta: generic,
kind: crate::error::DeleteResolverEndpointErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::DeleteResolverEndpointError {
meta: generic,
kind: crate::error::DeleteResolverEndpointErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::DeleteResolverEndpointError {
meta: generic,
kind: crate::error::DeleteResolverEndpointErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteResolverEndpointError {
meta: generic,
kind: crate::error::DeleteResolverEndpointErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::DeleteResolverEndpointError {
meta: generic,
kind: crate::error::DeleteResolverEndpointErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteResolverEndpointError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_resolver_endpoint_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteResolverEndpointOutput,
crate::error::DeleteResolverEndpointError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_resolver_endpoint_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_delete_resolver_endpoint(
response.body().as_ref(),
output,
)
.map_err(crate::error::DeleteResolverEndpointError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_resolver_query_log_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteResolverQueryLogConfigOutput,
crate::error::DeleteResolverQueryLogConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DeleteResolverQueryLogConfigError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::DeleteResolverQueryLogConfigError {
meta: generic,
kind: crate::error::DeleteResolverQueryLogConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::DeleteResolverQueryLogConfigError {
meta: generic,
kind:
crate::error::DeleteResolverQueryLogConfigErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidParameterException" => crate::error::DeleteResolverQueryLogConfigError {
meta: generic,
kind: crate::error::DeleteResolverQueryLogConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::DeleteResolverQueryLogConfigError {
meta: generic,
kind: crate::error::DeleteResolverQueryLogConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteResolverQueryLogConfigError {
meta: generic,
kind: crate::error::DeleteResolverQueryLogConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::DeleteResolverQueryLogConfigError {
meta: generic,
kind: crate::error::DeleteResolverQueryLogConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteResolverQueryLogConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_resolver_query_log_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteResolverQueryLogConfigOutput,
crate::error::DeleteResolverQueryLogConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_resolver_query_log_config_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_delete_resolver_query_log_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::DeleteResolverQueryLogConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_resolver_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteResolverRuleOutput,
crate::error::DeleteResolverRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteResolverRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::DeleteResolverRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::DeleteResolverRuleError {
meta: generic,
kind: crate::error::DeleteResolverRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::DeleteResolverRuleError {
meta: generic,
kind: crate::error::DeleteResolverRuleErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceInUseException" => crate::error::DeleteResolverRuleError {
meta: generic,
kind: crate::error::DeleteResolverRuleErrorKind::ResourceInUseException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_in_use_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_in_use_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteResolverRuleError {
meta: generic,
kind: crate::error::DeleteResolverRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::DeleteResolverRuleError {
meta: generic,
kind: crate::error::DeleteResolverRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DeleteResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteResolverRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_resolver_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteResolverRuleOutput,
crate::error::DeleteResolverRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_resolver_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_delete_resolver_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::DeleteResolverRuleError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_firewall_rule_group_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateFirewallRuleGroupOutput,
crate::error::DisassociateFirewallRuleGroupError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DisassociateFirewallRuleGroupError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::DisassociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::DisassociateFirewallRuleGroupErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConflictException" => {
crate::error::DisassociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::DisassociateFirewallRuleGroupErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServiceErrorException" => crate::error::DisassociateFirewallRuleGroupError {
meta: generic,
kind:
crate::error::DisassociateFirewallRuleGroupErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"ResourceNotFoundException" => crate::error::DisassociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::DisassociateFirewallRuleGroupErrorKind::ResourceNotFoundException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"ThrottlingException" => crate::error::DisassociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::DisassociateFirewallRuleGroupErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::DisassociateFirewallRuleGroupError {
meta: generic,
kind: crate::error::DisassociateFirewallRuleGroupErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DisassociateFirewallRuleGroupError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_firewall_rule_group_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateFirewallRuleGroupOutput,
crate::error::DisassociateFirewallRuleGroupError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::disassociate_firewall_rule_group_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_disassociate_firewall_rule_group(
response.body().as_ref(),
output,
)
.map_err(crate::error::DisassociateFirewallRuleGroupError::unhandled)?;
output.build()
})
}
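/// Deserializes an error response for `DisassociateResolverEndpointIpAddress`, mapping each
/// recognized error code to its typed variant; unrecognized codes become the generic error.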
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_resolver_endpoint_ip_address_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateResolverEndpointIpAddressOutput,
crate::error::DisassociateResolverEndpointIpAddressError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(
crate::error::DisassociateResolverEndpointIpAddressError::unhandled(generic),
)
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::DisassociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::DisassociateResolverEndpointIpAddressErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::DisassociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::DisassociateResolverEndpointIpAddressErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::DisassociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::DisassociateResolverEndpointIpAddressErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceExistsException" => crate::error::DisassociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::DisassociateResolverEndpointIpAddressErrorKind::ResourceExistsException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_exists_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_exists_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::DisassociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::DisassociateResolverEndpointIpAddressErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::DisassociateResolverEndpointIpAddressError { meta: generic, kind: crate::error::DisassociateResolverEndpointIpAddressErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::DisassociateResolverEndpointIpAddressError::generic(generic)
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_resolver_endpoint_ip_address_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateResolverEndpointIpAddressOutput,
crate::error::DisassociateResolverEndpointIpAddressError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::disassociate_resolver_endpoint_ip_address_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_disassociate_resolver_endpoint_ip_address(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverEndpointIpAddressError::unhandled)?;
output.build()
})
}
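/// Deserializes an error response for `DisassociateResolverQueryLogConfig` into the operation's
/// typed error; unknown error codes fall back to the generic variant.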
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_resolver_query_log_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateResolverQueryLogConfigOutput,
crate::error::DisassociateResolverQueryLogConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DisassociateResolverQueryLogConfigError::unhandled(generic))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::DisassociateResolverQueryLogConfigError { meta: generic, kind: crate::error::DisassociateResolverQueryLogConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::DisassociateResolverQueryLogConfigError { meta: generic, kind: crate::error::DisassociateResolverQueryLogConfigErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::DisassociateResolverQueryLogConfigError { meta: generic, kind: crate::error::DisassociateResolverQueryLogConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::DisassociateResolverQueryLogConfigError { meta: generic, kind: crate::error::DisassociateResolverQueryLogConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::DisassociateResolverQueryLogConfigError { meta: generic, kind: crate::error::DisassociateResolverQueryLogConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::DisassociateResolverQueryLogConfigError { meta: generic, kind: crate::error::DisassociateResolverQueryLogConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::DisassociateResolverQueryLogConfigError::generic(generic)
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_resolver_query_log_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateResolverQueryLogConfigOutput,
crate::error::DisassociateResolverQueryLogConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::disassociate_resolver_query_log_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_disassociate_resolver_query_log_config(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverQueryLogConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_resolver_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateResolverRuleOutput,
crate::error::DisassociateResolverRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DisassociateResolverRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DisassociateResolverRuleError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::DisassociateResolverRuleError {
meta: generic,
kind: crate::error::DisassociateResolverRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::DisassociateResolverRuleError {
meta: generic,
kind: crate::error::DisassociateResolverRuleErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DisassociateResolverRuleError {
meta: generic,
kind: crate::error::DisassociateResolverRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::DisassociateResolverRuleError {
meta: generic,
kind: crate::error::DisassociateResolverRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::DisassociateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DisassociateResolverRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_disassociate_resolver_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DisassociateResolverRuleOutput,
crate::error::DisassociateResolverRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::disassociate_resolver_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_disassociate_resolver_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::DisassociateResolverRuleError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::GetFirewallConfigOutput, crate::error::GetFirewallConfigError>
{
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetFirewallConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetFirewallConfigError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetFirewallConfigError {
meta: generic,
kind: crate::error::GetFirewallConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::GetFirewallConfigError {
meta: generic,
kind: crate::error::GetFirewallConfigErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetFirewallConfigError {
meta: generic,
kind: crate::error::GetFirewallConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetFirewallConfigError {
meta: generic,
kind: crate::error::GetFirewallConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::GetFirewallConfigError {
meta: generic,
kind: crate::error::GetFirewallConfigErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetFirewallConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::GetFirewallConfigOutput, crate::error::GetFirewallConfigError>
{
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_firewall_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_firewall_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetFirewallConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_domain_list_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallDomainListOutput,
crate::error::GetFirewallDomainListError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetFirewallDomainListError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetFirewallDomainListError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetFirewallDomainListError {
meta: generic,
kind: crate::error::GetFirewallDomainListErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::GetFirewallDomainListError {
meta: generic,
kind: crate::error::GetFirewallDomainListErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetFirewallDomainListError {
meta: generic,
kind: crate::error::GetFirewallDomainListErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetFirewallDomainListError {
meta: generic,
kind: crate::error::GetFirewallDomainListErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallDomainListError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetFirewallDomainListError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_domain_list_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallDomainListOutput,
crate::error::GetFirewallDomainListError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_firewall_domain_list_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_firewall_domain_list(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetFirewallDomainListError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_rule_group_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallRuleGroupOutput,
crate::error::GetFirewallRuleGroupError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetFirewallRuleGroupError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetFirewallRuleGroupError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetFirewallRuleGroupError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::GetFirewallRuleGroupError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetFirewallRuleGroupError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetFirewallRuleGroupError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetFirewallRuleGroupError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_rule_group_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallRuleGroupOutput,
crate::error::GetFirewallRuleGroupError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_firewall_rule_group_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_firewall_rule_group(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetFirewallRuleGroupError::unhandled)?;
output.build()
})
}
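/// Deserializes an error response for `GetFirewallRuleGroupAssociation`; any error code that is
/// not modeled is returned as the generic error.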
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_rule_group_association_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallRuleGroupAssociationOutput,
crate::error::GetFirewallRuleGroupAssociationError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetFirewallRuleGroupAssociationError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetFirewallRuleGroupAssociationError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::GetFirewallRuleGroupAssociationErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::GetFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::GetFirewallRuleGroupAssociationErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::GetFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::GetFirewallRuleGroupAssociationErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::GetFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::GetFirewallRuleGroupAssociationErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::GetFirewallRuleGroupAssociationError::generic(generic)
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_rule_group_association_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallRuleGroupAssociationOutput,
crate::error::GetFirewallRuleGroupAssociationError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::get_firewall_rule_group_association_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_get_firewall_rule_group_association(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetFirewallRuleGroupAssociationError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_rule_group_policy_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallRuleGroupPolicyOutput,
crate::error::GetFirewallRuleGroupPolicyError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetFirewallRuleGroupPolicyError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::GetFirewallRuleGroupPolicyError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupPolicyErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::GetFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupPolicyErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"ResourceNotFoundException" => crate::error::GetFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupPolicyErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupPolicyErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::GetFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::GetFirewallRuleGroupPolicyErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetFirewallRuleGroupPolicyError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_firewall_rule_group_policy_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetFirewallRuleGroupPolicyOutput,
crate::error::GetFirewallRuleGroupPolicyError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_firewall_rule_group_policy_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_firewall_rule_group_policy(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetFirewallRuleGroupPolicyError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::GetResolverConfigOutput, crate::error::GetResolverConfigError>
{
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetResolverConfigError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetResolverConfigError {
meta: generic,
kind: crate::error::GetResolverConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::GetResolverConfigError {
meta: generic,
kind: crate::error::GetResolverConfigErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::GetResolverConfigError {
meta: generic,
kind: crate::error::GetResolverConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetResolverConfigError {
meta: generic,
kind: crate::error::GetResolverConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetResolverConfigError {
meta: generic,
kind: crate::error::GetResolverConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetResolverConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::GetResolverConfigOutput, crate::error::GetResolverConfigError>
{
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_resolver_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetResolverConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_dnssec_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverDnssecConfigOutput,
crate::error::GetResolverDnssecConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::GetResolverDnssecConfigError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetResolverDnssecConfigError {
meta: generic,
kind: crate::error::GetResolverDnssecConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::GetResolverDnssecConfigError {
meta: generic,
kind: crate::error::GetResolverDnssecConfigErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::GetResolverDnssecConfigError {
meta: generic,
kind: crate::error::GetResolverDnssecConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::GetResolverDnssecConfigError {
meta: generic,
kind: crate::error::GetResolverDnssecConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetResolverDnssecConfigError {
meta: generic,
kind: crate::error::GetResolverDnssecConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetResolverDnssecConfigError {
meta: generic,
kind: crate::error::GetResolverDnssecConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetResolverDnssecConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_dnssec_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverDnssecConfigOutput,
crate::error::GetResolverDnssecConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_resolver_dnssec_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_dnssec_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetResolverDnssecConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_endpoint_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverEndpointOutput,
crate::error::GetResolverEndpointError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverEndpointError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetResolverEndpointError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::GetResolverEndpointError {
meta: generic,
kind: crate::error::GetResolverEndpointErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::GetResolverEndpointError {
meta: generic,
kind: crate::error::GetResolverEndpointErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetResolverEndpointError {
meta: generic,
kind: crate::error::GetResolverEndpointErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetResolverEndpointError {
meta: generic,
kind: crate::error::GetResolverEndpointErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetResolverEndpointError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_endpoint_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverEndpointOutput,
crate::error::GetResolverEndpointError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_resolver_endpoint_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_endpoint(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetResolverEndpointError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_query_log_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverQueryLogConfigOutput,
crate::error::GetResolverQueryLogConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::GetResolverQueryLogConfigError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetResolverQueryLogConfigError {
meta: generic,
kind: crate::error::GetResolverQueryLogConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::GetResolverQueryLogConfigError {
meta: generic,
kind: crate::error::GetResolverQueryLogConfigErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidParameterException" => crate::error::GetResolverQueryLogConfigError {
meta: generic,
kind: crate::error::GetResolverQueryLogConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::GetResolverQueryLogConfigError {
meta: generic,
kind: crate::error::GetResolverQueryLogConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetResolverQueryLogConfigError {
meta: generic,
kind: crate::error::GetResolverQueryLogConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetResolverQueryLogConfigError {
meta: generic,
kind: crate::error::GetResolverQueryLogConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetResolverQueryLogConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_query_log_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverQueryLogConfigOutput,
crate::error::GetResolverQueryLogConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_resolver_query_log_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_query_log_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetResolverQueryLogConfigError::unhandled)?;
output.build()
})
}
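/// Deserializes an error response for `GetResolverQueryLogConfigAssociation`, building the
/// matching typed error kind from the JSON body.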
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_query_log_config_association_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverQueryLogConfigAssociationOutput,
crate::error::GetResolverQueryLogConfigAssociationError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled(generic))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetResolverQueryLogConfigAssociationError { meta: generic, kind: crate::error::GetResolverQueryLogConfigAssociationErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::GetResolverQueryLogConfigAssociationError { meta: generic, kind: crate::error::GetResolverQueryLogConfigAssociationErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::GetResolverQueryLogConfigAssociationError { meta: generic, kind: crate::error::GetResolverQueryLogConfigAssociationErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::GetResolverQueryLogConfigAssociationError { meta: generic, kind: crate::error::GetResolverQueryLogConfigAssociationErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::GetResolverQueryLogConfigAssociationError { meta: generic, kind: crate::error::GetResolverQueryLogConfigAssociationErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::GetResolverQueryLogConfigAssociationError { meta: generic, kind: crate::error::GetResolverQueryLogConfigAssociationErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::GetResolverQueryLogConfigAssociationError::generic(generic)
})
}
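/// Deserializes a successful GetResolverQueryLogConfigAssociation response body into the typed output.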
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_query_log_config_association_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverQueryLogConfigAssociationOutput,
crate::error::GetResolverQueryLogConfigAssociationError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::get_resolver_query_log_config_association_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_query_log_config_association(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigAssociationError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_query_log_config_policy_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverQueryLogConfigPolicyOutput,
crate::error::GetResolverQueryLogConfigPolicyError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::GetResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::GetResolverQueryLogConfigPolicyErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::GetResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::GetResolverQueryLogConfigPolicyErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::GetResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::GetResolverQueryLogConfigPolicyErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::GetResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::GetResolverQueryLogConfigPolicyErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"UnknownResourceException" => crate::error::GetResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::GetResolverQueryLogConfigPolicyErrorKind::UnknownResourceException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::unknown_resource_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_unknown_resource_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::GetResolverQueryLogConfigPolicyError::generic(generic)
})
}
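/// Deserializes a successful GetResolverQueryLogConfigPolicy response body into the typed output.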
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_query_log_config_policy_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverQueryLogConfigPolicyOutput,
crate::error::GetResolverQueryLogConfigPolicyError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::get_resolver_query_log_config_policy_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_query_log_config_policy(response.body().as_ref(), output).map_err(crate::error::GetResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
})
}
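/// Maps a GetResolverRule error response onto the modeled exceptions (internal service error, invalid parameter, resource not found, throttling); unrecognized codes become the generic error.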
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::GetResolverRuleOutput, crate::error::GetResolverRuleError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetResolverRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::GetResolverRuleError {
meta: generic,
kind: crate::error::GetResolverRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::GetResolverRuleError {
meta: generic,
kind: crate::error::GetResolverRuleErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetResolverRuleError {
meta: generic,
kind: crate::error::GetResolverRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetResolverRuleError {
meta: generic,
kind: crate::error::GetResolverRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetResolverRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::GetResolverRuleOutput, crate::error::GetResolverRuleError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_resolver_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetResolverRuleError::unhandled)?;
output.build()
})
}
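/// Maps a GetResolverRuleAssociation error response onto the modeled exceptions; unrecognized codes become the generic error.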
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_rule_association_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverRuleAssociationOutput,
crate::error::GetResolverRuleAssociationError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverRuleAssociationError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::GetResolverRuleAssociationError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::GetResolverRuleAssociationError {
meta: generic,
kind: crate::error::GetResolverRuleAssociationErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleAssociationError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidParameterException" => crate::error::GetResolverRuleAssociationError {
meta: generic,
kind: crate::error::GetResolverRuleAssociationErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleAssociationError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::GetResolverRuleAssociationError {
meta: generic,
kind: crate::error::GetResolverRuleAssociationErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleAssociationError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::GetResolverRuleAssociationError {
meta: generic,
kind: crate::error::GetResolverRuleAssociationErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRuleAssociationError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetResolverRuleAssociationError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_rule_association_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverRuleAssociationOutput,
crate::error::GetResolverRuleAssociationError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_resolver_rule_association_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_rule_association(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetResolverRuleAssociationError::unhandled)?;
output.build()
})
}
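/// Maps a GetResolverRulePolicy error response onto internal service error, invalid parameter, or unknown resource exceptions; anything else becomes the generic error.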
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_rule_policy_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverRulePolicyOutput,
crate::error::GetResolverRulePolicyError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::GetResolverRulePolicyError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::GetResolverRulePolicyError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::GetResolverRulePolicyError {
meta: generic,
kind: crate::error::GetResolverRulePolicyErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRulePolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::GetResolverRulePolicyError {
meta: generic,
kind: crate::error::GetResolverRulePolicyErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRulePolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"UnknownResourceException" => crate::error::GetResolverRulePolicyError {
meta: generic,
kind: crate::error::GetResolverRulePolicyErrorKind::UnknownResourceException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::unknown_resource_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_unknown_resource_exception_json_err(response.body().as_ref(), output).map_err(crate::error::GetResolverRulePolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::GetResolverRulePolicyError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_get_resolver_rule_policy_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::GetResolverRulePolicyOutput,
crate::error::GetResolverRulePolicyError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::get_resolver_rule_policy_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_get_resolver_rule_policy(
response.body().as_ref(),
output,
)
.map_err(crate::error::GetResolverRulePolicyError::unhandled)?;
output.build()
})
}
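/// Maps an ImportFirewallDomains error response onto the modeled exceptions (access denied, conflict, internal service error, limit exceeded, resource not found, throttling, validation).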
#[allow(clippy::unnecessary_wraps)]
pub fn parse_import_firewall_domains_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ImportFirewallDomainsOutput,
crate::error::ImportFirewallDomainsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ImportFirewallDomainsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ImportFirewallDomainsError {
meta: generic,
kind: crate::error::ImportFirewallDomainsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConflictException" => {
crate::error::ImportFirewallDomainsError {
meta: generic,
kind: crate::error::ImportFirewallDomainsErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServiceErrorException" => crate::error::ImportFirewallDomainsError {
meta: generic,
kind: crate::error::ImportFirewallDomainsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::ImportFirewallDomainsError {
meta: generic,
kind: crate::error::ImportFirewallDomainsErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::ImportFirewallDomainsError {
meta: generic,
kind: crate::error::ImportFirewallDomainsErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ImportFirewallDomainsError {
meta: generic,
kind: crate::error::ImportFirewallDomainsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ImportFirewallDomainsError {
meta: generic,
kind: crate::error::ImportFirewallDomainsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ImportFirewallDomainsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_import_firewall_domains_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ImportFirewallDomainsOutput,
crate::error::ImportFirewallDomainsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::import_firewall_domains_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_import_firewall_domains(
response.body().as_ref(),
output,
)
.map_err(crate::error::ImportFirewallDomainsError::unhandled)?;
output.build()
})
}
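/// Maps a ListFirewallConfigs error response onto the modeled exceptions; unrecognized codes become the generic error.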
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_configs_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallConfigsOutput,
crate::error::ListFirewallConfigsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListFirewallConfigsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListFirewallConfigsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListFirewallConfigsError {
meta: generic,
kind: crate::error::ListFirewallConfigsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListFirewallConfigsError {
meta: generic,
kind: crate::error::ListFirewallConfigsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListFirewallConfigsError {
meta: generic,
kind: crate::error::ListFirewallConfigsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListFirewallConfigsError {
meta: generic,
kind: crate::error::ListFirewallConfigsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListFirewallConfigsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_configs_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallConfigsOutput,
crate::error::ListFirewallConfigsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_firewall_configs_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_firewall_configs(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListFirewallConfigsError::unhandled)?;
output.build()
})
}
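/// Maps a ListFirewallDomainLists error response onto the modeled exceptions; unrecognized codes become the generic error.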
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_domain_lists_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallDomainListsOutput,
crate::error::ListFirewallDomainListsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListFirewallDomainListsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::ListFirewallDomainListsError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListFirewallDomainListsError {
meta: generic,
kind: crate::error::ListFirewallDomainListsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainListsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListFirewallDomainListsError {
meta: generic,
kind: crate::error::ListFirewallDomainListsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainListsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListFirewallDomainListsError {
meta: generic,
kind: crate::error::ListFirewallDomainListsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainListsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListFirewallDomainListsError {
meta: generic,
kind: crate::error::ListFirewallDomainListsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainListsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListFirewallDomainListsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_domain_lists_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallDomainListsOutput,
crate::error::ListFirewallDomainListsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_firewall_domain_lists_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_firewall_domain_lists(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListFirewallDomainListsError::unhandled)?;
output.build()
})
}
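/// Maps a ListFirewallDomains error response onto the modeled exceptions, including resource not found; unrecognized codes become the generic error.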
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_domains_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallDomainsOutput,
crate::error::ListFirewallDomainsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListFirewallDomainsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListFirewallDomainsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListFirewallDomainsError {
meta: generic,
kind: crate::error::ListFirewallDomainsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListFirewallDomainsError {
meta: generic,
kind: crate::error::ListFirewallDomainsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::ListFirewallDomainsError {
meta: generic,
kind: crate::error::ListFirewallDomainsErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListFirewallDomainsError {
meta: generic,
kind: crate::error::ListFirewallDomainsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListFirewallDomainsError {
meta: generic,
kind: crate::error::ListFirewallDomainsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListFirewallDomainsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_domains_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallDomainsOutput,
crate::error::ListFirewallDomainsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_firewall_domains_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_firewall_domains(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListFirewallDomainsError::unhandled)?;
output.build()
})
}
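/// Maps a ListFirewallRuleGroupAssociations error response onto the modeled exceptions; unrecognized codes become the generic error.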
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_rule_group_associations_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallRuleGroupAssociationsOutput,
crate::error::ListFirewallRuleGroupAssociationsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListFirewallRuleGroupAssociationsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::ListFirewallRuleGroupAssociationsError::unhandled(generic))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListFirewallRuleGroupAssociationsError { meta: generic, kind: crate::error::ListFirewallRuleGroupAssociationsErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::ListFirewallRuleGroupAssociationsError { meta: generic, kind: crate::error::ListFirewallRuleGroupAssociationsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::ListFirewallRuleGroupAssociationsError { meta: generic, kind: crate::error::ListFirewallRuleGroupAssociationsErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ValidationException" => crate::error::ListFirewallRuleGroupAssociationsError { meta: generic, kind: crate::error::ListFirewallRuleGroupAssociationsErrorKind::ValidationException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::ListFirewallRuleGroupAssociationsError::generic(generic)
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_rule_group_associations_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallRuleGroupAssociationsOutput,
crate::error::ListFirewallRuleGroupAssociationsError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::list_firewall_rule_group_associations_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_firewall_rule_group_associations(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupAssociationsError::unhandled)?;
output.build()
})
}
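/// Maps a ListFirewallRuleGroups error response onto the modeled exceptions; unrecognized codes become the generic error.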
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_rule_groups_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallRuleGroupsOutput,
crate::error::ListFirewallRuleGroupsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListFirewallRuleGroupsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::ListFirewallRuleGroupsError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListFirewallRuleGroupsError {
meta: generic,
kind: crate::error::ListFirewallRuleGroupsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListFirewallRuleGroupsError {
meta: generic,
kind: crate::error::ListFirewallRuleGroupsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListFirewallRuleGroupsError {
meta: generic,
kind: crate::error::ListFirewallRuleGroupsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListFirewallRuleGroupsError {
meta: generic,
kind: crate::error::ListFirewallRuleGroupsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRuleGroupsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListFirewallRuleGroupsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_rule_groups_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListFirewallRuleGroupsOutput,
crate::error::ListFirewallRuleGroupsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_firewall_rule_groups_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_firewall_rule_groups(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListFirewallRuleGroupsError::unhandled)?;
output.build()
})
}
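/// Maps a ListFirewallRules error response onto the modeled exceptions; unrecognized codes become the generic error.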
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_rules_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::ListFirewallRulesOutput, crate::error::ListFirewallRulesError>
{
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListFirewallRulesError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListFirewallRulesError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListFirewallRulesError {
meta: generic,
kind: crate::error::ListFirewallRulesErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListFirewallRulesError {
meta: generic,
kind: crate::error::ListFirewallRulesErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::ListFirewallRulesError {
meta: generic,
kind: crate::error::ListFirewallRulesErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListFirewallRulesError {
meta: generic,
kind: crate::error::ListFirewallRulesErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListFirewallRulesError {
meta: generic,
kind: crate::error::ListFirewallRulesErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListFirewallRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListFirewallRulesError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_firewall_rules_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::ListFirewallRulesOutput, crate::error::ListFirewallRulesError>
{
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_firewall_rules_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_firewall_rules(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListFirewallRulesError::unhandled)?;
output.build()
})
}
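/// Maps a ListResolverConfigs error response onto the modeled exceptions, including the pagination-related invalid next token case.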
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_configs_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverConfigsOutput,
crate::error::ListResolverConfigsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverConfigsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListResolverConfigsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListResolverConfigsError {
meta: generic,
kind: crate::error::ListResolverConfigsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListResolverConfigsError {
meta: generic,
kind: crate::error::ListResolverConfigsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidNextTokenException" => crate::error::ListResolverConfigsError {
meta: generic,
kind: crate::error::ListResolverConfigsErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::ListResolverConfigsError {
meta: generic,
kind: crate::error::ListResolverConfigsErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::ListResolverConfigsError {
meta: generic,
kind: crate::error::ListResolverConfigsErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListResolverConfigsError {
meta: generic,
kind: crate::error::ListResolverConfigsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListResolverConfigsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_configs_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverConfigsOutput,
crate::error::ListResolverConfigsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_resolver_configs_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_resolver_configs(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListResolverConfigsError::unhandled)?;
output.build()
})
}
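/// Maps a ListResolverDnssecConfigs error response onto the modeled exceptions; unrecognized codes become the generic error.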
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_dnssec_configs_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverDnssecConfigsOutput,
crate::error::ListResolverDnssecConfigsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::ListResolverDnssecConfigsError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListResolverDnssecConfigsError {
meta: generic,
kind: crate::error::ListResolverDnssecConfigsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListResolverDnssecConfigsError {
meta: generic,
kind: crate::error::ListResolverDnssecConfigsErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidNextTokenException" => crate::error::ListResolverDnssecConfigsError {
meta: generic,
kind: crate::error::ListResolverDnssecConfigsErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::ListResolverDnssecConfigsError {
meta: generic,
kind: crate::error::ListResolverDnssecConfigsErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::ListResolverDnssecConfigsError {
meta: generic,
kind: crate::error::ListResolverDnssecConfigsErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListResolverDnssecConfigsError {
meta: generic,
kind: crate::error::ListResolverDnssecConfigsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListResolverDnssecConfigsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_dnssec_configs_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverDnssecConfigsOutput,
crate::error::ListResolverDnssecConfigsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_resolver_dnssec_configs_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_resolver_dnssec_configs(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListResolverDnssecConfigsError::unhandled)?;
output.build()
})
}
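/// Maps a ListResolverEndpointIpAddresses error response onto the modeled exceptions; unrecognized codes become the generic error.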
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_endpoint_ip_addresses_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverEndpointIpAddressesOutput,
crate::error::ListResolverEndpointIpAddressesError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverEndpointIpAddressesError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListResolverEndpointIpAddressesError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::ListResolverEndpointIpAddressesError { meta: generic, kind: crate::error::ListResolverEndpointIpAddressesErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointIpAddressesError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidNextTokenException" => crate::error::ListResolverEndpointIpAddressesError { meta: generic, kind: crate::error::ListResolverEndpointIpAddressesErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointIpAddressesError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::ListResolverEndpointIpAddressesError { meta: generic, kind: crate::error::ListResolverEndpointIpAddressesErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointIpAddressesError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::ListResolverEndpointIpAddressesError { meta: generic, kind: crate::error::ListResolverEndpointIpAddressesErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointIpAddressesError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::ListResolverEndpointIpAddressesError { meta: generic, kind: crate::error::ListResolverEndpointIpAddressesErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointIpAddressesError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::ListResolverEndpointIpAddressesError::generic(generic)
})
}
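/// Deserializes a successful `ListResolverEndpointIpAddresses` response body into `ListResolverEndpointIpAddressesOutput`.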
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_endpoint_ip_addresses_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverEndpointIpAddressesOutput,
crate::error::ListResolverEndpointIpAddressesError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::list_resolver_endpoint_ip_addresses_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_list_resolver_endpoint_ip_addresses(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListResolverEndpointIpAddressesError::unhandled)?;
output.build()
})
}
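/// Maps a failed `ListResolverEndpoints` response to `ListResolverEndpointsError`
/// (InternalServiceErrorException, InvalidNextTokenException, InvalidParameterException,
/// InvalidRequestException, ThrottlingException); unrecognized codes fall back to a generic error.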
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_endpoints_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverEndpointsOutput,
crate::error::ListResolverEndpointsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverEndpointsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListResolverEndpointsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::ListResolverEndpointsError {
meta: generic,
kind: crate::error::ListResolverEndpointsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidNextTokenException" => crate::error::ListResolverEndpointsError {
meta: generic,
kind: crate::error::ListResolverEndpointsErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::ListResolverEndpointsError {
meta: generic,
kind: crate::error::ListResolverEndpointsErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::ListResolverEndpointsError {
meta: generic,
kind: crate::error::ListResolverEndpointsErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListResolverEndpointsError {
meta: generic,
kind: crate::error::ListResolverEndpointsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverEndpointsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListResolverEndpointsError::generic(generic),
})
}
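/// Deserializes a successful `ListResolverEndpoints` response body into `ListResolverEndpointsOutput`.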
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_endpoints_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverEndpointsOutput,
crate::error::ListResolverEndpointsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_resolver_endpoints_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_resolver_endpoints(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListResolverEndpointsError::unhandled)?;
output.build()
})
}
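/// Maps a failed `ListResolverQueryLogConfigAssociations` response to its typed error
/// (AccessDeniedException, InternalServiceErrorException, InvalidParameterException,
/// InvalidRequestException, LimitExceededException, ThrottlingException); unrecognized codes
/// fall back to a generic error.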
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_query_log_config_associations_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverQueryLogConfigAssociationsOutput,
crate::error::ListResolverQueryLogConfigAssociationsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(
crate::error::ListResolverQueryLogConfigAssociationsError::unhandled(generic),
)
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListResolverQueryLogConfigAssociationsError { meta: generic, kind: crate::error::ListResolverQueryLogConfigAssociationsErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::ListResolverQueryLogConfigAssociationsError { meta: generic, kind: crate::error::ListResolverQueryLogConfigAssociationsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::ListResolverQueryLogConfigAssociationsError { meta: generic, kind: crate::error::ListResolverQueryLogConfigAssociationsErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::ListResolverQueryLogConfigAssociationsError { meta: generic, kind: crate::error::ListResolverQueryLogConfigAssociationsErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"LimitExceededException" => crate::error::ListResolverQueryLogConfigAssociationsError { meta: generic, kind: crate::error::ListResolverQueryLogConfigAssociationsErrorKind::LimitExceededException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::ListResolverQueryLogConfigAssociationsError { meta: generic, kind: crate::error::ListResolverQueryLogConfigAssociationsErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::ListResolverQueryLogConfigAssociationsError::generic(generic)
})
}
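/// Deserializes a successful `ListResolverQueryLogConfigAssociations` response body into the operation output.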
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_query_log_config_associations_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverQueryLogConfigAssociationsOutput,
crate::error::ListResolverQueryLogConfigAssociationsError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::list_resolver_query_log_config_associations_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_resolver_query_log_config_associations(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigAssociationsError::unhandled)?;
output.build()
})
}
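/// Maps a failed `ListResolverQueryLogConfigs` response to its typed error (AccessDeniedException,
/// InternalServiceErrorException, InvalidNextTokenException, InvalidParameterException,
/// InvalidRequestException, ThrottlingException); unrecognized codes fall back to a generic error.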
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_query_log_configs_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverQueryLogConfigsOutput,
crate::error::ListResolverQueryLogConfigsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::ListResolverQueryLogConfigsError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::ListResolverQueryLogConfigsError {
meta: generic,
kind: crate::error::ListResolverQueryLogConfigsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::ListResolverQueryLogConfigsError {
meta: generic,
kind: crate::error::ListResolverQueryLogConfigsErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidNextTokenException" => crate::error::ListResolverQueryLogConfigsError {
meta: generic,
kind: crate::error::ListResolverQueryLogConfigsErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::ListResolverQueryLogConfigsError {
meta: generic,
kind: crate::error::ListResolverQueryLogConfigsErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::ListResolverQueryLogConfigsError {
meta: generic,
kind: crate::error::ListResolverQueryLogConfigsErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListResolverQueryLogConfigsError {
meta: generic,
kind: crate::error::ListResolverQueryLogConfigsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListResolverQueryLogConfigsError::generic(generic),
})
}
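/// Deserializes a successful `ListResolverQueryLogConfigs` response body into `ListResolverQueryLogConfigsOutput`.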
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_query_log_configs_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverQueryLogConfigsOutput,
crate::error::ListResolverQueryLogConfigsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_resolver_query_log_configs_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_list_resolver_query_log_configs(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListResolverQueryLogConfigsError::unhandled)?;
output.build()
})
}
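/// Maps a failed `ListResolverRuleAssociations` response to its typed error
/// (InternalServiceErrorException, InvalidNextTokenException, InvalidParameterException,
/// InvalidRequestException, ThrottlingException); unrecognized codes fall back to a generic error.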
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_rule_associations_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverRuleAssociationsOutput,
crate::error::ListResolverRuleAssociationsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverRuleAssociationsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::ListResolverRuleAssociationsError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::ListResolverRuleAssociationsError {
meta: generic,
kind:
crate::error::ListResolverRuleAssociationsErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRuleAssociationsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidNextTokenException" => crate::error::ListResolverRuleAssociationsError {
meta: generic,
kind: crate::error::ListResolverRuleAssociationsErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRuleAssociationsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::ListResolverRuleAssociationsError {
meta: generic,
kind: crate::error::ListResolverRuleAssociationsErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRuleAssociationsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::ListResolverRuleAssociationsError {
meta: generic,
kind: crate::error::ListResolverRuleAssociationsErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRuleAssociationsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListResolverRuleAssociationsError {
meta: generic,
kind: crate::error::ListResolverRuleAssociationsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRuleAssociationsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListResolverRuleAssociationsError::generic(generic),
})
}
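/// Deserializes a successful `ListResolverRuleAssociations` response body into `ListResolverRuleAssociationsOutput`.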
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_rule_associations_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListResolverRuleAssociationsOutput,
crate::error::ListResolverRuleAssociationsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_resolver_rule_associations_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_list_resolver_rule_associations(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListResolverRuleAssociationsError::unhandled)?;
output.build()
})
}
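/// Maps a failed `ListResolverRules` response to `ListResolverRulesError` (InternalServiceErrorException,
/// InvalidNextTokenException, InvalidParameterException, InvalidRequestException,
/// ThrottlingException); unrecognized codes fall back to a generic error.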
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_rules_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::ListResolverRulesOutput, crate::error::ListResolverRulesError>
{
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListResolverRulesError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListResolverRulesError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::ListResolverRulesError {
meta: generic,
kind: crate::error::ListResolverRulesErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidNextTokenException" => crate::error::ListResolverRulesError {
meta: generic,
kind: crate::error::ListResolverRulesErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::ListResolverRulesError {
meta: generic,
kind: crate::error::ListResolverRulesErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::ListResolverRulesError {
meta: generic,
kind: crate::error::ListResolverRulesErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListResolverRulesError {
meta: generic,
kind: crate::error::ListResolverRulesErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListResolverRulesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListResolverRulesError::generic(generic),
})
}
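/// Deserializes a successful `ListResolverRules` response body into `ListResolverRulesOutput`.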
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_resolver_rules_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::ListResolverRulesOutput, crate::error::ListResolverRulesError>
{
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_resolver_rules_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_resolver_rules(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListResolverRulesError::unhandled)?;
output.build()
})
}
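/// Maps a failed `ListTagsForResource` response to `ListTagsForResourceError`
/// (InternalServiceErrorException, InvalidNextTokenException, InvalidParameterException,
/// InvalidRequestException, ResourceNotFoundException, ThrottlingException); unrecognized codes
/// fall back to a generic error.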
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_tags_for_resource_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListTagsForResourceOutput,
crate::error::ListTagsForResourceError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListTagsForResourceError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListTagsForResourceError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidNextTokenException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::InvalidNextTokenException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_next_token_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_next_token_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListTagsForResourceError::generic(generic),
})
}
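/// Deserializes a successful `ListTagsForResource` response body into `ListTagsForResourceOutput`.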
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_tags_for_resource_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListTagsForResourceOutput,
crate::error::ListTagsForResourceError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_tags_for_resource_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_tags_for_resource(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
})
}
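/// Maps a failed `PutFirewallRuleGroupPolicy` response to its typed error (AccessDeniedException,
/// InternalServiceErrorException, ResourceNotFoundException, ThrottlingException,
/// ValidationException); unrecognized codes fall back to a generic error.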
#[allow(clippy::unnecessary_wraps)]
pub fn parse_put_firewall_rule_group_policy_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::PutFirewallRuleGroupPolicyOutput,
crate::error::PutFirewallRuleGroupPolicyError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::PutFirewallRuleGroupPolicyError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::PutFirewallRuleGroupPolicyError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::PutFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::PutFirewallRuleGroupPolicyErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::PutFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::PutFirewallRuleGroupPolicyErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"ResourceNotFoundException" => crate::error::PutFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::PutFirewallRuleGroupPolicyErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::PutFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::PutFirewallRuleGroupPolicyErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::PutFirewallRuleGroupPolicyError {
meta: generic,
kind: crate::error::PutFirewallRuleGroupPolicyErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutFirewallRuleGroupPolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::PutFirewallRuleGroupPolicyError::generic(generic),
})
}
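/// Deserializes a successful `PutFirewallRuleGroupPolicy` response body into `PutFirewallRuleGroupPolicyOutput`.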
#[allow(clippy::unnecessary_wraps)]
pub fn parse_put_firewall_rule_group_policy_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::PutFirewallRuleGroupPolicyOutput,
crate::error::PutFirewallRuleGroupPolicyError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::put_firewall_rule_group_policy_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_put_firewall_rule_group_policy(
response.body().as_ref(),
output,
)
.map_err(crate::error::PutFirewallRuleGroupPolicyError::unhandled)?;
output.build()
})
}
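/// Maps a failed `PutResolverQueryLogConfigPolicy` response to its typed error (AccessDeniedException,
/// InternalServiceErrorException, InvalidParameterException, InvalidPolicyDocument,
/// InvalidRequestException, UnknownResourceException); unrecognized codes fall back to a generic error.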
#[allow(clippy::unnecessary_wraps)]
pub fn parse_put_resolver_query_log_config_policy_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::PutResolverQueryLogConfigPolicyOutput,
crate::error::PutResolverQueryLogConfigPolicyError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::PutResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::PutResolverQueryLogConfigPolicyErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::PutResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::PutResolverQueryLogConfigPolicyErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidParameterException" => crate::error::PutResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::PutResolverQueryLogConfigPolicyErrorKind::InvalidParameterException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidPolicyDocument" => crate::error::PutResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::PutResolverQueryLogConfigPolicyErrorKind::InvalidPolicyDocument({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_policy_document::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_policy_document_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InvalidRequestException" => crate::error::PutResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::PutResolverQueryLogConfigPolicyErrorKind::InvalidRequestException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"UnknownResourceException" => crate::error::PutResolverQueryLogConfigPolicyError { meta: generic, kind: crate::error::PutResolverQueryLogConfigPolicyErrorKind::UnknownResourceException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::unknown_resource_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_unknown_resource_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::PutResolverQueryLogConfigPolicyError::generic(generic)
})
}
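/// Deserializes a successful `PutResolverQueryLogConfigPolicy` response body into the operation output.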
#[allow(clippy::unnecessary_wraps)]
pub fn parse_put_resolver_query_log_config_policy_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::PutResolverQueryLogConfigPolicyOutput,
crate::error::PutResolverQueryLogConfigPolicyError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::put_resolver_query_log_config_policy_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_put_resolver_query_log_config_policy(response.body().as_ref(), output).map_err(crate::error::PutResolverQueryLogConfigPolicyError::unhandled)?;
output.build()
})
}
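/// Maps a failed `PutResolverRulePolicy` response to `PutResolverRulePolicyError`
/// (InternalServiceErrorException, InvalidParameterException, InvalidPolicyDocument,
/// UnknownResourceException); unrecognized codes fall back to a generic error.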
#[allow(clippy::unnecessary_wraps)]
pub fn parse_put_resolver_rule_policy_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::PutResolverRulePolicyOutput,
crate::error::PutResolverRulePolicyError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::PutResolverRulePolicyError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::PutResolverRulePolicyError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::PutResolverRulePolicyError {
meta: generic,
kind: crate::error::PutResolverRulePolicyErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverRulePolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::PutResolverRulePolicyError {
meta: generic,
kind: crate::error::PutResolverRulePolicyErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverRulePolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidPolicyDocument" => crate::error::PutResolverRulePolicyError {
meta: generic,
kind: crate::error::PutResolverRulePolicyErrorKind::InvalidPolicyDocument({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_policy_document::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_policy_document_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverRulePolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"UnknownResourceException" => crate::error::PutResolverRulePolicyError {
meta: generic,
kind: crate::error::PutResolverRulePolicyErrorKind::UnknownResourceException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::unknown_resource_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_unknown_resource_exception_json_err(response.body().as_ref(), output).map_err(crate::error::PutResolverRulePolicyError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::PutResolverRulePolicyError::generic(generic),
})
}
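/// Deserializes a successful `PutResolverRulePolicy` response body into `PutResolverRulePolicyOutput`.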
#[allow(clippy::unnecessary_wraps)]
pub fn parse_put_resolver_rule_policy_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::PutResolverRulePolicyOutput,
crate::error::PutResolverRulePolicyError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::put_resolver_rule_policy_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_put_resolver_rule_policy(
response.body().as_ref(),
output,
)
.map_err(crate::error::PutResolverRulePolicyError::unhandled)?;
output.build()
})
}
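/// Maps a failed `TagResource` response to `TagResourceError` (InternalServiceErrorException,
/// InvalidParameterException, InvalidRequestException, InvalidTagException, LimitExceededException,
/// ResourceNotFoundException, ThrottlingException); unrecognized codes fall back to a generic error.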
#[allow(clippy::unnecessary_wraps)]
pub fn parse_tag_resource_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::TagResourceOutput, crate::error::TagResourceError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::TagResourceError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::TagResourceError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidTagException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::InvalidTagException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_tag_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_tag_exception_json_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::TagResourceError::generic(generic),
})
}
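/// Builds the empty `TagResourceOutput` for a successful `TagResource` response; the body carries no data.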
#[allow(clippy::unnecessary_wraps)]
pub fn parse_tag_resource_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::TagResourceOutput, crate::error::TagResourceError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::tag_resource_output::Builder::default();
let _ = response;
output.build()
})
}
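/// Maps a failed `UntagResource` response to `UntagResourceError` (InternalServiceErrorException,
/// InvalidParameterException, InvalidRequestException, ResourceNotFoundException,
/// ThrottlingException); unrecognized codes fall back to a generic error.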
#[allow(clippy::unnecessary_wraps)]
pub fn parse_untag_resource_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::UntagResourceOutput, crate::error::UntagResourceError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UntagResourceError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UntagResourceError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UntagResourceError::generic(generic),
})
}
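/// Builds the empty `UntagResourceOutput` for a successful `UntagResource` response; the body carries no data.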
#[allow(clippy::unnecessary_wraps)]
pub fn parse_untag_resource_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::UntagResourceOutput, crate::error::UntagResourceError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::untag_resource_output::Builder::default();
let _ = response;
output.build()
})
}
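/// Maps a failed `UpdateFirewallConfig` response to its typed error (AccessDeniedException,
/// InternalServiceErrorException, ResourceNotFoundException, ThrottlingException,
/// ValidationException); unrecognized codes fall back to a generic error.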
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallConfigOutput,
crate::error::UpdateFirewallConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateFirewallConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UpdateFirewallConfigError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::UpdateFirewallConfigError {
meta: generic,
kind: crate::error::UpdateFirewallConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::UpdateFirewallConfigError {
meta: generic,
kind: crate::error::UpdateFirewallConfigErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateFirewallConfigError {
meta: generic,
kind: crate::error::UpdateFirewallConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UpdateFirewallConfigError {
meta: generic,
kind: crate::error::UpdateFirewallConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::UpdateFirewallConfigError {
meta: generic,
kind: crate::error::UpdateFirewallConfigErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateFirewallConfigError::generic(generic),
})
}
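/// Deserializes a successful `UpdateFirewallConfig` response body into `UpdateFirewallConfigOutput`.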
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallConfigOutput,
crate::error::UpdateFirewallConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_firewall_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_firewall_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateFirewallConfigError::unhandled)?;
output.build()
})
}
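/// Maps a failed `UpdateFirewallDomains` response to `UpdateFirewallDomainsError` by matching the
/// error code extracted from the generic JSON error body.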
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_domains_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallDomainsOutput,
crate::error::UpdateFirewallDomainsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UpdateFirewallDomainsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::UpdateFirewallDomainsError {
meta: generic,
kind: crate::error::UpdateFirewallDomainsErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConflictException" => {
crate::error::UpdateFirewallDomainsError {
meta: generic,
kind: crate::error::UpdateFirewallDomainsErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServiceErrorException" => crate::error::UpdateFirewallDomainsError {
meta: generic,
kind: crate::error::UpdateFirewallDomainsErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::UpdateFirewallDomainsError {
meta: generic,
kind: crate::error::UpdateFirewallDomainsErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateFirewallDomainsError {
meta: generic,
kind: crate::error::UpdateFirewallDomainsErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UpdateFirewallDomainsError {
meta: generic,
kind: crate::error::UpdateFirewallDomainsErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::UpdateFirewallDomainsError {
meta: generic,
kind: crate::error::UpdateFirewallDomainsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateFirewallDomainsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_domains_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallDomainsOutput,
crate::error::UpdateFirewallDomainsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_firewall_domains_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_firewall_domains(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateFirewallDomainsError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallRuleOutput,
crate::error::UpdateFirewallRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UpdateFirewallRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::UpdateFirewallRuleError {
meta: generic,
kind: crate::error::UpdateFirewallRuleErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConflictException" => {
crate::error::UpdateFirewallRuleError {
meta: generic,
kind: crate::error::UpdateFirewallRuleErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServiceErrorException" => crate::error::UpdateFirewallRuleError {
meta: generic,
kind: crate::error::UpdateFirewallRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateFirewallRuleError {
meta: generic,
kind: crate::error::UpdateFirewallRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UpdateFirewallRuleError {
meta: generic,
kind: crate::error::UpdateFirewallRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::UpdateFirewallRuleError {
meta: generic,
kind: crate::error::UpdateFirewallRuleErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateFirewallRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallRuleOutput,
crate::error::UpdateFirewallRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_firewall_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_firewall_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateFirewallRuleError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_rule_group_association_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallRuleGroupAssociationOutput,
crate::error::UpdateFirewallRuleGroupAssociationError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled(generic))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::UpdateFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::UpdateFirewallRuleGroupAssociationErrorKind::AccessDeniedException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ConflictException" => crate::error::UpdateFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::UpdateFirewallRuleGroupAssociationErrorKind::ConflictException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"InternalServiceErrorException" => crate::error::UpdateFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::UpdateFirewallRuleGroupAssociationErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ResourceNotFoundException" => crate::error::UpdateFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::UpdateFirewallRuleGroupAssociationErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ThrottlingException" => crate::error::UpdateFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::UpdateFirewallRuleGroupAssociationErrorKind::ThrottlingException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
"ValidationException" => crate::error::UpdateFirewallRuleGroupAssociationError { meta: generic, kind: crate::error::UpdateFirewallRuleGroupAssociationErrorKind::ValidationException({
#[allow(unused_mut)]let mut tmp =
{
#[allow(unused_mut)]let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
output.build()
}
;
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
})},
_ => crate::error::UpdateFirewallRuleGroupAssociationError::generic(generic)
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_firewall_rule_group_association_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateFirewallRuleGroupAssociationOutput,
crate::error::UpdateFirewallRuleGroupAssociationError,
> {
Ok({
#[allow(unused_mut)]
let mut output =
crate::output::update_firewall_rule_group_association_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_firewall_rule_group_association(response.body().as_ref(), output).map_err(crate::error::UpdateFirewallRuleGroupAssociationError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverConfigOutput,
crate::error::UpdateResolverConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateResolverConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UpdateResolverConfigError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceUnavailableException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::ResourceUnavailableException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::resource_unavailable_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_unavailable_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UpdateResolverConfigError {
meta: generic,
kind: crate::error::UpdateResolverConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateResolverConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverConfigOutput,
crate::error::UpdateResolverConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_resolver_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_resolver_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateResolverConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_dnssec_config_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverDnssecConfigOutput,
crate::error::UpdateResolverDnssecConfigError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::UpdateResolverDnssecConfigError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"AccessDeniedException" => crate::error::UpdateResolverDnssecConfigError {
meta: generic,
kind: crate::error::UpdateResolverDnssecConfigErrorKind::AccessDeniedException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::access_denied_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_access_denied_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServiceErrorException" => crate::error::UpdateResolverDnssecConfigError {
meta: generic,
kind: crate::error::UpdateResolverDnssecConfigErrorKind::InternalServiceErrorException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InvalidParameterException" => crate::error::UpdateResolverDnssecConfigError {
meta: generic,
kind: crate::error::UpdateResolverDnssecConfigErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::UpdateResolverDnssecConfigError {
meta: generic,
kind: crate::error::UpdateResolverDnssecConfigErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateResolverDnssecConfigError {
meta: generic,
kind: crate::error::UpdateResolverDnssecConfigErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UpdateResolverDnssecConfigError {
meta: generic,
kind: crate::error::UpdateResolverDnssecConfigErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateResolverDnssecConfigError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_dnssec_config_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverDnssecConfigOutput,
crate::error::UpdateResolverDnssecConfigError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_resolver_dnssec_config_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_resolver_dnssec_config(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateResolverDnssecConfigError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_endpoint_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverEndpointOutput,
crate::error::UpdateResolverEndpointError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateResolverEndpointError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::UpdateResolverEndpointError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::UpdateResolverEndpointError {
meta: generic,
kind: crate::error::UpdateResolverEndpointErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::UpdateResolverEndpointError {
meta: generic,
kind: crate::error::UpdateResolverEndpointErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::UpdateResolverEndpointError {
meta: generic,
kind: crate::error::UpdateResolverEndpointErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateResolverEndpointError {
meta: generic,
kind: crate::error::UpdateResolverEndpointErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UpdateResolverEndpointError {
meta: generic,
kind: crate::error::UpdateResolverEndpointErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverEndpointError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateResolverEndpointError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_endpoint_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverEndpointOutput,
crate::error::UpdateResolverEndpointError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_resolver_endpoint_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_resolver_endpoint(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateResolverEndpointError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_rule_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverRuleOutput,
crate::error::UpdateResolverRuleError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateResolverRuleError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UpdateResolverRuleError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServiceErrorException" => crate::error::UpdateResolverRuleError {
meta: generic,
kind: crate::error::UpdateResolverRuleErrorKind::InternalServiceErrorException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::internal_service_error_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_service_error_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidParameterException" => crate::error::UpdateResolverRuleError {
meta: generic,
kind: crate::error::UpdateResolverRuleErrorKind::InvalidParameterException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_parameter_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_parameter_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InvalidRequestException" => crate::error::UpdateResolverRuleError {
meta: generic,
kind: crate::error::UpdateResolverRuleErrorKind::InvalidRequestException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::invalid_request_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_invalid_request_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"LimitExceededException" => crate::error::UpdateResolverRuleError {
meta: generic,
kind: crate::error::UpdateResolverRuleErrorKind::LimitExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::limit_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_limit_exceeded_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateResolverRuleError {
meta: generic,
kind: crate::error::UpdateResolverRuleErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceUnavailableException" => crate::error::UpdateResolverRuleError {
meta: generic,
kind: crate::error::UpdateResolverRuleErrorKind::ResourceUnavailableException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::resource_unavailable_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_unavailable_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ThrottlingException" => crate::error::UpdateResolverRuleError {
meta: generic,
kind: crate::error::UpdateResolverRuleErrorKind::ThrottlingException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::throttling_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_throttling_exception_json_err(response.body().as_ref(), output).map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateResolverRuleError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_resolver_rule_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateResolverRuleOutput,
crate::error::UpdateResolverRuleError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_resolver_rule_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_resolver_rule(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateResolverRuleError::unhandled)?;
output.build()
})
}
| 47.919766 | 231 | 0.568487 |
f5f105a2edaa33b0e30f442e7a233d91f6d68c0c | 41,166 | use svd_parser::{Cluster, ClusterInfo, RegisterCluster};
use super::register::RegisterSpec;
use super::{AccessSpec, FieldSpec};
use crate::{
clean_whitespace_opt,
error::{SvdExpanderError, SvdExpanderResult},
value::EnumeratedValueSetSpec,
};
/// Describes a cluster of registers that exist on a peripheral. Clusters may be top-level
/// constructs of a peripheral or may be nested within other clusters.
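///
/// A minimal construction sketch, modeled on the tests at the bottom of this module; the XML
/// snippet, the `"PERIPH"` preceding path, and the `0x4000_0000` base address are hypothetical,
/// and `ClusterSpec::new` is crate-internal, so this is illustrative rather than a public-API
/// doctest:
///
/// ```ignore
/// use svd_parser::{parse::Parse, Cluster};
/// use xmltree::Element;
///
/// let el: Element = Element::parse(
///   r##"
///   <cluster>
///     <name>FOO</name>
///     <addressOffset>256</addressOffset>
///     <register>
///       <name>R1</name>
///       <addressOffset>4</addressOffset>
///     </register>
///   </cluster>
///   "##
///   .as_bytes(),
/// )
/// .unwrap();
///
/// let parsed = Cluster::parse(&el).unwrap();
/// // `new` returns one spec per array element; a non-array cluster yields exactly one.
/// let specs = ClusterSpec::new(&parsed, "PERIPH", 0x4000_0000).unwrap();
/// assert_eq!("PERIPH.FOO", specs[0].path());
/// assert_eq!(0x4000_0000 + 256, specs[0].address());
/// ```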
#[derive(Debug, Clone, PartialEq)]
pub struct ClusterSpec {
preceding_path: String,
derived_from: Option<String>,
base_address: u32,
/// Name that identifies the cluster. Must be unique within the scope of its parent.
pub name: String,
/// Description of the details of the register cluster.
pub description: Option<String>,
/// Cluster's starting address relative to its parent.
pub address_offset: u32,
/// Default bit width of any register contained within this cluster.
pub default_register_size: Option<u32>,
/// Default value after reset of any register contained within this cluster.
pub default_register_reset_value: Option<u32>,
/// Default register bits that have a defined reset value for any register contained within
/// this cluster.
pub default_register_reset_mask: Option<u32>,
/// Default access rights of any register contained within this cluster.
pub default_register_access: Option<AccessSpec>,
/// Top-level registers that exist within this cluster.
pub registers: Vec<RegisterSpec>,
/// Top-level clusters nested within this cluster.
pub clusters: Vec<ClusterSpec>,
}
impl ClusterSpec {
pub(crate) fn new(
c: &Cluster,
preceding_path: &str,
base_address: u32,
) -> SvdExpanderResult<Vec<Self>> {
let specs: Vec<Self> = match c {
Cluster::Single(ref ci) => vec![Self::from_cluster_info(ci, preceding_path, base_address)?],
Cluster::Array(ref ci, ref d) => {
let dim_indices = if let Some(ref di) = d.dim_index {
if d.dim != di.len() as u32 {
return Err(SvdExpanderError::new(&format!(
"Cluster {}: 'dim' element must have the same value as the length of 'dimIndex'",
&c.name
)));
}
di.to_owned()
} else {
(0..d.dim).map(|v| v.to_string()).collect()
};
let prototype = Self::from_cluster_info(ci, preceding_path, base_address)?;
let mut cluster_specs = Vec::with_capacity(d.dim as usize);
for (n, dim_index) in dim_indices.iter().enumerate() {
let mut spec = prototype.clone();
spec.interpolate_array_params(
dim_index.clone(),
prototype.address_offset + n as u32 * d.dim_increment,
);
cluster_specs.push(spec);
}
cluster_specs
}
};
Ok(specs)
}
  /// The memory address of this cluster: the parent's base address plus this cluster's
  /// `address_offset`.
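  ///
  /// For instance, a hypothetical cluster with an `address_offset` of `0x200` under a parent
  /// based at `0x4000_0000` reports `0x4000_0200`:
  ///
  /// ```ignore
  /// assert_eq!(0x4000_0200, cluster.address());
  /// ```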
pub fn address(&self) -> u32 {
self.base_address + self.address_offset
}
/// The full path to the cluster that this cluster inherits from (if any).
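  ///
  /// Per the resolution rule implemented here, a `derivedFrom` value that already contains a
  /// `.` is returned as-is, while a bare name is qualified with this cluster's parent path. A
  /// sketch with hypothetical names, assuming `foo` was parsed from
  /// `<cluster derivedFrom="BAR">` under the parent path `"PERIPH"` and `baz` from
  /// `<cluster derivedFrom="OTHER.BAR">`:
  ///
  /// ```ignore
  /// assert_eq!(Some("PERIPH.BAR".to_string()), foo.derived_from_path());
  /// assert_eq!(Some("OTHER.BAR".to_string()), baz.derived_from_path());
  /// ```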
pub fn derived_from_path(&self) -> Option<String> {
match self.derived_from {
Some(ref df) => match df.contains(".") {
true => Some(df.clone()),
false => Some(format!("{}.{}", self.preceding_path, df)),
},
None => None,
}
}
/// The full path to this cluster.
pub fn path(&self) -> String {
format!("{}.{}", self.preceding_path, self.name)
}
  /// Recursively iterates all the register clusters contained within this cluster, followed by
  /// this cluster itself.
pub fn iter_clusters<'a>(&'a self) -> Box<dyn Iterator<Item = &ClusterSpec> + 'a> {
Box::new(
self
.clusters
.iter()
.flat_map(|c| c.iter_clusters())
.chain(vec![self]),
)
}
/// Recursively iterates all the registers contained within this cluster.
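  ///
  /// Registers belonging to nested clusters are yielded before this cluster's own top-level
  /// registers. A sketch, assuming `cluster` is a `ClusterSpec` obtained from parsed SVD:
  ///
  /// ```ignore
  /// for register in cluster.iter_registers() {
  ///   println!("{}", register.name);
  /// }
  /// ```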
pub fn iter_registers<'a>(&'a self) -> Box<dyn Iterator<Item = &RegisterSpec> + 'a> {
Box::new(
self
.clusters
.iter()
.flat_map(|c| c.iter_registers())
.chain(self.registers.iter()),
)
}
/// Recursively iterates all the register fields contained within this cluster.
pub fn iter_fields<'a>(&'a self) -> Box<dyn Iterator<Item = &FieldSpec> + 'a> {
Box::new(self.iter_registers().flat_map(|r| r.fields.iter()))
}
  /// Recursively iterates all the enumerated value sets on all the registers contained within
/// this cluster.
pub fn iter_enumerated_value_sets<'a>(
&'a self,
) -> Box<dyn Iterator<Item = &EnumeratedValueSetSpec> + 'a> {
Box::new(
self
.iter_registers()
.flat_map(|r| r.iter_enumerated_value_sets()),
)
}
pub(crate) fn clone_with_overrides(&self, preceding_path: &str, base_address: u32) -> Self {
let mut cluster = Self {
preceding_path: preceding_path.to_owned(),
derived_from: None,
base_address,
name: self.name.clone(),
description: self.description.clone(),
address_offset: self.address_offset,
default_register_size: self.default_register_size,
default_register_reset_value: self.default_register_reset_value,
default_register_reset_mask: self.default_register_reset_mask,
default_register_access: self.default_register_access,
registers: Vec::new(),
clusters: Vec::new(),
};
cluster.registers = self
.registers
.iter()
.map(|r| r.clone_with_overrides(&cluster.path(), cluster.address()))
.collect();
cluster.clusters = self
.clusters
.iter()
.map(|c| c.clone_with_overrides(&cluster.path(), cluster.address()))
.collect();
cluster
}
pub(crate) fn mutate_clusters<F>(&mut self, f: F) -> SvdExpanderResult<bool>
where
F: Fn(&mut ClusterSpec) -> SvdExpanderResult<bool>,
F: Copy,
{
let mut changed = false;
for child in &mut self.clusters.iter_mut() {
if child.mutate_clusters(f)? {
changed = true;
}
}
if f(self)? {
changed = true;
}
Ok(changed)
}
pub(crate) fn mutate_registers<F>(&mut self, f: F) -> SvdExpanderResult<bool>
where
F: Fn(&mut RegisterSpec) -> SvdExpanderResult<bool>,
F: Copy,
{
let mut changed = false;
for child in &mut self.clusters.iter_mut() {
if child.mutate_registers(f)? {
changed = true;
}
}
for register in self.registers.iter_mut() {
if f(register)? {
changed = true;
}
}
Ok(changed)
}
pub(crate) fn mutate_fields<F>(&mut self, f: F) -> SvdExpanderResult<bool>
where
F: Fn(&mut FieldSpec) -> SvdExpanderResult<bool>,
F: Copy,
{
let mut changed = false;
for cluster in &mut self.clusters.iter_mut() {
if cluster.mutate_fields(f)? {
changed = true;
}
}
for register in &mut self.registers.iter_mut() {
if register.mutate_fields(f)? {
changed = true;
}
}
Ok(changed)
}
pub(crate) fn mutate_enumerated_value_sets<F>(&mut self, f: F) -> SvdExpanderResult<bool>
where
F: Fn(&mut EnumeratedValueSetSpec) -> SvdExpanderResult<bool>,
F: Copy,
{
let mut changed = false;
for cluster in &mut self.clusters.iter_mut() {
if cluster.mutate_enumerated_value_sets(f)? {
changed = true;
}
}
for register in &mut self.registers.iter_mut() {
if register.mutate_enumerated_value_sets(f)? {
changed = true;
}
}
Ok(changed)
}
pub(crate) fn inherit_from(&mut self, cs: &ClusterSpec) -> bool {
let mut changed = false;
if self.description.is_none() && cs.description.is_some() {
self.description = cs.description.clone();
changed = true;
}
if self.default_register_size.is_none() && cs.default_register_size.is_some() {
self.default_register_size = cs.default_register_size;
changed = true;
}
if self.default_register_access.is_none() && cs.default_register_access.is_some() {
self.default_register_access = cs.default_register_access;
changed = true;
}
if self.default_register_reset_value.is_none() && cs.default_register_reset_value.is_some() {
self.default_register_reset_value = cs.default_register_reset_value;
changed = true;
}
if self.default_register_reset_mask.is_none() && cs.default_register_reset_mask.is_some() {
self.default_register_reset_mask = cs.default_register_reset_mask;
changed = true;
}
for ancestor in cs.registers.iter() {
if let Some(ref mut descendant) = self.registers.iter_mut().find(|r| r.name == ancestor.name)
{
if descendant.inherit_from(ancestor) {
changed = true;
}
} else {
self
.registers
.push(ancestor.clone_with_overrides(&self.path(), self.address()));
changed = true;
}
}
for ancestor in cs.clusters.iter() {
if let Some(ref mut descendant) = self.clusters.iter_mut().find(|c| c.name == ancestor.name) {
if descendant.inherit_from(ancestor) {
changed = true;
}
} else {
self
.clusters
.push(ancestor.clone_with_overrides(&self.path(), self.address()));
changed = true;
}
}
changed
}
pub(crate) fn propagate_default_register_properties(
&mut self,
size: &Option<u32>,
reset_value: &Option<u32>,
reset_mask: &Option<u32>,
access: &Option<AccessSpec>,
) -> bool {
let mut changed = false;
if self.default_register_size.is_none() && size.is_some() {
self.default_register_size = size.clone();
changed = true;
}
if self.default_register_reset_value.is_none() && reset_value.is_some() {
self.default_register_reset_value = reset_value.clone();
changed = true;
}
if self.default_register_reset_mask.is_none() && reset_mask.is_some() {
self.default_register_reset_mask = reset_mask.clone();
changed = true;
}
if self.default_register_access.is_none() && access.is_some() {
self.default_register_access = access.clone();
changed = true;
}
for cluster in self.clusters.iter_mut() {
if cluster.propagate_default_register_properties(
&self.default_register_size,
&self.default_register_reset_value,
&self.default_register_reset_mask,
&self.default_register_access,
) {
changed = true;
}
}
for register in self.registers.iter_mut() {
if register.propagate_default_properties(
&self.default_register_size,
&self.default_register_reset_value,
&self.default_register_reset_mask,
&self.default_register_access,
) {
changed = true;
}
}
changed
}
fn from_cluster_info(
ci: &ClusterInfo,
preceding_path: &str,
base_address: u32,
) -> SvdExpanderResult<Self> {
let mut cluster = Self {
preceding_path: preceding_path.to_owned(),
derived_from: ci.derived_from.clone(),
base_address,
name: ci.name.clone(),
description: clean_whitespace_opt(ci.description.clone())?,
address_offset: ci.address_offset,
default_register_size: ci.default_register_properties.size.clone(),
default_register_reset_value: ci.default_register_properties.reset_value.clone(),
default_register_reset_mask: ci.default_register_properties.reset_mask.clone(),
default_register_access: match ci.default_register_properties.access {
Some(ref a) => Some(AccessSpec::new(a)),
None => None,
},
registers: Vec::with_capacity(0),
clusters: Vec::with_capacity(0),
};
cluster.registers = {
let mut registers = Vec::new();
for register in ci.children.iter().filter_map(|rc| match rc {
RegisterCluster::Register(ref r) => Some(r),
RegisterCluster::Cluster(_) => None,
}) {
registers.extend(RegisterSpec::new(
register,
&cluster.path(),
cluster.address(),
)?);
}
registers
};
cluster.clusters = {
let mut clusters = Vec::new();
for c in ci.children.iter().filter_map(|rc| match rc {
RegisterCluster::Cluster(ref c) => Some(c),
RegisterCluster::Register(_) => None,
}) {
clusters.extend(ClusterSpec::new(c, &cluster.path(), cluster.address())?);
}
clusters
};
Ok(cluster)
}
fn interpolate_array_params(&mut self, index: String, address_offset: u32) {
self.name = self.name.replace("%s", &index);
if let Some(df) = self.derived_from.clone() {
self.derived_from = Some(df.replace("%s", &index));
}
if let Some(desc) = self.description.clone() {
self.description = Some(desc.replace("%s", &index));
}
self.address_offset = address_offset;
}
}
#[cfg(test)]
mod tests {
use super::ClusterSpec;
use crate::{AccessSpec, FieldSpec, RegisterSpec};
use std::cell::RefCell;
use svd_parser::{parse::Parse, Cluster};
use xmltree::Element;
#[test]
fn can_create_single_from_xml() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<description>Bar</description>
<addressOffset>3000</addressOffset>
<access>write-only</access>
<resetValue>1234</resetValue>
<resetMask>4321</resetMask>
<size>16</size>
<register>
<name>R1</name>
<addressOffset>100</addressOffset>
</register>
<register>
<name>R2</name>
<addressOffset>200</addressOffset>
</register>
<cluster>
<name>C1</name>
<addressOffset>200</addressOffset>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let mut specs = ClusterSpec::new(&ci, "", 0).unwrap();
assert_eq!(1, specs.len());
let cs = specs.pop().unwrap();
assert_eq!("FOO", cs.name);
assert_eq!("Bar", cs.description.unwrap());
assert_eq!(3000, cs.address_offset);
assert_eq!(AccessSpec::WriteOnly, cs.default_register_access.unwrap());
assert_eq!(1234, cs.default_register_reset_value.unwrap());
assert_eq!(4321, cs.default_register_reset_mask.unwrap());
assert_eq!(16, cs.default_register_size.unwrap());
assert_eq!(2, cs.registers.len());
assert_eq!("R1", cs.registers[0].name);
assert_eq!("R2", cs.registers[1].name);
assert_eq!(1, cs.clusters.len());
assert_eq!("C1", cs.clusters[0].name);
}
#[test]
fn can_create_multiple_from_xml() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO_%s</name>
<description>Bar %s</description>
<addressOffset>3000</addressOffset>
<access>write-only</access>
<resetValue>1234</resetValue>
<resetMask>4321</resetMask>
<size>16</size>
<dim>3</dim>
<dimIndex>one,two,three</dimIndex>
<dimIncrement>0x20</dimIncrement>
<register>
<name>R1</name>
<addressOffset>100</addressOffset>
</register>
<register>
<name>R2</name>
<addressOffset>200</addressOffset>
</register>
<cluster>
<name>C1</name>
<addressOffset>200</addressOffset>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let mut specs = ClusterSpec::new(&ci, "", 0).unwrap();
assert_eq!(3, specs.len());
let mut cs = specs.pop().unwrap();
assert_eq!("FOO_three", cs.name);
assert_eq!("Bar three", cs.description.unwrap());
assert_eq!(3064, cs.address_offset);
assert_eq!(AccessSpec::WriteOnly, cs.default_register_access.unwrap());
assert_eq!(1234, cs.default_register_reset_value.unwrap());
assert_eq!(4321, cs.default_register_reset_mask.unwrap());
assert_eq!(16, cs.default_register_size.unwrap());
assert_eq!(2, cs.registers.len());
assert_eq!("R1", cs.registers[0].name);
assert_eq!("R2", cs.registers[1].name);
assert_eq!(1, cs.clusters.len());
assert_eq!("C1", cs.clusters[0].name);
cs = specs.pop().unwrap();
assert_eq!("FOO_two", cs.name);
assert_eq!("Bar two", cs.description.unwrap());
assert_eq!(3032, cs.address_offset);
assert_eq!(AccessSpec::WriteOnly, cs.default_register_access.unwrap());
assert_eq!(1234, cs.default_register_reset_value.unwrap());
assert_eq!(4321, cs.default_register_reset_mask.unwrap());
assert_eq!(16, cs.default_register_size.unwrap());
assert_eq!(2, cs.registers.len());
assert_eq!("R1", cs.registers[0].name);
assert_eq!("R2", cs.registers[1].name);
assert_eq!(1, cs.clusters.len());
assert_eq!("C1", cs.clusters[0].name);
cs = specs.pop().unwrap();
assert_eq!("FOO_one", cs.name);
assert_eq!("Bar one", cs.description.unwrap());
assert_eq!(3000, cs.address_offset);
assert_eq!(AccessSpec::WriteOnly, cs.default_register_access.unwrap());
assert_eq!(1234, cs.default_register_reset_value.unwrap());
assert_eq!(4321, cs.default_register_reset_mask.unwrap());
assert_eq!(16, cs.default_register_size.unwrap());
assert_eq!(2, cs.registers.len());
assert_eq!("R1", cs.registers[0].name);
assert_eq!("R2", cs.registers[1].name);
assert_eq!(1, cs.clusters.len());
assert_eq!("C1", cs.clusters[0].name);
}
#[test]
fn inherits_from_other_cluster() {
let descendant_el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>1000</addressOffset>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let descendant_ci = Cluster::parse(&descendant_el).unwrap();
let mut descendant_specs = ClusterSpec::new(&descendant_ci, "", 0).unwrap();
let mut descendant_cs = descendant_specs.pop().unwrap();
let ancestor_el: Element = Element::parse(
r##"
<cluster>
<name>FOO2</name>
<description>Bar</description>
<addressOffset>3000</addressOffset>
<access>write-only</access>
<resetValue>1234</resetValue>
<resetMask>4321</resetMask>
<size>16</size>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ancestor_ci = Cluster::parse(&ancestor_el).unwrap();
let mut ancestor_specs = ClusterSpec::new(&ancestor_ci, "", 0).unwrap();
let ancestor_cs = ancestor_specs.pop().unwrap();
let changed = descendant_cs.inherit_from(&ancestor_cs);
assert!(changed);
// Not inherited
assert_eq!("FOO", descendant_cs.name);
assert_eq!(1000, descendant_cs.address_offset);
// Inherited
assert_eq!("Bar", descendant_cs.description.unwrap());
assert_eq!(
AccessSpec::WriteOnly,
descendant_cs.default_register_access.unwrap()
);
assert_eq!(1234, descendant_cs.default_register_reset_value.unwrap());
assert_eq!(4321, descendant_cs.default_register_reset_mask.unwrap());
assert_eq!(16, descendant_cs.default_register_size.unwrap());
}
#[test]
fn inherits_from_returns_false_when_no_changes() {
let descendant_el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<description>Baz</description>
<addressOffset>1000</addressOffset>
<access>read-only</access>
<resetValue>2345</resetValue>
<resetMask>6543</resetMask>
<size>32</size>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let descendant_ci = Cluster::parse(&descendant_el).unwrap();
let mut descendant_specs = ClusterSpec::new(&descendant_ci, "", 0).unwrap();
let mut descendant_cs = descendant_specs.pop().unwrap();
let ancestor_el: Element = Element::parse(
r##"
<cluster>
<name>FOO2</name>
<description>Bar</description>
<addressOffset>3000</addressOffset>
<access>write-only</access>
<resetValue>1234</resetValue>
<resetMask>4321</resetMask>
<size>16</size>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ancestor_ci = Cluster::parse(&ancestor_el).unwrap();
let mut ancestor_specs = ClusterSpec::new(&ancestor_ci, "", 0).unwrap();
let ancestor_cs = ancestor_specs.pop().unwrap();
let changed = descendant_cs.inherit_from(&ancestor_cs);
assert!(!changed);
assert_eq!("FOO", descendant_cs.name);
assert_eq!("Baz", descendant_cs.description.unwrap());
assert_eq!(1000, descendant_cs.address_offset);
assert_eq!(
AccessSpec::ReadOnly,
descendant_cs.default_register_access.unwrap()
);
assert_eq!(2345, descendant_cs.default_register_reset_value.unwrap());
assert_eq!(6543, descendant_cs.default_register_reset_mask.unwrap());
assert_eq!(32, descendant_cs.default_register_size.unwrap());
}
#[test]
fn inherits_from_returns_true_for_overridden_inherited_cluster() {
let descendant_el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>1000</addressOffset>
<cluster>
<name>FOO_sub</name>
<addressOffset>3100</addressOffset>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let descendant_ci = Cluster::parse(&descendant_el).unwrap();
let mut descendant_specs = ClusterSpec::new(&descendant_ci, "", 0).unwrap();
let mut descendant_cs = descendant_specs.pop().unwrap();
let ancestor_el: Element = Element::parse(
r##"
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<cluster>
<name>FOO_sub</name>
<addressOffset>3200</addressOffset>
<description>BAZ</description>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ancestor_ci = Cluster::parse(&ancestor_el).unwrap();
let mut ancestor_specs = ClusterSpec::new(&ancestor_ci, "", 0).unwrap();
let ancestor_cs = ancestor_specs.pop().unwrap();
let changed = descendant_cs.inherit_from(&ancestor_cs);
assert!(changed);
assert_eq!("FOO", descendant_cs.name);
assert_eq!(1000, descendant_cs.address_offset);
assert_eq!(1, descendant_cs.clusters.len());
assert_eq!("FOO_sub", descendant_cs.clusters[0].name);
assert_eq!(3100, descendant_cs.clusters[0].address_offset);
assert_eq!(
"BAZ",
descendant_cs.clusters[0].description.clone().unwrap()
);
}
#[test]
fn inherits_from_returns_true_for_added_inherited_cluster() {
let descendant_el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>1000</addressOffset>
<cluster>
<name>FOO_sub</name>
<addressOffset>3100</addressOffset>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let descendant_ci = Cluster::parse(&descendant_el).unwrap();
let mut descendant_specs = ClusterSpec::new(&descendant_ci, "", 0).unwrap();
let mut descendant_cs = descendant_specs.pop().unwrap();
let ancestor_el: Element = Element::parse(
r##"
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<cluster>
<name>FOO_sub2</name>
<addressOffset>3200</addressOffset>
<description>BAZ</description>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ancestor_ci = Cluster::parse(&ancestor_el).unwrap();
let mut ancestor_specs = ClusterSpec::new(&ancestor_ci, "", 0).unwrap();
let ancestor_cs = ancestor_specs.pop().unwrap();
let changed = descendant_cs.inherit_from(&ancestor_cs);
assert!(changed);
assert_eq!("FOO", descendant_cs.name);
assert_eq!(1000, descendant_cs.address_offset);
assert_eq!(2, descendant_cs.clusters.len());
assert_eq!("FOO_sub", descendant_cs.clusters[0].name);
assert_eq!(3100, descendant_cs.clusters[0].address_offset);
assert!(descendant_cs.clusters[0].description.is_none());
assert_eq!("FOO_sub2", descendant_cs.clusters[1].name);
assert_eq!(3200, descendant_cs.clusters[1].address_offset);
assert_eq!(
"BAZ",
descendant_cs.clusters[1].description.clone().unwrap()
);
}
#[test]
fn inherits_from_returns_true_for_overridden_inherited_register() {
let descendant_el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>1000</addressOffset>
<register>
<name>FOO_sub</name>
<addressOffset>3100</addressOffset>
</register>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let descendant_ci = Cluster::parse(&descendant_el).unwrap();
let mut descendant_specs = ClusterSpec::new(&descendant_ci, "", 0).unwrap();
let mut descendant_cs = descendant_specs.pop().unwrap();
let ancestor_el: Element = Element::parse(
r##"
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO_sub</name>
<addressOffset>3200</addressOffset>
<description>BAZ</description>
</register>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ancestor_ci = Cluster::parse(&ancestor_el).unwrap();
let mut ancestor_specs = ClusterSpec::new(&ancestor_ci, "", 0).unwrap();
let ancestor_cs = ancestor_specs.pop().unwrap();
let changed = descendant_cs.inherit_from(&ancestor_cs);
assert!(changed);
assert_eq!("FOO", descendant_cs.name);
assert_eq!(1000, descendant_cs.address_offset);
assert_eq!(1, descendant_cs.registers.len());
assert_eq!("FOO_sub", descendant_cs.registers[0].name);
assert_eq!(3100, descendant_cs.registers[0].address_offset);
assert_eq!(
"BAZ",
descendant_cs.registers[0].description.clone().unwrap()
);
}
#[test]
fn inherits_from_returns_true_for_added_inherited_register() {
let descendant_el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>1000</addressOffset>
<register>
<name>FOO_sub</name>
<addressOffset>3100</addressOffset>
</register>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let descendant_ci = Cluster::parse(&descendant_el).unwrap();
let mut descendant_specs = ClusterSpec::new(&descendant_ci, "", 0).unwrap();
let mut descendant_cs = descendant_specs.pop().unwrap();
let ancestor_el: Element = Element::parse(
r##"
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO_sub2</name>
<addressOffset>3200</addressOffset>
<description>BAZ</description>
</register>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ancestor_ci = Cluster::parse(&ancestor_el).unwrap();
let mut ancestor_specs = ClusterSpec::new(&ancestor_ci, "", 0).unwrap();
let ancestor_cs = ancestor_specs.pop().unwrap();
let changed = descendant_cs.inherit_from(&ancestor_cs);
assert!(changed);
assert_eq!("FOO", descendant_cs.name);
assert_eq!(1000, descendant_cs.address_offset);
assert_eq!(2, descendant_cs.registers.len());
assert_eq!("FOO_sub", descendant_cs.registers[0].name);
assert_eq!(3100, descendant_cs.registers[0].address_offset);
assert!(descendant_cs.registers[0].description.is_none());
assert_eq!("FOO_sub2", descendant_cs.registers[1].name);
assert_eq!(3200, descendant_cs.registers[1].address_offset);
assert_eq!(
"BAZ",
descendant_cs.registers[1].description.clone().unwrap()
);
}
#[test]
fn single_has_correct_path() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>3000</addressOffset>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ri = Cluster::parse(&el).unwrap();
let rs = ClusterSpec::new(&ri, "path", 0).unwrap();
assert_eq!("path.FOO", rs[0].path());
}
#[test]
fn multiples_have_correct_paths() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO_%s</name>
<addressOffset>3000</addressOffset>
<dim>3</dim>
<dimIndex>one,two,three</dimIndex>
<dimIncrement>0x4</dimIncrement>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ri = Cluster::parse(&el).unwrap();
let rs = ClusterSpec::new(&ri, "path", 0).unwrap();
assert_eq!("path.FOO_one", rs[0].path());
assert_eq!("path.FOO_two", rs[1].path());
assert_eq!("path.FOO_three", rs[2].path());
}
#[test]
fn single_has_correct_derived_from_path() {
let el: Element = Element::parse(
r##"
<cluster derivedFrom="BAR">
<name>FOO</name>
<addressOffset>3000</addressOffset>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ri = Cluster::parse(&el).unwrap();
let rs = ClusterSpec::new(&ri, "path", 0).unwrap();
assert_eq!("path.BAR", rs[0].derived_from_path().unwrap());
}
#[test]
fn multiples_have_correct_derived_from_paths() {
let el: Element = Element::parse(
r##"
<cluster derivedFrom="BAR_%s">
<name>FOO_%s</name>
<addressOffset>3000</addressOffset>
<dim>3</dim>
<dimIndex>one,two,three</dimIndex>
<dimIncrement>0x4</dimIncrement>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ri = Cluster::parse(&el).unwrap();
let rs = ClusterSpec::new(&ri, "path", 0).unwrap();
assert_eq!("path.BAR_one", rs[0].derived_from_path().unwrap());
assert_eq!("path.BAR_two", rs[1].derived_from_path().unwrap());
assert_eq!("path.BAR_three", rs[2].derived_from_path().unwrap());
}
#[test]
fn recursively_iterates_clusters() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO5</name>
<addressOffset>3000</addressOffset>
<cluster>
<name>FOO1</name>
<addressOffset>3000</addressOffset>
</cluster>
<cluster>
<name>FOO4</name>
<addressOffset>3000</addressOffset>
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
</cluster>
<cluster>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
</cluster>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let cs = ClusterSpec::new(&ci, "path", 0).unwrap();
assert_eq!(1, cs.len());
let top = &cs[0];
let all_clusters: Vec<&ClusterSpec> = top.iter_clusters().collect();
assert_eq!("FOO1", all_clusters[0].name);
assert_eq!("FOO2", all_clusters[1].name);
assert_eq!("FOO3", all_clusters[2].name);
assert_eq!("FOO4", all_clusters[3].name);
assert_eq!("FOO5", all_clusters[4].name);
}
#[test]
fn recursively_mutates_clusters() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO5</name>
<addressOffset>5000</addressOffset>
<cluster>
<name>FOO1</name>
<addressOffset>1000</addressOffset>
</cluster>
<cluster>
<name>FOO4</name>
<addressOffset>4000</addressOffset>
<cluster>
<name>FOO2</name>
<addressOffset>2000</addressOffset>
</cluster>
<cluster>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
</cluster>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let mut cs = ClusterSpec::new(&ci, "path", 0).unwrap();
assert_eq!(1, cs.len());
let top = &mut cs[0];
let count = RefCell::new(0);
top
.mutate_clusters(|c| {
c.name = format!("{} {}", c.name, count.borrow());
let current = (*count.borrow()).clone();
*count.borrow_mut() = current + 1;
Ok(false)
})
.unwrap();
let all_clusters: Vec<&ClusterSpec> = top.iter_clusters().collect();
assert_eq!("FOO1 0", all_clusters[0].name);
assert_eq!("FOO2 1", all_clusters[1].name);
assert_eq!("FOO3 2", all_clusters[2].name);
assert_eq!("FOO4 3", all_clusters[3].name);
assert_eq!("FOO5 4", all_clusters[4].name);
}
#[test]
fn recursively_iterates_registers() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO5</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO5</name>
<addressOffset>3000</addressOffset>
</register>
<cluster>
<name>FOO1</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO1</name>
<addressOffset>3000</addressOffset>
</register>
</cluster>
<cluster>
<name>FOO4</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO4</name>
<addressOffset>3000</addressOffset>
</register>
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
</register>
</cluster>
<cluster>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
</register>
</cluster>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let cs = ClusterSpec::new(&ci, "path", 0).unwrap();
assert_eq!(1, cs.len());
let top = &cs[0];
    let all_registers: Vec<&RegisterSpec> = top.iter_registers().collect();
    assert_eq!("FOO1", all_registers[0].name);
    assert_eq!("FOO2", all_registers[1].name);
    assert_eq!("FOO3", all_registers[2].name);
    assert_eq!("FOO4", all_registers[3].name);
    assert_eq!("FOO5", all_registers[4].name);
}
#[test]
fn recursively_mutates_registers() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO5</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO5</name>
<addressOffset>3000</addressOffset>
</register>
<cluster>
<name>FOO1</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO1</name>
<addressOffset>3000</addressOffset>
</register>
</cluster>
<cluster>
<name>FOO4</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO4</name>
<addressOffset>3000</addressOffset>
</register>
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
</register>
</cluster>
<cluster>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
</register>
</cluster>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let mut cs = ClusterSpec::new(&ci, "path", 0).unwrap();
assert_eq!(1, cs.len());
let top = &mut cs[0];
let count = RefCell::new(0);
top
.mutate_registers(|r| {
r.name = format!("{} {}", r.name, count.borrow());
let current = (*count.borrow()).clone();
*count.borrow_mut() = current + 1;
Ok(false)
})
.unwrap();
let all_registers: Vec<&RegisterSpec> = top.iter_registers().collect();
assert_eq!("FOO1 0", all_registers[0].name);
assert_eq!("FOO2 1", all_registers[1].name);
assert_eq!("FOO3 2", all_registers[2].name);
assert_eq!("FOO4 3", all_registers[3].name);
assert_eq!("FOO5 4", all_registers[4].name);
}
#[test]
fn recursively_mutates_fields() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO5</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO5</name>
<addressOffset>3000</addressOffset>
<fields>
<field>
<name>FOO5</name>
<bitOffset>1</bitOffset>
<bitWidth>1</bitWidth>
</field>
</fields>
</register>
<cluster>
<name>FOO1</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO1</name>
<addressOffset>3000</addressOffset>
<fields>
<field>
<name>FOO1</name>
<bitOffset>1</bitOffset>
<bitWidth>1</bitWidth>
</field>
</fields>
</register>
</cluster>
<cluster>
<name>FOO4</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO4</name>
<addressOffset>3000</addressOffset>
<fields>
<field>
<name>FOO4</name>
<bitOffset>1</bitOffset>
<bitWidth>1</bitWidth>
</field>
</fields>
</register>
<cluster>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO2</name>
<addressOffset>3000</addressOffset>
<fields>
<field>
<name>FOO2</name>
<bitOffset>1</bitOffset>
<bitWidth>1</bitWidth>
</field>
</fields>
</register>
</cluster>
<cluster>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
<register>
<name>FOO3</name>
<addressOffset>3000</addressOffset>
<fields>
<field>
<name>FOO3</name>
<bitOffset>1</bitOffset>
<bitWidth>1</bitWidth>
</field>
</fields>
</register>
</cluster>
</cluster>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let mut cs = ClusterSpec::new(&ci, "path", 0).unwrap();
assert_eq!(1, cs.len());
let top = &mut cs[0];
let count = RefCell::new(0);
top
.mutate_fields(|f| {
f.name = format!("{} {}", f.name, count.borrow());
let current = (*count.borrow()).clone();
*count.borrow_mut() = current + 1;
Ok(false)
})
.unwrap();
let all_fields: Vec<&FieldSpec> = top.iter_fields().collect();
assert_eq!("FOO1 0", all_fields[0].name);
assert_eq!("FOO2 1", all_fields[1].name);
assert_eq!("FOO3 2", all_fields[2].name);
assert_eq!("FOO4 3", all_fields[3].name);
assert_eq!("FOO5 4", all_fields[4].name);
}
#[test]
pub fn propagates_default_register_properties() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>5000</addressOffset>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let mut cs = ClusterSpec::new(&ci, "path", 0).unwrap();
let cluster = &mut cs[0];
let changed = cluster.propagate_default_register_properties(
&Some(1),
&Some(2),
&Some(3),
&Some(AccessSpec::ReadWriteOnce),
);
assert!(changed);
assert_eq!(1, cluster.default_register_size.unwrap());
assert_eq!(2, cluster.default_register_reset_value.unwrap());
assert_eq!(3, cluster.default_register_reset_mask.unwrap());
assert_eq!(
AccessSpec::ReadWriteOnce,
cluster.default_register_access.unwrap()
);
}
#[test]
pub fn propagate_default_register_properties_returns_false_when_no_changes() {
let el: Element = Element::parse(
r##"
<cluster>
<name>FOO</name>
<addressOffset>5000</addressOffset>
</cluster>
"##
.as_bytes(),
)
.unwrap();
let ci = Cluster::parse(&el).unwrap();
let mut cs = ClusterSpec::new(&ci, "path", 0).unwrap();
let cluster = &mut cs[0];
let changed = cluster.propagate_default_register_properties(&None, &None, &None, &None);
assert!(!changed);
assert!(cluster.default_register_access.is_none());
assert!(cluster.default_register_reset_value.is_none());
assert!(cluster.default_register_reset_mask.is_none());
assert!(cluster.default_register_size.is_none());
}
}
| 28.449205 | 100 | 0.59588 |
e21089dc53a5c509d1dc6277a12ae2709130ffd8 | 731 | #![allow(unused_imports)]
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
#[doc = "The `RequestDestination` enum."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RequestDestination`*"]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RequestDestination {
None = "",
Audio = "audio",
Audioworklet = "audioworklet",
Document = "document",
Embed = "embed",
Font = "font",
Image = "image",
Manifest = "manifest",
Object = "object",
Paintworklet = "paintworklet",
Report = "report",
Script = "script",
Sharedworker = "sharedworker",
Style = "style",
Track = "track",
Video = "video",
Worker = "worker",
Xslt = "xslt",
}
| 26.107143 | 97 | 0.610123 |
ffc17a5f862466af097577555022010b22d2697e | 6,313 | #[doc = "Register `OSC48MCTRL` reader"]
pub struct R(crate::R<OSC48MCTRL_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<OSC48MCTRL_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<OSC48MCTRL_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<OSC48MCTRL_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `OSC48MCTRL` writer"]
pub struct W(crate::W<OSC48MCTRL_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<OSC48MCTRL_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<OSC48MCTRL_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<OSC48MCTRL_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `ENABLE` reader - Oscillator Enable"]
pub struct ENABLE_R(crate::FieldReader<bool, bool>);
impl ENABLE_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
ENABLE_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ENABLE_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ENABLE` writer - Oscillator Enable"]
pub struct ENABLE_W<'a> {
w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u8 & 0x01) << 1);
self.w
}
}
#[doc = "Field `RUNSTDBY` reader - Run in Standby"]
pub struct RUNSTDBY_R(crate::FieldReader<bool, bool>);
impl RUNSTDBY_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
RUNSTDBY_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for RUNSTDBY_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `RUNSTDBY` writer - Run in Standby"]
pub struct RUNSTDBY_W<'a> {
w: &'a mut W,
}
impl<'a> RUNSTDBY_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u8 & 0x01) << 6);
self.w
}
}
#[doc = "Field `ONDEMAND` reader - On Demand Control"]
pub struct ONDEMAND_R(crate::FieldReader<bool, bool>);
impl ONDEMAND_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
ONDEMAND_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ONDEMAND_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ONDEMAND` writer - On Demand Control"]
pub struct ONDEMAND_W<'a> {
w: &'a mut W,
}
impl<'a> ONDEMAND_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u8 & 0x01) << 7);
self.w
}
}
impl R {
#[doc = "Bit 1 - Oscillator Enable"]
#[inline(always)]
pub fn enable(&self) -> ENABLE_R {
ENABLE_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 6 - Run in Standby"]
#[inline(always)]
pub fn runstdby(&self) -> RUNSTDBY_R {
RUNSTDBY_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - On Demand Control"]
#[inline(always)]
pub fn ondemand(&self) -> ONDEMAND_R {
ONDEMAND_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 1 - Oscillator Enable"]
#[inline(always)]
pub fn enable(&mut self) -> ENABLE_W {
ENABLE_W { w: self }
}
#[doc = "Bit 6 - Run in Standby"]
#[inline(always)]
pub fn runstdby(&mut self) -> RUNSTDBY_W {
RUNSTDBY_W { w: self }
}
#[doc = "Bit 7 - On Demand Control"]
#[inline(always)]
pub fn ondemand(&mut self) -> ONDEMAND_W {
ONDEMAND_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "48MHz Internal Oscillator (OSC48M) Control\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [osc48mctrl](index.html) module"]
pub struct OSC48MCTRL_SPEC;
impl crate::RegisterSpec for OSC48MCTRL_SPEC {
type Ux = u8;
}
#[doc = "`read()` method returns [osc48mctrl::R](R) reader structure"]
impl crate::Readable for OSC48MCTRL_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [osc48mctrl::W](W) writer structure"]
impl crate::Writable for OSC48MCTRL_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets OSC48MCTRL to value 0x82"]
impl crate::Resettable for OSC48MCTRL_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0x82
}
}
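// Illustrative usage sketch (an assumption, not part of the generated API docs
// above): through a PAC peripheral handle the register is typically accessed via
// the closure-based read/modify API. The peripheral path `OSCCTRL` below is a
// guess and may differ for the actual device crate.
//
// peripherals.OSCCTRL.osc48mctrl.modify(|_, w| w.enable().set_bit().ondemand().clear_bit());
// let enabled = peripherals.OSCCTRL.osc48mctrl.read().enable().bit_is_set();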
| 30.350962 | 434 | 0.560589 |
de36516b5b2ee442f0e5f84939a0d5aa99baf72b | 562 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use diem_operational_tool::command::{Command, ResultWrapper};
use std::process::exit;
use structopt::StructOpt;
fn main() {
let result = Command::from_args().execute();
match result {
Ok(val) => println!("{}", val),
Err(err) => {
let result: ResultWrapper<()> = ResultWrapper::Error(err.to_string());
println!("{}", serde_json::to_string_pretty(&result).unwrap());
exit(1);
}
}
}
| 26.761905 | 82 | 0.604982 |
877d4f1c79a53de5cf2cb9359c914d1eba173a6c | 22,657 | //! Handle syntactic aspects of inserting a new `use`.
use std::{cmp::Ordering, iter::successors};
use crate::RootDatabase;
use hir::Semantics;
use itertools::{EitherOrBoth, Itertools};
use syntax::{
algo::SyntaxRewriter,
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner,
},
AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
};
use test_utils::mark;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct InsertUseConfig {
pub merge: Option<MergeBehavior>,
pub prefix_kind: hir::PrefixKind,
}
#[derive(Debug, Clone)]
pub enum ImportScope {
File(ast::SourceFile),
Module(ast::ItemList),
}
impl ImportScope {
pub fn from(syntax: SyntaxNode) -> Option<Self> {
if let Some(module) = ast::Module::cast(syntax.clone()) {
module.item_list().map(ImportScope::Module)
} else if let this @ Some(_) = ast::SourceFile::cast(syntax.clone()) {
this.map(ImportScope::File)
} else {
ast::ItemList::cast(syntax).map(ImportScope::Module)
}
}
/// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
pub fn find_insert_use_container(
position: &SyntaxNode,
sema: &Semantics<'_, RootDatabase>,
) -> Option<Self> {
sema.ancestors_with_macros(position.clone()).find_map(Self::from)
}
pub fn as_syntax_node(&self) -> &SyntaxNode {
match self {
ImportScope::File(file) => file.syntax(),
ImportScope::Module(item_list) => item_list.syntax(),
}
}
fn indent_level(&self) -> IndentLevel {
match self {
ImportScope::File(file) => file.indent_level(),
ImportScope::Module(item_list) => item_list.indent_level() + 1,
}
}
fn first_insert_pos(&self) -> (InsertPosition<SyntaxElement>, AddBlankLine) {
match self {
ImportScope::File(_) => (InsertPosition::First, AddBlankLine::AfterTwice),
// don't insert the imports before the item list's opening curly brace
ImportScope::Module(item_list) => item_list
.l_curly_token()
.map(|b| (InsertPosition::After(b.into()), AddBlankLine::Around))
.unwrap_or((InsertPosition::First, AddBlankLine::AfterTwice)),
}
}
fn insert_pos_after_last_inner_element(&self) -> (InsertPosition<SyntaxElement>, AddBlankLine) {
self.as_syntax_node()
.children_with_tokens()
.filter(|child| match child {
NodeOrToken::Node(node) => is_inner_attribute(node.clone()),
NodeOrToken::Token(token) => is_inner_comment(token.clone()),
})
.last()
.map(|last_inner_element| {
(InsertPosition::After(last_inner_element.into()), AddBlankLine::BeforeTwice)
})
.unwrap_or_else(|| self.first_insert_pos())
}
}
fn is_inner_attribute(node: SyntaxNode) -> bool {
ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner)
}
fn is_inner_comment(token: SyntaxToken) -> bool {
ast::Comment::cast(token).and_then(|comment| comment.kind().doc)
== Some(ast::CommentPlacement::Inner)
}
/// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur.
pub fn insert_use<'a>(
scope: &ImportScope,
path: ast::Path,
merge: Option<MergeBehavior>,
) -> SyntaxRewriter<'a> {
let _p = profile::span("insert_use");
let mut rewriter = SyntaxRewriter::default();
let use_item = make::use_(None, make::use_tree(path.clone(), None, None, false));
// merge into existing imports if possible
if let Some(mb) = merge {
for existing_use in scope.as_syntax_node().children().filter_map(ast::Use::cast) {
if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) {
rewriter.replace(existing_use.syntax(), merged.syntax());
return rewriter;
}
}
}
// either we weren't allowed to merge or there is no import that fits the merge conditions
// so look for the place we have to insert to
let (insert_position, add_blank) = find_insert_position(scope, path);
let indent = if let ident_level @ 1..=usize::MAX = scope.indent_level().0 as usize {
Some(make::tokens::whitespace(&" ".repeat(4 * ident_level)).into())
} else {
None
};
let to_insert: Vec<SyntaxElement> = {
let mut buf = Vec::new();
match add_blank {
AddBlankLine::Before | AddBlankLine::Around => {
buf.push(make::tokens::single_newline().into())
}
AddBlankLine::BeforeTwice => buf.push(make::tokens::blank_line().into()),
_ => (),
}
if add_blank.has_before() {
if let Some(indent) = indent.clone() {
mark::hit!(insert_use_indent_before);
buf.push(indent);
}
}
buf.push(use_item.syntax().clone().into());
match add_blank {
AddBlankLine::After | AddBlankLine::Around => {
buf.push(make::tokens::single_newline().into())
}
AddBlankLine::AfterTwice => buf.push(make::tokens::blank_line().into()),
_ => (),
}
// only add indentation *after* our stuff if there's another node directly after it
if add_blank.has_after() && matches!(insert_position, InsertPosition::Before(_)) {
if let Some(indent) = indent {
mark::hit!(insert_use_indent_after);
buf.push(indent);
}
} else if add_blank.has_after() && matches!(insert_position, InsertPosition::After(_)) {
mark::hit!(insert_use_no_indent_after);
}
buf
};
match insert_position {
InsertPosition::First => {
rewriter.insert_many_as_first_children(scope.as_syntax_node(), to_insert)
}
InsertPosition::Last => return rewriter, // actually unreachable
InsertPosition::Before(anchor) => rewriter.insert_many_before(&anchor, to_insert),
InsertPosition::After(anchor) => rewriter.insert_many_after(&anchor, to_insert),
}
rewriter
}
fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool {
match (vis0, vis1) {
(None, None) => true,
// FIXME: Don't use the string representation to check for equality
// spaces inside of the node would break this comparison
(Some(vis0), Some(vis1)) => vis0.to_string() == vis1.to_string(),
_ => false,
}
}
fn eq_attrs(
attrs0: impl Iterator<Item = ast::Attr>,
attrs1: impl Iterator<Item = ast::Attr>,
) -> bool {
let attrs0 = attrs0.map(|attr| attr.to_string());
let attrs1 = attrs1.map(|attr| attr.to_string());
attrs0.eq(attrs1)
}
pub fn try_merge_imports(
lhs: &ast::Use,
rhs: &ast::Use,
merge_behavior: MergeBehavior,
) -> Option<ast::Use> {
// don't merge imports with different visibilities
if !eq_visibility(lhs.visibility(), rhs.visibility()) {
return None;
}
if !eq_attrs(lhs.attrs(), rhs.attrs()) {
return None;
}
let lhs_tree = lhs.use_tree()?;
let rhs_tree = rhs.use_tree()?;
let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?;
Some(lhs.with_use_tree(merged))
}
pub fn try_merge_trees(
lhs: &ast::UseTree,
rhs: &ast::UseTree,
merge: MergeBehavior,
) -> Option<ast::UseTree> {
let lhs_path = lhs.path()?;
let rhs_path = rhs.path()?;
let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?;
let (lhs, rhs) = if is_simple_path(lhs)
&& is_simple_path(rhs)
&& lhs_path == lhs_prefix
&& rhs_path == rhs_prefix
{
(lhs.clone(), rhs.clone())
} else {
(lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix))
};
recursive_merge(&lhs, &rhs, merge)
}
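// Illustrative example (assumed behavior, not a doctest): merging the use trees
// of `use foo::bar::Baz;` and `use foo::bar::Qux;` with `MergeBehavior::Full`
// first computes the common prefix `foo::bar`, splits both trees on it, and then
// zips the remainders into a tree equivalent to `foo::bar::{Baz, Qux}`.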
/// Recursively "zips" together lhs and rhs.
fn recursive_merge(
lhs: &ast::UseTree,
rhs: &ast::UseTree,
merge: MergeBehavior,
) -> Option<ast::UseTree> {
let mut use_trees = lhs
.use_tree_list()
.into_iter()
.flat_map(|list| list.use_trees())
        // we use Option here to return early from this function (this is not the same as a `filter` op)
.map(|tree| match merge.is_tree_allowed(&tree) {
true => Some(tree),
false => None,
})
.collect::<Option<Vec<_>>>()?;
use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path()));
for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {
if !merge.is_tree_allowed(&rhs_t) {
return None;
}
let rhs_path = rhs_t.path();
match use_trees.binary_search_by(|lhs_t| {
let (lhs_t, rhs_t) = match lhs_t
.path()
.zip(rhs_path.clone())
.and_then(|(lhs, rhs)| common_prefix(&lhs, &rhs))
{
Some((lhs_p, rhs_p)) => (lhs_t.split_prefix(&lhs_p), rhs_t.split_prefix(&rhs_p)),
None => (lhs_t.clone(), rhs_t.clone()),
};
path_cmp_bin_search(lhs_t.path(), rhs_t.path())
}) {
Ok(idx) => {
let lhs_t = &mut use_trees[idx];
let lhs_path = lhs_t.path()?;
let rhs_path = rhs_path?;
let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?;
if lhs_prefix == lhs_path && rhs_prefix == rhs_path {
let tree_is_self = |tree: ast::UseTree| {
tree.path().as_ref().map(path_is_self).unwrap_or(false)
};
// check if only one of the two trees has a tree list, and whether that then contains `self` or not.
// If this is the case we can skip this iteration since the path without the list is already included in the other one via `self`
let tree_contains_self = |tree: &ast::UseTree| {
tree.use_tree_list()
.map(|tree_list| tree_list.use_trees().any(tree_is_self))
.unwrap_or(false)
};
match (tree_contains_self(&lhs_t), tree_contains_self(&rhs_t)) {
(true, false) => continue,
(false, true) => {
*lhs_t = rhs_t;
continue;
}
_ => (),
}
                    // glob imports aren't part of the use-tree lists, so we need to handle them specially here as well
                    // this special handling is only required when we merge a module import into a glob import of said module
                    // see the `merge_self_glob` or `merge_mod_into_glob` tests
if lhs_t.star_token().is_some() || rhs_t.star_token().is_some() {
*lhs_t = make::use_tree(
make::path_unqualified(make::path_segment_self()),
None,
None,
false,
);
use_trees.insert(idx, make::glob_use_tree());
continue;
}
if lhs_t.use_tree_list().is_none() && rhs_t.use_tree_list().is_none() {
continue;
}
}
let lhs = lhs_t.split_prefix(&lhs_prefix);
let rhs = rhs_t.split_prefix(&rhs_prefix);
match recursive_merge(&lhs, &rhs, merge) {
Some(use_tree) => use_trees[idx] = use_tree,
None => return None,
}
}
Err(_)
if merge == MergeBehavior::Last
&& use_trees.len() > 0
&& rhs_t.use_tree_list().is_some() =>
{
return None
}
Err(idx) => {
use_trees.insert(idx, rhs_t);
}
}
}
Some(lhs.with_use_tree_list(make::use_tree_list(use_trees)))
}
/// Traverses both paths until they differ, returning the common prefix of both.
fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Path)> {
let mut res = None;
let mut lhs_curr = first_path(&lhs);
let mut rhs_curr = first_path(&rhs);
loop {
match (lhs_curr.segment(), rhs_curr.segment()) {
(Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (),
_ => break res,
}
res = Some((lhs_curr.clone(), rhs_curr.clone()));
match lhs_curr.parent_path().zip(rhs_curr.parent_path()) {
Some((lhs, rhs)) => {
lhs_curr = lhs;
rhs_curr = rhs;
}
_ => break res,
}
}
}
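// For example (illustrative): for `std::fmt::Debug` and `std::fmt::Display` the
// returned prefixes are `std::fmt` on both sides; for `std::fmt` and `core::fmt`
// the first segments already differ, so `None` is returned.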
fn is_simple_path(use_tree: &ast::UseTree) -> bool {
use_tree.use_tree_list().is_none() && use_tree.star_token().is_none()
}
fn path_is_self(path: &ast::Path) -> bool {
path.segment().and_then(|seg| seg.self_token()).is_some() && path.qualifier().is_none()
}
#[inline]
fn first_segment(path: &ast::Path) -> Option<ast::PathSegment> {
first_path(path).segment()
}
fn first_path(path: &ast::Path) -> ast::Path {
successors(Some(path.clone()), ast::Path::qualifier).last().unwrap()
}
fn segment_iter(path: &ast::Path) -> impl Iterator<Item = ast::PathSegment> + Clone {
    // can't make use of SyntaxNode::siblings, because the returned iterator is not `Clone`
successors(first_segment(path), |p| p.parent_path().parent_path().and_then(|p| p.segment()))
}
fn path_len(path: ast::Path) -> usize {
segment_iter(&path).count()
}
/// Orders paths in the following way:
/// the sole self token comes first, after that come uppercase identifiers, then lowercase identifiers
// FIXME: rustfmt sorts lowercase idents before uppercase; in general we want to have the same ordering rustfmt has,
// which is `self` and `super` first, then identifier imports with lowercase ones first, then glob imports, and finally list imports.
// Example: foo::{self, foo, baz, Baz, Qux, *, {Bar}}
fn path_cmp_for_sort(a: Option<ast::Path>, b: Option<ast::Path>) -> Ordering {
match (a, b) {
(None, None) => Ordering::Equal,
(None, Some(_)) => Ordering::Less,
(Some(_), None) => Ordering::Greater,
(Some(ref a), Some(ref b)) => match (path_is_self(a), path_is_self(b)) {
(true, true) => Ordering::Equal,
(true, false) => Ordering::Less,
(false, true) => Ordering::Greater,
(false, false) => path_cmp_short(a, b),
},
}
}
/// Path comparison func for binary searching for merging.
fn path_cmp_bin_search(lhs: Option<ast::Path>, rhs: Option<ast::Path>) -> Ordering {
match (lhs.as_ref().and_then(first_segment), rhs.as_ref().and_then(first_segment)) {
(None, None) => Ordering::Equal,
(None, Some(_)) => Ordering::Less,
(Some(_), None) => Ordering::Greater,
(Some(ref a), Some(ref b)) => path_segment_cmp(a, b),
}
}
/// Short-circuiting comparison: if both paths are equal until one of them ends, they are considered
/// equal.
fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering {
let a = segment_iter(a);
let b = segment_iter(b);
// cmp_by would be useful for us here but that is currently unstable
    // cmp doesn't work due to the lifetimes on text's return type
a.zip(b)
.find_map(|(a, b)| match path_segment_cmp(&a, &b) {
Ordering::Equal => None,
ord => Some(ord),
})
.unwrap_or(Ordering::Equal)
}
/// Compares two paths; if one ends earlier than the other, the has_tl parameters decide which is
/// greater, as a path that has a tree list should be greater, while one that just ends without
/// a tree list should be considered less.
fn use_tree_path_cmp(a: &ast::Path, a_has_tl: bool, b: &ast::Path, b_has_tl: bool) -> Ordering {
let a_segments = segment_iter(a);
let b_segments = segment_iter(b);
// cmp_by would be useful for us here but that is currently unstable
    // cmp doesn't work due to the lifetimes on text's return type
a_segments
.zip_longest(b_segments)
.find_map(|zipped| match zipped {
EitherOrBoth::Both(ref a, ref b) => match path_segment_cmp(a, b) {
Ordering::Equal => None,
ord => Some(ord),
},
EitherOrBoth::Left(_) if !b_has_tl => Some(Ordering::Greater),
EitherOrBoth::Left(_) => Some(Ordering::Less),
EitherOrBoth::Right(_) if !a_has_tl => Some(Ordering::Less),
EitherOrBoth::Right(_) => Some(Ordering::Greater),
})
.unwrap_or(Ordering::Equal)
}
fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
let a = a.kind().and_then(|kind| match kind {
PathSegmentKind::Name(name_ref) => Some(name_ref),
_ => None,
});
let b = b.kind().and_then(|kind| match kind {
PathSegmentKind::Name(name_ref) => Some(name_ref),
_ => None,
});
a.as_ref().map(ast::NameRef::text).cmp(&b.as_ref().map(ast::NameRef::text))
}
/// What type of merges are allowed.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum MergeBehavior {
/// Merge everything together creating deeply nested imports.
Full,
/// Only merge the last import level, doesn't allow import nesting.
Last,
}
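// Sketch of how the two behaviors differ (assumed examples, not tests):
// - merging `use std::fmt::Debug;` into `use std::fmt::Display;` produces
//   `use std::fmt::{Debug, Display};` under either behavior, since only a single
//   list level is needed;
// - merging `use std::fmt::Debug;` into `use std::{fmt::Display, io};` would need
//   a nested list (`use std::{fmt::{Debug, Display}, io};`), which `Full` allows
//   but `Last` rejects.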
impl MergeBehavior {
#[inline]
fn is_tree_allowed(&self, tree: &ast::UseTree) -> bool {
match self {
MergeBehavior::Full => true,
// only simple single segment paths are allowed
MergeBehavior::Last => {
tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
}
}
}
}
#[derive(Eq, PartialEq, PartialOrd, Ord)]
enum ImportGroup {
// the order here defines the order of new group inserts
Std,
ExternCrate,
ThisCrate,
ThisModule,
SuperModule,
}
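// Illustrative resulting order of the groups in a file (assumed example):
// `std`/`core` imports come first, then external crates, then `crate::...`,
// then `self::...`, and finally `super::...`; a new import is inserted into
// the first group that matches its first path segment.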
impl ImportGroup {
fn new(path: &ast::Path) -> ImportGroup {
let default = ImportGroup::ExternCrate;
let first_segment = match first_segment(path) {
Some(it) => it,
None => return default,
};
let kind = first_segment.kind().unwrap_or(PathSegmentKind::SelfKw);
match kind {
PathSegmentKind::SelfKw => ImportGroup::ThisModule,
PathSegmentKind::SuperKw => ImportGroup::SuperModule,
PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
PathSegmentKind::Name(name) => match name.text().as_str() {
"std" => ImportGroup::Std,
"core" => ImportGroup::Std,
_ => ImportGroup::ExternCrate,
},
PathSegmentKind::Type { .. } => unreachable!(),
}
}
}
#[derive(PartialEq, Eq)]
enum AddBlankLine {
Before,
BeforeTwice,
Around,
After,
AfterTwice,
}
impl AddBlankLine {
fn has_before(&self) -> bool {
matches!(self, AddBlankLine::Before | AddBlankLine::BeforeTwice | AddBlankLine::Around)
}
fn has_after(&self) -> bool {
matches!(self, AddBlankLine::After | AddBlankLine::AfterTwice | AddBlankLine::Around)
}
}
fn find_insert_position(
scope: &ImportScope,
insert_path: ast::Path,
) -> (InsertPosition<SyntaxElement>, AddBlankLine) {
let group = ImportGroup::new(&insert_path);
let path_node_iter = scope
.as_syntax_node()
.children()
.filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node)))
.flat_map(|(use_, node)| {
let tree = use_.use_tree()?;
let path = tree.path()?;
let has_tl = tree.use_tree_list().is_some();
Some((path, has_tl, node))
});
    // Iterator that discards anything that's not in the required grouping
// This implementation allows the user to rearrange their import groups as this only takes the first group that fits
let group_iter = path_node_iter
.clone()
.skip_while(|(path, ..)| ImportGroup::new(path) != group)
.take_while(|(path, ..)| ImportGroup::new(path) == group);
// track the last element we iterated over, if this is still None after the iteration then that means we never iterated in the first place
let mut last = None;
// find the element that would come directly after our new import
let post_insert = group_iter.inspect(|(.., node)| last = Some(node.clone())).find(
|&(ref path, has_tl, _)| {
use_tree_path_cmp(&insert_path, false, path, has_tl) != Ordering::Greater
},
);
match post_insert {
// insert our import before that element
Some((.., node)) => (InsertPosition::Before(node.into()), AddBlankLine::After),
// there is no element after our new import, so append it to the end of the group
None => match last {
Some(node) => (InsertPosition::After(node.into()), AddBlankLine::Before),
            // the group we were looking for actually doesn't exist, so insert
None => {
// similar concept here to the `last` from above
let mut last = None;
// find the group that comes after where we want to insert
let post_group = path_node_iter
.inspect(|(.., node)| last = Some(node.clone()))
.find(|(p, ..)| ImportGroup::new(p) > group);
match post_group {
Some((.., node)) => {
(InsertPosition::Before(node.into()), AddBlankLine::AfterTwice)
}
// there is no such group, so append after the last one
None => match last {
Some(node) => {
(InsertPosition::After(node.into()), AddBlankLine::BeforeTwice)
}
// there are no imports in this file at all
None => scope.insert_pos_after_last_inner_element(),
},
}
}
},
}
}
#[cfg(test)]
mod tests;
| 37.636213 | 149 | 0.575407 |
751e5184ce4d65996f9f14d3821eaad0a8cf4655 | 339 | /*
Sundance by Alexander Abraham,
a.k.a. "The Black Unicorn", a.k.a. "Angeldust Duke".
Licensed under the MIT license.
*/
pub mod toolchain;

// `lex` and `read_file` are assumed to be provided by the `toolchain` module
// declared above; bring them into scope so this function compiles.
use self::toolchain::{lex, read_file};

/// A small tool to print out lexed tokens.
/// The `output` path is currently unused; tokens are printed to stdout.
pub fn toolchain(input: String, _output: String) {
    for token in lex(read_file(input)) {
        println!("{} : {}", token.name, token.value);
    }
}
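// A minimal sketch of the API this file assumes the `toolchain` module exposes;
// the names and signatures below are assumptions for illustration only:
//
// pub struct Token {
//     pub name: String,
//     pub value: String,
// }
// pub fn read_file(path: String) -> String { /* read the source text */ }
// pub fn lex(source: String) -> Vec<Token> { /* split the text into tokens */ }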
| 22.6 | 53 | 0.654867 |
5017ff20a626aaeb6c40bf06ea76f05cfdf98a0e | 756 | // traits2.rs
//
// Your task is to implement the trait
// `AppendBar' for a vector of strings.
//
// To implement this trait, consider for
// a moment what it means to 'append "Bar"'
// to a vector of strings.
//
// No boilerplate code this time,
// you can do this!
trait AppendBar {
fn append_bar(self) -> Self;
}
// Add your code here
impl AppendBar for Vec<String> {
fn append_bar(mut self) -> Self {
self.push(String::from("Bar"));
self
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn is_vec_pop_eq_bar() {
let mut foo = vec![String::from("Foo")].append_bar();
assert_eq!(foo.pop().unwrap(), String::from("Bar"));
assert_eq!(foo.pop().unwrap(), String::from("Foo"));
}
}
| 21 | 61 | 0.597884 |
9c8924ecb50ed61dfc91c83b03d2a379d1636bb2 | 201,472 | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use std::borrow::Cow;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::num::IntErrorKind;
use num_traits::identities::Zero;
use tidb_query_codegen::rpn_fn;
use tidb_query_datatype::*;
use tipb::{Expr, FieldType};
use crate::types::RpnExpressionBuilder;
use crate::{RpnExpressionNode, RpnFnCallExtra, RpnFnMeta};
use tidb_query_common::Result;
use tidb_query_datatype::codec::convert::*;
use tidb_query_datatype::codec::data_type::*;
use tidb_query_datatype::codec::error::{ERR_DATA_OUT_OF_RANGE, ERR_TRUNCATE_WRONG_VALUE};
use tidb_query_datatype::codec::mysql::{binary_literal, Time};
use tidb_query_datatype::codec::Error;
use tidb_query_datatype::expr::EvalContext;
fn get_cast_fn_rpn_meta(
is_from_constant: bool,
from_field_type: &FieldType,
to_field_type: &FieldType,
) -> Result<RpnFnMeta> {
let from = box_try!(EvalType::try_from(from_field_type.as_accessor().tp()));
let to = box_try!(EvalType::try_from(to_field_type.as_accessor().tp()));
let func_meta = match (from, to) {
// any as int
(EvalType::Int, EvalType::Int) => {
if !from_field_type.is_unsigned() && to_field_type.is_unsigned() {
cast_signed_int_as_unsigned_int_fn_meta()
} else {
cast_int_as_int_others_fn_meta()
}
}
(EvalType::Real, EvalType::Int) => {
if !to_field_type.is_unsigned() {
cast_any_as_any_fn_meta::<Real, Int>()
} else {
cast_real_as_uint_fn_meta()
}
}
(EvalType::Bytes, EvalType::Int) => {
if is_from_constant && from_field_type.is_binary_string_like() {
cast_binary_string_as_int_fn_meta()
} else {
cast_string_as_int_fn_meta()
}
}
(EvalType::Decimal, EvalType::Int) => {
if !to_field_type.is_unsigned() {
cast_any_as_any_fn_meta::<Decimal, Int>()
} else {
cast_decimal_as_uint_fn_meta()
}
}
(EvalType::DateTime, EvalType::Int) => cast_any_as_any_fn_meta::<DateTime, Int>(),
(EvalType::Duration, EvalType::Int) => cast_any_as_any_fn_meta::<Duration, Int>(),
(EvalType::Json, EvalType::Int) => {
if !to_field_type.is_unsigned() {
cast_any_as_any_fn_meta::<Json, Int>()
} else {
cast_json_as_uint_fn_meta()
}
}
// any as real
(EvalType::Int, EvalType::Real) => {
let fu = from_field_type.is_unsigned();
let ru = to_field_type.is_unsigned();
match (fu, ru) {
(true, _) => cast_unsigned_int_as_signed_or_unsigned_real_fn_meta(),
(false, false) => cast_signed_int_as_signed_real_fn_meta(),
(false, true) => cast_signed_int_as_unsigned_real_fn_meta(),
}
}
(EvalType::Real, EvalType::Real) => {
if !to_field_type.is_unsigned() {
cast_real_as_signed_real_fn_meta()
} else {
cast_real_as_unsigned_real_fn_meta()
}
}
(EvalType::Bytes, EvalType::Real) => {
match (
is_from_constant && from_field_type.is_binary_string_like(),
to_field_type.is_unsigned(),
) {
(true, true) => cast_binary_string_as_unsigned_real_fn_meta(),
(true, false) => cast_binary_string_as_signed_real_fn_meta(),
(false, true) => cast_string_as_unsigned_real_fn_meta(),
(false, false) => cast_string_as_signed_real_fn_meta(),
}
}
(EvalType::Decimal, EvalType::Real) => {
if !to_field_type.is_unsigned() {
cast_any_as_any_fn_meta::<Decimal, Real>()
} else {
cast_decimal_as_unsigned_real_fn_meta()
}
}
(EvalType::DateTime, EvalType::Real) => cast_any_as_any_fn_meta::<DateTime, Real>(),
(EvalType::Duration, EvalType::Real) => cast_any_as_any_fn_meta::<Duration, Real>(),
(EvalType::Json, EvalType::Real) => cast_any_as_any_fn_meta::<Json, Real>(),
// any as string
(EvalType::Int, EvalType::Bytes) => {
if !from_field_type.is_unsigned() {
cast_any_as_string_fn_meta::<Int>()
} else {
cast_uint_as_string_fn_meta()
}
}
(EvalType::Real, EvalType::Bytes) => {
if from_field_type.as_accessor().tp() == FieldTypeTp::Float {
cast_float_real_as_string_fn_meta()
} else {
cast_any_as_string_fn_meta::<Real>()
}
}
(EvalType::Bytes, EvalType::Bytes) => cast_string_as_string_fn_meta(),
(EvalType::Decimal, EvalType::Bytes) => cast_any_as_string_fn_meta::<Decimal>(),
(EvalType::DateTime, EvalType::Bytes) => cast_any_as_string_fn_meta::<DateTime>(),
(EvalType::Duration, EvalType::Bytes) => cast_any_as_string_fn_meta::<Duration>(),
(EvalType::Json, EvalType::Bytes) => cast_any_as_any_fn_meta::<Json, Bytes>(),
// any as decimal
(EvalType::Int, EvalType::Decimal) => {
let fu = from_field_type.is_unsigned();
let ru = to_field_type.is_unsigned();
match (fu, ru) {
(true, _) => cast_unsigned_int_as_signed_or_unsigned_decimal_fn_meta(),
(false, true) => cast_signed_int_as_unsigned_decimal_fn_meta(),
(false, false) => cast_any_as_decimal_fn_meta::<Int>(),
}
}
(EvalType::Real, EvalType::Decimal) => cast_real_as_decimal_fn_meta(),
(EvalType::Bytes, EvalType::Decimal) => {
if !to_field_type.is_unsigned() {
cast_any_as_decimal_fn_meta::<Bytes>()
} else {
cast_string_as_unsigned_decimal_fn_meta()
}
}
(EvalType::Decimal, EvalType::Decimal) => {
if !to_field_type.is_unsigned() {
cast_decimal_as_signed_decimal_fn_meta()
} else {
cast_decimal_as_unsigned_decimal_fn_meta()
}
}
(EvalType::DateTime, EvalType::Decimal) => cast_any_as_decimal_fn_meta::<DateTime>(),
(EvalType::Duration, EvalType::Decimal) => cast_any_as_decimal_fn_meta::<Duration>(),
(EvalType::Json, EvalType::Decimal) => cast_any_as_decimal_fn_meta::<Json>(),
// any as duration
(EvalType::Int, EvalType::Duration) => cast_int_as_duration_fn_meta(),
(EvalType::Real, EvalType::Duration) => cast_real_as_duration_fn_meta(),
(EvalType::Bytes, EvalType::Duration) => cast_bytes_as_duration_fn_meta(),
(EvalType::Decimal, EvalType::Duration) => cast_decimal_as_duration_fn_meta(),
(EvalType::DateTime, EvalType::Duration) => cast_time_as_duration_fn_meta(),
(EvalType::Duration, EvalType::Duration) => cast_duration_as_duration_fn_meta(),
(EvalType::Json, EvalType::Duration) => cast_json_as_duration_fn_meta(),
(EvalType::Int, EvalType::DateTime) => cast_int_as_time_fn_meta(),
(EvalType::Real, EvalType::DateTime) => cast_real_as_time_fn_meta(),
(EvalType::Bytes, EvalType::DateTime) => cast_string_as_time_fn_meta(),
(EvalType::Decimal, EvalType::DateTime) => cast_decimal_as_time_fn_meta(),
(EvalType::DateTime, EvalType::DateTime) => cast_time_as_time_fn_meta(),
(EvalType::Duration, EvalType::DateTime) => cast_duration_as_time_fn_meta(),
// any as json
(EvalType::Int, EvalType::Json) => {
if from_field_type
.as_accessor()
.flag()
.contains(FieldTypeFlag::IS_BOOLEAN)
{
cast_bool_as_json_fn_meta()
} else if !from_field_type.is_unsigned() {
cast_any_as_any_fn_meta::<Int, Json>()
} else {
cast_uint_as_json_fn_meta()
}
}
(EvalType::Real, EvalType::Json) => cast_any_as_any_fn_meta::<Real, Json>(),
(EvalType::Bytes, EvalType::Json) => cast_string_as_json_fn_meta(),
(EvalType::Decimal, EvalType::Json) => cast_any_as_any_fn_meta::<Decimal, Json>(),
(EvalType::DateTime, EvalType::Json) => cast_any_as_any_fn_meta::<DateTime, Json>(),
(EvalType::Duration, EvalType::Json) => cast_any_as_any_fn_meta::<Duration, Json>(),
(EvalType::Json, EvalType::Json) => cast_json_as_json_fn_meta(),
_ => return Err(other_err!("Unsupported cast from {} to {}", from, to)),
};
Ok(func_meta)
}
/// Gets the cast function between specified data types.
///
/// TODO: This function supports some internal casts performed by TiKV. However it would be better
/// to be done in TiDB.
pub fn get_cast_fn_rpn_node(
is_from_constant: bool,
from_field_type: &FieldType,
to_field_type: FieldType,
) -> Result<RpnExpressionNode> {
let func_meta = get_cast_fn_rpn_meta(is_from_constant, from_field_type, &to_field_type)?;
// This cast function is inserted by `Coprocessor` automatically,
    // so the `inUnion` flag is always false in this situation. Ideally,
// the cast function should be inserted by TiDB and pushed down
// with all implicit arguments.
Ok(RpnExpressionNode::FnCall {
func_meta,
args_len: 1,
field_type: to_field_type,
metadata: Box::new(tipb::InUnionMetadata::default()),
})
}
/// Gets the RPN function meta
pub fn map_cast_func(expr: &Expr) -> Result<RpnFnMeta> {
let children = expr.get_children();
if children.len() != 1 {
return Err(other_err!(
"Unexpected arguments: sig {:?} with {} args",
expr.get_sig(),
children.len()
));
}
get_cast_fn_rpn_meta(
RpnExpressionBuilder::is_expr_eval_to_scalar(&children[0])?,
children[0].get_field_type(),
expr.get_field_type(),
)
}
// cast any as int/uint, some cast functions reuse `cast_any_as_any`
//
// - cast_real_as_int -> cast_any_as_any<Real, Int>
// - cast_decimal_as_int -> cast_any_as_any<Decimal, Int>
// - cast_time_as_int_or_uint -> cast_any_as_any<Time, Int>
// - cast_duration_as_int_or_uint -> cast_any_as_any<Duration, Int>
// - cast_json_as_int -> cast_any_as_any<Json, Int>
#[rpn_fn(capture = [metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_signed_int_as_unsigned_int(
metadata: &tipb::InUnionMetadata,
val: &Option<Int>,
) -> Result<Option<Int>> {
match val {
None => Ok(None),
Some(val) => {
let val = *val;
if metadata.get_in_union() && val < 0i64 {
Ok(Some(0))
} else {
Ok(Some(val))
}
}
}
}
#[rpn_fn]
#[inline]
fn cast_int_as_int_others(val: &Option<Int>) -> Result<Option<Int>> {
match val {
None => Ok(None),
Some(val) => Ok(Some(*val)),
}
}
#[rpn_fn(capture = [ctx, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_real_as_uint(
ctx: &mut EvalContext,
metadata: &tipb::InUnionMetadata,
val: &Option<Real>,
) -> Result<Option<Int>> {
match val {
None => Ok(None),
Some(val) => {
let val = val.into_inner();
if metadata.get_in_union() && val < 0f64 {
Ok(Some(0))
} else {
                // FIXME: MySQL's double-to-unsigned conversion is very special:
                // it **seems** that if the float number is bigger than i64::MAX,
                // then i64::MAX is always returned.
                // This may be a bug in MySQL.
                // So I don't change our behavior here.
let val: u64 = val.convert(ctx)?;
Ok(Some(val as i64))
}
}
}
}
#[rpn_fn(capture = [ctx, extra, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_string_as_int(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
metadata: &tipb::InUnionMetadata,
val: &Option<Bytes>,
) -> Result<Option<Int>> {
match val {
None => Ok(None),
Some(val) => {
// TODO: in TiDB, if `b.args[0].GetType().Hybrid()` || `IsBinaryLiteral(b.args[0])`,
// then it will return res from EvalInt() directly.
let is_unsigned = extra.ret_field_type.is_unsigned();
let val = get_valid_utf8_prefix(ctx, val.as_slice())?;
let val = val.trim();
let is_str_neg = val.starts_with('-');
if metadata.get_in_union() && is_unsigned && is_str_neg {
Ok(Some(0))
} else {
                // FIXME: if the error returned by get_valid_int_prefix is an overflow error,
                // it should be ERR_TRUNCATE_WRONG_VALUE and not any other error.
let valid_int_prefix = get_valid_int_prefix(ctx, val)?;
let parse_res = if !is_str_neg {
valid_int_prefix.parse::<u64>().map(|x| x as i64)
} else {
valid_int_prefix.parse::<i64>()
};
                // `OverflowAsWarning` is true only in a `SELECT` statement context, e.g.:
// 1. SELECT * FROM t => OverflowAsWarning = true
// 2. INSERT INTO t VALUE (...) => OverflowAsWarning = false
// 3. INSERT INTO t SELECT * FROM t2 => OverflowAsWarning = false
// (according to https://github.com/pingcap/tidb/blob/e173c7f5c1041b3c7e67507889d50a7bdbcdfc01/executor/executor.go#L1452)
//
                // NOTE: if this flag (OverflowAsWarning)'s setting ever changes,
                // then the behavior here should be changed to keep consistent with TiDB.
match parse_res {
Ok(x) => {
if !is_str_neg {
if !is_unsigned && x as u64 > std::i64::MAX as u64 {
ctx.warnings
.append_warning(Error::cast_as_signed_overflow())
}
} else if is_unsigned {
ctx.warnings
.append_warning(Error::cast_neg_int_as_unsigned());
}
Ok(Some(x as i64))
}
Err(err) => match *err.kind() {
IntErrorKind::Overflow | IntErrorKind::Underflow => {
let err = if is_str_neg {
Error::overflow("BIGINT UNSIGNED", valid_int_prefix)
} else {
Error::overflow("BIGINT", valid_int_prefix)
};
let warn_err = Error::truncated_wrong_val("INTEGER", val);
ctx.handle_overflow_err(warn_err).map_err(|_| err)?;
let val = if is_str_neg {
std::i64::MIN
} else {
std::u64::MAX as i64
};
Ok(Some(val))
}
_ => Err(other_err!("parse string to int failed: {}", err)),
},
}
}
}
}
}
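// Illustrative behavior of the cast above (assumed examples, not tests):
// - casting "-5" to an unsigned int while `in_union` is set yields 0;
// - casting "18446744073709551615" to an unsigned int parses as u64::MAX and is
//   returned reinterpreted as an i64 (all bits set);
// - casting "123abc" uses the valid prefix "123", letting `get_valid_int_prefix`
//   raise the appropriate truncation warning.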
#[rpn_fn(capture = [ctx])]
fn cast_binary_string_as_int(ctx: &mut EvalContext, val: &Option<Bytes>) -> Result<Option<Int>> {
match val {
None => Ok(None),
Some(val) => {
let r = binary_literal::to_uint(ctx, val)? as i64;
Ok(Some(r))
}
}
}
#[rpn_fn(capture = [ctx, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_decimal_as_uint(
ctx: &mut EvalContext,
metadata: &tipb::InUnionMetadata,
val: &Option<Decimal>,
) -> Result<Option<Int>> {
match val {
None => Ok(None),
Some(val) => {
            // TODO: here TiDB rounds before calling `val.is_negative()`
if metadata.get_in_union() && val.is_negative() {
Ok(Some(0))
} else {
let r: u64 = val.convert(ctx)?;
Ok(Some(r as i64))
}
}
}
}
#[rpn_fn(capture = [ctx])]
#[inline]
fn cast_json_as_uint(ctx: &mut EvalContext, val: &Option<Json>) -> Result<Option<Int>> {
match val {
None => Ok(None),
Some(j) => {
let r: u64 = j.convert(ctx)?;
Ok(Some(r as i64))
}
}
}
// cast any as real, some cast functions reuse `cast_any_as_any`
//
// cast_decimal_as_signed_real -> cast_any_as_any<Decimal, Real>
// cast_time_as_real -> cast_any_as_any<Time, Real>
// cast_duration_as_real -> cast_any_as_any<Duration, Real>
// cast_json_as_real -> by cast_any_as_any<Json, Real>
#[rpn_fn]
#[inline]
fn cast_signed_int_as_signed_real(val: &Option<Int>) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => Ok(Real::new(*val as f64).ok()),
}
}
#[rpn_fn(capture = [metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_signed_int_as_unsigned_real(
metadata: &tipb::InUnionMetadata,
val: &Option<Int>,
) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => {
if metadata.get_in_union() && *val < 0 {
Ok(Some(Real::zero()))
} else {
                // FIXME: the logic for casting a negative number to an unsigned real may be wrong here.
Ok(Real::new(*val as u64 as f64).ok())
}
}
}
}
// because we need not consider whether a uint overflows the upper boundary of a signed real,
// we can merge casting uint to signed/unsigned real into one function
#[rpn_fn]
#[inline]
fn cast_unsigned_int_as_signed_or_unsigned_real(val: &Option<Int>) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => Ok(Real::new(*val as u64 as f64).ok()),
}
}
#[rpn_fn]
#[inline]
fn cast_real_as_signed_real(val: &Option<Real>) -> Result<Option<Real>> {
Ok(*val)
}
#[rpn_fn(capture = [metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_real_as_unsigned_real(
metadata: &tipb::InUnionMetadata,
val: &Option<Real>,
) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => {
if metadata.get_in_union() && val.into_inner() < 0f64 {
Ok(Some(Real::zero()))
} else {
                // FIXME: the logic for casting a negative number to an unsigned real may be wrong here.
Ok(Some(*val))
}
}
}
}
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_string_as_signed_real(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Bytes>,
) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => {
let r: f64 = val.convert(ctx)?;
let r = produce_float_with_specified_tp(ctx, extra.ret_field_type, r)?;
Ok(Real::new(r).ok())
}
}
}
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_binary_string_as_signed_real(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Bytes>,
) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => {
let r = binary_literal::to_uint(ctx, val)? as i64 as f64;
let r = produce_float_with_specified_tp(ctx, extra.ret_field_type, r)?;
Ok(Real::new(r).ok())
}
}
}
#[rpn_fn(capture = [ctx, extra, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_string_as_unsigned_real(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
metadata: &tipb::InUnionMetadata,
val: &Option<Bytes>,
) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => {
let mut r: f64 = val.convert(ctx)?;
if metadata.get_in_union() && r < 0f64 {
r = 0f64;
}
let r = produce_float_with_specified_tp(ctx, extra.ret_field_type, r)?;
Ok(Real::new(r).ok())
}
}
}
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_binary_string_as_unsigned_real(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Bytes>,
) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => {
let r = binary_literal::to_uint(ctx, val)? as f64;
let r = produce_float_with_specified_tp(ctx, extra.ret_field_type, r)?;
Ok(Real::new(r).ok())
}
}
}
#[rpn_fn(capture = [ctx, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_decimal_as_unsigned_real(
ctx: &mut EvalContext,
metadata: &tipb::InUnionMetadata,
val: &Option<Decimal>,
) -> Result<Option<Real>> {
match val {
None => Ok(None),
Some(val) => {
if metadata.get_in_union() && val.is_negative() {
Ok(Some(Real::zero()))
} else {
                // FIXME: the logic for casting a negative number to an unsigned real may be wrong here.
Ok(Some(val.convert(ctx)?))
}
}
}
}
// cast any as string, some cast functions reuse `cast_any_as_any`
//
// cast_int_as_string -> cast_any_as_string_fn_meta::<Int>
// cast_real_as_string -> cast_any_as_string_fn_meta::<Real>
// cast_decimal_as_string -> cast_any_as_string_fn_meta::<Decimal>
// cast_datetime_as_string -> cast_any_as_string_fn_meta::<DateTime>
// cast_duration_as_string -> cast_any_as_string_fn_meta::<Duration>
// cast_json_as_string -> by cast_any_as_any<Json, String>
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_any_as_string<T: ConvertTo<Bytes> + Evaluable>(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<T>,
) -> Result<Option<Bytes>> {
match val {
None => Ok(None),
Some(val) => {
let val: Bytes = val.convert(ctx)?;
cast_as_string_helper(ctx, extra, val)
}
}
}
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_uint_as_string(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Int>,
) -> Result<Option<Bytes>> {
match val {
None => Ok(None),
Some(val) => {
let val = (*val as u64).to_string().into_bytes();
cast_as_string_helper(ctx, extra, val)
}
}
}
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_float_real_as_string(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Real>,
) -> Result<Option<Bytes>> {
match val {
None => Ok(None),
Some(val) => {
let val = val.into_inner() as f32;
let val = val.to_string().into_bytes();
cast_as_string_helper(ctx, extra, val)
}
}
}
// FIXME: We cannot use specialization in the current Rust version, so `impl ConvertTo<Bytes> for Bytes` cannot
// compile because we already have `impl Convert<Bytes> for T where T: ToString + Evaluable`.
// Refactor this part after https://github.com/rust-lang/rust/issues/31844 is closed.
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_string_as_string(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Bytes>,
) -> Result<Option<Bytes>> {
match val {
None => Ok(None),
Some(val) => {
let val = val.clone();
cast_as_string_helper(ctx, extra, val)
}
}
}
#[inline]
fn cast_as_string_helper(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: Vec<u8>,
) -> Result<Option<Bytes>> {
let res = produce_str_with_specified_tp(
ctx,
Cow::Borrowed(val.as_slice()),
extra.ret_field_type,
false,
)?;
let mut res = match res {
Cow::Borrowed(_) => val,
Cow::Owned(x) => x.to_vec(),
};
pad_zero_for_binary_type(&mut res, extra.ret_field_type);
Ok(Some(res))
}
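// Sketch of the helper's intended effect (assuming a return field type whose flen
// is smaller than the value): `produce_str_with_specified_tp` truncates the bytes
// to flen (reporting ERR_DATA_TOO_LONG as configured in `ctx`), and for binary
// string types `pad_zero_for_binary_type` right-pads shorter values with `\0` up
// to flen.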
// cast any as decimal, some cast functions reuse `cast_any_as_decimal`
//
// - cast_signed_int_as_signed_decimal -> cast_any_as_decimal<Int>
// - cast_string_as_signed_decimal -> cast_any_as_decimal<Bytes>
// - cast_time_as_decimal -> cast_any_as_decimal<Time>
// - cast_duration_as_decimal -> cast_any_as_decimal<Duration>
// - cast_json_as_decimal -> cast_any_as_decimal<Json>
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_unsigned_int_as_signed_or_unsigned_decimal(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<i64>,
) -> Result<Option<Decimal>> {
match val {
None => Ok(None),
Some(val) => {
// Because an unsigned int's upper bound is smaller than a signed decimal's upper bound,
// casting uint as signed and unsigned decimal can share this single function.
let dec = Decimal::from(*val as u64);
Ok(Some(produce_dec_with_specified_tp(
ctx,
dec,
extra.ret_field_type,
)?))
}
}
}
#[rpn_fn(capture = [ctx, extra, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_signed_int_as_unsigned_decimal(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
metadata: &tipb::InUnionMetadata,
val: &Option<i64>,
) -> Result<Option<Decimal>> {
match val {
None => Ok(None),
Some(val) => {
let dec = if metadata.get_in_union() && *val < 0 {
Decimal::zero()
} else {
Decimal::from(*val as u64)
};
Ok(Some(produce_dec_with_specified_tp(
ctx,
dec,
extra.ret_field_type,
)?))
}
}
}
#[rpn_fn(capture = [ctx, extra, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_real_as_decimal(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
metadata: &tipb::InUnionMetadata,
val: &Option<Real>,
) -> Result<Option<Decimal>> {
match val {
None => Ok(None),
Some(val) => {
let val = val.into_inner();
let res = if metadata.get_in_union() && val < 0f64 {
Decimal::zero()
} else {
Decimal::from_f64(val)?
};
Ok(Some(produce_dec_with_specified_tp(
ctx,
res,
extra.ret_field_type,
)?))
}
}
}
#[rpn_fn(capture = [ctx, extra, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_string_as_unsigned_decimal(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
metadata: &tipb::InUnionMetadata,
val: &Option<Bytes>,
) -> Result<Option<Decimal>> {
match val {
None => Ok(None),
Some(val) => {
// FIXME: in TiDB, if the param is a binary literal, the result of `evalDecimal` is returned directly
let d: Decimal = val.convert(ctx)?;
let d = if metadata.get_in_union() && d.is_negative() {
Decimal::zero()
} else {
d
};
Ok(Some(produce_dec_with_specified_tp(
ctx,
d,
extra.ret_field_type,
)?))
}
}
}
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_decimal_as_signed_decimal(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Decimal>,
) -> Result<Option<Decimal>> {
match val {
None => Ok(None),
Some(val) => Ok(Some(produce_dec_with_specified_tp(
ctx,
*val,
extra.ret_field_type,
)?)),
}
}
#[rpn_fn(capture = [ctx, extra, metadata], metadata_type = tipb::InUnionMetadata)]
#[inline]
fn cast_decimal_as_unsigned_decimal(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
metadata: &tipb::InUnionMetadata,
val: &Option<Decimal>,
) -> Result<Option<Decimal>> {
match val {
None => Ok(None),
Some(val) => {
let res = if metadata.get_in_union() && val.is_negative() {
Decimal::zero()
} else {
*val
};
Ok(Some(produce_dec_with_specified_tp(
ctx,
res,
extra.ret_field_type,
)?))
}
}
}
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_any_as_decimal<From: Evaluable + ConvertTo<Decimal>>(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<From>,
) -> Result<Option<Decimal>> {
match val {
None => Ok(None),
Some(val) => {
let dec: Decimal = val.convert(ctx)?;
Ok(Some(produce_dec_with_specified_tp(
ctx,
dec,
extra.ret_field_type,
)?))
}
}
}
// cast any as duration, no cast functions reuse `cast_any_as_any`
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_int_as_duration(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Int>,
) -> Result<Option<Duration>> {
match val {
None => Ok(None),
Some(val) => {
let fsp = extra.ret_field_type.get_decimal() as i8;
Duration::from_i64(ctx, *val, fsp).map(Some).or_else(|err| {
if err.is_overflow() {
ctx.handle_overflow_err(err)?;
Ok(None)
} else {
Err(err.into())
}
})
}
}
}
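// Illustrative sketch: the fractional-second precision (fsp) is taken from the
// return field type, so casting the integer 175105 with `decimal` set to 2 is
// expected to produce the duration "17:51:05.00"; an out-of-range input either
// becomes a warning plus None or a hard error, depending on the overflow flags
// in `ctx`.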
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn cast_time_as_duration(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<DateTime>,
) -> Result<Option<Duration>> {
match val {
None => Ok(None),
Some(val) => {
let dur: Duration = val.convert(ctx)?;
Ok(Some(dur.round_frac(extra.ret_field_type.decimal() as i8)?))
}
}
}
#[rpn_fn(capture = [extra])]
#[inline]
fn cast_duration_as_duration(
extra: &RpnFnCallExtra,
val: &Option<Duration>,
) -> Result<Option<Duration>> {
match val {
None => Ok(None),
Some(val) => Ok(Some(val.round_frac(extra.ret_field_type.decimal() as i8)?)),
}
}
macro_rules! cast_as_duration {
($ty:ty, $as_uint_fn:ident, $extra:expr) => {
#[rpn_fn(capture = [ctx, extra])]
#[inline]
fn $as_uint_fn(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<$ty>,
) -> Result<Option<Duration>> {
match val {
None => Ok(None),
Some(val) => {
let result =
Duration::parse(ctx, $extra, extra.ret_field_type.get_decimal() as i8);
match result {
Ok(dur) => Ok(Some(dur)),
Err(e) => match e.code() {
ERR_DATA_OUT_OF_RANGE => {
ctx.handle_overflow_err(e)?;
Ok(None)
}
ERR_TRUNCATE_WRONG_VALUE => {
ctx.handle_truncate_err(e)?;
Ok(None)
}
_ => Err(e.into()),
},
}
}
}
}
};
}
cast_as_duration!(
Real,
cast_real_as_duration,
val.into_inner().to_string().as_bytes()
);
cast_as_duration!(Bytes, cast_bytes_as_duration, val);
cast_as_duration!(
Decimal,
cast_decimal_as_duration,
val.to_string().as_bytes()
);
cast_as_duration!(
Json,
cast_json_as_duration,
val.as_ref().unquote()?.as_bytes()
);
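// Illustrative expansion sketch: `cast_as_duration!(Bytes, cast_bytes_as_duration, val)`
// generates a function shaped like `cast_int_as_duration` above, except that the
// value is fed to `Duration::parse(ctx, ..., fsp)` and both ERR_DATA_OUT_OF_RANGE
// and ERR_TRUNCATE_WRONG_VALUE are routed through `ctx.handle_overflow_err` /
// `ctx.handle_truncate_err` (yielding None when the corresponding warning flags
// are set) instead of surfacing as hard errors unconditionally.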
#[rpn_fn(capture = [ctx, extra])]
fn cast_int_as_time(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Int>,
) -> Result<Option<Time>> {
if let Some(val) = *val {
// Parse `val` as an integer-encoded datetime (e.g. 20000101121314)
Time::parse_from_i64(
ctx,
val,
extra.ret_field_type.as_accessor().tp().try_into()?,
extra.ret_field_type.get_decimal() as i8,
)
.map(Some)
.or_else(|_| {
Ok(ctx
.handle_invalid_time_error(Error::incorrect_datetime_value(val))
.map(|_| None)?)
})
} else {
Ok(None)
}
}
// NOTE: in MySQL, casting `Real` to `Time` casts the `Real` to `Int` first.
// TiDB, however, casts the `Real` to `String` and then parses it into a `Time`.
#[rpn_fn(capture = [ctx, extra])]
fn cast_real_as_time(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Real>,
) -> Result<Option<Time>> {
if let Some(val) = val {
// Convert `val` to a string first and then parse it as a float string.
Time::parse(
ctx,
&val.to_string(),
extra.ret_field_type.as_accessor().tp().try_into()?,
extra.ret_field_type.get_decimal() as i8,
// Enable round
true,
)
.map(Some)
.or_else(|e| Ok(ctx.handle_invalid_time_error(e).map(|_| None)?))
} else {
Ok(None)
}
}
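// Illustrative consequence of the string-based path (consistent with the tests
// below): casting the real 20190916101112.999 with fsp 0 parses the string
// "20190916101112.999" with rounding enabled and yields "2019-09-16 10:11:13".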
#[rpn_fn(capture = [ctx, extra])]
fn cast_string_as_time(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Bytes>,
) -> Result<Option<Time>> {
if let Some(val) = val {
// Convert `val` to a string first and then parse it as a float string.
Time::parse(
ctx,
unsafe { std::str::from_utf8_unchecked(val) },
extra.ret_field_type.as_accessor().tp().try_into()?,
extra.ret_field_type.get_decimal() as i8,
// Enable round
true,
)
.map(Some)
.or_else(|e| Ok(ctx.handle_invalid_time_error(e).map(|_| None)?))
} else {
Ok(None)
}
}
#[rpn_fn(capture = [ctx, extra])]
fn cast_decimal_as_time(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Decimal>,
) -> Result<Option<Time>> {
if let Some(val) = val {
// Convert `val` to a string first and then parse it as a datetime string.
Time::parse(
ctx,
val.to_string().as_str(),
extra.ret_field_type.as_accessor().tp().try_into()?,
extra.ret_field_type.get_decimal() as i8,
// Enable round
true,
)
.map(Some)
.or_else(|e| Ok(ctx.handle_invalid_time_error(e).map(|_| None)?))
} else {
Ok(None)
}
}
#[rpn_fn(capture = [ctx, extra])]
fn cast_time_as_time(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Time>,
) -> Result<Option<Time>> {
if let Some(mut val) = val {
val.set_time_type(extra.ret_field_type.as_accessor().tp().try_into()?)?;
val.round_frac(ctx, extra.ret_field_type.get_decimal() as i8)
.map(Some)
.or_else(|e| Ok(ctx.handle_invalid_time_error(e).map(|_| None)?))
} else {
Ok(None)
}
}
#[rpn_fn(capture = [ctx, extra])]
fn cast_duration_as_time(
ctx: &mut EvalContext,
extra: &RpnFnCallExtra,
val: &Option<Duration>,
) -> Result<Option<Time>> {
if let Some(val) = *val {
Time::from_duration(
ctx,
val,
extra.ret_field_type.as_accessor().tp().try_into()?,
)
.and_then(|now| now.round_frac(ctx, extra.ret_field_type.get_decimal() as i8))
.map(Some)
.or_else(|e| Ok(ctx.handle_invalid_time_error(e).map(|_| None)?))
} else {
Ok(None)
}
}
// cast any as json, some cast functions reuse `cast_any_as_any`
//
// - cast_int_as_json -> cast_any_as_any<Int, Json>
// - cast_real_as_json -> cast_any_as_any<Real, Json>
// - cast_decimal_as_json -> cast_any_as_any<Decimal, Json>
// - cast_time_as_json -> cast_any_as_any<Time, Json>
// - cast_duration_as_json -> cast_any_as_any<Duration, Json>
#[rpn_fn]
#[inline]
fn cast_bool_as_json(val: &Option<Int>) -> Result<Option<Json>> {
match val {
None => Ok(None),
Some(val) => Ok(Some(Json::from_bool(*val != 0)?)),
}
}
#[rpn_fn]
#[inline]
fn cast_uint_as_json(val: &Option<Int>) -> Result<Option<Json>> {
match val {
None => Ok(None),
Some(val) => Ok(Some(Json::from_u64(*val as u64)?)),
}
}
#[rpn_fn(capture = [extra])]
#[inline]
fn cast_string_as_json(extra: &RpnFnCallExtra<'_>, val: &Option<Bytes>) -> Result<Option<Json>> {
match val {
None => Ok(None),
Some(val) => {
if extra
.ret_field_type
.as_accessor()
.flag()
.contains(FieldTypeFlag::PARSE_TO_JSON)
{
// If this fails, the bytes are not valid UTF-8; is that an upstream bug?
let s: String = box_try!(String::from_utf8(val.to_owned()));
let val: Json = s.parse()?;
Ok(Some(val))
} else {
// FIXME: port `JSONBinary` from TiDB to handle the case where the bytes are not a valid UTF-8 string
let val = unsafe { String::from_utf8_unchecked(val.to_owned()) };
Ok(Some(Json::from_string(val)?))
}
}
}
}
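// Illustrative sketch of the PARSE_TO_JSON flag above: with the flag set on the
// return field type, bytes such as b"{\"k\": 1}" are parsed into a JSON object,
// whereas without the flag the same bytes become the JSON string value
// "{\"k\": 1}".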
#[rpn_fn]
#[inline]
fn cast_json_as_json(val: &Option<Json>) -> Result<Option<Json>> {
match val {
None => Ok(None),
Some(val) => Ok(Some(val.clone())),
}
}
#[rpn_fn(capture = [ctx])]
#[inline]
fn cast_any_as_any<From: ConvertTo<To> + Evaluable, To: Evaluable>(
ctx: &mut EvalContext,
val: &Option<From>,
) -> Result<Option<To>> {
match val {
None => Ok(None),
Some(val) => {
let val = val.convert(ctx)?;
Ok(Some(val))
}
}
}
#[cfg(test)]
mod tests {
use super::Result;
use crate::impl_cast::*;
use crate::types::test_util::RpnFnScalarEvaluator;
use crate::RpnFnCallExtra;
use std::collections::BTreeMap;
use std::fmt::{Debug, Display};
use std::sync::Arc;
use std::{f32, f64, i64, u64};
use tidb_query_datatype::builder::FieldTypeBuilder;
use tidb_query_datatype::codec::convert::produce_dec_with_specified_tp;
use tidb_query_datatype::codec::data_type::{Bytes, Int, Real};
use tidb_query_datatype::codec::error::{
ERR_DATA_OUT_OF_RANGE, ERR_DATA_TOO_LONG, ERR_TRUNCATE_WRONG_VALUE, ERR_UNKNOWN,
WARN_DATA_TRUNCATED,
};
use tidb_query_datatype::codec::mysql::charset::*;
use tidb_query_datatype::codec::mysql::decimal::{max_decimal, max_or_min_dec};
use tidb_query_datatype::codec::mysql::{
Decimal, Duration, Json, RoundMode, Time, TimeType, MAX_FSP, MIN_FSP,
};
use tidb_query_datatype::codec::Error;
use tidb_query_datatype::expr::Flag;
use tidb_query_datatype::expr::{EvalConfig, EvalContext};
use tidb_query_datatype::{Collation, FieldTypeFlag, FieldTypeTp, UNSPECIFIED_LENGTH};
use tipb::ScalarFuncSig;
fn test_none_with_ctx_and_extra<F, Input, Ret>(func: F)
where
F: Fn(&mut EvalContext, &RpnFnCallExtra, &Option<Input>) -> Result<Option<Ret>>,
{
let mut ctx = EvalContext::default();
let ret_field_type: FieldType = FieldType::default();
let extra = RpnFnCallExtra {
ret_field_type: &ret_field_type,
};
let r = func(&mut ctx, &extra, &None).unwrap();
assert!(r.is_none());
}
fn test_none_with_ctx<F, Input, Ret>(func: F)
where
F: Fn(&mut EvalContext, &Option<Input>) -> Result<Option<Ret>>,
{
let mut ctx = EvalContext::default();
let r = func(&mut ctx, &None).unwrap();
assert!(r.is_none());
}
fn test_none_with_extra<F, Input, Ret>(func: F)
where
F: Fn(&RpnFnCallExtra, &Option<Input>) -> Result<Option<Ret>>,
{
let ret_field_type: FieldType = FieldType::default();
let extra = RpnFnCallExtra {
ret_field_type: &ret_field_type,
};
let r = func(&extra, &None).unwrap();
assert!(r.is_none());
}
fn test_none_with_metadata<F, Input, Ret>(func: F)
where
F: Fn(&tipb::InUnionMetadata, &Option<Input>) -> Result<Option<Ret>>,
{
let metadata = make_metadata(true);
let r = func(&metadata, &None).unwrap();
assert!(r.is_none());
}
fn test_none_with_ctx_and_metadata<F, Input, Ret>(func: F)
where
F: Fn(&mut EvalContext, &tipb::InUnionMetadata, &Option<Input>) -> Result<Option<Ret>>,
{
let mut ctx = EvalContext::default();
let metadata = make_metadata(true);
let r = func(&mut ctx, &metadata, &None).unwrap();
assert!(r.is_none());
}
fn test_none_with_ctx_and_extra_and_metadata<F, Input, Ret>(func: F)
where
F: Fn(
&mut EvalContext,
&RpnFnCallExtra,
&tipb::InUnionMetadata,
&Option<Input>,
) -> Result<Option<Ret>>,
{
let mut ctx = EvalContext::default();
let ret_field_type: FieldType = FieldType::default();
let extra = RpnFnCallExtra {
ret_field_type: &ret_field_type,
};
let metadata = make_metadata(true);
let r = func(&mut ctx, &extra, &metadata, &None).unwrap();
assert!(r.is_none());
}
fn test_none_with_nothing<F, Input, Ret>(func: F)
where
F: Fn(&Option<Input>) -> Result<Option<Ret>>,
{
let r = func(&None).unwrap();
assert!(r.is_none());
}
struct CtxConfig {
overflow_as_warning: bool,
truncate_as_warning: bool,
should_clip_to_zero: bool,
in_insert_stmt: bool,
in_update_or_delete_stmt: bool,
}
impl Default for CtxConfig {
fn default() -> Self {
CtxConfig {
overflow_as_warning: false,
truncate_as_warning: false,
should_clip_to_zero: false,
in_insert_stmt: false,
in_update_or_delete_stmt: false,
}
}
}
impl From<CtxConfig> for EvalContext {
fn from(config: CtxConfig) -> Self {
let mut flag: Flag = Flag::empty();
if config.overflow_as_warning {
flag |= Flag::OVERFLOW_AS_WARNING;
}
if config.truncate_as_warning {
flag |= Flag::TRUNCATE_AS_WARNING;
}
if config.should_clip_to_zero {
flag |= Flag::IN_INSERT_STMT;
}
if config.in_insert_stmt {
flag |= Flag::IN_INSERT_STMT;
}
if config.in_update_or_delete_stmt {
flag |= Flag::IN_UPDATE_OR_DELETE_STMT;
}
let cfg = Arc::new(EvalConfig::from_flag(flag));
EvalContext::new(cfg)
}
}
fn make_metadata(in_union: bool) -> tipb::InUnionMetadata {
let mut metadata = tipb::InUnionMetadata::default();
metadata.set_in_union(in_union);
metadata
}
struct FieldTypeConfig {
unsigned: bool,
flen: isize,
decimal: isize,
charset: Option<&'static str>,
tp: Option<FieldTypeTp>,
collation: Option<Collation>,
}
impl Default for FieldTypeConfig {
fn default() -> Self {
FieldTypeConfig {
unsigned: false,
flen: UNSPECIFIED_LENGTH,
decimal: UNSPECIFIED_LENGTH,
charset: None,
tp: None,
collation: None,
}
}
}
impl From<FieldTypeConfig> for FieldType {
fn from(config: FieldTypeConfig) -> Self {
let mut ft = FieldType::default();
if let Some(c) = config.charset {
ft.set_charset(String::from(c));
}
let fta = ft.as_mut_accessor();
if config.unsigned {
fta.set_flag(FieldTypeFlag::UNSIGNED);
}
fta.set_flen(config.flen);
fta.set_decimal(config.decimal);
if let Some(tp) = config.tp {
fta.set_tp(tp);
}
if let Some(c) = config.collation {
fta.set_collation(c);
}
ft
}
}
fn make_extra(ret_field_type: &FieldType) -> RpnFnCallExtra {
RpnFnCallExtra { ret_field_type }
}
fn make_log<P: Display, R: Display + Debug>(
input: &P,
expect: &R,
result: &Result<Option<R>>,
) -> String {
format!(
"input: {}, expect: {:?}, output: {:?}",
input, expect, result
)
}
fn check_overflow(ctx: &EvalContext, overflow: bool, log: &str) {
if overflow {
assert_eq!(
ctx.warnings.warning_cnt, 1,
"{}, {:?}",
log, ctx.warnings.warnings
);
assert_eq!(
ctx.warnings.warnings[0].get_code(),
ERR_DATA_OUT_OF_RANGE,
"{}",
log
);
} else {
assert_eq!(ctx.warnings.warning_cnt, 0, "{}", log);
}
}
fn check_warning(ctx: &EvalContext, err_code: Option<i32>, log: &str) {
if let Some(x) = err_code {
assert_eq!(
ctx.warnings.warning_cnt, 1,
"log: {}, warnings: {:?}",
log, ctx.warnings.warnings
);
assert_eq!(ctx.warnings.warnings[0].get_code(), x, "{}", log);
}
}
fn check_result<R: Debug + PartialEq>(expect: Option<&R>, res: &Result<Option<R>>, log: &str) {
assert!(res.is_ok(), "{}", log);
let res = res.as_ref().unwrap();
if res.is_none() {
assert!(expect.is_none(), "{}", log);
} else {
let res = res.as_ref().unwrap();
assert_eq!(res, expect.unwrap(), "{}", log);
}
}
// Note for all tests below:
// if no overflow/truncation is expected, do not build the ctx with the
// overflow_as_warning/truncate_as_warning flags; any unexpected
// overflow/truncation will then surface as a panic in `unwrap`.
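// For example (illustrative): with a default EvalContext (no overflow_as_warning),
// `cast_any_as_any::<Real, Int>(&mut ctx, &Real::new(1e30).ok())` returns an Err,
// so the `.unwrap()` in such a test panics and exposes the unexpected overflow.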
#[test]
fn test_int_as_int_others() {
test_none_with_nothing(cast_int_as_int_others);
let cs = vec![
(i64::MAX, i64::MAX),
(i64::MIN, i64::MIN),
(u64::MAX as i64, u64::MAX as i64),
];
for (input, expect) in cs {
let r = cast_int_as_int_others(&Some(input));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_signed_int_as_unsigned_int() {
test_none_with_metadata(cast_signed_int_as_unsigned_int);
let cs = vec![
// (origin, result, in_union)
// in union
(-10, 0u64, true),
(10, 10u64, true),
(i64::MIN, 0u64, true),
(i64::MAX, i64::MAX as u64, true),
// not in union
(-10, (-10i64) as u64, false),
(10, 10u64, false),
(i64::MIN, i64::MIN as u64, false),
(i64::MAX, i64::MAX as u64, false),
];
for (input, expect, in_union) in cs {
let metadata = make_metadata(in_union);
let r = cast_signed_int_as_unsigned_int(&metadata, &Some(input));
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_real_as_int() {
test_none_with_ctx(cast_any_as_any::<Real, Int>);
let cs = vec![
// (origin, result, overflow)
(-10.4, -10i64, false),
(-10.5, -11, false),
(10.4, 10, false),
(10.5, 11, false),
(i64::MAX as f64, i64::MAX, false),
((1u64 << 63) as f64, i64::MAX, false),
(i64::MIN as f64, i64::MIN, false),
((1u64 << 63) as f64 + (1u64 << 62) as f64, i64::MAX, true),
((i64::MIN as f64) * 2f64, i64::MIN, true),
];
for (input, result, overflow) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
}
.into();
let r = cast_any_as_any::<Real, Int>(&mut ctx, &Real::new(input).ok());
let log = make_log(&input, &result, &r);
check_result(Some(&result), &r, log.as_str());
check_overflow(&ctx, overflow, log.as_str());
}
}
#[test]
fn test_real_as_uint() {
test_none_with_ctx_and_metadata(cast_real_as_uint);
// in_union
let cs = vec![
// (input, expect)
(-10.0, 0u64),
(i64::MIN as f64, 0),
(10.0, 10u64),
(i64::MAX as f64, (1u64 << 63)),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let metadata = make_metadata(true);
let r = cast_real_as_uint(&mut ctx, &metadata, &Some(Real::new(input).unwrap()));
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
// no clip to zero
let cs = vec![
// (origin, expect, overflow)
(10.5, 11u64, false),
(10.4, 10u64, false),
(
((1u64 << 63) + (1u64 << 62)) as f64,
((1u64 << 63) + (1u64 << 62)),
false,
),
(u64::MAX as f64, u64::MAX, false),
((u64::MAX as f64) * 2f64, u64::MAX, true),
(-1f64, -1f64 as i64 as u64, true),
];
for (input, expect, overflow) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
}
.into();
let metadata = make_metadata(false);
let r = cast_real_as_uint(&mut ctx, &metadata, &Real::new(input).ok());
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_overflow(&ctx, overflow, log.as_str())
}
// should clip to zero
let cs: Vec<(f64, u64, bool)> = vec![
// (origin, expect, overflow)
(-1f64, 0, true),
(i64::MIN as f64, 0, true),
];
for (input, expect, overflow) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
should_clip_to_zero: true,
..CtxConfig::default()
}
.into();
let metadata = make_metadata(false);
let r = cast_real_as_uint(&mut ctx, &metadata, &Some(Real::new(input).unwrap()));
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_overflow(&ctx, overflow, log.as_str())
}
}
#[test]
fn test_cast_string_as_int() {
// None
{
let output: Option<Int> = RpnFnScalarEvaluator::new()
.push_param(ScalarValue::Bytes(None))
.evaluate(ScalarFuncSig::CastStringAsInt)
.unwrap();
assert_eq!(output, None);
}
#[derive(Debug)]
enum Cond {
None,
Unsigned,
InUnionAndUnsigned,
}
impl Cond {
fn in_union(&self) -> bool {
if let Cond::InUnionAndUnsigned = self {
true
} else {
false
}
}
fn is_unsigned(&self) -> bool {
match self {
Cond::InUnionAndUnsigned | Cond::Unsigned => true,
_ => false,
}
}
}
let cs: Vec<(&str, i64, Vec<i32>, Cond)> = vec![
// (origin, expect, err_code, condition)
// has no prefix `-`
(
" 9223372036854775807 ",
9223372036854775807i64,
vec![],
Cond::None,
),
(
"9223372036854775807",
9223372036854775807i64,
vec![],
Cond::None,
),
(
"9223372036854775808",
9223372036854775808u64 as i64,
vec![ERR_UNKNOWN],
Cond::None,
),
(
"9223372036854775808",
9223372036854775808u64 as i64,
vec![],
Cond::Unsigned,
),
(
" 9223372036854775807abc ",
9223372036854775807i64,
vec![ERR_TRUNCATE_WRONG_VALUE],
Cond::None,
),
(
"9223372036854775807abc",
9223372036854775807i64,
vec![ERR_TRUNCATE_WRONG_VALUE],
Cond::None,
),
(
"9223372036854775808abc",
9223372036854775808u64 as i64,
vec![ERR_TRUNCATE_WRONG_VALUE, ERR_UNKNOWN],
Cond::None,
),
(
"9223372036854775808abc",
9223372036854775808u64 as i64,
vec![ERR_TRUNCATE_WRONG_VALUE],
Cond::Unsigned,
),
// TODO: there are some cases that have not been covered.
// FIXME: in MySQL, this case returns 18446744073709551615
// and `show warnings` shows
// `| Warning | 1292 | Truncated incorrect INTEGER value: '18446744073709551616'`;
// fix cast_string_as_int after TiDB's behavior is fixed.
// ("18446744073709551616", 18446744073709551615 as i64, Some(ERR_TRUNCATE_WRONG_VALUE) , Cond::Unsigned)
// FIXME: our cast_string_as_int's error handling is not exactly the same as TiDB's.
// ("18446744073709551616", 18446744073709551615u64 as i64, Some(ERR_TRUNCATE_WRONG_VALUE), Cond::InSelectStmt),
// has prefix `-` and in_union and unsigned
("-10", 0, vec![], Cond::InUnionAndUnsigned),
("-9223372036854775808", 0, vec![], Cond::InUnionAndUnsigned),
// has prefix `-` and not in_union or not unsigned
("-10", -10i64, vec![], Cond::None),
(
"-9223372036854775808",
-9223372036854775808i64,
vec![],
Cond::None,
),
// FIXME: our cast_string_as_int's error handling is not exactly the same as TiDB's
(
"-9223372036854775809",
-9223372036854775808i64,
vec![ERR_TRUNCATE_WRONG_VALUE],
Cond::None,
),
("-10", -10i64, vec![ERR_UNKNOWN], Cond::Unsigned),
(
"-9223372036854775808",
-9223372036854775808i64,
vec![ERR_UNKNOWN],
Cond::Unsigned,
),
(
"-9223372036854775809",
-9223372036854775808i64,
vec![ERR_TRUNCATE_WRONG_VALUE],
Cond::Unsigned,
),
];
for (input, expected, mut err_code, cond) in cs {
let (result, ctx) = RpnFnScalarEvaluator::new()
.context(CtxConfig {
overflow_as_warning: true,
truncate_as_warning: true,
..CtxConfig::default()
})
.metadata(Box::new(make_metadata(cond.in_union())))
.push_param(ScalarValue::Bytes(Some(input.as_bytes().to_owned())))
.evaluate_raw(
FieldTypeConfig {
tp: Some(FieldTypeTp::LongLong),
unsigned: cond.is_unsigned(),
..FieldTypeConfig::default()
},
ScalarFuncSig::CastStringAsInt,
);
let output: Option<Int> = result.unwrap().into();
assert_eq!(
output.unwrap(),
expected,
"input:{:?}, expected:{:?}, cond:{:?}",
input,
expected,
cond,
);
let mut got_warnings = ctx
.warnings
.warnings
.iter()
.map(|w| w.get_code())
.collect::<Vec<i32>>();
got_warnings.sort();
err_code.sort();
assert_eq!(
ctx.warnings.warning_cnt,
err_code.len(),
"input:{:?}, expected:{:?}, warnings:{:?}",
input,
expected,
got_warnings,
);
assert_eq!(got_warnings, err_code);
}
// binary literal
let cases = vec![
(vec![0x01, 0x02, 0x03], Some(0x010203 as i64)),
(vec![0x01, 0x02, 0x03, 0x4], Some(0x01020304 as i64)),
(
vec![0x01, 0x02, 0x03, 0x4, 0x05, 0x06, 0x06, 0x06, 0x06],
None,
),
];
for (input, expected) in cases {
let output: Result<Option<Int>> = RpnFnScalarEvaluator::new()
.return_field_type(FieldTypeConfig {
tp: Some(FieldTypeTp::LongLong),
..FieldTypeConfig::default()
})
.push_param_with_field_type(
input.clone(),
FieldTypeConfig {
tp: Some(FieldTypeTp::VarString),
collation: Some(Collation::Binary),
..FieldTypeConfig::default()
},
)
.evaluate(ScalarFuncSig::CastStringAsInt);
if let Some(exp) = expected {
assert!(output.is_ok(), "input: {:?}", input);
assert_eq!(output.unwrap().unwrap(), exp, "input={:?}", input);
} else {
assert!(output.is_err());
}
}
}
#[test]
fn test_decimal_as_int() {
test_none_with_ctx(cast_any_as_any::<Decimal, Int>);
let cs: Vec<(Decimal, i64, Option<i32>)> = vec![
// (origin, expect, overflow)
(
Decimal::from_bytes(b"9223372036854775807")
.unwrap()
.unwrap(),
9223372036854775807,
None,
),
(
Decimal::from_bytes(b"-9223372036854775808")
.unwrap()
.unwrap(),
-9223372036854775808,
None,
),
(
Decimal::from_bytes(b"9223372036854775808")
.unwrap()
.unwrap(),
9223372036854775807,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
(
Decimal::from_bytes(b"-9223372036854775809")
.unwrap()
.unwrap(),
-9223372036854775808,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
];
for (input, expect, err_code) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
}
.into();
let r = cast_any_as_any::<Decimal, Int>(&mut ctx, &Some(input));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_warning(&ctx, err_code, log.as_str());
}
}
#[test]
fn test_decimal_as_uint() {
test_none_with_ctx_and_metadata(cast_decimal_as_uint);
// in_union
let cs: Vec<(Decimal, u64)> = vec![
(
Decimal::from_bytes(b"-9223372036854775808")
.unwrap()
.unwrap(),
0,
),
(
Decimal::from_bytes(b"-9223372036854775809")
.unwrap()
.unwrap(),
0,
),
(
Decimal::from_bytes(b"9223372036854775808")
.unwrap()
.unwrap(),
9223372036854775808,
),
(
Decimal::from_bytes(b"18446744073709551615")
.unwrap()
.unwrap(),
18446744073709551615,
),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let metadata = make_metadata(true);
let r = cast_decimal_as_uint(&mut ctx, &metadata, &Some(input));
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
let cs: Vec<(Decimal, u64, Option<i32>)> = vec![
// (input, expect, err_code)
(Decimal::from_bytes(b"10").unwrap().unwrap(), 10, None),
(
Decimal::from_bytes(b"1844674407370955161")
.unwrap()
.unwrap(),
1844674407370955161,
None,
),
(
Decimal::from_bytes(b"-10").unwrap().unwrap(),
0,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
(
Decimal::from_bytes(b"18446744073709551616")
.unwrap()
.unwrap(),
u64::MAX,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
];
for (input, expect, err_code) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
}
.into();
let metadata = make_metadata(false);
let r = cast_decimal_as_uint(&mut ctx, &metadata, &Some(input));
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_warning(&ctx, err_code, log.as_str());
}
}
#[test]
fn test_time_as_int_and_uint() {
let mut ctx = EvalContext::default();
// TODO: add more test cases
// TODO: add a test that makes cast_any_as_any::<Time, Int> return a truncation error
let cs: Vec<(Time, i64)> = vec![
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14", 0, true).unwrap(),
20000101121314,
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 0, true).unwrap(),
20000101121315,
),
// FIXME:
// Time::parse_utc_datetime("2000-01-01T12:13:14.6666", 4).unwrap().round_frac(DEFAULT_FSP)
// yields 2000-01-01T12:13:14, which is a bug
// (
// Time::parse_utc_datetime("2000-01-01T12:13:14.6666", 4).unwrap(),
// 20000101121315,
// ),
];
for (input, expect) in cs {
let r = cast_any_as_any::<Time, Int>(&mut ctx, &Some(input));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_cast_int_as_time() {
let should_pass = vec![
("0000-00-00 00:00:00", 0),
("2000-01-01 00:00:00", 101),
("2045-00-00 00:00:00", 450_000),
("2059-12-31 00:00:00", 591_231),
("1970-01-01 00:00:00", 700_101),
("1999-12-31 00:00:00", 991_231),
("2000-01-01 00:00:00", 101_000_000),
("2069-12-31 23:59:59", 691_231_235_959),
("1970-01-01 00:00:00", 700_101_000_000),
("1999-12-31 23:59:59", 991_231_235_959),
("0100-00-00 00:00:00", 1_000_000_000_000),
("1000-01-01 00:00:00", 10_000_101_000_000),
("1999-01-01 00:00:00", 19_990_101_000_000),
];
for (expected, input) in should_pass {
let actual: Time = RpnFnScalarEvaluator::new()
.push_param(input)
.return_field_type(FieldTypeBuilder::new().tp(FieldTypeTp::DateTime).build())
.evaluate(ScalarFuncSig::CastIntAsTime)
// `Result<Option<_>>`
.unwrap()
.unwrap();
assert_eq!(actual.to_string(), expected);
}
let should_fail = vec![
-11111,
1,
100,
700_100,
10_000_100,
100_000_000,
100_000_101_000_000,
];
for case in should_fail {
let actual = RpnFnScalarEvaluator::new()
.push_param(case)
.return_field_type(FieldTypeBuilder::new().tp(FieldTypeTp::Date).build())
.evaluate::<Time>(ScalarFuncSig::CastIntAsTime)
.unwrap();
assert!(actual.is_none());
}
}
#[test]
fn test_cast_real_time() {
let cases = vec![
("2019-09-16 10:11:12", 190916101112.111, 0),
("2019-09-16 10:11:12", 20190916101112.111, 0),
("2019-09-16 10:11:12", 20190916101112.123, 0),
("2019-09-16 10:11:13", 20190916101112.999, 0),
];
for (expected, input, fsp) in cases {
let actual: Time = RpnFnScalarEvaluator::new()
.push_param(input)
.return_field_type(
FieldTypeBuilder::new()
.tp(FieldTypeTp::DateTime)
.decimal(fsp)
.build(),
)
.evaluate::<Time>(ScalarFuncSig::CastRealAsTime)
// `Result<Option<_>>`
.unwrap()
.unwrap();
assert_eq!(actual.to_string(), expected);
}
}
#[test]
fn test_cast_string_as_time() {
let cases = vec![
("2019-09-16 10:11:12", "20190916101112", 0),
("2019-09-16 10:11:12", "190916101112", 0),
("2019-09-16 10:11:01", "19091610111", 0),
("2019-09-16 10:11:00", "1909161011", 0),
("2019-09-16 10:01:00", "190916101", 0),
("1909-12-10 00:00:00", "19091210", 0),
("2020-02-29 10:00:00", "20200229100000", 0),
("2019-09-16 01:00:00", "1909161", 0),
("2019-09-16 00:00:00", "190916", 0),
("2019-09-01 00:00:00", "19091", 0),
("2019-09-16 10:11:12.111", "190916101112.111", 3),
("2019-09-16 10:11:12.111", "20190916101112.111", 3),
("2019-09-16 10:11:12.67", "20190916101112.666", 2),
("2019-09-16 10:11:13.0", "20190916101112.999", 1),
("2019-09-16 00:00:00", "2019-09-16", 0),
("2019-09-16 10:11:12", "2019-09-16 10:11:12", 0),
("2019-09-16 10:11:12", "2019-09-16T10:11:12", 0),
("2019-09-16 10:11:12.7", "2019-09-16T10:11:12.66", 1),
("2019-09-16 10:11:13.0", "2019-09-16T10:11:12.99", 1),
("2020-01-01 00:00:00.0", "2019-12-31 23:59:59.99", 1),
];
for (expected, input, fsp) in cases {
let actual: Time = RpnFnScalarEvaluator::new()
.push_param(input.as_bytes().to_vec())
.return_field_type(
FieldTypeBuilder::new()
.tp(FieldTypeTp::DateTime)
.decimal(fsp)
.build(),
)
.evaluate::<Time>(ScalarFuncSig::CastStringAsTime)
// `Result<Option<_>>`
.unwrap()
.unwrap();
assert_eq!(actual.to_string(), expected);
}
}
#[test]
fn test_time_as_time() {
let cases = vec![
// (Timestamp, DateTime)
("2020-02-29 10:00:00.999", "2020-02-29 10:00:01.0", 1),
("2019-09-16 01:00:00.999", "2019-09-16 01:00:01.00", 2),
("2019-09-16 00:00:00.9999", "2019-09-16 00:00:01.0", 1),
];
for (input, expected, fsp) in cases {
let mut ctx = EvalContext::default();
let time =
Time::parse_timestamp(&mut ctx, input, MAX_FSP, /* Enable round*/ true).unwrap();
let actual: Time = RpnFnScalarEvaluator::new()
.push_param(time)
.return_field_type(
FieldTypeBuilder::new()
.tp(FieldTypeTp::DateTime)
.decimal(fsp)
.build(),
)
.evaluate::<Time>(ScalarFuncSig::CastTimeAsTime)
// `Result<Option<_>>`
.unwrap()
.unwrap();
assert_eq!(actual.to_string(), expected);
}
}
#[test]
fn test_cast_duration_as_time() {
use chrono::Datelike;
let cases = vec!["11:30:45.123456", "-35:30:46"];
for case in cases {
let mut ctx = EvalContext::default();
let duration = Duration::parse(&mut ctx, case.as_bytes(), MAX_FSP).unwrap();
let now = RpnFnScalarEvaluator::new()
.push_param(duration)
.return_field_type(
FieldTypeBuilder::new()
.tp(FieldTypeTp::DateTime)
.decimal(MAX_FSP as isize)
.build(),
)
.evaluate::<Time>(ScalarFuncSig::CastDurationAsTime)
.unwrap()
.unwrap();
let chrono_today = chrono::Utc::now();
let today = now.checked_sub(&mut ctx, duration).unwrap();
assert_eq!(today.year(), chrono_today.year() as u32);
assert_eq!(today.month(), chrono_today.month());
assert_eq!(today.day(), chrono_today.day());
assert_eq!(today.hour(), 0);
assert_eq!(today.minute(), 0);
assert_eq!(today.second(), 0);
assert_eq!(today.micro(), 0);
}
}
#[test]
fn test_cast_decimal_as_time() {
let cases = vec![
("2019-09-16 10:11:12", "20190916101112", 0),
("2019-09-16 10:11:12", "190916101112", 0),
("2019-09-16 10:11:01", "19091610111", 0),
("2019-09-16 10:11:00", "1909161011", 0),
("2019-09-16 10:01:00", "190916101", 0),
("1909-12-10 00:00:00", "19091210", 0),
("2020-02-29 10:00:00", "20200229100000", 0),
("2019-09-16 01:00:00", "1909161", 0),
("2019-09-16 00:00:00", "190916", 0),
("2019-09-01 00:00:00", "19091", 0),
("2019-09-16 10:11:12.111", "190916101112.111", 3),
("2019-09-16 10:11:12.111", "20190916101112.111", 3),
("2019-09-16 10:11:12.67", "20190916101112.666", 2),
("2019-09-16 10:11:13.0", "20190916101112.999", 1),
];
for (expected, decimal, fsp) in cases {
let decimal: Decimal = decimal.parse().unwrap();
let actual: Time = RpnFnScalarEvaluator::new()
.push_param(decimal)
.return_field_type(
FieldTypeBuilder::new()
.tp(FieldTypeTp::DateTime)
.decimal(fsp)
.build(),
)
.evaluate(ScalarFuncSig::CastDecimalAsTime)
// `Result<Option<_>>`
.unwrap()
.unwrap();
assert_eq!(actual.to_string(), expected);
}
}
#[test]
fn test_duration_as_int() {
let mut ctx = EvalContext::default();
// TODO: add more test cases
let cs: Vec<(Duration, i64)> = vec![
(
Duration::parse(&mut ctx, b"17:51:04.78", 2).unwrap(),
175105,
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 2).unwrap(),
-175105,
),
(
Duration::parse(&mut ctx, b"17:51:04.78", 0).unwrap(),
175105,
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 0).unwrap(),
-175105,
),
];
for (input, expect) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
}
.into();
let r = cast_any_as_any::<Duration, Int>(&mut ctx, &Some(input));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_json_as_int() {
test_none_with_ctx(cast_any_as_any::<Json, Int>);
// only the last two cases overflow
let cs = vec![
// (origin, expect, overflow)
(Json::from_object(BTreeMap::default()).unwrap(), 0, false),
(Json::from_array(vec![]).unwrap(), 0, false),
(Json::from_i64(10).unwrap(), 10i64, false),
(Json::from_i64(i64::MAX).unwrap(), i64::MAX, false),
(Json::from_i64(i64::MIN).unwrap(), i64::MIN, false),
(Json::from_u64(0).unwrap(), 0, false),
(Json::from_u64(u64::MAX).unwrap(), u64::MAX as i64, false),
(
Json::from_f64(i64::MIN as u64 as f64).unwrap(),
i64::MAX,
false,
),
(
Json::from_f64(i64::MAX as u64 as f64).unwrap(),
i64::MAX,
false,
),
(
Json::from_f64(i64::MIN as u64 as f64).unwrap(),
i64::MAX,
false,
),
(Json::from_f64(i64::MIN as f64).unwrap(), i64::MIN, false),
(Json::from_f64(10.5).unwrap(), 11, false),
(Json::from_f64(10.4).unwrap(), 10, false),
(Json::from_f64(-10.4).unwrap(), -10, false),
(Json::from_f64(-10.5).unwrap(), -11, false),
(Json::from_string(String::from("10.0")).unwrap(), 10, false),
(Json::from_bool(true).unwrap(), 1, false),
(Json::from_bool(false).unwrap(), 0, false),
(Json::none().unwrap(), 0, false),
(
Json::from_f64(((1u64 << 63) + (1u64 << 62)) as u64 as f64).unwrap(),
i64::MAX,
true,
),
(
Json::from_f64(-((1u64 << 63) as f64 + (1u64 << 62) as f64)).unwrap(),
i64::MIN,
true,
),
];
for (input, expect, overflow) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
}
.into();
let r = cast_any_as_any::<Json, Int>(&mut ctx, &Some(input.clone()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_overflow(&ctx, overflow, log.as_str());
}
}
#[test]
fn test_json_as_uint() {
test_none_with_ctx(cast_json_as_uint);
// no clip to zero
let cs: Vec<(Json, u64, Option<i32>)> = vec![
// (origin, expect, error_code)
(Json::from_f64(-1.0).unwrap(), -1.0f64 as i64 as u64, None),
(Json::from_string(String::from("10")).unwrap(), 10, None),
(
Json::from_string(String::from("+10abc")).unwrap(),
10,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
(
Json::from_string(String::from("9999999999999999999999999")).unwrap(),
u64::MAX,
Some(ERR_DATA_OUT_OF_RANGE),
),
(
Json::from_f64(2f64 * (u64::MAX as f64)).unwrap(),
u64::MAX,
Some(ERR_DATA_OUT_OF_RANGE),
),
];
for (input, expect, error_code) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
truncate_as_warning: true,
..CtxConfig::default()
}
.into();
let r = cast_json_as_uint(&mut ctx, &Some(input.clone()));
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_warning(&ctx, error_code, log.as_str());
}
// should clip to zero
let cs: Vec<(Json, u64, Option<i32>)> = vec![
// (origin, expect, err_code)
(Json::from_f64(-1.0).unwrap(), 0, None),
(
Json::from_string(String::from("-10")).unwrap(),
0,
Some(ERR_DATA_OUT_OF_RANGE),
),
(Json::from_string(String::from("10")).unwrap(), 10, None),
(
Json::from_string(String::from("+10abc")).unwrap(),
10,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
(
Json::from_string(String::from("9999999999999999999999999")).unwrap(),
u64::MAX,
Some(ERR_DATA_OUT_OF_RANGE),
),
(
Json::from_f64(2f64 * (u64::MAX as f64)).unwrap(),
u64::MAX,
Some(ERR_DATA_OUT_OF_RANGE),
),
];
for (input, expect, err_code) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
truncate_as_warning: true,
should_clip_to_zero: true,
..CtxConfig::default()
}
.into();
let r = cast_json_as_uint(&mut ctx, &Some(input.clone()));
let r = r.map(|x| x.map(|x| x as u64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_warning(&ctx, err_code, log.as_str());
}
}
#[test]
fn test_signed_int_as_signed_real() {
test_none_with_nothing(cast_signed_int_as_signed_real);
let cs: Vec<(i64, f64)> = vec![
// (input, expect)
(i64::MIN, i64::MIN as f64),
(0, 0f64),
(i64::MAX, i64::MAX as f64),
];
for (input, expect) in cs {
let r = cast_signed_int_as_signed_real(&Some(input));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_signed_int_as_unsigned_real() {
test_none_with_metadata(cast_signed_int_as_unsigned_real);
let cs: Vec<(i64, f64, bool)> = vec![
// (input, expect, in_union)
// TODO: add test case of negative int to unsigned real without in_union
// (i64::MIN, i64::MIN as u64 as f64, false),
// not in union
(i64::MAX, i64::MAX as f64, false),
(0, 0f64, false),
// in union
(i64::MIN, 0f64, true),
(-1, 0f64, true),
(i64::MAX, i64::MAX as f64, true),
(0, 0f64, true),
];
for (input, expect, in_union) in cs {
let metadata = make_metadata(in_union);
let r = cast_signed_int_as_unsigned_real(&metadata, &Some(input));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = format!(
"input: {}, expect: {}, in_union: {}",
input, expect, in_union
);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_unsigned_int_as_signed_or_unsigned_real() {
test_none_with_nothing(cast_unsigned_int_as_signed_or_unsigned_real);
let cs = vec![
// (input, expect)
(0, 0f64),
(u64::MAX, u64::MAX as f64),
(i64::MAX as u64, i64::MAX as u64 as f64),
];
for (input, expect) in cs {
let r = cast_unsigned_int_as_signed_or_unsigned_real(&Some(input as i64));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_real_as_signed_real() {
test_none_with_nothing(cast_real_as_signed_real);
let cs = vec![
// (input, expect)
(f64::from(f32::MIN), f64::from(f32::MIN)),
(f64::from(f32::MAX), f64::from(f32::MAX)),
(f64::MIN, f64::MIN),
(0f64, 0f64),
(f64::MAX, f64::MAX),
(i64::MIN as f64, i64::MIN as f64),
(i64::MAX as f64, i64::MAX as f64),
(u64::MAX as f64, u64::MAX as f64),
];
for (input, expect) in cs {
let r = cast_real_as_signed_real(&Some(Real::new(input).unwrap()));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_real_as_unsigned_real() {
let cs = vec![
// (input, expect, in_union)
// not in union
// TODO: add test case of negative real to unsigned real
// (-1.0, -1.0, false),
// (i64::MIN as f64, i64::MIN as f64, false),
// (f64::MIN, f64::MIN, false),
(u64::MIN as f64, u64::MIN as f64, false),
(1.0, 1.0, false),
(i64::MAX as f64, i64::MAX as f64, false),
(u64::MAX as f64, u64::MAX as f64, false),
(f64::MAX, f64::MAX, false),
// in union
(-1.0, 0.0, true),
(i64::MIN as f64, 0.0, true),
(u64::MIN as f64, 0.0, true),
(f64::MIN, 0.0, true),
(1.0, 1.0, true),
(i64::MAX as f64, i64::MAX as f64, true),
(u64::MAX as f64, u64::MAX as f64, true),
(f64::MAX, f64::MAX, true),
];
for (input, expect, in_union) in cs {
let metadata = make_metadata(in_union);
let r = cast_real_as_unsigned_real(&metadata, &Some(Real::new(input).unwrap()));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = format!(
"input: {}, expect: {}, in_union: {}",
input, expect, in_union
);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_cast_string_as_real() {
// None
{
let output: Option<Real> = RpnFnScalarEvaluator::new()
.push_param(ScalarValue::Bytes(None))
.evaluate(ScalarFuncSig::CastStringAsReal)
.unwrap();
assert_eq!(output, None);
}
// signed
let ul = UNSPECIFIED_LENGTH;
let cs: Vec<(String, f64, isize, isize, bool, bool)> = vec![
// (input, expect, flen, decimal, truncated, overflow)
// no special flen and decimal
(String::from("99999999"), 99999999f64, ul, ul, false, false),
(String::from("1234abc"), 1234f64, ul, ul, true, false),
(String::from("-1234abc"), -1234f64, ul, ul, true, false),
(
(0..400).map(|_| '9').collect::<String>(),
f64::MAX,
ul,
ul,
true,
false,
),
(
(0..401)
.map(|x| if x == 0 { '-' } else { '9' })
.collect::<String>(),
f64::MIN,
ul,
ul,
true,
false,
),
// with special flen and decimal
(String::from("99999999"), 99999999f64, 8, 0, false, false),
(String::from("99999999"), 99999999f64, 9, 0, false, false),
(String::from("99999999"), 9999999f64, 7, 0, false, true),
(String::from("99999999"), 999999.99, 8, 2, false, true),
(String::from("1234abc"), 0.9f64, 1, 1, true, true),
(String::from("-1234abc"), -0.9f64, 1, 1, true, true),
];
for (input, expected, flen, decimal, truncated, overflow) in cs {
let (result, ctx) = RpnFnScalarEvaluator::new()
.context(CtxConfig {
overflow_as_warning: true,
truncate_as_warning: true,
..CtxConfig::default()
})
.push_param(input.clone().into_bytes())
.evaluate_raw(
FieldTypeConfig {
unsigned: false,
flen,
decimal,
tp: Some(FieldTypeTp::Double),
..FieldTypeConfig::default()
},
ScalarFuncSig::CastStringAsReal,
);
let output: Option<Real> = result.unwrap().into();
assert!(
(output.unwrap().into_inner() - expected).abs() < std::f64::EPSILON,
"input={:?}",
input
);
let (warning_cnt, warnings) = match (truncated, overflow) {
(true, true) => (2, vec![ERR_TRUNCATE_WRONG_VALUE, ERR_DATA_OUT_OF_RANGE]),
(true, false) => (1, vec![ERR_TRUNCATE_WRONG_VALUE]),
(false, true) => (1, vec![ERR_DATA_OUT_OF_RANGE]),
_ => (0, vec![]),
};
assert_eq!(ctx.warnings.warning_cnt, warning_cnt);
let mut got_warnings = ctx
.warnings
.warnings
.iter()
.map(|w| w.get_code())
.collect::<Vec<i32>>();
got_warnings.sort();
assert_eq!(got_warnings, warnings);
}
// unsigned
let cs: Vec<(String, f64, isize, isize, bool, bool, bool)> = vec![
// (input, expect, flen, decimal, truncated, overflow, in_union)
// not in union
(
String::from("99999999"),
99999999f64,
ul,
ul,
false,
false,
false,
),
(String::from("1234abc"), 1234f64, ul, ul, true, false, false),
(
(0..400).map(|_| '9').collect::<String>(),
f64::MAX,
ul,
ul,
true,
false,
false,
),
(
String::from("99999999"),
99999999f64,
8,
0,
false,
false,
false,
),
(
String::from("99999999"),
9999999.9,
8,
1,
false,
true,
false,
),
(
String::from("99999999"),
999999.99,
8,
2,
false,
true,
false,
),
(String::from("99999999"), 999999.9, 7, 1, false, true, false),
(String::from("1234abc"), 1234.0, 4, 0, true, false, false),
(String::from("1234abc"), 999.9, 4, 1, true, true, false),
(String::from("1234abc"), 99.99, 4, 2, true, true, false),
(String::from("1234abc"), 99.9, 3, 1, true, true, false),
(String::from("1234abc"), 9.999, 4, 3, true, true, false),
(
String::from("99999999"),
99999999f64,
8,
0,
false,
false,
false,
),
(
String::from("99999999"),
9999999.9,
8,
1,
false,
true,
false,
),
(
String::from("99999999"),
999999.99,
8,
2,
false,
true,
false,
),
(String::from("99999999"), 999999.9, 7, 1, false, true, false),
(String::from("1234abc"), 1234.0, 4, 0, true, false, false),
(String::from("1234abc"), 999.9, 4, 1, true, true, false),
(String::from("1234abc"), 99.99, 4, 2, true, true, false),
(String::from("1234abc"), 99.9, 3, 1, true, true, false),
(String::from("1234abc"), 9.999, 4, 3, true, true, false),
(
(0..400).map(|_| '9').collect::<String>(),
f64::MAX,
ul,
ul,
true,
false,
false,
),
(
(0..400).map(|_| '9').collect::<String>(),
9999999999.0,
10,
0,
true,
true,
false,
),
(
(0..400).map(|_| '9').collect::<String>(),
999999999.9,
10,
1,
true,
true,
false,
),
// TODO
// (
// (0..401)
// .map(|x| if x == 0 { '-' } else { '9' })
// .collect::<String>(),
// 0f64, ul, ul, true, true, false,
// ),
// (
// String::from("-1234abc"), 0f64, ul, ul,
// true, true, false,
// ),
// (String::from("-1234abc"), 0.0, 4, 0, true, true, false),
// (String::from("-1234abc"), 0.0, 4, 1, true, true, false),
// (String::from("-1234abc"), 0.0, 4, 2, true, true, false),
// (String::from("-1234abc"), 0.0, 3, 1, true, true, false),
// (String::from("-1234abc"), 0.0, 4, 3, true, true, false),
// in union
// in union and neg
(String::from("-190"), 0f64, ul, ul, false, false, true),
(String::from("-10abc"), 0f64, ul, ul, true, false, true),
(String::from("-1234abc"), 0.0, ul, ul, true, false, true),
];
for (input, expected, flen, decimal, truncated, overflow, in_union) in cs {
let (result, ctx) = RpnFnScalarEvaluator::new()
.context(CtxConfig {
overflow_as_warning: true,
truncate_as_warning: true,
..CtxConfig::default()
})
.metadata(Box::new(make_metadata(in_union)))
.push_param(input.clone().into_bytes())
.evaluate_raw(
FieldTypeConfig {
unsigned: true,
flen,
decimal,
tp: Some(FieldTypeTp::Double),
..FieldTypeConfig::default()
},
ScalarFuncSig::CastStringAsReal,
);
let output: Option<Real> = result.unwrap().into();
assert!(
(output.unwrap().into_inner() - expected).abs() < std::f64::EPSILON,
"input:{:?}, expected:{:?}, flen:{:?}, decimal:{:?}, truncated:{:?}, overflow:{:?}, in_union:{:?}",
input, expected, flen, decimal, truncated, overflow, in_union
);
let (warning_cnt, warnings) = match (truncated, overflow) {
(true, true) => (2, vec![ERR_TRUNCATE_WRONG_VALUE, ERR_DATA_OUT_OF_RANGE]),
(true, false) => (1, vec![ERR_TRUNCATE_WRONG_VALUE]),
(false, true) => (1, vec![ERR_DATA_OUT_OF_RANGE]),
_ => (0, vec![]),
};
let mut got_warnings = ctx
.warnings
.warnings
.iter()
.map(|w| w.get_code())
.collect::<Vec<i32>>();
got_warnings.sort();
assert_eq!(
ctx.warnings.warning_cnt, warning_cnt,
"input:{:?}, expected:{:?}, flen:{:?}, decimal:{:?}, truncated:{:?}, overflow:{:?}, in_union:{:?}, warnings:{:?}",
input, expected, flen, decimal, truncated, overflow, in_union,got_warnings,
);
assert_eq!(got_warnings, warnings);
}
// not in union, neg
let cs: Vec<(String, f64, isize, isize, Vec<i32>)> = vec![
(
(0..401)
.map(|x| if x == 0 { '-' } else { '9' })
.collect::<String>(),
0f64,
ul,
ul,
vec![ERR_TRUNCATE_WRONG_VALUE, ERR_DATA_OUT_OF_RANGE],
),
(
String::from("-1234abc"),
0f64,
ul,
ul,
vec![ERR_TRUNCATE_WRONG_VALUE, ERR_DATA_OUT_OF_RANGE],
),
(
String::from("-1234abc"),
0.0,
4,
0,
vec![ERR_TRUNCATE_WRONG_VALUE, ERR_DATA_OUT_OF_RANGE],
),
// the cases below produce 3 warnings:
// 1. from getValidFloatPrefix, because of the `abc` suffix in `-1234abc` (ERR_TRUNCATE_WRONG_VALUE)
// 2. from ProduceFloatWithSpecifiedTp, because of TruncateFloat (ERR_DATA_OUT_OF_RANGE)
// 3. from ProduceFloatWithSpecifiedTp, because the value is negative but the type is unsigned (ERR_DATA_OUT_OF_RANGE)
(
String::from("-1234abc"),
0.0,
4,
1,
vec![
ERR_TRUNCATE_WRONG_VALUE,
ERR_DATA_OUT_OF_RANGE,
ERR_DATA_OUT_OF_RANGE,
],
),
(
String::from("-1234abc"),
0.0,
4,
2,
vec![
ERR_TRUNCATE_WRONG_VALUE,
ERR_DATA_OUT_OF_RANGE,
ERR_DATA_OUT_OF_RANGE,
],
),
(
String::from("-1234abc"),
0.0,
3,
1,
vec![
ERR_TRUNCATE_WRONG_VALUE,
ERR_DATA_OUT_OF_RANGE,
ERR_DATA_OUT_OF_RANGE,
],
),
(
String::from("-1234abc"),
0.0,
4,
3,
vec![
ERR_TRUNCATE_WRONG_VALUE,
ERR_DATA_OUT_OF_RANGE,
ERR_DATA_OUT_OF_RANGE,
],
),
];
for (input, expected, flen, decimal, err_codes) in cs {
let (result, ctx) = RpnFnScalarEvaluator::new()
.context(CtxConfig {
overflow_as_warning: true,
truncate_as_warning: true,
..CtxConfig::default()
})
.metadata(Box::new(make_metadata(false)))
.push_param(input.clone().into_bytes())
.evaluate_raw(
FieldTypeConfig {
unsigned: true,
flen,
decimal,
tp: Some(FieldTypeTp::Double),
..FieldTypeConfig::default()
},
ScalarFuncSig::CastStringAsReal,
);
let output: Option<Real> = result.unwrap().into();
assert!(
(output.unwrap().into_inner() - expected).abs() < std::f64::EPSILON,
"input={:?}",
input
);
assert_eq!(ctx.warnings.warning_cnt, err_codes.len());
for (idx, err) in err_codes.iter().enumerate() {
assert_eq!(
ctx.warnings.warnings[idx].get_code(),
*err,
"input: {:?}",
input
);
}
}
// binary literal
let cases = vec![
(vec![0x01, 0x02, 0x03], Some(f64::from(0x010203))),
(vec![0x01, 0x02, 0x03, 0x4], Some(f64::from(0x01020304))),
(
vec![0x01, 0x02, 0x03, 0x4, 0x05, 0x06, 0x06, 0x06, 0x06],
None,
),
];
for (input, expected) in cases {
let output: Result<Option<Real>> = RpnFnScalarEvaluator::new()
.metadata(Box::new(make_metadata(false)))
.return_field_type(FieldTypeConfig {
flen: tidb_query_datatype::UNSPECIFIED_LENGTH,
decimal: tidb_query_datatype::UNSPECIFIED_LENGTH,
tp: Some(FieldTypeTp::Double),
..FieldTypeConfig::default()
})
.push_param_with_field_type(
input.clone(),
FieldTypeConfig {
tp: Some(FieldTypeTp::VarString),
collation: Some(Collation::Binary),
..FieldTypeConfig::default()
},
)
.evaluate(ScalarFuncSig::CastStringAsReal);
if let Some(exp) = expected {
assert!(output.is_ok(), "input: {:?}", input);
assert!(
(output.unwrap().unwrap().into_inner() - exp).abs() < std::f64::EPSILON,
"input={:?}",
input
);
} else {
assert!(output.is_err());
}
}
}
#[test]
fn test_decimal_as_signed_real() {
test_none_with_ctx(cast_any_as_any::<Decimal, Real>);
// Because a decimal can always be represented by a signed real,
// we don't need to check for a truncation error.
let cs = vec![
// (input, expect)
(Decimal::from_f64(-10.0).unwrap(), -10.0),
(Decimal::from_f64(i64::MIN as f64).unwrap(), i64::MIN as f64),
(Decimal::from_f64(i64::MAX as f64).unwrap(), i64::MAX as f64),
(Decimal::from_f64(u64::MAX as f64).unwrap(), u64::MAX as f64),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let r = cast_any_as_any::<Decimal, Real>(&mut ctx, &Some(input));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_decimal_as_unsigned_real() {
test_none_with_ctx_and_metadata(cast_decimal_as_unsigned_real);
let cs: Vec<(Decimal, f64, bool, bool)> = vec![
// (origin, expect, in_union, overflow)
// not in union
(Decimal::from(0), 0.0, false, false),
(
Decimal::from(9223372036854775807u64),
9223372036854775807.0,
false,
false,
),
(
Decimal::from_bytes(b"9223372036854775809")
.unwrap()
.unwrap(),
9223372036854775809.0,
false,
false,
),
// TODO: add test case for negative decimal to unsigned real
// in union
(Decimal::from(-1023), 0f64, true, false),
(Decimal::from(-10), 0f64, true, false),
(Decimal::from(i64::MIN), 0f64, true, false),
(Decimal::from(1023), 1023.0, true, false),
(Decimal::from(10), 10.0, true, false),
(Decimal::from(i64::MAX), i64::MAX as f64, true, false),
(Decimal::from(u64::MAX), u64::MAX as f64, true, false),
(
Decimal::from(1844674407370955161u64),
1844674407370955161u64 as f64,
true,
false,
),
(
Decimal::from_bytes(b"18446744073709551616")
.unwrap()
.unwrap(),
// 18446744073709551616 - u64::MAX==1,
// but u64::MAX as f64 == 18446744073709551616
u64::MAX as f64,
true,
false,
),
];
for (input, expect, in_union, overflow) in cs {
let mut ctx = CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
}
.into();
let metadata = make_metadata(in_union);
let r = cast_decimal_as_unsigned_real(&mut ctx, &metadata, &Some(input));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = format!(
"input: {}, expect: {}, in_union: {}, expect_overflow: {}, result: {:?}",
input, expect, in_union, overflow, r
);
check_result(Some(&expect), &r, log.as_str());
check_overflow(&ctx, overflow, log.as_str());
}
}
#[test]
fn test_time_as_real() {
let mut ctx = EvalContext::default();
test_none_with_ctx(cast_any_as_any::<Time, Real>);
// TODO: add more test cases
let cs = vec![
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 6, true).unwrap(),
20000101121314.666600,
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 0, true).unwrap(),
20000101121315.0,
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 3, true).unwrap(),
20000101121314.667,
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 4, true).unwrap(),
20000101121314.6666,
),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let r = cast_any_as_any::<Time, Real>(&mut ctx, &Some(input));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_duration_as_real() {
let mut ctx = EvalContext::default();
// TODO: add more test cases
let cs = vec![
// (input, expect)
(
Duration::parse(&mut ctx, b"17:51:04.78", 2).unwrap(),
175104.78,
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 2).unwrap(),
-175104.78,
),
(
Duration::parse(&mut ctx, b"17:51:04.78", 0).unwrap(),
175105.0,
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 0).unwrap(),
-175105.0,
),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let r = cast_any_as_any::<Duration, Real>(&mut ctx, &Some(input));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_json_as_real() {
let cs: Vec<(Json, f64, Option<i32>)> = vec![
// (input, expect, err_code)
(Json::from_object(BTreeMap::default()).unwrap(), 0f64, None),
(Json::from_array(vec![]).unwrap(), 0f64, None),
(Json::from_i64(10).unwrap(), 10f64, None),
(Json::from_i64(i64::MAX).unwrap(), i64::MAX as f64, None),
(Json::from_i64(i64::MIN).unwrap(), i64::MIN as f64, None),
(Json::from_u64(0).unwrap(), 0f64, None),
(Json::from_u64(u64::MAX).unwrap(), u64::MAX as f64, None),
(Json::from_f64(f64::MAX).unwrap(), f64::MAX, None),
(Json::from_f64(f64::MIN).unwrap(), f64::MIN, None),
(Json::from_string(String::from("10.0")).unwrap(), 10.0, None),
(
Json::from_string(String::from("-10.0")).unwrap(),
-10.0,
None,
),
(Json::from_bool(true).unwrap(), 1f64, None),
(Json::from_bool(false).unwrap(), 0f64, None),
(Json::none().unwrap(), 0f64, None),
(
Json::from_string((0..500).map(|_| '9').collect::<String>()).unwrap(),
f64::MAX,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
(
Json::from_string(
(0..500)
.map(|x| if x == 0 { '-' } else { '9' })
.collect::<String>(),
)
.unwrap(),
f64::MIN,
Some(ERR_TRUNCATE_WRONG_VALUE),
),
];
for (input, expect, err_code) in cs {
let mut ctx = CtxConfig {
truncate_as_warning: true,
..CtxConfig::default()
}
.into();
let r = cast_any_as_any::<Json, Real>(&mut ctx, &Some(input.clone()));
let r = r.map(|x| x.map(|x| x.into_inner()));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
check_warning(&ctx, err_code, log.as_str());
}
}
/// `base_cs`:
/// a vector of (T, T converted to bytes without any of the extra handling done by cast_as_string_helper,
/// T rendered as a string for debug output);
/// the vector must not be empty.
#[allow(clippy::type_complexity)]
fn test_as_string_helper<T: Clone, FnCast>(
base_cs: Vec<(T, Vec<u8>, String)>,
cast_func: FnCast,
func_name: &str,
) where
FnCast: Fn(&mut EvalContext, &RpnFnCallExtra, &Option<T>) -> Result<Option<Bytes>>,
{
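        // FlenType describes how the target flen relates to the length of the
        // expected bytes: equal, one less (to trigger truncation / ERR_DATA_TOO_LONG),
        // one more (to exercise zero padding for binary collation), or unspecified.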
#[derive(Clone, Copy)]
enum FlenType {
Eq,
LessOne,
ExtraOne,
Unspecified,
}
let cs: Vec<(FlenType, bool, &str, FieldTypeTp, Collation, Option<i32>)> = vec![
// (flen_type, pad_zero, charset, tp, collation, err_code)
// normal, flen==str.len
(
FlenType::Eq,
false,
CHARSET_BIN,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Eq,
false,
CHARSET_UTF8,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Eq,
false,
CHARSET_UTF8MB4,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Eq,
false,
CHARSET_ASCII,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Eq,
false,
CHARSET_LATIN1,
FieldTypeTp::String,
Collation::Binary,
None,
),
// normal, flen==UNSPECIFIED_LENGTH
(
FlenType::Unspecified,
false,
CHARSET_BIN,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Unspecified,
false,
CHARSET_UTF8,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Unspecified,
false,
CHARSET_UTF8MB4,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Unspecified,
false,
CHARSET_ASCII,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::Unspecified,
false,
CHARSET_LATIN1,
FieldTypeTp::String,
Collation::Binary,
None,
),
// branch 1 of ProduceStrWithSpecifiedTp
// not bin_str, so no pad_zero
(
FlenType::LessOne,
false,
CHARSET_UTF8,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
Some(ERR_DATA_TOO_LONG),
),
(
FlenType::LessOne,
false,
CHARSET_UTF8MB4,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
Some(ERR_DATA_TOO_LONG),
),
(
FlenType::Eq,
false,
CHARSET_UTF8,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
None,
),
(
FlenType::Eq,
false,
CHARSET_UTF8MB4,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
None,
),
(
FlenType::ExtraOne,
false,
CHARSET_UTF8,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
None,
),
(
FlenType::ExtraOne,
false,
CHARSET_UTF8MB4,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
None,
),
(
FlenType::ExtraOne,
false,
CHARSET_UTF8,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
None,
),
(
FlenType::ExtraOne,
false,
CHARSET_UTF8MB4,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
None,
),
// bin_str, so need pad_zero
(
FlenType::ExtraOne,
true,
CHARSET_UTF8,
FieldTypeTp::String,
Collation::Binary,
None,
),
(
FlenType::ExtraOne,
true,
CHARSET_UTF8MB4,
FieldTypeTp::String,
Collation::Binary,
None,
),
// branch 2 of ProduceStrWithSpecifiedTp
            // branch 2 needs s.len > flen, so pad_zero is never needed
(
FlenType::LessOne,
false,
CHARSET_ASCII,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
Some(ERR_DATA_TOO_LONG),
),
(
FlenType::LessOne,
false,
CHARSET_LATIN1,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
Some(ERR_DATA_TOO_LONG),
),
(
FlenType::LessOne,
false,
CHARSET_BIN,
FieldTypeTp::String,
Collation::Utf8Mb4BinNoPadding,
Some(ERR_DATA_TOO_LONG),
),
            // branch 3 of ProduceStrWithSpecifiedTp
            // will never be reached,
            // because the padZero param is always false
];
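        // For every base case and every config row above: build the ctx and target
        // FieldType, run the cast, and derive the expected bytes by zero-padding
        // (pad_zero rows with binary collation) or truncating to flen; then check
        // both the produced value and any expected warning.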
for (input, bytes, debug_str) in base_cs {
for (flen_type, pad_zero, charset, tp, collation, err_code) in cs.iter() {
let mut ctx = CtxConfig {
truncate_as_warning: true,
..CtxConfig::default()
}
.into();
let res_len = bytes.len();
let flen = match flen_type {
FlenType::Eq => res_len as isize,
FlenType::LessOne => {
if res_len == 0 {
continue;
} else {
(res_len - 1) as isize
}
}
FlenType::ExtraOne => (res_len + 1) as isize,
FlenType::Unspecified => UNSPECIFIED_LENGTH,
};
let rft = FieldTypeConfig {
flen,
charset: Some(charset),
tp: Some(*tp),
collation: Some(*collation),
..FieldTypeConfig::default()
}
.into();
let extra = make_extra(&rft);
let r = cast_func(&mut ctx, &extra, &Some(input.clone()));
let mut expect = bytes.clone();
if *pad_zero && flen > expect.len() as isize {
expect.extend((expect.len()..flen as usize).map(|_| 0u8));
} else if flen != UNSPECIFIED_LENGTH {
expect.truncate(flen as usize);
}
let log = format!(
"func: {:?}, input: {}, expect: {:?}, flen: {}, \
charset: {}, field_type: {}, collation: {}, output: {:?}",
func_name, debug_str, &expect, flen, charset, tp, collation, &r
);
check_result(Some(&expect), &r, log.as_str());
check_warning(&ctx, *err_code, log.as_str());
}
}
}
#[test]
fn test_int_as_string() {
test_none_with_ctx_and_extra(cast_any_as_string::<Int>);
let cs: Vec<(i64, Vec<u8>, String)> = vec![
(
i64::MAX,
i64::MAX.to_string().into_bytes(),
i64::MAX.to_string(),
),
(
i64::MIN,
i64::MIN.to_string().into_bytes(),
i64::MIN.to_string(),
),
];
test_as_string_helper(cs, cast_any_as_string::<Int>, "cast_any_as_string::<Int>");
}
#[test]
fn test_uint_as_string() {
test_none_with_ctx_and_extra(cast_uint_as_string);
let cs: Vec<(u64, Vec<u8>, String)> = vec![
(
i64::MAX as u64,
(i64::MAX as u64).to_string().into_bytes(),
(i64::MAX as u64).to_string(),
),
(
i64::MIN as u64,
(i64::MIN as u64).to_string().into_bytes(),
(i64::MIN as u64).to_string(),
),
(
u64::MAX,
u64::MAX.to_string().into_bytes(),
u64::MAX.to_string(),
),
(0u64, 0u64.to_string().into_bytes(), 0u64.to_string()),
];
test_as_string_helper(
cs,
|ctx, extra, val| {
let val = val.map(|x| x as i64);
cast_uint_as_string(ctx, extra, &val)
},
"cast_uint_as_string",
);
}
#[test]
fn test_float_real_as_string() {
test_none_with_ctx_and_extra(cast_float_real_as_string);
let cs: Vec<(f32, Vec<u8>, String)> = vec![
(
f32::MAX,
f32::MAX.to_string().into_bytes(),
f32::MAX.to_string(),
),
(1.0f32, 1.0f32.to_string().into_bytes(), 1.0f32.to_string()),
(
1.1113f32,
1.1113f32.to_string().into_bytes(),
1.1113f32.to_string(),
),
(0.1f32, 0.1f32.to_string().into_bytes(), 0.1f32.to_string()),
];
test_as_string_helper(
cs,
|ctx, extra, val| {
cast_float_real_as_string(
ctx,
extra,
&val.map(|x| Real::new(f64::from(x)).unwrap()),
)
},
"cast_float_real_as_string",
);
}
#[test]
fn test_double_real_as_string() {
test_none_with_ctx_and_extra(cast_any_as_string::<Real>);
let cs: Vec<(f64, Vec<u8>, String)> = vec![
(
f64::from(f32::MAX),
(f64::from(f32::MAX)).to_string().into_bytes(),
f64::from(f32::MAX).to_string(),
),
(
f64::from(f32::MIN),
(f64::from(f32::MIN)).to_string().into_bytes(),
f64::from(f32::MIN).to_string(),
),
(
f64::MIN,
f64::MIN.to_string().into_bytes(),
f64::MIN.to_string(),
),
(
f64::MAX,
f64::MAX.to_string().into_bytes(),
f64::MAX.to_string(),
),
(1.0f64, 1.0f64.to_string().into_bytes(), 1.0f64.to_string()),
(
1.1113f64,
1.1113f64.to_string().into_bytes(),
1.1113f64.to_string(),
),
(0.1f64, 0.1f64.to_string().into_bytes(), 0.1f64.to_string()),
];
test_as_string_helper(
cs,
|ctx, extra, val| {
cast_any_as_string::<Real>(ctx, extra, &val.map(|x| Real::new(x).unwrap()))
},
"cast_any_as_string::<Real>",
);
}
#[test]
fn test_string_as_string() {
test_none_with_ctx_and_extra(cast_string_as_string);
let cs: Vec<(Vec<u8>, Vec<u8>, String)> = vec![
(
Vec::from(b"".as_ref()),
Vec::from(b"".as_ref()),
String::from("<empty-str>"),
),
(
(0..1024).map(|_| b'0').collect::<Vec<u8>>(),
(0..1024).map(|_| b'0').collect::<Vec<u8>>(),
String::from("1024 zeros('0')"),
),
];
test_as_string_helper(cs, cast_string_as_string, "cast_string_as_string");
}
#[test]
fn test_decimal_as_string() {
test_none_with_ctx_and_extra(cast_any_as_string::<Decimal>);
let cs: Vec<(Decimal, Vec<u8>, String)> = vec![
(
Decimal::from(i64::MAX),
i64::MAX.to_string().into_bytes(),
i64::MAX.to_string(),
),
(
Decimal::from(i64::MIN),
i64::MIN.to_string().into_bytes(),
i64::MIN.to_string(),
),
(
Decimal::from(u64::MAX),
u64::MAX.to_string().into_bytes(),
u64::MAX.to_string(),
),
(
Decimal::from_f64(0.0).unwrap(),
0.0.to_string().into_bytes(),
0.0.to_string(),
),
(
Decimal::from_f64(i64::MAX as f64).unwrap(),
(i64::MAX as f64).to_string().into_bytes(),
(i64::MAX as f64).to_string(),
),
(
Decimal::from_f64(i64::MIN as f64).unwrap(),
(i64::MIN as f64).to_string().into_bytes(),
(i64::MIN as f64).to_string(),
),
(
Decimal::from_f64(u64::MAX as f64).unwrap(),
(u64::MAX as f64).to_string().into_bytes(),
(u64::MAX as f64).to_string(),
),
(
Decimal::from_bytes(b"999999999999999999999999")
.unwrap()
.unwrap(),
Vec::from(b"999999999999999999999999".as_ref()),
String::from("999999999999999999999999"),
),
];
test_as_string_helper(
cs,
cast_any_as_string::<Decimal>,
"cast_any_as_string::<Decimal>",
);
}
#[test]
fn test_time_as_string() {
test_none_with_ctx_and_extra(cast_any_as_string::<Time>);
let mut ctx = EvalContext::default();
        // TODO: add more test cases
let cs: Vec<(Time, Vec<u8>, String)> = vec![
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14", 0, true).unwrap(),
"2000-01-01 12:13:14".to_string().into_bytes(),
"2000-01-01 12:13:14".to_string(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 0, true).unwrap(),
"2000-01-01 12:13:15".to_string().into_bytes(),
"2000-01-01 12:13:15".to_string(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 3, true).unwrap(),
"2000-01-01 12:13:14.667".to_string().into_bytes(),
"2000-01-01 12:13:14.667".to_string(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 4, true).unwrap(),
"2000-01-01 12:13:14.6666".to_string().into_bytes(),
"2000-01-01 12:13:14.6666".to_string(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 6, true).unwrap(),
"2000-01-01 12:13:14.666600".to_string().into_bytes(),
"2000-01-01 12:13:14.666600".to_string(),
),
];
test_as_string_helper(cs, cast_any_as_string::<Time>, "cast_any_as_string::<Time>");
}
#[test]
fn test_duration_as_string() {
test_none_with_ctx_and_extra(cast_any_as_string::<Duration>);
let mut ctx = EvalContext::default();
let cs = vec![
(
Duration::parse(&mut ctx, b"17:51:04.78", 2).unwrap(),
"17:51:04.78".to_string().into_bytes(),
"17:51:04.78".to_string(),
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 2).unwrap(),
"-17:51:04.78".to_string().into_bytes(),
"-17:51:04.78".to_string(),
),
(
Duration::parse(&mut ctx, b"17:51:04.78", 0).unwrap(),
"17:51:05".to_string().into_bytes(),
"17:51:05".to_string(),
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 0).unwrap(),
"-17:51:05".to_string().into_bytes(),
"-17:51:05".to_string(),
),
];
test_as_string_helper(
cs,
cast_any_as_string::<Duration>,
"cast_any_as_string::<Duration>",
);
}
#[test]
fn test_json_as_string() {
test_none_with_ctx(cast_any_as_any::<Json, Bytes>);
        // FIXME: these cases are not exactly the same as TiDB's,
        // for example (left is TiKV, right is TiDB):
// f64::MIN => "1.7976931348623157e308", "1.7976931348623157e+308",
// f64::MAX => "-1.7976931348623157e308", "-1.7976931348623157e+308",
// f32::MIN as f64 => "3.4028234663852886e38", "3.4028234663852886e+38",
// f32::MAX as f64 => "-3.4028234663852886e38", "-3.4028234663852886e+38",
// i64::MIN as f64 => "-9.223372036854776e18", "-9223372036854776000",
// i64::MAX as f64 => "9.223372036854776e18", "9223372036854776000",
// u64::MAX as f64 => "1.8446744073709552e19", "18446744073709552000",
let cs = vec![
(
Json::from_object(BTreeMap::default()).unwrap(),
"{}".to_string(),
),
(Json::from_array(vec![]).unwrap(), "[]".to_string()),
(Json::from_i64(10).unwrap(), "10".to_string()),
(Json::from_i64(i64::MAX).unwrap(), i64::MAX.to_string()),
(Json::from_i64(i64::MIN).unwrap(), i64::MIN.to_string()),
(Json::from_u64(0).unwrap(), "0".to_string()),
(Json::from_u64(u64::MAX).unwrap(), u64::MAX.to_string()),
(Json::from_f64(f64::MIN).unwrap(), format!("{:e}", f64::MIN)),
(Json::from_f64(f64::MAX).unwrap(), format!("{:e}", f64::MAX)),
(
Json::from_f64(f64::from(f32::MIN)).unwrap(),
format!("{:e}", f64::from(f32::MIN)),
),
(
Json::from_f64(f64::from(f32::MAX)).unwrap(),
format!("{:e}", f64::from(f32::MAX)),
),
(
Json::from_f64(i64::MIN as f64).unwrap(),
format!("{:e}", i64::MIN as f64),
),
(
Json::from_f64(i64::MAX as f64).unwrap(),
format!("{:e}", i64::MAX as f64),
),
(
Json::from_f64(u64::MAX as f64).unwrap(),
format!("{:e}", u64::MAX as f64),
),
(Json::from_f64(10.5).unwrap(), "10.5".to_string()),
(Json::from_f64(10.4).unwrap(), "10.4".to_string()),
(Json::from_f64(-10.4).unwrap(), "-10.4".to_string()),
(Json::from_f64(-10.5).unwrap(), "-10.5".to_string()),
(
Json::from_string(String::from("10.0")).unwrap(),
r#""10.0""#.to_string(),
),
(Json::from_bool(true).unwrap(), "true".to_string()),
(Json::from_bool(false).unwrap(), "false".to_string()),
(Json::none().unwrap(), "null".to_string()),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let r = cast_any_as_any::<Json, Bytes>(&mut ctx, &Some(input.clone()));
let r = r.map(|x| x.map(|x| unsafe { String::from_utf8_unchecked(x) }));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
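    // Wraps a cast function with the (ctx, extra, val) signature into the
    // (ctx, extra, metadata, val) shape that `test_as_decimal_helper` expects,
    // simply ignoring the metadata argument.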
macro_rules! cast_closure_with_metadata {
($cast_fn:expr) => {
|ctx, extra, _, val| $cast_fn(ctx, extra, val)
};
}
    /// base_cs
    /// - (cast_func_input, in_union, is_res_unsigned, base_result)
    /// - base_result is the result that **should** be produced by the cast
    ///   function's own logic, before `produce_dec_with_specified_tp` is applied
fn test_as_decimal_helper<T: Clone, FnCast, FnToStr>(
base_cs: Vec<(T, bool, bool, Decimal)>,
cast_func: FnCast,
input_as_debug_str_func: FnToStr,
func_name: &str,
) where
FnCast: Fn(
&mut EvalContext,
&RpnFnCallExtra,
&tipb::InUnionMetadata,
&Option<T>,
) -> Result<Option<Decimal>>,
FnToStr: Fn(&T) -> String,
{
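        // Each row of `cs` below is
        // (cond, sign, is_unsigned, res_type, warning_err_code, in_dml,
        //  overflow_as_warning, truncate_as_warning):
        // `cond` fixes how the target (flen, decimal) is derived from the origin
        // decimal, `sign`/`is_unsigned` select which base results the row applies to,
        // and `res_type` describes the expected shape of the result.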
#[derive(Clone, Copy, Debug)]
#[allow(clippy::enum_variant_names)]
enum Cond {
TargetIntPartLenLessThanOriginIntPartLen,
TargetDecimalBiggerThanOriginDecimal,
TargetDecimalLessThanOriginDecimal,
}
#[derive(Clone, Copy, Debug)]
enum Sign {
Positive,
Negative,
}
#[derive(Clone, Copy, Debug)]
enum ResType {
Zero,
Same,
TruncateToMax,
TruncateToMin,
Round,
}
let cs = vec![
// (
// origin, origin_flen, origin_decimal, res_flen, res_decimal, is_unsigned,
// expect, warning_err_code,
// (InInsertStmt || InUpdateStmt || InDeleteStmt), overflow_as_warning, truncate_as_warning
// )
//
            // origin_flen and origin_decimal are listed so the reader clearly knows
            // the flen and decimal of the origin decimal.
            // res_flen and res_decimal are never UNSPECIFIED_LENGTH here.
// origin not zero, but res's int part len < origin's int part
(
Cond::TargetIntPartLenLessThanOriginIntPartLen,
Sign::Positive,
false,
ResType::TruncateToMax,
Some(ERR_DATA_OUT_OF_RANGE),
false,
true,
false,
),
(
Cond::TargetIntPartLenLessThanOriginIntPartLen,
Sign::Negative,
false,
ResType::TruncateToMin,
Some(ERR_DATA_OUT_OF_RANGE),
false,
true,
false,
),
// origin_decimal < res_decimal
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Positive,
false,
ResType::Same,
None,
false,
false,
false,
),
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Positive,
false,
ResType::Same,
None,
true,
false,
false,
),
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Negative,
false,
ResType::Same,
None,
false,
false,
false,
),
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Positive,
false,
ResType::Same,
None,
true,
false,
false,
),
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Positive,
true,
ResType::Same,
None,
false,
false,
false,
),
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Positive,
true,
ResType::Same,
None,
true,
false,
false,
),
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Negative,
true,
ResType::Zero,
None,
false,
false,
false,
),
(
Cond::TargetDecimalBiggerThanOriginDecimal,
Sign::Negative,
true,
ResType::Zero,
None,
true,
false,
false,
),
// origin_decimal > res_decimal
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Positive,
false,
ResType::Round,
Some(WARN_DATA_TRUNCATED),
false,
false,
true,
),
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Positive,
false,
ResType::Round,
Some(WARN_DATA_TRUNCATED),
true,
false,
false,
),
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Negative,
false,
ResType::Round,
Some(WARN_DATA_TRUNCATED),
false,
false,
true,
),
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Negative,
false,
ResType::Round,
Some(WARN_DATA_TRUNCATED),
true,
false,
true,
),
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Positive,
true,
ResType::Round,
Some(WARN_DATA_TRUNCATED),
false,
false,
true,
),
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Positive,
true,
ResType::Round,
Some(WARN_DATA_TRUNCATED),
true,
false,
false,
),
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Negative,
true,
ResType::Zero,
Some(WARN_DATA_TRUNCATED),
false,
false,
true,
),
(
Cond::TargetDecimalLessThanOriginDecimal,
Sign::Negative,
true,
ResType::Zero,
Some(WARN_DATA_TRUNCATED),
true,
false,
false,
),
// TODO: add test case for Decimal::round failure
];
for (input, in_union, is_res_unsigned, base_res) in base_cs {
for (
cond,
sign,
is_unsigned,
res_type,
mut warning_err_code,
in_dml,
mut overflow_as_warning,
mut truncate_as_warning,
) in cs.clone()
{
let (origin_flen, origin_decimal) = base_res.prec_and_frac();
                // Some cases in `cs` apply only to unsigned results or only to signed
                // results, and some only to negative/positive base_res.
                //
                // The caller provides both negative and positive inputs for every case,
                // so when the sign or unsignedness doesn't match base_res we just skip it.
if is_res_unsigned != is_unsigned {
continue;
}
let base_res = match sign {
Sign::Positive => {
if base_res.is_negative() {
continue;
} else {
base_res
}
}
Sign::Negative => {
if base_res.is_negative() {
base_res
} else {
continue;
}
}
};
let (res_flen, res_decimal) = match cond {
Cond::TargetIntPartLenLessThanOriginIntPartLen => {
if origin_flen - origin_decimal == 0 || origin_flen <= 1 {
continue;
}
(origin_flen - 1, origin_decimal)
}
Cond::TargetDecimalBiggerThanOriginDecimal => {
(origin_flen + 1, origin_decimal + 1)
}
Cond::TargetDecimalLessThanOriginDecimal => {
if origin_decimal == 0 || origin_flen <= 1 {
continue;
}
                        // TODO: if a test case for Decimal::round failure is added,
                        // check whether this setting is still right.
let res = base_res
.clone()
.round((origin_decimal - 1) as i8, RoundMode::HalfEven);
if res.is_zero() {
truncate_as_warning = false;
overflow_as_warning = false;
warning_err_code = None;
}
(origin_flen - 1, origin_decimal - 1)
}
};
let expect = match res_type {
ResType::Zero => Decimal::zero(),
ResType::Same => base_res,
ResType::TruncateToMax => max_decimal(res_flen as u8, res_decimal as u8),
ResType::TruncateToMin => {
max_or_min_dec(true, res_flen as u8, res_decimal as u8)
}
ResType::Round => {
let r = base_res
.clone()
.round(res_decimal as i8, RoundMode::HalfEven)
.unwrap();
if r == base_res {
overflow_as_warning = false;
truncate_as_warning = false;
warning_err_code = None;
}
r
}
};
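                // Run each case under both DML flags; `pd_res` (produce_dec_with_specified_tp
                // applied to base_res) is computed only for the debug log below.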
let ctx_in_dml_flag = vec![Flag::IN_INSERT_STMT, Flag::IN_UPDATE_OR_DELETE_STMT];
for in_dml_flag in ctx_in_dml_flag {
let (res_flen, res_decimal) = (res_flen as isize, res_decimal as isize);
let rft = FieldTypeConfig {
unsigned: is_unsigned,
flen: res_flen,
decimal: res_decimal,
..FieldTypeConfig::default()
}
.into();
let metadata = make_metadata(in_union);
let extra = make_extra(&rft);
let mut ctx = CtxConfig {
overflow_as_warning,
truncate_as_warning,
in_insert_stmt: in_dml_flag == Flag::IN_INSERT_STMT,
in_update_or_delete_stmt: in_dml_flag == Flag::IN_UPDATE_OR_DELETE_STMT,
..CtxConfig::default()
}
.into();
let cast_func_res =
cast_func(&mut ctx, &extra, &metadata, &Some(input.clone()));
let mut ctx = CtxConfig {
overflow_as_warning,
truncate_as_warning,
in_insert_stmt: in_dml_flag == Flag::IN_INSERT_STMT,
in_update_or_delete_stmt: in_dml_flag == Flag::IN_UPDATE_OR_DELETE_STMT,
..CtxConfig::default()
}
.into();
let pd_res = produce_dec_with_specified_tp(&mut ctx, base_res, &rft);
// make log
let cast_func_res_log = cast_func_res
.as_ref()
.map(|x| x.as_ref().map(|x| x.to_string()));
let pd_res_log = pd_res.as_ref().map(|x| x.to_string());
let log = format!(
"test_func_name: {}, \
input: {}, base_res: {}, \
origin_flen: {}, origin_decimal: {}, \
res_flen: {}, res_decimal: {}, \
in_union: {}, is_unsigned: {}, in_dml: {}, in_dml_flag: {:?}, \
cond: {:?}, sign: {:?}, res_type: {:?}, \
overflow_as_warning: {}, truncate_as_warning: {}, expect_warning_err_code: {:?} \
expect: {}, expect_from_produce_dec_with_specified_tp(this is just for debug): {:?}, result: {:?}",
func_name, input_as_debug_str_func(&input), base_res,
origin_flen, origin_decimal,
res_flen, res_decimal,
in_union, is_unsigned, in_dml, in_dml_flag,
cond, sign, res_type,
overflow_as_warning, truncate_as_warning, warning_err_code,
expect.to_string(), pd_res_log, cast_func_res_log
);
check_result(Some(&expect), &cast_func_res, log.as_str());
check_warning(&ctx, warning_err_code, log.as_str());
}
}
}
}
    // These tests depend on the correctness of
// Decimal::from(u64), Decimal::from(i64), Decimal::from_f64(), Decimal::from_bytes()
// Decimal::zero(), Decimal::round, max_or_min_dec, max_decimal
#[test]
fn test_unsigned_int_as_signed_or_unsigned_decimal() {
test_none_with_ctx_and_extra(cast_unsigned_int_as_signed_or_unsigned_decimal);
let cs = vec![
(10u64 as i64, false, true, Decimal::from(10)),
(u64::MAX as i64, false, true, Decimal::from(u64::MAX)),
(i64::MAX as u64 as i64, false, true, Decimal::from(i64::MAX)),
];
test_as_decimal_helper(
cs,
cast_closure_with_metadata!(cast_unsigned_int_as_signed_or_unsigned_decimal),
|x| x.to_string(),
"cast_unsigned_int_as_signed_or_unsigned_decimal",
);
}
#[test]
fn test_signed_int_as_unsigned_decimal() {
test_none_with_ctx_and_extra_and_metadata(cast_signed_int_as_unsigned_decimal);
let cs = vec![
// (input, in_union, is_res_unsigned, base_result)
// negative, in_union
(-1, true, true, Decimal::zero()),
(-10, true, true, Decimal::zero()),
(i64::MIN, true, true, Decimal::zero()),
// not negative, in_union
(1, true, true, Decimal::from(1)),
(10, true, true, Decimal::from(10)),
(i64::MAX, true, true, Decimal::from(i64::MAX)),
// negative, not in_union
            // FIXME: fix these cases (negative to unsigned decimal, without in_union)
            // after fixing the bug in that situation (negative to unsigned decimal, without in_union)
(-1, false, true, Decimal::from(-1i64 as u64)),
(-10, false, true, Decimal::from(-10i64 as u64)),
(
i64::MIN + 1,
false,
true,
Decimal::from((i64::MIN + 1) as u64),
),
// not negative, not in_union
(1, false, true, Decimal::from(1)),
(10, false, true, Decimal::from(10)),
(i64::MAX, false, true, Decimal::from(i64::MAX)),
];
test_as_decimal_helper(
cs,
cast_signed_int_as_unsigned_decimal,
|x| x.to_string(),
"cast_signed_int_as_unsigned_decimal",
);
}
#[test]
fn test_signed_int_as_signed_decimal() {
test_none_with_ctx_and_extra(cast_any_as_decimal::<Int>);
let cs: Vec<(i64, bool, bool, Decimal)> = vec![
// (input, in_union, is_res_unsigned, base_result)
(-1, false, false, Decimal::from(-1)),
(-10, false, false, Decimal::from(-10)),
(i64::MIN, false, false, Decimal::from(i64::MIN)),
(1, false, false, Decimal::from(1)),
(10, false, false, Decimal::from(10)),
(i64::MAX, false, false, Decimal::from(i64::MAX)),
];
test_as_decimal_helper(
cs,
cast_closure_with_metadata!(cast_any_as_decimal::<Int>),
|x| x.to_string(),
"cast_signed_int_as_signed_decimal",
);
}
#[test]
fn test_real_as_decimal() {
test_none_with_ctx_and_extra_and_metadata(cast_real_as_decimal);
        // TODO: add a test case that makes Decimal::from_f64 return an error
let cs = vec![
            // (input, in_union, is_res_unsigned, base_result)
// neg and in_union
(-10.0, true, false, Decimal::zero()),
(i64::MIN as f64, true, false, Decimal::zero()),
(-1.0, true, false, Decimal::zero()),
(-0.0001, true, false, Decimal::zero()),
// not neg and in_union
(10.0, true, false, Decimal::from_f64(10.0).unwrap()),
(
i64::MAX as f64,
true,
false,
Decimal::from_f64(i64::MAX as f64).unwrap(),
),
(1.0, true, false, Decimal::from_f64(1.0).unwrap()),
(0.0001, true, false, Decimal::from_f64(0.0001).unwrap()),
// neg and not in_union
(-10.0, false, false, Decimal::from_f64(-10.0).unwrap()),
(
i64::MIN as f64,
false,
false,
Decimal::from_f64(i64::MIN as f64).unwrap(),
),
(-1.0, false, false, Decimal::from_f64(-1.0).unwrap()),
(-0.0001, false, false, Decimal::from_f64(-0.0001).unwrap()),
// not neg and not in_union
(10.0, false, false, Decimal::from_f64(10.0).unwrap()),
(
i64::MAX as f64,
false,
false,
Decimal::from_f64(i64::MAX as f64).unwrap(),
),
(1.0, false, false, Decimal::from_f64(1.0).unwrap()),
(0.0001, false, false, Decimal::from_f64(0.0001).unwrap()),
];
test_as_decimal_helper(
cs,
|ctx, extra, metadata, val| {
let val = val.map(|x| Real::new(x).unwrap());
cast_real_as_decimal(ctx, extra, metadata, &val)
},
|x| x.to_string(),
"cast_real_as_decimal",
);
}
#[test]
fn test_string_as_signed_decimal() {
test_none_with_ctx_and_extra(cast_any_as_decimal::<Bytes>);
        // TODO: add a test case that makes Decimal::from_bytes return an error.
let cs = vec![
// (input, in_union, is_res_unsigned, base_result)
// neg and in_union
("-10", true, false, Decimal::from(-10)),
("-1", true, false, Decimal::from(-1)),
(
"-0.001",
true,
false,
Decimal::from_bytes(b"-0.001").unwrap().unwrap(),
),
(
"-9223372036854775807",
true,
false,
Decimal::from(-9223372036854775807i64),
),
(
"-9223372036854775808",
true,
false,
Decimal::from(-9223372036854775808i64),
),
(
"-9223372036854775808.001",
true,
false,
Decimal::from_bytes(b"-9223372036854775808.001")
.unwrap()
.unwrap(),
),
(
"-9223372036854775808.002",
true,
false,
Decimal::from_bytes(b"-9223372036854775808.002")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615",
true,
false,
Decimal::from_bytes(b"-18446744073709551615")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615.001",
true,
false,
Decimal::from_bytes(b"-18446744073709551615.001")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615.11",
true,
false,
Decimal::from_bytes(b"-18446744073709551615.11")
.unwrap()
.unwrap(),
),
// not neg and in_union
("10", true, false, Decimal::from(10)),
("1", true, false, Decimal::from(1)),
("0.001", true, false, Decimal::from_f64(0.001).unwrap()),
(
"9223372036854775807",
true,
false,
Decimal::from(9223372036854775807u64),
),
(
"9223372036854775808",
true,
false,
Decimal::from(9223372036854775808u64),
),
(
"9223372036854775808.001",
true,
false,
Decimal::from_bytes(b"9223372036854775808.001")
.unwrap()
.unwrap(),
),
(
"9223372036854775808.002",
true,
false,
Decimal::from_bytes(b"9223372036854775808.002")
.unwrap()
.unwrap(),
),
(
"18446744073709551615",
true,
false,
Decimal::from(18446744073709551615u64),
),
(
"18446744073709551615.001",
true,
false,
Decimal::from_bytes(b"18446744073709551615.001")
.unwrap()
.unwrap(),
),
(
"18446744073709551615.11",
true,
false,
Decimal::from_bytes(b"18446744073709551615.11")
.unwrap()
.unwrap(),
),
// neg and not in_union
("-10", false, false, Decimal::from(-10)),
("-1", false, false, Decimal::from(-1)),
("-0.001", false, false, Decimal::from_f64(-0.001).unwrap()),
(
"-9223372036854775807",
false,
true,
Decimal::from(-9223372036854775807i64),
),
(
"-9223372036854775808",
false,
true,
Decimal::from(-9223372036854775808i64),
),
(
"-9223372036854775808.001",
false,
true,
Decimal::from_bytes(b"-9223372036854775808.001")
.unwrap()
.unwrap(),
),
(
"-9223372036854775808.002",
false,
true,
Decimal::from_bytes(b"-9223372036854775808.002")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615",
false,
true,
Decimal::from_bytes(b"-18446744073709551615")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615.001",
false,
true,
Decimal::from_bytes(b"-18446744073709551615.001")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615.11",
false,
true,
Decimal::from_bytes(b"-18446744073709551615.11")
.unwrap()
.unwrap(),
),
// not neg and not in_union
("10", false, false, Decimal::from(10)),
("1", false, false, Decimal::from(1)),
("0.001", false, false, Decimal::from_f64(0.001).unwrap()),
(
"9223372036854775807",
false,
true,
Decimal::from(9223372036854775807u64),
),
(
"9223372036854775808",
false,
true,
Decimal::from(9223372036854775808u64),
),
(
"9223372036854775808.001",
false,
true,
Decimal::from_bytes(b"9223372036854775808.001")
.unwrap()
.unwrap(),
),
(
"9223372036854775808.002",
false,
true,
Decimal::from_bytes(b"9223372036854775808.002")
.unwrap()
.unwrap(),
),
(
"18446744073709551615",
false,
true,
Decimal::from(18446744073709551615u64),
),
(
"18446744073709551615.001",
false,
true,
Decimal::from_bytes(b"18446744073709551615.001")
.unwrap()
.unwrap(),
),
(
"18446744073709551615.11",
false,
true,
Decimal::from_bytes(b"18446744073709551615.11")
.unwrap()
.unwrap(),
),
// can not convert to decimal
("abcde", false, false, Decimal::zero()),
("", false, false, Decimal::zero()),
("s", false, false, Decimal::zero()),
("abcde", true, false, Decimal::zero()),
("", true, false, Decimal::zero()),
("s", true, false, Decimal::zero()),
("abcde", false, true, Decimal::zero()),
("", false, true, Decimal::zero()),
("s", false, true, Decimal::zero()),
("abcde", true, true, Decimal::zero()),
("", true, true, Decimal::zero()),
("s", true, true, Decimal::zero()),
];
test_as_decimal_helper(
cs,
|ctx, extra, _, val| {
let val = val.map(|x| x.as_bytes().to_vec());
cast_any_as_decimal::<Bytes>(ctx, extra, &val)
},
|x| (*x).to_string(),
"cast_string_as_signed_decimal",
)
}
#[test]
fn test_string_as_unsigned_decimal() {
test_none_with_ctx_and_extra_and_metadata(cast_string_as_unsigned_decimal);
let cs = vec![
// (input, in_union, is_res_unsigned, base_result)
// neg and in_union
("-10", true, true, Decimal::zero()),
("-1", true, true, Decimal::zero()),
("-0.001", true, true, Decimal::zero()),
("-9223372036854775807", true, true, Decimal::zero()),
("-9223372036854775808", true, true, Decimal::zero()),
("-9223372036854775808.001", true, true, Decimal::zero()),
("-9223372036854775808.002", true, true, Decimal::zero()),
("-18446744073709551615", true, true, Decimal::zero()),
("-18446744073709551615.001", true, true, Decimal::zero()),
("-18446744073709551615.11", true, true, Decimal::zero()),
// not neg and in_union
("10", true, true, Decimal::from(10)),
("1", true, true, Decimal::from(1)),
("0.001", true, true, Decimal::from_f64(0.001).unwrap()),
(
"9223372036854775807",
true,
true,
Decimal::from(9223372036854775807u64),
),
(
"9223372036854775808",
true,
true,
Decimal::from(9223372036854775808u64),
),
(
"9223372036854775808.001",
true,
true,
Decimal::from_bytes(b"9223372036854775808.001")
.unwrap()
.unwrap(),
),
(
"9223372036854775808.002",
true,
true,
Decimal::from_bytes(b"9223372036854775808.002")
.unwrap()
.unwrap(),
),
(
"18446744073709551615",
true,
true,
Decimal::from(18446744073709551615u64),
),
(
"18446744073709551615.001",
true,
true,
Decimal::from_bytes(b"18446744073709551615.001")
.unwrap()
.unwrap(),
),
(
"18446744073709551615.11",
true,
true,
Decimal::from_bytes(b"18446744073709551615.11")
.unwrap()
.unwrap(),
),
// neg and not in_union
("-10", false, true, Decimal::from(-10)),
("-1", false, true, Decimal::from(-1)),
("-0.001", false, true, Decimal::from_f64(-0.001).unwrap()),
(
"-9223372036854775807",
false,
true,
Decimal::from(-9223372036854775807i64),
),
(
"-9223372036854775808",
false,
true,
Decimal::from(-9223372036854775808i64),
),
(
"-9223372036854775808.001",
false,
true,
Decimal::from_bytes(b"-9223372036854775808.001")
.unwrap()
.unwrap(),
),
(
"-9223372036854775808.002",
false,
true,
Decimal::from_bytes(b"-9223372036854775808.002")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615",
false,
true,
Decimal::from_bytes(b"-18446744073709551615")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615.001",
false,
true,
Decimal::from_bytes(b"-18446744073709551615.001")
.unwrap()
.unwrap(),
),
(
"-18446744073709551615.11",
false,
true,
Decimal::from_bytes(b"-18446744073709551615.11")
.unwrap()
.unwrap(),
),
// not neg and not in_union
("10", false, true, Decimal::from(10)),
("1", false, true, Decimal::from(1)),
("0.001", false, true, Decimal::from_f64(0.001).unwrap()),
(
"9223372036854775807",
false,
true,
Decimal::from(9223372036854775807u64),
),
(
"9223372036854775808",
false,
true,
Decimal::from(9223372036854775808u64),
),
(
"9223372036854775808.001",
false,
true,
Decimal::from_bytes(b"9223372036854775808.001")
.unwrap()
.unwrap(),
),
(
"9223372036854775808.002",
false,
true,
Decimal::from_bytes(b"9223372036854775808.002")
.unwrap()
.unwrap(),
),
(
"18446744073709551615",
false,
true,
Decimal::from(18446744073709551615u64),
),
(
"18446744073709551615.001",
false,
true,
Decimal::from_bytes(b"18446744073709551615.001")
.unwrap()
.unwrap(),
),
(
"18446744073709551615.11",
false,
true,
Decimal::from_bytes(b"18446744073709551615.11")
.unwrap()
.unwrap(),
),
// can not convert to decimal
("abcde", false, false, Decimal::zero()),
("", false, false, Decimal::zero()),
("s", false, false, Decimal::zero()),
("abcde", true, false, Decimal::zero()),
("", true, false, Decimal::zero()),
("s", true, false, Decimal::zero()),
("abcde", false, true, Decimal::zero()),
("", false, true, Decimal::zero()),
("s", false, true, Decimal::zero()),
("abcde", true, true, Decimal::zero()),
("", true, true, Decimal::zero()),
("s", true, true, Decimal::zero()),
];
test_as_decimal_helper(
cs,
|ctx, extra, metadata, val| {
let val = val.map(|x| x.as_bytes().to_vec());
cast_string_as_unsigned_decimal(ctx, extra, metadata, &val)
},
|x| (*x).to_string(),
"cast_string_as_unsigned_decimal",
);
}
#[test]
fn test_decimal_as_signed_decimal() {
test_none_with_ctx_and_extra(cast_decimal_as_signed_decimal);
        // the result is a signed decimal; cases cover both in_union and not in_union
let cs = vec![
// (input, in_union, is_res_unsigned, base_result)
// in_union
(Decimal::zero(), true, false, Decimal::zero()),
(
Decimal::from_f64(-10f64).unwrap(),
true,
false,
Decimal::from_f64(-10f64).unwrap(),
),
(
Decimal::from(i64::MIN),
true,
false,
Decimal::from(i64::MIN),
),
(
Decimal::from(i64::MAX),
true,
false,
Decimal::from(i64::MAX),
),
(
Decimal::from(u64::MAX),
true,
false,
Decimal::from(u64::MAX),
),
// not in_union
(Decimal::zero(), false, false, Decimal::zero()),
(
Decimal::from_f64(-10f64).unwrap(),
false,
false,
Decimal::from_f64(-10f64).unwrap(),
),
(
Decimal::from(i64::MIN),
false,
false,
Decimal::from(i64::MIN),
),
(
Decimal::from(i64::MAX),
false,
false,
Decimal::from(i64::MAX),
),
(
Decimal::from(u64::MAX),
false,
false,
Decimal::from(u64::MAX),
),
];
test_as_decimal_helper(
cs,
cast_closure_with_metadata!(cast_decimal_as_signed_decimal),
|x| x.to_string(),
"cast_decimal_as_signed_decimal",
);
}
#[test]
fn test_decimal_as_unsigned_decimal() {
test_none_with_ctx_and_extra_and_metadata(cast_decimal_as_unsigned_decimal);
// in_union and result is unsigned
let cs = vec![
// (input, in_union, is_res_unsigned, base_result)
// neg and in_union
(
Decimal::from_f64(-10f64).unwrap(),
true,
true,
Decimal::zero(),
),
(Decimal::from(i64::MIN), true, true, Decimal::zero()),
// not neg and in_union
(Decimal::zero(), true, true, Decimal::zero()),
(
Decimal::from_f64(10f64).unwrap(),
true,
true,
Decimal::from_f64(10f64).unwrap(),
),
(Decimal::from(i64::MAX), true, true, Decimal::from(i64::MAX)),
(Decimal::from(u64::MAX), true, true, Decimal::from(u64::MAX)),
// neg and not in_union
(
Decimal::from_f64(-10f64).unwrap(),
false,
true,
Decimal::from_f64(-10f64).unwrap(),
),
(
Decimal::from(i64::MIN),
false,
true,
Decimal::from(i64::MIN),
),
// not neg and not in_union
(Decimal::zero(), true, true, Decimal::zero()),
(
Decimal::from_f64(10f64).unwrap(),
true,
true,
Decimal::from_f64(10f64).unwrap(),
),
(Decimal::from(i64::MAX), true, true, Decimal::from(i64::MAX)),
(Decimal::from(u64::MAX), true, true, Decimal::from(u64::MAX)),
];
test_as_decimal_helper(
cs,
cast_decimal_as_unsigned_decimal,
|x| x.to_string(),
"cast_decimal_as_unsigned_decimal",
);
}
#[test]
fn test_time_as_decimal() {
test_none_with_ctx_and_extra(cast_any_as_decimal::<Time>);
let mut ctx = EvalContext::default();
        // TODO: add more test cases
let cs: Vec<(Time, bool, bool, Decimal)> = vec![
// (cast_func_input, in_union, is_res_unsigned, base_result)
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14", 0, false).unwrap(),
false,
false,
Decimal::from_bytes(b"20000101121314").unwrap().unwrap(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 0, true).unwrap(),
false,
false,
Decimal::from_bytes(b"20000101121315").unwrap().unwrap(),
),
];
test_as_decimal_helper(
cs,
cast_closure_with_metadata!(cast_any_as_decimal::<Time>),
|x| x.to_string(),
"cast_time_as_decimal",
)
}
#[test]
fn test_duration_as_decimal() {
test_none_with_ctx_and_extra(cast_any_as_decimal::<Duration>);
let mut ctx = EvalContext::default();
        // TODO: add more test cases
let cs: Vec<(Duration, bool, bool, Decimal)> = vec![
// (input, in_union, is_res_unsigned, base_result)
(
Duration::parse(&mut ctx, b"17:51:04.78", 2).unwrap(),
false,
false,
Decimal::from_f64(175104.78).unwrap(),
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 2).unwrap(),
false,
false,
Decimal::from_f64(-175104.78).unwrap(),
),
(
Duration::parse(&mut ctx, b"17:51:04.78", 0).unwrap(),
false,
false,
Decimal::from(175105),
),
(
Duration::parse(&mut ctx, b"-17:51:04.78", 0).unwrap(),
false,
false,
Decimal::from(-175105),
),
];
test_as_decimal_helper(
cs,
cast_closure_with_metadata!(cast_any_as_decimal::<Duration>),
            |x| x.to_string(),
            "cast_duration_as_decimal",
)
}
#[test]
fn test_json_as_decimal() {
test_none_with_ctx_and_extra(cast_any_as_decimal::<Json>);
        // TODO: add a test case that makes Decimal::from_str fail
let cs: Vec<(Json, bool, bool, Decimal)> = vec![
// (cast_func_input, in_union, is_res_unsigned, base_result)
(
Json::from_object(BTreeMap::default()).unwrap(),
false,
false,
Decimal::zero(),
),
(
Json::from_array(vec![]).unwrap(),
false,
false,
Decimal::zero(),
),
(
Json::from_i64(10).unwrap(),
false,
false,
Decimal::from_f64(10f64).unwrap(),
),
(
Json::from_i64(i64::MAX).unwrap(),
false,
false,
Decimal::from_f64(i64::MAX as f64).unwrap(),
),
(
Json::from_i64(i64::MIN).unwrap(),
false,
false,
Decimal::from_f64(i64::MIN as f64).unwrap(),
),
(Json::from_u64(0).unwrap(), false, false, Decimal::zero()),
(
Json::from_u64(i64::MAX as u64).unwrap(),
false,
false,
Decimal::from_f64(i64::MAX as f64).unwrap(),
),
(
Json::from_u64(u64::MAX).unwrap(),
false,
false,
Decimal::from_f64(u64::MAX as f64).unwrap(),
),
(
Json::from_f64(i64::MAX as f64).unwrap(),
false,
false,
Decimal::from_f64(i64::MAX as f64).unwrap(),
),
(
Json::from_f64(i64::MIN as f64).unwrap(),
false,
false,
Decimal::from_f64(i64::MIN as f64).unwrap(),
),
(
Json::from_f64(u64::MAX as f64).unwrap(),
false,
false,
Decimal::from_f64(u64::MAX as f64).unwrap(),
),
(
Json::from_string("10.0".to_string()).unwrap(),
false,
false,
Decimal::from_bytes(b"10.0").unwrap().unwrap(),
),
(
Json::from_string("-10.0".to_string()).unwrap(),
false,
false,
Decimal::from_bytes(b"-10.0").unwrap().unwrap(),
),
(
Json::from_string("9999999999999999999".to_string()).unwrap(),
false,
false,
Decimal::from_bytes(b"9999999999999999999")
.unwrap()
.unwrap(),
),
(
Json::from_string("-9999999999999999999".to_string()).unwrap(),
false,
false,
Decimal::from_bytes(b"-9999999999999999999")
.unwrap()
.unwrap(),
),
(
Json::from_bool(true).unwrap(),
false,
false,
Decimal::from_f64(1f64).unwrap(),
),
(
Json::from_bool(false).unwrap(),
false,
false,
Decimal::zero(),
),
(Json::none().unwrap(), false, false, Decimal::zero()),
];
test_as_decimal_helper(
cs,
cast_closure_with_metadata!(cast_any_as_decimal::<Json>),
|x| x.to_string(),
"cast_json_as_decimal",
);
}
#[test]
fn test_int_as_duration() {
// None
{
let output: Option<Real> = RpnFnScalarEvaluator::new()
.push_param(ScalarValue::Bytes(None))
.evaluate(ScalarFuncSig::CastIntAsDuration)
.unwrap();
assert_eq!(output, None);
}
let mut ctx = EvalContext::default();
        // These cases are copied from duration.rs::tests::test_from_i64
let cs: Vec<(
i64,
isize,
tidb_query_datatype::codec::Result<Option<Duration>>,
bool,
)> = vec![
// (input, fsp, expect, overflow)
(
101010,
0,
Ok(Some(Duration::parse(&mut ctx, b"10:10:10", 0).unwrap())),
false,
),
(
101010,
5,
Ok(Some(Duration::parse(&mut ctx, b"10:10:10", 5).unwrap())),
false,
),
(
8385959,
0,
Ok(Some(Duration::parse(&mut ctx, b"838:59:59", 0).unwrap())),
false,
),
(
8385959,
6,
Ok(Some(Duration::parse(&mut ctx, b"838:59:59", 6).unwrap())),
false,
),
(
-101010,
0,
Ok(Some(Duration::parse(&mut ctx, b"-10:10:10", 0).unwrap())),
false,
),
(
-101010,
5,
Ok(Some(Duration::parse(&mut ctx, b"-10:10:10", 5).unwrap())),
false,
),
(
-8385959,
0,
Ok(Some(Duration::parse(&mut ctx, b"-838:59:59", 0).unwrap())),
false,
),
(
-8385959,
6,
Ok(Some(Duration::parse(&mut ctx, b"-838:59:59", 6).unwrap())),
false,
),
// overflow as warning
(
8385960,
0,
Ok(Some(Duration::parse(&mut ctx, b"838:59:59", 0).unwrap())),
true,
),
(
-8385960,
0,
Ok(Some(Duration::parse(&mut ctx, b"-838:59:59", 0).unwrap())),
true,
),
            // will be truncated
(8376049, 0, Err(Error::truncated_wrong_val("", "")), false),
(8375960, 0, Err(Error::truncated_wrong_val("", "")), false),
(8376049, 0, Err(Error::truncated_wrong_val("", "")), false),
(
10000000000,
0,
Ok(Some(Duration::parse(&mut ctx, b"0:0:0", 0).unwrap())),
false,
),
(
10000235959,
0,
Ok(Some(Duration::parse(&mut ctx, b"23:59:59", 0).unwrap())),
false,
),
(
-10000235959,
0,
Ok(Some(Duration::parse(&mut ctx, b"-838:59:59", 0).unwrap())),
false,
),
];
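        // Evaluate each case through the RPN framework with overflow_as_warning enabled,
        // then check the value (or error) and, for overflow cases, the
        // ERR_DATA_OUT_OF_RANGE warning.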
for (input, fsp, expected, overflow) in cs {
let (result, ctx) = RpnFnScalarEvaluator::new()
.context(CtxConfig {
overflow_as_warning: true,
..CtxConfig::default()
})
.push_param(input)
.evaluate_raw(
FieldTypeConfig {
tp: Some(FieldTypeTp::Duration),
decimal: fsp,
..FieldTypeConfig::default()
},
ScalarFuncSig::CastIntAsDuration,
);
match expected {
Ok(expected) => {
let result: Option<Duration> = result.unwrap().into();
assert_eq!(
result, expected,
"input:{:?}, expected:{:?}, got:{:?}",
input, expected, result,
);
}
Err(_) => {
assert!(
result.is_err(),
"input:{:?}, expected err:{:?}, got:{:?}",
input,
expected,
result
);
}
}
if overflow {
assert_eq!(ctx.warnings.warning_cnt, 1);
assert_eq!(ctx.warnings.warnings[0].get_code(), ERR_DATA_OUT_OF_RANGE);
}
}
}
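    /// Drives duration-cast tests: for every value in `base_cs` and every fsp in
    /// MIN_FSP..=MAX_FSP, `func_cast` is invoked and its result is compared against
    /// `Duration::parse` on the string produced by `func_to_cast_str`, including the
    /// expected overflow/truncation warnings. `func_to_debug_str` is only used for logs.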
fn test_as_duration_helper<T: Clone, FnCast>(
base_cs: Vec<T>,
func_to_cast_str: impl Fn(&T) -> String,
func_to_debug_str: impl Fn(&T) -> String,
func_cast: FnCast,
func_name: &str,
) where
FnCast: Fn(&mut EvalContext, &RpnFnCallExtra, &Option<T>) -> Result<Option<Duration>>,
{
        // cast_real_as_duration calls `Duration::parse` directly,
        // and `Duration::parse` is tested in duration.rs.
        // Our test here is to make sure that the result is the same as calling
        // `Duration::parse`, no matter whether cast_real_as_duration calls it directly.
for val in base_cs {
for fsp in MIN_FSP..=MAX_FSP {
let mut ctx = CtxConfig {
overflow_as_warning: true,
truncate_as_warning: true,
..CtxConfig::default()
}
.into();
let rft = FieldTypeConfig {
decimal: fsp as isize,
..FieldTypeConfig::default()
}
.into();
let extra = make_extra(&rft);
let result = func_cast(&mut ctx, &extra, &Some(val.clone()));
let val_str = func_to_cast_str(&val);
let base_expect = Duration::parse(&mut ctx, val_str.as_bytes(), fsp);
// make log
let result_str = result.as_ref().map(|x| x.map(|x| x.to_string()));
match base_expect {
Err(e) => match e.code() {
ERR_DATA_OUT_OF_RANGE => {
let log = format!(
"func_name:{}, input: {}, fsp: {}, output: {:?}, expect: {}, expect_warn: {}",
func_name, func_to_debug_str(&val), fsp, result_str, Duration::zero(), ERR_DATA_OUT_OF_RANGE
);
check_overflow(&ctx, true, log.as_str());
check_result(None, &result, log.as_str());
}
ERR_TRUNCATE_WRONG_VALUE => {
let log = format!(
"func_name:{}, input: {}, fsp: {}, output: {:?}, output_warn: {:?}, expect: {}, expect_warn: {}",
func_name, func_to_debug_str(&val), fsp, result_str, ctx.warnings.warnings, Duration::zero(), WARN_DATA_TRUNCATED
);
check_warning(&ctx, Some(ERR_TRUNCATE_WRONG_VALUE), log.as_str());
check_result(None, &result, log.as_str());
}
_ => {
let expect_err: tidb_query_common::error::Error = e.into();
let log = format!(
"func_name:{}, input: {}, fsp: {}, output: {:?}, output_warn: {:?}, expect: {:?}",
func_name, func_to_debug_str(&val), fsp, result_str, ctx.warnings.warnings, expect_err
);
assert!(result.is_err(), "log: {}", log)
}
},
Ok(v) => {
let log = format!(
"func_name:{}, input: {}, fsp: {}, output: {:?}, output_warn: {:?}, expect: {:?}",
func_name, func_to_debug_str(&val), fsp, result_str, ctx.warnings.warnings, v
);
check_result(Some(&v), &result, log.as_str())
}
}
}
}
}
#[test]
fn test_real_as_duration() {
test_none_with_ctx_and_extra(cast_real_as_duration);
let cs: Vec<f64> = vec![
101112.0,
101112.123456,
1112.0,
12.0,
-0.123,
12345.0,
-123.0,
-23.0,
];
test_as_duration_helper(
cs,
|x| x.to_string(),
|x| x.to_string(),
|ctx, extra, val| {
let val = val.map(|x| Real::new(x).unwrap());
cast_real_as_duration(ctx, extra, &val)
},
"cast_real_as_duration",
)
}
#[test]
fn test_bytes_as_duration() {
test_none_with_ctx_and_extra(cast_bytes_as_duration);
let cs: Vec<Bytes> = vec![
b"17:51:04.78".to_vec(),
b"-17:51:04.78".to_vec(),
b"17:51:04.78".to_vec(),
b"-17:51:04.78".to_vec(),
];
test_as_duration_helper(
cs,
|x| String::from_utf8_lossy(x).to_string(),
|x| String::from_utf8_lossy(x).to_string(),
cast_bytes_as_duration,
"cast_bytes_as_duration",
);
}
#[test]
fn test_decimal_as_duration() {
test_none_with_ctx_and_extra(cast_decimal_as_duration);
let cs = vec![
Decimal::from(i64::MIN),
Decimal::from(i64::MAX),
Decimal::from(u64::MAX),
Decimal::zero(),
Decimal::from_bytes(b"-9223372036854775808")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"9223372036854775808")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"-9223372036854775809")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"9223372036854775809")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"-18446744073709551615")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"18446744073709551615")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"-18446744073709551616")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"18446744073709551616")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"-184467440737095516160")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"184467440737095516160")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"-99999999999999999999999999999999")
.unwrap()
.unwrap(),
Decimal::from_bytes(b"99999999999999999999999999999999")
.unwrap()
.unwrap(),
];
test_as_duration_helper(
cs,
|x| x.to_string(),
|x| x.to_string(),
cast_decimal_as_duration,
"cast_decimal_as_duration",
);
}
#[test]
fn test_time_as_duration() {
test_none_with_ctx_and_extra(cast_time_as_duration);
// copy from test_convert_to_duration
let cs = vec![
// (input, input's fsp, output's fsp, output)
("2012-12-31 11:30:45.123456", 4, 0, "11:30:45"),
("2012-12-31 11:30:45.123456", 4, 1, "11:30:45.1"),
("2012-12-31 11:30:45.123456", 4, 2, "11:30:45.12"),
("2012-12-31 11:30:45.123456", 4, 3, "11:30:45.124"),
("2012-12-31 11:30:45.123456", 4, 4, "11:30:45.1235"),
("2012-12-31 11:30:45.123456", 4, 5, "11:30:45.12350"),
("2012-12-31 11:30:45.123456", 4, 6, "11:30:45.123500"),
("2012-12-31 11:30:45.123456", 6, 0, "11:30:45"),
("2012-12-31 11:30:45.123456", 6, 1, "11:30:45.1"),
("2012-12-31 11:30:45.123456", 6, 2, "11:30:45.12"),
("2012-12-31 11:30:45.123456", 6, 3, "11:30:45.123"),
("2012-12-31 11:30:45.123456", 6, 4, "11:30:45.1235"),
("2012-12-31 11:30:45.123456", 6, 5, "11:30:45.12346"),
("2012-12-31 11:30:45.123456", 6, 6, "11:30:45.123456"),
("2012-12-31 11:30:45.123456", 0, 0, "11:30:45"),
("2012-12-31 11:30:45.123456", 0, 1, "11:30:45.0"),
("2012-12-31 11:30:45.123456", 0, 2, "11:30:45.00"),
("2012-12-31 11:30:45.123456", 0, 3, "11:30:45.000"),
("2012-12-31 11:30:45.123456", 0, 4, "11:30:45.0000"),
("2012-12-31 11:30:45.123456", 0, 5, "11:30:45.00000"),
("2012-12-31 11:30:45.123456", 0, 6, "11:30:45.000000"),
("0000-00-00 00:00:00", 6, 0, "00:00:00"),
("0000-00-00 00:00:00", 6, 1, "00:00:00.0"),
("0000-00-00 00:00:00", 6, 2, "00:00:00.00"),
("0000-00-00 00:00:00", 6, 3, "00:00:00.000"),
("0000-00-00 00:00:00", 6, 4, "00:00:00.0000"),
("0000-00-00 00:00:00", 6, 5, "00:00:00.00000"),
("0000-00-00 00:00:00", 6, 6, "00:00:00.000000"),
];
for (s, fsp, expect_fsp, expect) in cs {
let mut ctx = EvalContext::default();
let rft = FieldTypeConfig {
decimal: expect_fsp,
..FieldTypeConfig::default()
}
.into();
let extra = make_extra(&rft);
let input_time = Time::parse_datetime(&mut ctx, s, fsp, true).unwrap();
let expect_time =
Duration::parse(&mut ctx, expect.as_bytes(), expect_fsp as i8).unwrap();
let result = cast_time_as_duration(&mut ctx, &extra, &Some(input_time));
let result_str = result.as_ref().map(|x| x.as_ref().map(|x| x.to_string()));
let log = format!(
"input: {}, fsp: {}, expect_fsp: {}, expect: {}, output: {:?}",
s, fsp, expect_fsp, expect, result_str,
);
check_result(Some(&expect_time), &result, log.as_str());
}
}
#[test]
fn test_duration_as_duration() {
test_none_with_extra(cast_duration_as_duration);
let cs = vec![
("11:30:45.123456", 6, 0, "11:30:45"),
("11:30:45.123456", 6, 1, "11:30:45.1"),
("11:30:45.123456", 6, 2, "11:30:45.12"),
("11:30:45.123456", 6, 3, "11:30:45.123"),
("11:30:45.123456", 6, 4, "11:30:45.1235"),
("11:30:45.123456", 6, 5, "11:30:45.12346"),
("11:30:45.123456", 6, 6, "11:30:45.123456"),
];
for (input, input_fsp, output_fsp, expect) in cs {
let rft = FieldTypeConfig {
decimal: output_fsp as isize,
..FieldTypeConfig::default()
}
.into();
let extra = make_extra(&rft);
let mut ctx = EvalContext::default();
let dur = Duration::parse(&mut ctx, input.as_bytes(), input_fsp).unwrap();
let expect = Duration::parse(&mut ctx, expect.as_bytes(), output_fsp).unwrap();
let r = cast_duration_as_duration(&extra, &Some(dur));
let result_str = r.as_ref().map(|x| x.map(|x| x.to_string()));
let log = format!(
"input: {}, input_fsp: {}, output_fsp: {}, expect: {}, output: {:?}",
input, input_fsp, output_fsp, expect, result_str
);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_json_as_duration() {
test_none_with_ctx_and_extra(cast_json_as_duration);
        // the case where Json::unquote fails is already covered by test_json_unquote
let cs = vec![
Json::from_object(BTreeMap::default()).unwrap(),
Json::from_array(vec![]).unwrap(),
Json::from_i64(10).unwrap(),
Json::from_i64(i64::MAX).unwrap(),
Json::from_i64(i64::MIN).unwrap(),
Json::from_u64(0).unwrap(),
Json::from_u64(u64::MAX).unwrap(),
Json::from_f64(10.5).unwrap(),
Json::from_f64(10.4).unwrap(),
Json::from_f64(-10.4).unwrap(),
Json::from_f64(-10.5).unwrap(),
Json::from_f64(i64::MIN as u64 as f64).unwrap(),
Json::from_f64(i64::MAX as u64 as f64).unwrap(),
Json::from_f64(i64::MIN as u64 as f64).unwrap(),
Json::from_f64(i64::MIN as f64).unwrap(),
Json::from_f64(((1u64 << 63) + (1u64 << 62)) as u64 as f64).unwrap(),
Json::from_f64(-((1u64 << 63) as f64 + (1u64 << 62) as f64)).unwrap(),
Json::from_f64(f64::from(f32::MIN)).unwrap(),
Json::from_f64(f64::from(f32::MAX)).unwrap(),
Json::from_f64(f64::MAX).unwrap(),
Json::from_f64(f64::MAX).unwrap(),
Json::from_string(String::from("10.0")).unwrap(),
Json::from_string(String::from(
"999999999999999999999999999999999999999999999999",
))
.unwrap(),
Json::from_string(String::from(
"-999999999999999999999999999999999999999999999999",
))
.unwrap(),
Json::from_string(String::from(
"99999999999999999999999999999999999999999999999aabcde9",
))
.unwrap(),
Json::from_string(String::from(
"-99999999999999999999999999999999999999999999999aabcde9",
))
.unwrap(),
Json::from_bool(true).unwrap(),
Json::from_bool(false).unwrap(),
Json::none().unwrap(),
];
test_as_duration_helper(
cs,
|x| x.as_ref().unquote().unwrap(),
|x| format!("{:?}", x),
cast_json_as_duration,
"cast_json_as_duration",
);
}
#[test]
fn test_int_as_json() {
test_none_with_ctx(cast_any_as_any::<Int, Json>);
let cs = vec![
(i64::MIN, Json::from_i64(i64::MIN).unwrap()),
(0, Json::from_i64(0).unwrap()),
(i64::MAX, Json::from_i64(i64::MAX).unwrap()),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let r = cast_any_as_any::<Int, Json>(&mut ctx, &Some(input));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_uint_as_json() {
test_none_with_nothing(cast_uint_as_json);
let cs = vec![
(u64::MAX, Json::from_u64(u64::MAX).unwrap()),
(0, Json::from_u64(0).unwrap()),
(i64::MAX as u64, Json::from_u64(i64::MAX as u64).unwrap()),
];
for (input, expect) in cs {
let r = cast_uint_as_json(&Some(input as i64));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_bool_as_json() {
test_none_with_nothing(cast_bool_as_json);
let cs = vec![
(0, Json::from_bool(false).unwrap()),
(i64::MIN, Json::from_bool(true).unwrap()),
(i64::MAX, Json::from_bool(true).unwrap()),
];
for (input, expect) in cs {
let result = cast_bool_as_json(&Some(input));
let log = make_log(&input, &expect, &result);
check_result(Some(&expect), &result, log.as_str());
}
}
#[test]
fn test_real_as_json() {
test_none_with_ctx(cast_any_as_any::<Real, Json>);
let cs = vec![
(
f64::from(f32::MAX),
Json::from_f64(f64::from(f32::MAX)).unwrap(),
),
(
f64::from(f32::MIN),
Json::from_f64(f64::from(f32::MIN)).unwrap(),
),
(f64::MAX, Json::from_f64(f64::MAX).unwrap()),
(f64::MIN, Json::from_f64(f64::MIN).unwrap()),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let r = cast_any_as_any::<Real, Json>(&mut ctx, &Real::new(input).ok());
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_string_as_json() {
test_none_with_extra(cast_string_as_json);
let mut jo1: BTreeMap<String, Json> = BTreeMap::new();
jo1.insert(
String::from("a"),
Json::from_string(String::from("b")).unwrap(),
);
// HasParseToJSONFlag
let cs = vec![
(
"{\"a\": \"b\"}".to_string(),
Json::from_object(jo1).unwrap(),
true,
),
(
"{}".to_string(),
Json::from_object(BTreeMap::new()).unwrap(),
true,
),
(
"[1, 2, 3]".to_string(),
Json::from_array(vec![
Json::from_i64(1).unwrap(),
Json::from_i64(2).unwrap(),
Json::from_i64(3).unwrap(),
])
.unwrap(),
true,
),
(
"[]".to_string(),
Json::from_array(Vec::new()).unwrap(),
true,
),
(
"9223372036854775807".to_string(),
Json::from_i64(9223372036854775807).unwrap(),
true,
),
(
"-9223372036854775808".to_string(),
Json::from_i64(-9223372036854775808).unwrap(),
true,
),
(
"18446744073709551615".to_string(),
Json::from_f64(18446744073709552000.0).unwrap(),
true,
),
// FIXME: f64::MAX.to_string() to json should success
// (f64::MAX.to_string(), Json::from_f64(f64::MAX), true),
("0.0".to_string(), Json::from_f64(0.0).unwrap(), true),
(
"\"abcde\"".to_string(),
Json::from_string("abcde".to_string()).unwrap(),
true,
),
(
"\"\"".to_string(),
Json::from_string("".to_string()).unwrap(),
true,
),
("true".to_string(), Json::from_bool(true).unwrap(), true),
("false".to_string(), Json::from_bool(false).unwrap(), true),
];
for (input, expect, parse_to_json) in cs {
let mut rft = FieldType::default();
if parse_to_json {
let fta = rft.as_mut_accessor();
fta.set_flag(FieldTypeFlag::PARSE_TO_JSON);
}
let extra = make_extra(&rft);
let result = cast_string_as_json(&extra, &Some(input.clone().into_bytes()));
let result_str = result.as_ref().map(|x| x.as_ref().map(|x| x.to_string()));
let log = format!(
"input: {}, parse_to_json: {}, expect: {:?}, result: {:?}",
input, parse_to_json, expect, result_str
);
check_result(Some(&expect), &result, log.as_str());
}
}
#[test]
fn test_decimal_as_json() {
test_none_with_ctx(cast_any_as_any::<Decimal, Json>);
let cs = vec![
(
Decimal::from_f64(i64::MIN as f64).unwrap(),
Json::from_f64(i64::MIN as f64).unwrap(),
),
(
Decimal::from_f64(i64::MAX as f64).unwrap(),
Json::from_f64(i64::MAX as f64).unwrap(),
),
(
Decimal::from_bytes(b"184467440737095516160")
.unwrap()
.unwrap(),
Json::from_f64(184467440737095516160.0).unwrap(),
),
(
Decimal::from_bytes(b"-184467440737095516160")
.unwrap()
.unwrap(),
Json::from_f64(-184467440737095516160.0).unwrap(),
),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let r = cast_any_as_any::<Decimal, Json>(&mut ctx, &Some(input));
let log = make_log(&input, &expect, &r);
check_result(Some(&expect), &r, log.as_str());
}
}
#[test]
fn test_time_as_json() {
test_none_with_ctx(cast_any_as_any::<Time, Json>);
let mut ctx = EvalContext::default();
        // TODO: add more cases for other TimeTypes
let cs = vec![
            // The time_type field is included so maintainers can clearly see which time type each case uses.
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14", 0, true).unwrap(),
TimeType::DateTime,
Json::from_string("2000-01-01 12:13:14.000000".to_string()).unwrap(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 0, true).unwrap(),
TimeType::DateTime,
Json::from_string("2000-01-01 12:13:15.000000".to_string()).unwrap(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14", 6, true).unwrap(),
TimeType::DateTime,
Json::from_string("2000-01-01 12:13:14.000000".to_string()).unwrap(),
),
(
Time::parse_datetime(&mut ctx, "2000-01-01T12:13:14.6666", 6, true).unwrap(),
TimeType::DateTime,
Json::from_string("2000-01-01 12:13:14.666600".to_string()).unwrap(),
),
(
Time::parse_datetime(&mut ctx, "2019-09-01", 0, true).unwrap(),
TimeType::DateTime,
Json::from_string("2019-09-01 00:00:00.000000".to_string()).unwrap(),
),
(
Time::parse_datetime(&mut ctx, "2019-09-01", 6, true).unwrap(),
TimeType::DateTime,
Json::from_string("2019-09-01 00:00:00.000000".to_string()).unwrap(),
),
];
for (input, time_type, expect) in cs {
let mut ctx = EvalContext::default();
let result = cast_any_as_any::<Time, Json>(&mut ctx, &Some(input));
let result_str = result.as_ref().map(|x| x.as_ref().map(|x| x.to_string()));
let log = format!(
"input: {}, expect_time_type: {:?}, real_time_type: {:?}, expect: {}, result: {:?}",
&input,
time_type,
input.get_time_type(),
&expect,
result_str
);
assert_eq!(input.get_time_type(), time_type, "{}", log);
check_result(Some(&expect), &result, log.as_str());
}
}
#[test]
fn test_duration_as_json() {
test_none_with_ctx(cast_any_as_any::<Duration, Json>);
// TODO: add more cases
let cs = vec![
(
Duration::zero(),
Json::from_string("00:00:00.000000".to_string()).unwrap(),
),
(
Duration::parse(&mut EvalContext::default(), b"10:10:10", 0).unwrap(),
Json::from_string("10:10:10.000000".to_string()).unwrap(),
),
];
for (input, expect) in cs {
let mut ctx = EvalContext::default();
let result = cast_any_as_any::<Duration, Json>(&mut ctx, &Some(input));
let log = make_log(&input, &expect, &result);
check_result(Some(&expect), &result, log.as_str());
}
}
#[test]
fn test_json_as_json() {
test_none_with_nothing(cast_json_as_json);
let mut jo1: BTreeMap<String, Json> = BTreeMap::new();
jo1.insert("a".to_string(), Json::from_string("b".to_string()).unwrap());
let cs = vec![
Json::from_object(jo1).unwrap(),
Json::from_array(vec![
Json::from_i64(1).unwrap(),
Json::from_i64(3).unwrap(),
Json::from_i64(4).unwrap(),
])
.unwrap(),
Json::from_i64(i64::MIN).unwrap(),
Json::from_i64(i64::MAX).unwrap(),
Json::from_u64(0u64).unwrap(),
Json::from_u64(u64::MAX).unwrap(),
Json::from_f64(f64::MIN).unwrap(),
Json::from_f64(f64::MAX).unwrap(),
Json::from_string("abcde".to_string()).unwrap(),
Json::from_bool(true).unwrap(),
Json::from_bool(false).unwrap(),
Json::none().unwrap(),
];
for input in cs {
let expect = input.clone();
let result = cast_json_as_json(&Some(input.clone()));
let log = make_log(&input, &expect, &result);
check_result(Some(&expect), &result, log.as_str());
}
}
}
| 34.766523 | 145 | 0.464323 |
33760b97db5f7bf03935cee27cb568f3b4175d21 | 3,840 | // Copyright (c) Aptos
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use aptos_logger::debug;
use std::{cmp::min, future::Future, pin::Pin, thread, time::Duration};
/// Given an operation, retries it, sleeping after each failure.
/// If the operation succeeds before the delay iterator runs out, the success is returned;
/// otherwise the last error is returned.
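///
/// # Example (a sketch; `fallible_operation` is a placeholder)
///
/// ```ignore
/// // Retry up to 3 times, sleeping 10ms between failed attempts.
/// let result = retry(fixed_retry_strategy(10, 3), || fallible_operation());
/// ```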
pub fn retry<I, O, T, E>(iterable: I, mut operation: O) -> Result<T, E>
where
I: IntoIterator<Item = Duration>,
O: FnMut() -> Result<T, E>,
{
let mut iterator = iterable.into_iter();
loop {
match operation() {
Ok(value) => return Ok(value),
Err(err) => {
if let Some(delay) = iterator.next() {
thread::sleep(delay);
} else {
return Err(err);
}
}
}
}
}
pub async fn retry_async<'a, I, O, T, E>(iterable: I, mut operation: O) -> Result<T, E>
where
I: IntoIterator<Item = Duration>,
O: FnMut() -> Pin<Box<dyn Future<Output = Result<T, E>> + Send + 'a>>,
E: std::fmt::Display + std::fmt::Debug,
{
let mut iterator = iterable.into_iter();
loop {
match operation().await {
Ok(value) => return Ok(value),
Err(err) => {
if let Some(delay) = iterator.next() {
debug!("{}. Retrying in {} seconds..", err, delay.as_secs());
tokio::time::sleep(delay).await;
} else {
return Err(err);
}
}
}
}
}
pub fn fixed_retry_strategy(delay_ms: u64, tries: usize) -> impl Iterator<Item = Duration> {
FixedDelay::new(delay_ms).take(tries)
}
pub fn exp_retry_strategy(
start_ms: u64,
limit_ms: u64,
tries: usize,
) -> impl Iterator<Item = Duration> {
ExponentWithLimitDelay::new(start_ms, limit_ms).take(tries)
}
/// An iterator which uses a fixed delay
pub struct FixedDelay {
duration: Duration,
}
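/// An iterator which yields exponentially growing delays (factor 1.5), capped at a limit.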
pub struct ExponentWithLimitDelay {
current: Duration,
limit: Duration,
exp: f64,
}
impl FixedDelay {
/// Create a new `FixedDelay` using the given duration in milliseconds.
fn new(millis: u64) -> Self {
FixedDelay {
duration: Duration::from_millis(millis),
}
}
}
impl ExponentWithLimitDelay {
fn new(start_ms: u64, limit_ms: u64) -> Self {
ExponentWithLimitDelay {
current: Duration::from_millis(start_ms),
limit: Duration::from_millis(limit_ms),
exp: 1.5,
}
}
}
impl Iterator for FixedDelay {
type Item = Duration;
fn next(&mut self) -> Option<Duration> {
Some(self.duration)
}
}
impl Iterator for ExponentWithLimitDelay {
type Item = Duration;
fn next(&mut self) -> Option<Duration> {
let duration = self.current;
self.current = min(
Duration::from_millis((self.current.as_millis() as f64 * self.exp) as u64),
self.limit,
);
Some(duration)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_fixed_retry_strategy_success() {
let mut collection = vec![1, 2, 3, 4, 5].into_iter();
let result = retry(fixed_retry_strategy(0, 10), || match collection.next() {
Some(n) if n == 5 => Ok(n),
Some(_) => Err("not 5"),
None => Err("not 5"),
})
.unwrap();
assert_eq!(result, 5);
}
#[test]
fn test_fixed_retry_strategy_error() {
let mut collection = vec![1, 2, 3, 4, 5].into_iter();
let result = retry(fixed_retry_strategy(0, 3), || match collection.next() {
Some(n) if n == 5 => Ok(n),
Some(_) => Err("not 5"),
None => Err("not 5"),
});
assert_eq!(result, Err("not 5"));
}
}
| 26.853147 | 92 | 0.55026 |
f9e1564e6d1a9226235de43b551d0d7e1ea210f3 | 3,488 | ///! Raid generation parameters.
use super::super::personal_data::get_personal_info;
use super::mon::{Ability, Gender};
use wasm_bindgen::prelude::*;
/// Species of mon.
pub type Species = u32; // TODO: Replace this with an enum eventually.
/// A raid mon can be randomly shiny, always shiny, or never shiny.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum ShinyPool {
Random,
Locked(bool),
}
impl From<u8> for ShinyPool {
// Converts a u32 into a ShinyPool, based on the game's encoding.
fn from(n: u8) -> Self {
match n {
n if n == 0 => ShinyPool::Random,
n if n == 1 => ShinyPool::Locked(false),
_ => ShinyPool::Locked(true),
}
}
}
/// Which abilities can be rolled.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum AbilityPool {
Random,
NoHA,
Locked(Ability),
}
impl From<u8> for AbilityPool {
// Converts a u8 into an AbilityPool, based on the game's encoding.
fn from(n: u8) -> Self {
match n {
n if n == 4 => AbilityPool::Random,
n if n == 3 => AbilityPool::NoHA,
n if n == 2 => AbilityPool::Locked(Ability::Hidden),
n if n == 1 => AbilityPool::Locked(Ability::Second),
n if n == 0 => AbilityPool::Locked(Ability::First),
_ => panic!("Invalid ability type"),
}
}
}
/// Which genders can be rolled.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum GenderPool {
Random(u8),
Locked(Gender),
}
impl GenderPool {
// Converts a u8 into a GenderPool, based on the game's encoding.
fn from(n: u8, ratio: Option<u8>) -> Self {
match n {
n if n == 0 => GenderPool::Random(ratio.unwrap()),
n if n == 1 => GenderPool::Locked(Gender::Male),
n if n == 2 => GenderPool::Locked(Gender::Female),
n if n == 3 => GenderPool::Locked(Gender::Genderless),
_ => panic!("Invalid gender type"),
}
}
}
/// Represents a raid mon's possible stats: form, gender, ability, IVs, etc.
/// These constraints are defined in the game data and control which values can be rolled.
#[wasm_bindgen]
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub struct Raid {
species: Species,
min_flawless_ivs: u8,
alt_form: u8,
is_gmax: bool,
ability: AbilityPool,
gender: GenderPool,
shiny: ShinyPool,
}
#[wasm_bindgen]
impl Raid {
#[wasm_bindgen(constructor)]
pub fn new(
species: Species,
alt_form: u8,
min_flawless_ivs: u8,
is_gmax: bool,
ability_pool: u8,
gender_pool: u8,
) -> Self {
let gender_ratio =
get_personal_info(species as usize, alt_form as usize).map(|pi| pi.get_gender_ratio());
Raid {
species,
min_flawless_ivs,
alt_form,
is_gmax,
ability: AbilityPool::from(ability_pool),
gender: GenderPool::from(gender_pool, gender_ratio),
shiny: ShinyPool::Random,
}
}
}
impl Raid {
pub fn get_shiny_pool(&self) -> ShinyPool {
self.shiny
}
pub fn get_min_flawless_ivs(&self) -> u8 {
self.min_flawless_ivs
}
pub fn get_ability_pool(&self) -> AbilityPool {
self.ability
}
pub fn get_gender_pool(&self) -> GenderPool {
self.gender
}
pub fn get_species(&self) -> Species {
self.species
}
pub fn get_alt_form(&self) -> u8 {
self.alt_form
}
}
| 26.029851 | 99 | 0.580849 |
fcb083cd7b2cc7a97162f6c14717fb2900ef8b0b | 641 | // Copyright 2022 The Engula Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Snapshot manager.
// TODO(walter): add the definition
| 37.705882 | 75 | 0.74727 |
e46a1ff699ec7c67fb8ea4950b704e3cf69cae8b | 3,188 | pub struct Scanner<R: std::io::Read> {
reader: R,
}
impl<R: std::io::Read> Scanner<R> {
/// let stdin = std::io::stdin();
/// let mut sc = Scanner::new(stdin.lock());
pub fn new(reader: R) -> Self { Self { reader: reader } }
pub fn scan<T: std::str::FromStr>(&mut self) -> T {
use std::io::Read;
self.reader.by_ref().bytes().map(|c| c.unwrap() as char)
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>().parse::<T>().ok().unwrap()
}
}
// #[allow(warnings)]
fn main() {
use std::io::Write;
let stdin = std::io::stdin();
let mut sc = Scanner::new(std::io::BufReader::new(stdin.lock()));
let stdout = std::io::stdout();
let out = &mut std::io::BufWriter::new(stdout.lock());
let n: usize = sc.scan();
let f = prime_factorize(n);
write!(out, "{}: ", n).unwrap();
for (p, c) in f.iter() {
for _ in 0..*c {
write!(out, "{} ", p).unwrap();
}
}
writeln!(out, "").unwrap();
}
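/// Returns a vec `s` of length `n` where `s[k]` is the least prime factor of `k`
/// (`s[0]` and `s[1]` are left as 0); for a prime `p`, `s[p] == p`.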
pub fn least_prime_factor(n: usize) -> Vec<usize> {
assert!(n >= 2);
let mut s: Vec<usize> = (0..n).collect();
s[1] = 0;
let mut i = 0;
while i * i < n - 1 {
i += 1;
if s[i as usize] != i { continue; }
for j in (i * i..n).step_by(i as usize) {
if s[j as usize] == j { s[j as usize] = i; }
}
}
s
}
pub fn greatest_prime_factor(n: usize) -> Vec<usize> {
assert!(n >= 2);
let mut s: Vec<usize> = (0..n).collect();
s[1] = 0;
let mut i = 0;
while i < n - 1 {
i += 1;
if s[i as usize] != i { continue; }
for j in (i * 2..n).step_by(i as usize) {
s[j as usize] = i;
}
}
s
}
pub fn sieve_of_eratosthenes(n: usize) -> Vec<bool> {
let lpf = least_prime_factor(n);
(0..n).map(|i| i >= 2 && i == lpf[i as usize]).collect()
}
pub fn find_prime_numbers(n: usize) -> Vec<usize> {
let is_prime = sieve_of_eratosthenes(n);
(0..n).filter(|i| is_prime[*i as usize]).collect()
}
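/// Trial-division factorization: returns a map from each prime factor of `n` to its exponent.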
pub fn prime_factorize(mut n: usize) -> std::collections::BTreeMap<usize, usize> {
let mut cnt = std::collections::BTreeMap::new();
let mut i = 1;
while i * i < n {
i += 1;
if n % i != 0 { continue; }
while n % i == 0 {
n /= i;
*cnt.entry(i).or_insert(0usize) += 1;
}
}
if n > 1 { cnt.insert(n, 1); }
cnt
}
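/// Precomputes least prime factors up to `n` so that later factorizations take
/// only O(log m) divisions per query.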
pub struct PrimeFactorizeLPF {
lpf: Vec<usize>,
}
impl PrimeFactorizeLPF {
pub fn new(n: usize) -> Self {
PrimeFactorizeLPF { lpf: least_prime_factor(n) }
}
pub fn factorize(&self, mut n: usize) -> std::collections::BTreeMap<usize, usize> {
let mut cnt = std::collections::BTreeMap::new();
while n > 1 {
let p = self.lpf[n] as usize;
n /= p;
*cnt.entry(p).or_insert(0usize) += 1;
}
cnt
}
}
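/// For each `i` in `0..n`, counts the number of distinct prime factors of `i`.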
pub fn count_prime_factors(n: usize) -> Vec<usize> {
let mut cnt = vec![0; n as usize];
for p in find_prime_numbers(n).into_iter().map(|x| x as usize) {
for i in (p..n).step_by(p) { cnt[i] += 1; }
}
cnt
}
| 24.523077 | 87 | 0.506587 |
fb54f0ff89be42f4ec6dfc9de82dda74efb57779 | 5,919 | #![recursion_limit = "128"]
mod channel;
mod clipboard;
mod controller;
mod edit_view;
mod linecache;
mod main_win;
mod prefs_win;
mod proto;
mod rpc;
mod scrollable_drawing_area;
mod theme;
mod xi_thread;
use crate::channel::Sender;
use crate::controller::{Controller, CoreMsg};
use clap::{Arg, SubCommand};
use gio::prelude::*;
use gio::{ApplicationExt, ApplicationFlags, FileExt};
use glib::clone;
use gtk::{Application};
use log::*;
use main_win::MainWin;
use rpc::{Core, Handler};
use serde_json::Value;
use std::any::Any;
use std::cell::RefCell;
use std::env::args;
use dirs_next::home_dir;
// pub struct SharedQueue {
// queue: VecDeque<CoreMsg>,
// }
// impl SharedQueue {
// pub fn add_core_msg(&mut self, msg: CoreMsg) {
// if self.queue.is_empty() {
// self.pipe_writer
// .write_all(&[0u8])
// .expect("failed to write to signalling pipe");
// }
// trace!("pushing to queue");
// self.queue.push_back(msg);
// }
// }
trait IdleCallback: Send {
fn call(self: Box<Self>, a: &Any);
}
impl<F: FnOnce(&Any) + Send> IdleCallback for F {
fn call(self: Box<F>, a: &Any) {
(*self)(a)
}
}
// struct QueueSource {
// win: Rc<RefCell<MainWin>>,
// sender: Sender<CoreMsg>,
// }
// impl SourceFuncs for QueueSource {
// fn check(&self) -> bool {
// false
// }
// fn prepare(&self) -> (bool, Option<u32>) {
// (false, None)
// }
// fn dispatch(&self) -> bool {
// trace!("dispatch");
// let mut shared_queue = self.queue.lock().unwrap();
// while let Some(msg) = shared_queue.queue.pop_front() {
// trace!("found a msg");
// MainWin::handle_msg(self.win.clone(), msg);
// }
// let mut buf = [0u8; 64];
// shared_queue
// .pipe_reader
// .try_read(&mut buf)
// .expect("failed to read signalling pipe");
// true
// }
// }
#[derive(Clone)]
struct MyHandler {
sender: Sender<CoreMsg>,
}
impl MyHandler {
fn new(sender: Sender<CoreMsg>) -> MyHandler {
MyHandler { sender }
}
}
impl Handler for MyHandler {
fn notification(&self, method: &str, params: &Value) {
debug!(
"CORE --> {{\"method\": \"{}\", \"params\":{}}}",
method, params
);
let method2 = method.to_string();
let params2 = params.clone();
self.sender.send(CoreMsg::Notification {
method: method2,
params: params2,
});
}
}
fn main() {
env_logger::init();
// let matches = App::new("gxi")
// .version("0.2.0")
// .author("brainn <[email protected]>")
// .about("Xi frontend")
// .arg(Arg::with_name("FILE")
// .multiple(true)
// .help("file to open")
// )
// .get_matches();
// let mut files = vec![];
// if matches.is_present("FILE") {
// files = matches.values_of("FILE").unwrap().collect::<Vec<_>>();
// }
// debug!("files {:?}", files);
let controller = Controller::new();
let controller2 = controller.clone();
let (chan, sender) = channel::Channel::new(move |msg| {
controller2.borrow().handle_msg(msg);
});
controller.borrow_mut().set_sender(sender.clone());
controller.borrow_mut().set_channel(chan);
// let queue: VecDeque<CoreMsg> = Default::default();
// let (reader, writer) = pipe().unwrap();
// let reader_raw_fd = reader.as_raw_fd();
// let shared_queue = Arc::new(Mutex::new(SharedQueue {
// queue: queue.clone(),
// pipe_writer: writer,
// pipe_reader: reader,
// }));
let (xi_peer, rx) = xi_thread::start_xi_thread();
let handler = MyHandler::new(sender.clone());
let core = Core::new(xi_peer, rx, handler.clone());
controller.borrow_mut().set_core(core);
let application =
Application::new(Some("com.github.bvinc.gxi"), ApplicationFlags::HANDLES_OPEN)
.expect("failed to create gtk application");
let mut config_dir = None;
let mut plugin_dir = None;
if let Some(home_dir) = home_dir() {
let xi_config = home_dir.join(".config").join("xi");
let xi_plugin = xi_config.join("plugins");
config_dir = xi_config.to_str().map(|s| s.to_string());
plugin_dir = xi_plugin.to_str().map(|s| s.to_string());
}
application.connect_startup(clone!(@strong controller => move |application| {
debug!("startup");
controller.borrow().core().client_started(config_dir.clone(), plugin_dir.clone());
let main_win = MainWin::new(application, controller.clone());
controller.borrow_mut().set_main_win(main_win);
// let source = new_source(QueueSource {
// win: main_win.clone(),
// sender: sender.clone(),
// });
// unsafe {
// use glib::translate::ToGlibPtr;
// ::glib_sys::g_source_add_unix_fd(source.to_glib_none().0, reader_raw_fd, ::glib_sys::G_IO_IN);
// }
// let main_context = MainContext::default();
// source.attach(Some(&main_context));
}));
application.connect_activate(clone!(@strong controller => move |application| {
debug!("activate");
controller.borrow().req_new_view(None);
}));
application.connect_open(clone!(@strong controller => move |_,files,s| {
debug!("open");
for file in files {
let path = file.get_path();
if path.is_none() { continue; }
let path = path.unwrap();
let path = path.to_string_lossy().into_owned();
controller.borrow().req_new_view(Some(&path));
}
}));
application.connect_shutdown(move |_| {
debug!("shutdown");
});
application.run(&args().collect::<Vec<_>>());
}
| 28.052133 | 109 | 0.567495 |
bba4e68d4cce54c77999693b6f4063b6674070a3 | 201 | #![crate_type = "lib"]
#![feature(const_generics_defaults)]
#![allow(incomplete_features, dead_code)]
struct Both<const N: usize=3, T> {
//~^ ERROR: generic parameters with a default must be
v: T
}
| 22.333333 | 53 | 0.701493 |
389fdf0a0c5ce2f9cb965526d439e9c0780de9ae | 1,321 | // Copyright 2020 David Young
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use crate::error::Error;
use core::convert::TryFrom;
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum NodeType {
/// Classic USIC and SUSIC using 24 bit input/output cards.
Usic = 'N' as isize,
/// SUSIC using 32 bit input/output cards.
Susic = 'X' as isize,
/// SMINI with fixed 24 inputs and 48 outputs
Smini = 'M' as isize,
/// CPNODE with 16 to 144 inputs/outputs using 8 bit cards.
Cpnode = 'C' as isize,
}
impl TryFrom<u8> for NodeType {
type Error = Error;
fn try_from(nt: u8) -> Result<Self, Error> {
use NodeType::*;
match nt as char {
'N' => Ok(Usic),
'X' => Ok(Susic),
'M' => Ok(Smini),
'C' => Ok(Cpnode),
_ => Err(Error::InvalidNodeType),
}
}
}
impl core::fmt::Display for NodeType {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter<'_>,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{:?}", self)
}
}
| 29.355556 | 77 | 0.591976 |
3ab744ebaeb17d0541961cb90bf347054428f2d0 | 14,024 | // This module contains some shared code for encoding and decoding various
// things from the `ty` module, and in particular implements support for
// "shorthands" which allow to have pointers back into the already encoded
// stream instead of re-encoding the same thing twice.
//
// The functionality in here is shared between persisting to crate metadata and
// persisting to incr. comp. caches.
use crate::hir::def_id::{DefId, CrateNum};
use crate::infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
use rustc_data_structures::fx::FxHashMap;
use crate::rustc_serialize::{Decodable, Decoder, Encoder, Encodable, opaque};
use std::hash::Hash;
use std::intrinsics;
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::subst::SubstsRef;
use crate::mir::interpret::Allocation;
/// The shorthand encoding uses an enum's variant index `usize`
/// and is offset by this value so it never matches a real variant.
/// This offset is also chosen so that the first byte is never < 0x80.
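/// For example, a type whose encoding starts at stream position 0x10 can later be
/// referenced by emitting the usize 0x90 (0x10 + SHORTHAND_OFFSET).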
pub const SHORTHAND_OFFSET: usize = 0x80;
pub trait EncodableWithShorthand: Clone + Eq + Hash {
type Variant: Encodable;
fn variant(&self) -> &Self::Variant;
}
impl<'tcx> EncodableWithShorthand for Ty<'tcx> {
type Variant = ty::TyKind<'tcx>;
fn variant(&self) -> &Self::Variant {
&self.sty
}
}
impl<'tcx> EncodableWithShorthand for ty::Predicate<'tcx> {
type Variant = ty::Predicate<'tcx>;
fn variant(&self) -> &Self::Variant {
self
}
}
pub trait TyEncoder: Encoder {
fn position(&self) -> usize;
}
impl TyEncoder for opaque::Encoder {
#[inline]
fn position(&self) -> usize {
self.position()
}
}
/// Encode the given value or a previously cached shorthand.
pub fn encode_with_shorthand<E, T, M>(encoder: &mut E,
value: &T,
cache: M)
-> Result<(), E::Error>
where E: TyEncoder,
M: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap<T, usize>,
T: EncodableWithShorthand,
{
let existing_shorthand = cache(encoder).get(value).cloned();
if let Some(shorthand) = existing_shorthand {
return encoder.emit_usize(shorthand);
}
let variant = value.variant();
let start = encoder.position();
variant.encode(encoder)?;
let len = encoder.position() - start;
// The shorthand encoding uses the same usize as the
// discriminant, with an offset so they can't conflict.
let discriminant = unsafe { intrinsics::discriminant_value(variant) };
assert!(discriminant < SHORTHAND_OFFSET as u64);
let shorthand = start + SHORTHAND_OFFSET;
// Get the number of bits that leb128 could fit
// in the same space as the fully encoded type.
let leb128_bits = len * 7;
// Check that the shorthand is not longer than the
// full encoding itself, i.e., it's an obvious win.
if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
cache(encoder).insert(value.clone(), shorthand);
}
Ok(())
}
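/// Encode `GenericPredicates`, writing each predicate through the shorthand cache.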
pub fn encode_predicates<'tcx, E, C>(encoder: &mut E,
predicates: &ty::GenericPredicates<'tcx>,
cache: C)
-> Result<(), E::Error>
where E: TyEncoder,
C: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap<ty::Predicate<'tcx>, usize>,
{
predicates.parent.encode(encoder)?;
predicates.predicates.len().encode(encoder)?;
for (predicate, span) in &predicates.predicates {
encode_with_shorthand(encoder, predicate, &cache)?;
span.encode(encoder)?;
}
Ok(())
}
pub trait TyDecoder<'a, 'tcx: 'a>: Decoder {
fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
fn peek_byte(&self) -> u8;
fn position(&self) -> usize;
fn cached_ty_for_shorthand<F>(&mut self,
shorthand: usize,
or_insert_with: F)
-> Result<Ty<'tcx>, Self::Error>
where F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>;
fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
where F: FnOnce(&mut Self) -> R;
fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum;
fn positioned_at_shorthand(&self) -> bool {
(self.peek_byte() & (SHORTHAND_OFFSET as u8)) != 0
}
}
#[inline]
pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result<CrateNum, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
let cnum = CrateNum::from_u32(u32::decode(decoder)?);
Ok(decoder.map_encoded_cnum_to_current(cnum))
}
#[inline]
pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result<Ty<'tcx>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
// Handle shorthands first, if we have a usize > 0x80.
if decoder.positioned_at_shorthand() {
let pos = decoder.read_usize()?;
assert!(pos >= SHORTHAND_OFFSET);
let shorthand = pos - SHORTHAND_OFFSET;
decoder.cached_ty_for_shorthand(shorthand, |decoder| {
decoder.with_position(shorthand, Ty::decode)
})
} else {
let tcx = decoder.tcx();
Ok(tcx.mk_ty(ty::TyKind::decode(decoder)?))
}
}
#[inline]
pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D)
-> Result<ty::GenericPredicates<'tcx>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
Ok(ty::GenericPredicates {
parent: Decodable::decode(decoder)?,
predicates: (0..decoder.read_usize()?).map(|_| {
// Handle shorthands first, if we have a usize > 0x80.
let predicate = if decoder.positioned_at_shorthand() {
let pos = decoder.read_usize()?;
assert!(pos >= SHORTHAND_OFFSET);
let shorthand = pos - SHORTHAND_OFFSET;
decoder.with_position(shorthand, ty::Predicate::decode)
} else {
ty::Predicate::decode(decoder)
}?;
Ok((predicate, Decodable::decode(decoder)?))
})
.collect::<Result<Vec<_>, _>>()?,
})
}
#[inline]
pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<SubstsRef<'tcx>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
let len = decoder.read_usize()?;
let tcx = decoder.tcx();
Ok(tcx.mk_substs((0..len).map(|_| Decodable::decode(decoder)))?)
}
#[inline]
pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result<ty::Region<'tcx>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
Ok(decoder.tcx().mk_region(Decodable::decode(decoder)?))
}
#[inline]
pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D)
-> Result<&'tcx ty::List<Ty<'tcx>>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
let len = decoder.read_usize()?;
Ok(decoder.tcx().mk_type_list((0..len).map(|_| Decodable::decode(decoder)))?)
}
#[inline]
pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D)
-> Result<&'tcx ty::AdtDef, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
let def_id = DefId::decode(decoder)?;
Ok(decoder.tcx().adt_def(def_id))
}
#[inline]
pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D)
-> Result<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
let len = decoder.read_usize()?;
Ok(decoder.tcx()
.mk_existential_predicates((0..len).map(|_| Decodable::decode(decoder)))?)
}
#[inline]
pub fn decode_canonical_var_infos<'a, 'tcx, D>(decoder: &mut D)
-> Result<CanonicalVarInfos<'tcx>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
let len = decoder.read_usize()?;
let interned: Result<Vec<CanonicalVarInfo>, _> = (0..len).map(|_| Decodable::decode(decoder))
.collect();
Ok(decoder.tcx()
.intern_canonical_var_infos(interned?.as_slice()))
}
#[inline]
pub fn decode_lazy_const<'a, 'tcx, D>(decoder: &mut D)
-> Result<&'tcx ty::LazyConst<'tcx>, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
Ok(decoder.tcx().mk_lazy_const(Decodable::decode(decoder)?))
}
#[inline]
pub fn decode_allocation<'a, 'tcx, D>(decoder: &mut D)
-> Result<&'tcx Allocation, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
Ok(decoder.tcx().intern_const_alloc(Decodable::decode(decoder)?))
}
#[macro_export]
macro_rules! __impl_decoder_methods {
($($name:ident -> $ty:ty;)*) => {
$(fn $name(&mut self) -> Result<$ty, Self::Error> {
self.opaque.$name()
})*
}
}
#[macro_export]
macro_rules! implement_ty_decoder {
($DecoderName:ident <$($typaram:tt),*>) => {
mod __ty_decoder_impl {
use super::$DecoderName;
use $crate::infer::canonical::CanonicalVarInfos;
use $crate::ty;
use $crate::ty::codec::*;
use $crate::ty::subst::SubstsRef;
use $crate::hir::def_id::{CrateNum};
use crate::rustc_serialize::{Decoder, SpecializedDecoder};
use std::borrow::Cow;
impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> {
type Error = String;
__impl_decoder_methods! {
read_nil -> ();
read_u128 -> u128;
read_u64 -> u64;
read_u32 -> u32;
read_u16 -> u16;
read_u8 -> u8;
read_usize -> usize;
read_i128 -> i128;
read_i64 -> i64;
read_i32 -> i32;
read_i16 -> i16;
read_i8 -> i8;
read_isize -> isize;
read_bool -> bool;
read_f64 -> f64;
read_f32 -> f32;
read_char -> char;
read_str -> Cow<'_, str>;
}
fn error(&mut self, err: &str) -> Self::Error {
self.opaque.error(err)
}
}
// FIXME(#36588) These impls are horribly unsound as they allow
// the caller to pick any lifetime for 'tcx, including 'static,
// by using the unspecialized proxies to them.
impl<$($typaram),*> SpecializedDecoder<CrateNum>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self) -> Result<CrateNum, Self::Error> {
decode_cnum(self)
}
}
impl<$($typaram),*> SpecializedDecoder<ty::Ty<'tcx>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self) -> Result<ty::Ty<'tcx>, Self::Error> {
decode_ty(self)
}
}
impl<$($typaram),*> SpecializedDecoder<ty::GenericPredicates<'tcx>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self)
-> Result<ty::GenericPredicates<'tcx>, Self::Error> {
decode_predicates(self)
}
}
impl<$($typaram),*> SpecializedDecoder<SubstsRef<'tcx>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self) -> Result<SubstsRef<'tcx>, Self::Error> {
decode_substs(self)
}
}
impl<$($typaram),*> SpecializedDecoder<ty::Region<'tcx>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self) -> Result<ty::Region<'tcx>, Self::Error> {
decode_region(self)
}
}
impl<$($typaram),*> SpecializedDecoder<&'tcx ty::List<ty::Ty<'tcx>>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self)
-> Result<&'tcx ty::List<ty::Ty<'tcx>>, Self::Error> {
decode_ty_slice(self)
}
}
impl<$($typaram),*> SpecializedDecoder<&'tcx ty::AdtDef>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> {
decode_adt_def(self)
}
}
impl<$($typaram),*> SpecializedDecoder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self)
-> Result<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>, Self::Error> {
decode_existential_predicate_slice(self)
}
}
impl<$($typaram),*> SpecializedDecoder<CanonicalVarInfos<'tcx>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self)
-> Result<CanonicalVarInfos<'tcx>, Self::Error> {
decode_canonical_var_infos(self)
}
}
impl<$($typaram),*> SpecializedDecoder<&'tcx $crate::ty::LazyConst<'tcx>>
for $DecoderName<$($typaram),*> {
fn specialized_decode(&mut self) -> Result<&'tcx ty::LazyConst<'tcx>, Self::Error> {
decode_lazy_const(self)
}
}
impl<$($typaram),*> SpecializedDecoder<&'tcx $crate::mir::interpret::Allocation>
for $DecoderName<$($typaram),*> {
fn specialized_decode(
&mut self
) -> Result<&'tcx $crate::mir::interpret::Allocation, Self::Error> {
decode_allocation(self)
}
}
}
}
}
| 34.204878 | 100 | 0.537151 |
f705f45aa97b984e57bb3e6b6075ff480f1502fa | 14,093 | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/cli/main.rs.mako'
// DO NOT EDIT !
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
extern crate tokio;
#[macro_use]
extern crate clap;
extern crate yup_oauth2 as oauth2;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
use google_groupsmigration1::{api, Error};
mod client;
use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::str::FromStr;
use serde_json as json;
use clap::ArgMatches;
enum DoitError {
IoError(String, io::Error),
ApiError(Error),
}
struct Engine<'n> {
opt: ArgMatches<'n>,
hub: api::GroupsMigration,
gp: Vec<&'static str>,
gpm: Vec<(&'static str, &'static str)>,
}
impl<'n> Engine<'n> {
async fn _archive_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.archive().insert(opt.value_of("group-id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let vals = opt.values_of("mode").unwrap().collect::<Vec<&str>>();
let protocol = calltype_from_str(vals[0], ["simple"].iter().map(|&v| v.to_string()).collect(), err);
let mut input_file = input_file_from_opts(vals[1], err);
let mime_type = input_mime_from_opts(opt.value_of("mime").unwrap_or("application/octet-stream"), err);
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Upload(UploadProtocol::Simple) => call.upload(input_file.unwrap(), mime_type.unwrap()).await,
CallType::Standard => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
let mut err = InvalidOptionsError::new();
let mut call_result: Result<(), DoitError> = Ok(());
let mut err_opt: Option<InvalidOptionsError> = None;
match self.opt.subcommand() {
("archive", Some(opt)) => {
match opt.subcommand() {
("insert", Some(opt)) => {
call_result = self._archive_insert(opt, dry_run, &mut err).await;
},
_ => {
err.issues.push(CLIError::MissingMethodError("archive".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
_ => {
err.issues.push(CLIError::MissingCommandError);
writeln!(io::stderr(), "{}\n", self.opt.usage()).ok();
}
}
if dry_run {
if err.issues.len() > 0 {
err_opt = Some(err);
}
Err(err_opt)
} else {
Ok(call_result)
}
}
// Please note that this call will fail if any part of the opt can't be handled
async fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> {
let (config_dir, secret) = {
let config_dir = match client::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) {
Err(e) => return Err(InvalidOptionsError::single(e, 3)),
Ok(p) => p,
};
match client::application_secret_from_directory(&config_dir, "groupsmigration1-secret.json",
"{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") {
Ok(secret) => (config_dir, secret),
Err(e) => return Err(InvalidOptionsError::single(e, 4))
}
};
let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
secret,
yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
).persist_tokens_to_disk(format!("{}/groupsmigration1", config_dir)).build().await.unwrap();
let client = hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots());
let engine = Engine {
opt: opt,
hub: api::GroupsMigration::new(client, auth),
gp: vec!["$-xgafv", "access-token", "alt", "callback", "fields", "key", "oauth-token", "pretty-print", "quota-user", "upload-type", "upload-protocol"],
gpm: vec![
("$-xgafv", "$.xgafv"),
("access-token", "access_token"),
("oauth-token", "oauth_token"),
("pretty-print", "prettyPrint"),
("quota-user", "quotaUser"),
("upload-type", "uploadType"),
("upload-protocol", "upload_protocol"),
]
};
match engine._doit(true).await {
Err(Some(err)) => Err(err),
Err(None) => Ok(engine),
Ok(_) => unreachable!(),
}
}
async fn doit(&self) -> Result<(), DoitError> {
match self._doit(false).await {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
#[tokio::main]
async fn main() {
let mut exit_status = 0i32;
let upload_value_names = ["mode", "file"];
let arg_data = [
("archive", "methods: 'insert'", vec![
("insert",
Some(r##"Inserts a new mail into the archive of the Google group."##),
"Details at http://byron.github.io/google-apis-rs/google_groupsmigration1_cli/archive_insert",
vec![
(Some(r##"group-id"##),
None,
Some(r##"The group ID"##),
Some(true),
Some(false)),
(Some(r##"mode"##),
Some(r##"u"##),
Some(r##"Specify the upload protocol (simple) and the file to upload"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
];
let mut app = App::new("groupsmigration1")
.author("Sebastian Thiel <[email protected]>")
.version("2.0.5+20210318")
.about("The Groups Migration API allows domain administrators to archive emails into Google groups.")
.after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_groupsmigration1_cli")
.arg(Arg::with_name("url")
.long("scope")
.help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli")
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Debug print all errors")
.multiple(false)
.takes_value(false));
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
if arg_name_str == "mode" {
arg = arg.number_of_values(2);
arg = arg.value_names(&upload_value_names);
scmd = scmd.arg(Arg::with_name("mime")
.short("m")
.requires("mode")
.required(false)
.help("The file's mime time, like 'application/octet-stream'")
.takes_value(true));
}
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
let matches = app.get_matches();
let debug = matches.is_present("debug");
match Engine::new(matches).await {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit().await {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}
| 43.767081 | 526 | 0.464486 |
ef7dc6ebe3f69ad7304a2ec49dbbd112b65eedc3 | 29,850 | use ra_db::FileId;
use ra_syntax::{ast, SmolStr};
use rustc_hash::FxHashMap;
use test_utils::tested_by;
use crate::{
db::DefDatabase,
ids::{AstItemDef, LocationCtx, MacroCallId, MacroCallLoc, MacroDefId, MacroFileKind},
name::MACRO_RULES,
nameres::{
diagnostics::DefDiagnostic,
mod_resolution::{resolve_submodule, ParentModule},
raw, Crate, CrateDefMap, CrateModuleId, ModuleData, ModuleDef, PerNs, ReachedFixedPoint,
Resolution, ResolveMode,
},
Adt, AstId, Const, Enum, Function, HirFileId, MacroDef, Module, Name, Path, PathKind, Static,
Struct, Trait, TypeAlias, Union,
};
pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
// populate external prelude
for dep in def_map.krate.dependencies(db) {
log::debug!("crate dep {:?} -> {:?}", dep.name, dep.krate);
if let Some(module) = dep.krate.root_module(db) {
def_map.extern_prelude.insert(dep.name.clone(), module.into());
}
// look for the prelude
if def_map.prelude.is_none() {
let map = db.crate_def_map(dep.krate);
if map.prelude.is_some() {
def_map.prelude = map.prelude;
}
}
}
let mut collector = DefCollector {
db,
def_map,
glob_imports: FxHashMap::default(),
unresolved_imports: Vec::new(),
unexpanded_macros: Vec::new(),
macro_stack_monitor: MacroStackMonitor::default(),
};
collector.collect();
collector.finish()
}
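/// Tracks how many expansions of each macro are currently on the stack, so that
/// runaway recursive expansions can be detected and poisoned.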
#[derive(Default)]
struct MacroStackMonitor {
counts: FxHashMap<MacroDefId, u32>,
/// Mainly used for tests.
validator: Option<Box<dyn Fn(u32) -> bool>>,
}
impl MacroStackMonitor {
fn increase(&mut self, macro_def_id: MacroDefId) {
*self.counts.entry(macro_def_id).or_default() += 1;
}
fn decrease(&mut self, macro_def_id: MacroDefId) {
*self.counts.entry(macro_def_id).or_default() -= 1;
}
fn is_poison(&self, macro_def_id: MacroDefId) -> bool {
let cur = *self.counts.get(¯o_def_id).unwrap_or(&0);
if let Some(validator) = &self.validator {
validator(cur)
} else {
cur > 100
}
}
}
/// Walks the tree of module recursively
struct DefCollector<DB> {
db: DB,
def_map: CrateDefMap,
glob_imports: FxHashMap<CrateModuleId, Vec<(CrateModuleId, raw::ImportId)>>,
unresolved_imports: Vec<(CrateModuleId, raw::ImportId, raw::ImportData)>,
unexpanded_macros: Vec<(CrateModuleId, AstId<ast::MacroCall>, Path)>,
/// Some macros use `$tt:tt`, which means we have to handle the macro perfectly.
/// To prevent stack overflow, we add a depth counter here.
macro_stack_monitor: MacroStackMonitor,
}
impl<'a, DB> DefCollector<&'a DB>
where
DB: DefDatabase,
{
fn collect(&mut self) {
let crate_graph = self.db.crate_graph();
let file_id = crate_graph.crate_root(self.def_map.krate.crate_id());
let raw_items = self.db.raw_items(file_id.into());
let module_id = self.def_map.root;
self.def_map.modules[module_id].definition = Some(file_id);
ModCollector {
def_collector: &mut *self,
attr_path: None,
module_id,
file_id: file_id.into(),
raw_items: &raw_items,
parent_module: None,
}
.collect(raw_items.items());
// main name resolution fixed-point loop.
let mut i = 0;
loop {
self.db.check_canceled();
match (self.resolve_imports(), self.resolve_macros()) {
(ReachedFixedPoint::Yes, ReachedFixedPoint::Yes) => break,
_ => i += 1,
}
if i == 1000 {
log::error!("name resolution is stuck");
break;
}
}
let unresolved_imports = std::mem::replace(&mut self.unresolved_imports, Vec::new());
// show unresolved imports in completion, etc
for (module_id, import, import_data) in unresolved_imports {
self.record_resolved_import(module_id, PerNs::none(), import, &import_data)
}
}
/// Define a macro with `macro_rules`.
///
/// It will define the macro in legacy textual scope, and if it has `#[macro_export]`,
/// then it is also defined in the root module scope.
/// You can `use` or invoke it by `crate::macro_name` anywhere, before or after the definition.
///
/// It is surprising that the macro will never be in the current module scope.
/// The following code fails with "unresolved import/macro":
/// ```rust,compile_fail
/// mod m { macro_rules! foo { () => {} } }
/// use m::foo as bar;
/// ```
///
/// ```rust,compile_fail
/// macro_rules! foo { () => {} }
/// self::foo!();
/// crate::foo!();
/// ```
///
/// Well, this code compiles, because the plain path `foo` in `use` is searched
/// in the legacy textual scope only.
/// ```rust
/// macro_rules! foo { () => {} }
/// use foo as bar;
/// ```
fn define_macro(
&mut self,
module_id: CrateModuleId,
name: Name,
macro_: MacroDef,
export: bool,
) {
// Textual scoping
self.define_legacy_macro(module_id, name.clone(), macro_);
// Module scoping
// In Rust, `#[macro_export]` macros are unconditionally visible at the
// crate root, even if the parent modules is **not** visible.
if export {
self.update(self.def_map.root, None, &[(name, Resolution::from_macro(macro_))]);
}
}
/// Define a legacy textual-scoped macro in the module.
///
/// We use a map `legacy_macros` to store all legacy textual-scoped macros visible per module.
/// It clones all macros from the parent's legacy scope whose definitions come before
/// the definition of the current module.
/// Also, `#[macro_use]` on a module imports all legacy macros visible inside it into the
/// current legacy scope, with possible shadowing.
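///
/// For example (a sketch), this compiles because the child module's legacy scope is
/// cloned from the parent at the point of its definition:
/// ```rust
/// macro_rules! foo { () => {} }
/// mod m { foo!(); }
/// ```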
fn define_legacy_macro(&mut self, module_id: CrateModuleId, name: Name, macro_: MacroDef) {
// Always shadowing
self.def_map.modules[module_id].scope.legacy_macros.insert(name, macro_);
}
/// Import macros from `#[macro_use] extern crate`.
fn import_macros_from_extern_crate(
&mut self,
current_module_id: CrateModuleId,
import: &raw::ImportData,
) {
log::debug!(
"importing macros from extern crate: {:?} ({:?})",
import,
self.def_map.edition,
);
let res = self.def_map.resolve_name_in_extern_prelude(
&import
.path
.as_ident()
.expect("extern crate should have been desugared to one-element path"),
);
if let Some(ModuleDef::Module(m)) = res.take_types() {
tested_by!(macro_rules_from_other_crates_are_visible_with_macro_use);
self.import_all_macros_exported(current_module_id, m.krate);
}
}
/// Import all exported macros from another crate
///
/// Exported macros are just all macros in the root module scope.
/// Note that it contains not only all `#[macro_export]` macros, but also all aliases
/// created by `use` in the root module, ignoring the visibility of `use`.
fn import_all_macros_exported(&mut self, current_module_id: CrateModuleId, krate: Crate) {
let def_map = self.db.crate_def_map(krate);
for (name, def) in def_map[def_map.root].scope.macros() {
// `macro_use` only bring things into legacy scope.
self.define_legacy_macro(current_module_id, name.clone(), def);
}
}
fn resolve_imports(&mut self) -> ReachedFixedPoint {
let mut imports = std::mem::replace(&mut self.unresolved_imports, Vec::new());
let mut resolved = Vec::new();
imports.retain(|(module_id, import, import_data)| {
let (def, fp) = self.resolve_import(*module_id, import_data);
if fp == ReachedFixedPoint::Yes {
resolved.push((*module_id, def, *import, import_data.clone()))
}
fp == ReachedFixedPoint::No
});
self.unresolved_imports = imports;
// Resolves imports, filling-in module scopes
let result =
if resolved.is_empty() { ReachedFixedPoint::Yes } else { ReachedFixedPoint::No };
for (module_id, def, import, import_data) in resolved {
self.record_resolved_import(module_id, def, import, &import_data)
}
result
}
fn resolve_import(
&self,
module_id: CrateModuleId,
import: &raw::ImportData,
) -> (PerNs, ReachedFixedPoint) {
log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition);
if import.is_extern_crate {
let res = self.def_map.resolve_name_in_extern_prelude(
&import
.path
.as_ident()
.expect("extern crate should have been desugared to one-element path"),
);
(res, ReachedFixedPoint::Yes)
} else {
let res = self.def_map.resolve_path_fp_with_macro(
self.db,
ResolveMode::Import,
module_id,
&import.path,
);
(res.resolved_def, res.reached_fixedpoint)
}
}
fn record_resolved_import(
&mut self,
module_id: CrateModuleId,
def: PerNs,
import_id: raw::ImportId,
import: &raw::ImportData,
) {
if import.is_glob {
log::debug!("glob import: {:?}", import);
match def.take_types() {
Some(ModuleDef::Module(m)) => {
if import.is_prelude {
tested_by!(std_prelude);
self.def_map.prelude = Some(m);
} else if m.krate != self.def_map.krate {
tested_by!(glob_across_crates);
// glob import from other crate => we can just import everything once
let item_map = self.db.crate_def_map(m.krate);
let scope = &item_map[m.module_id].scope;
// Module scoped macros are included
let items = scope
.items
.iter()
.map(|(name, res)| (name.clone(), res.clone()))
.collect::<Vec<_>>();
self.update(module_id, Some(import_id), &items);
} else {
// glob import from same crate => we do an initial
// import, and then need to propagate any further
// additions
let scope = &self.def_map[m.module_id].scope;
// Module scoped macros are included
let items = scope
.items
.iter()
.map(|(name, res)| (name.clone(), res.clone()))
.collect::<Vec<_>>();
self.update(module_id, Some(import_id), &items);
// record the glob import in case we add further items
self.glob_imports
.entry(m.module_id)
.or_default()
.push((module_id, import_id));
}
}
Some(ModuleDef::Adt(Adt::Enum(e))) => {
tested_by!(glob_enum);
// glob import from enum => just import all the variants
let variants = e.variants(self.db);
let resolutions = variants
.into_iter()
.filter_map(|variant| {
let res = Resolution {
def: PerNs::both(variant.into(), variant.into()),
import: Some(import_id),
};
let name = variant.name(self.db)?;
Some((name, res))
})
.collect::<Vec<_>>();
self.update(module_id, Some(import_id), &resolutions);
}
Some(d) => {
log::debug!("glob import {:?} from non-module/enum {:?}", import, d);
}
None => {
log::debug!("glob import {:?} didn't resolve as type", import);
}
}
} else {
match import.path.segments.last() {
Some(last_segment) => {
let name = import.alias.clone().unwrap_or_else(|| last_segment.name.clone());
log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
if import.is_extern_crate && module_id == self.def_map.root {
if let Some(def) = def.take_types() {
self.def_map.extern_prelude.insert(name.clone(), def);
}
}
let resolution = Resolution { def, import: Some(import_id) };
self.update(module_id, Some(import_id), &[(name, resolution)]);
}
None => tested_by!(bogus_paths),
}
}
}
fn update(
&mut self,
module_id: CrateModuleId,
import: Option<raw::ImportId>,
resolutions: &[(Name, Resolution)],
) {
self.update_recursive(module_id, import, resolutions, 0)
}
fn update_recursive(
&mut self,
module_id: CrateModuleId,
import: Option<raw::ImportId>,
resolutions: &[(Name, Resolution)],
depth: usize,
) {
if depth > 100 {
// prevent stack overflows (but this shouldn't be possible)
panic!("infinite recursion in glob imports!");
}
let module_items = &mut self.def_map.modules[module_id].scope;
let mut changed = false;
for (name, res) in resolutions {
let existing = module_items.items.entry(name.clone()).or_default();
if existing.def.types.is_none() && res.def.types.is_some() {
existing.def.types = res.def.types;
existing.import = import.or(res.import);
changed = true;
}
if existing.def.values.is_none() && res.def.values.is_some() {
existing.def.values = res.def.values;
existing.import = import.or(res.import);
changed = true;
}
if existing.def.macros.is_none() && res.def.macros.is_some() {
existing.def.macros = res.def.macros;
existing.import = import.or(res.import);
changed = true;
}
if existing.def.is_none()
&& res.def.is_none()
&& existing.import.is_none()
&& res.import.is_some()
{
existing.import = res.import;
}
}
if !changed {
return;
}
let glob_imports = self
.glob_imports
.get(&module_id)
.into_iter()
.flat_map(|v| v.iter())
.cloned()
.collect::<Vec<_>>();
for (glob_importing_module, glob_import) in glob_imports {
// We pass the glob import so that the tracked import in those modules is that glob import
self.update_recursive(glob_importing_module, Some(glob_import), resolutions, depth + 1);
}
}
fn resolve_macros(&mut self) -> ReachedFixedPoint {
let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new());
let mut resolved = Vec::new();
let mut res = ReachedFixedPoint::Yes;
macros.retain(|(module_id, ast_id, path)| {
let resolved_res = self.def_map.resolve_path_fp_with_macro(
self.db,
ResolveMode::Other,
*module_id,
path,
);
if let Some(def) = resolved_res.resolved_def.get_macros() {
let call_id = MacroCallLoc { def: def.id, ast_id: *ast_id }.id(self.db);
resolved.push((*module_id, call_id, def.id));
res = ReachedFixedPoint::No;
return false;
}
true
});
self.unexpanded_macros = macros;
for (module_id, macro_call_id, macro_def_id) in resolved {
self.collect_macro_expansion(module_id, macro_call_id, macro_def_id);
}
res
}
fn collect_macro_expansion(
&mut self,
module_id: CrateModuleId,
macro_call_id: MacroCallId,
macro_def_id: MacroDefId,
) {
if self.def_map.poison_macros.contains(¯o_def_id) {
return;
}
self.macro_stack_monitor.increase(macro_def_id);
if !self.macro_stack_monitor.is_poison(macro_def_id) {
let file_id: HirFileId = macro_call_id.as_file(MacroFileKind::Items);
let raw_items = self.db.raw_items(file_id);
ModCollector {
def_collector: &mut *self,
file_id,
attr_path: None,
module_id,
raw_items: &raw_items,
parent_module: None,
}
.collect(raw_items.items());
} else {
log::error!("Too deep macro expansion: {:?}", macro_call_id);
self.def_map.poison_macros.insert(macro_def_id);
}
self.macro_stack_monitor.decrease(macro_def_id);
}
fn finish(self) -> CrateDefMap {
self.def_map
}
}
/// Walks a single module, populating defs, imports and macros
struct ModCollector<'a, D> {
def_collector: D,
module_id: CrateModuleId,
file_id: HirFileId,
attr_path: Option<&'a SmolStr>,
raw_items: &'a raw::RawItems,
parent_module: Option<ParentModule<'a>>,
}
impl<DB> ModCollector<'_, &'_ mut DefCollector<&'_ DB>>
where
DB: DefDatabase,
{
fn collect(&mut self, items: &[raw::RawItem]) {
// Prelude module is always considered to be `#[macro_use]`.
if let Some(prelude_module) = self.def_collector.def_map.prelude {
if prelude_module.krate != self.def_collector.def_map.krate {
tested_by!(prelude_is_macro_use);
self.def_collector.import_all_macros_exported(self.module_id, prelude_module.krate);
}
}
// This should be processed eagerly instead of deferred to resolving.
// `#[macro_use] extern crate` is hoisted to imports macros before collecting
// any other items.
for item in items {
if let raw::RawItem::Import(import_id) = *item {
let import = self.raw_items[import_id].clone();
if import.is_extern_crate && import.is_macro_use {
self.def_collector.import_macros_from_extern_crate(self.module_id, &import);
}
}
}
for item in items {
match *item {
raw::RawItem::Module(m) => self.collect_module(&self.raw_items[m]),
raw::RawItem::Import(import_id) => self.def_collector.unresolved_imports.push((
self.module_id,
import_id,
self.raw_items[import_id].clone(),
)),
raw::RawItem::Def(def) => self.define_def(&self.raw_items[def]),
raw::RawItem::Macro(mac) => self.collect_macro(&self.raw_items[mac]),
}
}
}
fn collect_module(&mut self, module: &raw::ModuleData) {
match module {
// inline module, just recurse
raw::ModuleData::Definition { name, items, ast_id, attr_path, is_macro_use } => {
let module_id =
self.push_child_module(name.clone(), ast_id.with_file_id(self.file_id), None);
let parent_module = ParentModule { name, attr_path: attr_path.as_ref() };
ModCollector {
def_collector: &mut *self.def_collector,
module_id,
attr_path: attr_path.as_ref(),
file_id: self.file_id,
raw_items: self.raw_items,
parent_module: Some(parent_module),
}
.collect(&*items);
if *is_macro_use {
self.import_all_legacy_macros(module_id);
}
}
// out of line module, resolve, parse and recurse
raw::ModuleData::Declaration { name, ast_id, attr_path, is_macro_use } => {
let ast_id = ast_id.with_file_id(self.file_id);
let is_root = self.def_collector.def_map.modules[self.module_id].parent.is_none();
match resolve_submodule(
self.def_collector.db,
self.file_id,
self.attr_path,
name,
is_root,
attr_path.as_ref(),
self.parent_module,
) {
Ok(file_id) => {
let module_id = self.push_child_module(name.clone(), ast_id, Some(file_id));
let raw_items = self.def_collector.db.raw_items(file_id.into());
ModCollector {
def_collector: &mut *self.def_collector,
module_id,
attr_path: attr_path.as_ref(),
file_id: file_id.into(),
raw_items: &raw_items,
parent_module: None,
}
.collect(raw_items.items());
if *is_macro_use {
self.import_all_legacy_macros(module_id);
}
}
Err(candidate) => self.def_collector.def_map.diagnostics.push(
DefDiagnostic::UnresolvedModule {
module: self.module_id,
declaration: ast_id,
candidate,
},
),
};
}
}
}
fn push_child_module(
&mut self,
name: Name,
declaration: AstId<ast::Module>,
definition: Option<FileId>,
) -> CrateModuleId {
let modules = &mut self.def_collector.def_map.modules;
let res = modules.alloc(ModuleData::default());
modules[res].parent = Some(self.module_id);
modules[res].declaration = Some(declaration);
modules[res].definition = definition;
modules[res].scope.legacy_macros = modules[self.module_id].scope.legacy_macros.clone();
modules[self.module_id].children.insert(name.clone(), res);
let resolution = Resolution {
def: PerNs::types(
Module { krate: self.def_collector.def_map.krate, module_id: res }.into(),
),
import: None,
};
self.def_collector.update(self.module_id, None, &[(name, resolution)]);
res
}
fn define_def(&mut self, def: &raw::DefData) {
let module = Module { krate: self.def_collector.def_map.krate, module_id: self.module_id };
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id);
macro_rules! def {
($kind:ident, $ast_id:ident) => {
$kind { id: AstItemDef::from_ast_id(ctx, $ast_id) }.into()
};
}
let name = def.name.clone();
let def: PerNs = match def.kind {
raw::DefKind::Function(ast_id) => PerNs::values(def!(Function, ast_id)),
raw::DefKind::Struct(ast_id) => {
let s = def!(Struct, ast_id);
PerNs::both(s, s)
}
raw::DefKind::Union(ast_id) => {
let s = def!(Union, ast_id);
PerNs::both(s, s)
}
raw::DefKind::Enum(ast_id) => PerNs::types(def!(Enum, ast_id)),
raw::DefKind::Const(ast_id) => PerNs::values(def!(Const, ast_id)),
raw::DefKind::Static(ast_id) => PerNs::values(def!(Static, ast_id)),
raw::DefKind::Trait(ast_id) => PerNs::types(def!(Trait, ast_id)),
raw::DefKind::TypeAlias(ast_id) => PerNs::types(def!(TypeAlias, ast_id)),
};
let resolution = Resolution { def, import: None };
self.def_collector.update(self.module_id, None, &[(name, resolution)])
}
fn collect_macro(&mut self, mac: &raw::MacroData) {
// Case 1: macro rules, define a macro in crate-global mutable scope
if is_macro_rules(&mac.path) {
if let Some(name) = &mac.name {
let macro_id = MacroDefId(mac.ast_id.with_file_id(self.file_id));
let macro_ = MacroDef { id: macro_id };
self.def_collector.define_macro(self.module_id, name.clone(), macro_, mac.export);
}
return;
}
let ast_id = mac.ast_id.with_file_id(self.file_id);
// Case 2: try to resolve in legacy scope and expand macro_rules, triggering
// recursive item collection.
if let Some(macro_def) = mac.path.as_ident().and_then(|name| {
self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
}) {
let def = macro_def.id;
let macro_call_id = MacroCallLoc { def, ast_id }.id(self.def_collector.db);
self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, def);
return;
}
// Case 3: resolve in module scope, expand during name resolution.
// We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only.
let mut path = mac.path.clone();
if path.is_ident() {
path.kind = PathKind::Self_;
}
self.def_collector.unexpanded_macros.push((self.module_id, ast_id, path));
}
fn import_all_legacy_macros(&mut self, module_id: CrateModuleId) {
let macros = self.def_collector.def_map[module_id].scope.legacy_macros.clone();
for (name, macro_) in macros {
self.def_collector.define_legacy_macro(self.module_id, name.clone(), macro_);
}
}
}
fn is_macro_rules(path: &Path) -> bool {
path.as_ident() == Some(&MACRO_RULES)
}
#[cfg(test)]
mod tests {
use ra_db::SourceDatabase;
use super::*;
use crate::{db::DefDatabase, mock::MockDatabase, Crate};
use ra_arena::Arena;
use rustc_hash::FxHashSet;
fn do_collect_defs(
db: &impl DefDatabase,
def_map: CrateDefMap,
monitor: MacroStackMonitor,
) -> CrateDefMap {
let mut collector = DefCollector {
db,
def_map,
glob_imports: FxHashMap::default(),
unresolved_imports: Vec::new(),
unexpanded_macros: Vec::new(),
macro_stack_monitor: monitor,
};
collector.collect();
collector.finish()
}
fn do_limited_resolve(code: &str, limit: u32, poison_limit: u32) -> CrateDefMap {
let (db, _source_root, _) = MockDatabase::with_single_file(&code);
let crate_id = db.crate_graph().iter().next().unwrap();
let krate = Crate { crate_id };
let def_map = {
let edition = krate.edition(&db);
let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default();
let root = modules.alloc(ModuleData::default());
CrateDefMap {
krate,
edition,
extern_prelude: FxHashMap::default(),
prelude: None,
root,
modules,
poison_macros: FxHashSet::default(),
diagnostics: Vec::new(),
}
};
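        // The validator below is called with the current macro expansion
        // depth: the assert aborts the test if the depth ever reaches `limit`,
        // while returning `true` once the depth reaches `poison_limit` flags
        // the expansion for poisoning (checked via `poison_macros` in the
        // tests below).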
let mut monitor = MacroStackMonitor::default();
monitor.validator = Some(Box::new(move |count| {
assert!(count < limit);
count >= poison_limit
}));
do_collect_defs(&db, def_map, monitor)
}
#[test]
fn test_macro_expand_limit_width() {
do_limited_resolve(
r#"
macro_rules! foo {
($($ty:ty)*) => { foo!($($ty)*, $($ty)*); }
}
foo!(KABOOM);
"#,
16,
1000,
);
}
#[test]
fn test_macro_expand_poisoned() {
let def = do_limited_resolve(
r#"
macro_rules! foo {
($ty:ty) => { foo!($ty); }
}
foo!(KABOOM);
"#,
100,
16,
);
assert_eq!(def.poison_macros.len(), 1);
}
#[test]
fn test_macro_expand_normal() {
let def = do_limited_resolve(
r#"
macro_rules! foo {
($ident:ident) => { struct $ident {} }
}
foo!(Bar);
"#,
16,
16,
);
assert_eq!(def.poison_macros.len(), 0);
}
}
| 36.897404 | 136 | 0.535544 |
d68b8dbee5d5ddc82b6660db24ce4e17f23144a2 | 1,716 | fn print(count: &mut usize, id: usize, layout: &layout::tree::LayoutR) {
*count += 1;
debug_println!("result: {:?} {:?} {:?}", *count, id, layout);
}
pub fn compute() {
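    // The tree built below: node 1 is an absolutely positioned 1920x1024 root,
    // node 2 is a 110x100 child of node 1 with centered justify-content, and
    // node 3 is an absolutely positioned 60x40 child of node 2. `compute` lays
    // the tree out and prints every node's result via `print`.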
let mut layout_tree = layout::tree::LayoutTree::default();
layout_tree.insert(
1,
0,
0,
layout::idtree::InsertType::Back,
layout::style::Style {
position_type: layout::style::PositionType::Absolute,
size: layout::geometry::Size {
width: layout::style::Dimension::Points(1920.0),
height: layout::style::Dimension::Points(1024.0),
},
..Default::default()
},
);
layout_tree.insert(
2,
1,
0,
layout::idtree::InsertType::Back,
layout::style::Style {
justify_content: layout::style::JustifyContent::Center,
size: layout::geometry::Size {
width: layout::style::Dimension::Points(110f32),
height: layout::style::Dimension::Points(100f32),
..Default::default()
},
..Default::default()
},
);
layout_tree.insert(
3,
2,
0,
layout::idtree::InsertType::Back,
layout::style::Style {
position_type: layout::style::PositionType::Absolute,
size: layout::geometry::Size {
width: layout::style::Dimension::Points(60f32),
height: layout::style::Dimension::Points(40f32),
..Default::default()
},
..Default::default()
},
);
layout_tree.compute(print, &mut 0);
}
| 32.377358 | 73 | 0.494172 |
e59584148339de63856feed192b97507c5f19f56 | 1,108 | use super::Html;
use crate::{http::StatusCode, IntoResponse, Response};
/// Template response using [`askama`](https://crates.io/crates/askama).
#[cfg_attr(docsrs, doc(cfg(feature = "template")))]
pub struct Template<T>(pub T);
impl<T: askama::Template + Send> IntoResponse for Template<T> {
fn into_response(self) -> Response {
match self.0.render() {
Ok(s) => s.into_response(),
Err(err) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(err.to_string()),
}
}
}
/// Template response with content-type "text/html" using [`askama`](https://crates.io/crates/askama).
#[cfg_attr(docsrs, doc(cfg(feature = "template")))]
pub struct HtmlTemplate<T>(pub T);
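// A minimal usage sketch (the handler and template type below are
// illustrative, not part of this crate), assuming the `askama` derive macro:
//
//     #[derive(askama::Template)]
//     #[template(source = "Hello, {{ name }}!", ext = "html")]
//     struct Hello { name: String }
//
//     async fn index() -> impl IntoResponse {
//         HtmlTemplate(Hello { name: "world".to_string() })
//     }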
impl<T: askama::Template + Send> IntoResponse for HtmlTemplate<T> {
fn into_response(self) -> Response {
match self.0.render() {
Ok(s) => Html(s).into_response(),
Err(err) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(err.to_string()),
}
}
}
| 33.575758 | 102 | 0.603791 |
2111c396e430cf0222a4adfe548661f0013c0300 | 325 | #![warn(rust_2018_idioms)]
use tokio_io::AsyncReadExt;
use tokio_test::assert_ok;
#[tokio::test]
async fn read_exact() {
let mut buf = Box::new([0; 8]);
let mut rd: &[u8] = b"hello world";
let n = assert_ok!(rd.read_exact(&mut buf[..]).await);
assert_eq!(n, 8);
assert_eq!(buf[..], b"hello wo"[..]);
}
| 21.666667 | 58 | 0.603077 |
901b73c610e3b0f6fe82d1fd7c71d7f15990379e | 60,962 | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This pass type-checks the MIR to ensure it is not broken.
#![allow(unreachable_code)]
use borrow_check::nll::region_infer::Cause;
use borrow_check::nll::region_infer::ClosureRegionRequirementsExt;
use borrow_check::nll::universal_regions::UniversalRegions;
use dataflow::FlowAtLocation;
use dataflow::MaybeInitializedLvals;
use dataflow::move_paths::MoveData;
use rustc::hir::def_id::DefId;
use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
use rustc::infer::region_constraints::{GenericKind, RegionConstraintData};
use rustc::traits::{self, FulfillmentContext};
use rustc::ty::error::TypeError;
use rustc::ty::fold::TypeFoldable;
use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeVariants};
use rustc::middle::const_val::ConstVal;
use rustc::mir::*;
use rustc::mir::tcx::PlaceTy;
use rustc::mir::visit::{PlaceContext, Visitor};
use std::fmt;
use syntax::ast;
use syntax_pos::{Span, DUMMY_SP};
use transform::{MirPass, MirSource};
use util::liveness::LivenessResults;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::indexed_vec::Idx;
macro_rules! span_mirbug {
($context:expr, $elem:expr, $($message:tt)*) => ({
$crate::borrow_check::nll::type_check::mirbug(
$context.tcx(),
$context.last_span,
&format!(
"broken MIR in {:?} ({:?}): {}",
$context.body_id,
$elem,
format_args!($($message)*),
),
)
})
}
macro_rules! span_mirbug_and_err {
($context:expr, $elem:expr, $($message:tt)*) => ({
{
span_mirbug!($context, $elem, $($message)*);
$context.error()
}
})
}
mod liveness;
mod input_output;
/// Type checks the given `mir` in the context of the inference
/// context `infcx`. Returns any region constraints that have yet to
/// be proven. This result includes liveness constraints that
/// ensure that regions appearing in the types of all local variables
/// are live at all points where that local variable may later be
/// used.
///
/// This phase of type-check ought to be infallible -- this is because
/// the original, HIR-based type-check succeeded. So if any errors
/// occur here, we will get a `bug!` reported.
///
/// # Parameters
///
/// - `infcx` -- inference context to use
/// - `param_env` -- parameter environment to use for trait solving
/// - `mir` -- MIR to type-check
/// - `mir_def_id` -- DefId from which the MIR is derived (must be local)
/// - `region_bound_pairs` -- the implied outlives obligations between type parameters
/// and lifetimes (e.g., `&'a T` implies `T: 'a`)
/// - `implicit_region_bound` -- a region which all generic parameters are assumed
/// to outlive; should represent the fn body
/// - `input_tys` -- fully liberated, but **not** normalized, expected types of the arguments;
/// the types of the input parameters found in the MIR itself will be equated with these
/// - `output_ty` -- fully liberated, but **not** normalized, expected return type;
/// the type for the RETURN_PLACE will be equated with this
/// - `liveness` -- results of a liveness computation on the MIR; used to create liveness
/// constraints for the regions in the types of variables
/// - `flow_inits` -- results of a maybe-init dataflow analysis
/// - `move_data` -- move-data constructed when performing the maybe-init dataflow analysis
pub(crate) fn type_check<'gcx, 'tcx>(
infcx: &InferCtxt<'_, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'gcx>,
mir: &Mir<'tcx>,
mir_def_id: DefId,
universal_regions: &UniversalRegions<'tcx>,
liveness: &LivenessResults,
flow_inits: &mut FlowAtLocation<MaybeInitializedLvals<'_, 'gcx, 'tcx>>,
move_data: &MoveData<'tcx>,
) -> MirTypeckRegionConstraints<'tcx> {
let body_id = infcx.tcx.hir.as_local_node_id(mir_def_id).unwrap();
let implicit_region_bound = infcx.tcx.mk_region(ty::ReVar(universal_regions.fr_fn_body));
type_check_internal(
infcx,
body_id,
param_env,
mir,
&universal_regions.region_bound_pairs,
Some(implicit_region_bound),
&mut |cx| {
liveness::generate(cx, mir, liveness, flow_inits, move_data);
cx.equate_inputs_and_outputs(mir, mir_def_id, universal_regions);
},
)
}
fn type_check_internal<'gcx, 'tcx>(
infcx: &InferCtxt<'_, 'gcx, 'tcx>,
body_id: ast::NodeId,
param_env: ty::ParamEnv<'gcx>,
mir: &Mir<'tcx>,
region_bound_pairs: &[(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
extra: &mut FnMut(&mut TypeChecker<'_, 'gcx, 'tcx>),
) -> MirTypeckRegionConstraints<'tcx> {
let mut checker = TypeChecker::new(
infcx,
body_id,
param_env,
region_bound_pairs,
implicit_region_bound,
);
let errors_reported = {
let mut verifier = TypeVerifier::new(&mut checker, mir);
verifier.visit_mir(mir);
verifier.errors_reported
};
if !errors_reported {
// if verifier failed, don't do further checks to avoid ICEs
checker.typeck_mir(mir);
}
extra(&mut checker);
checker.constraints
}
fn mirbug(tcx: TyCtxt, span: Span, msg: &str) {
// We sometimes see MIR failures (notably predicate failures) due to
// the fact that we check rvalue sized predicates here. So use `delay_span_bug`
// to avoid reporting bugs in those cases.
tcx.sess.diagnostic().delay_span_bug(span, msg);
}
enum FieldAccessError {
OutOfRange { field_count: usize },
}
/// Verifies that MIR types are sane to not crash further checks.
///
/// The sanitize_XYZ methods here take an MIR object and compute its
/// type, calling `span_mirbug` and returning an error type if there
/// is a problem.
struct TypeVerifier<'a, 'b: 'a, 'gcx: 'b + 'tcx, 'tcx: 'b> {
cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>,
mir: &'a Mir<'tcx>,
last_span: Span,
body_id: ast::NodeId,
errors_reported: bool,
}
impl<'a, 'b, 'gcx, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'gcx, 'tcx> {
fn visit_span(&mut self, span: &Span) {
if *span != DUMMY_SP {
self.last_span = *span;
}
}
fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
self.sanitize_place(place, location, context);
}
fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
self.super_constant(constant, location);
self.sanitize_constant(constant, location);
self.sanitize_type(constant, constant.ty);
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
self.super_rvalue(rvalue, location);
let rval_ty = rvalue.ty(self.mir, self.tcx());
self.sanitize_type(rvalue, rval_ty);
}
fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
self.super_local_decl(local, local_decl);
self.sanitize_type(local_decl, local_decl.ty);
}
fn visit_mir(&mut self, mir: &Mir<'tcx>) {
self.sanitize_type(&"return type", mir.return_ty());
for local_decl in &mir.local_decls {
self.sanitize_type(local_decl, local_decl.ty);
}
if self.errors_reported {
return;
}
self.super_mir(mir);
}
}
impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> {
fn new(cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, mir: &'a Mir<'tcx>) -> Self {
TypeVerifier {
mir,
body_id: cx.body_id,
cx,
last_span: mir.span,
errors_reported: false,
}
}
fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
self.cx.infcx.tcx
}
fn sanitize_type(&mut self, parent: &fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> {
if ty.has_escaping_regions() || ty.references_error() {
span_mirbug_and_err!(self, parent, "bad type {:?}", ty)
} else {
ty
}
}
/// Checks that the constant's `ty` field matches up with what
/// would be expected from its literal.
fn sanitize_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
debug!(
"sanitize_constant(constant={:?}, location={:?})",
constant,
location
);
let expected_ty = match constant.literal {
Literal::Value { value } => {
// FIXME(#46702) -- We need some way to get the predicates
// associated with the "pre-evaluated" form of the
// constant. For example, consider that the constant
// may have associated constant projections (`<Foo as
// Trait<'a, 'b>>::SOME_CONST`) that impose
// constraints on `'a` and `'b`. These constraints
// would be lost if we just look at the normalized
// value.
if let ConstVal::Function(def_id, ..) = value.val {
let tcx = self.tcx();
let type_checker = &mut self.cx;
// FIXME -- For now, use the substitutions from
// `value.ty` rather than `value.val`. The
// renumberer will rewrite them to independent
// sets of regions; in principle, we ought to
// derive the type of the `value.val` from "first
// principles" and equate with value.ty, but as we
// are transitioning to the miri-based system, we
// don't have a handy function for that, so for
// now we just ignore `value.val` regions.
let substs = match value.ty.sty {
ty::TyFnDef(ty_def_id, substs) => {
assert_eq!(def_id, ty_def_id);
substs
}
_ => span_bug!(
self.last_span,
"unexpected type for constant function: {:?}",
value.ty
),
};
let instantiated_predicates =
tcx.predicates_of(def_id).instantiate(tcx, substs);
let predicates =
type_checker.normalize(&instantiated_predicates.predicates, location);
type_checker.prove_predicates(&predicates, location);
}
value.ty
}
Literal::Promoted { .. } => {
// FIXME -- promoted MIR return types reference
// various "free regions" (e.g., scopes and things)
// that they ought not to do. We have to figure out
// how best to handle that -- probably we want treat
// promoted MIR much like closures, renumbering all
// their free regions and propagating constraints
// upwards. We have the same acyclic guarantees, so
// that should be possible. But for now, ignore them.
//
// let promoted_mir = &self.mir.promoted[index];
// promoted_mir.return_ty()
return;
}
};
debug!("sanitize_constant: expected_ty={:?}", expected_ty);
if let Err(terr) = self.cx
.eq_types(expected_ty, constant.ty, location.at_self())
{
span_mirbug!(
self,
constant,
"constant {:?} should have type {:?} but has {:?} ({:?})",
constant,
expected_ty,
constant.ty,
terr,
);
}
}
/// Checks that the types internal to the `place` match up with
/// what would be expected.
fn sanitize_place(
&mut self,
place: &Place<'tcx>,
location: Location,
context: PlaceContext,
) -> PlaceTy<'tcx> {
debug!("sanitize_place: {:?}", place);
let place_ty = match *place {
Place::Local(index) => PlaceTy::Ty {
ty: self.mir.local_decls[index].ty,
},
Place::Static(box Static { def_id, ty: sty }) => {
let sty = self.sanitize_type(place, sty);
let ty = self.tcx().type_of(def_id);
let ty = self.cx.normalize(&ty, location);
if let Err(terr) = self.cx.eq_types(ty, sty, location.at_self()) {
span_mirbug!(
self,
place,
"bad static type ({:?}: {:?}): {:?}",
ty,
sty,
terr
);
}
PlaceTy::Ty { ty: sty }
}
Place::Projection(ref proj) => {
let base_context = if context.is_mutating_use() {
PlaceContext::Projection(Mutability::Mut)
} else {
PlaceContext::Projection(Mutability::Not)
};
let base_ty = self.sanitize_place(&proj.base, location, base_context);
if let PlaceTy::Ty { ty } = base_ty {
if ty.references_error() {
assert!(self.errors_reported);
return PlaceTy::Ty {
ty: self.tcx().types.err,
};
}
}
self.sanitize_projection(base_ty, &proj.elem, place, location)
}
};
if let PlaceContext::Copy = context {
let ty = place_ty.to_ty(self.tcx());
if self.cx
.infcx
.type_moves_by_default(self.cx.param_env, ty, DUMMY_SP)
{
span_mirbug!(self, place, "attempted copy of non-Copy type ({:?})", ty);
}
}
place_ty
}
fn sanitize_projection(
&mut self,
base: PlaceTy<'tcx>,
pi: &PlaceElem<'tcx>,
place: &Place<'tcx>,
location: Location,
) -> PlaceTy<'tcx> {
debug!("sanitize_projection: {:?} {:?} {:?}", base, pi, place);
let tcx = self.tcx();
let base_ty = base.to_ty(tcx);
match *pi {
ProjectionElem::Deref => {
let deref_ty = base_ty.builtin_deref(true, ty::LvaluePreference::NoPreference);
PlaceTy::Ty {
ty: deref_ty.map(|t| t.ty).unwrap_or_else(|| {
span_mirbug_and_err!(self, place, "deref of non-pointer {:?}", base_ty)
}),
}
}
ProjectionElem::Index(i) => {
let index_ty = Place::Local(i).ty(self.mir, tcx).to_ty(tcx);
if index_ty != tcx.types.usize {
PlaceTy::Ty {
ty: span_mirbug_and_err!(self, i, "index by non-usize {:?}", i),
}
} else {
PlaceTy::Ty {
ty: base_ty.builtin_index().unwrap_or_else(|| {
span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty)
}),
}
}
}
ProjectionElem::ConstantIndex { .. } => {
// consider verifying in-bounds
PlaceTy::Ty {
ty: base_ty.builtin_index().unwrap_or_else(|| {
span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty)
}),
}
}
ProjectionElem::Subslice { from, to } => PlaceTy::Ty {
ty: match base_ty.sty {
ty::TyArray(inner, size) => {
let size = size.val.to_const_int().unwrap().to_u64().unwrap();
let min_size = (from as u64) + (to as u64);
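                        // e.g. subslicing `[T; 10]` with `from = 2, to = 3`
                        // leaves an array of length 10 - (2 + 3) = 5.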
if let Some(rest_size) = size.checked_sub(min_size) {
tcx.mk_array(inner, rest_size)
} else {
span_mirbug_and_err!(
self,
place,
"taking too-small slice of {:?}",
base_ty
)
}
}
ty::TySlice(..) => base_ty,
_ => span_mirbug_and_err!(self, place, "slice of non-array {:?}", base_ty),
},
},
ProjectionElem::Downcast(adt_def1, index) => match base_ty.sty {
ty::TyAdt(adt_def, substs) if adt_def.is_enum() && adt_def == adt_def1 => {
if index >= adt_def.variants.len() {
PlaceTy::Ty {
ty: span_mirbug_and_err!(
self,
place,
"cast to variant #{:?} but enum only has {:?}",
index,
adt_def.variants.len()
),
}
} else {
PlaceTy::Downcast {
adt_def,
substs,
variant_index: index,
}
}
}
_ => PlaceTy::Ty {
ty: span_mirbug_and_err!(
self,
place,
"can't downcast {:?} as {:?}",
base_ty,
adt_def1
),
},
},
ProjectionElem::Field(field, fty) => {
let fty = self.sanitize_type(place, fty);
match self.field_ty(place, base, field, location) {
Ok(ty) => if let Err(terr) = self.cx.eq_types(ty, fty, location.at_self()) {
span_mirbug!(
self,
place,
"bad field access ({:?}: {:?}): {:?}",
ty,
fty,
terr
);
},
Err(FieldAccessError::OutOfRange { field_count }) => span_mirbug!(
self,
place,
"accessed field #{} but variant only has {}",
field.index(),
field_count
),
}
PlaceTy::Ty { ty: fty }
}
}
}
fn error(&mut self) -> Ty<'tcx> {
self.errors_reported = true;
self.tcx().types.err
}
fn field_ty(
&mut self,
parent: &fmt::Debug,
base_ty: PlaceTy<'tcx>,
field: Field,
location: Location,
) -> Result<Ty<'tcx>, FieldAccessError> {
let tcx = self.tcx();
let (variant, substs) = match base_ty {
PlaceTy::Downcast {
adt_def,
substs,
variant_index,
} => (&adt_def.variants[variant_index], substs),
PlaceTy::Ty { ty } => match ty.sty {
ty::TyAdt(adt_def, substs) if !adt_def.is_enum() => (&adt_def.variants[0], substs),
ty::TyClosure(def_id, substs) => {
return match substs.upvar_tys(def_id, tcx).nth(field.index()) {
Some(ty) => Ok(ty),
None => Err(FieldAccessError::OutOfRange {
field_count: substs.upvar_tys(def_id, tcx).count(),
}),
}
}
ty::TyGenerator(def_id, substs, _) => {
// Try upvars first. `field_tys` requires final optimized MIR.
if let Some(ty) = substs.upvar_tys(def_id, tcx).nth(field.index()) {
return Ok(ty);
}
return match substs.field_tys(def_id, tcx).nth(field.index()) {
Some(ty) => Ok(ty),
None => Err(FieldAccessError::OutOfRange {
field_count: substs.field_tys(def_id, tcx).count() + 1,
}),
};
}
ty::TyTuple(tys, _) => {
return match tys.get(field.index()) {
Some(&ty) => Ok(ty),
None => Err(FieldAccessError::OutOfRange {
field_count: tys.len(),
}),
}
}
_ => {
return Ok(span_mirbug_and_err!(
self,
parent,
"can't project out of {:?}",
base_ty
))
}
},
};
if let Some(field) = variant.fields.get(field.index()) {
Ok(self.cx.normalize(&field.ty(tcx, substs), location))
} else {
Err(FieldAccessError::OutOfRange {
field_count: variant.fields.len(),
})
}
}
}
/// The MIR type checker. Visits the MIR and enforces all the
/// constraints needed for it to be valid and well-typed. Along the
/// way, it accrues region constraints -- these can later be used by
/// NLL region checking.
struct TypeChecker<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'gcx>,
last_span: Span,
body_id: ast::NodeId,
region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
reported_errors: FxHashSet<(Ty<'tcx>, Span)>,
constraints: MirTypeckRegionConstraints<'tcx>,
}
/// A collection of region constraints that must be satisfied for the
/// program to be considered well-typed.
#[derive(Default)]
pub(crate) struct MirTypeckRegionConstraints<'tcx> {
/// In general, the type-checker is not responsible for enforcing
/// liveness constraints; this job falls to the region inferencer,
/// which performs a liveness analysis. However, in some limited
/// cases, the MIR type-checker creates temporary regions that do
/// not otherwise appear in the MIR -- in particular, the
/// late-bound regions that it instantiates at call-sites -- and
/// hence it must report on their liveness constraints.
pub liveness_set: Vec<(ty::Region<'tcx>, Location, Cause)>,
/// During the course of type-checking, we will accumulate region
/// constraints due to performing subtyping operations or solving
/// traits. These are accumulated into this vector for later use.
pub outlives_sets: Vec<OutlivesSet<'tcx>>,
}
/// Outlives relationships between regions and types created at a
/// particular point within the control-flow graph.
pub struct OutlivesSet<'tcx> {
/// The locations associated with these constraints.
pub locations: Locations,
/// Constraints generated. In terms of the NLL RFC, when you have
/// a constraint `R1: R2 @ P`, the data in there specifies things
/// like `R1: R2`.
pub data: RegionConstraintData<'tcx>,
}
#[derive(Copy, Clone, Debug)]
pub struct Locations {
/// The location in the MIR that generated these constraints.
/// This is intended for error reporting and diagnosis; the
/// constraints may *take effect* at a distinct spot.
pub from_location: Location,
/// The constraints must be met at this location. In terms of the
/// NLL RFC, when you have a constraint `R1: R2 @ P`, this field
/// is the `P` value.
pub at_location: Location,
}
impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
fn new(
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
body_id: ast::NodeId,
param_env: ty::ParamEnv<'gcx>,
region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
) -> Self {
TypeChecker {
infcx,
last_span: DUMMY_SP,
body_id,
param_env,
region_bound_pairs,
implicit_region_bound,
reported_errors: FxHashSet(),
constraints: MirTypeckRegionConstraints::default(),
}
}
fn misc(&self, span: Span) -> traits::ObligationCause<'tcx> {
traits::ObligationCause::misc(span, self.body_id)
}
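    /// Runs an inference operation, immediately selects the obligations it
    /// produces, and moves any region constraints that were generated into
    /// `self.constraints.outlives_sets`, tagged with `locations`. Sub/eq-typing,
    /// predicate proving, and normalization all funnel through here so that
    /// NLL region inference sees their constraints.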
fn fully_perform_op<OP, R>(
&mut self,
locations: Locations,
op: OP,
) -> Result<R, TypeError<'tcx>>
where
OP: FnOnce(&mut Self) -> InferResult<'tcx, R>,
{
let mut fulfill_cx = FulfillmentContext::new();
let InferOk { value, obligations } = self.infcx.commit_if_ok(|_| op(self))?;
fulfill_cx.register_predicate_obligations(self.infcx, obligations);
if let Err(e) = fulfill_cx.select_all_or_error(self.infcx) {
span_mirbug!(self, "", "errors selecting obligation: {:?}", e);
}
self.infcx.process_registered_region_obligations(
self.region_bound_pairs,
self.implicit_region_bound,
self.param_env,
self.body_id,
);
let data = self.infcx.take_and_reset_region_constraints();
if !data.is_empty() {
self.constraints
.outlives_sets
.push(OutlivesSet { locations, data });
}
Ok(value)
}
fn sub_types(
&mut self,
sub: Ty<'tcx>,
sup: Ty<'tcx>,
locations: Locations,
) -> UnitResult<'tcx> {
self.fully_perform_op(locations, |this| {
this.infcx
.at(&this.misc(this.last_span), this.param_env)
.sup(sup, sub)
})
}
fn eq_types(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, locations: Locations) -> UnitResult<'tcx> {
self.fully_perform_op(locations, |this| {
this.infcx
.at(&this.misc(this.last_span), this.param_env)
.eq(b, a)
})
}
fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
self.infcx.tcx
}
fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Location) {
debug!("check_stmt: {:?}", stmt);
let tcx = self.tcx();
match stmt.kind {
StatementKind::Assign(ref place, ref rv) => {
let place_ty = place.ty(mir, tcx).to_ty(tcx);
let rv_ty = rv.ty(mir, tcx);
if let Err(terr) =
self.sub_types(rv_ty, place_ty, location.at_successor_within_block())
{
span_mirbug!(
self,
stmt,
"bad assignment ({:?} = {:?}): {:?}",
place_ty,
rv_ty,
terr
);
}
self.check_rvalue(mir, rv, location);
}
StatementKind::SetDiscriminant {
ref place,
variant_index,
} => {
let place_type = place.ty(mir, tcx).to_ty(tcx);
let adt = match place_type.sty {
TypeVariants::TyAdt(adt, _) if adt.is_enum() => adt,
_ => {
span_bug!(
stmt.source_info.span,
"bad set discriminant ({:?} = {:?}): lhs is not an enum",
place,
variant_index
);
}
};
if variant_index >= adt.variants.len() {
span_bug!(
stmt.source_info.span,
"bad set discriminant ({:?} = {:?}): value of of range",
place,
variant_index
);
};
}
StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::InlineAsm { .. }
| StatementKind::EndRegion(_)
| StatementKind::Validate(..)
| StatementKind::Nop => {}
}
}
fn check_terminator(
&mut self,
mir: &Mir<'tcx>,
term: &Terminator<'tcx>,
term_location: Location,
) {
debug!("check_terminator: {:?}", term);
let tcx = self.tcx();
match term.kind {
TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::GeneratorDrop
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. }
| TerminatorKind::FalseEdges { .. } => {
// no checks needed for these
}
TerminatorKind::DropAndReplace {
ref location,
ref value,
target,
unwind,
} => {
let place_ty = location.ty(mir, tcx).to_ty(tcx);
let rv_ty = value.ty(mir, tcx);
let locations = Locations {
from_location: term_location,
at_location: target.start_location(),
};
if let Err(terr) = self.sub_types(rv_ty, place_ty, locations) {
span_mirbug!(
self,
term,
"bad DropAndReplace ({:?} = {:?}): {:?}",
place_ty,
rv_ty,
terr
);
}
// Subtle: this assignment occurs at the start of
// *both* blocks, so we need to ensure that it holds
// at both locations.
if let Some(unwind) = unwind {
let locations = Locations {
from_location: term_location,
at_location: unwind.start_location(),
};
if let Err(terr) = self.sub_types(rv_ty, place_ty, locations) {
span_mirbug!(
self,
term,
"bad DropAndReplace ({:?} = {:?}): {:?}",
place_ty,
rv_ty,
terr
);
}
}
}
TerminatorKind::SwitchInt {
ref discr,
switch_ty,
..
} => {
let discr_ty = discr.ty(mir, tcx);
if let Err(terr) = self.sub_types(discr_ty, switch_ty, term_location.at_self()) {
span_mirbug!(
self,
term,
"bad SwitchInt ({:?} on {:?}): {:?}",
switch_ty,
discr_ty,
terr
);
}
if !switch_ty.is_integral() && !switch_ty.is_char() && !switch_ty.is_bool() {
span_mirbug!(self, term, "bad SwitchInt discr ty {:?}", switch_ty);
}
// FIXME: check the values
}
TerminatorKind::Call {
ref func,
ref args,
ref destination,
..
} => {
let func_ty = func.ty(mir, tcx);
debug!("check_terminator: call, func_ty={:?}", func_ty);
let sig = match func_ty.sty {
ty::TyFnDef(..) | ty::TyFnPtr(_) => func_ty.fn_sig(tcx),
_ => {
span_mirbug!(self, term, "call to non-function {:?}", func_ty);
return;
}
};
let (sig, map) = self.infcx.replace_late_bound_regions_with_fresh_var(
term.source_info.span,
LateBoundRegionConversionTime::FnCall,
&sig,
);
let sig = self.normalize(&sig, term_location);
self.check_call_dest(mir, term, &sig, destination, term_location);
// The ordinary liveness rules will ensure that all
// regions in the type of the callee are live here. We
// then further constrain the late-bound regions that
// were instantiated at the call site to be live as
                // well. The result is that all the input (and
// output) types in the signature must be live, since
// all the inputs that fed into it were live.
for &late_bound_region in map.values() {
self.constraints.liveness_set.push((
late_bound_region,
term_location,
Cause::LiveOther(term_location),
));
}
if self.is_box_free(func) {
self.check_box_free_inputs(mir, term, &sig, args, term_location);
} else {
self.check_call_inputs(mir, term, &sig, args, term_location);
}
}
TerminatorKind::Assert {
ref cond, ref msg, ..
} => {
let cond_ty = cond.ty(mir, tcx);
if cond_ty != tcx.types.bool {
span_mirbug!(self, term, "bad Assert ({:?}, not bool", cond_ty);
}
if let AssertMessage::BoundsCheck { ref len, ref index } = *msg {
if len.ty(mir, tcx) != tcx.types.usize {
span_mirbug!(self, len, "bounds-check length non-usize {:?}", len)
}
if index.ty(mir, tcx) != tcx.types.usize {
span_mirbug!(self, index, "bounds-check index non-usize {:?}", index)
}
}
}
TerminatorKind::Yield { ref value, .. } => {
let value_ty = value.ty(mir, tcx);
match mir.yield_ty {
None => span_mirbug!(self, term, "yield in non-generator"),
Some(ty) => {
if let Err(terr) = self.sub_types(value_ty, ty, term_location.at_self()) {
span_mirbug!(
self,
term,
"type of yield value is {:?}, but the yield type is {:?}: {:?}",
value_ty,
ty,
terr
);
}
}
}
}
}
}
fn check_call_dest(
&mut self,
mir: &Mir<'tcx>,
term: &Terminator<'tcx>,
sig: &ty::FnSig<'tcx>,
destination: &Option<(Place<'tcx>, BasicBlock)>,
term_location: Location,
) {
let tcx = self.tcx();
match *destination {
Some((ref dest, target_block)) => {
let dest_ty = dest.ty(mir, tcx).to_ty(tcx);
let locations = Locations {
from_location: term_location,
at_location: target_block.start_location(),
};
if let Err(terr) = self.sub_types(sig.output(), dest_ty, locations) {
span_mirbug!(
self,
term,
"call dest mismatch ({:?} <- {:?}): {:?}",
dest_ty,
sig.output(),
terr
);
}
}
None => {
// FIXME(canndrew): This is_never should probably be an is_uninhabited
if !sig.output().is_never() {
span_mirbug!(self, term, "call to converging function {:?} w/o dest", sig);
}
}
}
}
fn check_call_inputs(
&mut self,
mir: &Mir<'tcx>,
term: &Terminator<'tcx>,
sig: &ty::FnSig<'tcx>,
args: &[Operand<'tcx>],
term_location: Location,
) {
debug!("check_call_inputs({:?}, {:?})", sig, args);
if args.len() < sig.inputs().len() || (args.len() > sig.inputs().len() && !sig.variadic) {
span_mirbug!(self, term, "call to {:?} with wrong # of args", sig);
}
for (n, (fn_arg, op_arg)) in sig.inputs().iter().zip(args).enumerate() {
let op_arg_ty = op_arg.ty(mir, self.tcx());
if let Err(terr) = self.sub_types(op_arg_ty, fn_arg, term_location.at_self()) {
span_mirbug!(
self,
term,
"bad arg #{:?} ({:?} <- {:?}): {:?}",
n,
fn_arg,
op_arg_ty,
terr
);
}
}
}
fn is_box_free(&self, operand: &Operand<'tcx>) -> bool {
match operand {
&Operand::Constant(box Constant {
literal:
Literal::Value {
value:
&ty::Const {
val: ConstVal::Function(def_id, _),
..
},
..
},
..
}) => Some(def_id) == self.tcx().lang_items().box_free_fn(),
_ => false,
}
}
fn check_box_free_inputs(
&mut self,
mir: &Mir<'tcx>,
term: &Terminator<'tcx>,
sig: &ty::FnSig<'tcx>,
args: &[Operand<'tcx>],
term_location: Location,
) {
debug!("check_box_free_inputs");
// box_free takes a Box as a pointer. Allow for that.
if sig.inputs().len() != 1 {
span_mirbug!(self, term, "box_free should take 1 argument");
return;
}
let pointee_ty = match sig.inputs()[0].sty {
ty::TyRawPtr(mt) => mt.ty,
_ => {
span_mirbug!(self, term, "box_free should take a raw ptr");
return;
}
};
if args.len() != 1 {
span_mirbug!(self, term, "box_free called with wrong # of args");
return;
}
let ty = args[0].ty(mir, self.tcx());
let arg_ty = match ty.sty {
ty::TyRawPtr(mt) => mt.ty,
ty::TyAdt(def, _) if def.is_box() => ty.boxed_ty(),
_ => {
span_mirbug!(self, term, "box_free called with bad arg ty");
return;
}
};
if let Err(terr) = self.sub_types(arg_ty, pointee_ty, term_location.at_self()) {
span_mirbug!(
self,
term,
"bad box_free arg ({:?} <- {:?}): {:?}",
pointee_ty,
arg_ty,
terr
);
}
}
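    /// Checks the cleanup-block invariants for this block's terminator:
    /// ordinary successors must have the same `is_cleanup` flag as this
    /// block, unwind/cleanup edges must target cleanup blocks, and
    /// terminators that make no sense inside (or outside) a cleanup block
    /// are reported.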
fn check_iscleanup(&mut self, mir: &Mir<'tcx>, block_data: &BasicBlockData<'tcx>) {
let is_cleanup = block_data.is_cleanup;
self.last_span = block_data.terminator().source_info.span;
match block_data.terminator().kind {
TerminatorKind::Goto { target } => {
self.assert_iscleanup(mir, block_data, target, is_cleanup)
}
TerminatorKind::SwitchInt { ref targets, .. } => for target in targets {
self.assert_iscleanup(mir, block_data, *target, is_cleanup);
},
TerminatorKind::Resume => if !is_cleanup {
span_mirbug!(self, block_data, "resume on non-cleanup block!")
},
TerminatorKind::Abort => if !is_cleanup {
span_mirbug!(self, block_data, "abort on non-cleanup block!")
},
TerminatorKind::Return => if is_cleanup {
span_mirbug!(self, block_data, "return on cleanup block")
},
TerminatorKind::GeneratorDrop { .. } => if is_cleanup {
span_mirbug!(self, block_data, "generator_drop in cleanup block")
},
TerminatorKind::Yield { resume, drop, .. } => {
if is_cleanup {
span_mirbug!(self, block_data, "yield in cleanup block")
}
self.assert_iscleanup(mir, block_data, resume, is_cleanup);
if let Some(drop) = drop {
self.assert_iscleanup(mir, block_data, drop, is_cleanup);
}
}
TerminatorKind::Unreachable => {}
TerminatorKind::Drop { target, unwind, .. }
| TerminatorKind::DropAndReplace { target, unwind, .. }
| TerminatorKind::Assert {
target,
cleanup: unwind,
..
} => {
self.assert_iscleanup(mir, block_data, target, is_cleanup);
if let Some(unwind) = unwind {
if is_cleanup {
span_mirbug!(self, block_data, "unwind on cleanup block")
}
self.assert_iscleanup(mir, block_data, unwind, true);
}
}
TerminatorKind::Call {
ref destination,
cleanup,
..
} => {
if let &Some((_, target)) = destination {
self.assert_iscleanup(mir, block_data, target, is_cleanup);
}
if let Some(cleanup) = cleanup {
if is_cleanup {
span_mirbug!(self, block_data, "cleanup on cleanup block")
}
self.assert_iscleanup(mir, block_data, cleanup, true);
}
}
TerminatorKind::FalseEdges {
real_target,
ref imaginary_targets,
} => {
self.assert_iscleanup(mir, block_data, real_target, is_cleanup);
for target in imaginary_targets {
self.assert_iscleanup(mir, block_data, *target, is_cleanup);
}
}
}
}
fn assert_iscleanup(
&mut self,
mir: &Mir<'tcx>,
ctxt: &fmt::Debug,
bb: BasicBlock,
iscleanuppad: bool,
) {
if mir[bb].is_cleanup != iscleanuppad {
span_mirbug!(
self,
ctxt,
"cleanuppad mismatch: {:?} should be {:?}",
bb,
iscleanuppad
);
}
}
fn check_local(&mut self, mir: &Mir<'tcx>, local: Local, local_decl: &LocalDecl<'tcx>) {
match mir.local_kind(local) {
LocalKind::ReturnPointer | LocalKind::Arg => {
// return values of normal functions are required to be
// sized by typeck, but return values of ADT constructors are
// not because we don't include a `Self: Sized` bounds on them.
//
// Unbound parts of arguments were never required to be Sized
// - maybe we should make that a warning.
return;
}
LocalKind::Var | LocalKind::Temp => {}
}
let span = local_decl.source_info.span;
let ty = local_decl.ty;
// Erase the regions from `ty` to get a global type. The
// `Sized` bound in no way depends on precise regions, so this
// shouldn't affect `is_sized`.
let gcx = self.tcx().global_tcx();
let erased_ty = gcx.lift(&self.tcx().erase_regions(&ty)).unwrap();
if !erased_ty.is_sized(gcx, self.param_env, span) {
// in current MIR construction, all non-control-flow rvalue
// expressions evaluate through `as_temp` or `into` a return
// slot or local, so to find all unsized rvalues it is enough
// to check all temps, return slots and locals.
if let None = self.reported_errors.replace((ty, span)) {
span_err!(
self.tcx().sess,
span,
E0161,
"cannot move a value of type {0}: the size of {0} \
cannot be statically determined",
ty
);
}
}
}
fn aggregate_field_ty(
&mut self,
ak: &AggregateKind<'tcx>,
field_index: usize,
location: Location,
) -> Result<Ty<'tcx>, FieldAccessError> {
let tcx = self.tcx();
match *ak {
AggregateKind::Adt(def, variant_index, substs, active_field_index) => {
let variant = &def.variants[variant_index];
let adj_field_index = active_field_index.unwrap_or(field_index);
if let Some(field) = variant.fields.get(adj_field_index) {
Ok(self.normalize(&field.ty(tcx, substs), location))
} else {
Err(FieldAccessError::OutOfRange {
field_count: variant.fields.len(),
})
}
}
AggregateKind::Closure(def_id, substs) => {
match substs.upvar_tys(def_id, tcx).nth(field_index) {
Some(ty) => Ok(ty),
None => Err(FieldAccessError::OutOfRange {
field_count: substs.upvar_tys(def_id, tcx).count(),
}),
}
}
AggregateKind::Generator(def_id, substs, _) => {
if let Some(ty) = substs.upvar_tys(def_id, tcx).nth(field_index) {
Ok(ty)
} else {
match substs.field_tys(def_id, tcx).nth(field_index) {
Some(ty) => Ok(ty),
None => Err(FieldAccessError::OutOfRange {
field_count: substs.field_tys(def_id, tcx).count() + 1,
}),
}
}
}
AggregateKind::Array(ty) => Ok(ty),
AggregateKind::Tuple => {
unreachable!("This should have been covered in check_rvalues");
}
}
}
fn check_rvalue(&mut self, mir: &Mir<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
let tcx = self.tcx();
match rvalue {
Rvalue::Aggregate(ak, ops) => {
self.check_aggregate_rvalue(mir, rvalue, ak, ops, location)
}
Rvalue::Repeat(operand, const_usize) => if const_usize.as_u64() > 1 {
let operand_ty = operand.ty(mir, tcx);
let trait_ref = ty::TraitRef {
def_id: tcx.lang_items().copy_trait().unwrap(),
substs: tcx.mk_substs_trait(operand_ty, &[]),
};
self.prove_trait_ref(trait_ref, location);
},
Rvalue::NullaryOp(_, ty) => {
let trait_ref = ty::TraitRef {
def_id: tcx.lang_items().sized_trait().unwrap(),
substs: tcx.mk_substs_trait(ty, &[]),
};
self.prove_trait_ref(trait_ref, location);
}
Rvalue::Cast(cast_kind, op, ty) => match cast_kind {
CastKind::ReifyFnPointer => {
let fn_sig = op.ty(mir, tcx).fn_sig(tcx);
// The type that we see in the fcx is like
// `foo::<'a, 'b>`, where `foo` is the path to a
// function definition. When we extract the
// signature, it comes from the `fn_sig` query,
// and hence may contain unnormalized results.
let fn_sig = self.normalize(&fn_sig, location);
let ty_fn_ptr_from = tcx.mk_fn_ptr(fn_sig);
if let Err(terr) = self.eq_types(ty_fn_ptr_from, ty, location.at_self()) {
span_mirbug!(
self,
rvalue,
"equating {:?} with {:?} yields {:?}",
ty_fn_ptr_from,
ty,
terr
);
}
}
CastKind::ClosureFnPointer => {
let sig = match op.ty(mir, tcx).sty {
ty::TyClosure(def_id, substs) => {
substs.closure_sig_ty(def_id, tcx).fn_sig(tcx)
}
_ => bug!(),
};
let ty_fn_ptr_from = tcx.coerce_closure_fn_ty(sig);
if let Err(terr) = self.eq_types(ty_fn_ptr_from, ty, location.at_self()) {
span_mirbug!(
self,
rvalue,
"equating {:?} with {:?} yields {:?}",
ty_fn_ptr_from,
ty,
terr
);
}
}
CastKind::UnsafeFnPointer => {
let fn_sig = op.ty(mir, tcx).fn_sig(tcx);
// The type that we see in the fcx is like
// `foo::<'a, 'b>`, where `foo` is the path to a
// function definition. When we extract the
// signature, it comes from the `fn_sig` query,
// and hence may contain unnormalized results.
let fn_sig = self.normalize(&fn_sig, location);
let ty_fn_ptr_from = tcx.safe_to_unsafe_fn_ty(fn_sig);
if let Err(terr) = self.eq_types(ty_fn_ptr_from, ty, location.at_self()) {
span_mirbug!(
self,
rvalue,
"equating {:?} with {:?} yields {:?}",
ty_fn_ptr_from,
ty,
terr
);
}
}
CastKind::Unsize => {
let trait_ref = ty::TraitRef {
def_id: tcx.lang_items().coerce_unsized_trait().unwrap(),
substs: tcx.mk_substs_trait(op.ty(mir, tcx), &[ty]),
};
self.prove_trait_ref(trait_ref, location);
}
CastKind::Misc => {}
},
// FIXME: These other cases have to be implemented in future PRs
Rvalue::Use(..)
| Rvalue::Ref(..)
| Rvalue::Len(..)
| Rvalue::BinaryOp(..)
| Rvalue::CheckedBinaryOp(..)
| Rvalue::UnaryOp(..)
| Rvalue::Discriminant(..) => {}
}
}
fn check_aggregate_rvalue(
&mut self,
mir: &Mir<'tcx>,
rvalue: &Rvalue<'tcx>,
aggregate_kind: &AggregateKind<'tcx>,
operands: &[Operand<'tcx>],
location: Location,
) {
let tcx = self.tcx();
self.prove_aggregate_predicates(aggregate_kind, location);
if *aggregate_kind == AggregateKind::Tuple {
// tuple rvalue field type is always the type of the op. Nothing to check here.
return;
}
for (i, operand) in operands.iter().enumerate() {
let field_ty = match self.aggregate_field_ty(aggregate_kind, i, location) {
Ok(field_ty) => field_ty,
Err(FieldAccessError::OutOfRange { field_count }) => {
span_mirbug!(
self,
rvalue,
"accessed field #{} but variant only has {}",
i,
field_count
);
continue;
}
};
let operand_ty = operand.ty(mir, tcx);
if let Err(terr) =
self.sub_types(operand_ty, field_ty, location.at_successor_within_block())
{
span_mirbug!(
self,
rvalue,
"{:?} is not a subtype of {:?}: {:?}",
operand_ty,
field_ty,
terr
);
}
}
}
fn prove_aggregate_predicates(
&mut self,
aggregate_kind: &AggregateKind<'tcx>,
location: Location,
) {
let tcx = self.tcx();
debug!(
"prove_aggregate_predicates(aggregate_kind={:?}, location={:?})",
aggregate_kind,
location
);
let instantiated_predicates = match aggregate_kind {
AggregateKind::Adt(def, _, substs, _) => {
tcx.predicates_of(def.did).instantiate(tcx, substs)
}
            // For closures, we have some **extra requirements** we
            // have to check. In particular, in their upvars and
// signatures, closures often reference various regions
// from the surrounding function -- we call those the
// closure's free regions. When we borrow-check (and hence
// region-check) closures, we may find that the closure
// requires certain relationships between those free
// regions. However, because those free regions refer to
// portions of the CFG of their caller, the closure is not
// in a position to verify those relationships. In that
// case, the requirements get "propagated" to us, and so
// we have to solve them here where we instantiate the
// closure.
//
            // Despite the opacity of the previous paragraph, this is
// actually relatively easy to understand in terms of the
// desugaring. A closure gets desugared to a struct, and
// these extra requirements are basically like where
// clauses on the struct.
AggregateKind::Closure(def_id, substs) => {
if let Some(closure_region_requirements) = tcx.mir_borrowck(*def_id) {
closure_region_requirements.apply_requirements(
self.infcx,
self.body_id,
location,
*def_id,
*substs,
);
}
tcx.predicates_of(*def_id).instantiate(tcx, substs.substs)
}
AggregateKind::Generator(def_id, substs, _) => {
tcx.predicates_of(*def_id).instantiate(tcx, substs.substs)
}
AggregateKind::Array(_) | AggregateKind::Tuple => ty::InstantiatedPredicates::empty(),
};
let predicates = self.normalize(&instantiated_predicates.predicates, location);
debug!("prove_aggregate_predicates: predicates={:?}", predicates);
self.prove_predicates(&predicates, location);
}
fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location) {
self.prove_predicates(
&[
ty::Predicate::Trait(trait_ref.to_poly_trait_ref().to_poly_trait_predicate()),
],
location,
);
}
fn prove_predicates(&mut self, predicates: &[ty::Predicate<'tcx>], location: Location) {
debug!(
"prove_predicates(predicates={:?}, location={:?})",
predicates,
location
);
self.fully_perform_op(location.at_self(), |this| {
let cause = this.misc(this.last_span);
let obligations = predicates
.iter()
.map(|&p| traits::Obligation::new(cause.clone(), this.param_env, p))
.collect();
Ok(InferOk {
value: (),
obligations,
})
}).unwrap()
}
fn typeck_mir(&mut self, mir: &Mir<'tcx>) {
self.last_span = mir.span;
debug!("run_on_mir: {:?}", mir.span);
for (local, local_decl) in mir.local_decls.iter_enumerated() {
self.check_local(mir, local, local_decl);
}
for (block, block_data) in mir.basic_blocks().iter_enumerated() {
let mut location = Location {
block,
statement_index: 0,
};
for stmt in &block_data.statements {
if stmt.source_info.span != DUMMY_SP {
self.last_span = stmt.source_info.span;
}
self.check_stmt(mir, stmt, location);
location.statement_index += 1;
}
self.check_terminator(mir, block_data.terminator(), location);
self.check_iscleanup(mir, block_data);
}
}
fn normalize<T>(&mut self, value: &T, location: Location) -> T
where
T: fmt::Debug + TypeFoldable<'tcx>,
{
self.fully_perform_op(location.at_self(), |this| {
let mut selcx = traits::SelectionContext::new(this.infcx);
let cause = this.misc(this.last_span);
let traits::Normalized { value, obligations } =
traits::normalize(&mut selcx, this.param_env, cause, value);
Ok(InferOk { value, obligations })
}).unwrap()
}
}
pub struct TypeckMir;
impl MirPass for TypeckMir {
fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) {
let def_id = src.def_id;
let id = tcx.hir.as_local_node_id(def_id).unwrap();
debug!("run_pass: {:?}", def_id);
if tcx.sess.err_count() > 0 {
// compiling a broken program can obviously result in a
// broken MIR, so try not to report duplicate errors.
return;
}
let param_env = tcx.param_env(def_id);
tcx.infer_ctxt().enter(|infcx| {
let _ = type_check_internal(&infcx, id, param_env, mir, &[], None, &mut |_| ());
// For verification purposes, we just ignore the resulting
// region constraint sets. Not our problem. =)
});
}
}
trait AtLocation {
/// Creates a `Locations` where `self` is both the from-location
/// and the at-location. This means that any required region
/// relationships must hold upon entering the statement/terminator
/// indicated by `self`. This is typically used when processing
/// "inputs" to the given location.
fn at_self(self) -> Locations;
/// Creates a `Locations` where `self` is the from-location and
/// its successor within the block is the at-location. This means
/// that any required region relationships must hold only upon
/// **exiting** the statement/terminator indicated by `self`. This
/// is for example used when you have a `place = rv` statement: it
/// indicates that the `typeof(rv) <: typeof(place)` as of the
/// **next** statement.
fn at_successor_within_block(self) -> Locations;
}
impl AtLocation for Location {
fn at_self(self) -> Locations {
Locations {
from_location: self,
at_location: self,
}
}
fn at_successor_within_block(self) -> Locations {
Locations {
from_location: self,
at_location: self.successor_within_block(),
}
}
}
| 37.911692 | 100 | 0.484925 |
76ebb6a6706c4608c901fba31845a830b7013c51 | 13,324 | mod aneros;
mod fleshlight_launch_helper;
mod generic_command_manager;
mod kiiroo_v2;
mod kiiroo_v21;
mod kiiroo_v2_vibrator;
mod lelof1s;
mod lovehoney_desire;
mod lovense;
mod magic_motion_v1;
mod magic_motion_v2;
mod magic_motion_v3;
mod maxpro;
mod motorbunny;
mod picobong;
mod prettylove;
mod raw_protocol;
mod realov;
mod svakom;
mod vibratissimo;
mod vorze_sa;
mod wevibe;
mod wevibe8bit;
mod xinput;
mod youcups;
mod youou;
use super::DeviceImpl;
use crate::{
core::{
errors::{ButtplugDeviceError, ButtplugError},
messages::{
self,
ButtplugDeviceCommandMessageUnion,
ButtplugDeviceMessageType,
ButtplugMessage,
MessageAttributesMap,
VibrateCmd,
VibrateSubcommand,
},
},
device::{configuration_manager::DeviceProtocolConfiguration, ButtplugDeviceResultFuture},
};
use futures::future::{self, BoxFuture};
use std::convert::TryFrom;
use std::sync::Arc;
pub enum ProtocolTypes {
Aneros,
KiirooV2,
KiirooV2Vibrator,
KiirooV21,
LeloF1s,
LovehoneyDesire,
Lovense,
MagicMotionV1,
MagicMotionV2,
MagicMotionV3,
Maxpro,
Motorbunny,
Picobong,
PrettyLove,
RawProtocol,
Realov,
Svakom,
Vibratissimo,
VorzeSA,
WeVibe,
WeVibe8Bit,
XInput,
Youcups,
Youou,
}
impl TryFrom<&str> for ProtocolTypes {
type Error = ButtplugError;
fn try_from(protocol_name: &str) -> Result<Self, Self::Error> {
match protocol_name {
"aneros" => Ok(ProtocolTypes::Aneros),
"kiiroo-v2" => Ok(ProtocolTypes::KiirooV2),
"kiiroo-v2-vibrator" => Ok(ProtocolTypes::KiirooV2Vibrator),
"kiiroo-v21" => Ok(ProtocolTypes::KiirooV21),
"lelo-f1s" => Ok(ProtocolTypes::LeloF1s),
"lovehoney-desire" => Ok(ProtocolTypes::LovehoneyDesire),
"lovense" => Ok(ProtocolTypes::Lovense),
"magic-motion-1" => Ok(ProtocolTypes::MagicMotionV1),
"magic-motion-2" => Ok(ProtocolTypes::MagicMotionV2),
"magic-motion-3" => Ok(ProtocolTypes::MagicMotionV3),
"maxpro" => Ok(ProtocolTypes::Maxpro),
"motorbunny" => Ok(ProtocolTypes::Motorbunny),
"picobong" => Ok(ProtocolTypes::Picobong),
"prettylove" => Ok(ProtocolTypes::PrettyLove),
"raw" => Ok(ProtocolTypes::RawProtocol),
"realov" => Ok(ProtocolTypes::Realov),
"svakom" => Ok(ProtocolTypes::Svakom),
"vibratissimo" => Ok(ProtocolTypes::Vibratissimo),
"vorze-sa" => Ok(ProtocolTypes::VorzeSA),
"wevibe" => Ok(ProtocolTypes::WeVibe),
"wevibe-8bit" => Ok(ProtocolTypes::WeVibe8Bit),
"xinput" => Ok(ProtocolTypes::XInput),
"youcups" => Ok(ProtocolTypes::Youcups),
"youou" => Ok(ProtocolTypes::Youou),
_ => {
error!("Protocol {} not implemented.", protocol_name);
Err(ButtplugDeviceError::ProtocolNotImplemented(protocol_name.to_owned()).into())
}
}
}
}
pub fn try_create_protocol(
protocol_type: &ProtocolTypes,
device: &dyn DeviceImpl,
config: DeviceProtocolConfiguration,
) -> BoxFuture<'static, Result<Box<dyn ButtplugProtocol>, ButtplugError>> {
match protocol_type {
ProtocolTypes::Aneros => aneros::Aneros::try_create(device, config),
ProtocolTypes::KiirooV2 => kiiroo_v2::KiirooV2::try_create(device, config),
ProtocolTypes::KiirooV2Vibrator => {
kiiroo_v2_vibrator::KiirooV2Vibrator::try_create(device, config)
}
ProtocolTypes::KiirooV21 => kiiroo_v21::KiirooV21::try_create(device, config),
ProtocolTypes::LeloF1s => lelof1s::LeloF1s::try_create(device, config),
ProtocolTypes::LovehoneyDesire => lovehoney_desire::LovehoneyDesire::try_create(device, config),
ProtocolTypes::Lovense => lovense::Lovense::try_create(device, config),
ProtocolTypes::MagicMotionV1 => magic_motion_v1::MagicMotionV1::try_create(device, config),
ProtocolTypes::MagicMotionV2 => magic_motion_v2::MagicMotionV2::try_create(device, config),
ProtocolTypes::MagicMotionV3 => magic_motion_v3::MagicMotionV3::try_create(device, config),
ProtocolTypes::Maxpro => maxpro::Maxpro::try_create(device, config),
ProtocolTypes::Motorbunny => motorbunny::Motorbunny::try_create(device, config),
ProtocolTypes::Picobong => picobong::Picobong::try_create(device, config),
ProtocolTypes::PrettyLove => prettylove::PrettyLove::try_create(device, config),
ProtocolTypes::RawProtocol => raw_protocol::RawProtocol::try_create(device, config),
ProtocolTypes::Realov => realov::Realov::try_create(device, config),
ProtocolTypes::Svakom => svakom::Svakom::try_create(device, config),
ProtocolTypes::Vibratissimo => vibratissimo::Vibratissimo::try_create(device, config),
ProtocolTypes::VorzeSA => vorze_sa::VorzeSA::try_create(device, config),
ProtocolTypes::WeVibe => wevibe::WeVibe::try_create(device, config),
ProtocolTypes::WeVibe8Bit => wevibe8bit::WeVibe8Bit::try_create(device, config),
ProtocolTypes::XInput => xinput::XInput::try_create(device, config),
ProtocolTypes::Youcups => youcups::Youcups::try_create(device, config),
ProtocolTypes::Youou => youou::Youou::try_create(device, config),
}
}
pub trait ButtplugProtocolCreator: ButtplugProtocol {
fn try_create(
device_impl: &dyn DeviceImpl,
config: DeviceProtocolConfiguration,
) -> BoxFuture<'static, Result<Box<dyn ButtplugProtocol>, ButtplugError>>
where
Self: Sized,
{
let (names, attrs) = config.get_attributes(device_impl.name()).unwrap();
let name = names.get("en-us").unwrap().clone();
Box::pin(async move { Ok(Self::new_protocol(&name, attrs)) })
}
fn new_protocol(name: &str, attrs: MessageAttributesMap) -> Box<dyn ButtplugProtocol>
where
Self: Sized;
}
pub trait ButtplugProtocol: ButtplugProtocolCommandHandler + Sync {}
pub trait ButtplugProtocolProperties {
fn name(&self) -> &str;
fn message_attributes(&self) -> MessageAttributesMap;
fn stop_commands(&self) -> Vec<ButtplugDeviceCommandMessageUnion>;
}
pub trait ButtplugProtocolCommandHandler: Send + ButtplugProtocolProperties {
  // In order to not have to worry about id setting at the protocol level (this
  // should be taken care of in the server's device manager), we return server
  // messages on success but Buttplug errors on failure.
fn handle_command(
&self,
device: Arc<Box<dyn DeviceImpl>>,
command_message: ButtplugDeviceCommandMessageUnion,
) -> ButtplugDeviceResultFuture {
match command_message {
ButtplugDeviceCommandMessageUnion::FleshlightLaunchFW12Cmd(msg) => {
self.handle_fleshlight_launch_fw12_cmd(device, msg)
}
ButtplugDeviceCommandMessageUnion::KiirooCmd(msg) => self.handle_kiiroo_cmd(device, msg),
ButtplugDeviceCommandMessageUnion::LinearCmd(msg) => self.handle_linear_cmd(device, msg),
ButtplugDeviceCommandMessageUnion::RawReadCmd(msg) => self.handle_raw_read_cmd(device, msg),
ButtplugDeviceCommandMessageUnion::RawWriteCmd(msg) => self.handle_raw_write_cmd(device, msg),
ButtplugDeviceCommandMessageUnion::RotateCmd(msg) => self.handle_rotate_cmd(device, msg),
ButtplugDeviceCommandMessageUnion::SingleMotorVibrateCmd(msg) => {
self.handle_single_motor_vibrate_cmd(device, msg)
}
ButtplugDeviceCommandMessageUnion::StopDeviceCmd(msg) => {
self.handle_stop_device_cmd(device, msg)
}
ButtplugDeviceCommandMessageUnion::RawSubscribeCmd(msg) => {
self.handle_raw_subscribe_cmd(device, msg)
}
ButtplugDeviceCommandMessageUnion::RawUnsubscribeCmd(msg) => {
self.handle_raw_unsubscribe_cmd(device, msg)
}
ButtplugDeviceCommandMessageUnion::VibrateCmd(msg) => self.handle_vibrate_cmd(device, msg),
ButtplugDeviceCommandMessageUnion::VorzeA10CycloneCmd(msg) => {
self.handle_vorze_a10_cyclone_cmd(device, msg)
}
}
}
fn handle_stop_device_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: messages::StopDeviceCmd,
) -> ButtplugDeviceResultFuture {
let ok_return = messages::Ok::new(message.get_id());
let fut_vec: Vec<ButtplugDeviceResultFuture> = self
.stop_commands()
.iter()
.map(|cmd| self.handle_command(device.clone(), cmd.clone()))
.collect();
Box::pin(async move {
// TODO We should be able to run these concurrently, and should return any error we get.
for fut in fut_vec {
if let Err(e) = fut.await {
error!("{:?}", e);
}
}
Ok(ok_return.into())
})
}
fn handle_single_motor_vibrate_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: messages::SingleMotorVibrateCmd,
) -> ButtplugDeviceResultFuture {
// Time for sadness! In order to handle conversion of
// SingleMotorVibrateCmd, we need to know how many vibrators a device
// has. We don't actually know that until we get to the protocol level,
// so we're stuck parsing this here. Since we can assume
// SingleMotorVibrateCmd will ALWAYS map to vibration, we can convert to
// VibrateCmd here and save ourselves having to handle it in every
// protocol, meaning spec v0 and v1 programs will still be forward
// compatible with vibrators.
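    // For example (illustrative values): a SingleMotorVibrateCmd with speed
    // 0.5 sent to a device reporting two vibration features becomes
    // VibrateCmd([{index: 0, speed: 0.5}, {index: 1, speed: 0.5}]).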
let vibrator_count;
if let Some(attr) = self
.message_attributes()
.get(&ButtplugDeviceMessageType::VibrateCmd)
{
if let Some(count) = attr.feature_count {
vibrator_count = count as usize;
} else {
return ButtplugDeviceError::ProtocolRequirementError(format!(
"{} needs to support VibrateCmd with a feature count to use SingleMotorVibrateCmd.",
self.name()
))
.into();
}
} else {
return ButtplugDeviceError::ProtocolRequirementError(format!(
"{} needs to support VibrateCmd to use SingleMotorVibrateCmd.",
self.name()
))
.into();
}
let speed = message.speed;
let mut cmds = vec![];
for i in 0..vibrator_count {
cmds.push(VibrateSubcommand::new(i as u32, speed));
}
let mut vibrate_cmd = VibrateCmd::new(message.device_index, cmds);
vibrate_cmd.set_id(message.get_id());
self.handle_command(device, vibrate_cmd.into())
}
fn handle_raw_write_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: messages::RawWriteCmd,
) -> ButtplugDeviceResultFuture {
let id = message.get_id();
let fut = device.write_value(message.into());
Box::pin(async move { fut.await.map(|_| messages::Ok::new(id).into()) })
}
fn handle_raw_read_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: messages::RawReadCmd,
) -> ButtplugDeviceResultFuture {
let id = message.get_id();
let fut = device.read_value(message.into());
Box::pin(async move {
fut.await.map(|mut msg| {
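        // Re-stamp the reading with the id of the original request before returning it.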
msg.set_id(id);
msg.into()
})
})
}
fn handle_raw_unsubscribe_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: messages::RawUnsubscribeCmd,
) -> ButtplugDeviceResultFuture {
let id = message.get_id();
let fut = device.unsubscribe(message.into());
Box::pin(async move { fut.await.map(|_| messages::Ok::new(id).into()) })
}
fn handle_raw_subscribe_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: messages::RawSubscribeCmd,
) -> ButtplugDeviceResultFuture {
let id = message.get_id();
let fut = device.subscribe(message.into());
Box::pin(async move { fut.await.map(|_| messages::Ok::new(id).into()) })
}
fn command_unimplemented(&self) -> ButtplugDeviceResultFuture {
    #[cfg(debug_assertions)]
    unimplemented!("Command not implemented for this protocol");
    #[cfg(not(debug_assertions))]
Box::pin(future::ready(Err(
ButtplugDeviceError::UnhandledCommand("Command not implemented for this protocol".to_owned())
.into(),
)))
}
fn handle_vorze_a10_cyclone_cmd(
&self,
_device: Arc<Box<dyn DeviceImpl>>,
_message: messages::VorzeA10CycloneCmd,
) -> ButtplugDeviceResultFuture {
self.command_unimplemented()
}
fn handle_kiiroo_cmd(
&self,
_device: Arc<Box<dyn DeviceImpl>>,
_message: messages::KiirooCmd,
) -> ButtplugDeviceResultFuture {
self.command_unimplemented()
}
fn handle_fleshlight_launch_fw12_cmd(
&self,
_device: Arc<Box<dyn DeviceImpl>>,
_message: messages::FleshlightLaunchFW12Cmd,
) -> ButtplugDeviceResultFuture {
self.command_unimplemented()
}
fn handle_vibrate_cmd(
&self,
_device: Arc<Box<dyn DeviceImpl>>,
_message: messages::VibrateCmd,
) -> ButtplugDeviceResultFuture {
self.command_unimplemented()
}
fn handle_rotate_cmd(
&self,
_device: Arc<Box<dyn DeviceImpl>>,
_message: messages::RotateCmd,
) -> ButtplugDeviceResultFuture {
self.command_unimplemented()
}
fn handle_linear_cmd(
&self,
_device: Arc<Box<dyn DeviceImpl>>,
_message: messages::LinearCmd,
) -> ButtplugDeviceResultFuture {
self.command_unimplemented()
}
/*
fn handle_battery_level_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: messages::Battery,
) -> ButtplugDeviceResultFuture {
self.command_unimplemented()
}
fn handle_rssi_level_cmd(
&self,
device: Arc<Box<dyn DeviceImpl>>,
message: ButtplugDeviceCommandMessageUnion,
) -> ButtplugDeviceResultFuture {
unimplemented!("Command not implemented for this protocol");
}
*/
}
| 33.646465 | 100 | 0.695212 |
efdfedc5f72b35c7b3c613e936077d42ffef9fbf | 10,587 | use core::SegmentId;
use error::TantivyError;
use schema::Schema;
use serde_json;
use std::borrow::BorrowMut;
use std::fmt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use Result;
use super::pool::LeasedItem;
use super::pool::Pool;
use super::segment::create_segment;
use super::segment::Segment;
use core::searcher::Searcher;
use core::IndexMeta;
use core::SegmentMeta;
use core::SegmentReader;
use core::META_FILEPATH;
#[cfg(feature = "mmap")]
use directory::MmapDirectory;
use directory::{Directory, RAMDirectory};
use directory::{DirectoryClone, ManagedDirectory};
use indexer::index_writer::open_index_writer;
use indexer::index_writer::HEAP_SIZE_MIN;
use indexer::segment_updater::save_new_metas;
use indexer::DirectoryLock;
use num_cpus;
use std::path::Path;
use tokenizer::TokenizerManager;
use IndexWriter;
fn load_metas(directory: &Directory) -> Result<IndexMeta> {
let meta_data = directory.atomic_read(&META_FILEPATH)?;
let meta_string = String::from_utf8_lossy(&meta_data);
serde_json::from_str(&meta_string)
.map_err(|_| TantivyError::CorruptedFile(META_FILEPATH.clone()))
}
/// Search Index
pub struct Index {
directory: ManagedDirectory,
schema: Schema,
num_searchers: Arc<AtomicUsize>,
searcher_pool: Arc<Pool<Searcher>>,
tokenizers: TokenizerManager,
}
impl Index {
/// Creates a new index using the `RAMDirectory`.
///
/// The index will be allocated in anonymous memory.
/// This should only be used for unit tests.
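    ///
    /// A minimal usage sketch, assuming `schema` was built beforehand with a
    /// `SchemaBuilder`:
    ///
    /// ```ignore
    /// let index = Index::create_in_ram(schema);
    /// ```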
pub fn create_in_ram(schema: Schema) -> Index {
let ram_directory = RAMDirectory::create();
Index::create(ram_directory, schema).expect("Creating a RAMDirectory should never fail")
}
/// Creates a new index in a given filepath.
/// The index will use the `MMapDirectory`.
///
/// If a previous index was in this directory, then its meta file will be destroyed.
#[cfg(feature = "mmap")]
pub fn create_in_dir<P: AsRef<Path>>(directory_path: P, schema: Schema) -> Result<Index> {
let mmap_directory = MmapDirectory::open(directory_path)?;
Index::create(mmap_directory, schema)
}
/// Creates a new index in a temp directory.
///
/// The index will use the `MMapDirectory` in a newly created directory.
/// The temp directory will be destroyed automatically when the `Index` object
/// is destroyed.
///
/// The temp directory is only used for testing the `MmapDirectory`.
/// For other unit tests, prefer the `RAMDirectory`, see: `create_in_ram`.
#[cfg(feature = "mmap")]
pub fn create_from_tempdir(schema: Schema) -> Result<Index> {
let mmap_directory = MmapDirectory::create_from_tempdir()?;
Index::create(mmap_directory, schema)
}
/// Creates a new index given an implementation of the trait `Directory`
pub fn create<Dir: Directory>(dir: Dir, schema: Schema) -> Result<Index> {
let directory = ManagedDirectory::new(dir)?;
Index::from_directory(directory, schema)
}
/// Create a new index from a directory.
fn from_directory(mut directory: ManagedDirectory, schema: Schema) -> Result<Index> {
save_new_metas(schema.clone(), 0, directory.borrow_mut())?;
let metas = IndexMeta::with_schema(schema);
Index::create_from_metas(directory, &metas)
}
/// Creates a new index given a directory and an `IndexMeta`.
fn create_from_metas(directory: ManagedDirectory, metas: &IndexMeta) -> Result<Index> {
let schema = metas.schema.clone();
let n_cpus = num_cpus::get();
let index = Index {
directory,
schema,
num_searchers: Arc::new(AtomicUsize::new(n_cpus)),
searcher_pool: Arc::new(Pool::new()),
tokenizers: TokenizerManager::default(),
};
index.load_searchers()?;
Ok(index)
}
/// Accessor for the tokenizer manager.
pub fn tokenizers(&self) -> &TokenizerManager {
&self.tokenizers
}
    /// Opens an index from the directory at the given path, using the `MmapDirectory`.
#[cfg(feature = "mmap")]
pub fn open_in_dir<P: AsRef<Path>>(directory_path: P) -> Result<Index> {
let mmap_directory = MmapDirectory::open(directory_path)?;
Index::open(mmap_directory)
}
/// Open the index using the provided directory
pub fn open<D: Directory>(directory: D) -> Result<Index> {
let directory = ManagedDirectory::new(directory)?;
let metas = load_metas(&directory)?;
Index::create_from_metas(directory, &metas)
}
/// Reads the index meta file from the directory.
pub fn load_metas(&self) -> Result<IndexMeta> {
load_metas(self.directory())
}
/// Open a new index writer. Attempts to acquire a lockfile.
///
/// The lockfile should be deleted on drop, but it is possible
/// that due to a panic or other error, a stale lockfile will be
/// left in the index directory. If you are sure that no other
/// `IndexWriter` on the system is accessing the index directory,
/// it is safe to manually delete the lockfile.
///
/// - `num_threads` defines the number of indexing workers that
/// should work at the same time.
///
/// - `overall_heap_size_in_bytes` sets the amount of memory
/// allocated for all indexing thread.
/// Each thread will receive a budget of `overall_heap_size_in_bytes / num_threads`.
///
/// # Errors
/// If the lockfile already exists, returns `Error::FileAlreadyExists`.
/// # Panics
/// If the heap size per thread is too small, panics.
pub fn writer_with_num_threads(
&self,
num_threads: usize,
overall_heap_size_in_bytes: usize,
) -> Result<IndexWriter> {
let directory_lock = DirectoryLock::lock(self.directory().box_clone())?;
let heap_size_in_bytes_per_thread = overall_heap_size_in_bytes / num_threads;
open_index_writer(
self,
num_threads,
heap_size_in_bytes_per_thread,
directory_lock,
)
}
/// Creates a multithreaded writer
///
    /// Tantivy will automatically determine the number of threads to use.
/// `overall_heap_size_in_bytes` is the total target memory usage that will be split
/// between a given number of threads.
///
/// # Errors
/// If the lockfile already exists, returns `Error::FileAlreadyExists`.
/// # Panics
/// If the heap size per thread is too small, panics.
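    ///
    /// # Example
    ///
    /// A minimal usage sketch, assuming a 50 MB overall heap budget is acceptable:
    ///
    /// ```ignore
    /// let mut index_writer = index.writer(50_000_000)?;
    /// // ... add documents ...
    /// index_writer.commit()?;
    /// ```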
pub fn writer(&self, overall_heap_size_in_bytes: usize) -> Result<IndexWriter> {
let mut num_threads = num_cpus::get();
let heap_size_in_bytes_per_thread = overall_heap_size_in_bytes / num_threads;
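        // If the requested budget cannot give every thread its minimum heap,
        // reduce the thread count rather than failing.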
if heap_size_in_bytes_per_thread < HEAP_SIZE_MIN {
num_threads = (overall_heap_size_in_bytes / HEAP_SIZE_MIN).max(1);
}
self.writer_with_num_threads(num_threads, overall_heap_size_in_bytes)
}
/// Accessor to the index schema
///
/// The schema is actually cloned.
pub fn schema(&self) -> Schema {
self.schema.clone()
}
/// Returns the list of segments that are searchable
pub fn searchable_segments(&self) -> Result<Vec<Segment>> {
Ok(self.searchable_segment_metas()?
.into_iter()
.map(|segment_meta| self.segment(segment_meta))
.collect())
}
#[doc(hidden)]
pub fn segment(&self, segment_meta: SegmentMeta) -> Segment {
create_segment(self.clone(), segment_meta)
}
/// Creates a new segment.
pub fn new_segment(&self) -> Segment {
let segment_meta = SegmentMeta::new(SegmentId::generate_random(), 0);
self.segment(segment_meta)
}
/// Return a reference to the index directory.
pub fn directory(&self) -> &ManagedDirectory {
&self.directory
}
/// Return a mutable reference to the index directory.
pub fn directory_mut(&mut self) -> &mut ManagedDirectory {
&mut self.directory
}
/// Reads the meta.json and returns the list of
/// `SegmentMeta` from the last commit.
pub fn searchable_segment_metas(&self) -> Result<Vec<SegmentMeta>> {
Ok(self.load_metas()?.segments)
}
/// Returns the list of segment ids that are searchable.
pub fn searchable_segment_ids(&self) -> Result<Vec<SegmentId>> {
Ok(self.searchable_segment_metas()?
.iter()
.map(|segment_meta| segment_meta.id())
.collect())
}
    /// Sets the number of searchers to use.
    ///
    /// The new value only takes effect on the next call to `load_searchers`.
pub fn set_num_searchers(&mut self, num_searchers: usize) {
self.num_searchers.store(num_searchers, Ordering::Release);
}
/// Creates a new generation of searchers after
/// a change of the set of searchable indexes.
///
/// This needs to be called when a new segment has been
/// published or after a merge.
pub fn load_searchers(&self) -> Result<()> {
let searchable_segments = self.searchable_segments()?;
let segment_readers: Vec<SegmentReader> = searchable_segments
.iter()
.map(SegmentReader::open)
.collect::<Result<_>>()?;
let schema = self.schema();
let num_searchers: usize = self.num_searchers.load(Ordering::Acquire);
let searchers = (0..num_searchers)
.map(|_| Searcher::new(schema.clone(), segment_readers.clone()))
.collect();
self.searcher_pool.publish_new_generation(searchers);
Ok(())
}
/// Returns a searcher
///
/// This method should be called every single time a search
/// query is performed.
/// The searchers are taken from a pool of `num_searchers` searchers.
/// If no searcher is available
/// this may block.
///
/// The same searcher must be used for a given query, as it ensures
/// the use of a consistent segment set.
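    ///
    /// A minimal usage sketch, assuming documents have already been committed
    /// and `load_searchers` has been called:
    ///
    /// ```ignore
    /// let searcher = index.searcher();
    /// // run queries against `searcher`
    /// ```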
pub fn searcher(&self) -> LeasedItem<Searcher> {
self.searcher_pool.acquire()
}
}
impl fmt::Debug for Index {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Index({:?})", self.directory)
}
}
impl Clone for Index {
fn clone(&self) -> Index {
Index {
directory: self.directory.clone(),
schema: self.schema.clone(),
num_searchers: Arc::clone(&self.num_searchers),
searcher_pool: Arc::clone(&self.searcher_pool),
tokenizers: self.tokenizers.clone(),
}
}
}
| 35.526846 | 96 | 0.648626 |
d768a7a78912b593a03d94434d4f4d7e0b63e408 | 4,733 | use serde::{ser, Serialize, Serializer};
use std::collections::HashMap;
use std::hash::Hash;
use crate::{Path, Root};
/// Trait for validating glTF JSON data so that the library can function without panicking.
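/// A minimal sketch of a hand-written implementation for a hypothetical wrapper
/// type (`MyExtension` and its `values` field are illustrative only):
///
/// ```ignore
/// struct MyExtension { values: Vec<f32> }
/// impl Validate for MyExtension {
///     fn validate<P, R>(&self, root: &Root, path: P, report: &mut R)
///     where P: Fn() -> Path, R: FnMut(&dyn Fn() -> Path, Error)
///     {
///         // Delegates to the blanket `Vec<T>` implementation below.
///         self.values.validate(root, || path().key("values"), report);
///     }
/// }
/// ```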
pub trait Validate {
/// Validates the invariants required for the library to function safely.
fn validate<P, R>(&self, _root: &Root, _path: P, _report: &mut R)
where
P: Fn() -> Path,
R: FnMut(&dyn Fn() -> Path, Error),
{
// nop
}
}
/// Specifies what kind of error occured during validation.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Error {
/// An index was found to be out of bounds.
IndexOutOfBounds,
/// An invalid value was identified.
Invalid,
/// Some required data has been omitted.
Missing,
}
/// Specifies a type that has been pre-validated during deserialization or otherwise.
#[derive(Debug, Eq, Hash, PartialEq)]
pub enum Checked<T> {
/// The item is valid.
Valid(T),
/// The item is invalid.
Invalid,
}
impl<T> Checked<T> {
/// Converts from `Checked<T>` to `Checked<&T>`.
pub fn as_ref(&self) -> Checked<&T> {
match *self {
Checked::Valid(ref item) => Checked::Valid(item),
Checked::Invalid => Checked::Invalid,
}
}
/// Takes ownership of the contained item if it is `Valid`.
///
/// # Panics
///
/// Panics if called on an `Invalid` item.
pub fn unwrap(self) -> T {
match self {
Checked::Valid(item) => item,
Checked::Invalid => panic!("attempted to unwrap an invalid item"),
}
}
}
impl<T: Serialize> Serialize for Checked<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
match *self {
Checked::Valid(ref item) => item.serialize(serializer),
Checked::Invalid => Err(ser::Error::custom("invalid item")),
}
}
}
impl<T: Clone> Clone for Checked<T> {
fn clone(&self) -> Self {
match *self {
Checked::Valid(ref item) => Checked::Valid(item.clone()),
Checked::Invalid => Checked::Invalid,
}
}
}
impl<T: Copy> Copy for Checked<T> {}
impl<T: Default> Default for Checked<T> {
fn default() -> Self {
Checked::Valid(T::default())
}
}
impl<T> Validate for Checked<T> {
fn validate<P, R>(&self, _root: &Root, path: P, report: &mut R)
where P: Fn() -> Path, R: FnMut(&dyn Fn() -> Path, Error)
{
match *self {
Checked::Valid(_) => {},
Checked::Invalid => report(&path, Error::Invalid),
}
}
}
impl<K: Eq + Hash + ToString + Validate, V: Validate> Validate for HashMap<K, V> {
fn validate<P, R>(&self, root: &Root, path: P, report: &mut R)
where P: Fn() -> Path, R: FnMut(&dyn Fn() -> Path, Error)
{
for (key, value) in self.iter() {
key.validate(root, || path().key(&key.to_string()), report);
value.validate(root, || path().key(&key.to_string()), report);
}
}
}
impl<T: Validate> Validate for Option<T> {
fn validate<P, R>(&self, root: &Root, path: P, report: &mut R)
where P: Fn() -> Path, R: FnMut(&dyn Fn() -> Path, Error)
{
if let Some(value) = self.as_ref() {
value.validate(root, path, report);
}
}
}
impl<T: Validate> Validate for Vec<T> {
fn validate<P, R>(&self, root: &Root, path: P, report: &mut R)
where P: Fn() -> Path, R: FnMut(&dyn Fn() -> Path, Error)
{
for (index, value) in self.iter().enumerate() {
value.validate(root, || path().index(index), report);
}
}
}
impl Validate for std::boxed::Box<serde_json::value::RawValue> {
fn validate<P, R>(&self, _: &Root, _: P, _: &mut R)
where P: Fn() -> Path, R: FnMut(&dyn Fn() -> Path, Error)
{
// nop
}
}
impl std::error::Error for Error {}
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // `Error::description` is deprecated; keep the human-readable text in `Display`.
        let description = match *self {
            Error::IndexOutOfBounds => "Index out of bounds",
            Error::Invalid => "Invalid value",
            Error::Missing => "Missing data",
        };
        write!(f, "{}", description)
    }
}
// These types are assumed to be always valid.
impl Validate for bool {}
impl Validate for u32 {}
impl Validate for i32 {}
impl Validate for f32 {}
impl Validate for [f32; 3] {}
impl Validate for [f32; 4] {}
impl Validate for [f32; 16] {}
impl Validate for () {}
impl Validate for String {}
impl Validate for serde_json::Value {}
| 27.841176 | 91 | 0.565181 |
f835bd5f4a462dd661787f14f328ce062574d531 | 27,395 | //! Instruction formats and opcodes.
//!
//! The `instructions` module contains definitions for instruction formats, opcodes, and the
//! in-memory representation of IR instructions.
//!
//! A large part of this module is auto-generated from the instruction descriptions in the meta
//! directory.
use alloc::vec::Vec;
use core::fmt::{self, Display, Formatter};
use core::ops::{Deref, DerefMut};
use core::str::FromStr;
use crate::ir::{self, trapcode::TrapCode, types, Block, FuncRef, JumpTable, SigRef, Type, Value};
use crate::isa;
use crate::bitset::BitSet;
use crate::entity;
/// Some instructions use an external list of argument values because there is not enough space in
/// the 16-byte `InstructionData` struct. These value lists are stored in a memory pool in
/// `dfg.value_lists`.
pub type ValueList = entity::EntityList<Value>;
/// Memory pool for holding value lists. See `ValueList`.
pub type ValueListPool = entity::ListPool<Value>;
// Include code generated by `cranelift-codegen/meta/src/gen_inst.rs`. This file contains:
//
// - The `pub enum InstructionFormat` enum with all the instruction formats.
// - The `pub enum InstructionData` enum with all the instruction data fields.
// - The `pub enum Opcode` definition with all known opcodes,
// - The `const OPCODE_FORMAT: [InstructionFormat; N]` table.
// - The private `fn opcode_name(Opcode) -> &'static str` function, and
// - The hash table `const OPCODE_HASH_TABLE: [Opcode; N]`.
//
// For value type constraints:
//
// - The `const OPCODE_CONSTRAINTS : [OpcodeConstraints; N]` table.
// - The `const TYPE_SETS : [ValueTypeSet; N]` table.
// - The `const OPERAND_CONSTRAINTS : [OperandConstraint; N]` table.
//
include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));
impl Display for Opcode {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", opcode_name(*self))
}
}
impl Opcode {
/// Get the instruction format for this opcode.
pub fn format(self) -> InstructionFormat {
OPCODE_FORMAT[self as usize - 1]
}
/// Get the constraint descriptor for this opcode.
/// Panic if this is called on `NotAnOpcode`.
pub fn constraints(self) -> OpcodeConstraints {
OPCODE_CONSTRAINTS[self as usize - 1]
}
/// Returns true if the instruction is a resumable trap.
pub fn is_resumable_trap(&self) -> bool {
match self {
Opcode::ResumableTrap | Opcode::ResumableTrapnz => true,
_ => false,
}
}
}
// This trait really belongs in cranelift-reader where it is used by the `.clif` file parser, but since
// it critically depends on the `opcode_name()` function which is needed here anyway, it lives in
// this module. This also saves us from running the build script twice to generate code for the two
// separate crates.
impl FromStr for Opcode {
type Err = &'static str;
/// Parse an Opcode name from a string.
fn from_str(s: &str) -> Result<Self, &'static str> {
use crate::constant_hash::{probe, simple_hash, Table};
impl<'a> Table<&'a str> for [Option<Opcode>] {
fn len(&self) -> usize {
self.len()
}
fn key(&self, idx: usize) -> Option<&'a str> {
self[idx].map(opcode_name)
}
}
match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
Err(_) => Err("Unknown opcode"),
// We unwrap here because probe() should have ensured that the entry
// at this index is not None.
Ok(i) => Ok(OPCODE_HASH_TABLE[i].unwrap()),
}
}
}
/// A variable list of `Value` operands used for function call arguments and passing arguments to
/// basic blocks.
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);
impl VariableArgs {
/// Create an empty argument list.
pub fn new() -> Self {
Self(Vec::new())
}
/// Add an argument to the end.
pub fn push(&mut self, v: Value) {
self.0.push(v)
}
/// Check if the list is empty.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
/// Convert this to a value list in `pool` with `fixed` prepended.
pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
let mut vlist = ValueList::default();
vlist.extend(fixed.iter().cloned(), pool);
vlist.extend(self.0, pool);
vlist
}
}
// Coerce `VariableArgs` into a `&[Value]` slice.
impl Deref for VariableArgs {
type Target = [Value];
fn deref(&self) -> &[Value] {
&self.0
}
}
impl DerefMut for VariableArgs {
fn deref_mut(&mut self) -> &mut [Value] {
&mut self.0
}
}
impl Display for VariableArgs {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
for (i, val) in self.0.iter().enumerate() {
if i == 0 {
write!(fmt, "{}", val)?;
} else {
write!(fmt, ", {}", val)?;
}
}
Ok(())
}
}
impl Default for VariableArgs {
fn default() -> Self {
Self::new()
}
}
/// Analyzing an instruction.
///
/// Avoid large matches on instruction formats by using the methods defined here to examine
/// instructions.
impl InstructionData {
/// Return information about the destination of a branch or jump instruction.
///
/// Any instruction that can transfer control to another block reveals its possible destinations
/// here.
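    ///
    /// A minimal sketch of how the result is typically consumed (`inst_data`
    /// and `pool` are assumed to come from a `DataFlowGraph`):
    ///
    /// ```ignore
    /// match inst_data.analyze_branch(pool) {
    ///     BranchInfo::SingleDest(block, args) => { /* one target block plus args */ }
    ///     BranchInfo::Table(table, default) => { /* jump table, maybe a default block */ }
    ///     BranchInfo::NotABranch => { /* control does not transfer to another block */ }
    /// }
    /// ```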
pub fn analyze_branch<'a>(&'a self, pool: &'a ValueListPool) -> BranchInfo<'a> {
match *self {
Self::Jump {
destination,
ref args,
..
} => BranchInfo::SingleDest(destination, args.as_slice(pool)),
Self::BranchInt {
destination,
ref args,
..
}
| Self::BranchFloat {
destination,
ref args,
..
}
| Self::Branch {
destination,
ref args,
..
} => BranchInfo::SingleDest(destination, &args.as_slice(pool)[1..]),
Self::BranchIcmp {
destination,
ref args,
..
} => BranchInfo::SingleDest(destination, &args.as_slice(pool)[2..]),
Self::BranchTable {
table, destination, ..
} => BranchInfo::Table(table, Some(destination)),
Self::IndirectJump { table, .. } => BranchInfo::Table(table, None),
_ => {
debug_assert!(!self.opcode().is_branch());
BranchInfo::NotABranch
}
}
}
/// Get the single destination of this branch instruction, if it is a single destination
/// branch or jump.
///
/// Multi-destination branches like `br_table` return `None`.
pub fn branch_destination(&self) -> Option<Block> {
match *self {
Self::Jump { destination, .. }
| Self::Branch { destination, .. }
| Self::BranchInt { destination, .. }
| Self::BranchFloat { destination, .. }
| Self::BranchIcmp { destination, .. } => Some(destination),
Self::BranchTable { .. } | Self::IndirectJump { .. } => None,
_ => {
debug_assert!(!self.opcode().is_branch());
None
}
}
}
/// Get a mutable reference to the single destination of this branch instruction, if it is a
/// single destination branch or jump.
///
/// Multi-destination branches like `br_table` return `None`.
pub fn branch_destination_mut(&mut self) -> Option<&mut Block> {
match *self {
Self::Jump {
ref mut destination,
..
}
| Self::Branch {
ref mut destination,
..
}
| Self::BranchInt {
ref mut destination,
..
}
| Self::BranchFloat {
ref mut destination,
..
}
| Self::BranchIcmp {
ref mut destination,
..
} => Some(destination),
Self::BranchTable { .. } => None,
_ => {
debug_assert!(!self.opcode().is_branch());
None
}
}
}
/// If this is a trapping instruction, get its trap code. Otherwise, return
/// `None`.
pub fn trap_code(&self) -> Option<TrapCode> {
match *self {
Self::CondTrap { code, .. }
| Self::FloatCondTrap { code, .. }
| Self::IntCondTrap { code, .. }
| Self::Trap { code, .. } => Some(code),
_ => None,
}
}
/// If this is a trapping instruction, get an exclusive reference to its
/// trap code. Otherwise, return `None`.
pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
match self {
Self::CondTrap { code, .. }
| Self::FloatCondTrap { code, .. }
| Self::IntCondTrap { code, .. }
| Self::Trap { code, .. } => Some(code),
_ => None,
}
}
/// Return information about a call instruction.
///
/// Any instruction that can call another function reveals its call signature here.
pub fn analyze_call<'a>(&'a self, pool: &'a ValueListPool) -> CallInfo<'a> {
match *self {
Self::Call {
func_ref, ref args, ..
} => CallInfo::Direct(func_ref, args.as_slice(pool)),
Self::CallIndirect {
sig_ref, ref args, ..
} => CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..]),
_ => {
debug_assert!(!self.opcode().is_call());
CallInfo::NotACall
}
}
}
#[inline]
pub(crate) fn sign_extend_immediates(&mut self, ctrl_typevar: Type) {
if ctrl_typevar.is_invalid() {
return;
}
let bit_width = ctrl_typevar.bits();
match self {
Self::BinaryImm64 {
opcode,
arg: _,
imm,
} => {
if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
imm.sign_extend_from_width(bit_width);
}
}
Self::IntCompareImm {
opcode,
arg: _,
cond,
imm,
} => {
debug_assert_eq!(*opcode, Opcode::IcmpImm);
if cond.unsigned() != *cond {
imm.sign_extend_from_width(bit_width);
}
}
_ => {}
}
}
}
/// Information about branch and jump instructions.
pub enum BranchInfo<'a> {
/// This is not a branch or jump instruction.
/// This instruction will not transfer control to another block in the function, but it may still
/// affect control flow by returning or trapping.
NotABranch,
/// This is a branch or jump to a single destination block, possibly taking value arguments.
SingleDest(Block, &'a [Value]),
/// This is a jump table branch which can have many destination blocks and maybe one default block.
Table(JumpTable, Option<Block>),
}
/// Information about call instructions.
pub enum CallInfo<'a> {
/// This is not a call instruction.
NotACall,
/// This is a direct call to an external function declared in the preamble. See
/// `DataFlowGraph.ext_funcs`.
Direct(FuncRef, &'a [Value]),
/// This is an indirect call with the specified signature. See `DataFlowGraph.signatures`.
Indirect(SigRef, &'a [Value]),
}
/// Value type constraints for a given opcode.
///
/// The `InstructionFormat` determines the constraints on most operands, but `Value` operands and
/// results are not determined by the format. Every `Opcode` has an associated
/// `OpcodeConstraints` object that provides the missing details.
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
/// Flags for this opcode encoded as a bit field:
///
/// Bits 0-2:
/// Number of fixed result values. This does not include `variable_args` results as are
/// produced by call instructions.
///
/// Bit 3:
/// This opcode is polymorphic and the controlling type variable can be inferred from the
/// designated input operand. This is the `typevar_operand` index given to the
/// `InstructionFormat` meta language object. When this bit is not set, the controlling
/// type variable must be the first output value instead.
///
/// Bit 4:
/// This opcode is polymorphic and the controlling type variable does *not* appear as the
/// first result type.
///
/// Bits 5-7:
/// Number of fixed value arguments. The minimum required number of value operands.
flags: u8,
/// Permitted set of types for the controlling type variable as an index into `TYPE_SETS`.
typeset_offset: u8,
/// Offset into `OPERAND_CONSTRAINT` table of the descriptors for this opcode. The first
/// `num_fixed_results()` entries describe the result constraints, then follows constraints for
/// the fixed `Value` input operands. (`num_fixed_value_arguments()` of them).
constraint_offset: u16,
}
impl OpcodeConstraints {
/// Can the controlling type variable for this opcode be inferred from the designated value
/// input operand?
/// This also implies that this opcode is polymorphic.
pub fn use_typevar_operand(self) -> bool {
(self.flags & 0x8) != 0
}
/// Is it necessary to look at the designated value input operand in order to determine the
/// controlling type variable, or is it good enough to use the first return type?
///
/// Most polymorphic instructions produce a single result with the type of the controlling type
/// variable. A few polymorphic instructions either don't produce any results, or produce
/// results with a fixed type. These instructions return `true`.
pub fn requires_typevar_operand(self) -> bool {
(self.flags & 0x10) != 0
}
/// Get the number of *fixed* result values produced by this opcode.
/// This does not include `variable_args` produced by calls.
pub fn num_fixed_results(self) -> usize {
(self.flags & 0x7) as usize
}
/// Get the number of *fixed* input values required by this opcode.
///
/// This does not include `variable_args` arguments on call and branch instructions.
///
/// The number of fixed input values is usually implied by the instruction format, but
/// instruction formats that use a `ValueList` put both fixed and variable arguments in the
/// list. This method returns the *minimum* number of values required in the value list.
pub fn num_fixed_value_arguments(self) -> usize {
((self.flags >> 5) & 0x7) as usize
}
/// Get the offset into `TYPE_SETS` for the controlling type variable.
/// Returns `None` if the instruction is not polymorphic.
fn typeset_offset(self) -> Option<usize> {
let offset = usize::from(self.typeset_offset);
if offset < TYPE_SETS.len() {
Some(offset)
} else {
None
}
}
/// Get the offset into OPERAND_CONSTRAINTS where the descriptors for this opcode begin.
fn constraint_offset(self) -> usize {
self.constraint_offset as usize
}
/// Get the value type of result number `n`, having resolved the controlling type variable to
/// `ctrl_type`.
pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
debug_assert!(n < self.num_fixed_results(), "Invalid result index");
if let ResolvedConstraint::Bound(t) =
OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type)
{
t
} else {
panic!("Result constraints can't be free");
}
}
/// Get the value type of input value number `n`, having resolved the controlling type variable
/// to `ctrl_type`.
///
/// Unlike results, it is possible for some input values to vary freely within a specific
/// `ValueTypeSet`. This is represented with the `ArgumentConstraint::Free` variant.
pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
debug_assert!(
n < self.num_fixed_value_arguments(),
"Invalid value argument index"
);
let offset = self.constraint_offset() + self.num_fixed_results();
OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
}
/// Get the typeset of allowed types for the controlling type variable in a polymorphic
/// instruction.
pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
self.typeset_offset().map(|offset| TYPE_SETS[offset])
}
/// Is this instruction polymorphic?
pub fn is_polymorphic(self) -> bool {
self.ctrl_typeset().is_some()
}
}
type BitSet8 = BitSet<u8>;
type BitSet16 = BitSet<u16>;
/// A value type set describes the permitted set of types for a type variable.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct ValueTypeSet {
/// Allowed lane sizes
pub lanes: BitSet16,
/// Allowed int widths
pub ints: BitSet8,
/// Allowed float widths
pub floats: BitSet8,
/// Allowed bool widths
pub bools: BitSet8,
/// Allowed ref widths
pub refs: BitSet8,
}
impl ValueTypeSet {
/// Is `scalar` part of the base type set?
///
/// Note that the base type set does not have to be included in the type set proper.
fn is_base_type(self, scalar: Type) -> bool {
let l2b = scalar.log2_lane_bits();
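        // The per-kind bit sets are indexed by log2 of the lane width in bits,
        // e.g. an `I32` lane yields `log2_lane_bits() == 5`.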
if scalar.is_int() {
self.ints.contains(l2b)
} else if scalar.is_float() {
self.floats.contains(l2b)
} else if scalar.is_bool() {
self.bools.contains(l2b)
} else if scalar.is_ref() {
self.refs.contains(l2b)
} else {
false
}
}
/// Does `typ` belong to this set?
pub fn contains(self, typ: Type) -> bool {
let l2l = typ.log2_lane_count();
self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
}
/// Get an example member of this type set.
///
/// This is used for error messages to avoid suggesting invalid types.
pub fn example(self) -> Type {
let t = if self.ints.max().unwrap_or(0) > 5 {
types::I32
} else if self.floats.max().unwrap_or(0) > 5 {
types::F32
} else if self.bools.max().unwrap_or(0) > 5 {
types::B32
} else {
types::B1
};
t.by(1 << self.lanes.min().unwrap()).unwrap()
}
}
/// Operand constraints. This describes the value type constraints on a single `Value` operand.
enum OperandConstraint {
/// This operand has a concrete value type.
Concrete(Type),
/// This operand can vary freely within the given type set.
/// The type set is identified by its index into the TYPE_SETS constant table.
Free(u8),
/// This operand is the same type as the controlling type variable.
Same,
/// This operand is `ctrlType.lane_of()`.
LaneOf,
/// This operand is `ctrlType.as_bool()`.
AsBool,
/// This operand is `ctrlType.half_width()`.
HalfWidth,
/// This operand is `ctrlType.double_width()`.
DoubleWidth,
/// This operand is `ctrlType.half_vector()`.
HalfVector,
/// This operand is `ctrlType.double_vector()`.
DoubleVector,
/// This operand is `ctrlType.split_lanes()`.
SplitLanes,
}
impl OperandConstraint {
/// Resolve this operand constraint into a concrete value type, given the value of the
/// controlling type variable.
pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
use self::OperandConstraint::*;
use self::ResolvedConstraint::Bound;
match *self {
Concrete(t) => Bound(t),
Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
Same => Bound(ctrl_type),
LaneOf => Bound(ctrl_type.lane_of()),
AsBool => Bound(ctrl_type.as_bool()),
HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
DoubleWidth => Bound(
ctrl_type
.double_width()
.expect("invalid type for double_width"),
),
HalfVector => Bound(
ctrl_type
.half_vector()
.expect("invalid type for half_vector"),
),
DoubleVector => Bound(ctrl_type.by(2).expect("invalid type for double_vector")),
SplitLanes => Bound(
ctrl_type
.split_lanes()
.expect("invalid type for split_lanes"),
),
}
}
}
/// The type constraint on a value argument once the controlling type variable is known.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
/// The operand is bound to a known type.
Bound(Type),
/// The operand type can vary freely within the given set.
Free(ValueTypeSet),
}
#[cfg(test)]
mod tests {
use super::*;
use alloc::string::ToString;
#[test]
fn opcodes() {
use core::mem;
let x = Opcode::Iadd;
let mut y = Opcode::Isub;
assert!(x != y);
y = Opcode::Iadd;
assert_eq!(x, y);
assert_eq!(x.format(), InstructionFormat::Binary);
assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");
// Check the matcher.
assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));
// Opcode is a single byte, and because Option<Opcode> originally came to 2 bytes, early on
// Opcode included a variant NotAnOpcode to avoid the unnecessary bloat. Since then the Rust
// compiler has brought in NonZero optimization, meaning that an enum not using the 0 value
// can be optional for no size cost. We want to ensure Option<Opcode> remains small.
assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
}
#[test]
fn instruction_data() {
use core::mem;
// The size of the `InstructionData` enum is important for performance. It should not
// exceed 16 bytes. Use `Box<FooData>` out-of-line payloads for instruction formats that
// require more space than that. It would be fine with a data structure smaller than 16
// bytes, but what are the odds of that?
assert_eq!(mem::size_of::<InstructionData>(), 16);
}
#[test]
fn constraints() {
let a = Opcode::Iadd.constraints();
assert!(a.use_typevar_operand());
assert!(!a.requires_typevar_operand());
assert_eq!(a.num_fixed_results(), 1);
assert_eq!(a.num_fixed_value_arguments(), 2);
assert_eq!(a.result_type(0, types::I32), types::I32);
assert_eq!(a.result_type(0, types::I8), types::I8);
assert_eq!(
a.value_argument_constraint(0, types::I32),
ResolvedConstraint::Bound(types::I32)
);
assert_eq!(
a.value_argument_constraint(1, types::I32),
ResolvedConstraint::Bound(types::I32)
);
let b = Opcode::Bitcast.constraints();
assert!(!b.use_typevar_operand());
assert!(!b.requires_typevar_operand());
assert_eq!(b.num_fixed_results(), 1);
assert_eq!(b.num_fixed_value_arguments(), 1);
assert_eq!(b.result_type(0, types::I32), types::I32);
assert_eq!(b.result_type(0, types::I8), types::I8);
match b.value_argument_constraint(0, types::I32) {
ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
_ => panic!("Unexpected constraint from value_argument_constraint"),
}
let c = Opcode::Call.constraints();
assert_eq!(c.num_fixed_results(), 0);
assert_eq!(c.num_fixed_value_arguments(), 0);
let i = Opcode::CallIndirect.constraints();
assert_eq!(i.num_fixed_results(), 0);
assert_eq!(i.num_fixed_value_arguments(), 1);
let cmp = Opcode::Icmp.constraints();
assert!(cmp.use_typevar_operand());
assert!(cmp.requires_typevar_operand());
assert_eq!(cmp.num_fixed_results(), 1);
assert_eq!(cmp.num_fixed_value_arguments(), 2);
}
#[test]
fn value_set() {
use crate::ir::types::*;
let vts = ValueTypeSet {
lanes: BitSet16::from_range(0, 8),
ints: BitSet8::from_range(4, 7),
floats: BitSet8::from_range(0, 0),
bools: BitSet8::from_range(3, 7),
refs: BitSet8::from_range(5, 7),
};
assert!(!vts.contains(I8));
assert!(vts.contains(I32));
assert!(vts.contains(I64));
assert!(vts.contains(I32X4));
assert!(!vts.contains(F32));
assert!(!vts.contains(B1));
assert!(vts.contains(B8));
assert!(vts.contains(B64));
assert!(vts.contains(R32));
assert!(vts.contains(R64));
assert_eq!(vts.example().to_string(), "i32");
let vts = ValueTypeSet {
lanes: BitSet16::from_range(0, 8),
ints: BitSet8::from_range(0, 0),
floats: BitSet8::from_range(5, 7),
bools: BitSet8::from_range(3, 7),
refs: BitSet8::from_range(0, 0),
};
assert_eq!(vts.example().to_string(), "f32");
let vts = ValueTypeSet {
lanes: BitSet16::from_range(1, 8),
ints: BitSet8::from_range(0, 0),
floats: BitSet8::from_range(5, 7),
bools: BitSet8::from_range(3, 7),
refs: BitSet8::from_range(0, 0),
};
assert_eq!(vts.example().to_string(), "f32x2");
let vts = ValueTypeSet {
lanes: BitSet16::from_range(2, 8),
ints: BitSet8::from_range(0, 0),
floats: BitSet8::from_range(0, 0),
bools: BitSet8::from_range(3, 7),
refs: BitSet8::from_range(0, 0),
};
assert!(!vts.contains(B32X2));
assert!(vts.contains(B32X4));
assert_eq!(vts.example().to_string(), "b32x4");
let vts = ValueTypeSet {
// TypeSet(lanes=(1, 256), ints=(8, 64))
lanes: BitSet16::from_range(0, 9),
ints: BitSet8::from_range(3, 7),
floats: BitSet8::from_range(0, 0),
bools: BitSet8::from_range(0, 0),
refs: BitSet8::from_range(0, 0),
};
assert!(vts.contains(I32));
assert!(vts.contains(I32X4));
assert!(!vts.contains(R32));
assert!(!vts.contains(R64));
}
}
| 34.85369 | 103 | 0.583245 |
e89ca1469e57cf9f870de8d7649f894fbed25ef9 | 29,186 | /// BigQuery request and response messages for audit log.
/// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
/// `Table.schema` may continue to be present in your logs during this
/// transition.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AuditData {
/// A job completion event.
#[prost(message, optional, tag = "17")]
pub job_completed_event: ::core::option::Option<JobCompletedEvent>,
/// Information about the table access events.
#[prost(message, repeated, tag = "19")]
pub table_data_read_events: ::prost::alloc::vec::Vec<TableDataReadEvent>,
/// Request data for each BigQuery method.
#[prost(oneof = "audit_data::Request", tags = "1, 16, 2, 3, 4, 5, 6, 7, 8, 20")]
pub request: ::core::option::Option<audit_data::Request>,
/// Response data for each BigQuery method.
#[prost(
oneof = "audit_data::Response",
tags = "9, 10, 11, 12, 18, 13, 14, 15, 21"
)]
pub response: ::core::option::Option<audit_data::Response>,
}
/// Nested message and enum types in `AuditData`.
pub mod audit_data {
/// Request data for each BigQuery method.
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum Request {
/// Table insert request.
#[prost(message, tag = "1")]
TableInsertRequest(super::TableInsertRequest),
/// Table update request.
#[prost(message, tag = "16")]
TableUpdateRequest(super::TableUpdateRequest),
/// Dataset list request.
#[prost(message, tag = "2")]
DatasetListRequest(super::DatasetListRequest),
/// Dataset insert request.
#[prost(message, tag = "3")]
DatasetInsertRequest(super::DatasetInsertRequest),
/// Dataset update request.
#[prost(message, tag = "4")]
DatasetUpdateRequest(super::DatasetUpdateRequest),
/// Job insert request.
#[prost(message, tag = "5")]
JobInsertRequest(super::JobInsertRequest),
/// Job query request.
#[prost(message, tag = "6")]
JobQueryRequest(super::JobQueryRequest),
/// Job get query results request.
#[prost(message, tag = "7")]
JobGetQueryResultsRequest(super::JobGetQueryResultsRequest),
/// Table data-list request.
#[prost(message, tag = "8")]
TableDataListRequest(super::TableDataListRequest),
/// Iam policy request.
#[prost(message, tag = "20")]
SetIamPolicyRequest(super::super::super::super::super::iam::v1::SetIamPolicyRequest),
}
/// Response data for each BigQuery method.
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum Response {
/// Table insert response.
#[prost(message, tag = "9")]
TableInsertResponse(super::TableInsertResponse),
/// Table update response.
#[prost(message, tag = "10")]
TableUpdateResponse(super::TableUpdateResponse),
/// Dataset insert response.
#[prost(message, tag = "11")]
DatasetInsertResponse(super::DatasetInsertResponse),
/// Dataset update response.
#[prost(message, tag = "12")]
DatasetUpdateResponse(super::DatasetUpdateResponse),
/// Job insert response.
#[prost(message, tag = "18")]
JobInsertResponse(super::JobInsertResponse),
/// Job query response.
#[prost(message, tag = "13")]
JobQueryResponse(super::JobQueryResponse),
/// Job get query results response.
#[prost(message, tag = "14")]
JobGetQueryResultsResponse(super::JobGetQueryResultsResponse),
/// Deprecated: Job query-done response. Use this information for usage
/// analysis.
#[prost(message, tag = "15")]
JobQueryDoneResponse(super::JobQueryDoneResponse),
/// Iam Policy.
#[prost(message, tag = "21")]
PolicyResponse(super::super::super::super::super::iam::v1::Policy),
}
}
/// Table insert request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableInsertRequest {
/// The new table.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Table>,
}
/// Table update request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableUpdateRequest {
/// The table to be updated.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Table>,
}
/// Table insert response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableInsertResponse {
/// Final state of the inserted table.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Table>,
}
/// Table update response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableUpdateResponse {
/// Final state of the updated table.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Table>,
}
/// Dataset list request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DatasetListRequest {
/// Whether to list all datasets, including hidden ones.
#[prost(bool, tag = "1")]
pub list_all: bool,
}
/// Dataset insert request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DatasetInsertRequest {
/// The dataset to be inserted.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Dataset>,
}
/// Dataset insert response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DatasetInsertResponse {
/// Final state of the inserted dataset.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Dataset>,
}
/// Dataset update request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DatasetUpdateRequest {
/// The dataset to be updated.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Dataset>,
}
/// Dataset update response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DatasetUpdateResponse {
/// Final state of the updated dataset.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Dataset>,
}
/// Job insert request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobInsertRequest {
/// Job insert request.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Job>,
}
/// Job insert response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobInsertResponse {
/// Job insert response.
#[prost(message, optional, tag = "1")]
pub resource: ::core::option::Option<Job>,
}
/// Job query request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobQueryRequest {
/// The query.
#[prost(string, tag = "1")]
pub query: ::prost::alloc::string::String,
/// The maximum number of results.
#[prost(uint32, tag = "2")]
pub max_results: u32,
/// The default dataset for tables that do not have a dataset specified.
#[prost(message, optional, tag = "3")]
pub default_dataset: ::core::option::Option<DatasetName>,
/// Project that the query should be charged to.
#[prost(string, tag = "4")]
pub project_id: ::prost::alloc::string::String,
/// If true, don't actually run the job. Just check that it would run.
#[prost(bool, tag = "5")]
pub dry_run: bool,
}
/// Job query response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobQueryResponse {
/// The total number of rows in the full query result set.
#[prost(uint64, tag = "1")]
pub total_results: u64,
/// Information about the queried job.
#[prost(message, optional, tag = "2")]
pub job: ::core::option::Option<Job>,
}
/// Job getQueryResults request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobGetQueryResultsRequest {
/// Maximum number of results to return.
#[prost(uint32, tag = "1")]
pub max_results: u32,
/// Zero-based row number at which to start.
#[prost(uint64, tag = "2")]
pub start_row: u64,
}
/// Job getQueryResults response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobGetQueryResultsResponse {
/// Total number of results in query results.
#[prost(uint64, tag = "1")]
pub total_results: u64,
/// The job that was created to run the query.
/// It completed if `job.status.state` is `DONE`.
/// It failed if `job.status.errorResult` is also present.
#[prost(message, optional, tag = "2")]
pub job: ::core::option::Option<Job>,
}
/// Job getQueryDone response.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobQueryDoneResponse {
/// The job and status information.
/// The job completed if `job.status.state` is `DONE`.
#[prost(message, optional, tag = "1")]
pub job: ::core::option::Option<Job>,
}
/// Query job completed event.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobCompletedEvent {
/// Name of the event.
#[prost(string, tag = "1")]
pub event_name: ::prost::alloc::string::String,
/// Job information.
#[prost(message, optional, tag = "2")]
pub job: ::core::option::Option<Job>,
}
/// Table data read event. Only present for tables, not views, and is only
/// included in the log record for the project that owns the table.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableDataReadEvent {
/// Name of the accessed table.
#[prost(message, optional, tag = "1")]
pub table_name: ::core::option::Option<TableName>,
/// A list of referenced fields. This information is not included by default.
/// To enable this in the logs, please contact BigQuery support or open a bug
/// in the BigQuery issue tracker.
#[prost(string, repeated, tag = "2")]
pub referenced_fields: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Table data-list request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableDataListRequest {
/// Starting row offset.
#[prost(uint64, tag = "1")]
pub start_row: u64,
/// Maximum number of results to return.
#[prost(uint32, tag = "2")]
pub max_results: u32,
}
/// Describes a BigQuery table.
/// See the \[Table\](/bigquery/docs/reference/v2/tables) API resource
/// for more details on individual fields.
/// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
/// `Table.schema` may continue to be present in your logs during this
/// transition.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Table {
/// The name of the table.
#[prost(message, optional, tag = "1")]
pub table_name: ::core::option::Option<TableName>,
/// User-provided metadata for the table.
#[prost(message, optional, tag = "2")]
pub info: ::core::option::Option<TableInfo>,
/// A JSON representation of the table's schema.
#[prost(string, tag = "8")]
pub schema_json: ::prost::alloc::string::String,
/// If present, this is a virtual table defined by a SQL query.
#[prost(message, optional, tag = "4")]
pub view: ::core::option::Option<TableViewDefinition>,
/// The expiration date for the table, after which the table
/// is deleted and the storage reclaimed.
/// If not present, the table persists indefinitely.
#[prost(message, optional, tag = "5")]
pub expire_time: ::core::option::Option<::prost_types::Timestamp>,
/// The time the table was created.
#[prost(message, optional, tag = "6")]
pub create_time: ::core::option::Option<::prost_types::Timestamp>,
/// The time the table was last truncated
/// by an operation with a `writeDisposition` of `WRITE_TRUNCATE`.
#[prost(message, optional, tag = "7")]
pub truncate_time: ::core::option::Option<::prost_types::Timestamp>,
/// The time the table was last modified.
#[prost(message, optional, tag = "9")]
pub update_time: ::core::option::Option<::prost_types::Timestamp>,
/// The table encryption information. Set when non-default encryption is used.
#[prost(message, optional, tag = "10")]
pub encryption: ::core::option::Option<EncryptionInfo>,
}
/// User-provided metadata for a table.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableInfo {
    /// A short name for the table, such as `"Analytics Data - Jan 2011"`.
#[prost(string, tag = "1")]
pub friendly_name: ::prost::alloc::string::String,
/// A long description, perhaps several paragraphs,
/// describing the table contents in detail.
#[prost(string, tag = "2")]
pub description: ::prost::alloc::string::String,
/// Labels provided for the table.
#[prost(map = "string, string", tag = "3")]
pub labels:
::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// Describes a virtual table defined by a SQL query.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableViewDefinition {
/// SQL query defining the view.
#[prost(string, tag = "1")]
pub query: ::prost::alloc::string::String,
}
/// BigQuery dataset information.
/// See the \[Dataset\](/bigquery/docs/reference/v2/datasets) API resource
/// for more details on individual fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Dataset {
/// The name of the dataset.
#[prost(message, optional, tag = "1")]
pub dataset_name: ::core::option::Option<DatasetName>,
/// User-provided metadata for the dataset.
#[prost(message, optional, tag = "2")]
pub info: ::core::option::Option<DatasetInfo>,
/// The time the dataset was created.
#[prost(message, optional, tag = "4")]
pub create_time: ::core::option::Option<::prost_types::Timestamp>,
/// The time the dataset was last modified.
#[prost(message, optional, tag = "5")]
pub update_time: ::core::option::Option<::prost_types::Timestamp>,
/// The access control list for the dataset.
#[prost(message, optional, tag = "6")]
pub acl: ::core::option::Option<BigQueryAcl>,
/// If this field is present, each table that does not specify an
/// expiration time is assigned an expiration time by adding this
/// duration to the table's `createTime`. If this field is empty,
/// there is no default table expiration time.
#[prost(message, optional, tag = "8")]
pub default_table_expire_duration: ::core::option::Option<::prost_types::Duration>,
}
/// User-provided metadata for a dataset.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DatasetInfo {
    /// A short name for the dataset, such as `"Analytics Data 2011"`.
#[prost(string, tag = "1")]
pub friendly_name: ::prost::alloc::string::String,
/// A long description, perhaps several paragraphs,
/// describing the dataset contents in detail.
#[prost(string, tag = "2")]
pub description: ::prost::alloc::string::String,
/// Labels provided for the dataset.
#[prost(map = "string, string", tag = "3")]
pub labels:
::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// An access control list.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BigQueryAcl {
/// Access control entry list.
#[prost(message, repeated, tag = "1")]
pub entries: ::prost::alloc::vec::Vec<big_query_acl::Entry>,
}
/// Nested message and enum types in `BigQueryAcl`.
pub mod big_query_acl {
/// Access control entry.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Entry {
/// The granted role, which can be `READER`, `WRITER`, or `OWNER`.
#[prost(string, tag = "1")]
pub role: ::prost::alloc::string::String,
/// Grants access to a group identified by an email address.
#[prost(string, tag = "2")]
pub group_email: ::prost::alloc::string::String,
/// Grants access to a user identified by an email address.
#[prost(string, tag = "3")]
pub user_email: ::prost::alloc::string::String,
/// Grants access to all members of a domain.
#[prost(string, tag = "4")]
pub domain: ::prost::alloc::string::String,
/// Grants access to special groups. Valid groups are `PROJECT_OWNERS`,
/// `PROJECT_READERS`, `PROJECT_WRITERS` and `ALL_AUTHENTICATED_USERS`.
#[prost(string, tag = "5")]
pub special_group: ::prost::alloc::string::String,
/// Grants access to a BigQuery View.
#[prost(message, optional, tag = "6")]
pub view_name: ::core::option::Option<super::TableName>,
}
}
/// Describes a job.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Job {
/// Job name.
#[prost(message, optional, tag = "1")]
pub job_name: ::core::option::Option<JobName>,
/// Job configuration.
#[prost(message, optional, tag = "2")]
pub job_configuration: ::core::option::Option<JobConfiguration>,
/// Job status.
#[prost(message, optional, tag = "3")]
pub job_status: ::core::option::Option<JobStatus>,
/// Job statistics.
#[prost(message, optional, tag = "4")]
pub job_statistics: ::core::option::Option<JobStatistics>,
}
/// Job configuration information.
/// See the \[Jobs\](/bigquery/docs/reference/v2/jobs) API resource
/// for more details on individual fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobConfiguration {
/// If true, don't actually run the job. Just check that it would run.
#[prost(bool, tag = "9")]
pub dry_run: bool,
/// Labels provided for the job.
#[prost(map = "string, string", tag = "3")]
pub labels:
::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
/// Job configuration information.
#[prost(oneof = "job_configuration::Configuration", tags = "5, 6, 7, 8")]
pub configuration: ::core::option::Option<job_configuration::Configuration>,
}
/// Nested message and enum types in `JobConfiguration`.
pub mod job_configuration {
/// Describes a query job, which executes a SQL-like query.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Query {
/// The SQL query to run.
#[prost(string, tag = "1")]
pub query: ::prost::alloc::string::String,
/// The table where results are written.
#[prost(message, optional, tag = "2")]
pub destination_table: ::core::option::Option<super::TableName>,
/// Describes when a job is allowed to create a table:
/// `CREATE_IF_NEEDED`, `CREATE_NEVER`.
#[prost(string, tag = "3")]
pub create_disposition: ::prost::alloc::string::String,
/// Describes how writes affect existing tables:
/// `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
#[prost(string, tag = "4")]
pub write_disposition: ::prost::alloc::string::String,
/// If a table name is specified without a dataset in a query,
/// this dataset will be added to table name.
#[prost(message, optional, tag = "5")]
pub default_dataset: ::core::option::Option<super::DatasetName>,
/// Describes data sources outside BigQuery, if needed.
#[prost(message, repeated, tag = "6")]
pub table_definitions: ::prost::alloc::vec::Vec<super::TableDefinition>,
/// Describes the priority given to the query:
/// `QUERY_INTERACTIVE` or `QUERY_BATCH`.
#[prost(string, tag = "7")]
pub query_priority: ::prost::alloc::string::String,
/// Result table encryption information. Set when non-default encryption is
/// used.
#[prost(message, optional, tag = "8")]
pub destination_table_encryption: ::core::option::Option<super::EncryptionInfo>,
/// Type of the statement (e.g. SELECT, INSERT, CREATE_TABLE, CREATE_MODEL..)
#[prost(string, tag = "9")]
pub statement_type: ::prost::alloc::string::String,
}
/// Describes a load job, which loads data from an external source via
/// the import pipeline.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Load {
/// URIs for the data to be imported. Only Google Cloud Storage URIs are
/// supported.
#[prost(string, repeated, tag = "1")]
pub source_uris: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
        /// The table schema, as a JSON-format representation of a TableSchema.
#[prost(string, tag = "6")]
pub schema_json: ::prost::alloc::string::String,
/// The table where the imported data is written.
#[prost(message, optional, tag = "3")]
pub destination_table: ::core::option::Option<super::TableName>,
/// Describes when a job is allowed to create a table:
/// `CREATE_IF_NEEDED`, `CREATE_NEVER`.
#[prost(string, tag = "4")]
pub create_disposition: ::prost::alloc::string::String,
/// Describes how writes affect existing tables:
/// `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
#[prost(string, tag = "5")]
pub write_disposition: ::prost::alloc::string::String,
/// Result table encryption information. Set when non-default encryption is
/// used.
#[prost(message, optional, tag = "7")]
pub destination_table_encryption: ::core::option::Option<super::EncryptionInfo>,
}
/// Describes an extract job, which exports data to an external source
/// via the export pipeline.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Extract {
/// Google Cloud Storage URIs where extracted data should be written.
#[prost(string, repeated, tag = "1")]
pub destination_uris: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
/// The source table.
#[prost(message, optional, tag = "2")]
pub source_table: ::core::option::Option<super::TableName>,
}
/// Describes a copy job, which copies an existing table to another table.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableCopy {
/// Source tables.
#[prost(message, repeated, tag = "1")]
pub source_tables: ::prost::alloc::vec::Vec<super::TableName>,
/// Destination table.
#[prost(message, optional, tag = "2")]
pub destination_table: ::core::option::Option<super::TableName>,
/// Describes when a job is allowed to create a table:
/// `CREATE_IF_NEEDED`, `CREATE_NEVER`.
#[prost(string, tag = "3")]
pub create_disposition: ::prost::alloc::string::String,
/// Describes how writes affect existing tables:
/// `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
#[prost(string, tag = "4")]
pub write_disposition: ::prost::alloc::string::String,
/// Result table encryption information. Set when non-default encryption is
/// used.
#[prost(message, optional, tag = "5")]
pub destination_table_encryption: ::core::option::Option<super::EncryptionInfo>,
}
/// Job configuration information.
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum Configuration {
/// Query job information.
#[prost(message, tag = "5")]
Query(Query),
/// Load job information.
#[prost(message, tag = "6")]
Load(Load),
/// Extract job information.
#[prost(message, tag = "7")]
Extract(Extract),
/// TableCopy job information.
#[prost(message, tag = "8")]
TableCopy(TableCopy),
}
}
/// Describes an external data source used in a query.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableDefinition {
/// Name of the table, used in queries.
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
/// Google Cloud Storage URIs for the data to be imported.
#[prost(string, repeated, tag = "2")]
pub source_uris: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Running state of a job.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobStatus {
/// State of a job: `PENDING`, `RUNNING`, or `DONE`.
#[prost(string, tag = "1")]
pub state: ::prost::alloc::string::String,
/// If the job did not complete successfully, this field describes why.
#[prost(message, optional, tag = "2")]
pub error: ::core::option::Option<super::super::super::super::rpc::Status>,
    /// Errors encountered during the running of the job. These do not necessarily mean
    /// that the job has completed or was unsuccessful.
#[prost(message, repeated, tag = "3")]
pub additional_errors: ::prost::alloc::vec::Vec<super::super::super::super::rpc::Status>,
}
/// Job statistics that may change after a job starts.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobStatistics {
/// Time when the job was created.
#[prost(message, optional, tag = "1")]
pub create_time: ::core::option::Option<::prost_types::Timestamp>,
/// Time when the job started.
#[prost(message, optional, tag = "2")]
pub start_time: ::core::option::Option<::prost_types::Timestamp>,
/// Time when the job ended.
#[prost(message, optional, tag = "3")]
pub end_time: ::core::option::Option<::prost_types::Timestamp>,
/// Total bytes processed for a job.
#[prost(int64, tag = "4")]
pub total_processed_bytes: i64,
/// Processed bytes, adjusted by the job's CPU usage.
#[prost(int64, tag = "5")]
pub total_billed_bytes: i64,
/// The tier assigned by CPU-based billing.
#[prost(int32, tag = "7")]
pub billing_tier: i32,
/// The total number of slot-ms consumed by the query job.
#[prost(int64, tag = "8")]
pub total_slot_ms: i64,
/// Reservation usage.
#[prost(message, repeated, tag = "14")]
pub reservation_usage: ::prost::alloc::vec::Vec<job_statistics::ReservationResourceUsage>,
/// The first N tables accessed by the query job. Older queries that
/// reference a large number of tables may not have all of their
/// tables in this list. You can use the total_tables_processed count to
/// know how many total tables were read in the query. For new queries,
/// there is currently no limit.
#[prost(message, repeated, tag = "9")]
pub referenced_tables: ::prost::alloc::vec::Vec<TableName>,
/// Total number of unique tables referenced in the query.
#[prost(int32, tag = "10")]
pub total_tables_processed: i32,
/// The first N views accessed by the query job. Older queries that
/// reference a large number of views may not have all of their
    /// views in this list. You can use the total_views_processed count to
    /// know how many total views were read in the query. For new queries,
/// there is currently no limit.
#[prost(message, repeated, tag = "11")]
pub referenced_views: ::prost::alloc::vec::Vec<TableName>,
/// Total number of unique views referenced in the query.
#[prost(int32, tag = "12")]
pub total_views_processed: i32,
/// Number of output rows produced by the query job.
#[prost(int64, tag = "15")]
pub query_output_row_count: i64,
/// Total bytes loaded for an import job.
#[prost(int64, tag = "13")]
pub total_load_output_bytes: i64,
}
/// Nested message and enum types in `JobStatistics`.
pub mod job_statistics {
/// Job resource usage breakdown by reservation.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReservationResourceUsage {
/// Reservation name or "unreserved" for on-demand resources usage.
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
/// Total slot milliseconds used by the reservation for a particular job.
#[prost(int64, tag = "2")]
pub slot_ms: i64,
}
}
/// The fully-qualified name for a dataset.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DatasetName {
/// The project ID.
#[prost(string, tag = "1")]
pub project_id: ::prost::alloc::string::String,
/// The dataset ID within the project.
#[prost(string, tag = "2")]
pub dataset_id: ::prost::alloc::string::String,
}
/// The fully-qualified name for a table.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TableName {
/// The project ID.
#[prost(string, tag = "1")]
pub project_id: ::prost::alloc::string::String,
/// The dataset ID within the project.
#[prost(string, tag = "2")]
pub dataset_id: ::prost::alloc::string::String,
/// The table ID of the table within the dataset.
#[prost(string, tag = "3")]
pub table_id: ::prost::alloc::string::String,
}
/// The fully-qualified name for a job.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct JobName {
/// The project ID.
#[prost(string, tag = "1")]
pub project_id: ::prost::alloc::string::String,
/// The job ID within the project.
#[prost(string, tag = "2")]
pub job_id: ::prost::alloc::string::String,
/// The job location.
#[prost(string, tag = "3")]
pub location: ::prost::alloc::string::String,
}
/// Describes encryption properties for a table or a job
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EncryptionInfo {
/// unique identifier for cloud kms key
#[prost(string, tag = "1")]
pub kms_key_name: ::prost::alloc::string::String,
}
| 43.047198 | 100 | 0.640889 |
623abfccbcd6e85828ebe9bb6d139b9fc3ce5bda | 23,339 | // Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
use std::fmt;
use anyhow::{anyhow, Error};
/// This module defines a small language for directly constructing RelationExprs and running
/// various optimizations on them. It uses datadriven, so the output of each test can be rewritten
/// by setting the REWRITE environment variable.
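///
/// For illustration (this exact expression is hypothetical), a relation in this
/// syntax might look like `(project (filter (get t) [#0]) [#1])`; the accepted
/// operator names and argument shapes are the ones matched in `build_rel` and
/// `build_scalar` below.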
/// TODO(justin):
/// * It's currently missing a mechanism to run just a single test file
/// * There is some duplication between this and the SQL planner
/// * Not all operators supported (Reduce)
#[derive(Debug, Clone)]
enum Sexp {
List(Vec<Sexp>),
Atom(String),
}
impl fmt::Display for Sexp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Sexp::Atom(s) => write!(f, "{}", s),
Sexp::List(es) => {
write!(f, "(")?;
let mut split = "";
for e in es {
write!(f, "{}{}", split, e)?;
split = " ";
}
write!(f, ")")
}
}
}
}
struct SexpParser {
s: Vec<char>,
i: usize,
}
impl SexpParser {
fn peek(&self) -> Option<char> {
if self.i < self.s.len() {
Some(self.s[self.i])
} else {
None
}
}
fn munch(&mut self) {
loop {
match self.peek() {
Some(ch) if ch.is_whitespace() => self.i += 1,
_ => break,
}
}
}
fn closer(ch: char) -> Option<char> {
match ch {
'(' => Some(')'),
'[' => Some(']'),
'{' => Some('}'),
_ => None,
}
}
fn is_atom_char(ch: char) -> bool {
('a'..='z').contains(&ch)
|| ('A'..='Z').contains(&ch)
|| ('0'..='9').contains(&ch)
|| ch == '-'
|| ch == '_'
|| ch == '#'
}
fn parse(&mut self) -> Result<Sexp, Error> {
self.munch();
match self.peek() {
None => Err(anyhow!(String::from("unexpected end of sexp"))),
Some(e @ '(') | Some(e @ '[') | Some(e @ '{') => {
self.i += 1;
let mut result = Vec::new();
while self.peek() != SexpParser::closer(e) {
result.push(self.parse()?);
self.munch();
}
self.i += 1;
Ok(Sexp::List(result))
}
Some(ch) if SexpParser::is_atom_char(ch) => {
let start = self.i;
while let Some(ch) = self.peek() {
if !SexpParser::is_atom_char(ch) {
break;
}
self.i += 1;
}
let end = self.i;
self.munch();
let word: String = self.s[start..end].iter().collect();
Ok(Sexp::Atom(word))
}
Some(ch) => Err(anyhow!("unexpected: {}", ch)),
}
}
fn parse_sexp(s: String) -> Result<Sexp, Error> {
let mut p = SexpParser {
s: s.chars().collect(),
i: 0,
};
p.parse()
}
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::fmt::Write;
use anyhow::{anyhow, bail, Error};
use expr::explain::ViewExplanation;
use expr::{
DummyHumanizer, ExprHumanizer, GlobalId, Id, JoinImplementation, LocalId, MirRelationExpr,
MirScalarExpr,
};
use repr::{ColumnType, Datum, RelationType, Row, ScalarType};
use transform::{Optimizer, Transform, TransformArgs};
use super::{Sexp, SexpParser};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum TestType {
Build,
Opt,
Steps,
}
#[derive(Debug)]
struct Scope {
objects: HashMap<String, (Id, RelationType)>,
names: HashMap<Id, String>,
}
impl<'a> Scope {
fn insert(
&mut self,
name: &str,
typ: RelationType,
) -> (LocalId, Option<(Id, RelationType)>) {
let old_val = self.get(name);
let id = LocalId::new(self.objects.len() as u64);
self.set(name, Id::Local(id), typ);
(id, old_val)
}
fn set(&mut self, name: &str, id: Id, typ: RelationType) {
self.objects.insert(name.to_string(), (id, typ));
self.names.insert(id, name.to_string());
}
fn remove(&mut self, name: &str) {
self.objects.remove(name);
}
fn get(&self, name: &str) -> Option<(Id, RelationType)> {
self.objects.get(name).cloned()
}
}
#[derive(Debug)]
struct TestCatalog {
objects: HashMap<String, (GlobalId, RelationType)>,
names: HashMap<GlobalId, String>,
}
impl<'a> TestCatalog {
fn insert(&mut self, name: &str, typ: RelationType) {
// TODO(justin): error on dup name?
let id = GlobalId::User(self.objects.len() as u64);
self.objects.insert(name.to_string(), (id, typ));
self.names.insert(id, name.to_string());
}
fn get(&'a self, name: &str) -> Option<&'a (GlobalId, RelationType)> {
self.objects.get(name)
}
}
impl ExprHumanizer for TestCatalog {
fn humanize_id(&self, id: GlobalId) -> Option<String> {
self.names.get(&id).map(|s| s.to_string())
}
fn humanize_scalar_type(&self, ty: &ScalarType) -> String {
DummyHumanizer.humanize_scalar_type(ty)
}
fn humanize_column_type(&self, ty: &ColumnType) -> String {
DummyHumanizer.humanize_column_type(ty)
}
}
fn nth(s: &Sexp, n: usize) -> Result<Sexp, Error> {
match s {
Sexp::List(l) => {
if n >= l.len() {
Err(anyhow!("can't take {}[{}]", s, n))
} else {
Ok(l[n].clone())
}
}
_ => Err(anyhow!("can't take {}[{}]", s, n)),
}
}
fn try_list(s: Sexp) -> Result<Vec<Sexp>, Error> {
match s {
Sexp::List(s) => Ok(s),
_ => Err(anyhow!("expected {} to be a list", s)),
}
}
fn try_atom(s: &Sexp) -> Result<String, Error> {
match s {
Sexp::Atom(s) => Ok(s.clone()),
_ => Err(anyhow!("expected {} to be an atom", s)),
}
}
fn try_list_of_atoms(s: &Sexp) -> Result<Vec<String>, Error> {
match s {
Sexp::List(s) => s.iter().map(|s| try_atom(s)).collect(),
_ => Err(anyhow!("expected {} to be a list", s)),
}
}
/// Extracts a column reference expression from a Sexp.
fn extract_idx(s: Sexp) -> Result<usize, Error> {
match s {
Sexp::Atom(a) if a.starts_with('#') => {
Ok(a.chars().skip(1).collect::<String>().parse()?)
}
s => Err(anyhow!("expected {} to be a column reference", s)),
}
}
fn build_rel(
s: Sexp,
catalog: &TestCatalog,
scope: &mut Scope,
) -> Result<MirRelationExpr, Error> {
// TODO(justin): cleaner destructuring of a sexp here: this is too lenient at the moment,
// since extra arguments to an operator are ignored.
match try_atom(&nth(&s, 0)?)?.as_str() {
// (get <name>)
"get" => {
let name = try_atom(&nth(&s, 1)?)?;
match scope.get(&name) {
Some((id, typ)) => Ok(MirRelationExpr::Get { id, typ }),
None => match catalog.get(&name) {
None => Err(anyhow!("no catalog object named {}", name)),
Some((id, typ)) => Ok(MirRelationExpr::Get {
id: Id::Global(*id),
typ: typ.clone(),
}),
},
}
}
// (let <name> <value> <body>)
"let" => {
let name = try_atom(&nth(&s, 1)?)?;
let value = build_rel(nth(&s, 2)?, catalog, scope)?;
let (id, prev) = scope.insert(&name, value.typ());
let body = build_rel(nth(&s, 3)?, catalog, scope)?;
if let Some((old_id, old_val)) = prev {
scope.set(&name, old_id, old_val);
} else {
scope.remove(&name)
}
Ok(MirRelationExpr::Let {
id,
value: Box::new(value),
body: Box::new(body),
})
}
// (map <input> [expressions])
"map" => Ok(MirRelationExpr::Map {
input: Box::new(build_rel(nth(&s, 1)?, catalog, scope)?),
scalars: build_scalar_list(nth(&s, 2)?)?,
}),
// (project <input> [<col refs>])
"project" => Ok(MirRelationExpr::Project {
input: Box::new(build_rel(nth(&s, 1)?, catalog, scope)?),
outputs: try_list(nth(&s, 2)?)?
.into_iter()
.map(extract_idx)
.collect::<Result<Vec<usize>, Error>>()?,
}),
// (constant [<rows>] [<types>])
"constant" => {
// TODO(justin): ...fix this.
let mut row_packer = repr::RowPacker::new();
let rows: Vec<(Row, isize)> = try_list(nth(&s, 1)?)?
.into_iter()
.map(try_list)
.collect::<Result<Vec<Vec<Sexp>>, Error>>()?
.into_iter()
.map(|e| {
e.into_iter()
.map(build_scalar)
.collect::<Result<Vec<MirScalarExpr>, Error>>()
})
.collect::<Result<Vec<Vec<MirScalarExpr>>, Error>>()?
.iter()
.map(move |exprs| {
Ok(row_packer.pack(
exprs
.iter()
.map(|e| match e {
MirScalarExpr::Literal(r, _) => {
Ok(r.as_ref().unwrap().iter().next().unwrap().clone())
}
_ => bail!("exprs in constant must be literals"),
})
.collect::<Result<Vec<Datum>, Error>>()?,
))
})
.collect::<Result<Vec<Row>, Error>>()?
.iter()
.map(|r| (r.clone(), 1))
.collect::<Vec<(Row, isize)>>();
Ok(MirRelationExpr::Constant {
rows: Ok(rows),
typ: parse_type_list(nth(&s, 2)?)?,
})
}
// (join [<inputs>] [<equivalences>]])
"join" => {
let inputs = try_list(nth(&s, 1)?)?
.into_iter()
.map(|r| build_rel(r, catalog, scope))
.collect::<Result<Vec<MirRelationExpr>, Error>>()?;
// TODO(justin): is there a way to make this more comprehensible?
let equivalences: Vec<Vec<MirScalarExpr>> = try_list(nth(&s, 2)?)?
.into_iter()
.map(try_list)
.collect::<Result<Vec<Vec<Sexp>>, Error>>()?
.into_iter()
.map(|e| e.into_iter().map(build_scalar).collect())
.collect::<Result<Vec<Vec<MirScalarExpr>>, Error>>()?;
Ok(MirRelationExpr::Join {
inputs,
equivalences,
demand: None,
implementation: JoinImplementation::Unimplemented,
})
}
// (union [<inputs>])
"union" => {
let inputs = try_list(nth(&s, 1)?)?
.into_iter()
.map(|r| build_rel(r, catalog, scope))
.collect::<Result<Vec<MirRelationExpr>, Error>>()?;
Ok(MirRelationExpr::Union {
base: Box::new(inputs[0].clone()),
inputs: inputs[1..].to_vec(),
})
}
// (negate <input>)
"negate" => Ok(MirRelationExpr::Negate {
input: Box::new(build_rel(nth(&s, 1)?, catalog, scope)?),
}),
// (filter <input> <predicate>)
"filter" => Ok(MirRelationExpr::Filter {
input: Box::new(build_rel(nth(&s, 1)?, catalog, scope)?),
predicates: build_scalar_list(nth(&s, 2)?)?,
}),
// (arrange-by <input> [<keys>])
"arrange-by" => Ok(MirRelationExpr::ArrangeBy {
input: Box::new(build_rel(nth(&s, 1)?, catalog, scope)?),
keys: try_list(nth(&s, 2)?)?
.into_iter()
.map(build_scalar_list)
.collect::<Result<Vec<Vec<MirScalarExpr>>, Error>>()?,
}),
// TODO(justin): add the rest of the operators.
name => Err(anyhow!("expected {} to be a relational operator", name)),
}
}
fn build_scalar_list(s: Sexp) -> Result<Vec<MirScalarExpr>, Error> {
try_list(s)?
.into_iter()
.map(build_scalar)
.collect::<Result<Vec<MirScalarExpr>, Error>>()
}
// TODO(justin): is there some way to re-use the sql parser/builder for this?
fn build_scalar(s: Sexp) -> Result<MirScalarExpr, Error> {
match s {
// TODO(justin): support more scalar exprs.
Sexp::Atom(s) => match s.as_str() {
"true" => Ok(MirScalarExpr::literal(Ok(Datum::True), ScalarType::Bool)),
"false" => Ok(MirScalarExpr::literal(Ok(Datum::False), ScalarType::Bool)),
s => {
match s.chars().next() {
None => {
// It shouldn't have parsed as an atom originally.
unreachable!();
}
Some('#') => Ok(MirScalarExpr::Column(extract_idx(Sexp::Atom(
s.to_string(),
))?)),
Some('0') | Some('1') | Some('2') | Some('3') | Some('4') | Some('5')
| Some('6') | Some('7') | Some('8') | Some('9') => {
Ok(MirScalarExpr::literal(
Ok(Datum::Int64(s.parse::<i64>()?)),
ScalarType::Int64,
))
}
_ => Err(anyhow!("couldn't parse scalar: {}", s)),
}
}
},
s => Err(anyhow!("expected {} to be a scalar", s)),
}
}
fn parse_type_list(s: Sexp) -> Result<RelationType, Error> {
let types = try_list_of_atoms(&s)?;
let col_types = types
.iter()
.map(|e| match e.as_str() {
"int32" => Ok(ScalarType::Int32.nullable(true)),
"int64" => Ok(ScalarType::Int64.nullable(true)),
"bool" => Ok(ScalarType::Bool.nullable(true)),
_ => Err(anyhow!("unknown type {}", e)),
})
.collect::<Result<Vec<ColumnType>, Error>>()?;
Ok(RelationType::new(col_types))
}
fn parse_key_list(s: Sexp) -> Result<Vec<Vec<usize>>, Error> {
let keys = try_list(s)?
.into_iter()
.map(|s2| {
let result = try_list_of_atoms(&s2)?
.into_iter()
.map(|e| Ok(e.parse::<usize>()?))
.collect::<Result<Vec<usize>, Error>>();
result
})
.collect::<Result<Vec<Vec<usize>>, Error>>()?;
Ok(keys)
}
fn handle_cat(s: Sexp, cat: &mut TestCatalog) -> Result<(), Error> {
match try_atom(&nth(&s, 0)?)?.as_str() {
"defsource" => {
let name = try_atom(&nth(&s, 1)?)?;
let mut typ = parse_type_list(nth(&s, 2)?)?;
if let Ok(sexp) = nth(&s, 3) {
typ.keys = parse_key_list(sexp)?;
}
cat.insert(&name, typ);
Ok(())
}
s => Err(anyhow!("not a valid catalog command: {}", s)),
}
}
fn generate_explanation(
rel: &MirRelationExpr,
cat: &TestCatalog,
format: Option<&Vec<String>>,
) -> String {
let mut explanation = ViewExplanation::new(rel, cat);
if let Some(format) = format {
if format.contains(&"types".to_string()) {
explanation.explain_types();
}
}
explanation.to_string()
}
fn run_testcase(
s: &str,
cat: &TestCatalog,
args: &HashMap<String, Vec<String>>,
test_type: TestType,
) -> Result<String, Error> {
let mut scope = Scope {
objects: HashMap::new(),
names: HashMap::new(),
};
let mut rel = build_rel(SexpParser::parse_sexp(s.to_string())?, &cat, &mut scope)?;
let mut id_gen = Default::default();
let indexes = HashMap::new();
for t in args.get("apply").cloned().unwrap_or_else(Vec::new).iter() {
get_transform(t)?.transform(
&mut rel,
TransformArgs {
id_gen: &mut id_gen,
indexes: &indexes,
},
)?;
}
match test_type {
TestType::Opt => {
let mut opt: Optimizer = Default::default();
rel = opt.optimize(rel, &HashMap::new()).unwrap().into_inner();
Ok(generate_explanation(&rel, &cat, args.get("format")))
}
TestType::Build => Ok(generate_explanation(&rel, &cat, args.get("format"))),
TestType::Steps => {
// TODO(justin): this thing does not currently peek into fixpoints, so it's not
// that helpful for optimizations that involve those (which is most of them).
let opt: Optimizer = Default::default();
let mut out = String::new();
// Buffer of the names of the transformations that have been applied with no changes.
let mut no_change: Vec<String> = Vec::new();
writeln!(
out,
"{}",
generate_explanation(&rel, &cat, args.get("format"))
)?;
writeln!(out, "====")?;
for transform in opt.transforms.iter() {
let prev = rel.clone();
transform.transform(
&mut rel,
TransformArgs {
id_gen: &mut id_gen,
indexes: &indexes,
},
)?;
if rel != prev {
if no_change.len() > 0 {
write!(out, "No change:")?;
let mut sep = " ";
for t in no_change {
write!(out, "{}{}", sep, t)?;
sep = ", ";
}
writeln!(out, "\n====")?;
}
no_change = vec![];
write!(out, "Applied {:?}:", transform)?;
writeln!(
out,
"\n{}",
generate_explanation(&rel, &cat, args.get("format"))
)?;
writeln!(out, "====")?;
} else {
no_change.push(format!("{:?}", transform));
}
}
if no_change.len() > 0 {
write!(out, "No change:")?;
let mut sep = " ";
for t in no_change {
write!(out, "{}{}", sep, t)?;
sep = ", ";
}
writeln!(out, "\n====")?;
}
writeln!(out, "Final:")?;
writeln!(
out,
"{}",
generate_explanation(&rel, &cat, args.get("format"))
)?;
writeln!(out, "====")?;
Ok(out)
}
}
}
fn get_transform(name: &str) -> Result<Box<dyn Transform>, Error> {
// TODO(justin): is there a way to just extract these from the Optimizer list of
// transforms?
match name {
"PredicatePushdown" => Ok(Box::new(transform::predicate_pushdown::PredicatePushdown)),
_ => Err(anyhow!(
"no transform named {} (you might have to add it to get_transform)",
name
)),
}
}
#[test]
fn run() {
datadriven::walk("tests/testdata", |f| {
let mut catalog = TestCatalog {
objects: HashMap::new(),
names: HashMap::new(),
};
f.run(move |s| -> String {
match s.directive.as_str() {
"cat" => {
match handle_cat(
SexpParser::parse_sexp(s.input.clone()).unwrap(),
&mut catalog,
) {
Ok(()) => String::from("ok\n"),
Err(err) => format!("error: {}\n", err),
}
}
"build" => match run_testcase(&s.input, &catalog, &s.args, TestType::Build) {
Ok(msg) => msg,
Err(err) => format!("error: {}\n", err),
},
"opt" => match run_testcase(&s.input, &catalog, &s.args, TestType::Opt) {
Ok(msg) => msg,
Err(err) => format!("error: {}\n", err),
},
"steps" => match run_testcase(&s.input, &catalog, &s.args, TestType::Steps) {
Ok(msg) => msg,
Err(err) => format!("error: {}\n", err),
},
_ => panic!("unknown directive: {}", s.directive),
}
})
});
}
}
| 35.043544 | 101 | 0.419727 |
1a71f2d038533bd45e43994f12baf258beef0765 | 9,019 | //! # crateinfo
//!
//! Loads crate information.
//!
#[cfg(test)]
#[path = "crateinfo_test.rs"]
mod crateinfo_test;
use crate::types::{CrateDependency, CrateInfo};
use cargo_metadata::camino::Utf8PathBuf;
use cargo_metadata::MetadataCommand;
use fsio;
use glob::glob;
use std::env;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
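// Expands a glob workspace member (e.g. a pattern containing `*`) into the list of
// matching paths, normalizing `\` to `/`; returns an empty vector if the pattern is invalid.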
fn expand_glob_members(glob_member: &str) -> Vec<String> {
match glob(glob_member) {
Ok(entries) => {
let mut members = vec![];
for entry in entries {
match entry {
Ok(path) => {
let mut updated_path = path.to_str().unwrap().to_string();
updated_path = updated_path.replace("\\", "/");
members.push(updated_path);
}
_ => (),
};
}
members
}
_ => vec![],
}
}
fn normalize_members(crate_info: &mut CrateInfo) {
match crate_info.workspace {
Some(ref mut workspace) => {
match workspace.members {
Some(ref mut members) => {
let existing_members = members.clone();
let mut index = 0;
for member in existing_members.iter() {
// glob
if member.contains("*") {
let mut expanded_members = expand_glob_members(&member);
members.remove(index);
members.append(&mut expanded_members);
} else {
index = index + 1;
}
}
}
None => (),
};
}
None => (), //not a workspace
}
}
fn get_members_from_dependencies(crate_info: &CrateInfo) -> Vec<String> {
let mut members = vec![];
match crate_info.dependencies {
Some(ref dependencies) => {
for value in dependencies.values() {
match *value {
CrateDependency::Info(ref info) => match info.path {
Some(ref path) => {
if path.starts_with("./") {
let member_path =
path.chars().skip(2).take(path.len() - 2).collect();
members.push(member_path);
}
}
None => (),
},
_ => (),
};
}
}
None => (),
};
members
}
fn add_members(crate_info: &mut CrateInfo, new_members: Vec<String>) {
if new_members.len() > 0 {
match crate_info.workspace {
Some(ref mut workspace) => match workspace.members {
Some(ref mut members) => {
for new_member in new_members.iter() {
let member_string = new_member.to_string();
match members.iter().position(|member| *member == member_string) {
None => members.push(member_string),
_ => (),
}
}
}
None => workspace.members = Some(new_members),
},
None => (), //not a workspace
}
}
}
fn remove_excludes(crate_info: &mut CrateInfo) -> bool {
let mut removed = false;
match crate_info.workspace {
Some(ref mut workspace) => match workspace.exclude {
Some(ref excludes) => match workspace.members {
Some(ref mut members) => {
for exclude in excludes.iter() {
let exclude_string = exclude.to_string();
let result = members.iter().position(|member| *member == exclude_string);
match result {
Some(index) => {
members.remove(index);
removed = true;
()
}
None => (),
};
}
}
None => (),
},
None => (),
},
None => (), //not a workspace
};
removed
}
fn load_workspace_members(crate_info: &mut CrateInfo) {
if crate_info.workspace.is_some() {
normalize_members(crate_info);
let dependencies = get_members_from_dependencies(&crate_info);
add_members(crate_info, dependencies);
remove_excludes(crate_info);
}
}
/// Loads the crate info based on the Cargo.toml found in the current working directory.
pub(crate) fn load() -> CrateInfo {
load_from(Path::new("Cargo.toml").to_path_buf())
}
pub(crate) fn load_from(file_path: PathBuf) -> CrateInfo {
if file_path.exists() {
debug!("Reading file: {:#?}", &file_path);
let crate_info_string = match fsio::file::read_text_file(&file_path) {
Ok(content) => content,
Err(error) => panic!("Unable to open Cargo.toml, error: {}", error),
};
let mut crate_info: CrateInfo = match toml::from_str(&crate_info_string) {
Ok(value) => value,
Err(error) => panic!("Unable to parse Cargo.toml, {}", error),
};
load_workspace_members(&mut crate_info);
debug!("Loaded Cargo.toml: {:#?}", &crate_info);
crate_info
} else {
CrateInfo::new()
}
}
#[derive(Debug, Deserialize)]
struct CargoConfig {
build: Option<CargoConfigBuild>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct CargoConfigBuild {
target: Option<RustTarget>,
target_dir: Option<PathBuf>,
}
#[derive(Debug, Deserialize)]
#[serde(from = "PathBuf")]
struct RustTarget(PathBuf);
impl RustTarget {
fn name(&self) -> &str {
self.0.file_stem().unwrap().to_str().unwrap()
}
}
impl From<PathBuf> for RustTarget {
fn from(buf: PathBuf) -> Self {
Self(buf)
}
}
impl AsRef<OsStr> for RustTarget {
fn as_ref(&self) -> &OsStr {
self.0.as_ref()
}
}
fn get_cargo_config(home: Option<PathBuf>) -> Option<CargoConfig> {
let path = env::current_dir().ok()?;
let config_file = path
.ancestors()
.map(|ancestor| ancestor.join(".cargo"))
.chain(home)
.map(|config_file| config_file.join("config"))
.filter_map(|config_file| {
let config_file_with_extension = config_file.with_extension("toml");
if config_file.exists() {
Some(config_file)
} else if config_file_with_extension.exists() {
Some(config_file_with_extension)
} else {
None
}
})
.next()?;
let config_file = fsio::file::read_text_file(&config_file).ok()?;
toml::from_str(&config_file).ok()
}
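/// Returns the target triple to build for: the `build.target` value from the nearest
/// cargo config if one is set, otherwise the provided default.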
pub(crate) fn crate_target_triple(
default_target_triple: Option<String>,
home: Option<PathBuf>,
) -> Option<String> {
get_cargo_config(home)
.and_then(|config| config.build)
.and_then(|build| build.target)
.map(|target| target.name().to_string())
.or(default_target_triple)
}
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct CrateTargetDirs {
pub(crate) host: Utf8PathBuf,
pub(crate) custom: Option<Utf8PathBuf>,
}
pub(crate) fn crate_target_dirs(home: Option<PathBuf>) -> CrateTargetDirs {
let metadata = MetadataCommand::new()
.exec()
.map_err(|err| debug!("Unable to extract cargo metadata, error: {}", err))
.ok();
let host = metadata
.map(|metadata| metadata.target_directory)
.unwrap_or_else(|| "target".into());
let custom = get_cargo_config(home)
.and_then(|config| config.build)
.and_then(|build| build.target)
.map(|target_triple| host.join(target_triple.name()));
CrateTargetDirs { host, custom }
}
pub(crate) fn search_workspace_root() -> Option<String> {
if envmnt::is("CARGO_MAKE_WORKSPACE_EMULATION") {
search_workspace_root_for_emulation()
} else {
search_workspace_root_via_metadata()
}
}
fn search_workspace_root_for_emulation() -> Option<String> {
let path_value = envmnt::get_any(
&vec![
"CARGO_MAKE_WORKSPACE_EMULATION_ROOT_DIRECTORY",
"CARGO_MAKE_WORKING_DIRECTORY",
],
"",
);
if path_value.is_empty() {
None
} else {
Some(path_value)
}
}
fn search_workspace_root_via_metadata() -> Option<String> {
debug!("Getting cargo metadata.");
MetadataCommand::new()
.exec()
.map(|metadata| metadata.workspace_root.to_string())
.map_err(|err| debug!("Unable to extract cargo metadata, error: {:#?}", err))
.ok()
}
| 28.814696 | 97 | 0.517241 |
f9f2ea29d2f9e0f7c65b3ab81c6096db0da2c4f6 | 1,095 | // Copyright 2019-2022 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use std::process::Command;
#[cfg(not(feature = "release"))]
const RELEASE_TRACK: &str = "unstable";
#[cfg(feature = "release")]
const RELEASE_TRACK: &str = "alpha";
const NETWORK: &str = if cfg!(feature = "devnet") {
"devnet"
} else if cfg!(feature = "interopnet") {
"interopnet"
} else if cfg!(feature = "calibnet") {
"calibnet"
} else {
"mainnet"
};
fn main() {
// expose environment variable FOREST_VERSON at build time
println!("cargo:rustc-env=FOREST_VERSION={}", version());
}
// returns version string at build time, e.g., `v0.1.0/unstable/mainnet/7af2f5bf`
fn version() -> String {
let git_cmd = Command::new("git")
.args(&["rev-parse", "--short", "HEAD"])
.output()
.expect("Git references should be available on a build system");
let git_hash = String::from_utf8(git_cmd.stdout).unwrap_or_default();
format!(
"v{}/{}/{}/{}",
env!("CARGO_PKG_VERSION"),
RELEASE_TRACK,
NETWORK,
git_hash,
)
}
| 26.071429 | 81 | 0.618265 |
feece517cebf4660cbd35151c8d83019b7af671d | 29,912 | use rustc_ast as ast;
use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor};
use rustc_ast::{AssocTyConstraint, AssocTyConstraintKind, NodeId};
use rustc_ast::{PatKind, RangeEnd};
use rustc_errors::struct_span_err;
use rustc_feature::{AttributeGate, BUILTIN_ATTRIBUTE_MAP};
use rustc_feature::{Features, GateIssue};
use rustc_session::parse::feature_err_issue;
use rustc_session::Session;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::sym;
use rustc_span::Span;
use tracing::debug;
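// `gate_feature_fn!` emits a feature-gate error for `span` unless the feature is enabled
// or the span allows unstable use; `gate_feature_post!` is the same, keyed by the
// feature's identifier, with an optional help message.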
macro_rules! gate_feature_fn {
($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr, $help: expr) => {{
let (visitor, has_feature, span, name, explain, help) =
(&*$visitor, $has_feature, $span, $name, $explain, $help);
let has_feature: bool = has_feature(visitor.features);
debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature);
if !has_feature && !span.allows_unstable($name) {
feature_err_issue(&visitor.sess.parse_sess, name, span, GateIssue::Language, explain)
.help(help)
.emit();
}
}};
($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{
let (visitor, has_feature, span, name, explain) =
(&*$visitor, $has_feature, $span, $name, $explain);
let has_feature: bool = has_feature(visitor.features);
debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature);
if !has_feature && !span.allows_unstable($name) {
feature_err_issue(&visitor.sess.parse_sess, name, span, GateIssue::Language, explain)
.emit();
}
}};
}
macro_rules! gate_feature_post {
($visitor: expr, $feature: ident, $span: expr, $explain: expr, $help: expr) => {
gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain, $help)
};
($visitor: expr, $feature: ident, $span: expr, $explain: expr) => {
gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain)
};
}
pub fn check_attribute(attr: &ast::Attribute, sess: &Session, features: &Features) {
PostExpansionVisitor { sess, features }.visit_attribute(attr)
}
struct PostExpansionVisitor<'a> {
sess: &'a Session,
// `sess` contains a `Features`, but this might not be that one.
features: &'a Features,
}
impl<'a> PostExpansionVisitor<'a> {
fn check_abi(&self, abi: ast::StrLit) {
let ast::StrLit { symbol_unescaped, span, .. } = abi;
match &*symbol_unescaped.as_str() {
// Stable
"Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64"
| "system" => {}
"rust-intrinsic" => {
gate_feature_post!(&self, intrinsics, span, "intrinsics are subject to change");
}
"platform-intrinsic" => {
gate_feature_post!(
&self,
platform_intrinsics,
span,
"platform intrinsics are experimental and possibly buggy"
);
}
"vectorcall" => {
gate_feature_post!(
&self,
abi_vectorcall,
span,
"vectorcall is experimental and subject to change"
);
}
"thiscall" => {
gate_feature_post!(
&self,
abi_thiscall,
span,
"thiscall is experimental and subject to change"
);
}
"rust-call" => {
gate_feature_post!(
&self,
unboxed_closures,
span,
"rust-call ABI is subject to change"
);
}
"ptx-kernel" => {
gate_feature_post!(
&self,
abi_ptx,
span,
"PTX ABIs are experimental and subject to change"
);
}
"unadjusted" => {
gate_feature_post!(
&self,
abi_unadjusted,
span,
"unadjusted ABI is an implementation detail and perma-unstable"
);
}
"msp430-interrupt" => {
gate_feature_post!(
&self,
abi_msp430_interrupt,
span,
"msp430-interrupt ABI is experimental and subject to change"
);
}
"x86-interrupt" => {
gate_feature_post!(
&self,
abi_x86_interrupt,
span,
"x86-interrupt ABI is experimental and subject to change"
);
}
"amdgpu-kernel" => {
gate_feature_post!(
&self,
abi_amdgpu_kernel,
span,
"amdgpu-kernel ABI is experimental and subject to change"
);
}
"avr-interrupt" | "avr-non-blocking-interrupt" => {
gate_feature_post!(
&self,
abi_avr_interrupt,
span,
"avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change"
);
}
"efiapi" => {
gate_feature_post!(
&self,
abi_efiapi,
span,
"efiapi ABI is experimental and subject to change"
);
}
"C-cmse-nonsecure-call" => {
gate_feature_post!(
&self,
abi_c_cmse_nonsecure_call,
span,
"C-cmse-nonsecure-call ABI is experimental and subject to change"
);
}
"C-unwind" => {
gate_feature_post!(
&self,
c_unwind,
span,
"C-unwind ABI is experimental and subject to change"
);
}
"stdcall-unwind" => {
gate_feature_post!(
&self,
c_unwind,
span,
"stdcall-unwind ABI is experimental and subject to change"
);
}
"system-unwind" => {
gate_feature_post!(
&self,
c_unwind,
span,
"system-unwind ABI is experimental and subject to change"
);
}
"thiscall-unwind" => {
gate_feature_post!(
&self,
c_unwind,
span,
"thiscall-unwind ABI is experimental and subject to change"
);
}
"wasm" => {
gate_feature_post!(
&self,
wasm_abi,
span,
"wasm ABI is experimental and subject to change"
);
}
abi => self
.sess
.parse_sess
.span_diagnostic
.delay_span_bug(span, &format!("unrecognized ABI not caught in lowering: {}", abi)),
}
}
fn check_extern(&self, ext: ast::Extern) {
if let ast::Extern::Explicit(abi) = ext {
self.check_abi(abi);
}
}
fn check_gat(&self, generics: &ast::Generics, span: Span) {
if !generics.params.is_empty() {
gate_feature_post!(
&self,
generic_associated_types,
span,
"generic associated types are unstable"
);
}
if !generics.where_clause.predicates.is_empty() {
gate_feature_post!(
&self,
generic_associated_types,
span,
"where clauses on associated types are unstable"
);
}
}
/// Feature gate `impl Trait` inside `type Alias = $type_expr;`.
fn check_impl_trait(&self, ty: &ast::Ty) {
struct ImplTraitVisitor<'a> {
vis: &'a PostExpansionVisitor<'a>,
}
impl Visitor<'_> for ImplTraitVisitor<'_> {
fn visit_ty(&mut self, ty: &ast::Ty) {
if let ast::TyKind::ImplTrait(..) = ty.kind {
gate_feature_post!(
&self.vis,
type_alias_impl_trait,
ty.span,
"`impl Trait` in type aliases is unstable"
);
}
visit::walk_ty(self, ty);
}
}
ImplTraitVisitor { vis: self }.visit_ty(ty);
}
}
impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
fn visit_attribute(&mut self, attr: &ast::Attribute) {
let attr_info =
attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)).map(|a| **a);
// Check feature gates for built-in attributes.
if let Some((.., AttributeGate::Gated(_, name, descr, has_feature))) = attr_info {
gate_feature_fn!(self, has_feature, attr.span, name, descr);
}
// Check unstable flavors of the `#[doc]` attribute.
if self.sess.check_name(attr, sym::doc) {
for nested_meta in attr.meta_item_list().unwrap_or_default() {
macro_rules! gate_doc { ($($name:ident => $feature:ident)*) => {
$(if nested_meta.has_name(sym::$name) {
let msg = concat!("`#[doc(", stringify!($name), ")]` is experimental");
gate_feature_post!(self, $feature, attr.span, msg);
})*
}}
gate_doc!(
cfg => doc_cfg
masked => doc_masked
notable_trait => doc_notable_trait
keyword => doc_keyword
);
}
}
// Check for unstable modifiers on `#[link(..)]` attribute
if self.sess.check_name(attr, sym::link) {
for nested_meta in attr.meta_item_list().unwrap_or_default() {
if nested_meta.has_name(sym::modifiers) {
gate_feature_post!(
self,
native_link_modifiers,
nested_meta.span(),
"native link modifiers are experimental"
);
if let Some(modifiers) = nested_meta.value_str() {
for modifier in modifiers.as_str().split(',') {
if let Some(modifier) = modifier.strip_prefix(&['+', '-'][..]) {
macro_rules! gate_modifier { ($($name:literal => $feature:ident)*) => {
$(if modifier == $name {
let msg = concat!("`#[link(modifiers=\"", $name, "\")]` is unstable");
gate_feature_post!(
self,
$feature,
nested_meta.name_value_literal_span().unwrap(),
msg
);
})*
}}
gate_modifier!(
"bundle" => native_link_modifiers_bundle
"verbatim" => native_link_modifiers_verbatim
"whole-archive" => native_link_modifiers_whole_archive
"as-needed" => native_link_modifiers_as_needed
);
}
}
}
}
}
}
}
fn visit_item(&mut self, i: &'a ast::Item) {
match i.kind {
ast::ItemKind::ForeignMod(ref foreign_module) => {
if let Some(abi) = foreign_module.abi {
self.check_abi(abi);
}
}
ast::ItemKind::Fn(..) => {
if self.sess.contains_name(&i.attrs[..], sym::start) {
gate_feature_post!(
&self,
start,
i.span,
"`#[start]` functions are experimental \
and their signature may change \
over time"
);
}
}
ast::ItemKind::Struct(..) => {
for attr in self.sess.filter_by_name(&i.attrs[..], sym::repr) {
for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
if item.has_name(sym::simd) {
gate_feature_post!(
&self,
repr_simd,
attr.span,
"SIMD types are experimental and possibly buggy"
);
}
}
}
}
ast::ItemKind::Impl(box ast::ImplKind {
polarity, defaultness, ref of_trait, ..
}) => {
if let ast::ImplPolarity::Negative(span) = polarity {
gate_feature_post!(
&self,
negative_impls,
span.to(of_trait.as_ref().map_or(span, |t| t.path.span)),
"negative trait bounds are not yet fully implemented; \
use marker types for now"
);
}
if let ast::Defaultness::Default(_) = defaultness {
gate_feature_post!(&self, specialization, i.span, "specialization is unstable");
}
}
ast::ItemKind::Trait(box ast::TraitKind(ast::IsAuto::Yes, ..)) => {
gate_feature_post!(
&self,
auto_traits,
i.span,
"auto traits are experimental and possibly buggy"
);
}
ast::ItemKind::TraitAlias(..) => {
gate_feature_post!(&self, trait_alias, i.span, "trait aliases are experimental");
}
ast::ItemKind::MacroDef(ast::MacroDef { macro_rules: false, .. }) => {
let msg = "`macro` is experimental";
gate_feature_post!(&self, decl_macro, i.span, msg);
}
ast::ItemKind::TyAlias(box ast::TyAliasKind(_, _, _, Some(ref ty))) => {
self.check_impl_trait(&ty)
}
_ => {}
}
visit::walk_item(self, i);
}
fn visit_foreign_item(&mut self, i: &'a ast::ForeignItem) {
match i.kind {
ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => {
let link_name = self.sess.first_attr_value_str_by_name(&i.attrs, sym::link_name);
let links_to_llvm =
link_name.map_or(false, |val| val.as_str().starts_with("llvm."));
if links_to_llvm {
gate_feature_post!(
&self,
link_llvm_intrinsics,
i.span,
"linking to LLVM intrinsics is experimental"
);
}
}
ast::ForeignItemKind::TyAlias(..) => {
gate_feature_post!(&self, extern_types, i.span, "extern types are experimental");
}
ast::ForeignItemKind::MacCall(..) => {}
}
visit::walk_foreign_item(self, i)
}
fn visit_ty(&mut self, ty: &'a ast::Ty) {
match ty.kind {
ast::TyKind::BareFn(ref bare_fn_ty) => {
self.check_extern(bare_fn_ty.ext);
}
ast::TyKind::Never => {
gate_feature_post!(&self, never_type, ty.span, "the `!` type is experimental");
}
_ => {}
}
visit::walk_ty(self, ty)
}
fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FnRetTy) {
if let ast::FnRetTy::Ty(ref output_ty) = *ret_ty {
if let ast::TyKind::Never = output_ty.kind {
// Do nothing.
} else {
self.visit_ty(output_ty)
}
}
}
fn visit_expr(&mut self, e: &'a ast::Expr) {
match e.kind {
ast::ExprKind::Box(_) => {
gate_feature_post!(
&self,
box_syntax,
e.span,
"box expression syntax is experimental; you can call `Box::new` instead"
);
}
ast::ExprKind::Type(..) => {
// To avoid noise about type ascription in common syntax errors, only emit if it
// is the *only* error.
if self.sess.parse_sess.span_diagnostic.err_count() == 0 {
gate_feature_post!(
&self,
type_ascription,
e.span,
"type ascription is experimental"
);
}
}
ast::ExprKind::TryBlock(_) => {
gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental");
}
ast::ExprKind::Block(_, opt_label) => {
if let Some(label) = opt_label {
gate_feature_post!(
&self,
label_break_value,
label.ident.span,
"labels on blocks are unstable"
);
}
}
_ => {}
}
visit::walk_expr(self, e)
}
fn visit_pat(&mut self, pattern: &'a ast::Pat) {
match &pattern.kind {
PatKind::Slice(pats) => {
for pat in pats {
let inner_pat = match &pat.kind {
PatKind::Ident(.., Some(pat)) => pat,
_ => pat,
};
if let PatKind::Range(Some(_), None, Spanned { .. }) = inner_pat.kind {
gate_feature_post!(
&self,
half_open_range_patterns,
pat.span,
"`X..` patterns in slices are experimental"
);
}
}
}
PatKind::Box(..) => {
gate_feature_post!(
&self,
box_patterns,
pattern.span,
"box pattern syntax is experimental"
);
}
PatKind::Range(_, Some(_), Spanned { node: RangeEnd::Excluded, .. }) => {
gate_feature_post!(
&self,
exclusive_range_pattern,
pattern.span,
"exclusive range pattern syntax is experimental"
);
}
_ => {}
}
visit::walk_pat(self, pattern)
}
fn visit_fn(&mut self, fn_kind: FnKind<'a>, span: Span, _: NodeId) {
if let Some(header) = fn_kind.header() {
// Stability of const fn methods are covered in `visit_assoc_item` below.
self.check_extern(header.ext);
if let (ast::Const::Yes(_), ast::Extern::Implicit)
| (ast::Const::Yes(_), ast::Extern::Explicit(_)) = (header.constness, header.ext)
{
gate_feature_post!(
&self,
const_extern_fn,
span,
"`const extern fn` definitions are unstable"
);
}
}
if fn_kind.ctxt() != Some(FnCtxt::Foreign) && fn_kind.decl().c_variadic() {
gate_feature_post!(&self, c_variadic, span, "C-variadic functions are unstable");
}
visit::walk_fn(self, fn_kind, span)
}
fn visit_assoc_ty_constraint(&mut self, constraint: &'a AssocTyConstraint) {
if let AssocTyConstraintKind::Bound { .. } = constraint.kind {
gate_feature_post!(
&self,
associated_type_bounds,
constraint.span,
"associated type bounds are unstable"
)
}
visit::walk_assoc_ty_constraint(self, constraint)
}
fn visit_assoc_item(&mut self, i: &'a ast::AssocItem, ctxt: AssocCtxt) {
let is_fn = match i.kind {
ast::AssocItemKind::Fn(_) => true,
ast::AssocItemKind::TyAlias(box ast::TyAliasKind(_, ref generics, _, ref ty)) => {
if let (Some(_), AssocCtxt::Trait) = (ty, ctxt) {
gate_feature_post!(
&self,
associated_type_defaults,
i.span,
"associated type defaults are unstable"
);
}
if let Some(ty) = ty {
self.check_impl_trait(ty);
}
self.check_gat(generics, i.span);
false
}
_ => false,
};
if let ast::Defaultness::Default(_) = i.kind.defaultness() {
// Limit `min_specialization` to only specializing functions.
gate_feature_fn!(
&self,
|x: &Features| x.specialization || (is_fn && x.min_specialization),
i.span,
sym::specialization,
"specialization is unstable"
);
}
visit::walk_assoc_item(self, i, ctxt)
}
fn visit_vis(&mut self, vis: &'a ast::Visibility) {
if let ast::VisibilityKind::Crate(ast::CrateSugar::JustCrate) = vis.kind {
gate_feature_post!(
&self,
crate_visibility_modifier,
vis.span,
"`crate` visibility modifier is experimental"
);
}
visit::walk_vis(self, vis)
}
}
pub fn check_crate(krate: &ast::Crate, sess: &Session) {
maybe_stage_features(sess, krate);
check_incompatible_features(sess);
let mut visitor = PostExpansionVisitor { sess, features: &sess.features_untracked() };
let spans = sess.parse_sess.gated_spans.spans.borrow();
macro_rules! gate_all {
($gate:ident, $msg:literal, $help:literal) => {
if let Some(spans) = spans.get(&sym::$gate) {
for span in spans {
gate_feature_post!(&visitor, $gate, *span, $msg, $help);
}
}
};
($gate:ident, $msg:literal) => {
if let Some(spans) = spans.get(&sym::$gate) {
for span in spans {
gate_feature_post!(&visitor, $gate, *span, $msg);
}
}
};
}
gate_all!(
if_let_guard,
"`if let` guards are experimental",
"you can write `if matches!(<expr>, <pattern>)` instead of `if let <pattern> = <expr>`"
);
gate_all!(
let_chains,
"`let` expressions in this position are experimental",
"you can write `matches!(<expr>, <pattern>)` instead of `let <pattern> = <expr>`"
);
gate_all!(
async_closure,
"async closures are unstable",
"to use an async block, remove the `||`: `async {`"
);
gate_all!(more_qualified_paths, "usage of qualified paths in this context is experimental");
gate_all!(generators, "yield syntax is experimental");
gate_all!(raw_ref_op, "raw address of syntax is experimental");
gate_all!(const_trait_bound_opt_out, "`?const` on trait bounds is experimental");
gate_all!(const_trait_impl, "const trait impls are experimental");
gate_all!(half_open_range_patterns, "half-open range patterns are unstable");
gate_all!(inline_const, "inline-const is experimental");
gate_all!(
const_generics_defaults,
"default values for const generic parameters are experimental"
);
if sess.parse_sess.span_diagnostic.err_count() == 0 {
// Errors for `destructuring_assignment` can get quite noisy, especially where `_` is
// involved, so we only emit errors where there are no other parsing errors.
gate_all!(destructuring_assignment, "destructuring assignments are unstable");
}
gate_all!(unnamed_fields, "unnamed fields are not yet fully implemented");
// All uses of `gate_all!` below this point were added in #65742,
// and subsequently disabled (with the non-early gating readded).
macro_rules! gate_all {
($gate:ident, $msg:literal) => {
// FIXME(eddyb) do something more useful than always
// disabling these uses of early feature-gatings.
if false {
for span in spans.get(&sym::$gate).unwrap_or(&vec![]) {
gate_feature_post!(&visitor, $gate, *span, $msg);
}
}
};
}
gate_all!(trait_alias, "trait aliases are experimental");
gate_all!(associated_type_bounds, "associated type bounds are unstable");
gate_all!(crate_visibility_modifier, "`crate` visibility modifier is experimental");
gate_all!(const_generics, "const generics are unstable");
gate_all!(decl_macro, "`macro` is experimental");
gate_all!(box_patterns, "box pattern syntax is experimental");
gate_all!(exclusive_range_pattern, "exclusive range pattern syntax is experimental");
gate_all!(try_blocks, "`try` blocks are unstable");
gate_all!(label_break_value, "labels on blocks are unstable");
gate_all!(box_syntax, "box expression syntax is experimental; you can call `Box::new` instead");
// To avoid noise about type ascription in common syntax errors,
// only emit if it is the *only* error. (Also check it last.)
if sess.parse_sess.span_diagnostic.err_count() == 0 {
gate_all!(type_ascription, "type ascription is experimental");
}
visit::walk_crate(&mut visitor, krate);
}
fn maybe_stage_features(sess: &Session, krate: &ast::Crate) {
use rustc_errors::Applicability;
if !sess.opts.unstable_features.is_nightly_build() {
let lang_features = &sess.features_untracked().declared_lang_features;
for attr in krate.attrs.iter().filter(|attr| sess.check_name(attr, sym::feature)) {
let mut err = struct_span_err!(
sess.parse_sess.span_diagnostic,
attr.span,
E0554,
"`#![feature]` may not be used on the {} release channel",
option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)")
);
let mut all_stable = true;
for ident in
attr.meta_item_list().into_iter().flatten().map(|nested| nested.ident()).flatten()
{
let name = ident.name;
let stable_since = lang_features
.iter()
.flat_map(|&(feature, _, since)| if feature == name { since } else { None })
.next();
if let Some(since) = stable_since {
err.help(&format!(
"the feature `{}` has been stable since {} and no longer requires \
an attribute to enable",
name, since
));
} else {
all_stable = false;
}
}
if all_stable {
err.span_suggestion(
attr.span,
"remove the attribute",
String::new(),
Applicability::MachineApplicable,
);
}
err.emit();
}
}
}
fn check_incompatible_features(sess: &Session) {
let features = sess.features_untracked();
let declared_features = features
.declared_lang_features
.iter()
.copied()
.map(|(name, span, _)| (name, span))
.chain(features.declared_lib_features.iter().copied());
for (f1, f2) in rustc_feature::INCOMPATIBLE_FEATURES
.iter()
.filter(|&&(f1, f2)| features.enabled(f1) && features.enabled(f2))
{
if let Some((f1_name, f1_span)) = declared_features.clone().find(|(name, _)| name == f1) {
if let Some((f2_name, f2_span)) = declared_features.clone().find(|(name, _)| name == f2)
{
let spans = vec![f1_span, f2_span];
sess.struct_span_err(
spans.clone(),
&format!(
"features `{}` and `{}` are incompatible, using them at the same time \
is not allowed",
f1_name, f2_name
),
)
.help("remove one of these features")
.emit();
}
}
}
}
| 38.153061 | 110 | 0.471282 |
76e21e0db3be7f0be1feaaf6ef0ecdbc74f6ceff | 2,208 | use assert_cmd::prelude::*;
use std::process::Command;
#[test]
fn test_assert_output_contains_full_path() {
assert!(run_fnd(vec![]).contains("./src/cli.rs"));
assert!(run_fnd(vec![]).contains("./src/main.rs"));
assert!(run_fnd(vec![]).contains("./src/flags.rs"));
}
#[test]
fn test_string_match() {
assert!(run_fnd(vec!["cli"]).contains("./src/cli.rs"));
assert!(!run_fnd(vec!["cli"]).contains("./src/main.rs"));
}
#[test]
fn test_case_insensitive_string_match() {
assert!(run_fnd(vec!["CLI", "-i"]).contains("./src/cli.rs"));
assert!(run_fnd(vec!["readme", "-i"]).contains("./README.md"));
}
#[test]
fn test_regex_interpretation() {
assert!(run_fnd(vec!["src/\\S{2}i\\.rs$", "-r"]).contains("./src/cli.rs"));
assert!(!run_fnd(vec!["src/\\S{2}I\\.RS$", "-r"]).contains("./src/cli.rs"));
}
#[test]
fn test_case_insensitive_regex_interpretation() {
assert!(run_fnd(vec!["src/\\S{2}i\\.rs$", "-r"]).contains("./src/cli.rs"));
assert!(run_fnd(vec!["src/\\S{2}I\\.RS$", "-r", "-i"]).contains("./src/cli.rs"));
}
#[test]
fn test_hidden() {
assert!(run_fnd(vec!["-h"]).contains("./.git/object"));
assert!(!run_fnd(vec!["-h"]).contains("./target/debug"));
}
#[test]
fn test_all() {
assert!(run_fnd(vec!["-a"]).contains("./.git/object"));
assert!(run_fnd(vec!["-a"]).contains("./target/debug"));
}
#[test]
fn test_max_depth() {
assert!(run_fnd(vec![".rs", "-d", "3"]).contains("./src/cli.rs"));
assert!(run_fnd(vec![".rs", "-d", "2"]).contains("./src/cli.rs"));
assert!(run_fnd(vec!["README.md", "-d", "1"]).contains("./README.md"));
assert!(!run_fnd(vec![".rs", "-d", "1"]).contains("./src/cli.rs"));
}
#[test]
fn test_size_filtering() {
assert!(run_fnd(vec!["-s", "+1M"]).is_empty());
assert!(run_fnd(vec!["-s", "-1M"]).contains("./src/cli.rs"));
assert!(run_fnd(vec!["-s", "+12k"]).contains("./Cargo.lock"));
}
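// Helper: run the compiled `fnd` binary with the given arguments and return its captured stdout.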
fn run_fnd(args: Vec<&str>) -> String {
let mut cmd = Command::cargo_bin("fnd").expect("fnd should exist");
for arg in args {
cmd.arg(arg);
}
std::str::from_utf8(&cmd.output().expect("stdout should exist").stdout)
.expect("failed to parse output")
.to_string()
}
| 30.246575 | 85 | 0.580163 |
914068433d3fc92ed197cd075c1384d1c39aa0eb | 3,485 | use std::future::Future;
use async_std::task;
use async_std::task::JoinHandle;
//use async_std::task::JoinHandle;
use log::trace;
use crate::timer::sleep;
/// run future and wait forever
/// this is typically used in the server
pub fn run<F>(spawn_closure: F)
where
F: Future<Output = ()> + Send + 'static,
{
task::block_on(spawn_closure);
}
/// run future and wait forever
/// this is typically used in the server
pub fn main<F>(spawn_closure: F)
where
F: Future<Output = ()> + Send + 'static,
{
use std::time::Duration;
task::block_on(async {
spawn_closure.await;
// do infinite loop for now
loop {
sleep(Duration::from_secs(3600)).await;
}
});
}
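/// spawn a future onto the async-std executor, returning a handle that can be awaited for its output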
pub fn spawn<F, T>(future: F) -> JoinHandle<T>
where
F: Future<Output = T> + 'static + Send,
T: Send + 'static,
{
trace!("spawning future");
task::spawn(future)
}
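/// run a blocking closure on async-std's dedicated blocking thread pool
/// (only available with the `task_unstable` feature)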
#[cfg(feature = "task_unstable")]
pub fn spawn_blocking<F, T>(future: F) -> JoinHandle<T>
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static,
{
trace!("spawning blocking");
task::spawn_blocking(future)
}
/// same as async-std's `block_on`
pub fn run_block_on<F, T>(f: F) -> T
where
F: Future<Output = T>,
{
task::block_on(f)
}
#[cfg(test)]
mod basic_test {
use std::io::Error;
use std::thread;
use std::time;
use futures_lite::future::zip;
use log::debug;
use crate::task::spawn;
use crate::test_async;
#[test_async]
async fn future_join() -> Result<(), Error> {
// with join, futures are dispatched on same thread
// since ft1 starts first and
// blocks on thread, it will block future2
// should see ft1,ft1,ft2,ft2
//let mut ft_id = 0;
let ft1 = async {
debug!("ft1: starting sleeping for 1000ms");
// this will block ft2. both ft1 and ft2 share same thread
thread::sleep(time::Duration::from_millis(1000));
debug!("ft1: woke from sleep");
// ft_id = 1;
Ok(()) as Result<(), ()>
};
let ft2 = async {
debug!("ft2: starting sleeping for 500ms");
thread::sleep(time::Duration::from_millis(500));
debug!("ft2: woke up");
// ft_id = 2;
Ok(()) as Result<(), ()>
};
let core_threads = num_cpus::get().max(1);
debug!("num threads: {}", core_threads);
let _ = zip(ft1, ft2).await;
assert!(true);
Ok(())
}
#[test_async]
async fn future_spawn() -> Result<(), Error> {
// with spawn, futures are dispatched on separate thread
// in this case, thread sleep on ft1 won't block
// should see ft1, ft2, ft2, ft1
let ft1 = async {
debug!("ft1: starting sleeping for 1000ms");
            thread::sleep(time::Duration::from_millis(1000)); // block this thread to show ft2 still runs
debug!("ft1: woke from sleep");
};
let ft2 = async {
debug!("ft2: starting sleeping for 500ms");
thread::sleep(time::Duration::from_millis(500));
debug!("ft2: woke up");
};
let core_threads = num_cpus::get().max(1);
debug!("num threads: {}", core_threads);
spawn(ft1);
spawn(ft2);
// wait for all futures complete
thread::sleep(time::Duration::from_millis(2000));
assert!(true);
Ok(())
}
}
| 24.716312 | 96 | 0.557819 |
eb6d0f626d662d289a5c5d2b5bb96b957ebfe6d1 | 1,813 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
impl super::IRQSTAT {
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R { bits: self.register.get() }
}
}
#[doc = r" Value of the field"]
pub struct RESERVED1R {
bits: u32,
}
impl RESERVED1R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct STATR {
bits: bool,
}
impl STATR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 1:31 - Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior."]
#[inline]
pub fn reserved1(&self) -> RESERVED1R {
let bits = {
const MASK: u32 = 2147483647;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) as u32
};
RESERVED1R { bits }
}
#[doc = "Bit 0 - TRNG Interrupt status. OR'ed version of IRQFLAGSTAT.SHUTDOWN_OVF and IRQFLAGSTAT.RDY"]
#[inline]
pub fn stat(&self) -> STATR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
STATR { bits }
}
}
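// An illustrative helper (not part of the generated svd2rust output; the name
// is hypothetical): given a reference to the IRQSTAT register block, report
// whether the TRNG interrupt is pending by reading the STAT field defined above.
#[allow(dead_code)]
fn irq_pending(reg: &super::IRQSTAT) -> bool {
    reg.read().stat().bit_is_set()
}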
| 25.535211 | 158 | 0.534473 |
ef156000be556111802a31ccdefe2d45dead9fff | 5,364 | use itertools::Itertools;
use crate::{
line::Line,
symbols::big_line::BIG_LINE_SPACER,
trigram::{Trigram, TrigramNamePair},
};
/// The 64 Hexagrams have several different orderings, the most
/// common of which is the King Wen sequence.
/// [See here for more details / history](https://en.wikipedia.org/wiki/King_Wen_sequence)
#[derive(Clone, Copy)]
pub enum HexagramOrdering {
/// The most common sequence
KingWen,
/// a.k.a Fu Xi sequence, Shao Yong sequence
Binary,
/// From the [Mawangdui Silk Texts](https://en.wikipedia.org/wiki/Mawangdui_Silk_Texts)
Mawangdui,
}
/// A `Hexagram` is a collection of lines divided into two groups: lines above and lines below.
/// The order of the lines determines the specific hexagram (the primary hexagram) and its
/// meaning. If lines are marked as "changing", then a second hexagram (the relating hexagram)
/// will be produced that provides additional meaning. Special attention should be paid to
/// "changing" lines as they can change the meaning of the primary hexagram.
/// [See here for more details / history](https://en.wikipedia.org/wiki/Hexagram_\(I_Ching\))
pub struct Hexagram {
_above: Trigram,
_below: Trigram,
}
impl Hexagram {
/// Create a new `Hexagram` from two [`Trigram`](../trigram/struct.Trigram.html)s. The `Trigram`s are consumed in the process.
pub fn new(above: Trigram, below: Trigram) -> Self {
Hexagram {
_above: above,
_below: below,
}
}
/// Create a new `Hexagram` from random [`Trigram`](../trigram/struct.Trigram.html)s. An alias for `default()`.
pub fn new_random() -> Self {
Self::default()
}
pub fn from_digits_str(digits: &str) -> Option<Self> {
if digits.len() != 6 {
return None;
};
let mut trigrams = digits
.chars()
.rev()
.map(|digit_char: char| digit_char.to_digit(10))
.map(|digit_option: Option<u32>| digit_option.unwrap())
.tuples::<(_, _, _)>()
.map(|triple| {
Trigram(
Line::from_usize(triple.0 as usize),
Line::from_usize(triple.1 as usize),
Line::from_usize(triple.2 as usize),
)
});
Some(Hexagram {
_above: trigrams.next().unwrap(),
_below: trigrams.next().unwrap(),
})
}
/// Generate a new `Hexagram` by using the coin toss method.
pub fn from_coin_tosses() -> Self {
Hexagram {
_above: Trigram::from_coin_tosses(),
_below: Trigram::from_coin_tosses(),
}
}
/// Get a `&str` to a unicode symbol representing the Hexagram.
pub fn symbol(&self, with_changes: bool) -> &str {
&self._as_trigram_name_pair(with_changes).as_symbol()
}
/// Get a `Vec` of `usize`s representing the positions of lines that are marked as "changing".
pub fn get_changing_line_positions(&self) -> Vec<usize> {
self.get_lines_as_vec()
.iter()
.rev()
.enumerate()
.filter_map(|enumerated_line| match enumerated_line {
(index, Line::BrokenChanging) | (index, Line::UnbrokenChanging) => Some(index),
_ => None,
})
.collect()
}
/// Get the number of this `Hexagram` pre- or post-change, according to a given sequence.
/// If `with_changes` is `true` but a hexagram has no changing lines, the resulting
/// number will be the same as if `with_changes` was `false`.
pub fn as_number(&self, with_changes: bool, sequence: HexagramOrdering) -> usize {
use self::HexagramOrdering::*;
let trigram_name_pair: TrigramNamePair = self._as_trigram_name_pair(with_changes);
match sequence {
KingWen => trigram_name_pair.king_wen_sequence_number(),
Binary => trigram_name_pair.binary_sequence_number(),
Mawangdui => trigram_name_pair.mawangdui_sequence_number(),
}
}
    /// Get a `Vec` of this `Hexagram`'s `Line`s (the "above" lines followed by the "below" lines).
/// The lines are cloned.
pub fn get_lines_as_vec(&self) -> Vec<Line> {
let mut resulting_vec = Vec::new();
resulting_vec.extend_from_slice(&self._above.get_lines_as_vec());
resulting_vec.extend_from_slice(&self._below.get_lines_as_vec());
resulting_vec
}
/// Print the `Hexagram` as large ASCII-art lines.
pub fn print_big(&self) {
print!("{}", BIG_LINE_SPACER);
self._above.print_big();
self._below.print_big();
}
/// A utility function to get a `TrigramNamePair` pre- or post-changes. Mainly used to
/// interface with utilities in the `trigram` module.
fn _as_trigram_name_pair(&self, with_changes: bool) -> TrigramNamePair {
TrigramNamePair(
self._above.get_name(with_changes),
self._below.get_name(with_changes),
)
}
//
// fn _validate_digits_string(digits: &str) -> bool {
//
// }
}
impl Default for Hexagram {
/// Create a new `Hexagram` from random `Trigram`s.
fn default() -> Self {
Hexagram {
_above: Trigram::default(),
_below: Trigram::default(),
}
}
}
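// A brief usage sketch (illustrative only; the function name is hypothetical
// and not part of the original module): draw a random hexagram, print it, and
// read off its King Wen number before and after changing lines are applied.
#[allow(dead_code)]
fn example_reading() {
    let hexagram = Hexagram::new_random();
    hexagram.print_big();
    let primary = hexagram.as_number(false, HexagramOrdering::KingWen);
    let relating = hexagram.as_number(true, HexagramOrdering::KingWen);
    println!("primary hexagram: {}, relating hexagram: {}", primary, relating);
}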
| 35.289474 | 130 | 0.609247 |
7a87a0ba315825e3fb0e9b65ea07ff5d685651d6 | 5,172 | // The spirv tools use generated code, for now we just replicate the minimum
// generation we need here by calling the *shudders* python script(s) we need
// to in a simple script and commit them to source control, as they only need
// to be regenerated when spirv-headers is updated
use std::{fs, process::Command};
fn python<S: AsRef<std::ffi::OsStr>>(args: impl IntoIterator<Item = S>) -> Result<(), i32> {
Command::new("python")
.args(args.into_iter())
.status()
.map_err(|_| -1)
.and_then(|es| {
if es.success() {
Ok(())
} else {
Err(es.code().unwrap_or(-1))
}
})
}
fn main() {
fs::create_dir_all("generated").expect("unable to create 'generated'");
python(&[
"spirv-tools/utils/update_build_version.py",
"spirv-tools",
"generated/build-version.inc",
])
.expect("failed to generate build version from spirv-headers");
enum_string_mapping("unified1");
core_table("unified1");
glsl_table("unified1");
opencl_table("unified1");
vendor_table("spv-amd-shader-explicit-vertex-parameter", None);
vendor_table("spv-amd-shader-trinary-minmax", None);
vendor_table("spv-amd-gcn-shader", None);
vendor_table("spv-amd-shader-ballot", None);
vendor_table("debuginfo", None);
vendor_table("nonsemantic.clspvreflection", None);
vendor_table("opencl.debuginfo.100", Some("CLDEBUG100_"));
registry_table();
}
fn enum_string_mapping(version: &str) {
python(&[
"spirv-tools/utils/generate_grammar_tables.py".to_owned(),
format!("--spirv-core-grammar=spirv-headers/include/spirv/{}/spirv.core.grammar.json", version),
"--extinst-debuginfo-grammar=spirv-headers/include/spirv/unified1/extinst.debuginfo.grammar.json".to_owned(),
"--extinst-cldebuginfo100-grammar=spirv-headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json".to_owned(),
"--extension-enum-output=generated/extension_enum.inc".to_owned(),
"--enum-string-mapping-output=generated/enum_string_mapping.inc".to_owned(),
]).expect("failed to generate enum includes from spirv-headers");
}
fn vendor_table(which: &str, prefix: Option<&str>) {
python(&[
"spirv-tools/utils/generate_grammar_tables.py".to_owned(),
format!(
"--extinst-vendor-grammar=spirv-headers/include/spirv/unified1/extinst.{}.grammar.json",
which
),
format!("--vendor-insts-output=generated/{}.insts.inc", which),
format!(
"--vendor-operand-kind-prefix={}",
prefix.unwrap_or_default()
),
])
.expect("failed to generate vendor table");
}
fn core_table(which: &str) {
python(&[
"spirv-tools/utils/generate_grammar_tables.py".to_owned(),
"--spirv-core-grammar=spirv-headers/include/spirv/unified1/spirv.core.grammar.json".to_owned(),
format!("--core-insts-output=generated/core.insts-{}.inc", which),
"--extinst-debuginfo-grammar=spirv-headers/include/spirv/unified1/extinst.debuginfo.grammar.json".to_owned(),
"--extinst-cldebuginfo100-grammar=spirv-headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json".to_owned(),
format!("--operand-kinds-output=generated/operand.kinds-{}.inc", which),
]).expect("failed to generate core table from spirv-headers");
}
fn registry_table() {
python(&[
"spirv-tools/utils/generate_registry_tables.py",
"--xml=spirv-headers/include/spirv/spir-v.xml",
"--generator=generated/generators.inc",
])
.expect("failed to generate core table from spirv-headers");
}
fn glsl_table(version: &str) {
python(&[
"spirv-tools/utils/generate_grammar_tables.py".to_owned(),
format!("--spirv-core-grammar=spirv-headers/include/spirv/{}/spirv.core.grammar.json", version),
"--extinst-debuginfo-grammar=spirv-headers/include/spirv/unified1/extinst.debuginfo.grammar.json".to_owned(),
"--extinst-cldebuginfo100-grammar=spirv-headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json".to_owned(),
format!("--extinst-glsl-grammar=spirv-headers/include/spirv/{}/extinst.glsl.std.450.grammar.json", version),
"--glsl-insts-output=generated/glsl.std.450.insts.inc".to_owned(),
]).expect("failed to generate glsl table from spirv-headers");
}
fn opencl_table(version: &str) {
python(&[
"spirv-tools/utils/generate_grammar_tables.py".to_owned(),
format!("--spirv-core-grammar=spirv-headers/include/spirv/{}/spirv.core.grammar.json", version),
"--extinst-debuginfo-grammar=spirv-headers/include/spirv/unified1/extinst.debuginfo.grammar.json".to_owned(),
"--extinst-cldebuginfo100-grammar=spirv-headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json".to_owned(),
format!("--extinst-opencl-grammar=spirv-headers/include/spirv/{}/extinst.opencl.std.100.grammar.json", version),
"--opencl-insts-output=generated/opencl.std.insts.inc".to_owned(),
]).expect("failed to generate glsl table from spirv-headers");
}
| 44.586207 | 133 | 0.673821 |
e5de6044e8d77e609a1051593ae0594c4c7549e7 | 2,261 | pub(super) mod properties;
#[cfg(feature = "vorbis_comments")]
pub(super) mod write;
use super::find_last_page;
#[cfg(feature = "vorbis_comments")]
use super::tag::VorbisComments;
use crate::error::Result;
use crate::ogg::constants::{OPUSHEAD, OPUSTAGS};
use crate::types::file::{AudioFile, FileType, TaggedFile};
use crate::types::properties::FileProperties;
use crate::types::tag::TagType;
use properties::OpusProperties;
use std::io::{Read, Seek};
/// An OGG Opus file
pub struct OpusFile {
#[cfg(feature = "vorbis_comments")]
/// The vorbis comments contained in the file
///
/// NOTE: While a metadata packet is required, it isn't required to actually have any data.
pub(crate) vorbis_comments: VorbisComments,
/// The file's audio properties
pub(crate) properties: OpusProperties,
}
impl From<OpusFile> for TaggedFile {
fn from(input: OpusFile) -> Self {
Self {
ty: FileType::Opus,
properties: FileProperties::from(input.properties),
#[cfg(feature = "vorbis_comments")]
tags: vec![input.vorbis_comments.into()],
#[cfg(not(feature = "vorbis_comments"))]
tags: Vec::new(),
}
}
}
impl AudioFile for OpusFile {
type Properties = OpusProperties;
fn read_from<R>(reader: &mut R, read_properties: bool) -> Result<Self>
where
R: Read + Seek,
{
let file_information = super::read::read_from(reader, OPUSHEAD, OPUSTAGS)?;
Ok(Self {
			properties: if read_properties {
				properties::read_properties(reader, &file_information.1)?
			} else {
				OpusProperties::default()
			},
#[cfg(feature = "vorbis_comments")]
// Safe to unwrap, a metadata packet is mandatory in Opus
vorbis_comments: file_information.0.unwrap(),
})
}
fn properties(&self) -> &Self::Properties {
&self.properties
}
fn contains_tag(&self) -> bool {
true
}
fn contains_tag_type(&self, tag_type: &TagType) -> bool {
tag_type == &TagType::VorbisComments
}
}
impl OpusFile {
#[cfg(feature = "vorbis_comments")]
/// Returns a reference to the Vorbis comments tag
pub fn vorbis_comments(&self) -> &VorbisComments {
&self.vorbis_comments
}
#[cfg(feature = "vorbis_comments")]
/// Returns a mutable reference to the Vorbis comments tag
pub fn vorbis_comments_mut(&mut self) -> &mut VorbisComments {
&mut self.vorbis_comments
}
}
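// A usage sketch (illustrative only; the function name is hypothetical and not
// part of the original API): an `OpusFile` is normally read through the
// `AudioFile` trait from any `Read + Seek` source, after which its properties
// and Vorbis comments become available.
#[cfg(feature = "vorbis_comments")]
#[allow(dead_code)]
fn example_read<R: Read + Seek>(reader: &mut R) -> Result<()> {
	let opus = OpusFile::read_from(reader, true)?;
	let _properties = opus.properties();
	let _comments = opus.vorbis_comments();
	Ok(())
}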
| 26.916667 | 130 | 0.706767 |
647a72128928283e8666c52c162b937023d7a942 | 3,734 | use std::{io, path::PathBuf};
use clap::AppSettings;
use failure::format_err;
#[derive(Clone, Debug)]
pub struct CliConfig {
pub command: Command,
pub config_path: Option<PathBuf>,
pub deploy_mode: Option<String>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Command {
Build,
Deploy,
}
fn app() -> clap::App<'static, 'static> {
clap::App::new("cargo screeps")
.bin_name("cargo")
.setting(AppSettings::ArgRequiredElseHelp)
.subcommand(
clap::SubCommand::with_name("screeps")
.author("David Ross")
.version(clap::crate_version!())
.about("Builds WASM-targetting Rust code and deploys to Screeps game servers")
.setting(AppSettings::ArgRequiredElseHelp)
.arg(
clap::Arg::with_name("verbose")
.short("v")
.long("verbose")
.multiple(true),
)
.arg(
clap::Arg::with_name("config")
.short("c")
.long("config")
.multiple(false)
.takes_value(true)
.value_name("CONFIG_FILE"),
)
.subcommand(
clap::SubCommand::with_name("build")
.about("build files, put in target/ in project root"),
)
.subcommand(
clap::SubCommand::with_name("deploy")
.about("run specified deploy mode (or the default if none is specified)")
.arg(
clap::Arg::with_name("mode")
.short("m")
.long("mode")
.multiple(false)
.takes_value(true)
.value_name("DEPLOY_MODE"),
),
)
.subcommand(clap::SubCommand::with_name("copy").about("run the copy deploy mode"))
.subcommand(
clap::SubCommand::with_name("upload").about("run the upload deploy mode"),
),
)
}
pub fn setup_cli() -> Result<CliConfig, failure::Error> {
let cargo_args = app().get_matches();
let args = cargo_args.subcommand_matches("screeps").ok_or_else(|| {
format_err!("expected first subcommand to be 'screeps'. please run as 'cargo screeps'")
})?;
let verbosity = match args.occurrences_of("verbose") {
0 => log::LevelFilter::Info,
1 => log::LevelFilter::Debug,
_ => log::LevelFilter::Trace,
};
fern::Dispatch::new()
.level(verbosity)
.format(|out, message, record| out.finish(format_args!("{}: {}", record.target(), message)))
.chain(io::stdout())
.apply()
.unwrap();
let mut mode = match args.subcommand_matches("deploy") {
Some(deploy_args) => deploy_args.value_of("mode").map(Into::into),
None => None,
};
let command = match args.subcommand_name() {
Some("build") => Command::Build,
Some("deploy") => Command::Deploy,
Some("copy") => {
mode = Some("copy".to_owned());
Command::Deploy
}
Some("upload") => {
mode = Some("upload".to_owned());
Command::Deploy
}
other => panic!("unexpected subcommand {:?}", other),
};
let config = CliConfig {
command,
config_path: args.value_of("config").map(Into::into),
deploy_mode: mode,
};
Ok(config)
}
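// A usage sketch (illustrative only; the function name is hypothetical):
// `setup_cli` is expected to be called exactly once at startup, since it also
// installs the fern logger, and the returned `CliConfig` drives the rest of
// the tool.
#[allow(dead_code)]
fn example_entry_point() -> Result<(), failure::Error> {
    let cli_config = setup_cli()?;
    match cli_config.command {
        Command::Build => { /* run the build pipeline */ }
        Command::Deploy => { /* consult cli_config.deploy_mode and deploy */ }
    }
    Ok(())
}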
| 33.339286 | 100 | 0.48527 |
1a47c2632efc6af658286fd061320dc5bcedbc1f | 4,754 | use migration_core::{
commands::{EvaluateDataLossInput, EvaluateDataLossOutput},
CoreResult, GenericApi,
};
use std::borrow::Cow;
use tempfile::TempDir;
#[must_use = "This struct does nothing on its own. See EvaluateDataLoss::send()"]
pub struct EvaluateDataLoss<'a> {
api: &'a dyn GenericApi,
migrations_directory: &'a TempDir,
prisma_schema: String,
rt: &'a tokio::runtime::Runtime,
}
impl<'a> EvaluateDataLoss<'a> {
pub fn new(
api: &'a dyn GenericApi,
migrations_directory: &'a TempDir,
prisma_schema: String,
rt: &'a tokio::runtime::Runtime,
) -> Self {
EvaluateDataLoss {
api,
migrations_directory,
prisma_schema,
rt,
}
}
fn send_impl(self) -> CoreResult<EvaluateDataLossAssertion<'a>> {
let output = self.rt.block_on(self.api.evaluate_data_loss(&EvaluateDataLossInput {
migrations_directory_path: self.migrations_directory.path().to_str().unwrap().to_owned(),
prisma_schema: self.prisma_schema,
}))?;
Ok(EvaluateDataLossAssertion {
output,
_api: self.api,
_migrations_directory: self.migrations_directory,
})
}
#[track_caller]
pub fn send(self) -> EvaluateDataLossAssertion<'a> {
self.send_impl().unwrap()
}
}
pub struct EvaluateDataLossAssertion<'a> {
output: EvaluateDataLossOutput,
_api: &'a dyn GenericApi,
_migrations_directory: &'a TempDir,
}
impl std::fmt::Debug for EvaluateDataLossAssertion<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("EvaluateDataLossAssertion").finish()
}
}
impl<'a> EvaluateDataLossAssertion<'a> {
#[track_caller]
pub fn assert_steps_count(self, count: usize) -> Self {
assert!(
self.output.migration_steps == count,
"Assertion failed. Expected evaluateDataLoss to return {} steps, found {}",
count,
self.output.migration_steps,
);
self
}
pub fn assert_warnings(self, warnings: &[Cow<'_, str>]) -> Self {
assert_eq!(
self.output.warnings.len(),
warnings.len(),
"Expected {} warnings, got {}.\n{:#?}",
warnings.len(),
self.output.warnings.len(),
self.output.warnings
);
let descriptions: Vec<Cow<'_, str>> = self
.output
.warnings
.iter()
.map(|warning| warning.message.as_str().into())
.collect();
assert_eq!(descriptions, warnings);
self
}
pub fn assert_warnings_with_indices(self, warnings: &[(Cow<'_, str>, usize)]) -> Self {
assert!(
self.output.warnings.len() == warnings.len(),
"Expected {} warnings, got {}.\n{:#?}",
warnings.len(),
self.output.warnings.len(),
self.output.warnings
);
let descriptions: Vec<(Cow<'_, str>, usize)> = self
.output
.warnings
.iter()
.map(|warning| (warning.message.as_str().into(), warning.step_index))
.collect();
assert_eq!(descriptions, warnings);
self
}
pub fn assert_unexecutable(self, unexecutable_steps: &[Cow<'_, str>]) -> Self {
assert!(
self.output.unexecutable_steps.len() == unexecutable_steps.len(),
"Expected {} unexecutable_steps, got {}.\n{:#?}",
unexecutable_steps.len(),
self.output.unexecutable_steps.len(),
self.output.unexecutable_steps
);
let descriptions: Vec<Cow<'_, str>> = self
.output
.unexecutable_steps
.iter()
.map(|warning| warning.message.as_str().into())
.collect();
assert_eq!(descriptions, unexecutable_steps);
self
}
pub fn assert_unexecutables_with_indices(self, unexecutables: &[(Cow<'_, str>, usize)]) -> Self {
assert!(
self.output.unexecutable_steps.len() == unexecutables.len(),
"Expected {} unexecutables, got {}.\n{:#?}",
unexecutables.len(),
self.output.unexecutable_steps.len(),
self.output.unexecutable_steps
);
let descriptions: Vec<(Cow<'_, str>, usize)> = self
.output
.unexecutable_steps
.iter()
.map(|warning| (warning.message.as_str().into(), warning.step_index))
.collect();
assert_eq!(descriptions, unexecutables);
self
}
pub fn into_output(self) -> EvaluateDataLossOutput {
self.output
}
}
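// A usage sketch (illustrative only; the function name is hypothetical and not
// part of the original test API): tests typically construct the builder, block
// on `send`, and chain assertions on the result.
#[allow(dead_code)]
fn example_usage<'a>(
    api: &'a dyn GenericApi,
    migrations_directory: &'a TempDir,
    prisma_schema: String,
    rt: &'a tokio::runtime::Runtime,
) {
    EvaluateDataLoss::new(api, migrations_directory, prisma_schema, rt)
        .send()
        .assert_steps_count(0)
        .assert_warnings(&[]);
}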
| 29.165644 | 101 | 0.564998 |
7928558976674216e7c8f182e3d8b59324162c49 | 186 |
fn id<T: Clone + Send>(t: T) -> T { t }

fn main() {
    let expected = Box::new(100);
    let actual = id::<Box<i32>>(expected.clone());
    println!("{:?}", *actual);
    assert_eq!(*expected, *actual);
}
| 16.909091 | 44 | 0.532258 |
75bc34a2f3b602322dcdd928208b5127d86e2bd2 | 6,398 | use ckb_vm_definitions::{
asm::{
AsmCoreMachine, Trace, RET_DECODE_TRACE, RET_DYNAMIC_JUMP, RET_EBREAK, RET_ECALL,
RET_INVALID_PERMISSION, RET_MAX_CYCLES_EXCEEDED, RET_OUT_OF_BOUND, RET_SLOWPATH,
TRACE_ITEM_LENGTH,
},
instructions::{Instruction, INSTRUCTION_OPCODE_NAMES},
memory::{FLAG_DIRTY, FLAG_EXECUTABLE, FLAG_FREEZED, FLAG_WRITABLE, FLAG_WXORX_BIT},
registers::SP,
MEMORY_FRAMES, MEMORY_FRAMESIZE, MEMORY_FRAME_PAGE_SHIFTS, MEMORY_FRAME_SHIFTS,
RISCV_MAX_MEMORY, RISCV_PAGES, RISCV_PAGESIZE, RISCV_PAGE_SHIFTS,
};
use std::mem::{size_of, zeroed};
// This utility helps us generate C-based macros containing definitions
// such as return code, opcode, struct size, struct offset, etc. The exact
// data here are derived while inspecting Rust structs dynamically. We keep
// this in a separate crate so build failures from the main crate won't cause
// a problem when updating the definitions with this crate. Or you can think
// of this as a workaround to the problem that build.rs cannot depend on any
// of its crate contents.
fn main() {
println!("#define CKB_VM_ASM_RISCV_MAX_MEMORY {}", RISCV_MAX_MEMORY);
println!("#define CKB_VM_ASM_RISCV_PAGE_SHIFTS {}", RISCV_PAGE_SHIFTS);
println!("#define CKB_VM_ASM_RISCV_PAGE_SIZE {}", RISCV_PAGESIZE);
println!("#define CKB_VM_ASM_RISCV_PAGE_MASK {}", RISCV_PAGESIZE - 1);
println!("#define CKB_VM_ASM_RISCV_PAGES {}", RISCV_PAGES);
println!(
"#define CKB_VM_ASM_MEMORY_FRAME_SHIFTS {}",
MEMORY_FRAME_SHIFTS
);
println!("#define CKB_VM_ASM_MEMORY_FRAMESIZE {}", MEMORY_FRAMESIZE);
println!("#define CKB_VM_ASM_MEMORY_FRAMES {}", MEMORY_FRAMES);
println!(
"#define CKB_VM_ASM_MEMORY_FRAME_PAGE_SHIFTS {}",
MEMORY_FRAME_PAGE_SHIFTS
);
println!();
println!(
"#define CKB_VM_ASM_MAXIMUM_TRACE_ADDRESS_LENGTH {}",
TRACE_ITEM_LENGTH * 4
);
println!();
println!("#define CKB_VM_ASM_RET_DECODE_TRACE {}", RET_DECODE_TRACE);
println!("#define CKB_VM_ASM_RET_ECALL {}", RET_ECALL);
println!("#define CKB_VM_ASM_RET_EBREAK {}", RET_EBREAK);
println!("#define CKB_VM_ASM_RET_DYNAMIC_JUMP {}", RET_DYNAMIC_JUMP);
println!(
"#define CKB_VM_ASM_RET_MAX_CYCLES_EXCEEDED {}",
RET_MAX_CYCLES_EXCEEDED
);
println!("#define CKB_VM_ASM_RET_OUT_OF_BOUND {}", RET_OUT_OF_BOUND);
println!(
"#define CKB_VM_ASM_RET_INVALID_PERMISSION {}",
RET_INVALID_PERMISSION
);
println!("#define CKB_VM_ASM_RET_SLOWPATH {}", RET_SLOWPATH);
println!();
println!("#define CKB_VM_ASM_REGISTER_SP {}", SP);
println!();
println!("#define CKB_VM_ASM_MEMORY_FLAG_FREEZED {}", FLAG_FREEZED);
println!(
"#define CKB_VM_ASM_MEMORY_FLAG_EXECUTABLE {}",
FLAG_EXECUTABLE
);
println!(
"#define CKB_VM_ASM_MEMORY_FLAG_WXORX_BIT {}",
FLAG_WXORX_BIT
);
println!("#define CKB_VM_ASM_MEMORY_FLAG_WRITABLE {}", FLAG_WRITABLE);
println!("#define CKB_VM_ASM_MEMORY_FLAG_DIRTY {}", FLAG_DIRTY);
println!();
println!(
"#define CKB_VM_ASM_TRACE_STRUCT_SIZE {}",
size_of::<Trace>()
);
let t: Trace = unsafe { zeroed() };
let t_address = &t as *const Trace as usize;
println!(
"#define CKB_VM_ASM_TRACE_OFFSET_ADDRESS {}",
(&t.address as *const u64 as usize) - t_address
);
println!(
"#define CKB_VM_ASM_TRACE_OFFSET_LENGTH {}",
(&t.length as *const u8 as usize) - t_address
);
println!(
"#define CKB_VM_ASM_TRACE_OFFSET_CYCLES {}",
(&t.cycles as *const u64 as usize) - t_address
);
println!(
"#define CKB_VM_ASM_TRACE_OFFSET_INSTRUCTIONS {}",
(&t.instructions as *const Instruction as usize) - t_address
);
println!(
"#define CKB_VM_ASM_TRACE_OFFSET_THREAD {}",
(&t.thread as *const u64 as usize) - t_address
);
println!();
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_STRUCT_SIZE {}",
size_of::<AsmCoreMachine>()
);
let m: Box<AsmCoreMachine> = Box::<AsmCoreMachine>::default();
let m_address = &*m as *const AsmCoreMachine as usize;
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_REGISTERS {}",
(&m.registers as *const u64 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_PC {}",
(&m.pc as *const u64 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_CYCLES {}",
(&m.cycles as *const u64 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_MAX_CYCLES {}",
(&m.max_cycles as *const u64 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_CHAOS_MODE {}",
(&m.chaos_mode as *const u8 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_CHAOS_SEED {}",
(&m.chaos_seed as *const u32 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_FLAGS {}",
(&m.flags as *const u8 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_MEMORY {}",
(&m.memory as *const u8 as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_TRACES {}",
(&m.traces as *const Trace as usize) - m_address
);
println!(
"#define CKB_VM_ASM_ASM_CORE_MACHINE_OFFSET_FRAMES {}",
(&m.frames as *const u8 as usize) - m_address
);
println!();
for (op, name) in INSTRUCTION_OPCODE_NAMES.iter().enumerate() {
println!("#define CKB_VM_ASM_OP_{} {}", name, op);
}
println!();
println!("#ifdef CKB_VM_ASM_GENERATE_LABEL_TABLES");
println!("#ifdef __APPLE__");
println!(".global _ckb_vm_asm_labels");
println!("_ckb_vm_asm_labels:");
println!("#else");
println!(".global ckb_vm_asm_labels");
println!("ckb_vm_asm_labels:");
println!("#endif");
println!(".CKB_VM_ASM_LABEL_TABLE:");
for name in INSTRUCTION_OPCODE_NAMES.iter() {
println!(
"\t.long\t.CKB_VM_ASM_LABEL_OP_{} - .CKB_VM_ASM_LABEL_TABLE",
name
);
}
println!("#endif /* CKB_VM_ASM_GENERATE_LABEL_TABLES */");
}
| 35.94382 | 89 | 0.662238 |
21c3771610ed4f5375565b2a84e807ee343bd096 | 18,561 | // Copyright 2019, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use super::{
common,
error::ConnectionManagerError,
peer_connection::{self, PeerConnection},
types::ConnectionDirection,
ConnectionManagerConfig,
ConnectionManagerEvent,
};
use crate::{
bounded_executor::BoundedExecutor,
connection_manager::{
liveness::LivenessSession,
wire_mode::{WireMode, LIVENESS_WIRE_MODE},
},
multiaddr::Multiaddr,
multiplexing::Yamux,
noise::NoiseConfig,
peer_manager::{NodeIdentity, PeerFeatures},
protocol::ProtocolId,
runtime,
transports::Transport,
types::CommsPublicKey,
utils::multiaddr::multiaddr_to_socketaddr,
PeerManager,
};
use futures::{future, FutureExt};
use log::*;
use std::{
convert::TryInto,
future::Future,
io::{Error, ErrorKind},
sync::{
atomic::{AtomicUsize, Ordering},
Arc,
},
time::Duration,
};
use tari_crypto::tari_utilities::hex::Hex;
use tari_shutdown::{oneshot_trigger, oneshot_trigger::OneshotTrigger, ShutdownSignal};
use tokio::{
io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt},
sync::mpsc,
time,
};
use tokio_stream::StreamExt;
use tracing::{span, Instrument, Level};
const LOG_TARGET: &str = "comms::connection_manager::listener";
pub struct PeerListener<TTransport> {
config: ConnectionManagerConfig,
bind_address: Multiaddr,
bounded_executor: BoundedExecutor,
conn_man_notifier: mpsc::Sender<ConnectionManagerEvent>,
shutdown_signal: ShutdownSignal,
transport: TTransport,
noise_config: NoiseConfig,
peer_manager: Arc<PeerManager>,
node_identity: Arc<NodeIdentity>,
our_supported_protocols: Vec<ProtocolId>,
liveness_session_count: Arc<AtomicUsize>,
on_listening: OneshotTrigger<Result<Multiaddr, ConnectionManagerError>>,
}
impl<TTransport> PeerListener<TTransport>
where
TTransport: Transport + Send + Sync + 'static,
TTransport::Output: AsyncRead + AsyncWrite + Send + Unpin + 'static,
{
#[allow(clippy::too_many_arguments)]
pub fn new(
config: ConnectionManagerConfig,
bind_address: Multiaddr,
transport: TTransport,
noise_config: NoiseConfig,
conn_man_notifier: mpsc::Sender<ConnectionManagerEvent>,
peer_manager: Arc<PeerManager>,
node_identity: Arc<NodeIdentity>,
shutdown_signal: ShutdownSignal,
) -> Self {
Self {
transport,
bind_address,
noise_config,
conn_man_notifier,
peer_manager,
node_identity,
shutdown_signal,
our_supported_protocols: Vec::new(),
bounded_executor: BoundedExecutor::from_current(config.max_simultaneous_inbound_connects),
liveness_session_count: Arc::new(AtomicUsize::new(config.liveness_max_sessions)),
config,
on_listening: oneshot_trigger::channel(),
}
}
/// Returns a future that resolves once the listener has either succeeded (`Ok(bind_addr)`) or failed (`Err(...)`)
/// in binding the listener socket
// This returns an impl Future and is not async because we want to exclude &self from the future so that it has a
// 'static lifetime as well as to flatten the oneshot result for ergonomics
pub fn on_listening(&self) -> impl Future<Output = Result<Multiaddr, ConnectionManagerError>> + 'static {
let signal = self.on_listening.to_signal();
signal.map(|r| r.ok_or(ConnectionManagerError::ListenerOneshotCancelled)?)
}
/// Set the supported protocols of this node to send to peers during the peer identity exchange
pub fn set_supported_protocols(&mut self, our_supported_protocols: Vec<ProtocolId>) -> &mut Self {
self.our_supported_protocols = our_supported_protocols;
self
}
pub async fn listen(self) -> Result<Multiaddr, ConnectionManagerError> {
let on_listening = self.on_listening();
runtime::current().spawn(self.run());
on_listening.await
}
pub async fn run(mut self) {
let mut shutdown_signal = self.shutdown_signal.clone();
match self.bind().await {
Ok((mut inbound, address)) => {
info!(target: LOG_TARGET, "Listening for peer connections on '{}'", address);
self.on_listening.broadcast(Ok(address));
loop {
tokio::select! {
biased;
_ = &mut shutdown_signal => {
info!(target: LOG_TARGET, "PeerListener is shutting down because the shutdown signal was triggered");
break;
},
Some(inbound_result) = inbound.next() => {
if let Some((socket, peer_addr)) = log_if_error!(target: LOG_TARGET, inbound_result, "Inbound connection failed because '{error}'",) {
self.spawn_listen_task(socket, peer_addr).await;
}
},
}
}
},
Err(err) => {
warn!(target: LOG_TARGET, "PeerListener was unable to start because '{}'", err);
self.on_listening.broadcast(Err(err));
},
}
}
async fn read_wire_format(
socket: &mut TTransport::Output,
time_to_first_byte: Duration,
) -> Result<WireMode, Error> {
let mut buf = [0u8; 1];
match time::timeout(time_to_first_byte, socket.read_exact(&mut buf)).await {
Ok(result) => match result {
Ok(_) => match buf[0].try_into().ok() {
Some(wf) => Ok(wf),
None => {
warn!(target: LOG_TARGET, "Invalid wire format byte '{}'", buf[0]);
Err(ErrorKind::InvalidData.into())
},
},
Err(err) => {
warn!(
target: LOG_TARGET,
"Failed to read wire format byte due to error: {}", err
);
Err(err)
},
},
Err(elapsed) => {
warn!(
target: LOG_TARGET,
"Failed to read wire format byte within timeout of {:#?}. {}", time_to_first_byte, elapsed
);
Err(elapsed.into())
},
}
}
fn is_address_in_liveness_cidr_range(addr: &Multiaddr, allowlist: &[cidr::AnyIpCidr]) -> bool {
match multiaddr_to_socketaddr(addr) {
Ok(socket_addr) => allowlist.iter().any(|cidr| cidr.contains(&socket_addr.ip())),
Err(_) => {
warn!(
target: LOG_TARGET,
"Peer address '{}' is invalid for liveness checks. It must be an TCP/IP address.", addr
);
false
},
}
}
async fn spawn_liveness_session(
socket: TTransport::Output,
permit: Arc<AtomicUsize>,
shutdown_signal: ShutdownSignal,
) {
permit.fetch_sub(1, Ordering::SeqCst);
let liveness = LivenessSession::new(socket);
debug!(target: LOG_TARGET, "Started liveness session");
runtime::current().spawn(async move {
future::select(liveness.run(), shutdown_signal).await;
permit.fetch_add(1, Ordering::SeqCst);
});
}
async fn spawn_listen_task(&self, mut socket: TTransport::Output, peer_addr: Multiaddr) {
let node_identity = self.node_identity.clone();
let peer_manager = self.peer_manager.clone();
let conn_man_notifier = self.conn_man_notifier.clone();
let noise_config = self.noise_config.clone();
let config = self.config.clone();
let our_supported_protocols = self.our_supported_protocols.clone();
let liveness_session_count = self.liveness_session_count.clone();
let shutdown_signal = self.shutdown_signal.clone();
let span = span!(Level::TRACE, "connection_mann::listener::inbound_task",);
let inbound_fut = async move {
match Self::read_wire_format(&mut socket, config.time_to_first_byte).await {
Ok(WireMode::Comms(byte)) if byte == config.network_info.network_byte => {
let this_node_id_str = node_identity.node_id().short_str();
let result = Self::perform_socket_upgrade_procedure(
node_identity,
peer_manager,
noise_config.clone(),
conn_man_notifier.clone(),
socket,
peer_addr,
our_supported_protocols,
&config,
)
.await;
match result {
Ok(peer_conn) => {
log_if_error!(
target: LOG_TARGET,
conn_man_notifier
.send(ConnectionManagerEvent::PeerConnected(peer_conn))
.await,
"Failed to publish event because '{error}'",
);
},
Err(err) => {
debug!(
target: LOG_TARGET,
"[ThisNode={}] Peer connection upgrade failed for peer because '{:?}'",
this_node_id_str,
err
);
log_if_error!(
target: LOG_TARGET,
conn_man_notifier
.send(ConnectionManagerEvent::PeerInboundConnectFailed(err))
.await,
"Failed to publish event because '{error}'",
);
},
}
},
Ok(WireMode::Comms(byte)) => {
// TODO: This call is expensive and only added for the benefit of improved logging and may lead to
// TODO: DoS attacks. Remove later when not needed anymore or make it optional with a config file
// TODO: setting.
let public_key = Self::remote_public_key_from_socket(socket, noise_config).await;
warn!(
target: LOG_TARGET,
"Peer at address '{}' ({}) sent invalid wire format byte. Expected {:x?} got: {:x?} ",
peer_addr,
public_key,
config.network_info.network_byte,
byte,
);
},
Ok(WireMode::Liveness) => {
if liveness_session_count.load(Ordering::SeqCst) > 0 &&
Self::is_address_in_liveness_cidr_range(&peer_addr, &config.liveness_cidr_allowlist)
{
debug!(
target: LOG_TARGET,
"Connection at address '{}' requested liveness session", peer_addr
);
Self::spawn_liveness_session(socket, liveness_session_count, shutdown_signal).await;
} else {
debug!(
target: LOG_TARGET,
"No liveness sessions available or permitted for peer address '{}'", peer_addr
);
let _ = socket.shutdown().await;
}
},
Err(err) => {
warn!(
target: LOG_TARGET,
"Peer at address '{}' failed to send its wire format. Expected network byte {:x?} or liveness \
byte {:x?} not received. Error: {}",
peer_addr,
config.network_info.network_byte,
LIVENESS_WIRE_MODE,
err
);
},
}
}
.instrument(span);
// This will block (asynchronously) if we have reached the maximum simultaneous connections, creating
// back-pressure on nodes connecting to this node
self.bounded_executor.spawn(inbound_fut).await;
}
async fn remote_public_key_from_socket(socket: TTransport::Output, noise_config: NoiseConfig) -> String {
let public_key: Option<CommsPublicKey> = match time::timeout(
Duration::from_secs(30),
noise_config.upgrade_socket(socket, ConnectionDirection::Inbound),
)
.await
.map_err(|_| ConnectionManagerError::NoiseProtocolTimeout)
{
Ok(Ok(noise_socket)) => {
match noise_socket
.get_remote_public_key()
.ok_or(ConnectionManagerError::InvalidStaticPublicKey)
{
Ok(pk) => Some(pk),
_ => None,
}
},
_ => None,
};
match public_key {
None => "public key not known".to_string(),
Some(pk) => pk.to_hex(),
}
}
#[allow(clippy::too_many_arguments)]
async fn perform_socket_upgrade_procedure(
node_identity: Arc<NodeIdentity>,
peer_manager: Arc<PeerManager>,
noise_config: NoiseConfig,
conn_man_notifier: mpsc::Sender<ConnectionManagerEvent>,
socket: TTransport::Output,
peer_addr: Multiaddr,
our_supported_protocols: Vec<ProtocolId>,
config: &ConnectionManagerConfig,
) -> Result<PeerConnection, ConnectionManagerError> {
static CONNECTION_DIRECTION: ConnectionDirection = ConnectionDirection::Inbound;
debug!(
target: LOG_TARGET,
"Starting noise protocol upgrade for peer at address '{}'", peer_addr
);
let noise_socket = time::timeout(
Duration::from_secs(30),
noise_config.upgrade_socket(socket, CONNECTION_DIRECTION),
)
.await
.map_err(|_| ConnectionManagerError::NoiseProtocolTimeout)??;
let authenticated_public_key = noise_socket
.get_remote_public_key()
.ok_or(ConnectionManagerError::InvalidStaticPublicKey)?;
// Check if we know the peer and if it is banned
let known_peer = common::find_unbanned_peer(&peer_manager, &authenticated_public_key).await?;
let mut muxer = Yamux::upgrade_connection(noise_socket, CONNECTION_DIRECTION)
.await
.map_err(|err| ConnectionManagerError::YamuxUpgradeFailure(err.to_string()))?;
trace!(
target: LOG_TARGET,
"Starting peer identity exchange for peer with public key '{}'",
authenticated_public_key
);
let peer_identity = common::perform_identity_exchange(
&mut muxer,
&node_identity,
CONNECTION_DIRECTION,
&our_supported_protocols,
config.network_info.clone(),
)
.await?;
let features = PeerFeatures::from_bits_truncate(peer_identity.features);
debug!(
target: LOG_TARGET,
"Peer identity exchange succeeded on Inbound connection for peer '{}' (Features = {:?})",
authenticated_public_key,
features
);
trace!(target: LOG_TARGET, "{:?}", peer_identity);
let (peer_node_id, their_supported_protocols) = common::validate_and_add_peer_from_peer_identity(
&peer_manager,
known_peer,
authenticated_public_key,
peer_identity,
None,
config.allow_test_addresses,
)
.await?;
debug!(
target: LOG_TARGET,
"[ThisNode={}] Peer '{}' added to peer list.",
node_identity.node_id().short_str(),
peer_node_id.short_str()
);
peer_connection::create(
muxer,
peer_addr,
peer_node_id,
features,
CONNECTION_DIRECTION,
conn_man_notifier,
our_supported_protocols,
their_supported_protocols,
)
}
async fn bind(&mut self) -> Result<(TTransport::Listener, Multiaddr), ConnectionManagerError> {
let bind_address = self.bind_address.clone();
debug!(target: LOG_TARGET, "Attempting to listen on {}", bind_address);
self.transport
.listen(bind_address)
.await
.map_err(|err| ConnectionManagerError::TransportError(err.to_string()))
}
}
| 40.35 | 162 | 0.562685 |