Dataset columns (all string-valued; lengths as reported by the viewer):

| column    | type   | length (min–max) |
|-----------|--------|------------------|
| file_name | string | 3–137            |
| prefix    | string | 0–918k           |
| suffix    | string | 0–962k           |
| middle    | string | 0–812k           |
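The `prefix`/`suffix`/`middle` columns mark this as a fill-in-the-middle (FIM) code corpus: for each source file, a span (`middle`) is cut out, and a model is trained to restore it from the surrounding `prefix` and `suffix`. A minimal sketch of how one row could be consumed, assuming the common PSM (prefix–suffix–middle) sentinel-token convention; the `<fim_*>` tokens and the helper names are illustrative, not part of this dataset:

```python
# Sentinel tokens follow the PSM convention used by several code models;
# the exact tokens for any particular model are an assumption here.
FIM_PREFIX, FIM_SUFFIX, FIM_MIDDLE = "<fim_prefix>", "<fim_suffix>", "<fim_middle>"

def to_fim_example(row: dict) -> str:
    """Build a PSM training string from one prefix/suffix/middle row."""
    return (
        f"{FIM_PREFIX}{row['prefix']}"
        f"{FIM_SUFFIX}{row['suffix']}"
        f"{FIM_MIDDLE}{row['middle']}"
    )

def reassemble(row: dict) -> str:
    """Restoring the original file is plain concatenation in source order."""
    return row["prefix"] + row["middle"] + row["suffix"]

# Cheap integrity check for any row:
row = {"prefix": "def ", "middle": "locations", "suffix": "(self):\n    ..."}
assert reassemble(row) == "def locations(self):\n    ..."
```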
file_name: raw.peggy.d.ts

```typescript
export declare function parse(src: string): any;
```
file_name: _inputs.py

prefix:

```python
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *

__all__ = [
    'RemediationFiltersArgs',
]


@pulumi.input_type
class RemediationFiltersArgs:
    def __init__(__self__, *,
                 locations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The filters that will be applied to determine which resources to remediate.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] locations: The resource locations that will be remediated.
        """
        if locations is not None:
            pulumi.set(__self__, "locations", locations)

    @property
    @pulumi.getter
    def
```

suffix:

```python
(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The resource locations that will be remediated.
        """
        return pulumi.get(self, "locations")

    @locations.setter
    def locations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "locations", value)
```

middle: locations
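For this row the cut point can be verified by eye: the prefix ends at a dangling `def`, the suffix begins with the getter's signature, so the middle has to be the property name. A small reassembly check, with the cell values trimmed (by us) to the region around the cut:

```python
# The trailing space after "def" and the trimming are assumptions for
# readability; the full cell values are shown in the row above.
prefix_tail = "    @property\n    @pulumi.getter\n    def "
middle = "locations"
suffix_head = "(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:"

restored = prefix_tail + middle + suffix_head
assert restored.endswith(
    "def locations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:"
)
```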
file_name: reason.rs

prefix:

```rust
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.

use eq_modulo_pos::EqModuloPos;
use hcons::Conser;
use once_cell::sync::Lazy;
use pos::{BPos, NPos, Pos, Positioned, Symbol, ToOxidized, TypeConstName, TypeName};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use std::hash::Hash;

use crate::decl;
use crate::local;
use crate::prop::{Prop, PropF};
use crate::visitor::Walkable;

pub use oxidized::typing_reason::{ArgPosition, BlameSource};

pub trait Reason:
    Eq
    + EqModuloPos
    + Hash
    + Clone
    + Walkable<Self>
    + std::fmt::Debug
    + Send
    + Sync
    + Serialize
    + DeserializeOwned
    + for<'a> From<oxidized_by_ref::typing_reason::T_<'a>>
    + for<'a> ToOxidized<'a, Output = oxidized_by_ref::typing_reason::T_<'a>>
    + 'static
{
    /// Position type.
    type Pos: Pos + Send + Sync + 'static;

    /// Make a new instance. If the implementing Reason is stateful,
    /// it will call cons() to obtain the ReasonImpl to construct the instance.
    fn mk(cons: impl FnOnce() -> ReasonImpl<Self, Self::Pos>) -> Self;

    fn none() -> Self;

    fn witness(pos: Self::Pos) -> Self {
        Self::mk(|| ReasonImpl::Rwitness(pos))
    }

    fn witness_from_decl(pos: Self::Pos) -> Self {
        Self::mk(|| ReasonImpl::RwitnessFromDecl(pos))
    }

    fn hint(pos: Self::Pos) -> Self {
        Self::mk(|| ReasonImpl::Rhint(pos))
    }

    fn instantiate(r1: Self, ty_name: TypeName, r2: Self) -> Self {
        Self::mk(|| ReasonImpl::Rinstantiate(r1, ty_name, r2))
    }

    fn class_class(pos: Self::Pos, ty_name: TypeName) -> Self {
        Self::mk(|| ReasonImpl::RclassClass(pos, ty_name))
    }

    fn no_return(pos: Self::Pos) -> Self {
        Self::mk(|| ReasonImpl::RnoReturn(pos))
    }

    fn implicit_upper_bound(pos: Self::Pos, sym: Symbol) -> Self {
        Self::mk(|| ReasonImpl::RimplicitUpperBound(pos, sym))
    }

    fn pos(&self) -> &Self::Pos;

    fn decl_ty_conser() -> &'static Conser<decl::Ty_<Self>>;
    fn local_ty_conser() -> &'static Conser<local::Ty_<Self, local::Ty<Self>>>;
    fn prop_conser() -> &'static Conser<PropF<Self, Prop<Self>>>;

    fn from_oxidized(reason: oxidized_by_ref::typing_reason::T_<'_>) -> Self {
        Self::mk(|| {
            use oxidized_by_ref::typing_reason::Blame as OBlame;
            use oxidized_by_ref::typing_reason::T_ as OR;
            use ReasonImpl as RI;
            match reason {
                OR::Rnone => RI::Rnone,
                OR::Rwitness(pos) => RI::Rwitness(pos.into()),
                OR::RwitnessFromDecl(pos) => RI::RwitnessFromDecl(pos.into()),
                OR::Ridx(&(pos, r)) => RI::Ridx(pos.into(), r.into()),
                OR::RidxVector(pos) => RI::RidxVector(pos.into()),
                OR::RidxVectorFromDecl(pos) => RI::RidxVectorFromDecl(pos.into()),
                OR::Rforeach(pos) => RI::Rforeach(pos.into()),
                OR::Rasyncforeach(pos) => RI::Rasyncforeach(pos.into()),
                OR::Rarith(pos) => RI::Rarith(pos.into()),
                OR::RarithRet(pos) => RI::RarithRet(pos.into()),
                OR::RarithRetFloat(&(pos, r, arg_position)) => {
                    RI::RarithRetFloat(pos.into(), r.into(), arg_position)
                }
                OR::RarithRetNum(&(pos, r, arg_position)) => {
                    RI::RarithRetNum(pos.into(), r.into(), arg_position)
                }
                OR::RarithRetInt(pos) => RI::RarithRetInt(pos.into()),
                OR::RarithDynamic(pos) => RI::RarithDynamic(pos.into()),
                OR::RbitwiseDynamic(pos) => RI::RbitwiseDynamic(pos.into()),
                OR::RincdecDynamic(pos) => RI::RincdecDynamic(pos.into()),
                OR::Rcomp(pos) => RI::Rcomp(pos.into()),
                OR::RconcatRet(pos) => RI::RconcatRet(pos.into()),
                OR::RlogicRet(pos) => RI::RlogicRet(pos.into()),
                OR::Rbitwise(pos) => RI::Rbitwise(pos.into()),
                OR::RbitwiseRet(pos) => RI::RbitwiseRet(pos.into()),
                OR::RnoReturn(pos) => RI::RnoReturn(pos.into()),
                OR::RnoReturnAsync(pos) => RI::RnoReturnAsync(pos.into()),
                OR::RretFunKind(&(pos, fun_kind)) => RI::RretFunKind(pos.into(), fun_kind),
                OR::RretFunKindFromDecl(&(pos, fun_kind)) => {
                    RI::RretFunKindFromDecl(pos.into(), fun_kind)
                }
                OR::Rhint(pos) => RI::Rhint(pos.into()),
                OR::Rthrow(pos) => RI::Rthrow(pos.into()),
                OR::Rplaceholder(pos) => RI::Rplaceholder(pos.into()),
                OR::RretDiv(pos) => RI::RretDiv(pos.into()),
                OR::RyieldGen(pos) => RI::RyieldGen(pos.into()),
                OR::RyieldAsyncgen(pos) => RI::RyieldAsyncgen(pos.into()),
                OR::RyieldAsyncnull(pos) => RI::RyieldAsyncnull(pos.into()),
                OR::RyieldSend(pos) => RI::RyieldSend(pos.into()),
                OR::RlostInfo(&(sym, r, OBlame::Blame(&(pos, blame_source)))) => {
                    RI::RlostInfo(Symbol::new(sym), r.into(), Blame(pos.into(), blame_source))
                }
                OR::Rformat(&(pos, sym, r)) => RI::Rformat(pos.into(), Symbol::new(sym), r.into()),
                OR::RclassClass(&(pos, s)) => RI::RclassClass(pos.into(), TypeName(Symbol::new(s))),
                OR::RunknownClass(pos) => RI::RunknownClass(pos.into()),
                OR::RvarParam(pos) => RI::RvarParam(pos.into()),
                OR::RvarParamFromDecl(pos) => RI::RvarParamFromDecl(pos.into()),
                OR::RunpackParam(&(pos1, pos2, i)) => RI::RunpackParam(pos1.into(), pos2.into(), i),
                OR::RinoutParam(pos) => RI::RinoutParam(pos.into()),
                OR::Rinstantiate(&(r1, sym, r2)) => {
                    RI::Rinstantiate(r1.into(), TypeName(Symbol::new(sym)), r2.into())
                }
                OR::Rtypeconst(&(r1, pos_id, sym, r2)) => {
                    RI::Rtypeconst(r1.into(), pos_id.into(), Symbol::new(sym.0), r2.into())
                }
                OR::RtypeAccess(&(r, list)) => RI::RtypeAccess(
                    r.into(),
                    list.iter()
                        .map(|(&r, s)| (r.into(), Symbol::new(s.0)))
                        .collect(),
                ),
                OR::RexprDepType(&(r, pos, edt_reason)) => {
                    RI::RexprDepType(r.into(), pos.into(), edt_reason.into())
                }
                OR::RnullsafeOp(pos) => RI::RnullsafeOp(pos.into()),
                OR::RtconstNoCstr(&pos_id) => RI::RtconstNoCstr(pos_id.into()),
                OR::Rpredicated(&(pos, s)) => RI::Rpredicated(pos.into(), Symbol::new(s)),
                OR::Ris(pos) => RI::Ris(pos.into()),
                OR::Ras(pos) => RI::Ras(pos.into()),
                OR::RvarrayOrDarrayKey(pos) => RI::RvarrayOrDarrayKey(pos.into()),
                OR::RvecOrDictKey(pos) => RI::RvecOrDictKey(pos.into()),
                OR::Rusing(pos) => RI::Rusing(pos.into()),
                OR::RdynamicProp(pos) => RI::RdynamicProp(pos.into()),
                OR::RdynamicCall(pos) => RI::RdynamicCall(pos.into()),
                OR::RdynamicConstruct(pos) => RI::RdynamicConstruct(pos.into()),
                OR::RidxDict(pos) => RI::RidxDict(pos.into()),
                OR::RsetElement(pos) => RI::RsetElement(pos.into()),
                OR::RmissingOptionalField(&(pos, s)) => {
                    RI::RmissingOptionalField(pos.into(), Symbol::new(s))
                }
                OR::RunsetField(&(pos, s)) => RI::RunsetField(pos.into(), Symbol::new(s)),
                OR::RcontravariantGeneric(&(r, s)) => {
                    RI::RcontravariantGeneric(r.into(), Symbol::new(s))
                }
                OR::RinvariantGeneric(&(r, s)) => RI::RinvariantGeneric(r.into(), Symbol::new(s)),
                OR::Rregex(pos) => RI::Rregex(pos.into()),
                OR::RimplicitUpperBound(&(pos, s)) => {
                    RI::RimplicitUpperBound(pos.into(), Symbol::new(s))
                }
                OR::RtypeVariable(pos) => RI::RtypeVariable(pos.into()),
                OR::RtypeVariableGenerics(&(pos, s1, s2)) => {
                    RI::RtypeVariableGenerics(pos.into(), Symbol::new(s1), Symbol::new(s2))
                }
                OR::RglobalTypeVariableGenerics(&(pos, s1, s2)) => {
                    RI::RglobalTypeVariableGenerics(pos.into(), Symbol::new(s1), Symbol::new(s2))
                }
                OR::RsolveFail(pos) => RI::RsolveFail(pos.into()),
                OR::RcstrOnGenerics(&(pos, pos_id)) => {
                    RI::RcstrOnGenerics(pos.into(), pos_id.into())
                }
                OR::RlambdaParam(&(pos, r)) => RI::RlambdaParam(pos.into(), r.into()),
                OR::Rshape(&(pos, s)) => RI::Rshape(pos.into(), Symbol::new(s)),
                OR::Renforceable(pos) => RI::Renforceable(pos.into()),
                OR::Rdestructure(pos) => RI::Rdestructure(pos.into()),
                OR::RkeyValueCollectionKey(pos) => RI::RkeyValueCollectionKey(pos.into()),
                OR::RglobalClassProp(pos) => RI::RglobalClassProp(pos.into()),
                OR::RglobalFunParam(pos) => RI::RglobalFunParam(pos.into()),
                OR::RglobalFunRet(pos) => RI::RglobalFunRet(pos.into()),
                OR::Rsplice(pos) => RI::Rsplice(pos.into()),
                OR::RetBoolean(pos) => RI::RetBoolean(pos.into()),
                OR::RdefaultCapability(pos) => RI::RdefaultCapability(pos.into()),
                OR::RconcatOperand(pos) => RI::RconcatOperand(pos.into()),
                OR::RinterpOperand(pos) => RI::RinterpOperand(pos.into()),
                OR::RdynamicCoercion(&r) => RI::RdynamicCoercion(r.into()),
                OR::RsupportDynamicType(pos) => RI::RsupportDynamicType(pos.into()),
                OR::RdynamicPartialEnforcement(&(pos, s, r)) => {
                    RI::RdynamicPartialEnforcement(pos.into(), Symbol::new(s), r.into())
                }
                OR::RrigidTvarEscape(&(pos, s1, s2, r)) => {
                    RI::RrigidTvarEscape(pos.into(), Symbol::new(s1), Symbol::new(s2), r.into())
                }
            }
        })
    }
}

#[derive(Debug, Clone, PartialEq, Eq, EqModuloPos, Hash, Serialize, Deserialize)]
pub struct Blame<P>(pub P, pub BlameSource);

#[derive(Debug, Clone, PartialEq, Eq, EqModuloPos, Hash, Serialize, Deserialize)]
pub enum ExprDepTypeReason {
    ERexpr(isize),
    ERstatic,
    ERclass(Symbol),
    ERparent(Symbol),
    ERself(Symbol),
    ERpu(Symbol),
}

impl<'a> From<oxidized_by_ref::typing_reason::ExprDepTypeReason<'a>> for ExprDepTypeReason {
    fn from(edtr: oxidized_by_ref::typing_reason::ExprDepTypeReason<'a>) -> Self {
        use oxidized_by_ref::typing_reason::ExprDepTypeReason as Obr;
        match edtr {
            Obr::ERexpr(i) => ExprDepTypeReason::ERexpr(i),
            Obr::ERstatic => ExprDepTypeReason::ERstatic,
            Obr::ERclass(s) => ExprDepTypeReason::ERclass(Symbol::new(s)),
            Obr::ERparent(s) => ExprDepTypeReason::ERparent(Symbol::new(s)),
            Obr::ERself(s) => ExprDepTypeReason::ERself(Symbol::new(s)),
            Obr::ERpu(s) => ExprDepTypeReason::ERpu(Symbol::new(s)),
        }
    }
}

impl<'a> ToOxidized<'a> for ExprDepTypeReason {
    type Output = oxidized_by_ref::typing_reason::ExprDepTypeReason<'a>;

    fn to_oxidized(&self, arena: &'a bumpalo::Bump) -> Self::Output {
        use oxidized_by_ref::typing_reason::ExprDepTypeReason as Obr;
        match self {
            ExprDepTypeReason::ERexpr(i) => Obr::ERexpr(*i),
            ExprDepTypeReason::ERstatic => Obr::ERstatic,
            ExprDepTypeReason::ERclass(s) => Obr::ERclass(s.to_oxidized(arena)),
            ExprDepTypeReason::ERparent(s) => Obr::ERparent(s.to_oxidized(arena)),
            ExprDepTypeReason::ERself(s) => Obr::ERself(s.to_oxidized(arena)),
            ExprDepTypeReason::ERpu(s) => Obr::ERpu(s.to_oxidized(arena)),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq, EqModuloPos, Hash, Serialize, Deserialize)]
pub enum ReasonImpl<R, P> {
    Rnone,
    Rwitness(P),
    RwitnessFromDecl(P),
    /// Used as an index into a vector-like array or string.
    /// Position of indexing, reason for the indexed type
    Ridx(P, R),
    RidxVector(P),
    /// Used as an index, in the Vector case
    RidxVectorFromDecl(P),
    /// Because it is iterated in a foreach loop
    Rforeach(P),
    /// Because it is iterated "await as" in foreach
    Rasyncforeach(P),
    Rarith(P),
    RarithRet(P),
    /// pos, arg float typing reason, arg position
    RarithRetFloat(P, R, oxidized::typing_reason::ArgPosition),
    /// pos, arg num typing reason, arg position
    RarithRetNum(P, R, oxidized::typing_reason::ArgPosition),
    RarithRetInt(P),
    RarithDynamic(P),
    RbitwiseDynamic(P),
    RincdecDynamic(P),
    Rcomp(P),
    RconcatRet(P),
    RlogicRet(P),
    Rbitwise(P),
    RbitwiseRet(P),
    RnoReturn(P),
    RnoReturnAsync(P),
    RretFunKind(P, oxidized::ast_defs::FunKind),
    RretFunKindFromDecl(P, oxidized::ast_defs::FunKind),
    Rhint(P),
    Rthrow(P),
    Rplaceholder(P),
    RretDiv(P),
    RyieldGen(P),
    RyieldAsyncgen(P),
    RyieldAsyncnull(P),
    RyieldSend(P),
    RlostInfo(Symbol, R, Blame<P>),
    Rformat(P, Symbol, R),
    RclassClass(P, TypeName),
    RunknownClass(P),
    RvarParam(P),
    RvarParamFromDecl(P),
    /// splat pos, fun def pos, number of args before splat
    RunpackParam(P, P, isize),
    RinoutParam(P),
    Rinstantiate(R, TypeName, R),
    Rtypeconst(R, Positioned<TypeConstName, P>, Symbol, R),
    RtypeAccess(R, Vec<(R, Symbol)>),
    RexprDepType(R, P, ExprDepTypeReason),
    /// ?-> operator is used
    RnullsafeOp(P),
    RtconstNoCstr(Positioned<TypeConstName, P>),
    Rpredicated(P, Symbol),
    Ris(P),
    Ras(P),
    RvarrayOrDarrayKey(P),
    RvecOrDictKey(P),
    Rusing(P),
    RdynamicProp(P),
    RdynamicCall(P),
    RdynamicConstruct(P),
    RidxDict(P),
    RsetElement(P),
    RmissingOptionalField(P, Symbol),
    RunsetField(P, Symbol),
    RcontravariantGeneric(R, Symbol),
    RinvariantGeneric(R, Symbol),
    Rregex(P),
    RimplicitUpperBound(P, Symbol),
    RtypeVariable(P),
    RtypeVariableGenerics(P, Symbol, Symbol),
    RglobalTypeVariableGenerics(P, Symbol, Symbol),
    RsolveFail(P),
    RcstrOnGenerics(P, Positioned<TypeName, P>),
    RlambdaParam(P, R),
    Rshape(P, Symbol),
    Renforceable(P),
    Rdestructure(P),
    RkeyValueCollectionKey(P),
    RglobalClassProp(P),
    RglobalFunParam(P),
    RglobalFunRet(P),
    Rsplice(P),
    RetBoolean(P),
    RdefaultCapability(P),
    RconcatOperand(P),
    RinterpOperand(P),
    RdynamicCoercion(R),
    RsupportDynamicType(P),
    RdynamicPartialEnforcement(P, Symbol, R),
    RrigidTvarEscape(P, Symbol, Symbol, R),
}

#[derive(Debug, Clone, PartialEq, Eq, EqModuloPos, Hash, Serialize, Deserialize)]
pub struct BReason(Box<ReasonImpl<BReason, BPos>>);

impl Reason for BReason {
    type Pos = BPos;

    fn mk(cons: impl FnOnce() -> ReasonImpl<Self, Self::Pos>) -> Self {
        let x = cons();
        Self(Box::new(x))
    }

    fn none() -> Self {
        BReason(Box::new(ReasonImpl::Rnone))
    }

    fn pos(&self) -> &BPos {
        use ReasonImpl::*;
        match &*self.0 {
            Rnone => unimplemented!(),
            Rwitness(p) | RwitnessFromDecl(p) | Rhint(p) => p,
            r => unimplemented!("BReason::pos: {:?}", r),
        }
    }

    #[inline]
    fn decl_ty_conser() -> &'static Conser<decl::Ty_<BReason>> {
        static CONSER: Lazy<Conser<decl::Ty_<BReason>>> = Lazy::new(Conser::new);
        &CONSER
    }

    #[inline]
    fn local_ty_conser() -> &'static Conser<local::Ty_<BReason, local::Ty<BReason>>> {
        static CONSER: Lazy<Conser<local::Ty_<BReason, local::Ty<BReason>>>> =
            Lazy::new(Conser::new);
        &CONSER
    }

    #[inline]
    fn prop_conser() -> &'static Conser<PropF<BReason, Prop<BReason>>> {
        static CONSER: Lazy<Conser<PropF<BReason, Prop<BReason>>>> = Lazy::new(Conser::new);
        &CONSER
    }
}

impl Walkable<BReason> for BReason {}

impl<'a> From<oxidized_by_ref::typing_reason::Reason<'a>> for BReason {
    fn from(reason: oxidized_by_ref::typing_reason::Reason<'a>) -> Self {
        Self::from_oxidized(reason)
    }
}

impl<'a> ToOxidized<'a> for BReason {
    type Output = oxidized_by_ref::typing_reason::Reason<'a>;

    fn to_oxidized(&self, arena: &'a bumpalo::Bump) -> Self::Output {
        use oxidized_by_ref::typing_reason::Blame as OBlame;
        use oxidized_by_ref::typing_reason::Reason as OR;
        use ReasonImpl as RI;
        match &*self.0 {
            RI::Rnone => OR::Rnone,
            RI::Rwitness(pos) => OR::Rwitness(pos.to_oxidized(arena)),
            RI::RwitnessFromDecl(pos) => OR::RwitnessFromDecl(pos.to_oxidized(arena)),
            RI::Ridx(pos, r) => {
                OR::Ridx(arena.alloc((pos.to_oxidized(arena), r.to_oxidized(arena))))
            }
            RI::RidxVector(pos) => OR::RidxVector(pos.to_oxidized(arena)),
            RI::RidxVectorFromDecl(pos) => OR::RidxVectorFromDecl(pos.to_oxidized(arena)),
            RI::Rforeach(pos) => OR::Rforeach(pos.to_oxidized(arena)),
            RI::Rasyncforeach(pos) => OR::Rasyncforeach(pos.to_oxidized(arena)),
            RI::Rarith(pos) => OR::Rarith(pos.to_oxidized(arena)),
            RI::RarithRet(pos) => OR::RarithRet(pos.to_oxidized(arena)),
            RI::RarithRetFloat(pos, r, arg_position) => OR::RarithRetFloat(arena.alloc((
                pos.to_oxidized(arena),
                r.to_oxidized(arena),
                *arg_position,
            ))),
            RI::RarithRetNum(pos, r, arg_position) => OR::RarithRetNum(arena.alloc((
                pos.to_oxidized(arena),
                r.to_oxidized(arena),
                *arg_position,
            ))),
            RI::RarithRetInt(pos) => OR::RarithRetInt(pos.to_oxidized(arena)),
            RI::RarithDynamic(pos) => OR::RarithDynamic(pos.to_oxidized(arena)),
            RI::RbitwiseDynamic(pos) => OR::RbitwiseDynamic(pos.to_oxidized(arena)),
            RI::RincdecDynamic(pos) => OR::RincdecDynamic(pos.to_oxidized(arena)),
            RI::Rcomp(pos) => OR::Rcomp(pos.to_oxidized(arena)),
            RI::RconcatRet(pos) => OR::RconcatRet(pos.to_oxidized(arena)),
            RI::RlogicRet(pos) => OR::RlogicRet(pos.to_oxidized(arena)),
            RI::Rbitwise(pos) => OR::Rbitwise(pos.to_oxidized(arena)),
            RI::RbitwiseRet(pos) => OR::RbitwiseRet(pos.to_oxidized(arena)),
            RI::RnoReturn(pos) => OR::RnoReturn(pos.to_oxidized(arena)),
            RI::RnoReturnAsync(pos) => OR::RnoReturnAsync(pos.to_oxidized(arena)),
            RI::RretFunKind(pos, fun_kind) => {
                OR::RretFunKind(arena.alloc((pos.to_oxidized(arena), *fun_kind)))
            }
            RI::RretFunKindFromDecl(pos, fun_kind) => {
                OR::RretFunKindFromDecl(arena.alloc((pos.to_oxidized(arena), *fun_kind)))
            }
            RI::Rhint(pos) => OR::Rhint(pos.to_oxidized(arena)),
            RI::Rthrow(pos) => OR::Rthrow(pos.to_oxidized(arena)),
            RI::Rplaceholder(pos) => OR::Rplaceholder(pos.to_oxidized(arena)),
            RI::RretDiv(pos) => OR::RretDiv(pos.to_oxidized(arena)),
            RI::RyieldGen(pos) => OR::RyieldGen(pos.to_oxidized(arena)),
            RI::RyieldAsyncgen(pos) => OR::RyieldAsyncgen(pos.to_oxidized(arena)),
            RI::RyieldAsyncnull(pos) => OR::RyieldAsyncnull(pos.to_oxidized(arena)),
            RI::RyieldSend(pos) => OR::RyieldSend(pos.to_oxidized(arena)),
            RI::RlostInfo(sym, r, Blame(pos, blame_source)) => OR::RlostInfo(arena.alloc((
                sym.to_oxidized(arena),
                r.to_oxidized(arena),
                OBlame::Blame(arena.alloc((pos.to_oxidized(arena), *blame_source))),
            ))),
            RI::Rformat(pos, sym, r) => OR::Rformat(arena.alloc((
                pos.to_oxidized(arena),
                sym.to_oxidized(arena),
                r.to_oxidized(arena),
            ))),
            RI::RclassClass(pos, s) => {
                OR::RclassClass(arena.alloc((pos.to_oxidized(arena), s.to_oxidized(arena))))
            }
            RI::RunknownClass(pos) => OR::RunknownClass(pos.to_oxidized(arena)),
            RI::RvarParam(pos) => OR::RvarParam(pos.to_oxidized(arena)),
            RI::RvarParamFromDecl(pos) => OR::RvarParamFromDecl(pos.to_oxidized(arena)),
            RI::RunpackParam(pos1, pos2, i) => OR::RunpackParam(arena.alloc((
                pos1.to_oxidized(arena),
                pos2.to_oxidized(arena),
                *i,
            ))),
            RI::RinoutParam(pos) => OR::RinoutParam(pos.to_oxidized(arena)),
            RI::Rinstantiate(r1, type_name, r2) => OR::Rinstantiate(arena.alloc((
                r1.to_oxidized(arena),
                type_name.to_oxidized(arena),
                r2.to_oxidized(arena),
            ))),
            RI::Rtypeconst(r1, pos_id, sym, r2) => OR::Rtypeconst(arena.alloc((
                r1.to_oxidized(arena),
                pos_id.to_oxidized(arena),
                &*arena.alloc(oxidized_by_ref::lazy::Lazy(sym.to_oxidized(arena))),
                r2.to_oxidized(arena),
            ))),
            RI::RtypeAccess(r, list) => OR::RtypeAccess(arena.alloc((
                r.to_oxidized(arena),
                &*arena.alloc_slice_fill_iter(list.iter().map(|(r, s)| {
                    (
                        &*arena.alloc(r.to_oxidized(arena)),
                        &*arena.alloc(oxidized_by_ref::lazy::Lazy(s.to_oxidized(arena))),
                    )
                })),
            ))),
            RI::RexprDepType(r, pos, edt_reason) => OR::RexprDepType(arena.alloc((
                r.to_oxidized(arena),
                pos.to_oxidized(arena),
                edt_reason.to_oxidized(arena),
            ))),
            RI::RnullsafeOp(pos) => OR::RnullsafeOp(pos.to_oxidized(arena)),
            RI::RtconstNoCstr(pos_id) => OR::RtconstNoCstr(arena.alloc(pos_id.to_oxidized(arena))),
            RI::Rpredicated(pos, s) => {
                OR::Rpredicated(arena.alloc((pos.to_oxidized(arena), s.to_oxidized(arena))))
            }
            RI::Ris(pos) => OR::Ris(pos.to_oxidized(arena)),
            RI::Ras(pos) => OR::Ras(pos.to_oxidized(arena)),
            RI::RvarrayOrDarrayKey(pos) => OR::RvarrayOrDarrayKey(pos.to_oxidized(arena)),
            RI::RvecOrDictKey(pos) => OR::RvecOrDictKey(pos.to_oxidized(arena)),
            RI::Rusing(pos) => OR::Rusing(pos.to_oxidized(arena)),
            RI::RdynamicProp(pos) => OR::RdynamicProp(pos.to_oxidized(arena)),
            RI::RdynamicCall(pos) => OR::RdynamicCall(pos.to_oxidized(arena)),
            RI::RdynamicConstruct(pos) => OR::RdynamicConstruct(pos.to_oxidized(arena)),
            RI::RidxDict(pos) => OR::RidxDict(pos.to_oxidized(arena)),
            RI::RsetElement(pos) => OR::RsetElement(pos.to_oxidized(arena)),
            RI::RmissingOptionalField(pos, s) => OR::RmissingOptionalField(
                arena.alloc((pos.to_oxidized(arena), s.to_oxidized(arena))),
            ),
            RI::RunsetField(pos, s) => {
                OR::RunsetField(arena.alloc((pos.to_oxidized(arena), s.to_oxidized(arena))))
            }
            RI::RcontravariantGeneric(r, s) => OR::RcontravariantGeneric(
                arena.alloc((r.to_oxidized(arena), s.to_oxidized(arena))),
            ),
            RI::RinvariantGeneric(r, s) => {
                OR::RinvariantGeneric(arena.alloc((r.to_oxidized(arena), s.to_oxidized(arena))))
            }
            RI::Rregex(pos) => OR::Rregex(pos.to_oxidized(arena)),
            RI::RimplicitUpperBound(pos, s) => OR::RimplicitUpperBound(
                arena.alloc((pos.to_oxidized(arena), s.to_oxidized(arena))),
            ),
            RI::RtypeVariable(pos) => OR::RtypeVariable(pos.to_oxidized(arena)),
            RI::RtypeVariableGenerics(pos, s1, s2) => OR::RtypeVariableGenerics(arena.alloc((
                pos.to_oxidized(arena),
                s1.to_oxidized(arena),
                s2.to_oxidized(arena),
            ))),
            RI::RglobalTypeVariableGenerics(pos, s1, s2) => {
                OR::RglobalTypeVariableGenerics(arena.alloc((
                    pos.to_oxidized(arena),
                    s1.to_oxidized(arena),
                    s2.to_oxidized(arena),
                )))
            }
            RI::RsolveFail(pos) => OR::RsolveFail(pos.to_oxidized(arena)),
            RI::RcstrOnGenerics(pos, pos_id) => OR::RcstrOnGenerics(
                arena.alloc((pos.to_oxidized(arena), pos_id.to_oxidized(arena))),
            ),
            RI::RlambdaParam(pos, r) => {
                OR::RlambdaParam(arena.alloc((pos.to_oxidized(arena), r.to_oxidized(arena))))
            }
            RI::Rshape(pos, s) => {
                OR::Rshape(arena.alloc((pos.to_oxidized(arena), s.to_oxidized(arena))))
            }
            RI::Renforceable(pos) => OR::Renforceable(pos.to_oxidized(arena)),
            RI::Rdestructure(pos) => OR::Rdestructure(pos.to_oxidized(arena)),
            RI::RkeyValueCollectionKey(pos) => OR::RkeyValueCollectionKey(pos.to_oxidized(arena)),
            RI::RglobalClassProp(pos) => OR::RglobalClassProp(pos.to_oxidized(arena)),
            RI::RglobalFunParam(pos) => OR::RglobalFunParam(pos.to_oxidized(arena)),
            RI::RglobalFunRet(pos) => OR::RglobalFunRet(pos.to_oxidized(arena)),
            RI::Rsplice(pos) => OR::Rsplice(pos.to_oxidized(arena)),
            RI::RetBoolean(pos) => OR::RetBoolean(pos.to_oxidized(arena)),
            RI::RdefaultCapability(pos) => OR::RdefaultCapability(pos.to_oxidized(arena)),
            RI::RconcatOperand(pos) => OR::RconcatOperand(pos.to_oxidized(arena)),
            RI::RinterpOperand(pos) => OR::RinterpOperand(pos.to_oxidized(arena)),
            RI::RdynamicCoercion(r) => OR::RdynamicCoercion(arena.alloc(r.to_oxidized(arena))),
            RI::RsupportDynamicType(pos) => OR::RsupportDynamicType(pos.to_oxidized(arena)),
            RI::RdynamicPartialEnforcement(pos, s, r) => {
                OR::RdynamicPartialEnforcement(arena.alloc((
                    pos.to_oxidized(arena),
                    s.to_oxidized(arena),
                    r.to_oxidized(arena),
                )))
            }
            RI::RrigidTvarEscape(pos, s1, s2, r) => OR::RrigidTvarEscape(arena.alloc((
                pos.to_oxidized(arena),
                s1.to_oxidized(arena),
                s2.to_oxidized(arena),
                r.to_oxidized(arena),
            ))),
        }
    }
}

/// A stateless sentinal Reason.
#[derive(Debug, Clone, PartialEq, Eq, EqModuloPos, Hash, Serialize, Deserialize)]
pub struct NReason;

impl Reason for NReason {
    type Pos = NPos;

    fn mk(_cons: impl FnOnce() -> ReasonImpl<Self, Self::Pos>) -> Self {
        NReason
    }

    fn
```
suffix:

```rust
() -> Self {
        NReason
    }

    fn pos(&self) -> &NPos {
        &NPos
    }

    #[inline]
    fn decl_ty_conser() -> &'static Conser<decl::Ty_<NReason>> {
        static CONSER: Lazy<Conser<decl::Ty_<NReason>>> = Lazy::new(Conser::new);
        &CONSER
    }

    #[inline]
    fn local_ty_conser() -> &'static Conser<local::Ty_<NReason, local::Ty<NReason>>> {
        static CONSER: Lazy<Conser<local::Ty_<NReason, local::Ty<NReason>>>> =
            Lazy::new(Conser::new);
        &CONSER
    }

    #[inline]
    fn prop_conser() -> &'static Conser<PropF<NReason, Prop<NReason>>> {
        static CONSER: Lazy<Conser<PropF<NReason, Prop<NReason>>>> = Lazy::new(Conser::new);
        &CONSER
    }
}

impl Walkable<NReason> for NReason {}

impl<'a> From<oxidized_by_ref::typing_reason::T_<'a>> for NReason {
    fn from(reason: oxidized_by_ref::typing_reason::T_<'a>) -> Self {
        Self::from_oxidized(reason)
    }
}

impl<'a> ToOxidized<'a> for NReason {
    type Output = oxidized_by_ref::typing_reason::Reason<'a>;

    fn to_oxidized(&self, _arena: &'a bumpalo::Bump) -> Self::Output {
        oxidized_by_ref::typing_reason::Reason::Rnone
    }
}
```

middle: none
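The machinery worth noting in this row is the trio of `*_conser()` methods: each hands out a lazily initialized, process-wide `hcons::Conser` table, so structurally equal types are hash-consed, i.e. allocated once and shared, which makes later equality checks cheap. A minimal sketch of the idea, assuming only that `Conser` behaves like an interning table (this illustrates the general technique, not the actual `hcons` API):

```python
# Minimal hash-consing (interning) table in the spirit of hcons::Conser:
# structurally equal values are stored once, and every later lookup returns
# the same shared instance, so equality can be decided by identity.
class Conser:
    def __init__(self):
        self._table = {}

    def mk(self, value):
        # `value` must be hashable, e.g. a tuple-shaped immutable tree node.
        return self._table.setdefault(value, value)

conser = Conser()
a = conser.mk(("apply", ("var", "x"), ("var", "y")))
b = conser.mk(("apply", ("var", "x"), ("var", "y")))
assert a is b  # one shared allocation for structurally equal nodes
```

In the Rust above, the same effect comes from `static CONSER: Lazy<Conser<...>>` inside each method, which yields exactly one shared table per conser'd type.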
file_name: balancetwo.py

prefix:

```python
"""
This is a plugin created by ShiN0
Copyright (c) 2020 ShiN0
<https://www.github.com/mgaertne/minqlx-plugin-tests>

You are free to modify this plugin to your own one.
"""
import minqlx
from minqlx import Plugin
from minqlx.database import Redis

import os
import math
import time
import random
import itertools
import threading

from abc import abstractmethod
from operator import itemgetter

import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

PLAYER_BASE = "minqlx:players:{0}"
IPS_BASE = "minqlx:ips"

SUPPORTED_GAMETYPES = ("ad", "ca", "ctf", "dom", "ft", "tdm")


def requests_retry_session(
        retries=3,
        backoff_factor=0.1,
        status_forcelist=(500, 502, 504),
        session=None,
):
    session = session or requests.Session()
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session


class balancetwo(minqlx.Plugin):
    """
    Checks qlstats for the elos of a player given as well as checking the elos of potentially aliases of the player
    by looking for connection from the same IP as the player has connected to locally.

    Uses:
    * qlx_balancetwo_ratingSystem (default: "mapbased-truskills") Either "mapbased-truskills", "truskills", "a-elo",
    "b-elo". In the future there might be a "custom" option for other rating providers.
    * qlx_balancetwo_ratingLimit_min (default: "15") minimum rating for players trying to connect
    * qlx_balancetwo_ratingLimit_max (default: "35") maximum rating for players trying to connect
    * qlx_balancetwo_ratingLimit_minGames (default: "10") minimum amount of rated games for player trying to connect
    * qlx_balancetwo_ratingStrategy (default: "") unused at the moment. For future use
    * qlx_balancetwo_autoSwitch (default: "0") automatically execute suggested switches rather than waiting for
    !agree from players.
    * qlx_balancetwo_uniquePlayerSwitches (default: "0") During a game, avoid switches that already happened during
    the same game
    * qlx_balancetwo_autoRebalance (default: "1") When new players join, the new players are automatically put on
    teams that result in the lower difference between the teams.
    * qlx_balancetwo_elocheckPermission (default: "0") The permission for issuing the elocheck
    * qlx_balancetwo_elocheckReplyChannel (default: "public") The reply channel where the elocheck output is put to.
    Possible values: "public" or "private". Any other value leads to public announcements
    * qlx_balancetwo_elocheckShowSteamids (default: "0") Also lists the steam ids of the players checked
    """
    database = Redis

    def __init__(self):
        super().__init__()

        self.set_cvar_once("qlx_balancetwo_ratingSystem", "mapbased-truskills")

        self.set_cvar_once("qlx_balancetwo_ratingLimit_kick", "1")
        self.set_cvar_once("qlx_balancetwo_ratingLimit_min", "15")
        self.set_cvar_once("qlx_balancetwo_ratingLimit_max", "35")
        self.set_cvar_once("qlx_balancetwo_ratingLimit_minGames", "10")

        self.set_cvar_once("qlx_balancetwo_elocheckPermission", "0")
        self.set_cvar_once("qlx_balancetwo_elocheckReplyChannel", "public")
        self.set_cvar_once("qlx_balancetwo_elocheckShowSteamids", "0")

        # indicates whether switch suggestions need to be opted-in (!a) or vetoed (!v) by the suggested players
        self.set_cvar_once("qlx_balancetwo_autoSwitch", "0")
        # if set to true, this avoids suggesting the same players for switching in the same game twice, might lead to
        # fewer possible suggestions
        self.set_cvar_once("qlx_balancetwo_uniquePlayerSwitches", "0")
        self.set_cvar_once("qlx_balancetwo_minimumSuggestionDiff", "25")
        self.set_cvar_once("qlx_balancetwo_minimumStddevDiff", "50")

        self.set_cvar_once("qlx_balancetwo_autoRebalance", "1")

        self.ratingLimit_kick = self.get_cvar("qlx_balancetwo_ratingLimit_kick", bool)
        self.ratingLimit_min = self.get_cvar("qlx_balancetwo_ratingLimit_min", int)
        self.ratingLimit_max = self.get_cvar("qlx_balancetwo_ratingLimit_max", int)
        self.ratingLimit_minGames = self.get_cvar("qlx_balancetwo_ratingLimit_minGames", int)

        self.reply_channel = self.get_cvar("qlx_balancetwo_elocheckReplyChannel")
        if self.reply_channel != "private":
            self.reply_channel = "public"
        self.show_steam_ids = self.get_cvar("qlx_balancetwo_elocheckShowSteamids", bool)

        self.auto_switch = self.get_cvar("qlx_balancetwo_autoSwitch", bool)
        self.unique_player_switches = self.get_cvar("qlx_balancetwo_uniquePlayerSwitches", bool)
        self.minimum_suggestion_diff = self.get_cvar("qlx_balancetwo_minimumSuggestionDiff", float)
        self.minimum_suggestion_stddev_diff = self.get_cvar("qlx_balancetwo_minimumStddevDiff", int)

        self.auto_rebalance = self.get_cvar("qlx_balancetwo_autoRebalance", bool)

        self.add_command(("elocheck", "getrating", "getelo", "elo"), self.cmd_elocheck,
                         permission=self.get_cvar("qlx_balancetwo_elocheckPermission", int),
                         usage="<player or steam_id>")
        self.add_command("aliases", self.cmd_aliases,
                         permission=self.get_cvar("qlx_balancetwo_elocheckPermission", int),
                         usage="[player or steam_id]")
        self.add_command(("ratings", "elos", "selo"), self.cmd_ratings)
        self.add_command("eloupdates", self.cmd_switch_elo_changes_notifications, usage="<0/1>")
        self.add_command("balance", self.cmd_balance, 1)
        self.add_command(("teams", "teens"), self.cmd_teams)
        self.add_command("do", self.cmd_do, 1)
        self.add_command("dont", self.cmd_dont, 1)
        self.add_command(("agree", "a"), self.cmd_agree, client_cmd_perm=0)
        self.add_command(("veto", "v"), self.cmd_veto, client_cmd_perm=0)
        self.add_command(("nokick", "dontkick"), self.cmd_nokick, 2, usage="[<name>]")

        self.add_hook("map", self.handle_map_change)
        self.add_hook("player_connect", self.handle_player_connect, priority=minqlx.PRI_LOWEST)
        self.add_hook("player_disconnect", self.handle_player_disconnect)
        self.add_hook("team_switch_attempt", self.handle_team_switch_attempt)
        self.add_hook("team_switch", self.handle_team_switch)
        self.add_hook("game_countdown", self.handle_game_countdown)
        self.add_hook("round_countdown", self.handle_round_countdown)
        self.add_hook("round_start", self.handle_round_start)
        self.add_hook("game_end", self.handle_game_end)

        self.rating_system = self.get_cvar("qlx_balancetwo_ratingSystem")

        self.balance_api = self.get_cvar("qlx_balanceApi")

        self.kickthreads = {}

        self.jointimes = {}
        self.last_new_player_id = None

        self.previous_teams = None

        self.previous_map = None
        self.previous_gametype = None
        self.previous_ratings = {}
        self.ratings = {}
        self.rating_diffs = {}
        self.fetch_elos_from_all_players()

        self.informed_players = []

        self.switched_players = []
        self.switch_suggestion = None
        self.in_countdown = False

        self.twovstwo_steam_ids = []
        self.twovstwo_combinations = []
        self.twovstwo_iter = None

        self.prevent = False
        self.last_action = "spec"

    @minqlx.thread
    def fetch_elos_from_all_players(self):
        self.fetch_ratings([player.steam_id for player in self.players()])

    def fetch_ratings(self, steam_ids, mapname=None):
        self.fetch_mapbased_ratings(steam_ids, mapname)
        for rating_provider in [TRUSKILLS, A_ELO, B_ELO]:
            rating_results = rating_provider.fetch_elos(steam_ids)
            self.append_ratings(rating_provider.name, rating_results)

    def fetch_mapbased_ratings(self, steam_ids, mapname=None):
        if mapname is None and (self.game is None or self.game.map is None):
            return
        if mapname is None:
            mapname = self.game.map.lower()
        rating_results = TRUSKILLS.fetch_elos(steam_ids, headers={"X-QuakeLive-Map": mapname})
        rating_provider_name = "{} {}".format(mapname, TRUSKILLS.name)
        self.append_ratings(rating_provider_name, rating_results)

    def append_ratings(self, rating_provider_name, json_result):
        if json_result is None:
            return
        if rating_provider_name in self.ratings:
            self.ratings[rating_provider_name].append_ratings(json_result)
            return
        self.ratings[rating_provider_name] = RatingProvider.from_json(json_result)

    def cmd_elocheck(self, player: minqlx.Player, msg: str, channel: minqlx.AbstractChannel):
        if len(msg) > 2:
            return minqlx.RET_USAGE

        if len(msg) == 1:
            target = player.steam_id
        else:
            target = msg[1]

        self.do_elocheck(player, target, channel)

    @minqlx.thread
    def do_elocheck(self, player: minqlx.Player, target: str, channel: minqlx.AbstractChannel):
        target_players = self.find_target_player(target)

        target_steam_id = None

        if target_players is None or len(target_players) == 0:
            try:
                target_steam_id = int(target)

                if not self.db.exists(PLAYER_BASE.format(target_steam_id)):
                    player.tell("Sorry, player with steam id {} never played here.".format(target_steam_id))
                    return
            except ValueError:
                player.tell("Sorry, but no players matched your tokens: {}.".format(target))
                return

        if len(target_players) > 1:
            player.tell("A total of ^6{}^7 players matched for {}:".format(len(target_players), target))
            out = ""
            for p in target_players:
                out += " " * 2
                out += "{}^6:^7 {}\n".format(p.id, p.name)
            player.tell(out[:-1])
            return

        if len(target_players) == 1:
            target_steam_id = target_players.pop().steam_id

        reply_func = self.reply_func(player, channel)

        used_steam_ids = self.used_steam_ids_for(target_steam_id)
        aliases = self.fetch_aliases(used_steam_ids)

        truskill = RatingProvider.from_json(TRUSKILLS.fetch_elos(used_steam_ids))
        a_elo = RatingProvider.from_json(A_ELO.fetch_elos(used_steam_ids))
        b_elo = RatingProvider.from_json(B_ELO.fetch_elos(used_steam_ids))
        map_based_truskill = None
        if self.game is not None and self.game.map is not None:
            map_based_truskill = RatingProvider.from_json(
                TRUSKILLS.fetch_elos(used_steam_ids, headers={"X-QuakeLive-Map": self.game.map.lower()}))

        if target_steam_id in aliases:
            target_player_elos = self.format_player_elos(a_elo, b_elo, truskill, map_based_truskill, target_steam_id,
                                                         aliases=aliases[target_steam_id])
        else:
            target_player_elos = self.format_player_elos(a_elo, b_elo, truskill, map_based_truskill, target_steam_id)
        reply_func("{0}^7".format(target_player_elos))

        alternative_steam_ids = used_steam_ids[:]
        alternative_steam_ids.remove(target_steam_id)
        if len(alternative_steam_ids) == 0:
            return

        reply_func("Players from the same IPs:\n")
        for steam_id in alternative_steam_ids:
            if steam_id in aliases:
                player_elos = self.format_player_elos(a_elo, b_elo, truskill, map_based_truskill, steam_id,
                                                      aliases=aliases[steam_id])
            else:
                player_elos = self.format_player_elos(a_elo, b_elo, truskill, map_based_truskill, steam_id)
            reply_func("{0}^7".format(player_elos))

    def find_target_player(self, target: str):
        try:
            steam_id = int(target)

            target_player = self.player(steam_id)
            if target_player:
                return [target_player]
        except ValueError:
            pass
        except minqlx.NonexistentPlayerError:
            pass

        return self.find_player(target)

    def reply_func(self, player, channel):
        if self.reply_channel == "private":
            return player.tell
        return self.identify_reply_channel(channel).reply

    def identify_reply_channel(self, channel):
        if channel in [minqlx.RED_TEAM_CHAT_CHANNEL, minqlx.BLUE_TEAM_CHAT_CHANNEL,
                       minqlx.SPECTATOR_CHAT_CHANNEL, minqlx.FREE_CHAT_CHANNEL]:
            return minqlx.CHAT_CHANNEL

        return channel

    def used_steam_ids_for(self, steam_id):
        if not self.db.exists(PLAYER_BASE.format(steam_id) + ":ips"):
            return [steam_id]

        ips = self.db.smembers(PLAYER_BASE.format(steam_id) + ":ips")

        used_steam_ids = set()
        for ip in ips:
            if not self.db.exists(IPS_BASE + ":{0}".format(ip)):
                continue

            used_steam_ids = used_steam_ids | self.db.smembers(IPS_BASE + ":{0}".format(ip))

        return [int(_steam_id) for _steam_id in used_steam_ids]

    def fetch_aliases(self, steam_ids):
        url_template = "{}aliases/".format(A_ELO.url_base) + "{}.json"

        try:
            result = requests_retry_session().get(
                url_template.format("+".join([str(steam_id) for steam_id in steam_ids])),
                timeout=A_ELO.timeout)
        except requests.RequestException as exception:
            self.logger.debug("request exception: {}".format(exception))
            return {}

        if result.status_code != requests.codes.ok:
            return {}
        js = result.json()

        aliases = {}

        for steam_id in steam_ids:
            if str(steam_id) not in js:
                continue

            player_entry = js[str(steam_id)]
            aliases[steam_id] = []
            cleaned_aliases = []
            for entry in player_entry:
                if self.clean_text(entry) not in cleaned_aliases:
                    aliases[steam_id].append(entry)
                    cleaned_aliases.append(self.clean_text(entry))
        return aliases

    def format_player_elos(self, a_elo, b_elo, truskill, map_based_truskill, steam_id, indent=0, aliases=None):
        display_name = self.resolve_player_name(steam_id)
        result = " " * indent + "{0}^7\n".format(self.format_player_name(steam_id))
        if aliases is not None:
            displayed_aliases = aliases[:]
            displayed_aliases.remove(display_name)
            if len(displayed_aliases) != 0:
                if len(displayed_aliases) <= 5:
                    result += " " * indent + "Aliases used: {}^7\n".format("^7, ".join(displayed_aliases[:5]))
                else:
                    result += " " * indent + "Aliases used: {}^7, ... (^4!aliases <player>^7 to list all)\n" \
                        .format("^7, ".join(displayed_aliases[:5]))

        if map_based_truskill is not None:
            formatted_map_based_truskills = map_based_truskill.format_elos(steam_id)
            if formatted_map_based_truskills is not None and len(formatted_map_based_truskills) > 0:
                result += " " * indent + " " + "{1} Truskills: {0}\n" \
                    .format(formatted_map_based_truskills, self.game.map.lower())

        formatted_truskills = truskill.format_elos(steam_id)
        if truskill is not None and len(formatted_truskills) > 0:
            result += " " * indent + " " + "Truskills: {0}\n".format(formatted_truskills)

        formatted_a_elos = a_elo.format_elos(steam_id)
        if a_elo is not None and len(formatted_a_elos) > 0:
            result += " " * indent + " " + "Elos: {0}\n".format(formatted_a_elos)

        formatted_b_elos = b_elo.format_elos(steam_id)
        if b_elo is not None and len(formatted_b_elos) > 0:
            result += " " * indent + " " + "B-Elos: {0}\n".format(formatted_b_elos)
        return result

    def format_player_name(self, steam_id):
        result = ""

        player_name = self.resolve_player_name(steam_id)
        result += "{0}^7".format(player_name)

        if self.show_steam_ids:
            result += " ({0})".format(steam_id)

        return result

    def resolve_player_name(self, steam_id):
        player = self.player(steam_id)
        if player is not None:
            return self.remove_trailing_color_code(player.name)

        if self.db.exists(PLAYER_BASE.format(steam_id) + ":last_used_name"):
            return self.remove_trailing_color_code(self.db[PLAYER_BASE.format(steam_id) + ":last_used_name"])

        return "unknown"

    def
```
suffix:

```python
(self, text):
        if not text.endswith("^7"):
            return text

        return text[:-2]

    def cmd_aliases(self, player: minqlx.Player, msg: str, channel: minqlx.AbstractChannel):
        if len(msg) != 2:
            return minqlx.RET_USAGE

        self.do_aliases(player, msg[1], channel)

    @minqlx.thread
    def do_aliases(self, player: minqlx.Player, target: str, channel: minqlx.AbstractChannel):
        target_players = self.find_target_player(target)

        target_steam_id = None

        if target_players is None or len(target_players) == 0:
            try:
                target_steam_id = int(target)

                if not self.db.exists(PLAYER_BASE.format(target_steam_id)):
                    player.tell("Sorry, player with steam id {} never played here.".format(target_steam_id))
                    return
            except ValueError:
                player.tell("Sorry, but no players matched your tokens: {}.".format(target))
                return

        if len(target_players) > 1:
            player.tell("A total of ^6{}^7 players matched for {}:".format(len(target_players), target))
            out = ""
            for p in target_players:
                out += " " * 2
                out += "{}^6:^7 {}\n".format(p.id, p.name)
            player.tell(out[:-1])
            return

        if len(target_players) == 1:
            target_steam_id = target_players.pop().steam_id

        reply_func = self.reply_func(player, channel)

        aliases = self.fetch_aliases([target_steam_id])

        if target_steam_id not in aliases:
            reply_func("Sorry, no aliases returned for {}".format(target_steam_id))
            return

        reply_func("{0}^7".format(self.format_player_aliases(target_steam_id, aliases[target_steam_id])))

    def format_player_aliases(self, steam_id, aliases):
        result = "{0}^7\n".format(self.format_player_name(steam_id))
        result += "Aliases used: {}".format("^7, ".join(aliases))
        return result

    def cmd_ratings(self, player, msg, channel):
        teams = self.teams()
        gametype = self.game.type_short
        mapname = self.game.map.lower()

        map_based_rating_provider_name = "{} {}".format(mapname, TRUSKILLS.name)

        if TRUSKILLS.name in self.ratings and map_based_rating_provider_name in self.ratings:
            truskills_rating_provider = self.ratings[TRUSKILLS.name]
            mapbased_truskills_rating_provider = self.ratings[map_based_rating_provider_name]

            channel.reply("^3{}^7 ratings (^3general^7/^3map-based^7) (^3{}^7)"
                          .format(TRUSKILLS.name, TRUSKILLS.url_base.split(':')[1].strip('/')))

            self.report_ratings_for_team(channel, teams["free"], gametype,
                                         truskills_rating_provider, mapbased_truskills_rating_provider,
                                         primary_rating_prefix="^6", secondary_rating_prefix="^6")
            self.report_ratings_for_team(channel, teams["red"], gametype,
                                         truskills_rating_provider, mapbased_truskills_rating_provider,
                                         primary_rating_prefix="^1", secondary_rating_prefix="^1")
            self.report_ratings_for_team(channel, teams["blue"], gametype,
                                         truskills_rating_provider, mapbased_truskills_rating_provider,
                                         primary_rating_prefix="^4", secondary_rating_prefix="^4")
            self.report_ratings_for_team(channel, teams["spectator"], gametype,
                                         truskills_rating_provider, mapbased_truskills_rating_provider)

        if A_ELO.name in self.ratings and B_ELO.name in self.ratings:
            primary_rating_provider = self.ratings[A_ELO.name]
            secondary_rating_provider = self.ratings[B_ELO.name]

            channel.reply("^5=================================^7")
            channel.reply("^3Elo^7 ratings (^3A elo^7/^3B elo^7) (^3{}^7)"
                          .format(A_ELO.url_base.split(':')[1].strip('/')))

            self.report_ratings_for_team(channel, teams["free"], gametype,
                                         primary_rating_provider, secondary_rating_provider,
                                         primary_rating_prefix="A:^6", secondary_rating_prefix="B:^6")
            self.report_ratings_for_team(channel, teams["red"], gametype,
                                         primary_rating_provider, secondary_rating_provider,
                                         primary_rating_prefix="A:^1", secondary_rating_prefix="B:^1")
            self.report_ratings_for_team(channel, teams["blue"], gametype,
                                         primary_rating_provider, secondary_rating_provider,
                                         primary_rating_prefix="A:^4", secondary_rating_prefix="B:^4")
            self.report_ratings_for_team(channel, teams["spectator"], gametype,
                                         primary_rating_provider, secondary_rating_provider,
                                         primary_rating_prefix="A:", secondary_rating_prefix="B:")

    def report_ratings_for_team(self, channel, team, gametype,
                                primary_rating_provider, secondary_rating_provider,
                                primary_rating_prefix="", secondary_rating_prefix=""):
        if team is None or len(team) <= 0:
            return

        primary_filtered = [player for player in team if player.steam_id in primary_rating_provider.rated_steam_ids()]
        primary_filtered = [player for player in primary_filtered
                            if gametype in primary_rating_provider.rated_gametypes_for(player.steam_id)]
        primary_filtered = [player for player in primary_filtered
                            if primary_rating_provider.games_for(player.steam_id, gametype) > 0]

        rated_player_texts = []
        if len(primary_filtered) > 0:
            primary_sorted = sorted(primary_filtered,
                                    key=lambda x: primary_rating_provider[x.steam_id][gametype]["elo"], reverse=True)

            for player in primary_sorted:
                if player.steam_id in secondary_rating_provider.rated_steam_ids() and \
                        gametype in secondary_rating_provider.rated_gametypes_for(player.steam_id) and \
                        secondary_rating_provider.games_for(player.steam_id, gametype) > 0:
                    rated_player_texts.append("{}^7: {}{}^7/{}{}^7"
                                              .format(player.name, primary_rating_prefix,
                                                      primary_rating_provider[player.steam_id][gametype]["elo"],
                                                      secondary_rating_prefix,
                                                      secondary_rating_provider[player.steam_id][gametype]["elo"]))
                else:
                    rated_player_texts.append("{}^7: {}{}^7/{}^5{}^7"
                                              .format(player.name, primary_rating_prefix,
                                                      primary_rating_provider[player.steam_id][gametype]["elo"],
                                                      secondary_rating_prefix,
                                                      secondary_rating_provider[player.steam_id][gametype]["elo"]))

        primary_unranked = [player for player in team if player not in primary_filtered]

        if len(primary_unranked) > 0:
            secondary_filtered = [player for player in primary_unranked
                                  if player.steam_id in secondary_rating_provider.rated_steam_ids()]
            secondary_filtered = [player for player in secondary_filtered
                                  if gametype in secondary_rating_provider.rated_gametypes_for(player.steam_id)]
            secondary_filtered = [player for player in secondary_filtered
                                  if secondary_rating_provider.games_for(player.steam_id, gametype) > 0]

            if len(secondary_filtered) > 0:
                secondary_sorted = sorted(secondary_filtered,
                                          key=lambda x: primary_rating_provider[x.steam_id][gametype]["elo"],
                                          reverse=True)

                for player in secondary_sorted:
                    rated_player_texts.append("{}^7: {}^5{}/{}{}^7"
                                              .format(player.name, primary_rating_prefix,
                                                      primary_rating_provider[player.steam_id][gametype]["elo"],
                                                      secondary_rating_prefix,
                                                      secondary_rating_provider[player.steam_id][gametype]["elo"]))

            secondary_unranked = [player for player in primary_unranked if player not in secondary_filtered]
            for player in secondary_unranked:
                rated_player_texts.append("{}^7: {}^5{}^7/{}^5{}^7"
                                          .format(player.name, primary_rating_prefix,
                                                  primary_rating_provider[player.steam_id][gametype]["elo"],
                                                  secondary_rating_prefix,
                                                  secondary_rating_provider[player.steam_id][gametype]["elo"]))

        channel.reply(", ".join(rated_player_texts))

    def cmd_switch_elo_changes_notifications(self, player, msg, channel):
        flag = self.wants_to_be_informed(player.steam_id)
        self.db.set_flag(player, "balancetwo:rating_changes", not flag)

        if flag:
            player.tell(
                "Notifications for elo and truskill changes have been disabled. "
                "Use ^6{}eloupdates^7 to enable them again.".format(self.get_cvar("qlx_commandPrefix")))
        else:
            player.tell(
                "Notifications for elo and truskill changes have been enabled. "
                "Use ^6{}eloupdates^7 to disable them again.".format(self.get_cvar("qlx_commandPrefix")))

        return minqlx.RET_STOP_ALL

    def wants_to_be_informed(self, steam_id):
        return self.db.get_flag(steam_id, "balancetwo:rating_changes", default=False)

    def cmd_balance(self, player, msg, channel):
        gt = self.game.type_short
        if gt not in SUPPORTED_GAMETYPES:
            player.tell("This game mode is not supported by the balance plugin.")
            return minqlx.RET_STOP_ALL

        teams = self.teams()
        if len(teams["red"] + teams["blue"]) % 2 != 0:
            player.tell("The total number of players should be an even number.")
            return minqlx.RET_STOP_ALL

        players = dict([(p.steam_id, gt) for p in teams["red"] + teams["blue"]])
        self.callback_balance(players, minqlx.CHAT_CHANNEL)

    def callback_balance(self, players, channel):
        if not self.game:
            return

        if self.game.state == "in_progress":
            return

        teams = self.teams()
        current = teams["red"] + teams["blue"]

        if len(current) % 2 == 1:
            player_to_spec = self.find_player_to_spec(current)
            self.logger.debug("putting {} to spec".format(player_to_spec.clean_name))
            player_to_spec.put("spectator")

        balanced_teams = self.find_balanced_teams()
        if balanced_teams is None:
            return
        red_steam_ids, blue_steam_ids = balanced_teams

        changed = False
        for steam_id in red_steam_ids:
            player = self.player(steam_id)
            if player.team != "red":
                changed = True
                self.logger.debug("putting {} to red".format(player.clean_name))
                player.put("red")

        for steam_id in blue_steam_ids:
            player = self.player(steam_id)
            if player.team != "blue":
                changed = True
                self.logger.debug("putting {} to blue".format(player.clean_name))
                player.put("blue")

        if not changed:
            channel.reply("Teams are good! Nothing to balance.")
            return True

        self.report_teams(red_steam_ids, blue_steam_ids, channel)
        return True

    def find_player_to_spec(self, players):
        return min([player for player in players], key=lambda _player: self.find_games_here(_player))

    def find_games_here(self, player):
        completed_key = "minqlx:players:{}:games_completed"

        if not self.db.exists(completed_key.format(player.steam_id)):
            return 0

        return int(self.db[completed_key.format(player.steam_id)])

    def find_time(self, player):
        if not (player.steam_id in self.jointimes):
            self.jointimes[player.steam_id] = time.time()
        return self.jointimes[player.steam_id]

    def find_balanced_teams(self):
        teams = self.teams()

        # if 3 < len(teams["red"] + teams["blue"]) < 6:
        #     return self.find_next_2vs2_teams()

        if len(teams["red"] + teams["blue"]) < 8:
            return self.find_non_recent_small_balanced_teams()

        return self.find_large_balanced_teams()

    def find_next_2vs2_teams(self):
        teams = self.teams()
        steam_ids = [player.steam_id for player in teams["red"] + teams["blue"]]

        if self.twovstwo_iter is None or not self.check_all_steam_ids(steam_ids):
            self.twovstwo_steam_ids = steam_ids
            self.twovstwo_combinations = self.filter_combinations(steam_ids)
            self.twovstwo_iter = random_iterator(self.twovstwo_combinations)

        red_steam_ids = list(next(self.twovstwo_iter))
        blue_steam_ids = [steam_id for steam_id in steam_ids if steam_id not in red_steam_ids]
        return red_steam_ids, blue_steam_ids

    def check_all_steam_ids(self, steam_ids):
        return sorted(steam_ids) == sorted(self.twovstwo_steam_ids)

    def filter_combinations(self, steam_ids):
        gametype = self.game.type_short

        configured_rating_provider_name = self.configured_rating_provider_name()
        if configured_rating_provider_name not in self.ratings:
            self.logger.debug("Balancing aborted. No ratings found for {}.".format(configured_rating_provider_name))
            return []
        configured_rating_provider = self.ratings[configured_rating_provider_name]

        combinations = []

        if len(steam_ids) != 4:
            return []

        combinations_list = [(steam_ids[0], steam_ids[1]), (steam_ids[0], steam_ids[2]), (steam_ids[0], steam_ids[3])]
        for red_steam_ids in combinations_list:
            blue_steam_ids = [steam_id for steam_id in steam_ids if steam_id not in red_steam_ids]

            red_avg = self.team_average(red_steam_ids, gametype, rating_provider=configured_rating_provider)
            blue_avg = self.team_average(blue_steam_ids, gametype, rating_provider=configured_rating_provider)
            diff = abs(red_avg - blue_avg)

            if diff < self.minimum_suggestion_diff:
                combinations.append((red_steam_ids, diff))

        return combinations_list

    def find_non_recent_small_balanced_teams(self):
        teams = self.teams()
        gt = self.game.type_short
        steam_ids = [player.steam_id for player in teams["red"] + teams["blue"]]

        configured_rating_provider_name = self.configured_rating_provider_name()
        if configured_rating_provider_name not in self.ratings:
            self.logger.debug("Balancing aborted. No ratings found for {}.".format(configured_rating_provider_name))
            return
        configured_rating_provider = self.ratings[configured_rating_provider_name]

        team_combinations = []

        for combination in itertools.combinations(steam_ids, int(len(steam_ids) / 2)):
            red_steam_ids = list(combination)
            blue_steam_ids = [steam_id for steam_id in steam_ids if steam_id not in red_steam_ids]

            if self.previous_teams is not None and (
                    sorted(red_steam_ids) == sorted(self.previous_teams[0]) or
                    sorted(red_steam_ids) == sorted(self.previous_teams[1])):
                continue

            if self.previous_teams is not None and (
                    sorted(blue_steam_ids) == sorted(self.previous_teams[0]) or
                    sorted(blue_steam_ids) == sorted(self.previous_teams[1])):
                continue

            red_avg = self.team_average(red_steam_ids, gt, rating_provider=configured_rating_provider)
            blue_avg = self.team_average(blue_steam_ids, gt, rating_provider=configured_rating_provider)
            diff = abs(red_avg - blue_avg)

            team_combinations.append((red_steam_ids, blue_steam_ids, diff))

        filtered_combinations = [(red_steam_ids, blue_steam_ids, diff)
                                 for (red_steam_ids, blue_steam_ids, diff) in team_combinations
                                 if diff < self.minimum_suggestion_diff]

        self.logger.debug("team_combinations: {}".format(team_combinations))
        self.logger.debug("filtered_combinations: {}".format(filtered_combinations))

        if len(filtered_combinations) > 0:
            red_team, blue_team, diff = random.choice(filtered_combinations)
        elif len(team_combinations) > 0:
            red_team, blue_team, diff = min(team_combinations, key=itemgetter(2))
        else:
            red_team = [player.steam_id for player in teams["red"]]
            blue_team = [player.steam_id for player in teams["blue"]]

        return red_team, blue_team

    def find_large_balanced_teams(self):
        teams = self.teams()
        gametype = self.game.type_short
        steam_ids = [player.steam_id for player in teams["red"] + teams["blue"]]

        configured_rating_provider_name = self.configured_rating_provider_name()
        if configured_rating_provider_name not in self.ratings:
            self.logger.debug("Balancing aborted. No ratings found for {}.".format(configured_rating_provider_name))
            return [], []
        configured_rating_provider = self.ratings[configured_rating_provider_name]

        rated_steam_ids = [steam_id for steam_id in steam_ids
                           if steam_id in configured_rating_provider.rated_steam_ids()]
        rated_steam_ids = [steam_id for steam_id in rated_steam_ids
                           if gametype in configured_rating_provider.rated_gametypes_for(steam_id)]
        rated_steam_ids = [steam_id for steam_id in rated_steam_ids
                           if configured_rating_provider[steam_id][gametype]["games"] > 0]
        rated_steam_ids.sort(key=lambda steam_id: configured_rating_provider[steam_id][gametype]["elo"])

        if len(rated_steam_ids) % 2 == 1:
            rated_steam_ids.remove(rated_steam_ids[0])

        red_steam_ids = []
        blue_steam_ids = []

        while len(rated_steam_ids) > 0:
            player1 = rated_steam_ids.pop()
            player2 = rated_steam_ids.pop()

            option1_red_average = self.team_average(red_steam_ids + [player1], gametype,
                                                    rating_provider=configured_rating_provider)
            option1_blue_average = self.team_average(blue_steam_ids + [player2], gametype,
                                                     rating_provider=configured_rating_provider)
            option1_diff = abs(option1_red_average - option1_blue_average)

            option2_red_average = self.team_average(red_steam_ids + [player2], gametype,
                                                    rating_provider=configured_rating_provider)
            option2_blue_average = self.team_average(blue_steam_ids + [player1], gametype,
                                                     rating_provider=configured_rating_provider)
            option2_diff = abs(option2_red_average - option2_blue_average)

            if option1_diff < option2_diff:
                red_steam_ids.append(player1)
                blue_steam_ids.append(player2)
            else:
                red_steam_ids.append(player2)
                blue_steam_ids.append(player1)

        return red_steam_ids, blue_steam_ids

    def report_teams(self, red_team, blue_team, channel):
        gt = self.game.type_short

        configured_rating_provider_name = self.configured_rating_provider_name()
        if configured_rating_provider_name not in self.ratings:
            self.logger.debug("No ratings for configured rating provider {} found. Abandoning."
                              .format(configured_rating_provider_name))
            return

        configured_rating_provider = self.ratings[configured_rating_provider_name]
        avg_red = self.team_average(red_team, gt, rating_provider=configured_rating_provider)
        avg_blue = self.team_average(blue_team, gt, rating_provider=configured_rating_provider)
        avg_diff = avg_red - avg_blue

        stddev_red = self.team_stddev(red_team, gt, mu=avg_red, rating_provider=configured_rating_provider)
        stddev_blue = self.team_stddev(blue_team, gt, mu=avg_blue, rating_provider=configured_rating_provider)

        if configured_rating_provider_name.endswith(TRUSKILLS.name):
            if avg_diff >= 0.005:
                channel.reply(
                    "{} ratings: ^1{:.02f} (deviation: {:.02f}) "
                    "^7vs ^4{:.02f} (deviation: {:.02f})^7 - DIFFERENCE: ^1{:.02f}"
                    .format(configured_rating_provider_name,
                            avg_red, stddev_red, avg_blue, stddev_blue, abs(avg_diff)))
                return
            if avg_diff <= -0.005:
                channel.reply(
                    "{} ratings: ^1{:.02f} (deviation: {:.02f}) "
                    "^7vs ^4{:.02f} (deviation: {:.02f})^7 - DIFFERENCE: ^4{:.02f}"
                    .format(configured_rating_provider_name,
                            avg_red, stddev_red, avg_blue, stddev_blue, abs(avg_diff)))
                return
            channel.reply(
                "{} ratings: ^1{:.02f} (deviation: {:.02f}) ^7vs ^4{:.02f} (deviation: {:.02f})^7 - Holy shit!"
                .format(configured_rating_provider_name, avg_red, stddev_red, avg_blue, stddev_blue))
            return

        if int(avg_diff) > 0:
            channel.reply("{} ratings: ^1{:.0f} (deviation: {:.0f}) "
                          "^7vs ^4{:.0f} (deviation: {:.0f})^7 - DIFFERENCE: ^1{:.0f}"
                          .format(configured_rating_provider_name,
                                  avg_red, stddev_red, avg_blue, stddev_blue, abs(avg_diff)))
            return
        if int(avg_diff) < 0:
            channel.reply("{} ratings: ^1{:.0f} (deviation: {:.0f}) "
                          "^7vs ^4{:.0f} (deviation: {:.0f})^7 - DIFFERENCE: ^4{:.0f}"
                          .format(configured_rating_provider_name,
                                  avg_red, stddev_red, avg_blue, stddev_blue, abs(avg_diff)))
            return
        channel.reply(
            "{} ratings: ^1{:.0f} (deviation: {:.0f}) ^7vs ^4{:.0f} (deviation: {:.0f})^7 - Holy shit!"
            .format(configured_rating_provider_name, avg_red, stddev_red, avg_blue, stddev_blue))

    def configured_rating_provider_name(self):
        if self.game is not None and self.game.map is not None:
            if self.rating_system == "mapbased-truskills":
                rating_provider_name = "{} {}".format(self.game.map.lower(), TRUSKILLS.name)
                return rating_provider_name

        if self.rating_system.endswith("truskills"):
            return TRUSKILLS.name

        if self.rating_system == "a-elo":
            return A_ELO.name

        if self.rating_system == "b-elo":
            return B_ELO.name

    def team_average(self, steam_ids, gametype, rating_provider=None):
        if not steam_ids or len(steam_ids) == 0:
            return 0

        configured_rating_provider = rating_provider
        if configured_rating_provider is None:
            configured_rating_provider_name = self.configured_rating_provider_name()
            if configured_rating_provider_name not in self.ratings:
                return 0

            configured_rating_provider = self.ratings[configured_rating_provider_name]

        for steam_id in steam_ids:
            if steam_id not in configured_rating_provider.rated_steam_ids():
                return 0

        return sum([configured_rating_provider[steam_id][gametype]["elo"] for steam_id in steam_ids]) / len(steam_ids)

    def team_stddev(self, steam_ids, gametype, mu=None, rating_provider=None):
        if not steam_ids or len(steam_ids) == 0:
            return 0

        configured_rating_provider = rating_provider
        if configured_rating_provider is None:
            configured_rating_provider_name = self.configured_rating_provider_name()
            if configured_rating_provider_name not in self.ratings:
                return 0

            configured_rating_provider = self.ratings[configured_rating_provider_name]

        for steam_id in steam_ids:
            if steam_id not in configured_rating_provider.rated_steam_ids():
                return 0

        team_elos = [pow(configured_rating_provider[steam_id][gametype]["elo"] - mu, 2) for steam_id in steam_ids]
        return math.sqrt(sum(team_elos) / len(steam_ids))

    def cmd_teams(self, player, msg, channel):
        gametype = self.game.type_short
        if gametype not in SUPPORTED_GAMETYPES:
            player.tell("This game mode is not supported by the balance plugin.")
            return minqlx.RET_STOP_ALL

        teams = self.teams()
        if len(teams["red"]) != len(teams["blue"]):
            player.tell("Both teams should have the same number of players.")
            return minqlx.RET_STOP_ALL

        self.report_teams([player.steam_id for player in teams["red"]],
                          [player.steam_id for player in teams["blue"]], channel)

        if len(teams["red"] + teams["blue"]) == 0:
            channel.reply("No players active currently")
            return minqlx.RET_STOP_ALL

        if len(teams["red"] + teams["blue"]) == 4:
            i = random.randint(0, 99)
            if not i:
                channel.reply("Teens look ^6good!")
            else:
                channel.reply("Teams look good!")
            self.switch_suggestion = None
            return minqlx.RET_STOP_ALL

        self.collect_suggestions(teams, gametype, channel)

    @minqlx.thread
    def collect_suggestions(self, teams, gametype, channel):
        possible_switches = self.filtered_suggestions(teams, gametype)

        if self.unique_player_switches and len(self.switched_players) > 0:
            possible_switches = list(filter(
                lambda suggestion: suggestion.red_player.steam_id not in self.switched_players and
                suggestion.blue_player.steam_id not in self.switched_players,
                possible_switches))
        self.handle_suggestions_collected(possible_switches, channel)

    def filtered_suggestions(self, teams, gametype):
        player_steam_ids = [player.steam_id for player in teams["red"] + teams["blue"]]

        configured_rating_provider_name = self.configured_rating_provider_name()
        configured_rating_provider = self.ratings[configured_rating_provider_name]

        minimum_suggestion_diff, minimum_suggestion_stddev_diff = \
            self.minimum_suggestion_parameters(gametype, player_steam_ids)

        avg_red = self.team_average([player.steam_id for player in teams["red"]], gametype,
                                    rating_provider=configured_rating_provider)
        avg_blue = self.team_average([player.steam_id for player in teams["blue"]], gametype,
                                     rating_provider=configured_rating_provider)
        avg_diff = abs(avg_red - avg_blue)

        possible_switches = self.possible_switches(teams, gametype)

        if avg_diff <= minimum_suggestion_diff:
            stddev_red = self.team_stddev([player.steam_id for player in teams["red"]], gametype,
                                          mu=avg_red, rating_provider=configured_rating_provider)
            stddev_blue = self.team_stddev([player.steam_id for player in teams["blue"]], gametype,
                                           mu=avg_blue, rating_provider=configured_rating_provider)
            stddev_diff = abs(stddev_red - stddev_blue)

            return list(filter(
                lambda suggestion:
                stddev_diff - abs(suggestion.stddev_diff) >= minimum_suggestion_stddev_diff and
                abs(suggestion.stddev_diff) <= minimum_suggestion_stddev_diff and
                abs(suggestion.avg_diff) <= minimum_suggestion_diff,
                possible_switches))

        return list(filter(
            lambda suggestion: avg_diff > abs(suggestion.avg_diff) and
            avg_diff - abs(suggestion.avg_diff) >= minimum_suggestion_diff,
            possible_switches))

    def minimum_suggestion_parameters(self, gametype, steam_ids):
        return self.minimum_suggestion_diff, self.minimum_suggestion_stddev_diff

    def possible_switches(self, teams, gametype):
        player_steam_ids = [player.steam_id for player in teams["red"] + teams["blue"]]

        configured_rating_provider_name = self.configured_rating_provider_name()
        configured_rating_provider = self.ratings[configured_rating_provider_name]

        minimum_suggestion_diff, minimum_suggestion_stddev_diff = \
            self.minimum_suggestion_parameters(gametype, player_steam_ids)

        switches = []
        for red_p in teams["red"]:
            for blue_p in teams["blue"]:
                r = [player.steam_id for player in teams["red"]
                     if player.steam_id != red_p.steam_id] + [blue_p.steam_id]
                b = [player.steam_id for player in teams["blue"]
                     if player.steam_id != blue_p.steam_id] + [red_p.steam_id]
                avg_red = self.team_average(r, gametype, rating_provider=configured_rating_provider)
                avg_blue = self.team_average(b, gametype, rating_provider=configured_rating_provider)
                diff = avg_red - avg_blue

                if diff <= minimum_suggestion_diff:
                    stddev_red = self.team_stddev(r, gametype, mu=avg_red,
                                                  rating_provider=configured_rating_provider)
                    stddev_blue = self.team_stddev(b, gametype, mu=avg_blue,
                                                   rating_provider=configured_rating_provider)
                    stddev_diff = stddev_red - stddev_blue
                    suggestion = Suggestion(red_p, blue_p, diff, stddev_diff)

                    switches.append(suggestion)

        return switches

    def handle_suggestions_collected(self, possible_switches, channel):
        rating_strategy = self.rating_strategy(self.get_cvar("qlx_balancetwo_ratingStrategy", str))
        switch_suggestion_queue = SuggestionQueue(possible_switches, rating_strategy)

        if switch_suggestion_queue and len(switch_suggestion_queue) > 0:
            switch = switch_suggestion_queue.best_suggestion()
            channel.reply(switch.announcement())

            if not self.switch_suggestion or switch != self.switch_suggestion:
                self.switch_suggestion = switch
        else:
            i = random.randint(0, 99)
            if not i:
                channel.reply("Teens look ^6good!")
            else:
                channel.reply("Teams look good!")

            self.switch_suggestion = None

        return True

    def rating_strategy(self, strategy):
        return DiffSuggestionRatingStrategy()

    def cmd_do(self, player, msg, channel):
        if self.auto_switch:
            return

        if not self.switch_suggestion:
            return

        self.switch_suggestion.execute()

    def cmd_dont(self, player, msg, channel):
        if not self.auto_switch:
            return

        if not self.switch_suggestion:
            return

        self.msg("An admin prevented the switch! The switch will be terminated.")
        self.switch_suggestion = None

    def cmd_agree(self, player, msg, channel):
        if self.auto_switch:
            return

        if not self.switch_suggestion:
            return

        if self.switch_suggestion.all_agreed():
            return

        self.switch_suggestion.agree(player)

        if not self.switch_suggestion.all_agreed():
            return

        # If the game's in progress and we're not in the round countdown, wait for next round.
        if self.game.state == "in_progress" and not self.in_countdown:
            self.msg("The switch will be executed at the start of next round.")
            return

        # Otherwise, switch right away.
        self.execute_suggestion()

    def execute_suggestion(self):
        try:
            self.switch_suggestion.execute()
        except minqlx.NonexistentPlayerError:
            self.switch_suggestion = None
            return
        except PlayerMovedToSpecError:
            self.switch_suggestion = None
            return

        self.switched_players += self.switch_suggestion.affected_steam_ids()
        self.switch_suggestion = None

    def cmd_veto(self, player, msg, channel):
        if not self.auto_switch:
            return

        if not self.switch_suggestion:
            return

        self.switch_suggestion.agree(player)

        if not self.switch_suggestion.all_agreed():
            return

        self.msg("Both players vetoed!
```
The switch will be terminated.") self.switch_suggestion = None def cmd_nokick(self, player, msg, channel): def dontkick(_steam_id): if _steam_id not in self.kickthreads: return kickthread = self.kickthreads[_steam_id] _resolved_player = self.player(_steam_id) if _resolved_player is None: return kickthread.stop() del self.kickthreads[_steam_id] _resolved_player.unmute() channel.reply("^7An admin has prevented {}^7 from being kicked.".format(_resolved_player.name)) if self.kickthreads is None or len(self.kickthreads.keys()) == 0: player.tell("^6Psst^7: There are no people being kicked right now.") return minqlx.RET_STOP_ALL if len(self.kickthreads.keys()) == 1: dontkick(list(self.kickthreads.keys())[0]) return _scheduled_players = [] for steam_id in self.kickthreads.keys(): if not self.kickthreads[steam_id].is_alive(): continue _player = self.player(steam_id) if _player is None: continue _scheduled_players.append(_player) _names = [p.name for p in _scheduled_players] if len(msg) < 2: player.tell("^6Psst^7: did you mean ^6{}^7?".format("^7 or ^6".join(_names))) return minqlx.RET_STOP_ALL matched_players = [_player for _player in _scheduled_players if msg[1] in _player.name] if len(matched_players) == 0: player.tell("^6Psst^7: no players matched '^6{}^7'?".format(msg[1])) return minqlx.RET_STOP_ALL if len(matched_players) > 1: _matched_names = [_player.name for _player in matched_players] player.tell("^6Psst^7: did you mean ^6{}^7?".format("^7 or ^6".join(_matched_names))) return minqlx.RET_STOP_ALL dontkick(matched_players[0].steam_id) def handle_map_change(self, mapname, factory): @minqlx.delay(3) def fetch_ratings_from_newmap(_mapname): steam_ids = [player.steam_id for player in self.players()] self.fetch_mapbased_ratings(steam_ids, mapname=_mapname) self.switched_players = [] self.informed_players = [] self.previous_ratings = self.ratings self.ratings = {} self.fetch_and_diff_ratings() fetch_ratings_from_newmap(mapname.lower()) self.clean_up_kickthreads() @minqlx.thread def clean_up_kickthreads(self): dead_threads = [] for steam_id in self.kickthreads.keys(): thread = self.kickthreads[steam_id] if not thread.is_alive(): dead_threads.append(steam_id) for dead_thread in dead_threads: del self.kickthreads[dead_thread] @minqlx.thread def fetch_and_diff_ratings(self): for rating_provider in [TRUSKILLS, A_ELO, B_ELO]: if rating_provider.name in self.previous_ratings: rating_results = \ rating_provider.fetch_elos(self.previous_ratings[rating_provider.name].rated_steam_ids()) if rating_results is None: continue self.append_ratings(rating_provider.name, rating_results) self.rating_diffs[rating_provider.name] = \ RatingProvider.from_json(rating_results) - self.previous_ratings[rating_provider.name] if self.previous_map is None: return rating_provider_name = "{} {}".format(self.previous_map, TRUSKILLS.name) if rating_provider_name not in self.previous_ratings: return rating_results = TRUSKILLS.fetch_elos(self.previous_ratings[rating_provider_name].rated_steam_ids(), headers={"X-QuakeLive-Map": self.previous_map}) if rating_results is None: return self.append_ratings(rating_provider_name, rating_results) self.rating_diffs[rating_provider_name] = \ RatingProvider.from_json(rating_results) - self.previous_ratings[rating_provider_name] def handle_player_connect(self, player): @minqlx.thread def fetch_player_elos(_player): self.fetch_ratings([_player.steam_id]) self.schedule_kick_for_players_outside_rating_limits([_player.steam_id]) self.record_join_times(player) fetch_player_elos(player) def 
record_join_times(self, player): if player.steam_id in self.jointimes: if (time.time() - self.jointimes[player.steam_id]) < 5: return self.jointimes[player.steam_id] = time.time() def schedule_kick_for_players_outside_rating_limits(self, steam_ids): if not self.ratingLimit_kick: return for steam_id in steam_ids: if not self.is_player_within_configured_rating_limit(steam_id): if steam_id not in self.kickthreads or not self.kickthreads[steam_id].is_alive(): configured_rating_provider_name = self.configured_rating_provider_name() configured_rating_provider = self.ratings[configured_rating_provider_name] if steam_id not in configured_rating_provider: continue gametype = self.game.type_short player_ratings = configured_rating_provider.rating_for(steam_id, gametype) if self.ratingLimit_min <= player_ratings: highlow = "high" else: highlow = "low" t = KickThread(steam_id, player_ratings, highlow) t.start() self.kickthreads[steam_id] = t def handle_player_disconnect(self, player, reason): if player.steam_id in self.jointimes: del self.jointimes[player.steam_id] def handle_team_switch_attempt(self, player, old, new): self.logger.debug("{} switched from {} to {}".format(player.clean_name, old, new)) if not self.game: return minqlx.RET_NONE gametype = self.game.type_short if gametype not in SUPPORTED_GAMETYPES: return minqlx.RET_NONE if new in ["red", "blue", "any", "free"]: rating_check = self.check_rating_limit(player) if rating_check is not None: return rating_check if self.game.state != "in_progress": return minqlx.RET_NONE return self.try_auto_rebalance(player, old, new) def check_rating_limit(self, player): if self.is_player_within_configured_rating_limit(player.steam_id): return if self.ratingLimit_kick: kickmsg = "so you'll be kicked shortly..." else: kickmsg = "but you are free to keep watching." player.tell("^6You do not meet the skill rating requirements to play on this server, {}".format(kickmsg)) player.center_print( "^6You do not meet the skill rating requirements to play on this server, {}".format(kickmsg)) return minqlx.RET_STOP_ALL def is_player_within_configured_rating_limit(self, steam_id): configured_rating_provider_name = self.configured_rating_provider_name() if configured_rating_provider_name.endswith("truskills"): configured_rating_provider_name = TRUSKILLS.name if configured_rating_provider_name not in self.ratings: self.logger.debug("Ratings not found. 
Allowing player to join: {}.".format(configured_rating_provider_name)) return True configured_rating_provider = self.ratings[configured_rating_provider_name] if steam_id not in configured_rating_provider: return False gametype = self.game.type_short player_ratings = configured_rating_provider.rating_for(steam_id, gametype) if self.ratingLimit_min <= player_ratings <= self.ratingLimit_max: return True player_games = configured_rating_provider.games_for(steam_id, gametype) return player_games < self.ratingLimit_minGames def try_auto_rebalance(self, player, old, new): if not self.auto_rebalance: return minqlx.RET_NONE if old not in ["spectator", "free"] or new not in ['red', 'blue', 'any']: return minqlx.RET_NONE teams = self.teams() if len(teams["red"]) == len(teams["blue"]): self.last_new_player_id = player.steam_id return minqlx.RET_NONE if not self.last_new_player_id: return minqlx.RET_NONE last_new_player = self.player(self.last_new_player_id) if not last_new_player: self.last_new_player_id = None return minqlx.RET_NONE gametype = self.game.type_short other_than_last_players_team = self.other_team(last_new_player.team) new_player_team = teams[other_than_last_players_team].copy() + [player] proposed_diff = self.calculate_player_average_difference(gametype, teams[last_new_player.team].copy(), new_player_team) alternative_team_a = [player for player in teams[last_new_player.team] if player != last_new_player] + \ [player] alternative_team_b = teams[other_than_last_players_team].copy() + [last_new_player] alternative_diff = self.calculate_player_average_difference(gametype, alternative_team_a, alternative_team_b) self.last_new_player_id = None if proposed_diff > alternative_diff: last_new_player.tell("{}, you have been moved to {} to maintain team balance." .format(last_new_player.clean_name, self.format_team(other_than_last_players_team))) last_new_player.put(other_than_last_players_team) if new in [last_new_player.team]: return minqlx.RET_NONE if new not in ["any"]: player.tell("{}, you have been moved to {} to maintain team balance." .format(player.clean_name, self.format_team(last_new_player.team))) player.put(last_new_player.team) return minqlx.RET_STOP_ALL if new not in ["any", other_than_last_players_team]: player.tell("{}, you have been moved to {} to maintain team balance." 
                        .format(player.clean_name, self.format_team(other_than_last_players_team)))
            player.put(other_than_last_players_team)
            return minqlx.RET_STOP_ALL

        return minqlx.RET_NONE

    def other_team(self, team):
        if team == "red":
            return "blue"
        return "red"

    def calculate_player_average_difference(self, gametype, team1, team2):
        team1_steam_ids = [player.steam_id for player in team1]
        team2_steam_ids = [player.steam_id for player in team2]
        configured_rating_provider_name = self.configured_rating_provider_name()
        configured_rating_provider = self.ratings[configured_rating_provider_name]
        # team_average expects (steam_ids, gametype); passing gametype first would
        # silently yield 0 for both averages, so keep the argument order aligned
        # with its signature.
        team1_avg = self.team_average(team1_steam_ids, gametype, rating_provider=configured_rating_provider)
        team2_avg = self.team_average(team2_steam_ids, gametype, rating_provider=configured_rating_provider)
        return abs(team1_avg - team2_avg)

    def format_team(self, team):
        if team == "red":
            return "^1red^7"
        if team == "blue":
            return "^4blue^7"
        return "^3{}^7".format(team)

    def handle_team_switch(self, player, old, new):
        if self.last_new_player_id == player.steam_id and new in ["free", "spectator"]:
            self.last_new_player_id = None
        if new not in ["red", "blue", "any"]:
            return
        self.inform_about_rating_changes(player)

    def inform_about_rating_changes(self, player):
        if player.steam_id in self.informed_players:
            return
        self.informed_players.append(player.steam_id)
        if not self.wants_to_be_informed(player.steam_id):
            return
        changed_ratings = []
        previous_truskills = "{} {}".format(self.previous_map, TRUSKILLS.name)
        for rating_provider_name in [previous_truskills, TRUSKILLS.name, A_ELO.name, B_ELO.name]:
            formatted_diffs = self.format_rating_diffs_for_rating_provider_name_and_player(
                rating_provider_name, player.steam_id)
            if formatted_diffs is not None:
                changed_ratings.append(formatted_diffs)
        if len(changed_ratings) == 0:
            return
        player.tell("Your ratings changed since the last map: {}".format(", ".join(changed_ratings)))

    def format_rating_diffs_for_rating_provider_name_and_player(self, rating_provider_name, steam_id):
        if rating_provider_name not in self.rating_diffs or steam_id not in self.rating_diffs[rating_provider_name] or \
                self.previous_gametype not in self.rating_diffs[rating_provider_name][steam_id] or \
                rating_provider_name not in self.ratings or steam_id not in self.ratings[rating_provider_name]:
            return None
        current_rating = self.ratings[rating_provider_name][steam_id][self.previous_gametype]["elo"]
        rating_diff = self.rating_diffs[rating_provider_name][steam_id][self.previous_gametype]
        if rating_provider_name.endswith(TRUSKILLS.name):
            if rating_diff < 0.0:
                return "^3{}^7: ^4{:.02f}^7 (^1{:+.02f}^7)".format(rating_provider_name, current_rating, rating_diff)
            elif rating_diff > 0.0:
                return "^3{}^7: ^4{:.02f}^7 (^2{:+.02f}^7)".format(rating_provider_name, current_rating, rating_diff)
            return None
        if rating_diff < 0:
            return "^3{}^7: ^4{:d}^7 (^1{:+d}^7)".format(rating_provider_name, current_rating, rating_diff)
        elif rating_diff > 0:
            return "^3{}^7: ^4{:d}^7 (^2{:+d}^7)".format(rating_provider_name, current_rating, rating_diff)
        return None

    @minqlx.delay(5)
    def handle_game_countdown(self):
        self.msg("^7Balancing on skill ratings...")
        self.callback_balance(None, minqlx.CHAT_CHANNEL)

    def handle_round_countdown(self, round_number):
        @minqlx.next_frame
        def execute_switch_suggestion():
            self.execute_suggestion()

        if (not self.auto_switch and self.switch_suggestion is not None and self.switch_suggestion.all_agreed()) or \
                (self.auto_switch and self.switch_suggestion is not None and not self.switch_suggestion.all_agreed()):
            execute_switch_suggestion()
self.in_countdown = True self.even_up_teams() self.balance_before_start(round_number) def even_up_teams(self): teams = self.teams() player_count = len(teams["red"] + teams["blue"]) if player_count == 1: return team_diff = len(teams["red"]) - len(teams["blue"]) if abs(team_diff) == 0: return even_to, even_from = ["blue", "red"] if team_diff > 0 else ["red", "blue"] n = int(abs(team_diff) / 2) last = self.identify_player_to_move() if team_diff % 2 == 0: amount_players_moved = last.name if n == 1 else "{} players".format(n) self.msg( "^6Uneven teams detected!^7 At round start i'll move {} to {}".format(amount_players_moved, even_to)) return amount_players_moved = "lowest player" if n == 1 else "{} lowest players".format(n) message = " and move {} to {}".format(amount_players_moved, even_to) if n else '' self.msg("^6Uneven teams detected!^7 Server will auto spec {}{}.".format(last.name, message)) def identify_player_to_move(self): teams = self.teams() # See which team is bigger than the other if len(teams["blue"]) > len(teams["red"]): bigger_team = teams["blue"].copy() elif len(teams["red"]) > len(teams["blue"]): bigger_team = teams["red"].copy() else: self.msg("Cannot pick last player since there are none.") return if (self.game.red_score + self.game.blue_score) >= 1: self.msg("Picking someone to {} based on score".format(self.last_action)) lowest_score = bigger_team[0].score lowest_players = [bigger_team[0]] for p in bigger_team: if lowest_score == 0 and p.score <= lowest_score: lowest_players.append(p) elif p.score < lowest_players[0].score: lowest_score = max(p.score, 0) lowest_players = [p] elif p.score == lowest_players[0].score: lowest_players.append(p) if len(lowest_players) == 1: lowest_player = lowest_players[0] else: lowest_players2 = [lowest_players[0]] for player in lowest_players: if player.stats.damage_dealt < lowest_players2[0].stats.damage_dealt: lowest_players2 = [player] elif player.stats.damage_dealt == lowest_players2[0].stats.damage_dealt: lowest_players2.append(player) if len(lowest_players2) == 1: lowest_player = lowest_players2[0] else: lowest_player = max(lowest_players2, key=lambda e1: self.find_time(e1)) else: self.msg("Picking someone to {} based on join times.".format(self.last_action)) lowest_player = max(bigger_team, key=lambda e1: self.find_time(e1)) self.msg("Picked {} from the {} team.".format(lowest_player.name, lowest_player.team)) return lowest_player def handle_round_start(self, round_number): self.last_new_player_id = None self.in_countdown = False self.balance_before_start(round_number, True) @minqlx.thread def balance_before_start(self, roundnumber, direct=False): @minqlx.next_frame def game_logic(func): func() @minqlx.next_frame def slay_player(p): p.health = 0 def exclude_player(p): t = self.teams().copy() if p in t['red']: t['red'].remove(p) if p in t['blue']: t['blue'].remove(p) return t countdown = int(self.get_cvar('g_roundWarmupDelay')) if self.game.type_short == "ft": countdown = int(self.get_cvar('g_freezeRoundDelay')) if not direct: time.sleep(max(countdown / 1000 - 0.8, 0)) teams = self.teams() player_count = len(teams["red"] + teams["blue"]) if player_count == 1 or self.game.state not in ["in_progress"]: return if self.game.type_short == "ca": if self.game.roundlimit in [self.game.blue_score, self.game.red_score]: return if self.game.type_short == "tdm": if self.game.fraglimit in [self.game.blue_score, self.game.red_score]: return if self.game.type_short == "ctf": if self.game.capturelimit in [self.game.blue_score, self.game.red_score]: 
                return

        team_diff = len(teams["red"]) - len(teams["blue"])
        while abs(team_diff) >= 1:
            last = self.identify_player_to_move()
            if not last:
                self.msg(
                    "Error: Trying to balance before round {} start. Red({}) - Blue({}) players"
                    .format(roundnumber, len(teams['red']), len(teams['blue'])))
                return
            if team_diff % 2 == 0:
                even_to, even_from = ["blue", "red"] if team_diff > 0 else ["red", "blue"]
                game_logic(lambda: last.put(even_to))
                self.msg("^6Uneven teams action^7: Moved {} from {} to {}".format(last.name, even_from, even_to))
            else:
                if self.prevent or self.last_action == "ignore":
                    self.msg("^6Uneven teams^7: {} will not be moved to spec".format(last.name))
                    # Nothing in this branch changes the team sizes, so stop here
                    # instead of re-checking the same diff forever.
                    return
                elif self.last_action == "slay":
                    if "anti_rape" in minqlx.Plugin._loaded_plugins:
                        game_logic(lambda: last.put("spectator"))
                        self.msg("^6Uneven teams action^7: {} was moved to spec to even teams!".format(last.name))
                        self.msg("Not slayed because anti_rape plugin is loaded.")
                    else:
                        slay_player(last)
                        self.msg("{} ^7has been ^1slain ^7to even the teams!".format(last.name))
                        # Slaying does not change the head count, so the teams are
                        # as even as they are going to get.
                        return
                else:
                    self.msg("^6Uneven teams action^7: {} was moved to spec to even teams!".format(last.name))
                    game_logic(lambda: last.put("spectator"))
            time.sleep(0.2)
            # Refresh the team snapshot so the loop condition sees the moves above.
            teams = self.teams()
            team_diff = len(teams["red"]) - len(teams["blue"])

    def handle_game_end(self, data):
        if not self.game or bool(data["ABORTED"]):
            return

        teams = self.teams()
        self.previous_teams = [player.steam_id for player in teams["red"]], \
                              [player.steam_id for player in teams["blue"]]

        self.previous_map = data["MAP"].lower()
        self.previous_gametype = data["GAME_TYPE"].lower()

        # self.record_team_stats(self.previous_gametype)

        if len(teams["red"] + teams["blue"]) == 4 and self.twovstwo_iter is None:
            steam_ids = [player.steam_id for player in teams["red"] + teams["blue"]]
            self.twovstwo_steam_ids = steam_ids
            self.twovstwo_combinations = [(steam_ids[0], steam_ids[1]),
                                          (steam_ids[0], steam_ids[2]),
                                          (steam_ids[0], steam_ids[3])]
            self.twovstwo_iter = random_iterator(self.twovstwo_combinations)

            next_twovstwo = sorted(list(next(self.twovstwo_iter)))
            other_twovstwo = sorted([steam_id for steam_id in steam_ids if steam_id not in next_twovstwo])
            red_steam_ids = sorted([player.steam_id for player in teams["red"]])
            blue_steam_ids = sorted([player.steam_id for player in teams["blue"]])
            while not (next_twovstwo == red_steam_ids or
                       next_twovstwo == blue_steam_ids or
                       other_twovstwo == red_steam_ids or
                       other_twovstwo == blue_steam_ids):
                next_twovstwo = sorted(list(next(self.twovstwo_iter)))
                other_twovstwo = sorted([steam_id for steam_id in steam_ids if steam_id not in next_twovstwo])

    @minqlx.thread
    def record_team_stats(self, gametype):
        teams = self.teams()
        if len(teams["red"] + teams["blue"]) == 2:
            return

        stats = [
            self.game.map,
            self.game.red_score,
            self.game.blue_score,
            self.team_stats(teams["red"], gametype),
            self.team_stats(teams["blue"], gametype)
        ]

        elostats_filename = os.path.join(self.get_cvar("fs_homepath"), "elostats.txt")
        with open(elostats_filename, "a") as elostats_file:
            elostats_file.write("{}\n".format(stats))

    def team_stats(self, team, gametype):
        returned = {}
        for player in team:
            a_elo = 0
            if A_ELO.name in self.ratings and player.steam_id in self.ratings[A_ELO.name]:
                a_elo = self.ratings[A_ELO.name][player.steam_id][gametype]["elo"]
            b_elo = 0
            if B_ELO.name in self.ratings and player.steam_id in self.ratings[B_ELO.name]:
                b_elo = self.ratings[B_ELO.name][player.steam_id][gametype]["elo"]
            truskill = 0
            if TRUSKILLS.name in self.ratings and player.steam_id in self.ratings[TRUSKILLS.name]:
                truskill = self.ratings[TRUSKILLS.name][player.steam_id][gametype]["elo"]
            returned[player.steam_id]
= [a_elo, b_elo, truskill] return returned FILTERED_OUT_GAMETYPE_RESPONSES = ["steamid"] class SkillRatingProvider: def __init__(self, name, url_base, balance_api, timeout=7): self.name = name self.url_base = url_base self.balance_api = balance_api self.timeout = timeout def fetch_elos(self, steam_ids, headers=None): if len(steam_ids) == 0: return None request_url = self.url_base + "{}/{}".format(self.balance_api, "+".join([str(steam_id) for steam_id in steam_ids])) try: result = requests_retry_session().get(request_url, headers=headers, timeout=self.timeout) except requests.RequestException as exception: minqlx.get_logger("balancetwo").debug("request exception: {}".format(exception)) return None if result.status_code != requests.codes.ok: return None return result.json() TRUSKILLS = SkillRatingProvider("Truskill", "http://stats.houseofquake.com/", "elo/map_based") A_ELO = SkillRatingProvider("Elo", "http://qlstats.net/", "elo", timeout=15) B_ELO = SkillRatingProvider("B-Elo", "http://qlstats.net/", "elo_b", timeout=15) class RatingProvider: def __init__(self, json): self.jsons = [json] def __iter__(self): return iter(self.rated_steam_ids()) def __contains__(self, item): if not isinstance(item, int) and not isinstance(item, str): return False steam_id = item if isinstance(item, str): try: steam_id = int(item) except ValueError: return False for json_rating in self.jsons: if "playerinfo" not in json_rating: continue if str(steam_id) in json_rating["playerinfo"]: return True return False def __getitem__(self, item): if item not in self: raise TypeError steam_id = item if isinstance(item, str): try: steam_id = int(item) except ValueError: raise TypeError for json_rating in reversed(self.jsons): if "playerinfo" not in json_rating: continue if str(steam_id) not in json_rating["playerinfo"]: continue return PlayerRating(json_rating["playerinfo"][str(steam_id)]) return None def __sub__(self, other): returned = {} if not isinstance(other, RatingProvider): raise TypeError("Can't subtract '{}' from a RatingProvider".format(type(other).__name__)) for steam_id in self: if steam_id not in other: returned[steam_id] = {gametype: self.gametype_data_for(steam_id, gametype) for gametype in self.rated_gametypes_for(steam_id)} continue returned[steam_id] = {} for gametype in self.rated_gametypes_for(steam_id): if gametype not in other.rated_gametypes_for(steam_id): returned[steam_id][gametype] = self.gametype_data_for(steam_id, gametype) continue gametype_diff = self.gametype_data_for(steam_id, gametype)["elo"] - \ other.gametype_data_for(steam_id, gametype)["elo"] if gametype_diff == 0: continue returned[steam_id][gametype] = round(gametype_diff, 2) return returned @staticmethod def from_json(json_response): return RatingProvider(json_response) def append_ratings(self, json_response): self.jsons.append(json_response) def player_data_for(self, steam_id): return self[steam_id] def gametype_data_for(self, steam_id, gametype): if gametype not in self[steam_id]: return None return self[steam_id][gametype] def rating_for(self, steam_id, gametype): if gametype not in self[steam_id]: return None if "elo" not in self[steam_id][gametype]: return None return self[steam_id][gametype]["elo"] def games_for(self, steam_id, gametype): if gametype not in self[steam_id]: return None if "games" not in self[steam_id][gametype]: return None return self[steam_id][gametype]["games"] def rated_gametypes_for(self, steam_id): player_data = self[steam_id] if player_data is None: return [] return [gametype for gametype in player_data 
                if gametype not in FILTERED_OUT_GAMETYPE_RESPONSES]

    def privacy_for(self, steam_id):
        player_data = self[steam_id]
        if player_data is None:
            return None
        if "privacy" not in player_data:
            return "private"
        return player_data["privacy"]

    def rated_steam_ids(self):
        returned = []
        for json_rating in self.jsons:
            if "playerinfo" not in json_rating:
                continue
            returned = returned + [int(steam_id) for steam_id in json_rating["playerinfo"]]
        return [steam_id for steam_id in set(returned)]

    def format_elos(self, steam_id):
        result = ""
        for gametype in self.rated_gametypes_for(steam_id):
            if self.games_for(steam_id, gametype) != 0:
                result += "^2{0}^7: ^4{1}^7 ({2} games) ".format(gametype.upper(),
                                                                 self[steam_id][gametype]["elo"],
                                                                 self[steam_id][gametype]["games"])
        return result

    def has_ratings_for_all(self, gametype, steam_ids):
        for steam_id in steam_ids:
            if steam_id not in self:
                return False
            if gametype not in self[steam_id]:
                return False
            if self[steam_id][gametype]["games"] == 0:
                return False
        return True


class PlayerRating:
    def __init__(self, ratings, _time=-1, local=False):
        self.ratings = ratings
        self.time = _time
        self.local = local

    def __iter__(self):
        return iter(self.ratings["ratings"])

    def __contains__(self, item):
        if not isinstance(item, str):
            return False
        return item in self.ratings["ratings"]

    def __getitem__(self, item):
        if item not in self:
            raise KeyError
        if not isinstance(item, str):
            raise KeyError

        returned = self.ratings["ratings"][item].copy()
        returned["time"] = self.time
        returned["local"] = self.local
        return returned

    def __getattr__(self, attr):
        if attr not in ["privacy"]:
            raise AttributeError("'{}' object has no attribute '{}'".format(self.__class__.__name__, attr))
        return self.ratings["privacy"]


class SuggestionRatingStrategy:
    @abstractmethod
    def best_suggestion(self, suggestions):
        pass


class DiffSuggestionRatingStrategy(SuggestionRatingStrategy):
    def best_suggestion(self, suggestions):
        return min(suggestions, key=lambda suggestion: abs(suggestion.avg_diff))


class SuggestionQueue:
    def __init__(self, items=None, strategy=DiffSuggestionRatingStrategy()):
        self.suggestions = items if items is not None else []
        self.strategy = strategy

    def __str__(self):
        return "[{}]".format(", ".join([str(suggestion) for suggestion in self.suggestions]))

    def __len__(self):
        return len(self.suggestions)

    def best_suggestion(self):
        if len(self.suggestions) == 0:
            return None
        if len(self.suggestions) == 1:
            return self.suggestions[0]
        return self.strategy.best_suggestion(self.suggestions)


class Suggestion:
    def __init__(self, red_player, blue_player, avg_diff, stddev_diff=0):
        self.red_player = red_player
        self.blue_player = blue_player
        self.avg_diff = avg_diff
        self.stddev_diff = stddev_diff
        self._agreed = dict()
        self.auto_switch = Plugin.get_cvar("qlx_balancetwo_autoSwitch", bool)

    def __eq__(self, other):
        if not isinstance(other, Suggestion):
            return False
        return self.red_player == other.red_player and self.blue_player == other.blue_player and \
            self.avg_diff == other.avg_diff and self.stddev_diff == other.stddev_diff

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        red_player = "({}, score: {}, dmg: {}, time: {})".format(self.red_player.clean_name, self.red_player.score,
                                                                 self.red_player.stats.damage_dealt,
                                                                 self.red_player.stats.time)
        blue_player = "({}, score: {}, dmg: {}, time: {})".format(self.blue_player.clean_name, self.blue_player.score,
                                                                  self.blue_player.stats.damage_dealt,
                                                                  self.blue_player.stats.time)

        return "Switch {} with {}, resulting diff: {}" \
            .format(red_player, blue_player, self.avg_diff,
self.stddev_diff) def announcement(self): if not self.auto_switch: return "SUGGESTION: switch ^6{}^7 with ^6{}^7. Mentioned players can type ^6!a^7 to agree." \ .format(self.red_player.clean_name, self.blue_player.clean_name) return "NOTICE: Server will switch ^6{}^7 with ^6{}^7 at start of next round. " \ "Both mentioned players need to type ^6!v^7 to veto the switch." \ .format(self.red_player.clean_name, self.blue_player.clean_name) def agree(self, player): self._agreed[player.steam_id] = True def agreed(self, player): return self._agreed.get(player.steam_id, False) def all_agreed(self): return self.agreed(self.red_player) and self.agreed(self.blue_player) def affected_steam_ids(self): return [self.red_player.steam_id, self.blue_player.steam_id] def validate_players(self): self.red_player.update() self.blue_player.update() def execute(self): self.red_player.update() self.blue_player.update() if self.red_player.team == "spectator": raise PlayerMovedToSpecError(self.red_player) if self.blue_player.team == "spectator": raise PlayerMovedToSpecError(self.blue_player) Plugin.switch(self.red_player, self.blue_player) @property def max_score(self): return max(self.red_player.score, self.blue_player.score) @property def score_sum(self): return self.red_player.score + self.blue_player.score class KickThread(threading.Thread): def __init__(self, steam_id, rating, highlow): threading.Thread.__init__(self) self.steam_id = steam_id self.rating = rating self.highlow = highlow self.go = True def try_msg(self): time.sleep(5) player = Plugin.player(self.steam_id) if not player: return if not self.go: return kickmsg = "so you'll be ^6kicked ^7shortly..." Plugin.msg("^7Sorry, {} your rating ({}) is too {}, {}".format(player.name, self.rating, self.highlow, kickmsg)) def try_mute(self): @minqlx.next_frame def execute(): try: player.mute() except ValueError: pass time.sleep(5) player = Plugin.player(self.steam_id) if not player: return if not self.go: return execute() def try_kick(self): @minqlx.next_frame def execute(): try: player.kick("^1GOT KICKED!^7 Rating ({}) was too {} for this server.".format(self.rating, self.highlow)) except ValueError: pass time.sleep(30) player = Plugin.player(self.steam_id) if not player: return if not self.go: return execute() def run(self): self.try_mute() self.try_msg() self.try_kick() def stop(self): self.go = False class PlayerMovedToSpecError(Exception): def __init__(self, player): self.player = player class random_iterator: def __init__(self, seq): self.seq = seq self.random_seq = random.sample(self.seq, len(self.seq)) self.iterator = iter(self.random_seq) def __iter__(self): return self def __next__(self): try: return next(self.iterator) except StopIteration: self.random_seq = random.sample(self.seq, len(self.seq)) self.iterator = iter(self.random_seq) return next(self.iterator)
remove_trailing_color_code
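For reference, team_average and team_stddev in the plugin above reduce to a mean and a population standard deviation over each member's per-gametype rating. A minimal standalone sketch of that math, using made-up ratings in a plain dict rather than the plugin's RatingProvider:

import math

# Made-up per-player Elo values for one gametype; the plugin reads these
# from its RatingProvider instead of a plain dict.
elos = {76561198000000001: 1450, 76561198000000002: 1510, 76561198000000003: 1385}

def team_average(steam_ids):
    # Mean rating of the team; 0 for an empty team, mirroring the plugin's guard.
    if not steam_ids:
        return 0
    return sum(elos[sid] for sid in steam_ids) / len(steam_ids)

def team_stddev(steam_ids, mu=None):
    # Population standard deviation around mu (the team average by default).
    if not steam_ids:
        return 0
    if mu is None:
        mu = team_average(steam_ids)
    return math.sqrt(sum((elos[sid] - mu) ** 2 for sid in steam_ids) / len(steam_ids))

print(team_average(list(elos)))  # ~1448.33
print(team_stddev(list(elos)))   # ~51.05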
general_util.py
import logging import os import sys import json import time import re UNCENSORED_LOGGING = os.getenv("UNCENSORED_LOGGING") LOG_CENSOR = [ { "regex": r"(eyJ0e[A-Za-z0-9-_]{10})[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*([A-Za-z0-9-_]{10})", "replace": "\\g<1>XXX<JWTTOKEN>XXX\\g<2>", "description": "X-out JWT Token payload" }, { "regex": r"(EDL-[A-Za-z0-9]+)[A-Za-z0-9]{40}([A-Za-z0-9]{10})", "replace": "\\g<1>XXX<EDLTOKEN>XXX\\g<2>", "description": "X-out non-JWT EDL token" }, { "regex": r"(Basic [A-Za-z0-9-_]{5})[A-Za-z0-9]*([A-Za-z0-9-_]{5})", "replace": "\\g<1>XXX<BASICAUTH>XXX\\g<2>", "description": "X-out Basic Auth Credentials" }, { "regex": r"([^A-Za-z0-9/+=][A-Za-z0-9/+=]{5})[A-Za-z0-9/+=]{30}([A-Za-z0-9/+=]{5}[^A-Za-z0-9/+=])", "replace": "\\g<1>XXX<AWSSECRET>XXX\\g<2>", "description": "X-out AWS Secret" } ] def return_timing_object(**timing): timing_object = { "service": "Unknown", "endpoint": "Unknown", "method": "GET", "duration": 0, "unit": "milliseconds"} timing_object.update({k.lower(): v for k,v in timing.items()}) return {"timing":timing_object } def duration(time_in): # Return the time duration in milliseconds delta = time.time() - time_in return(float("{:.2f}".format(delta*1000))) def filter_log_credentials(msg): if UNCENSORED_LOGGING: return msg for regex in LOG_CENSOR: result = re.sub(regex["regex"], regex["replace"], msg, 0, re.MULTILINE) if result: msg = str(result) return msg def
(msg): if type(msg) is dict: return json.dumps(msg).replace("'", '"') if '{' in msg: try: json_obj = json.loads(msg) return json.dumps(json_obj).replace("'", '"') except json.decoder.JSONDecodeError: # Not JSON. pass return '"{0}"'.format(msg) class CustomLogFilter(logging.Filter): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.params = { 'build_vers': os.getenv("BUILD_VERSION", "NOBUILD"), 'maturity': os.getenv('MATURITY', 'DEV'), 'request_id': None, 'origin_request_id': None, 'user_id': None, 'route': None } def filter(self, record): record.msg = filter_log_credentials(reformat_for_json(record.msg)) record.build_vers = self.params['build_vers'] record.maturity = self.params['maturity'] record.request_id = self.params['request_id'] record.origin_request_id = self.params['origin_request_id'] record.user_id = self.params['user_id'] record.route = self.params['route'] return True def update(self, **context): for key in context: self.params.update({key: context[key]}) custom_log_filter = CustomLogFilter() def log_context(**context): custom_log_filter.update(**context) def get_log(): loglevel = os.getenv('LOGLEVEL', 'INFO') logtype = os.getenv('LOGTYPE', 'json') if logtype == 'flat': log_fmt_str = "%(levelname)s: %(message)s (%(filename)s line " + \ "%(lineno)d/%(build_vers)s/%(maturity)s) - " + \ "RequestId: %(request_id)s; OriginRequestId: %(origin_request_id)s; user_id: %(user_id)s; route: %(route)s" else: log_fmt_str = '{"level": "%(levelname)s", ' + \ '"RequestId": "%(request_id)s", ' + \ '"OriginRequestId": "%(origin_request_id)s", ' + \ '"message": %(message)s, ' + \ '"maturity": "%(maturity)s", ' + \ '"user_id": "%(user_id)s", ' + \ '"route": "%(route)s", ' + \ '"build": "%(build_vers)s", ' + \ '"filename": "%(filename)s", ' + \ '"lineno": %(lineno)d } ' logger = logging.getLogger() for h in logger.handlers: logger.removeHandler(h) h = logging.StreamHandler(sys.stdout) h.setFormatter(logging.Formatter(log_fmt_str)) h.addFilter(custom_log_filter) logger.addHandler(h) logger.setLevel(getattr(logging, loglevel)) if os.getenv("QUIETBOTO", 'TRUE').upper() == 'TRUE': # BOTO, be quiet plz logging.getLogger('boto3').setLevel(logging.ERROR) logging.getLogger('botocore').setLevel(logging.ERROR) logging.getLogger('nose').setLevel(logging.ERROR) logging.getLogger('elasticsearch').setLevel(logging.ERROR) logging.getLogger('s3transfer').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('connectionpool').setLevel(logging.ERROR) return logger
reformat_for_json
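The censoring in filter_log_credentials above is purely regex-driven, so it can be exercised without any real credentials. A small sketch with a fabricated token, assuming general_util.py is importable as a module and UNCENSORED_LOGGING is unset:

from general_util import filter_log_credentials, reformat_for_json

# Fabricated JWT-shaped string: the "eyJ0e" prefix matches the JWTTOKEN rule.
fake_jwt = "Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.cGF5bG9hZHBheWxvYWQ.c2lnbmF0dXJlc2lnbmF0dXJl"
print(filter_log_credentials(fake_jwt))   # middle is replaced with XXX<JWTTOKEN>XXX, edges kept

print(reformat_for_json({"event": "login"}))  # dict -> JSON string
print(reformat_for_json("plain text"))        # plain string -> quoted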
keyframes.js
class Keyframes { constructor(elem) { this.elem = elem; } isSupported() { return document.body.style.animationName !== undefined; } reset(callback) { this.removeEvents(); this.elem.style.animationPlayState = 'running'; this.elem.style.animation = 'none'; if (callback) { requestAnimationFrame(callback); } } pause() { this.elem.style.animationPlayState = 'paused'; } resume() { this.elem.style.animationPlayState = 'running'; } play(frameOptions, callback) { if (this.elem.style.animationName === frameOptions.name) { this.reset(() => this.play(frameOptions, callback)); return this; } const animationcss = Keyframes.playCSS(frameOptions); const addEvent = (type, eventCallback) => { const listenerName = `${type}Listener`; this.elem.removeEventListener(type, this[listenerName]); this[listenerName] = eventCallback; this.elem.addEventListener(type, this[listenerName]); }; this.elem.style.animationPlayState = 'running'; this.elem.style.animation = animationcss; this.frameOptions = frameOptions; addEvent('animationiteration', callback || frameOptions.complete); addEvent('animationend', callback || frameOptions.complete); return this; } removeEvents() { this.elem.removeEventListener('animationiteration', this.animationiterationListener); this.elem.removeEventListener('animationend', this.animationendListener); } static playCSS(frameOptions) { const animObjToStr = function (obj) { const newObj = Object.assign({}, { duration: '0s', timingFunction: 'ease', delay: '0s', iterationCount: 1, direction: 'normal', fillMode: 'forwards', }, obj); return [ newObj.name, newObj.duration, newObj.timingFunction, newObj.delay, newObj.iterationCount, newObj.direction, newObj.fillMode, ].join(' '); }; if (frameOptions.constructor === Array) { const frameOptionsStrings = []; for (let i = 0; i < frameOptions.length; i += 1) { frameOptionsStrings.push(typeof frameOptions[i] === 'string' ? 
frameOptions[i] : animObjToStr(frameOptions[i])); } return frameOptionsStrings.join(', '); } else if (typeof frameOptions === 'string') { return frameOptions; } return animObjToStr(frameOptions); } static generateCSS(frameData) { let css = `@keyframes ${frameData.name} {`; for (const key in frameData) { if (key !== 'name' && key !== 'media' && key !== 'complete') { css += `${key} {`; for (const property in frameData[key]) { css += `${property}:${frameData[key][property]};`; } css += '}'; } } css += '}'; if (frameData.media) { css = `@media ${frameData.media}{${css}}`; } return css; } static generate(frameData) { const css = this.generateCSS(frameData); const oldFrameIndex = Keyframes.rules.indexOf(frameData.name); if (oldFrameIndex > -1) { Keyframes.sheet.deleteRule(oldFrameIndex); delete Keyframes.rules[oldFrameIndex]; } const ruleIndex = Keyframes.sheet.insertRule(css); Keyframes.rules[ruleIndex] = frameData.name; } static define(frameData) { if (frameData.length) { for (let i = 0; i < frameData.length; i += 1) { this.generate(frameData[i]); } } else { this.generate(frameData); } } static defineCSS(frameData) { if (frameData.length) { let css = ''; for (let i = 0; i < frameData.length; i += 1) { css += this.generateCSS(frameData[i]); } return css; } return this.generateCSS(frameData); } static plugin(pluginFunc) { if (pluginFunc.constructor === Array) { for (let i = 0; i < pluginFunc.length; i += 1) { pluginFunc[i](Keyframes); } } else { pluginFunc(Keyframes); } } } if (typeof document !== 'undefined') { const style = document.createElement('style'); style.setAttribute('id', 'keyframesjs-stylesheet'); document.head.appendChild(style); Keyframes.sheet = style.sheet; Keyframes.rules = []; }
export default Keyframes;
create.go
package cloudfiles import ( "fmt" "io" "github.com/fastly/cli/pkg/cmd" "github.com/fastly/cli/pkg/compute/manifest" "github.com/fastly/cli/pkg/config" "github.com/fastly/cli/pkg/text" "github.com/fastly/go-fastly/v3/fastly" ) // CreateCommand calls the Fastly API to create a Cloudfiles logging endpoint. type CreateCommand struct { cmd.Base Manifest manifest.Data // required EndpointName string // Can't shadow cmd.Base method Name(). Token string User string AccessKey string BucketName string ServiceVersion cmd.OptionalServiceVersion // optional AutoClone cmd.OptionalAutoClone Path cmd.OptionalString Region cmd.OptionalString PublicKey cmd.OptionalString Period cmd.OptionalUint GzipLevel cmd.OptionalUint MessageType cmd.OptionalString TimestampFormat cmd.OptionalString Format cmd.OptionalString FormatVersion cmd.OptionalUint ResponseCondition cmd.OptionalString Placement cmd.OptionalString CompressionCodec cmd.OptionalString } // NewCreateCommand returns a usable command registered under the parent. func NewCreateCommand(parent cmd.Registerer, globals *config.Data) *CreateCommand { var c CreateCommand c.Globals = globals c.Manifest.File.SetOutput(c.Globals.Output) c.Manifest.File.Read(manifest.Filename) c.CmdClause = parent.Command("create", "Create a Cloudfiles logging endpoint on a Fastly service version").Alias("add") c.CmdClause.Flag("name", "The name of the Cloudfiles logging object. Used as a primary key for API access").Short('n').Required().StringVar(&c.EndpointName) c.RegisterServiceVersionFlag(cmd.ServiceVersionFlagOpts{ Dst: &c.ServiceVersion.Value, }) c.RegisterAutoCloneFlag(cmd.AutoCloneFlagOpts{ Action: c.AutoClone.Set, Dst: &c.AutoClone.Value, }) c.CmdClause.Flag("user", "The username for your Cloudfile account").Required().StringVar(&c.User) c.CmdClause.Flag("access-key", "Your Cloudfile account access key").Required().StringVar(&c.AccessKey) c.CmdClause.Flag("bucket", "The name of your Cloudfiles container").Required().StringVar(&c.BucketName) c.RegisterServiceIDFlag(&c.Manifest.Flag.ServiceID) c.CmdClause.Flag("path", "The path to upload logs to").Action(c.Path.Set).StringVar(&c.Path.Value) c.CmdClause.Flag("region", "The region to stream logs to. One of: DFW-Dallas, ORD-Chicago, IAD-Northern Virginia, LON-London, SYD-Sydney, HKG-Hong Kong").Action(c.Region.Set).StringVar(&c.Region.Value) c.CmdClause.Flag("placement", "Where in the generated VCL the logging call should be placed, overriding any format_version default. Can be none or waf_debug").Action(c.Placement.Set).StringVar(&c.Placement.Value) c.CmdClause.Flag("period", "How frequently log files are finalized so they can be available for reading (in seconds, default 3600)").Action(c.Period.Set).UintVar(&c.Period.Value) c.CmdClause.Flag("gzip-level", "What level of GZIP encoding to have when dumping logs (default 0, no compression)").Action(c.GzipLevel.Set).UintVar(&c.GzipLevel.Value) c.CmdClause.Flag("format", "Apache style log formatting").Action(c.Format.Set).StringVar(&c.Format.Value) c.CmdClause.Flag("format-version", "The version of the custom logging format used for the configured endpoint. Can be either 2 (default) or 1").Action(c.FormatVersion.Set).UintVar(&c.FormatVersion.Value) c.CmdClause.Flag("response-condition", "The name of an existing condition in the configured endpoint, or leave blank to always execute").Action(c.ResponseCondition.Set).StringVar(&c.ResponseCondition.Value) c.CmdClause.Flag("message-type", "How the message should be formatted. 
One of: classic (default), loggly, logplex or blank").Action(c.MessageType.Set).StringVar(&c.MessageType.Value) c.CmdClause.Flag("timestamp-format", `strftime specified timestamp formatting (default "%Y-%m-%dT%H:%M:%S.000")`).Action(c.TimestampFormat.Set).StringVar(&c.TimestampFormat.Value) c.CmdClause.Flag("public-key", "A PGP public key that Fastly will use to encrypt your log files before writing them to disk").Action(c.PublicKey.Set).StringVar(&c.PublicKey.Value) c.CmdClause.Flag("compression-codec", `The codec used for compression of your logs. Valid values are zstd, snappy, and gzip. If the specified codec is "gzip", gzip_level will default to 3. To specify a different level, leave compression_codec blank and explicitly set the level using gzip_level. Specifying both compression_codec and gzip_level in the same API request will result in an error.`).Action(c.CompressionCodec.Set).StringVar(&c.CompressionCodec.Value) return &c } // ConstructInput transforms values parsed from CLI flags into an object to be used by the API client library. func (c *CreateCommand) ConstructInput(serviceID string, serviceVersion int) (*fastly.CreateCloudfilesInput, error) { var input fastly.CreateCloudfilesInput input.ServiceID = serviceID input.ServiceVersion = serviceVersion input.Name = c.EndpointName input.User = c.User input.AccessKey = c.AccessKey input.BucketName = c.BucketName // The following blocks enforces the mutual exclusivity of the // CompressionCodec and GzipLevel flags. if c.CompressionCodec.WasSet && c.GzipLevel.WasSet { return nil, fmt.Errorf("error parsing arguments: the --compression-codec flag is mutually exclusive with the --gzip-level flag") } if c.Path.WasSet { input.Path = c.Path.Value } if c.Region.WasSet { input.Region = c.Region.Value } if c.Placement.WasSet { input.Placement = c.Placement.Value } if c.Period.WasSet { input.Period = c.Period.Value } if c.GzipLevel.WasSet { input.GzipLevel = c.GzipLevel.Value } if c.Format.WasSet { input.Format = c.Format.Value } if c.FormatVersion.WasSet { input.FormatVersion = c.FormatVersion.Value } if c.ResponseCondition.WasSet { input.ResponseCondition = c.ResponseCondition.Value } if c.MessageType.WasSet
if c.TimestampFormat.WasSet { input.TimestampFormat = c.TimestampFormat.Value } if c.PublicKey.WasSet { input.PublicKey = c.PublicKey.Value } if c.CompressionCodec.WasSet { input.CompressionCodec = c.CompressionCodec.Value } return &input, nil } // Exec invokes the application logic for the command. func (c *CreateCommand) Exec(in io.Reader, out io.Writer) error { serviceID, serviceVersion, err := cmd.ServiceDetails(cmd.ServiceDetailsOpts{ AutoCloneFlag: c.AutoClone, Client: c.Globals.Client, Manifest: c.Manifest, Out: out, ServiceVersionFlag: c.ServiceVersion, VerboseMode: c.Globals.Flag.Verbose, }) if err != nil { return err } input, err := c.ConstructInput(serviceID, serviceVersion.Number) if err != nil { return err } d, err := c.Globals.Client.CreateCloudfiles(input) if err != nil { return err } text.Success(out, "Created Cloudfiles logging endpoint %s (service %s version %d)", d.Name, d.ServiceID, d.ServiceVersion) return nil }
{ input.MessageType = c.MessageType.Value }
test_migrations.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Tests for database migrations. This test case reads the configuration file test_migrations.conf for database connection settings to use in the tests. For each connection found in the config file, the test case runs a series of test cases to ensure that migrations work properly both upgrading and downgrading, and that no data loss occurs if possible. """ import os import fixtures from migrate.versioning import api as migration_api from migrate.versioning import repository from oslo_db.sqlalchemy import enginefacade from oslo_db.sqlalchemy import test_fixtures from oslo_db.sqlalchemy import test_migrations from oslo_db.sqlalchemy import utils as db_utils from oslotest import base as test_base import sqlalchemy from sqlalchemy.engine import reflection from cinder.db import migration import cinder.db.sqlalchemy.migrate_repo from cinder.tests.unit import utils as test_utils from cinder.volume import volume_types class MigrationsMixin(test_migrations.WalkVersionsMixin): """Test sqlalchemy-migrate migrations.""" BOOL_TYPE = sqlalchemy.types.BOOLEAN TIME_TYPE = sqlalchemy.types.DATETIME INTEGER_TYPE = sqlalchemy.types.INTEGER VARCHAR_TYPE = sqlalchemy.types.VARCHAR TEXT_TYPE = sqlalchemy.types.Text @property def INIT_VERSION(self): return migration.INIT_VERSION @property def REPOSITORY(self): migrate_file = cinder.db.sqlalchemy.migrate_repo.__file__ return repository.Repository( os.path.abspath(os.path.dirname(migrate_file))) @property def migration_api(self): return migration_api def setUp(self):
        # (zzzeek) This mixin states that it uses the
        # "self.engine" attribute in the migrate_engine() method.
        # So the mixin must set that up for itself, oslo_db no longer
        # makes these assumptions for you.
        self.engine = enginefacade.writer.get_engine()

    @property
    def migrate_engine(self):
        return self.engine

    def get_table_ref(self, engine, name, metadata):
        metadata.bind = engine
        return sqlalchemy.Table(name, metadata, autoload=True)

    class BannedDBSchemaOperations(fixtures.Fixture):
        """Ban some operations for migrations"""
        def __init__(self, banned_resources=None):
            super(MigrationsMixin.BannedDBSchemaOperations, self).__init__()
            self._banned_resources = banned_resources or []

        @staticmethod
        def _explode(resource, op):
            print('%s.%s()' % (resource, op))  # noqa
            raise Exception(
                'Operation %s.%s() is not allowed in a database migration' % (
                    resource, op))

        def setUp(self):
            super(MigrationsMixin.BannedDBSchemaOperations, self).setUp()
            for thing in self._banned_resources:
                self.useFixture(fixtures.MonkeyPatch(
                    'sqlalchemy.%s.drop' % thing,
                    lambda *a, **k: self._explode(thing, 'drop')))
                self.useFixture(fixtures.MonkeyPatch(
                    'sqlalchemy.%s.alter' % thing,
                    lambda *a, **k: self._explode(thing, 'alter')))

    def migrate_up(self, version, with_data=False):
        # NOTE(dulek): This is a list of migrations where we allow dropping
        # things. The rules for adding things here are very very specific.
        # Insight on how to drop things from the DB in a backward-compatible
        # manner is provided in Cinder's developer documentation.
        # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE WITHOUT CARE
        exceptions = [
            # NOTE(brinzhang): 127 changes the size of quota_usages.resource
            # to 300. This should be safe for the 'quota_usages' db table:
            # 255 is the length limit of volume_type_name, and an additional
            # prefix is prepended to it, so we allow the column to grow
            # to 300.
            127,
            # 136 modifies the tables having a volume_type_id field to make
            # it non-nullable
            136,
        ]

        if version not in exceptions:
            banned = ['Table', 'Column']
        else:
            banned = None
        with MigrationsMixin.BannedDBSchemaOperations(banned):
            super(MigrationsMixin, self).migrate_up(version, with_data)

    def __check_cinderbase_fields(self, columns):
        """Check fields inherited from CinderBase ORM class."""
        self.assertIsInstance(columns.created_at.type, self.TIME_TYPE)
        self.assertIsInstance(columns.updated_at.type, self.TIME_TYPE)
        self.assertIsInstance(columns.deleted_at.type, self.TIME_TYPE)
        self.assertIsInstance(columns.deleted.type, self.BOOL_TYPE)

    def get_table_names(self, engine):
        inspector = reflection.Inspector.from_engine(engine)
        return inspector.get_table_names()

    def get_foreign_key_columns(self, engine, table_name):
        foreign_keys = set()
        table = db_utils.get_table(engine, table_name)
        inspector = reflection.Inspector.from_engine(engine)
        for column_dict in inspector.get_columns(table_name):
            column_name = column_dict['name']
            column = getattr(table.c, column_name)
            if column.foreign_keys:
                foreign_keys.add(column_name)
        return foreign_keys

    def get_indexed_columns(self, engine, table_name):
        indexed_columns = set()
        for index in db_utils.get_indexes(engine, table_name):
            for column_name in index['column_names']:
                indexed_columns.add(column_name)
        return indexed_columns

    def assert_each_foreign_key_is_part_of_an_index(self):
        engine = self.migrate_engine

        non_indexed_foreign_keys = set()

        for table_name in self.get_table_names(engine):
            indexed_columns = self.get_indexed_columns(engine, table_name)
            foreign_key_columns = self.get_foreign_key_columns(
                engine, table_name
            )
            for column_name in foreign_key_columns - indexed_columns:
                non_indexed_foreign_keys.add(table_name + '.' + column_name)

        self.assertSetEqual(set(), non_indexed_foreign_keys)

    def _check_127(self, engine, data):
        quota_usage_resource = db_utils.get_table(engine, 'quota_usages')
        self.assertIn('resource', quota_usage_resource.c)
        self.assertIsInstance(quota_usage_resource.c.resource.type,
                              self.VARCHAR_TYPE)
        self.assertEqual(300, quota_usage_resource.c.resource.type.length)

    def _check_128(self, engine, data):
        volume_transfer = db_utils.get_table(engine, 'transfers')
        self.assertIn('source_project_id', volume_transfer.c)
        self.assertIn('destination_project_id', volume_transfer.c)
        self.assertIn('accepted', volume_transfer.c)

    def _check_132(self, engine, data):
        """Test create default volume type."""
        vol_types = db_utils.get_table(engine, 'volume_types')
        vtype = (vol_types.select(vol_types.c.name ==
                                  volume_types.DEFAULT_VOLUME_TYPE)
                 .execute().first())
        self.assertIsNotNone(vtype)

    def _check_136(self, engine, data):
        """Test alter volume_type_id columns."""
        vol_table = db_utils.get_table(engine, 'volumes')
        snap_table = db_utils.get_table(engine, 'snapshots')
        encrypt_table = db_utils.get_table(engine, 'encryption')
        self.assertFalse(vol_table.c.volume_type_id.nullable)
        self.assertFalse(snap_table.c.volume_type_id.nullable)
        self.assertFalse(encrypt_table.c.volume_type_id.nullable)

    # NOTE: this test becomes slower with each addition of a new DB migration.
    # 'pymysql' is much slower on slow nodes than 'psycopg2', and such a
    # timeout is mostly required when testing the 'mysql' backend.
@test_utils.set_timeout(300) def test_walk_versions(self): self.walk_versions(False, False) self.assert_each_foreign_key_is_part_of_an_index() class TestSqliteMigrations(test_fixtures.OpportunisticDBTestMixin, MigrationsMixin, test_base.BaseTestCase): def assert_each_foreign_key_is_part_of_an_index(self): # Skip the test for SQLite because SQLite does not list # UniqueConstraints as indexes, which makes this test fail. # Given that SQLite is only for testing purposes, it is safe to skip pass class TestMysqlMigrations(test_fixtures.OpportunisticDBTestMixin, MigrationsMixin, test_base.BaseTestCase): FIXTURE = test_fixtures.MySQLOpportunisticFixture BOOL_TYPE = sqlalchemy.dialects.mysql.TINYINT @test_utils.set_timeout(300) def test_mysql_innodb(self): """Test that table creation on mysql only builds InnoDB tables.""" # add this to the global lists to make reset work with it, it's removed # automatically in tearDown so no need to clean it up here. # sanity check migration.db_sync(engine=self.migrate_engine) total = self.migrate_engine.execute( "SELECT count(*) " "from information_schema.TABLES " "where TABLE_SCHEMA='{0}'".format( self.migrate_engine.url.database)) self.assertGreater(total.scalar(), 0, msg="No tables found. Wrong schema?") noninnodb = self.migrate_engine.execute( "SELECT count(*) " "from information_schema.TABLES " "where TABLE_SCHEMA='openstack_citest' " "and ENGINE!='InnoDB' " "and TABLE_NAME!='migrate_version'") count = noninnodb.scalar() self.assertEqual(count, 0, "%d non InnoDB tables created" % count) def _check_127(self, engine, data): quota_usage_resource = db_utils.get_table(engine, 'quota_usages') self.assertIn('resource', quota_usage_resource.c) self.assertIsInstance(quota_usage_resource.c.resource.type, self.VARCHAR_TYPE) # Depending on the MariaDB version, and the page size, we may not have # been able to change quota_usage_resource to 300 chars, it could still # be 255. self.assertIn(quota_usage_resource.c.resource.type.length, (255, 300)) class TestPostgresqlMigrations(test_fixtures.OpportunisticDBTestMixin, MigrationsMixin, test_base.BaseTestCase): FIXTURE = test_fixtures.PostgresqlOpportunisticFixture TIME_TYPE = sqlalchemy.types.TIMESTAMP
super(MigrationsMixin, self).setUp()
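A note on the foreign-key check above: `assert_each_foreign_key_is_part_of_an_index` diffs each table's indexed columns against its foreign-key columns and fails on any FK left uncovered. The same idea can be reproduced standalone; the sketch below uses a hypothetical two-table schema, but the SQLAlchemy Inspector calls are the same ones the mixin relies on (plain SQLite is fine here because only regular indexes, not unique constraints, are inspected).

# Standalone sketch of the FK-must-be-indexed check over a made-up schema.
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
md = sa.MetaData()
sa.Table("a", md, sa.Column("id", sa.Integer, primary_key=True))
sa.Table("b", md,
         sa.Column("id", sa.Integer, primary_key=True),
         sa.Column("a_id", sa.Integer, sa.ForeignKey("a.id")))
md.create_all(engine)

inspector = sa.inspect(engine)
non_indexed = set()
for table in inspector.get_table_names():
    indexed = {c for ix in inspector.get_indexes(table)
               for c in ix["column_names"]}
    fks = {c for fk in inspector.get_foreign_keys(table)
           for c in fk["constrained_columns"]}
    non_indexed |= {f"{table}.{c}" for c in fks - indexed}

print(non_indexed)  # {'b.a_id'} -- the FK column has no covering index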
color_correction.rs
use alvr_common::glam::UVec2;
use alvr_graphics::{BindingDesc, TARGET_FORMAT};
use alvr_session::ColorCorrectionDesc;
use wgpu::{
    BindGroup, BindingResource, BindingType, CommandEncoder, Device, Extent3d, RenderPipeline,
    TextureDescriptor, TextureDimension, TextureSampleType, TextureUsages, TextureView,
    TextureViewDimension,
};

pub struct ColorCorrectionPass {
    input: TextureView,
    pipeline: RenderPipeline,
    bind_group: BindGroup,
}

impl ColorCorrectionPass {
    pub fn new(device: &Device, input_size: UVec2) -> Self {
        let texture = device.create_texture(&TextureDescriptor {
            label: None,
            size: Extent3d {
                width: input_size.x,
                height: input_size.y,
                depth_or_array_layers: 1,
            },
            mip_level_count: 1,
            sample_count: 1,
            dimension: TextureDimension::D2,
            format: TARGET_FORMAT,
            // The view below is bound as a sampled texture (BindingType::Texture),
            // which requires TEXTURE_BINDING usage; STORAGE_BINDING would fail
            // wgpu's bind group validation for this binding type.
            usage: TextureUsages::RENDER_ATTACHMENT | TextureUsages::TEXTURE_BINDING,
        });

        let input = texture.create_view(&Default::default());

        let (pipeline, bind_group) = alvr_graphics::create_default_render_pipeline(
            "color correction",
            device,
            include_str!("../resources/color_correction.wgsl"),
            vec![BindingDesc {
                index: 0,
                binding_type: BindingType::Texture {
                    sample_type: TextureSampleType::Float { filterable: false },
                    view_dimension: TextureViewDimension::D2,
                    multisampled: false,
                },
                array_size: None,
                resource: BindingResource::TextureView(&input),
            }],
            0,
        );

        Self {
            input,
            pipeline,
            bind_group,
        }
    }

    pub fn input(&self) -> &TextureView {
        &self.input
    }

    pub fn draw(
        &self,
        encoder: &mut CommandEncoder,
        desc: &ColorCorrectionDesc,
        output: &TextureView,
    ) {
        alvr_graphics::execute_default_pass(
            encoder,
            &self.pipeline,
            &self.bind_group,
            bytemuck::bytes_of(desc),
            output,
) } }
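The actual per-pixel work of ColorCorrectionPass lives in resources/color_correction.wgsl, which is not shown here. As a rough CPU-side illustration only, the numpy sketch below applies one common ordering of brightness, contrast, saturation, and gamma; the parameter names and the exact order used by ALVR's ColorCorrectionDesc shader are assumptions, not its actual code.

# Illustrative CPU version of a color-correction pass (the real work
# happens on the GPU in the WGSL shader; ordering here is an assumption).
import numpy as np

def color_correct(rgb, brightness=0.0, contrast=0.0, saturation=0.0, gamma=1.0):
    """rgb: float32 array in [0, 1] with shape (H, W, 3)."""
    out = rgb + brightness                       # additive brightness
    out = (out - 0.5) * (1.0 + contrast) + 0.5   # contrast about mid-gray
    luma = out @ np.array([0.299, 0.587, 0.114])  # Rec. 601 luminance
    out = luma[..., None] + (out - luma[..., None]) * (1.0 + saturation)
    out = np.clip(out, 0.0, 1.0) ** gamma        # gamma applied last
    return out.astype(np.float32)

frame = np.random.rand(4, 4, 3).astype(np.float32)
print(color_correct(frame, brightness=0.05, saturation=0.2).shape)  # (4, 4, 3)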
datahandler.py
from torch.utils.data import Dataset, DataLoader
import glob
import os
import numpy as np
import cv2
import torch
from torchvision import transforms, utils
from skimage.transform import resize


class SegDataset(Dataset):
    """Segmentation Dataset"""

    def __init__(self, root_dir, imageFolder, maskFolder, transform=None,
                 seed=None, fraction=None, subset=None, imagecolormode='rgb',
                 maskcolormode='grayscale'):
        """
        Args:
            root_dir (string): Directory with all the images; it should have
                the following structure.
                root
                --Images
                -----Img 1
                -----Img N
                --Mask
                -----Mask 1
                -----Mask N
            imageFolder (string) = 'Images': Name of the folder which
                contains the Images.
            maskFolder (string) = 'Masks': Name of the folder which contains
                the Masks.
            transform (callable, optional): Optional transform to be applied
                on a sample.
            seed: Specify a seed for the train and test split.
            fraction: A float value from 0 to 1 which specifies the
                validation split fraction.
            subset: 'Train' or 'Test' to select the appropriate set.
            imagecolormode: 'rgb' or 'grayscale'
            maskcolormode: 'rgb' or 'grayscale'
        """
        self.color_dict = {'rgb': 1, 'grayscale': 0}
        assert imagecolormode in ['rgb', 'grayscale']
        assert maskcolormode in ['rgb', 'grayscale']

        self.imagecolorflag = self.color_dict[imagecolormode]
        self.maskcolorflag = self.color_dict[maskcolormode]
        self.root_dir = root_dir
        self.transform = transform
        if not fraction:
            self.image_names = sorted(
                glob.glob(os.path.join(self.root_dir, imageFolder, '*')))
            self.mask_names = sorted(
                glob.glob(os.path.join(self.root_dir, maskFolder, '*')))
        else:
            assert subset in ['Train', 'Test']
            self.fraction = fraction
            self.image_list = np.array(
                sorted(glob.glob(os.path.join(self.root_dir, imageFolder,
                                              '*'))))
            self.mask_list = np.array(
                sorted(glob.glob(os.path.join(self.root_dir, maskFolder,
                                              '*'))))
            if seed:
                np.random.seed(seed)
            indices = np.arange(len(self.image_list))
            np.random.shuffle(indices)
            self.image_list = self.image_list[indices]
            self.mask_list = self.mask_list[indices]
            if subset == 'Train':
                self.image_names = self.image_list[:int(
                    np.ceil(len(self.image_list) * (1 - self.fraction)))]
                self.mask_names = self.mask_list[:int(
                    np.ceil(len(self.mask_list) * (1 - self.fraction)))]
            else:
                self.image_names = self.image_list[int(
                    np.ceil(len(self.image_list) * (1 - self.fraction))):]
                self.mask_names = self.mask_list[int(
                    np.ceil(len(self.mask_list) * (1 - self.fraction))):]

    def __len__(self):
    def __getitem__(self, idx):
        img_name = self.image_names[idx]
        if self.imagecolorflag:
            image = cv2.imread(
                img_name, self.imagecolorflag).transpose(2, 0, 1)
        else:
            image = cv2.imread(img_name, self.imagecolorflag)
        msk_name = self.mask_names[idx]
        if self.maskcolorflag:
            mask = cv2.imread(msk_name, self.maskcolorflag).transpose(2, 0, 1)
        else:
            mask = cv2.imread(msk_name, self.maskcolorflag)
        sample = {'image': image, 'mask': mask}

        if self.transform:
            sample = self.transform(sample)

        return sample


# Define a few transformations for the Segmentation Dataloader
class Resize(object):
    """Resize image and/or masks."""

    def __init__(self, imageresize, maskresize):
        self.imageresize = imageresize
        self.maskresize = maskresize

    def __call__(self, sample):
        image, mask = sample['image'], sample['mask']
        if len(image.shape) == 3:
            image = image.transpose(1, 2, 0)
        if len(mask.shape) == 3:
            mask = mask.transpose(1, 2, 0)
        # The third positional argument of cv2.resize is `dst`, so the
        # interpolation flag must be passed by keyword.
        mask = cv2.resize(mask, self.maskresize,
                          interpolation=cv2.INTER_AREA)
        # mask = 256 * resize(mask, (256, 256), anti_aliasing=True)
        image = cv2.resize(image, self.imageresize,
                           interpolation=cv2.INTER_AREA)
        # image = 256 * resize(image, (256, 256), anti_aliasing=True)
        if len(image.shape) == 3:
            image = image.transpose(2, 0, 1)
        if len(mask.shape) == 3:
            mask = mask.transpose(2, 0, 1)
        return {'image': image, 'mask': mask}


class ToTensor(object):
    """Convert ndarrays in sample to Tensors."""

    def __call__(self, sample, maskresize=None, imageresize=None):
        image, mask = sample['image'], sample['mask']
        if len(mask.shape) == 2:
            mask = mask.reshape((1,) + mask.shape)
        if len(image.shape) == 2:
            image = image.reshape((1,) + image.shape)
        return {'image': torch.from_numpy(image),
                'mask': torch.from_numpy(mask)}


class Normalize(object):
    """Normalize image"""

    def __call__(self, sample):
        image, mask = sample['image'], sample['mask']
        return {'image': image.type(torch.FloatTensor) / 255,
                'mask': mask.type(torch.FloatTensor) / 255}


def get_dataloader_single_folder(data_dir, imageFolder='Images',
                                 maskFolder='Masks', fraction=0.2,
                                 batch_size=4):
    """Create training and testing dataloaders from a single folder."""
    data_transforms = {
        'Train': transforms.Compose([Resize((256, 256), (256, 256)),
                                     ToTensor(), Normalize()]),
        'Test': transforms.Compose([Resize((256, 256), (256, 256)),
                                    ToTensor(), Normalize()]),
    }
    image_datasets = {
        x: SegDataset(data_dir, imageFolder=imageFolder,
                      maskFolder=maskFolder, seed=100, fraction=fraction,
                      subset=x, transform=data_transforms[x])
        for x in ['Train', 'Test']
    }
    dataloaders = {
        x: DataLoader(image_datasets[x], batch_size=batch_size, shuffle=True,
                      num_workers=8)
        for x in ['Train', 'Test']
    }
    return dataloaders
return len(self.image_names)
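Worth spelling out from SegDataset above: with a fixed seed, the shuffle is reproducible, so the 'Train' subset (the first ceil(N * (1 - fraction)) shuffled paths) and the 'Test' subset (the remainder) partition the data without overlap even though they are built by two separate dataset instances. A small sketch of just that arithmetic, with made-up file names:

# Standalone sketch of SegDataset's seed + ceil split logic.
import numpy as np

files = np.array([f"img_{i}.png" for i in range(10)])  # hypothetical paths
fraction, seed = 0.2, 100

np.random.seed(seed)
indices = np.arange(len(files))
np.random.shuffle(indices)
shuffled = files[indices]

cut = int(np.ceil(len(shuffled) * (1 - fraction)))  # 8 of 10 go to Train
train, test = shuffled[:cut], shuffled[cut:]
assert set(train).isdisjoint(test)  # same seed => disjoint subsets
print(len(train), len(test))  # 8 2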
models.py
import uuid

from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save


class Profile(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4,
                          editable=False)
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    profile_photo = models.ImageField(upload_to='profile_pictures/',
                                      blank=True, null=True)
    bio = models.TextField(max_length=200, blank=True)

    def __str__(self):
        return str(self.id)


class Image(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4,
                          editable=False)
    image = models.ImageField(upload_to='images/')
    image_name = models.CharField(max_length=50, blank=True)
    image_caption = models.TextField(blank=True)
    poster = models.ForeignKey(User, on_delete=models.CASCADE)

    def __str__(self):
        return str(self.id)

    def save_image(self):
        self.save()

    def delete_image(self):
        self.delete()

    def update_caption(self, caption):
        # Model instances have no update(); assign the field and save.
        self.image_caption = caption
        self.save()

    @classmethod
    def get_image_by_id(cls, n):
        return cls.objects.get(id=n)


def create_user_profile(sender, **kwargs):
    if kwargs['created']:
        Profile.objects.create(user=kwargs['instance'])


post_save.connect(create_user_profile, sender=User)


class Comment(models.Model):
    text = models.TextField()
commenter = models.ForeignKey(User, verbose_name="Comment", on_delete=models.CASCADE)
image = models.ForeignKey(Image, on_delete=models.CASCADE)
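The post_save.connect(...) wiring above creates a Profile row whenever a User is first saved. An equivalent, slightly more explicit spelling uses Django's @receiver decorator; this sketch assumes the same Profile model defined in the module above.

# Sketch: same Profile-on-signup behavior via the @receiver decorator,
# assuming the Profile model from the models module above.
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver


@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    if created:  # True only on the initial INSERT, not on later saves
        Profile.objects.create(user=instance)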
mml.rs
extern crate minuit; use minuit::prelude::*; fn
() { let mut demo = App::new("MinUIt Markup Language demonstration"); demo.load_mml_as_root("demo.mml"); demo.run(); }
main
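Each row in this dump stores a file split as prefix, suffix, and middle; for mml.rs the prefix ends at `fn ` and the elided middle is just the identifier `main`. Concatenating prefix + middle + suffix reconstructs the file, as the sketch below shows (whitespace approximated from the row above):

# Round-tripping a fill-in-the-middle row: prefix + middle + suffix
# reproduces the original source.
prefix = "extern crate minuit;\nuse minuit::prelude::*;\n\nfn "
middle = "main"
suffix = (
    '() {\n'
    '    let mut demo = App::new("MinUIt Markup Language demonstration");\n'
    '    demo.load_mml_as_root("demo.mml");\n'
    '    demo.run();\n'
    '}\n'
)

source = prefix + middle + suffix
assert source.startswith("extern crate minuit;") and "fn main()" in source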
main.rs
#![allow(clippy::integer_arithmetic)] use { crate::{bigtable::*, ledger_path::*}, clap::{ crate_description, crate_name, value_t, value_t_or_exit, values_t_or_exit, App, AppSettings, Arg, ArgMatches, SubCommand, }, dashmap::DashMap, itertools::Itertools, log::*, regex::Regex, serde::Serialize, serde_json::json, solana_clap_utils::{ input_parsers::{cluster_type_of, pubkey_of, pubkeys_of}, input_validators::{ is_parsable, is_pow2, is_pubkey, is_pubkey_or_keypair, is_slot, is_valid_percentage, }, }, solana_core::system_monitor_service::SystemMonitorService, solana_entry::entry::Entry, solana_ledger::{ ancestor_iterator::AncestorIterator, bank_forks_utils, blockstore::{create_new_ledger, Blockstore, PurgeType}, blockstore_db::{ self, AccessType, BlockstoreOptions, BlockstoreRecoveryMode, Database, LedgerColumnOptions, }, blockstore_processor::{BlockstoreProcessorError, ProcessOptions}, shred::Shred, }, solana_measure::measure::Measure, solana_runtime::{ accounts_db::{AccountsDbConfig, FillerAccountsConfig}, accounts_index::{AccountsIndexConfig, IndexLimitMb, ScanConfig}, bank::{Bank, RewardCalculationEvent}, bank_forks::BankForks, cost_model::CostModel, cost_tracker::CostTracker, hardened_unpack::{open_genesis_config, MAX_GENESIS_ARCHIVE_UNPACKED_SIZE}, runtime_config::RuntimeConfig, snapshot_archive_info::SnapshotArchiveInfoGetter, snapshot_config::SnapshotConfig, snapshot_hash::StartingSnapshotHashes, snapshot_utils::{ self, ArchiveFormat, SnapshotVersion, DEFAULT_MAX_FULL_SNAPSHOT_ARCHIVES_TO_RETAIN, DEFAULT_MAX_INCREMENTAL_SNAPSHOT_ARCHIVES_TO_RETAIN, }, }, solana_sdk::{ account::{AccountSharedData, ReadableAccount, WritableAccount}, account_utils::StateMut, clock::{Epoch, Slot}, feature::{self, Feature}, feature_set, genesis_config::{ClusterType, GenesisConfig}, hash::Hash, inflation::Inflation, native_token::{lamports_to_sol, sol_to_lamports, Sol}, pubkey::Pubkey, rent::Rent, shred_version::compute_shred_version, stake::{self, state::StakeState}, system_program, transaction::{MessageHash, SanitizedTransaction, SimpleAddressLoader}, }, solana_stake_program::stake_state::{self, PointValue}, solana_transaction_status::VersionedTransactionWithStatusMeta, solana_vote_program::{ self, vote_state::{self, VoteState}, }, std::{ collections::{BTreeMap, BTreeSet, HashMap, HashSet}, ffi::OsStr, fs::File, io::{self, stdout, BufRead, BufReader, Write}, path::{Path, PathBuf}, process::{exit, Command, Stdio}, str::FromStr, sync::{ atomic::{AtomicBool, Ordering}, Arc, RwLock, }, }, }; mod bigtable; mod ledger_path; #[derive(PartialEq)] enum LedgerOutputMethod { Print, Json, } fn output_slot_rewards(blockstore: &Blockstore, slot: Slot, method: &LedgerOutputMethod) { // Note: rewards are not output in JSON yet if *method == LedgerOutputMethod::Print { if let Ok(Some(rewards)) = blockstore.read_rewards(slot) { if !rewards.is_empty() { println!(" Rewards:"); println!( " {:<44} {:^15} {:<15} {:<20} {:>10}", "Address", "Type", "Amount", "New Balance", "Commission", ); for reward in rewards { let sign = if reward.lamports < 0 { "-" } else { "" }; println!( " {:<44} {:^15} {}◎{:<14.9} ◎{:<18.9} {}", reward.pubkey, if let Some(reward_type) = reward.reward_type { format!("{}", reward_type) } else { "-".to_string() }, sign, lamports_to_sol(reward.lamports.unsigned_abs()), lamports_to_sol(reward.post_balance), reward .commission .map(|commission| format!("{:>9}%", commission)) .unwrap_or_else(|| " -".to_string()) ); } } } } } fn output_entry( blockstore: &Blockstore, method: &LedgerOutputMethod, slot: Slot, 
entry_index: usize, entry: Entry, ) { match method { LedgerOutputMethod::Print => { println!( " Entry {} - num_hashes: {}, hash: {}, transactions: {}", entry_index, entry.num_hashes, entry.hash, entry.transactions.len() ); for (transactions_index, transaction) in entry.transactions.into_iter().enumerate() { println!(" Transaction {}", transactions_index); let tx_signature = transaction.signatures[0]; let tx_with_meta = blockstore .read_transaction_status((tx_signature, slot)) .unwrap_or_else(|err| { eprintln!( "Failed to read transaction status for {} at slot {}: {}", transaction.signatures[0], slot, err ); None }) .map(|meta| VersionedTransactionWithStatusMeta { transaction, meta }); if let Some(tx_with_meta) = tx_with_meta { let status = tx_with_meta.meta.into(); solana_cli_output::display::println_transaction( &tx_with_meta.transaction, Some(&status), " ", None, None, ); } } } LedgerOutputMethod::Json => { // Note: transaction status is not output in JSON yet serde_json::to_writer(stdout(), &entry).expect("serialize entry"); stdout().write_all(b",\n").expect("newline"); } } } fn output_slot( blockstore: &Blockstore, slot: Slot, allow_dead_slots: bool, method: &LedgerOutputMethod, verbose_level: u64, ) -> Result<(), String> { if blockstore.is_dead(slot) { if allow_dead_slots { if *method == LedgerOutputMethod::Print { println!(" Slot is dead"); } } else { return Err("Dead slot".to_string()); } } let (entries, num_shreds, is_full) = blockstore .get_slot_entries_with_shred_info(slot, 0, allow_dead_slots) .map_err(|err| format!("Failed to load entries for slot {}: {:?}", slot, err))?; if *method == LedgerOutputMethod::Print { if let Ok(Some(meta)) = blockstore.meta(slot) { if verbose_level >= 2 { println!(" Slot Meta {:?} is_full: {}", meta, is_full); } else { println!( " num_shreds: {}, parent_slot: {:?}, num_entries: {}, is_full: {}", num_shreds, meta.parent_slot, entries.len(), is_full, ); } } } if verbose_level >= 2 { for (entry_index, entry) in entries.into_iter().enumerate() { output_entry(blockstore, method, slot, entry_index, entry); } output_slot_rewards(blockstore, slot, method); } else if verbose_level >= 1 { let mut transactions = 0; let mut num_hashes = 0; let mut program_ids = HashMap::new(); let blockhash = if let Some(entry) = entries.last() { entry.hash } else { Hash::default() }; for entry in entries { transactions += entry.transactions.len(); num_hashes += entry.num_hashes; for transaction in entry.transactions { let tx_signature = transaction.signatures[0]; let sanitize_result = SanitizedTransaction::try_create( transaction, MessageHash::Compute, None, SimpleAddressLoader::Disabled, ); match sanitize_result { Ok(transaction) => { for (program_id, _) in transaction.message().program_instructions_iter() { *program_ids.entry(*program_id).or_insert(0) += 1; } } Err(err) => { warn!( "Failed to analyze unsupported transaction {}: {:?}", tx_signature, err ); } } } } println!( " Transactions: {}, hashes: {}, block_hash: {}", transactions, num_hashes, blockhash, ); println!(" Programs: {:?}", program_ids); } Ok(()) } fn output_ledger( blockstore: Blockstore, starting_slot: Slot, ending_slot: Slot, allow_dead_slots: bool, method: LedgerOutputMethod, num_slots: Option<Slot>, verbose_level: u64, only_rooted: bool, ) { let slot_iterator = blockstore .slot_meta_iterator(starting_slot) .unwrap_or_else(|err| { eprintln!( "Failed to load entries starting from slot {}: {:?}", starting_slot, err ); exit(1); }); if method == LedgerOutputMethod::Json { 
stdout().write_all(b"{\"ledger\":[\n").expect("open array"); } let num_slots = num_slots.unwrap_or(Slot::MAX); let mut num_printed = 0; for (slot, slot_meta) in slot_iterator { if only_rooted && !blockstore.is_root(slot) { continue; } if slot > ending_slot { break; } match method { LedgerOutputMethod::Print => { println!("Slot {} root?: {}", slot, blockstore.is_root(slot)) } LedgerOutputMethod::Json => { serde_json::to_writer(stdout(), &slot_meta).expect("serialize slot_meta"); stdout().write_all(b",\n").expect("newline"); } } if let Err(err) = output_slot(&blockstore, slot, allow_dead_slots, &method, verbose_level) { eprintln!("{}", err); } num_printed += 1; if num_printed >= num_slots as usize { break; } } if method == LedgerOutputMethod::Json { stdout().write_all(b"\n]}\n").expect("close array"); } } fn output_account( pubkey: &Pubkey, account: &AccountSharedData, modified_slot: Option<Slot>, print_account_data: bool, ) { println!("{}", pubkey); println!(" balance: {} SOL", lamports_to_sol(account.lamports())); println!(" owner: '{}'", account.owner()); println!(" executable: {}", account.executable()); if let Some(slot) = modified_slot { println!(" slot: {}", slot); } println!(" rent_epoch: {}", account.rent_epoch()); println!(" data_len: {}", account.data().len()); if print_account_data { println!(" data: '{}'", bs58::encode(account.data()).into_string()); } } fn render_dot(dot: String, output_file: &str, output_format: &str) -> io::Result<()> { let mut child = Command::new("dot") .arg(format!("-T{}", output_format)) .arg(format!("-o{}", output_file)) .stdin(Stdio::piped()) .spawn() .map_err(|err| { eprintln!("Failed to spawn dot: {:?}", err); err })?; let stdin = child.stdin.as_mut().unwrap(); stdin.write_all(&dot.into_bytes())?; let status = child.wait_with_output()?.status; if !status.success() { return Err(io::Error::new( io::ErrorKind::Other, format!("dot failed with error {}", status.code().unwrap_or(-1)), )); } Ok(()) } #[allow(clippy::cognitive_complexity)] fn graph_forks(bank_forks: &BankForks, include_all_votes: bool) -> String { let frozen_banks = bank_forks.frozen_banks(); let mut fork_slots: HashSet<_> = frozen_banks.keys().cloned().collect(); for (_, bank) in frozen_banks { for parent in bank.parents() { fork_slots.remove(&parent.slot()); } } // Search all forks and collect the last vote made by each validator let mut last_votes = HashMap::new(); let default_vote_state = VoteState::default(); for fork_slot in &fork_slots { let bank = &bank_forks[*fork_slot]; let total_stake = bank .vote_accounts() .iter() .map(|(_, (stake, _))| stake) .sum(); for (stake, vote_account) in bank.vote_accounts().values() { let vote_state = vote_account.vote_state(); let vote_state = vote_state.as_ref().unwrap_or(&default_vote_state); if let Some(last_vote) = vote_state.votes.iter().last() { let entry = last_votes.entry(vote_state.node_pubkey).or_insert(( last_vote.slot, vote_state.clone(), *stake, total_stake, )); if entry.0 < last_vote.slot { *entry = (last_vote.slot, vote_state.clone(), *stake, total_stake); } } } } // Figure the stake distribution at all the nodes containing the last vote from each // validator let mut slot_stake_and_vote_count = HashMap::new(); for (last_vote_slot, _, stake, total_stake) in last_votes.values() { let entry = slot_stake_and_vote_count .entry(last_vote_slot) .or_insert((0, 0, *total_stake)); entry.0 += 1; entry.1 += stake; assert_eq!(entry.2, *total_stake) } let mut dot = vec!["digraph {".to_string()]; // Build a subgraph consisting of all banks and links 
to their parent banks dot.push(" subgraph cluster_banks {".to_string()); dot.push(" style=invis".to_string()); let mut styled_slots = HashSet::new(); let mut all_votes: HashMap<Pubkey, HashMap<Slot, VoteState>> = HashMap::new(); for fork_slot in &fork_slots { let mut bank = bank_forks[*fork_slot].clone(); let mut first = true; loop { for (_, vote_account) in bank.vote_accounts().values() { let vote_state = vote_account.vote_state(); let vote_state = vote_state.as_ref().unwrap_or(&default_vote_state); if let Some(last_vote) = vote_state.votes.iter().last() { let validator_votes = all_votes.entry(vote_state.node_pubkey).or_default(); validator_votes .entry(last_vote.slot) .or_insert_with(|| vote_state.clone()); } } if !styled_slots.contains(&bank.slot()) { dot.push(format!( r#" "{}"[label="{} (epoch {})\nleader: {}{}{}",style="{}{}"];"#, bank.slot(), bank.slot(), bank.epoch(), bank.collector_id(), if let Some(parent) = bank.parent() { format!( "\ntransactions: {}", bank.transaction_count() - parent.transaction_count(), ) } else { "".to_string() }, if let Some((votes, stake, total_stake)) = slot_stake_and_vote_count.get(&bank.slot()) { format!( "\nvotes: {}, stake: {:.1} SOL ({:.1}%)", votes, lamports_to_sol(*stake), *stake as f64 / *total_stake as f64 * 100., ) } else { "".to_string() }, if first { "filled," } else { "" }, "" )); styled_slots.insert(bank.slot()); } first = false; match bank.parent() { None => { if bank.slot() > 0 { dot.push(format!(r#" "{}" -> "..." [dir=back]"#, bank.slot(),)); } break; } Some(parent) => { let slot_distance = bank.slot() - parent.slot(); let penwidth = if bank.epoch() > parent.epoch() { "5" } else { "1" }; let link_label = if slot_distance > 1 { format!("label=\"{} slots\",color=red", slot_distance) } else { "color=blue".to_string() }; dot.push(format!( r#" "{}" -> "{}"[{},dir=back,penwidth={}];"#, bank.slot(), parent.slot(), link_label, penwidth )); bank = parent.clone(); } } } } dot.push(" }".to_string()); // Strafe the banks with links from validators to the bank they last voted on, // while collecting information about the absent votes and stakes let mut absent_stake = 0; let mut absent_votes = 0; let mut lowest_last_vote_slot = std::u64::MAX; let mut lowest_total_stake = 0; for (node_pubkey, (last_vote_slot, vote_state, stake, total_stake)) in &last_votes { all_votes.entry(*node_pubkey).and_modify(|validator_votes| { validator_votes.remove(last_vote_slot); }); dot.push(format!( r#" "last vote {}"[shape=box,label="Latest validator vote: {}\nstake: {} SOL\nroot slot: {}\nvote history:\n{}"];"#, node_pubkey, node_pubkey, lamports_to_sol(*stake), vote_state.root_slot.unwrap_or(0), vote_state .votes .iter() .map(|vote| format!("slot {} (conf={})", vote.slot, vote.confirmation_count)) .collect::<Vec<_>>() .join("\n") )); dot.push(format!( r#" "last vote {}" -> "{}" [style=dashed,label="latest vote"];"#, node_pubkey, if styled_slots.contains(last_vote_slot) { last_vote_slot.to_string() } else { if *last_vote_slot < lowest_last_vote_slot { lowest_last_vote_slot = *last_vote_slot; lowest_total_stake = *total_stake; } absent_votes += 1; absent_stake += stake; "...".to_string() }, )); } // Annotate the final "..." node with absent vote and stake information if absent_votes > 0 { dot.push(format!( r#" "..."[label="...\nvotes: {}, stake: {:.1} SOL {:.1}%"];"#, absent_votes, lamports_to_sol(absent_stake), absent_stake as f64 / lowest_total_stake as f64 * 100., )); } // Add for vote information from all banks. 
if include_all_votes { for (node_pubkey, validator_votes) in &all_votes { for (vote_slot, vote_state) in validator_votes { dot.push(format!( r#" "{} vote {}"[shape=box,style=dotted,label="validator vote: {}\nroot slot: {}\nvote history:\n{}"];"#, node_pubkey, vote_slot, node_pubkey, vote_state.root_slot.unwrap_or(0), vote_state .votes .iter() .map(|vote| format!("slot {} (conf={})", vote.slot, vote.confirmation_count)) .collect::<Vec<_>>() .join("\n") )); dot.push(format!( r#" "{} vote {}" -> "{}" [style=dotted,label="vote"];"#, node_pubkey, vote_slot, if styled_slots.contains(vote_slot) { vote_slot.to_string() } else { "...".to_string() }, )); } } } dot.push("}".to_string()); dot.join("\n") } fn analyze_column< C: solana_ledger::blockstore_db::Column + solana_ledger::blockstore_db::ColumnName, >( db: &Database, name: &str, ) { let mut key_tot: u64 = 0; let mut val_hist = histogram::Histogram::new(); let mut val_tot: u64 = 0; let mut row_hist = histogram::Histogram::new(); let a = C::key_size() as u64; for (_x, y) in db.iter::<C>(blockstore_db::IteratorMode::Start).unwrap() { let b = y.len() as u64; key_tot += a; val_hist.increment(b).unwrap(); val_tot += b; row_hist.increment(a + b).unwrap(); } let json_result = if val_hist.entries() > 0 { json!({ "column":name, "entries":val_hist.entries(), "key_stats":{ "max":a, "total_bytes":key_tot, }, "val_stats":{ "p50":val_hist.percentile(50.0).unwrap(), "p90":val_hist.percentile(90.0).unwrap(), "p99":val_hist.percentile(99.0).unwrap(), "p999":val_hist.percentile(99.9).unwrap(), "min":val_hist.minimum().unwrap(), "max":val_hist.maximum().unwrap(), "stddev":val_hist.stddev().unwrap(), "total_bytes":val_tot, }, "row_stats":{ "p50":row_hist.percentile(50.0).unwrap(), "p90":row_hist.percentile(90.0).unwrap(), "p99":row_hist.percentile(99.0).unwrap(), "p999":row_hist.percentile(99.9).unwrap(), "min":row_hist.minimum().unwrap(), "max":row_hist.maximum().unwrap(), "stddev":row_hist.stddev().unwrap(), "total_bytes":key_tot + val_tot, }, }) } else { json!({ "column":name, "entries":val_hist.entries(), "key_stats":{ "max":a, "total_bytes":0, }, "val_stats":{ "total_bytes":0, }, "row_stats":{ "total_bytes":0, }, }) }; println!("{}", serde_json::to_string_pretty(&json_result).unwrap()); } fn analyze_storage(database: &Database) { use blockstore_db::columns::*; analyze_column::<SlotMeta>(database, "SlotMeta"); analyze_column::<Orphans>(database, "Orphans"); analyze_column::<DeadSlots>(database, "DeadSlots"); analyze_column::<DuplicateSlots>(database, "DuplicateSlots"); analyze_column::<ErasureMeta>(database, "ErasureMeta"); analyze_column::<BankHash>(database, "BankHash"); analyze_column::<Root>(database, "Root"); analyze_column::<Index>(database, "Index"); analyze_column::<ShredData>(database, "ShredData"); analyze_column::<ShredCode>(database, "ShredCode"); analyze_column::<TransactionStatus>(database, "TransactionStatus"); analyze_column::<AddressSignatures>(database, "AddressSignatures"); analyze_column::<TransactionMemos>(database, "TransactionMemos"); analyze_column::<TransactionStatusIndex>(database, "TransactionStatusIndex"); analyze_column::<Rewards>(database, "Rewards"); analyze_column::<Blocktime>(database, "Blocktime"); analyze_column::<PerfSamples>(database, "PerfSamples"); analyze_column::<BlockHeight>(database, "BlockHeight"); analyze_column::<ProgramCosts>(database, "ProgramCosts"); } fn open_blockstore( ledger_path: &Path, access_type: AccessType, wal_recovery_mode: Option<BlockstoreRecoveryMode>, ) -> Blockstore { match 
Blockstore::open_with_options( ledger_path, BlockstoreOptions { access_type, recovery_mode: wal_recovery_mode, enforce_ulimit_nofile: true, ..BlockstoreOptions::default() }, ) { Ok(blockstore) => blockstore, Err(err) => { eprintln!("Failed to open ledger at {:?}: {:?}", ledger_path, err); exit(1); } } } // This function is duplicated in validator/src/main.rs... fn hardforks_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Slot>> { if matches.is_present(name) { Some(values_t_or_exit!(matches, name, Slot)) } else { None } } fn load_bank_forks( arg_matches: &ArgMatches, genesis_config: &GenesisConfig, blockstore: &Blockstore, process_options: ProcessOptions, snapshot_archive_path: Option<PathBuf>, ) -> Result<(Arc<RwLock<BankForks>>, Option<StartingSnapshotHashes>), BlockstoreProcessorError> { let bank_snapshots_dir = blockstore .ledger_path() .join(if blockstore.is_primary_access() { "snapshot" } else { "snapshot.ledger-tool" }); let snapshot_config = if arg_matches.is_present("no_snapshot") { None } else { let snapshot_archives_dir = snapshot_archive_path.unwrap_or_else(|| blockstore.ledger_path().to_path_buf()); Some(SnapshotConfig { full_snapshot_archive_interval_slots: Slot::MAX, incremental_snapshot_archive_interval_slots: Slot::MAX, snapshot_archives_dir, bank_snapshots_dir, ..SnapshotConfig::default() }) }; let account_paths = if let Some(account_paths) = arg_matches.value_of("account_paths") { if !blockstore.is_primary_access() { // Be defensive, when default account dir is explicitly specified, it's still possible // to wipe the dir possibly shared by the running validator! eprintln!("Error: custom accounts path is not supported under secondary access"); exit(1); } account_paths.split(',').map(PathBuf::from).collect() } else if blockstore.is_primary_access() { vec![blockstore.ledger_path().join("accounts")] } else { let non_primary_accounts_path = blockstore.ledger_path().join("accounts.ledger-tool"); warn!( "Default accounts path is switched aligning with Blockstore's secondary access: {:?}", non_primary_accounts_path ); vec![non_primary_accounts_path] }; bank_forks_utils::load( genesis_config, blockstore, account_paths, None, snapshot_config.as_ref(), process_options, None, None, None, ) .map(|(bank_forks, .., starting_snapshot_hashes)| (bank_forks, starting_snapshot_hashes)) } fn compute_slot_cost(blockstore: &Blockstore, slot: Slot) -> Result<(), String> { if blockstore.is_dead(slot) { return Err("Dead slot".to_string()); } let (entries, _num_shreds, _is_full) = blockstore .get_slot_entries_with_shred_info(slot, 0, false) .map_err(|err| format!(" Slot: {}, Failed to load entries, err {:?}", slot, err))?; let num_entries = entries.len(); let mut num_transactions = 0; let mut num_programs = 0; let mut program_ids = HashMap::new(); let mut cost_model = CostModel::default(); cost_model.initialize_cost_table(&blockstore.read_program_costs().unwrap()); let mut cost_tracker = CostTracker::default(); for entry in entries { num_transactions += entry.transactions.len(); entry .transactions .into_iter() .filter_map(|transaction| { SanitizedTransaction::try_create( transaction, MessageHash::Compute, None, SimpleAddressLoader::Disabled, ) .map_err(|err| { warn!("Failed to compute cost of transaction: {:?}", err); }) .ok() }) .for_each(|transaction| { num_programs += transaction.message().instructions().len(); let tx_cost = cost_model.calculate_cost(&transaction); let result = cost_tracker.try_add(&tx_cost); if result.is_err() { println!( "Slot: {}, CostModel rejected transaction {:?}, 
reason {:?}", slot, transaction, result, ); } for (program_id, _instruction) in transaction.message().program_instructions_iter() { *program_ids.entry(*program_id).or_insert(0) += 1; } }); } println!( "Slot: {}, Entries: {}, Transactions: {}, Programs {}", slot, num_entries, num_transactions, num_programs, ); println!(" Programs: {:?}", program_ids); Ok(()) } fn open_genesis_config_by(ledger_path: &Path, matches: &ArgMatches<'_>) -> GenesisConfig { let max_genesis_archive_unpacked_size = value_t_or_exit!(matches, "max_genesis_archive_unpacked_size", u64); open_genesis_config(ledger_path, max_genesis_archive_unpacked_size) } fn assert_capitalization(bank: &Bank) { let debug_verify = true; assert!(bank.calculate_and_verify_capitalization(debug_verify)); } #[cfg(not(target_env = "msvc"))] use jemallocator::Jemalloc; #[cfg(not(target_env = "msvc"))] #[global_allocator] static GLOBAL: Jemalloc = Jemalloc; #[allow(clippy::cognitive_complexity)] fn main() { // Ignore SIGUSR1 to prevent long-running calls being killed by logrotate // in warehouse deployments #[cfg(unix)] { // `register()` is unsafe because the action is called in a signal handler // with the usual caveats. So long as this action body stays empty, we'll // be fine unsafe { signal_hook::low_level::register(signal_hook::consts::SIGUSR1, || {}) }.unwrap(); } const DEFAULT_ROOT_COUNT: &str = "1"; const DEFAULT_MAX_SLOTS_ROOT_REPAIR: &str = "2000"; solana_logger::setup_with_default("solana=info"); let starting_slot_arg = Arg::with_name("starting_slot") .long("starting-slot") .value_name("SLOT") .takes_value(true) .default_value("0") .help("Start at this slot"); let ending_slot_arg = Arg::with_name("ending_slot") .long("ending-slot") .value_name("SLOT") .takes_value(true) .help("The last slot to iterate to"); let no_snapshot_arg = Arg::with_name("no_snapshot") .long("no-snapshot") .takes_value(false) .help("Do not start from a local snapshot if present"); let no_bpf_jit_arg = Arg::with_name("no_bpf_jit") .long("no-bpf-jit") .takes_value(false) .help("Disable the just-in-time compiler and instead use the interpreter for BP"); let no_accounts_db_caching_arg = Arg::with_name("no_accounts_db_caching") .long("no-accounts-db-caching") .takes_value(false) .help("Disables accounts-db caching"); let accounts_index_bins = Arg::with_name("accounts_index_bins") .long("accounts-index-bins") .value_name("BINS") .validator(is_pow2) .takes_value(true) .help("Number of bins to divide the accounts index into"); let accounts_index_limit = Arg::with_name("accounts_index_memory_limit_mb") .long("accounts-index-memory-limit-mb") .value_name("MEGABYTES") .validator(is_parsable::<usize>) .takes_value(true) .help("How much memory the accounts index can consume. If this is exceeded, some account index entries will be stored on disk. If missing, the entire index is stored in memory."); let disable_disk_index = Arg::with_name("disable_accounts_disk_index") .long("disable-accounts-disk-index") .help("Disable the disk-based accounts index if it is enabled by default.") .conflicts_with("accounts_index_memory_limit_mb"); let accountsdb_skip_shrink = Arg::with_name("accounts_db_skip_shrink") .long("accounts-db-skip-shrink") .help( "Enables faster starting of ledger-tool by skipping shrink. 
\ This option is for use during testing.", ); let accounts_filler_count = Arg::with_name("accounts_filler_count") .long("accounts-filler-count") .value_name("COUNT") .validator(is_parsable::<usize>) .takes_value(true) .default_value("0") .help("How many accounts to add to stress the system. Accounts are ignored in operations related to correctness."); let accounts_filler_size = Arg::with_name("accounts_filler_size") .long("accounts-filler-size") .value_name("BYTES") .validator(is_parsable::<usize>) .takes_value(true) .default_value("0") .requires("accounts_filler_count") .help("Size per filler account in bytes."); let account_paths_arg = Arg::with_name("account_paths") .long("accounts") .value_name("PATHS") .takes_value(true) .help("Comma separated persistent accounts location"); let accounts_index_path_arg = Arg::with_name("accounts_index_path") .long("accounts-index-path") .value_name("PATH") .takes_value(true) .multiple(true) .help( "Persistent accounts-index location. \ May be specified multiple times. \ [default: [ledger]/accounts_index]", ); let accounts_db_test_hash_calculation_arg = Arg::with_name("accounts_db_test_hash_calculation") .long("accounts-db-test-hash-calculation") .help("Enable hash calculation test"); let halt_at_slot_arg = Arg::with_name("halt_at_slot") .long("halt-at-slot") .value_name("SLOT") .validator(is_slot) .takes_value(true) .help("Halt processing at the given slot"); let skip_rewrites_arg = Arg::with_name("accounts_db_skip_rewrites") .long("accounts-db-skip-rewrites") .help( "Accounts that are rent exempt and have no changes are not rewritten. \ This produces snapshots that older versions cannot read.", ) .hidden(true); let verify_index_arg = Arg::with_name("verify_accounts_index") .long("verify-accounts-index") .takes_value(false) .help("For debugging and tests on accounts index."); let limit_load_slot_count_from_snapshot_arg = Arg::with_name("limit_load_slot_count_from_snapshot") .long("limit-load-slot-count-from-snapshot") .value_name("SLOT") .validator(is_slot) .takes_value(true) .help("For debugging and profiling with large snapshots, artificially limit how many slots are loaded from a snapshot."); let hard_forks_arg = Arg::with_name("hard_forks") .long("hard-fork") .value_name("SLOT") .validator(is_slot) .multiple(true) .takes_value(true) .help("Add a hard fork at this slot"); let allow_dead_slots_arg = Arg::with_name("allow_dead_slots") .long("allow-dead-slots") .takes_value(false) .help("Output dead slots as well"); let default_genesis_archive_unpacked_size = MAX_GENESIS_ARCHIVE_UNPACKED_SIZE.to_string(); let max_genesis_archive_unpacked_size_arg = Arg::with_name("max_genesis_archive_unpacked_size") .long("max-genesis-archive-unpacked-size") .value_name("NUMBER") .takes_value(true) .default_value(&default_genesis_archive_unpacked_size) .help("maximum total uncompressed size of unpacked genesis archive"); let hashes_per_tick = Arg::with_name("hashes_per_tick") .long("hashes-per-tick") .value_name("NUM_HASHES|\"sleep\"") .takes_value(true) .help( "How many PoH hashes to roll before emitting the next tick. 
\ If \"sleep\", for development \ sleep for the target tick duration instead of hashing", ); let snapshot_version_arg = Arg::with_name("snapshot_version") .long("snapshot-version") .value_name("SNAPSHOT_VERSION") .validator(is_parsable::<SnapshotVersion>) .takes_value(true) .default_value(SnapshotVersion::default().into()) .help("Output snapshot version"); let default_max_full_snapshot_archives_to_retain = &DEFAULT_MAX_FULL_SNAPSHOT_ARCHIVES_TO_RETAIN.to_string(); let maximum_full_snapshot_archives_to_retain = Arg::with_name( "maximum_full_snapshots_to_retain", ) .long("maximum-full-snapshots-to-retain") .alias("maximum-snapshots-to-retain") .value_name("NUMBER") .takes_value(true) .default_value(default_max_full_snapshot_archives_to_retain) .help( "The maximum number of full snapshot archives to hold on to when purging older snapshots.", ); let default_max_incremental_snapshot_archives_to_retain = &DEFAULT_MAX_INCREMENTAL_SNAPSHOT_ARCHIVES_TO_RETAIN.to_string(); let maximum_incremental_snapshot_archives_to_retain = Arg::with_name( "maximum_incremental_snapshots_to_retain", ) .long("maximum-incremental-snapshots-to-retain") .value_name("NUMBER") .takes_value(true) .default_value(default_max_incremental_snapshot_archives_to_retain) .help("The maximum number of incremental snapshot archives to hold on to when purging older snapshots."); let rent = Rent::default(); let default_bootstrap_validator_lamports = &sol_to_lamports(500.0) .max(VoteState::get_rent_exempt_reserve(&rent)) .to_string(); let default_bootstrap_validator_stake_lamports = &sol_to_lamports(0.5) .max(rent.minimum_balance(StakeState::size_of())) .to_string(); let matches = App::new(crate_name!()) .about(crate_description!()) .version(solana_version::version!()) .setting(AppSettings::InferSubcommands) .setting(AppSettings::SubcommandRequiredElseHelp) .setting(AppSettings::VersionlessSubcommands) .arg( Arg::with_name("ledger_path") .short("l") .long("ledger") .value_name("DIR") .takes_value(true) .global(true) .default_value("ledger") .help("Use DIR as ledger location"), ) .arg( Arg::with_name("wal_recovery_mode") .long("wal-recovery-mode") .value_name("MODE") .takes_value(true) .global(true) .possible_values(&[ "tolerate_corrupted_tail_records", "absolute_consistency", "point_in_time", "skip_any_corrupted_record"]) .help( "Mode to recovery the ledger db write ahead log" ), ) .arg( Arg::with_name("snapshot_archive_path") .long("snapshot-archive-path") .value_name("DIR") .takes_value(true) .global(true) .help("Use DIR for ledger location"), ) .arg( Arg::with_name("output_format") .long("output") .value_name("FORMAT") .global(true) .takes_value(true) .possible_values(&["json", "json-compact"]) .help("Return information in specified output format, \ currently only available for bigtable subcommands"), ) .arg( Arg::with_name("verbose") .short("v") .long("verbose") .global(true) .multiple(true) .takes_value(false) .help("Show additional information where supported"), ) .bigtable_subcommand() .subcommand( SubCommand::with_name("print") .about("Print the ledger") .arg(&starting_slot_arg) .arg(&allow_dead_slots_arg) .arg(&ending_slot_arg) .arg( Arg::with_name("num_slots") .long("num-slots") .value_name("SLOT") .validator(is_slot) .takes_value(true) .help("Number of slots to print"), ) .arg( Arg::with_name("only_rooted") .long("only-rooted") .takes_value(false) .help("Only print root slots"), ) ) .subcommand( SubCommand::with_name("copy") .about("Copy the ledger") .arg(&starting_slot_arg) .arg(&ending_slot_arg) .arg( 
Arg::with_name("target_db") .long("target-db") .value_name("DIR") .takes_value(true) .help("Target db"), ) ) .subcommand( SubCommand::with_name("slot") .about("Print the contents of one or more slots") .arg( Arg::with_name("slots") .index(1) .value_name("SLOTS") .validator(is_slot) .takes_value(true) .multiple(true) .required(true) .help("Slots to print"), ) .arg(&allow_dead_slots_arg) ) .subcommand( SubCommand::with_name("dead-slots") .arg(&starting_slot_arg) .about("Print all the dead slots in the ledger") ) .subcommand( SubCommand::with_name("duplicate-slots") .arg(&starting_slot_arg) .about("Print all the duplicate slots in the ledger") ) .subcommand( SubCommand::with_name("set-dead-slot") .about("Mark one or more slots dead") .arg( Arg::with_name("slots") .index(1) .value_name("SLOTS") .validator(is_slot) .takes_value(true) .multiple(true) .required(true) .help("Slots to mark dead"), ) ) .subcommand( SubCommand::with_name("remove-dead-slot") .about("Remove the dead flag for a slot") .arg( Arg::with_name("slots") .index(1) .value_name("SLOTS") .validator(is_slot) .takes_value(true) .multiple(true) .required(true) .help("Slots to mark as not dead"), ) ) .subcommand( SubCommand::with_name("genesis") .about("Prints the ledger's genesis config") .arg(&max_genesis_archive_unpacked_size_arg) .arg( Arg::with_name("accounts") .long("accounts") .takes_value(false) .help("Print the ledger's genesis accounts"), ) .arg( Arg::with_name("no_account_data") .long("no-account-data") .takes_value(false) .requires("accounts") .help("Do not print account data when printing account contents."), ) ) .subcommand( SubCommand::with_name("genesis-hash") .about("Prints the ledger's genesis hash") .arg(&max_genesis_archive_unpacked_size_arg) ) .subcommand( SubCommand::with_name("parse_full_frozen") .about("Parses log for information about critical events about \ ancestors of the given `ending_slot`") .arg(&starting_slot_arg) .arg(&ending_slot_arg) .arg( Arg::with_name("log_path") .long("log-path") .value_name("PATH") .takes_value(true) .help("path to log file to parse"), ) ) .subcommand( SubCommand::with_name("modify-genesis") .about("Modifies genesis parameters") .arg(&max_genesis_archive_unpacked_size_arg) .arg(&hashes_per_tick) .arg( Arg::with_name("cluster_type") .long("cluster-type") .possible_values(&ClusterType::STRINGS) .takes_value(true) .help( "Selects the features that will be enabled for the cluster" ), ) .arg( Arg::with_name("output_directory") .index(1) .value_name("DIR") .takes_value(true) .help("Output directory for the modified genesis config"), ) ) .subcommand( SubCommand::with_name("shred-version") .about("Prints the ledger's shred hash") .arg(&hard_forks_arg) .arg(&max_genesis_archive_unpacked_size_arg) ) .subcommand( SubCommand::with_name("shred-meta") .about("Prints raw shred metadata") .arg(&starting_slot_arg) .arg(&ending_slot_arg) ) .subcommand( SubCommand::with_name("bank-hash") .about("Prints the hash of the working bank after reading the ledger") .arg(&max_genesis_archive_unpacked_size_arg) ) .subcommand( SubCommand::with_name("bounds") .about("Print lowest and highest non-empty slots. 
\ Note that there may be empty slots within the bounds") .arg( Arg::with_name("all") .long("all") .takes_value(false) .required(false) .help("Additionally print all the non-empty slots within the bounds"), ) ).subcommand( SubCommand::with_name("json") .about("Print the ledger in JSON format") .arg(&starting_slot_arg) .arg(&allow_dead_slots_arg) ) .subcommand( SubCommand::with_name("verify") .about("Verify the ledger") .arg(&no_snapshot_arg) .arg(&account_paths_arg) .arg(&accounts_index_path_arg) .arg(&halt_at_slot_arg) .arg(&limit_load_slot_count_from_snapshot_arg) .arg(&accounts_index_bins) .arg(&accounts_index_limit) .arg(&disable_disk_index) .arg(&accountsdb_skip_shrink) .arg(&accounts_filler_count) .arg(&accounts_filler_size) .arg(&verify_index_arg) .arg(&skip_rewrites_arg) .arg(&hard_forks_arg) .arg(&no_accounts_db_caching_arg) .arg(&accounts_db_test_hash_calculation_arg) .arg(&no_bpf_jit_arg) .arg(&allow_dead_slots_arg) .arg(&max_genesis_archive_unpacked_size_arg) .arg( Arg::with_name("skip_poh_verify") .long("skip-poh-verify") .takes_value(false) .help("Skip ledger PoH verification"), ) .arg( Arg::with_name("print_accounts_stats") .long("print-accounts-stats") .takes_value(false) .help("After verifying the ledger, print some information about the account stores"), ) ).subcommand( SubCommand::with_name("graph") .about("Create a Graphviz rendering of the ledger") .arg(&no_snapshot_arg) .arg(&account_paths_arg) .arg(&halt_at_slot_arg) .arg(&hard_forks_arg) .arg(&max_genesis_archive_unpacked_size_arg) .arg( Arg::with_name("include_all_votes") .long("include-all-votes") .help("Include all votes in the graph"), ) .arg( Arg::with_name("graph_filename") .index(1) .value_name("FILENAME") .takes_value(true) .help("Output file"), ) ).subcommand( SubCommand::with_name("create-snapshot") .about("Create a new ledger snapshot") .arg(&no_snapshot_arg) .arg(&account_paths_arg) .arg(&hard_forks_arg) .arg(&max_genesis_archive_unpacked_size_arg) .arg(&snapshot_version_arg) .arg(&maximum_full_snapshot_archives_to_retain) .arg(&maximum_incremental_snapshot_archives_to_retain) .arg( Arg::with_name("snapshot_slot") .index(1) .value_name("SLOT") .validator(|value| { if value.parse::<Slot>().is_ok() || value == "ROOT" { Ok(()) } else { Err(format!( "Unable to parse as a number or the keyword ROOT, provided: {}", value )) } }) .takes_value(true) .help("Slot at which to create the snapshot; accepts keyword ROOT for the highest root"), ) .arg( Arg::with_name("output_directory") .index(2) .value_name("DIR") .takes_value(true) .help("Output directory for the snapshot [default: --ledger directory]"), ) .arg( Arg::with_name("warp_slot") .required(false) .long("warp-slot") .takes_value(true) .value_name("WARP_SLOT") .validator(is_slot) .help("After loading the snapshot slot warp the ledger to WARP_SLOT, \ which could be a slot in a galaxy far far away"), ) .arg( Arg::with_name("faucet_lamports") .short("t") .long("faucet-lamports") .value_name("LAMPORTS") .takes_value(true) .requires("faucet_pubkey") .help("Number of lamports to assign to the faucet"), ) .arg( Arg::with_name("faucet_pubkey") .short("m") .long("faucet-pubkey") .value_name("PUBKEY") .takes_value(true) .validator(is_pubkey_or_keypair) .requires("faucet_lamports") .help("Path to file containing the faucet's pubkey"), ) .arg( Arg::with_name("bootstrap_validator") .short("b") .long("bootstrap-validator") .value_name("IDENTITY_PUBKEY VOTE_PUBKEY STAKE_PUBKEY") .takes_value(true) .validator(is_pubkey_or_keypair) .number_of_values(3) .multiple(true) 
.help("The bootstrap validator's identity, vote and stake pubkeys"), ) .arg( Arg::with_name("bootstrap_stake_authorized_pubkey") .long("bootstrap-stake-authorized-pubkey") .value_name("BOOTSTRAP STAKE AUTHORIZED PUBKEY") .takes_value(true) .validator(is_pubkey_or_keypair) .help( "Path to file containing the pubkey authorized to manage the bootstrap \ validator's stake [default: --bootstrap-validator IDENTITY_PUBKEY]", ), ) .arg( Arg::with_name("bootstrap_validator_lamports") .long("bootstrap-validator-lamports") .value_name("LAMPORTS") .takes_value(true) .default_value(default_bootstrap_validator_lamports) .help("Number of lamports to assign to the bootstrap validator"), ) .arg( Arg::with_name("bootstrap_validator_stake_lamports") .long("bootstrap-validator-stake-lamports") .value_name("LAMPORTS") .takes_value(true) .default_value(default_bootstrap_validator_stake_lamports) .help("Number of lamports to assign to the bootstrap validator's stake account"), ) .arg( Arg::with_name("rent_burn_percentage") .long("rent-burn-percentage") .value_name("NUMBER") .takes_value(true) .help("Adjust percentage of collected rent to burn") .validator(is_valid_percentage), ) .arg(&hashes_per_tick) .arg( Arg::with_name("accounts_to_remove") .required(false) .long("remove-account") .takes_value(true) .value_name("PUBKEY") .validator(is_pubkey) .multiple(true) .help("List of accounts to remove while creating the snapshot"), ) .arg( Arg::with_name("vote_accounts_to_destake") .required(false) .long("destake-vote-account") .takes_value(true) .value_name("PUBKEY") .validator(is_pubkey) .multiple(true) .help("List of validator vote accounts to destake") ) .arg( Arg::with_name("remove_stake_accounts") .required(false) .long("remove-stake-accounts") .takes_value(false) .help("Remove all existing stake accounts from the new snapshot") ) .arg( Arg::with_name("incremental") .long("incremental") .takes_value(false) .help("Create an incremental snapshot instead of a full snapshot. 
This requires \ that the ledger is loaded from a full snapshot, which will be used as the \ base for the incremental snapshot.") .conflicts_with("no_snapshot") ) ).subcommand( SubCommand::with_name("accounts") .about("Print account stats and contents after processing the ledger") .arg(&no_snapshot_arg) .arg(&account_paths_arg) .arg(&halt_at_slot_arg) .arg(&hard_forks_arg) .arg( Arg::with_name("include_sysvars") .long("include-sysvars") .takes_value(false) .help("Include sysvars too"), ) .arg( Arg::with_name("no_account_contents") .long("no-account-contents") .takes_value(false) .help("Do not print contents of each account, which is very slow with lots of accounts."), ) .arg(Arg::with_name("no_account_data") .long("no-account-data") .takes_value(false) .help("Do not print account data when printing account contents."), ) .arg(&max_genesis_archive_unpacked_size_arg) ).subcommand( SubCommand::with_name("capitalization") .about("Print capitalization (aka, total supply) while checksumming it") .arg(&no_snapshot_arg) .arg(&account_paths_arg) .arg(&halt_at_slot_arg) .arg(&hard_forks_arg) .arg(&max_genesis_archive_unpacked_size_arg) .arg( Arg::with_name("warp_epoch") .required(false) .long("warp-epoch") .takes_value(true) .value_name("WARP_EPOCH") .help("After loading the snapshot warp the ledger to WARP_EPOCH, \ which could be an epoch in a galaxy far far away"), ) .arg( Arg::with_name("inflation") .required(false) .long("inflation") .takes_value(true) .possible_values(&["pico", "full", "none"]) .help("Overwrite inflation when warping"), ) .arg( Arg::with_name("enable_credits_auto_rewind") .required(false) .long("enable-credits-auto-rewind") .takes_value(false) .help("Enable credits auto rewind"), ) .arg( Arg::with_name("recalculate_capitalization") .required(false) .long("recalculate-capitalization") .takes_value(false) .help("Recalculate capitalization before warping; circumvents \ bank's out-of-sync capitalization"), ) .arg( Arg::with_name("csv_filename") .long("csv-filename") .value_name("FILENAME") .takes_value(true) .help("Output file in the csv format"), ) ).subcommand( SubCommand::with_name("purge") .about("Delete a range of slots from the ledger") .arg( Arg::with_name("start_slot") .index(1) .value_name("SLOT") .takes_value(true) .required(true) .help("Start slot to purge from (inclusive)"), ) .arg( Arg::with_name("end_slot") .index(2) .value_name("SLOT") .help("Ending slot to stop purging (inclusive) \ [default: the highest slot in the ledger]"), ) .arg( Arg::with_name("batch_size") .long("batch-size") .value_name("NUM") .takes_value(true) .default_value("1000") .help("Removes at most BATCH_SIZE slots while purging in loop"), ) .arg( Arg::with_name("no_compaction") .long("no-compaction") .required(false) .takes_value(false) .help("Skip ledger compaction after purge") ) .arg( Arg::with_name("dead_slots_only") .long("dead-slots-only") .required(false) .takes_value(false) .help("Limit purging to dead slots only") ) ) .subcommand( SubCommand::with_name("list-roots") .about("Output up to last <num-roots> root hashes and their \ heights starting at the given block height") .arg( Arg::with_name("max_height") .long("max-height") .value_name("NUM") .takes_value(true) .help("Maximum block height") ) .arg( Arg::with_name("start_root") .long("start-root") .value_name("NUM") .takes_value(true) .help("First root to start searching from") ) .arg( Arg::with_name("slot_list") .long("slot-list") .value_name("FILENAME") .required(false) .takes_value(true) .help("The location of the output YAML file. 
A list of \ rollback slot heights and hashes will be written to the file") ) .arg( Arg::with_name("num_roots") .long("num-roots") .value_name("NUM") .takes_value(true) .default_value(DEFAULT_ROOT_COUNT) .required(false) .help("Number of roots in the output"), ) ) .subcommand( SubCommand::with_name("repair-roots") .about("Traverses the AncestorIterator backward from a last known root \ to restore missing roots to the Root column") .arg( Arg::with_name("start_root") .long("before") .value_name("NUM") .takes_value(true) .help("First good root after the range to repair") ) .arg( Arg::with_name("end_root") .long("until") .value_name("NUM") .takes_value(true) .help("Last slot to check for root repair") ) .arg( Arg::with_name("max_slots") .long("repair-limit") .value_name("NUM") .takes_value(true) .default_value(DEFAULT_MAX_SLOTS_ROOT_REPAIR) .required(true) .help("Override the maximum number of slots to check for root repair") ) ) .subcommand( SubCommand::with_name("analyze-storage") .about("Output statistics in JSON format about \ all column families in the ledger rocksdb") ) .subcommand( SubCommand::with_name("compute-slot-cost") .about("runs cost_model over the block at the given slots, \ computes how expensive a block was based on cost_model") .arg( Arg::with_name("slots") .index(1) .value_name("SLOTS") .validator(is_slot) .multiple(true) .takes_value(true) .help("Slots that their blocks are computed for cost, default to all slots in ledger"), ) ) .get_matches(); info!("{} {}", crate_name!(), solana_version::version!()); let ledger_path = parse_ledger_path(&matches, "ledger_path"); let snapshot_archive_path = value_t!(matches, "snapshot_archive_path", String) .ok() .map(PathBuf::from); let wal_recovery_mode = matches .value_of("wal_recovery_mode") .map(BlockstoreRecoveryMode::from); let verbose_level = matches.occurrences_of("verbose"); if let ("bigtable", Some(arg_matches)) = matches.subcommand() { bigtable_process_command(&ledger_path, arg_matches) } else { let ledger_path = canonicalize_ledger_path(&ledger_path); match matches.subcommand() { ("print", Some(arg_matches)) => { let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot); let ending_slot = value_t!(arg_matches, "ending_slot", Slot).unwrap_or(Slot::MAX); let num_slots = value_t!(arg_matches, "num_slots", Slot).ok(); let allow_dead_slots = arg_matches.is_present("allow_dead_slots"); let only_rooted = arg_matches.is_present("only_rooted"); output_ledger( open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ), starting_slot, ending_slot, allow_dead_slots, LedgerOutputMethod::Print, num_slots, verbose_level, only_rooted, ); } ("copy", Some(arg_matches)) => { let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot); let ending_slot = value_t_or_exit!(arg_matches, "ending_slot", Slot); let target_db = PathBuf::from(value_t_or_exit!(arg_matches, "target_db", String)); let source = open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None); let target = open_blockstore(&target_db, AccessType::PrimaryOnly, None); for (slot, _meta) in source.slot_meta_iterator(starting_slot).unwrap() { if slot > ending_slot { break; } if let Ok(shreds) = source.get_data_shreds_for_slot(slot, 0) { if target.insert_shreds(shreds, None, true).is_err() { warn!("error inserting shreds for slot {}", slot); } } } } ("genesis", Some(arg_matches)) => { let genesis_config = open_genesis_config_by(&ledger_path, arg_matches); let print_accouunts = arg_matches.is_present("accounts"); if 
print_accouunts { let print_account_data = !arg_matches.is_present("no_account_data"); for (pubkey, account) in genesis_config.accounts { output_account( &pubkey, &AccountSharedData::from(account), None, print_account_data, ); } } else { println!("{}", genesis_config); } } ("genesis-hash", Some(arg_matches)) => { println!( "{}", open_genesis_config_by(&ledger_path, arg_matches).hash() ); } ("modify-genesis", Some(arg_matches)) => { let mut genesis_config = open_genesis_config_by(&ledger_path, arg_matches); let output_directory = PathBuf::from(arg_matches.value_of("output_directory").unwrap()); if let Some(cluster_type) = cluster_type_of(arg_matches, "cluster_type") { genesis_config.cluster_type = cluster_type; } if let Some(hashes_per_tick) = arg_matches.value_of("hashes_per_tick") { genesis_config.poh_config.hashes_per_tick = match hashes_per_tick { // Note: Unlike `solana-genesis`, "auto" is not supported here. "sleep" => None, _ => Some(value_t_or_exit!(arg_matches, "hashes_per_tick", u64)), } } create_new_ledger( &output_directory, &genesis_config, solana_runtime::hardened_unpack::MAX_GENESIS_ARCHIVE_UNPACKED_SIZE, LedgerColumnOptions::default(), ) .unwrap_or_else(|err| { eprintln!("Failed to write genesis config: {:?}", err); exit(1); }); println!("{}", open_genesis_config_by(&output_directory, arg_matches)); } ("shred-version", Some(arg_matches)) => { let process_options = ProcessOptions { new_hard_forks: hardforks_of(arg_matches, "hard_forks"), halt_at_slot: Some(0), poh_verify: false, ..ProcessOptions::default() }; let genesis_config = open_genesis_config_by(&ledger_path, arg_matches); let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); match load_bank_forks( arg_matches, &genesis_config, &blockstore, process_options, snapshot_archive_path, ) { Ok((bank_forks, ..)) => { println!( "{}", compute_shred_version( &genesis_config.hash(), Some( &bank_forks .read() .unwrap() .working_bank() .hard_forks() .read() .unwrap() ) ) ); } Err(err) => { eprintln!("Failed to load ledger: {:?}", err); exit(1); } } } ("shred-meta", Some(arg_matches)) => { #[derive(Debug)] #[allow(dead_code)] struct ShredMeta<'a> { slot: Slot, full_slot: bool, shred_index: usize, data: bool, code: bool, last_in_slot: bool, data_complete: bool, shred: &'a Shred, } let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot); let ending_slot = value_t!(arg_matches, "ending_slot", Slot).unwrap_or(Slot::MAX); let ledger = open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None); for (slot, _meta) in ledger .slot_meta_iterator(starting_slot) .unwrap() .take_while(|(slot, _)| *slot <= ending_slot) { let full_slot = ledger.is_full(slot); if let Ok(shreds) = ledger.get_data_shreds_for_slot(slot, 0) { for (shred_index, shred) in shreds.iter().enumerate() { println!( "{:#?}", ShredMeta { slot, full_slot, shred_index, data: shred.is_data(), code: shred.is_code(), data_complete: shred.data_complete(), last_in_slot: shred.last_in_slot(), shred, } ); } } } } ("bank-hash", Some(arg_matches)) => { let process_options = ProcessOptions { new_hard_forks: hardforks_of(arg_matches, "hard_forks"), halt_at_slot: Some(0), poh_verify: false, ..ProcessOptions::default() }; let genesis_config = open_genesis_config_by(&ledger_path, arg_matches); let blockstore = open_blockstore( &ledger_path,
); match load_bank_forks( arg_matches, &genesis_config, &blockstore, process_options, snapshot_archive_path, ) { Ok((bank_forks, ..)) => { println!("{}", &bank_forks.read().unwrap().working_bank().hash()); } Err(err) => { eprintln!("Failed to load ledger: {:?}", err); exit(1); } } } ("slot", Some(arg_matches)) => { let slots = values_t_or_exit!(arg_matches, "slots", Slot); let allow_dead_slots = arg_matches.is_present("allow_dead_slots"); let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); for slot in slots { println!("Slot {}", slot); if let Err(err) = output_slot( &blockstore, slot, allow_dead_slots, &LedgerOutputMethod::Print, verbose_level, ) { eprintln!("{}", err); } } } ("json", Some(arg_matches)) => { let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot); let allow_dead_slots = arg_matches.is_present("allow_dead_slots"); output_ledger( open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ), starting_slot, Slot::MAX, allow_dead_slots, LedgerOutputMethod::Json, None, std::u64::MAX, true, ); } ("dead-slots", Some(arg_matches)) => { let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot); for slot in blockstore.dead_slots_iterator(starting_slot).unwrap() { println!("{}", slot); } } ("duplicate-slots", Some(arg_matches)) => { let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot); for slot in blockstore.duplicate_slots_iterator(starting_slot).unwrap() { println!("{}", slot); } } ("set-dead-slot", Some(arg_matches)) => { let slots = values_t_or_exit!(arg_matches, "slots", Slot); let blockstore = open_blockstore(&ledger_path, AccessType::PrimaryOnly, wal_recovery_mode); for slot in slots { match blockstore.set_dead_slot(slot) { Ok(_) => println!("Slot {} marked dead", slot), Err(err) => eprintln!("Failed to mark slot {} dead: {:?}", slot, err), } } } ("remove-dead-slot", Some(arg_matches)) => { let slots = values_t_or_exit!(arg_matches, "slots", Slot); let blockstore = open_blockstore(&ledger_path, AccessType::PrimaryOnly, wal_recovery_mode); for slot in slots { match blockstore.remove_dead_slot(slot) { Ok(_) => println!("Slot {} no longer marked dead", slot), Err(err) => { eprintln!("Failed to remove dead flag for slot {}: {:?}", slot, err) } } } } ("parse_full_frozen", Some(arg_matches)) => { let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot); let ending_slot = value_t_or_exit!(arg_matches, "ending_slot", Slot); let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let mut ancestors = BTreeSet::new(); assert!( blockstore.meta(ending_slot).unwrap().is_some(), "Ending slot doesn't exist" ); for a in AncestorIterator::new(ending_slot, &blockstore) { ancestors.insert(a); if a <= starting_slot { break; } } println!("ancestors: {:?}", ancestors.iter()); let mut frozen = BTreeMap::new(); let mut full = BTreeMap::new(); let frozen_regex = Regex::new(r"bank frozen: (\d*)").unwrap(); let full_regex = Regex::new(r"slot (\d*) is full").unwrap(); let log_file = PathBuf::from(value_t_or_exit!(arg_matches, "log_path", String)); let f = BufReader::new(File::open(log_file).unwrap()); println!("Reading log file"); for line in f.lines().flatten() { let parse_results = { if let
Some(slot_string) = frozen_regex.captures_iter(&line).next() { Some((slot_string, &mut frozen)) } else { full_regex .captures_iter(&line) .next() .map(|slot_string| (slot_string, &mut full)) } }; if let Some((slot_string, map)) = parse_results { let slot = slot_string .get(1) .expect("Only one match group") .as_str() .parse::<u64>() .unwrap(); if ancestors.contains(&slot) && !map.contains_key(&slot) { map.insert(slot, line); } if slot == ending_slot && frozen.contains_key(&slot) && full.contains_key(&slot) { break; } } } for ((slot1, frozen_log), (slot2, full_log)) in frozen.iter().zip(full.iter()) { assert_eq!(slot1, slot2); println!( "Slot: {}\n, full: {}\n, frozen: {}", slot1, full_log, frozen_log ); } } ("verify", Some(arg_matches)) => { let mut accounts_index_config = AccountsIndexConfig::default(); if let Some(bins) = value_t!(arg_matches, "accounts_index_bins", usize).ok() { accounts_index_config.bins = Some(bins); } let exit_signal = Arc::new(AtomicBool::new(false)); let system_monitor_service = SystemMonitorService::new(Arc::clone(&exit_signal), true, false); accounts_index_config.index_limit_mb = if let Some(limit) = value_t!(arg_matches, "accounts_index_memory_limit_mb", usize).ok() { IndexLimitMb::Limit(limit) } else if arg_matches.is_present("disable_accounts_disk_index") { IndexLimitMb::InMemOnly } else { IndexLimitMb::Unspecified }; { let mut accounts_index_paths: Vec<PathBuf> = if arg_matches.is_present("accounts_index_path") { values_t_or_exit!(arg_matches, "accounts_index_path", String) .into_iter() .map(PathBuf::from) .collect() } else { vec![] }; if accounts_index_paths.is_empty() { accounts_index_paths = vec![ledger_path.join("accounts_index")]; } accounts_index_config.drives = Some(accounts_index_paths); } let filler_accounts_config = FillerAccountsConfig { count: value_t_or_exit!(arg_matches, "accounts_filler_count", usize), size: value_t_or_exit!(arg_matches, "accounts_filler_size", usize), }; let accounts_db_config = Some(AccountsDbConfig { index: Some(accounts_index_config), accounts_hash_cache_path: Some(ledger_path.clone()), filler_accounts_config, skip_rewrites: matches.is_present("accounts_db_skip_rewrites"), ..AccountsDbConfig::default() }); let process_options = ProcessOptions { new_hard_forks: hardforks_of(arg_matches, "hard_forks"), poh_verify: !arg_matches.is_present("skip_poh_verify"), halt_at_slot: value_t!(arg_matches, "halt_at_slot", Slot).ok(), accounts_db_caching_enabled: !arg_matches.is_present("no_accounts_db_caching"), limit_load_slot_count_from_snapshot: value_t!( arg_matches, "limit_load_slot_count_from_snapshot", usize ) .ok(), accounts_db_config, verify_index: arg_matches.is_present("verify_accounts_index"), allow_dead_slots: arg_matches.is_present("allow_dead_slots"), accounts_db_test_hash_calculation: arg_matches .is_present("accounts_db_test_hash_calculation"), accounts_db_skip_shrink: arg_matches.is_present("accounts_db_skip_shrink"), runtime_config: RuntimeConfig { bpf_jit: !matches.is_present("no_bpf_jit"), ..RuntimeConfig::default() }, ..ProcessOptions::default() }; let print_accounts_stats = arg_matches.is_present("print_accounts_stats"); println!( "genesis hash: {}", open_genesis_config_by(&ledger_path, arg_matches).hash() ); let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let (bank_forks, ..) 
= load_bank_forks( arg_matches, &open_genesis_config_by(&ledger_path, arg_matches), &blockstore, process_options, snapshot_archive_path, ) .unwrap_or_else(|err| { eprintln!("Ledger verification failed: {:?}", err); exit(1); }); if print_accounts_stats { let working_bank = bank_forks.read().unwrap().working_bank(); working_bank.print_accounts_stats(); } exit_signal.store(true, Ordering::Relaxed); system_monitor_service.join().unwrap(); println!("Ok"); } ("graph", Some(arg_matches)) => { let output_file = value_t_or_exit!(arg_matches, "graph_filename", String); let process_options = ProcessOptions { new_hard_forks: hardforks_of(arg_matches, "hard_forks"), halt_at_slot: value_t!(arg_matches, "halt_at_slot", Slot).ok(), poh_verify: false, ..ProcessOptions::default() }; let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); match load_bank_forks( arg_matches, &open_genesis_config_by(&ledger_path, arg_matches), &blockstore, process_options, snapshot_archive_path, ) { Ok((bank_forks, ..)) => { let dot = graph_forks( &bank_forks.read().unwrap(), arg_matches.is_present("include_all_votes"), ); let extension = Path::new(&output_file).extension(); let result = if extension == Some(OsStr::new("pdf")) { render_dot(dot, &output_file, "pdf") } else if extension == Some(OsStr::new("png")) { render_dot(dot, &output_file, "png") } else { File::create(&output_file) .and_then(|mut file| file.write_all(&dot.into_bytes())) }; match result { Ok(_) => println!("Wrote {}", output_file), Err(err) => eprintln!("Unable to write {}: {}", output_file, err), } } Err(err) => { eprintln!("Failed to load ledger: {:?}", err); exit(1); } } } ("create-snapshot", Some(arg_matches)) => { let output_directory = value_t!(arg_matches, "output_directory", PathBuf) .unwrap_or_else(|_| ledger_path.clone()); let mut warp_slot = value_t!(arg_matches, "warp_slot", Slot).ok(); let remove_stake_accounts = arg_matches.is_present("remove_stake_accounts"); let new_hard_forks = hardforks_of(arg_matches, "hard_forks"); let faucet_pubkey = pubkey_of(arg_matches, "faucet_pubkey"); let faucet_lamports = value_t!(arg_matches, "faucet_lamports", u64).unwrap_or(0); let rent_burn_percentage = value_t!(arg_matches, "rent_burn_percentage", u8); let hashes_per_tick = arg_matches.value_of("hashes_per_tick"); let bootstrap_stake_authorized_pubkey = pubkey_of(arg_matches, "bootstrap_stake_authorized_pubkey"); let bootstrap_validator_lamports = value_t_or_exit!(arg_matches, "bootstrap_validator_lamports", u64); let bootstrap_validator_stake_lamports = value_t_or_exit!(arg_matches, "bootstrap_validator_stake_lamports", u64); let minimum_stake_lamports = rent.minimum_balance(StakeState::size_of()); if bootstrap_validator_stake_lamports < minimum_stake_lamports { eprintln!( "Error: insufficient --bootstrap-validator-stake-lamports. 
\ Minimum amount is {}", minimum_stake_lamports ); exit(1); } let bootstrap_validator_pubkeys = pubkeys_of(arg_matches, "bootstrap_validator"); let accounts_to_remove = pubkeys_of(arg_matches, "accounts_to_remove").unwrap_or_default(); let vote_accounts_to_destake: HashSet<_> = pubkeys_of(arg_matches, "vote_accounts_to_destake") .unwrap_or_default() .into_iter() .collect(); let snapshot_version = arg_matches.value_of("snapshot_version").map_or( SnapshotVersion::default(), |s| { s.parse::<SnapshotVersion>().unwrap_or_else(|e| { eprintln!("Error: {}", e); exit(1) }) }, ); let maximum_full_snapshot_archives_to_retain = value_t_or_exit!(arg_matches, "maximum_full_snapshots_to_retain", usize); let maximum_incremental_snapshot_archives_to_retain = value_t_or_exit!( arg_matches, "maximum_incremental_snapshots_to_retain", usize ); let genesis_config = open_genesis_config_by(&ledger_path, arg_matches); let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let is_incremental = arg_matches.is_present("incremental"); let snapshot_slot = if Some("ROOT") == arg_matches.value_of("snapshot_slot") { blockstore .rooted_slot_iterator(0) .expect("Failed to get rooted slot iterator") .last() .expect("Failed to get root") } else { value_t_or_exit!(arg_matches, "snapshot_slot", Slot) }; info!( "Creating {}snapshot of slot {} in {}", if is_incremental { "incremental " } else { "" }, snapshot_slot, output_directory.display() ); match load_bank_forks( arg_matches, &genesis_config, &blockstore, ProcessOptions { new_hard_forks, halt_at_slot: Some(snapshot_slot), poh_verify: false, ..ProcessOptions::default() }, snapshot_archive_path, ) { Ok((bank_forks, starting_snapshot_hashes)) => { let mut bank = bank_forks .read() .unwrap() .get(snapshot_slot) .unwrap_or_else(|| { eprintln!("Error: Slot {} is not available", snapshot_slot); exit(1); }) .clone(); let child_bank_required = rent_burn_percentage.is_ok() || hashes_per_tick.is_some() || remove_stake_accounts || !accounts_to_remove.is_empty() || !vote_accounts_to_destake.is_empty() || faucet_pubkey.is_some() || bootstrap_validator_pubkeys.is_some(); if child_bank_required { let mut child_bank = Bank::new_from_parent(&bank, bank.collector_id(), bank.slot() + 1); if let Ok(rent_burn_percentage) = rent_burn_percentage { child_bank.set_rent_burn_percentage(rent_burn_percentage); } if let Some(hashes_per_tick) = hashes_per_tick { child_bank.set_hashes_per_tick(match hashes_per_tick { // Note: Unlike `solana-genesis`, "auto" is not supported here. 
"sleep" => None, _ => { Some(value_t_or_exit!(arg_matches, "hashes_per_tick", u64)) } }); } bank = Arc::new(child_bank); } if let Some(faucet_pubkey) = faucet_pubkey { bank.store_account( &faucet_pubkey, &AccountSharedData::new(faucet_lamports, 0, &system_program::id()), ); } if remove_stake_accounts { for (address, mut account) in bank .get_program_accounts(&stake::program::id(), &ScanConfig::default()) .unwrap() .into_iter() { account.set_lamports(0); bank.store_account(&address, &account); } } for address in accounts_to_remove { let mut account = bank.get_account(&address).unwrap_or_else(|| { eprintln!( "Error: Account does not exist, unable to remove it: {}", address ); exit(1); }); account.set_lamports(0); bank.store_account(&address, &account); } if !vote_accounts_to_destake.is_empty() { for (address, mut account) in bank .get_program_accounts(&stake::program::id(), &ScanConfig::default()) .unwrap() .into_iter() { if let Ok(StakeState::Stake(meta, stake)) = account.state() { if vote_accounts_to_destake .contains(&stake.delegation.voter_pubkey) { if verbose_level > 0 { warn!( "Undelegating stake account {} from {}", address, stake.delegation.voter_pubkey, ); } account.set_state(&StakeState::Initialized(meta)).unwrap(); bank.store_account(&address, &account); } } } } if let Some(bootstrap_validator_pubkeys) = bootstrap_validator_pubkeys { assert_eq!(bootstrap_validator_pubkeys.len() % 3, 0); // Ensure there are no duplicated pubkeys in the --bootstrap-validator list { let mut v = bootstrap_validator_pubkeys.clone(); v.sort(); v.dedup(); if v.len() != bootstrap_validator_pubkeys.len() { eprintln!( "Error: --bootstrap-validator pubkeys cannot be duplicated" ); exit(1); } } // Delete existing vote accounts for (address, mut account) in bank .get_program_accounts( &solana_vote_program::id(), &ScanConfig::default(), ) .unwrap() .into_iter() { account.set_lamports(0); bank.store_account(&address, &account); } // Add a new identity/vote/stake account for each of the provided bootstrap // validators let mut bootstrap_validator_pubkeys_iter = bootstrap_validator_pubkeys.iter(); loop { let identity_pubkey = match bootstrap_validator_pubkeys_iter.next() { None => break, Some(identity_pubkey) => identity_pubkey, }; let vote_pubkey = bootstrap_validator_pubkeys_iter.next().unwrap(); let stake_pubkey = bootstrap_validator_pubkeys_iter.next().unwrap(); bank.store_account( identity_pubkey, &AccountSharedData::new( bootstrap_validator_lamports, 0, &system_program::id(), ), ); let vote_account = vote_state::create_account_with_authorized( identity_pubkey, identity_pubkey, identity_pubkey, 100, VoteState::get_rent_exempt_reserve(&rent).max(1), ); bank.store_account( stake_pubkey, &stake_state::create_account( bootstrap_stake_authorized_pubkey .as_ref() .unwrap_or(identity_pubkey), vote_pubkey, &vote_account, &rent, bootstrap_validator_stake_lamports, ), ); bank.store_account(vote_pubkey, &vote_account); } // Warp ahead at least two epochs to ensure that the leader schedule will be // updated to reflect the new bootstrap validator(s) let minimum_warp_slot = genesis_config.epoch_schedule.get_first_slot_in_epoch( genesis_config.epoch_schedule.get_epoch(snapshot_slot) + 2, ); if let Some(warp_slot) = warp_slot { if warp_slot < minimum_warp_slot { eprintln!( "Error: --warp-slot too close. 
Must be >= {}", minimum_warp_slot ); exit(1); } } else { warn!("Warping to slot {}", minimum_warp_slot); warp_slot = Some(minimum_warp_slot); } } if child_bank_required { while !bank.is_complete() { bank.register_tick(&Hash::new_unique()); } } bank.set_capitalization(); let bank = if let Some(warp_slot) = warp_slot { Arc::new(Bank::warp_from_parent( &bank, bank.collector_id(), warp_slot, )) } else { bank }; println!( "Creating a version {} {}snapshot of slot {}", snapshot_version, if is_incremental { "incremental " } else { "" }, bank.slot(), ); if is_incremental { if starting_snapshot_hashes.is_none() { eprintln!("Unable to create incremental snapshot without a base full snapshot"); exit(1); } let full_snapshot_slot = starting_snapshot_hashes.unwrap().full.hash.0; if bank.slot() <= full_snapshot_slot { eprintln!( "Unable to create incremental snapshot: Slot must be greater than full snapshot slot. slot: {}, full snapshot slot: {}", bank.slot(), full_snapshot_slot, ); exit(1); } let incremental_snapshot_archive_info = snapshot_utils::bank_to_incremental_snapshot_archive( ledger_path, &bank, full_snapshot_slot, Some(snapshot_version), output_directory, ArchiveFormat::TarZstd, maximum_full_snapshot_archives_to_retain, maximum_incremental_snapshot_archives_to_retain, ) .unwrap_or_else(|err| { eprintln!("Unable to create incremental snapshot: {}", err); exit(1); }); println!( "Successfully created incremental snapshot for slot {}, hash {}, base slot: {}: {}", bank.slot(), bank.hash(), full_snapshot_slot, incremental_snapshot_archive_info.path().display(), ); } else { let full_snapshot_archive_info = snapshot_utils::bank_to_full_snapshot_archive( ledger_path, &bank, Some(snapshot_version), output_directory, ArchiveFormat::TarZstd, maximum_full_snapshot_archives_to_retain, maximum_incremental_snapshot_archives_to_retain, ) .unwrap_or_else(|err| { eprintln!("Unable to create snapshot: {}", err); exit(1); }); println!( "Successfully created snapshot for slot {}, hash {}: {}", bank.slot(), bank.hash(), full_snapshot_archive_info.path().display(), ); } println!( "Shred version: {}", compute_shred_version( &genesis_config.hash(), Some(&bank.hard_forks().read().unwrap()) ) ); } Err(err) => { eprintln!("Failed to load ledger: {:?}", err); exit(1); } } } ("accounts", Some(arg_matches)) => { let halt_at_slot = value_t!(arg_matches, "halt_at_slot", Slot).ok(); let process_options = ProcessOptions { new_hard_forks: hardforks_of(arg_matches, "hard_forks"), halt_at_slot, poh_verify: false, ..ProcessOptions::default() }; let genesis_config = open_genesis_config_by(&ledger_path, arg_matches); let include_sysvars = arg_matches.is_present("include_sysvars"); let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let (bank_forks, ..) 
= load_bank_forks( arg_matches, &genesis_config, &blockstore, process_options, snapshot_archive_path, ) .unwrap_or_else(|err| { eprintln!("Failed to load ledger: {:?}", err); exit(1); }); let bank = bank_forks.read().unwrap().working_bank(); let mut measure = Measure::start("getting accounts"); let accounts: BTreeMap<_, _> = bank .get_all_accounts_with_modified_slots() .unwrap() .into_iter() .filter(|(pubkey, _account, _slot)| { include_sysvars || !solana_sdk::sysvar::is_sysvar_id(pubkey) }) .map(|(pubkey, account, slot)| (pubkey, (account, slot))) .collect(); measure.stop(); info!("{}", measure); let mut measure = Measure::start("calculating total accounts stats"); let total_accounts_stats = bank.calculate_total_accounts_stats( accounts .iter() .map(|(pubkey, (account, _slot))| (pubkey, account)), ); measure.stop(); info!("{}", measure); let print_account_contents = !arg_matches.is_present("no_account_contents"); if print_account_contents { let print_account_data = !arg_matches.is_present("no_account_data"); let mut measure = Measure::start("printing account contents"); for (pubkey, (account, slot)) in accounts.into_iter() { output_account(&pubkey, &account, Some(slot), print_account_data); } measure.stop(); info!("{}", measure); } println!("{:#?}", total_accounts_stats); } ("capitalization", Some(arg_matches)) => { let halt_at_slot = value_t!(arg_matches, "halt_at_slot", Slot).ok(); let process_options = ProcessOptions { new_hard_forks: hardforks_of(arg_matches, "hard_forks"), halt_at_slot, poh_verify: false, ..ProcessOptions::default() }; let genesis_config = open_genesis_config_by(&ledger_path, arg_matches); let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); match load_bank_forks( arg_matches, &genesis_config, &blockstore, process_options, snapshot_archive_path, ) { Ok((bank_forks, ..)) => { let bank_forks = bank_forks.read().unwrap(); let slot = bank_forks.working_bank().slot(); let bank = bank_forks.get(slot).unwrap_or_else(|| { eprintln!("Error: Slot {} is not available", slot); exit(1); }); if arg_matches.is_present("recalculate_capitalization") { println!("Recalculating capitalization"); let old_capitalization = bank.set_capitalization(); if old_capitalization == bank.capitalization() { eprintln!( "Capitalization was identical: {}", Sol(old_capitalization) ); } } if arg_matches.is_present("warp_epoch") { let base_bank = bank; let raw_warp_epoch = value_t!(arg_matches, "warp_epoch", String).unwrap(); let warp_epoch = if raw_warp_epoch.starts_with('+') { base_bank.epoch() + value_t!(arg_matches, "warp_epoch", Epoch).unwrap() } else { value_t!(arg_matches, "warp_epoch", Epoch).unwrap() }; if warp_epoch < base_bank.epoch() { eprintln!( "Error: can't warp epoch backwards: {} => {}", base_bank.epoch(), warp_epoch ); exit(1); } if let Ok(raw_inflation) = value_t!(arg_matches, "inflation", String) { let inflation = match raw_inflation.as_str() { "pico" => Inflation::pico(), "full" => Inflation::full(), "none" => Inflation::new_disabled(), _ => unreachable!(), }; println!( "Forcing to: {:?} (was: {:?})", inflation, base_bank.inflation() ); base_bank.set_inflation(inflation); } let next_epoch = base_bank .epoch_schedule() .get_first_slot_in_epoch(warp_epoch); // disable eager rent collection because this creates many unrelated // rent collection account updates base_bank .lazy_rent_collection .store(true, std::sync::atomic::Ordering::Relaxed); let feature_account_balance = std::cmp::max( 
genesis_config.rent.minimum_balance(Feature::size_of()), 1, ); if arg_matches.is_present("enable_credits_auto_rewind") { base_bank.unfreeze_for_ledger_tool(); let mut force_enabled_count = 0; if base_bank .get_account(&feature_set::credits_auto_rewind::id()) .is_none() { base_bank.store_account( &feature_set::credits_auto_rewind::id(), &feature::create_account( &Feature { activated_at: None }, feature_account_balance, ), ); force_enabled_count += 1; } if force_enabled_count == 0 { warn!( "credits_auto_rewind is already activated (or scheduled)" ); } let mut store_failed_count = 0; if force_enabled_count >= 1 { if base_bank .get_account(&feature_set::deprecate_rewards_sysvar::id()) .is_some() { // steal some lamports from the pretty old feature so as not to affect // capitalization; this doesn't affect inflation behavior! base_bank.store_account( &feature_set::deprecate_rewards_sysvar::id(), &AccountSharedData::default(), ); force_enabled_count -= 1; } else { store_failed_count += 1; } } assert_eq!(force_enabled_count, store_failed_count); if store_failed_count >= 1 { // we have no choice; this is maybe a locally created blank cluster with // a non-Development cluster type. let old_cap = base_bank.set_capitalization(); let new_cap = base_bank.capitalization(); warn!( "Skewing capitalization a bit to enable credits_auto_rewind as \ requested: increasing {} from {} to {}", feature_account_balance, old_cap, new_cap, ); assert_eq!( old_cap + feature_account_balance * store_failed_count, new_cap ); } } #[derive(Default, Debug)] struct PointDetail { epoch: Epoch, points: u128, stake: u128, credits: u128, } #[derive(Default, Debug)] struct CalculationDetail { epochs: usize, voter: Pubkey, voter_owner: Pubkey, current_effective_stake: u64, total_stake: u64, rent_exempt_reserve: u64, points: Vec<PointDetail>, base_rewards: u64, commission: u8, vote_rewards: u64, stake_rewards: u64, activation_epoch: Epoch, deactivation_epoch: Option<Epoch>, point_value: Option<PointValue>, old_credits_observed: Option<u64>, new_credits_observed: Option<u64>, skipped_reasons: String, } use solana_stake_program::stake_state::InflationPointCalculationEvent; let stake_calculation_details: DashMap<Pubkey, CalculationDetail> = DashMap::new(); let last_point_value = Arc::new(RwLock::new(None)); let tracer = |event: &RewardCalculationEvent| { // Currently the RewardCalculationEvent enum has only the Staking variant // because only staking tracing is supported! 
#[allow(irrefutable_let_patterns)] if let RewardCalculationEvent::Staking(pubkey, event) = event { let mut detail = stake_calculation_details.entry(**pubkey).or_default(); match event { InflationPointCalculationEvent::CalculatedPoints( epoch, stake, credits, points, ) => { if *points > 0 { detail.epochs += 1; detail.points.push(PointDetail {epoch: *epoch, points: *points, stake: *stake, credits: *credits}); } } InflationPointCalculationEvent::SplitRewards( all, voter, staker, point_value, ) => { detail.base_rewards = *all; detail.vote_rewards = *voter; detail.stake_rewards = *staker; detail.point_value = Some(point_value.clone()); // we have duplicate copies of `PointValue`s for possible // miscalculation; do some minimum sanity check let mut last_point_value = last_point_value.write().unwrap(); if let Some(last_point_value) = last_point_value.as_ref() { assert_eq!(last_point_value, point_value); } else { *last_point_value = Some(point_value.clone()); } } InflationPointCalculationEvent::EffectiveStakeAtRewardedEpoch(stake) => { detail.current_effective_stake = *stake; } InflationPointCalculationEvent::Commission(commission) => { detail.commission = *commission; } InflationPointCalculationEvent::RentExemptReserve(reserve) => { detail.rent_exempt_reserve = *reserve; } InflationPointCalculationEvent::CreditsObserved( old_credits_observed, new_credits_observed, ) => { detail.old_credits_observed = Some(*old_credits_observed); detail.new_credits_observed = *new_credits_observed; } InflationPointCalculationEvent::Delegation( delegation, owner, ) => { detail.voter = delegation.voter_pubkey; detail.voter_owner = *owner; detail.total_stake = delegation.stake; detail.activation_epoch = delegation.activation_epoch; if delegation.deactivation_epoch < Epoch::max_value() { detail.deactivation_epoch = Some(delegation.deactivation_epoch); } } InflationPointCalculationEvent::Skipped(skipped_reason) => { if detail.skipped_reasons.is_empty() { detail.skipped_reasons = format!("{:?}", skipped_reason); } else { detail.skipped_reasons += &format!("/{:?}", skipped_reason); } } } } }; let warped_bank = Bank::new_from_parent_with_tracer( base_bank, base_bank.collector_id(), next_epoch, tracer, ); warped_bank.freeze(); let mut csv_writer = if arg_matches.is_present("csv_filename") { let csv_filename = value_t_or_exit!(arg_matches, "csv_filename", String); let file = File::create(&csv_filename).unwrap(); Some(csv::WriterBuilder::new().from_writer(file)) } else { None }; println!("Slot: {} => {}", base_bank.slot(), warped_bank.slot()); println!("Epoch: {} => {}", base_bank.epoch(), warped_bank.epoch()); assert_capitalization(base_bank); assert_capitalization(&warped_bank); let interest_per_epoch = ((warped_bank.capitalization() as f64) / (base_bank.capitalization() as f64) * 100_f64) - 100_f64; let interest_per_year = interest_per_epoch / warped_bank.epoch_duration_in_years(base_bank.epoch()); println!( "Capitalization: {} => {} (+{} {}%; annualized {}%)", Sol(base_bank.capitalization()), Sol(warped_bank.capitalization()), Sol(warped_bank.capitalization() - base_bank.capitalization()), interest_per_epoch, interest_per_year, ); let mut overall_delta = 0; let modified_accounts = warped_bank.get_all_accounts_modified_since_parent(); let mut rewarded_accounts = modified_accounts .iter() .map(|(pubkey, account)| { ( pubkey, account, base_bank .get_account(pubkey) .map(|a| a.lamports()) .unwrap_or_default(), ) }) .collect::<Vec<_>>(); rewarded_accounts.sort_unstable_by_key( |(pubkey, account, base_lamports)| { ( 
*account.owner(), *base_lamports, account.lamports() - base_lamports, *pubkey, ) }, ); let mut unchanged_accounts = stake_calculation_details .iter() .map(|entry| *entry.key()) .collect::<HashSet<_>>() .difference( &rewarded_accounts .iter() .map(|(pubkey, ..)| **pubkey) .collect(), ) .map(|pubkey| (*pubkey, warped_bank.get_account(pubkey).unwrap())) .collect::<Vec<_>>(); unchanged_accounts.sort_unstable_by_key(|(pubkey, account)| { (*account.owner(), account.lamports(), *pubkey) }); let unchanged_accounts = unchanged_accounts.into_iter(); let rewarded_accounts = rewarded_accounts .into_iter() .map(|(pubkey, account, ..)| (*pubkey, account.clone())); let all_accounts = unchanged_accounts.chain(rewarded_accounts); for (pubkey, warped_account) in all_accounts { // Don't output sysvars; they're always updated but not related to // inflation. if solana_sdk::sysvar::is_sysvar_id(&pubkey) { continue; } if let Some(base_account) = base_bank.get_account(&pubkey) { let delta = warped_account.lamports() - base_account.lamports(); let detail_ref = stake_calculation_details.get(&pubkey); let detail: Option<&CalculationDetail> = detail_ref.as_ref().map(|detail_ref| detail_ref.value()); println!( "{:<45}({}): {} => {} (+{} {:>4.9}%) {:?}", format!("{}", pubkey), // format! is needed to pad/justify correctly. base_account.owner(), Sol(base_account.lamports()), Sol(warped_account.lamports()), Sol(delta), ((warped_account.lamports() as f64) / (base_account.lamports() as f64) * 100_f64) - 100_f64, detail, ); if let Some(ref mut csv_writer) = csv_writer { #[derive(Serialize)] struct InflationRecord { cluster_type: String, rewarded_epoch: Epoch, account: String, owner: String, old_balance: u64, new_balance: u64, data_size: usize, delegation: String, delegation_owner: String, effective_stake: String, delegated_stake: String, rent_exempt_reserve: String, activation_epoch: String, deactivation_epoch: String, earned_epochs: String, epoch: String, epoch_credits: String, epoch_points: String, epoch_stake: String, old_credits_observed: String, new_credits_observed: String, base_rewards: String, stake_rewards: String, vote_rewards: String, commission: String, cluster_rewards: String, cluster_points: String, old_capitalization: u64, new_capitalization: u64, } fn format_or_na<T: std::fmt::Display>( data: Option<T>, ) -> String { data.map(|data| format!("{}", data)) .unwrap_or_else(|| "N/A".to_owned()) } let mut point_details = detail .map(|d| d.points.iter().map(Some).collect::<Vec<_>>()) .unwrap_or_default(); // ensure we print even if there is no calculation/point detail if point_details.is_empty() { point_details.push(None); } for point_detail in point_details { let record = InflationRecord { cluster_type: format!( "{:?}", base_bank.cluster_type() ), rewarded_epoch: base_bank.epoch(), account: format!("{}", pubkey), owner: format!("{}", base_account.owner()), old_balance: base_account.lamports(), new_balance: warped_account.lamports(), data_size: base_account.data().len(), delegation: format_or_na(detail.map(|d| d.voter)), delegation_owner: format_or_na( detail.map(|d| d.voter_owner), ), effective_stake: format_or_na( detail.map(|d| d.current_effective_stake), ), delegated_stake: format_or_na( detail.map(|d| d.total_stake), ), rent_exempt_reserve: format_or_na( detail.map(|d| d.rent_exempt_reserve), ), activation_epoch: format_or_na(detail.map(|d| { if d.activation_epoch < Epoch::max_value() { d.activation_epoch } else { // bootstrapped 0 } })), deactivation_epoch: format_or_na( detail.and_then(|d| 
d.deactivation_epoch), ), earned_epochs: format_or_na( detail.map(|d| d.epochs), ), epoch: format_or_na(point_detail.map(|d| d.epoch)), epoch_credits: format_or_na( point_detail.map(|d| d.credits), ), epoch_points: format_or_na( point_detail.map(|d| d.points), ), epoch_stake: format_or_na( point_detail.map(|d| d.stake), ), old_credits_observed: format_or_na( detail.and_then(|d| d.old_credits_observed), ), new_credits_observed: format_or_na( detail.and_then(|d| d.new_credits_observed), ), base_rewards: format_or_na( detail.map(|d| d.base_rewards), ), stake_rewards: format_or_na( detail.map(|d| d.stake_rewards), ), vote_rewards: format_or_na( detail.map(|d| d.vote_rewards), ), commission: format_or_na( detail.map(|d| d.commission), ), cluster_rewards: format_or_na( last_point_value .read() .unwrap() .clone() .map(|pv| pv.rewards), ), cluster_points: format_or_na( last_point_value .read() .unwrap() .clone() .map(|pv| pv.points), ), old_capitalization: base_bank.capitalization(), new_capitalization: warped_bank.capitalization(), }; csv_writer.serialize(&record).unwrap(); } } overall_delta += delta; } else { error!("new account!?: {}", pubkey); } } if overall_delta > 0 { println!("Sum of lamports changes: {}", Sol(overall_delta)); } } else { if arg_matches.is_present("recalculate_capitalization") { eprintln!( "Capitalization isn't verified because it's recalculated" ); } if arg_matches.is_present("inflation") { eprintln!( "Forcing inflation isn't meaningful because bank isn't warping" ); } assert_capitalization(bank); println!("Inflation: {:?}", bank.inflation()); println!("RentCollector: {:?}", bank.rent_collector()); println!("Capitalization: {}", Sol(bank.capitalization())); } } Err(err) => { eprintln!("Failed to load ledger: {:?}", err); exit(1); } } } ("purge", Some(arg_matches)) => { let start_slot = value_t_or_exit!(arg_matches, "start_slot", Slot); let end_slot = value_t!(arg_matches, "end_slot", Slot).ok(); let no_compaction = arg_matches.is_present("no_compaction"); let dead_slots_only = arg_matches.is_present("dead_slots_only"); let batch_size = value_t_or_exit!(arg_matches, "batch_size", usize); let access_type = if !no_compaction { AccessType::PrimaryOnly } else { AccessType::PrimaryOnlyForMaintenance }; let blockstore = open_blockstore(&ledger_path, access_type, wal_recovery_mode); let end_slot = match end_slot { Some(end_slot) => end_slot, None => match blockstore.slot_meta_iterator(start_slot) { Ok(metas) => { let slots: Vec<_> = metas.map(|(slot, _)| slot).collect(); if slots.is_empty() { eprintln!("Purge range is empty"); exit(1); } *slots.last().unwrap() } Err(err) => { eprintln!("Unable to read the Ledger: {:?}", err); exit(1); } }, }; if end_slot < start_slot { eprintln!( "end slot {} is less than start slot {}", end_slot, start_slot ); exit(1); } info!( "Purging data from slots {} to {} ({} slots) (skip compaction: {}) (dead slot only: {})", start_slot, end_slot, end_slot - start_slot, no_compaction, dead_slots_only, ); let purge_from_blockstore = |start_slot, end_slot| { blockstore.purge_from_next_slots(start_slot, end_slot); if no_compaction { blockstore.purge_slots(start_slot, end_slot, PurgeType::Exact); } else { blockstore.purge_and_compact_slots(start_slot, end_slot); } }; if !dead_slots_only { let slots_iter = &(start_slot..=end_slot).chunks(batch_size); for slots in slots_iter { let slots = slots.collect::<Vec<_>>(); assert!(!slots.is_empty()); let start_slot = *slots.first().unwrap(); let end_slot = *slots.last().unwrap(); info!( "Purging chunked slots from {} to 
{} ({} slots)", start_slot, end_slot, end_slot - start_slot ); purge_from_blockstore(start_slot, end_slot); } } else { let dead_slots_iter = blockstore .dead_slots_iterator(start_slot) .unwrap() .take_while(|s| *s <= end_slot); for dead_slot in dead_slots_iter { info!("Purging dead slot {}", dead_slot); purge_from_blockstore(dead_slot, dead_slot); } } } ("list-roots", Some(arg_matches)) => { let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let max_height = if let Some(height) = arg_matches.value_of("max_height") { usize::from_str(height).expect("Maximum height must be a number") } else { usize::MAX }; let start_root = if let Some(height) = arg_matches.value_of("start_root") { Slot::from_str(height).expect("Starting root must be a number") } else { 0 }; let num_roots = if let Some(roots) = arg_matches.value_of("num_roots") { usize::from_str(roots).expect("Number of roots must be a number") } else { usize::from_str(DEFAULT_ROOT_COUNT).unwrap() }; let iter = blockstore .rooted_slot_iterator(start_root) .expect("Failed to get rooted slot"); let mut slot_hash = Vec::new(); for (i, slot) in iter.into_iter().enumerate() { if i > num_roots { break; } if slot <= max_height as u64 { let blockhash = blockstore .get_slot_entries(slot, 0) .unwrap() .last() .unwrap() .hash; slot_hash.push((slot, blockhash)); } else { break; } } let mut output_file: Box<dyn Write> = if let Some(path) = arg_matches.value_of("slot_list") { match File::create(path) { Ok(file) => Box::new(file), _ => Box::new(stdout()), } } else { Box::new(stdout()) }; slot_hash .into_iter() .rev() .enumerate() .for_each(|(i, (slot, hash))| { if i < num_roots { output_file .write_all(format!("{:?}: {:?}\n", slot, hash).as_bytes()) .expect("failed to write"); } }); } ("repair-roots", Some(arg_matches)) => { let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let start_root = if let Some(root) = arg_matches.value_of("start_root") { Slot::from_str(root).expect("Before root must be a number") } else { blockstore.max_root() }; let max_slots = value_t_or_exit!(arg_matches, "max_slots", u64); let end_root = if let Some(root) = arg_matches.value_of("end_root") { Slot::from_str(root).expect("Until root must be a number") } else { start_root.saturating_sub(max_slots) }; assert!(start_root > end_root); assert!(blockstore.is_root(start_root)); let num_slots = start_root - end_root - 1; // Adjust by one since start_root need not be checked if arg_matches.is_present("end_root") && num_slots > max_slots { eprintln!( "Requested range {} too large, max {}. 
\ Either adjust `--until` value, or pass a larger `--repair-limit` \ to override the limit", num_slots, max_slots, ); exit(1); } let ancestor_iterator = AncestorIterator::new(start_root, &blockstore) .take_while(|&slot| slot >= end_root); let roots_to_fix: Vec<_> = ancestor_iterator .filter(|slot| !blockstore.is_root(*slot)) .collect(); if !roots_to_fix.is_empty() { eprintln!("{} slots to be rooted", roots_to_fix.len()); for chunk in roots_to_fix.chunks(100) { eprintln!("{:?}", chunk); blockstore .set_roots(chunk.iter()) .unwrap_or_else(|err| { eprintln!("Unable to set roots {:?}: {}", chunk, err); exit(1); }); } } else { println!( "No missing roots found in range {} to {}", end_root, start_root ); } } ("bounds", Some(arg_matches)) => { let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); match blockstore.slot_meta_iterator(0) { Ok(metas) => { let all = arg_matches.is_present("all"); let slots: Vec<_> = metas.map(|(slot, _)| slot).collect(); if slots.is_empty() { println!("Ledger is empty"); } else { let first = slots.first().unwrap(); let last = slots.last().unwrap_or(first); if first != last { println!( "Ledger has data for {} slots {:?} to {:?}", slots.len(), first, last ); if all { println!("Non-empty slots: {:?}", slots); } } else { println!("Ledger has data for slot {:?}", first); } } if let Ok(rooted) = blockstore.rooted_slot_iterator(0) { let mut first_rooted = 0; let mut last_rooted = 0; let mut total_rooted = 0; for (i, slot) in rooted.into_iter().enumerate() { if i == 0 { first_rooted = slot; } last_rooted = slot; total_rooted += 1; } let mut count_past_root = 0; for slot in slots.iter().rev() { if *slot > last_rooted { count_past_root += 1; } else { break; } } println!( " with {} rooted slots from {:?} to {:?}", total_rooted, first_rooted, last_rooted ); println!(" and {} slots past the last root", count_past_root); } else { println!(" with no rooted slots"); } } Err(err) => { eprintln!("Unable to read the Ledger: {:?}", err); exit(1); } }; } ("analyze-storage", _) => { analyze_storage( &open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ) .db(), ); println!("Ok."); } ("compute-slot-cost", Some(arg_matches)) => { let blockstore = open_blockstore( &ledger_path, AccessType::TryPrimaryThenSecondary, wal_recovery_mode, ); let mut slots: Vec<u64> = vec![]; if !arg_matches.is_present("slots") { if let Ok(metas) = blockstore.slot_meta_iterator(0) { slots = metas.map(|(slot, _)| slot).collect(); } } else { slots = values_t_or_exit!(arg_matches, "slots", Slot); } for slot in slots { if let Err(err) = compute_slot_cost(&blockstore, slot) { eprintln!("{}", err); } } } ("", _) => { eprintln!("{}", matches.usage()); exit(1); } _ => unreachable!(), }; } }
AccessType::TryPrimaryThenSecondary, wal_recovery_mode,
experimental.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/api/experimental/experimental.proto package api import ( fmt "fmt" proto "github.com/golang/protobuf/proto" _ "google.golang.org/genproto/googleapis/api/annotations" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // Experimental service configuration. These configuration options can // only be used by whitelisted users. type Experimental struct { // Authorization configuration. Authorization *AuthorizationConfig `protobuf:"bytes,8,opt,name=authorization,proto3" json:"authorization,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Experimental) Reset() { *m = Experimental{} } func (m *Experimental) String() string { return proto.CompactTextString(m) } func (*Experimental) ProtoMessage() {} func (*Experimental) Descriptor() ([]byte, []int) { return fileDescriptor_8ee43d601952ef58, []int{0} } func (m *Experimental) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Experimental.Unmarshal(m, b) } func (m *Experimental) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Experimental.Marshal(b, m, deterministic) } func (m *Experimental) XXX_Merge(src proto.Message) { xxx_messageInfo_Experimental.Merge(m, src) } func (m *Experimental) XXX_Size() int { return xxx_messageInfo_Experimental.Size(m) } func (m *Experimental) XXX_DiscardUnknown() { xxx_messageInfo_Experimental.DiscardUnknown(m) } var xxx_messageInfo_Experimental proto.InternalMessageInfo func (m *Experimental) GetAuthorization() *AuthorizationConfig { if m != nil { return m.Authorization } return nil } func
() { proto.RegisterType((*Experimental)(nil), "google.api.Experimental") } func init() { proto.RegisterFile("google/api/experimental/experimental.proto", fileDescriptor_8ee43d601952ef58) } var fileDescriptor_8ee43d601952ef58 = []byte{ // 204 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4a, 0xcf, 0xcf, 0x4f, 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0x4f, 0xad, 0x28, 0x48, 0x2d, 0xca, 0xcc, 0x4d, 0xcd, 0x2b, 0x49, 0xcc, 0x41, 0xe1, 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x71, 0x41, 0xd4, 0xea, 0x25, 0x16, 0x64, 0x4a, 0xc9, 0x20, 0xe9, 0x4b, 0xcc, 0xcb, 0xcb, 0x2f, 0x49, 0x2c, 0xc9, 0xcc, 0xcf, 0x2b, 0x86, 0xa8, 0x94, 0x32, 0xc2, 0x65, 0x6a, 0x62, 0x69, 0x49, 0x46, 0x7e, 0x51, 0x66, 0x15, 0x58, 0x75, 0x7c, 0x72, 0x7e, 0x5e, 0x5a, 0x66, 0x3a, 0x44, 0x8f, 0x52, 0x28, 0x17, 0x8f, 0x2b, 0x92, 0x52, 0x21, 0x57, 0x2e, 0x5e, 0x14, 0xd5, 0x12, 0x1c, 0x0a, 0x8c, 0x1a, 0xdc, 0x46, 0xf2, 0x7a, 0x08, 0x57, 0xe8, 0x39, 0x22, 0x2b, 0x70, 0x06, 0x9b, 0x16, 0x84, 0xaa, 0xcb, 0x29, 0x9a, 0x8b, 0x2f, 0x39, 0x3f, 0x17, 0x49, 0x93, 0x93, 0x20, 0xb2, 0x35, 0x01, 0x20, 0xbb, 0x03, 0x18, 0xa3, 0x74, 0xa1, 0x0a, 0xd2, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, 0xc0, 0x2e, 0xd3, 0x87, 0x48, 0x25, 0x16, 0x64, 0x16, 0x83, 0x3c, 0x64, 0x9d, 0x58, 0x90, 0xb9, 0x88, 0x89, 0xc5, 0xdd, 0x31, 0xc0, 0x33, 0x89, 0x0d, 0xac, 0xc0, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xa0, 0x95, 0x20, 0xe5, 0x46, 0x01, 0x00, 0x00, }
init
zk_group_storage_test.go
package consumergroup import ( "fmt" "math/rand" "testing" "time" ) const ( testValue = "go_test_value" testTopic = "go_test_topic" testGroup = "go_test_group" testConsumerID = "go_test_consumer_id" ) func TestZKGroupStorageClaimAndGetAndReleasePartition(t *testing.T) { zk := newZKGroupStorage([]string{"127.0.0.1:2181"}, 6*time.Second) err := zk.claimPartition(testGroup, testTopic, 0, testConsumerID) if err != nil { t.Error(err) } err = zk.releasePartition(testGroup, testTopic, 0) if err != nil { t.Error(err) } zk.claimPartition(testGroup, testTopic, 0, testConsumerID) err = zk.claimPartition(testGroup, testTopic, 0, testConsumerID) if err == nil { zk.releasePartition(testGroup, testTopic, 0) t.Error("Expected claiming a partition twice to fail, but it succeeded") } cid, err := zk.getPartitionOwner(testGroup, testTopic, 0) if err != nil { zk.releasePartition(testGroup, testTopic, 0) t.Error("get partition owner failed, because: ", err) } if cid != testConsumerID { zk.releasePartition(testGroup, testTopic, 0) t.Error("partition owner got from zookeeper isn't expected") } zk.releasePartition(testGroup, testTopic, 0) } func TestZKGroupStorageRegisterAndGetAndDeleteConsumer(t *testing.T) { zk := newZKGroupStorage([]string{"127.0.0.1:2181"}, 6*time.Second) err := zk.registerConsumer(testGroup, testConsumerID, nil) if err != nil { t.Fatal(err) } err = zk.deleteConsumer(testGroup, testConsumerID) if err != nil { t.Fatal(err) } zk.registerConsumer(testGroup, testConsumerID, nil) err = zk.registerConsumer(testGroup, testConsumerID, nil) if err == nil { zk.deleteConsumer(testGroup, testConsumerID) t.Fatal("Expected registering a consumer twice to fail, but it succeeded") } consumerList, err := zk.getConsumerList(testGroup) if err != nil { t.Fatal(err) } if consumerList[0] != testConsumerID { zk.deleteConsumer(testGroup, testConsumerID) t.Fatal("consumer id got from zookeeper isn't expected") } zk.deleteConsumer(testGroup, testConsumerID) } func TestZKGroupWatchConsumerList(t *testing.T) { zk := newZKGroupStorage([]string{"127.0.0.1:2181"}, 6*time.Second) consumer1 := fmt.Sprintf("%s-%d", testConsumerID, rand.Int()) consumer2 := fmt.Sprintf("%s-%d", testConsumerID, rand.Int()) consumer3 := fmt.Sprintf("%s-%d", testConsumerID, rand.Int()) consumerList := []string{consumer1, consumer2, consumer3} for _, consumer := range consumerList { zk.registerConsumer(testGroup, consumer, nil) } watcher, err := zk.watchConsumerList(testGroup) if err != nil { t.Error(err) } select { case <-watcher.EvCh: t.Error("channel received a message before the consumer list changed") default: } zk.deleteConsumer(testGroup, consumer1) select { case <-watcher.EvCh: default: t.Error("channel didn't receive a message after the consumer list changed") } for _, consumer := range consumerList { zk.deleteConsumer(testGroup, consumer) } } func TestZKGroupStorageCommitAndGetOffset(t *testing.T)
{ zk := newZKGroupStorage([]string{"127.0.0.1:2181"}, 6*time.Second) testOffset := rand.Int63() err := zk.commitOffset(testGroup, testTopic, 0, testOffset) if err != nil { t.Error(err) } offset, err := zk.getOffset(testGroup, testTopic, 0) if err != nil { t.Error(err) } if offset != testOffset { t.Error("offset got from zookeeper isn't expected") } err = zk.commitOffset(testGroup, testTopic, 0, testOffset+1) if err != nil { t.Error(err) } }
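The tests above double as the only visible documentation of this unexported storage API. As a minimal sketch condensing the claim/commit/get/release flow into one place: it assumes the code lives inside the consumergroup package (the functions are unexported), that the method signatures are exactly as the tests call them, and that a ZooKeeper node is reachable at 127.0.0.1:2181; the group, topic, and consumer names are placeholders.

package consumergroup

import (
	"fmt"
	"time"
)

// resumeExample claims partition 0 of a topic, commits an offset, reads it
// back, and releases the claim -- the same flow the tests above exercise.
// Signatures are taken from the test calls; names are illustrative only.
func resumeExample() error {
	zk := newZKGroupStorage([]string{"127.0.0.1:2181"}, 6*time.Second)
	// Claim exclusive ownership of the partition for this consumer.
	if err := zk.claimPartition("my-group", "my-topic", 0, "consumer-1"); err != nil {
		return err
	}
	defer zk.releasePartition("my-group", "my-topic", 0)
	// Record progress, then read it back as the resume point.
	if err := zk.commitOffset("my-group", "my-topic", 0, 42); err != nil {
		return err
	}
	offset, err := zk.getOffset("my-group", "my-topic", 0)
	if err != nil {
		return err
	}
	fmt.Println("next fetch starts at offset", offset)
	return nil
}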
create_bwc_index.py
# Licensed to Elasticsearch under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on # an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. import argparse import glob import logging import os import random import shutil import subprocess import sys import tempfile import time DEFAULT_TRANSPORT_TCP_PORT = 9300 DEFAULT_HTTP_TCP_PORT = 9200 if sys.version_info[0] < 3: print('%s must use python 3.x (for the ES python client)' % sys.argv[0]) sys.exit(1) from datetime import datetime try: from elasticsearch import Elasticsearch from elasticsearch.exceptions import ConnectionError from elasticsearch.exceptions import TransportError except ImportError as e: print('Can\'t import elasticsearch, please install `sudo pip3 install elasticsearch`') sys.exit(1) # sometimes returns True def rarely(): return random.randint(0, 10) == 0 # usually returns True def frequently(): return not rarely() # asserts the correctness of the given hits given they are sorted asc def assert_sort(hits): values = [hit['sort'] for hit in hits['hits']['hits']] assert len(values) > 0, 'expected non-empty result' val = min(values) for x in values: assert x >= val, '%s >= %s' % (x, val) val = x # Indexes the given number of documents into the given index # and randomly runs refresh and flush commands def index_documents(es, index_name, type, num_docs): logging.info('Indexing %s docs' % num_docs) for id in range(0, num_docs): es.index(index=index_name, doc_type=type, id=id, body={'string': str(random.randint(0, 100)), 'long_sort': random.randint(0, 100), 'double_sort' : float(random.randint(0, 100)), 'bool' : random.choice([True, False])}) if rarely(): es.indices.refresh(index=index_name) if rarely(): es.indices.flush(index=index_name, force=frequently()) logging.info('Flushing index') es.indices.flush(index=index_name) def delete_by_query(es, version, index_name, doc_type): logging.info('Deleting long_sort:[10..20] docs') query = {'query': {'range': {'long_sort': {'gte': 10, 'lte': 20}}}} if version.startswith('0.') or version in ('1.0.0.Beta1', '1.0.0.Beta2'): # TODO #10262: we can't write DBQ into the translog for these old versions until we fix this back-compat bug: # #4074: these versions don't expect to see the top-level 'query' to count/delete_by_query: query = query['query'] return deleted_count = es.count(index=index_name, doc_type=doc_type, body=query)['count'] result = es.delete_by_query(index=index_name, doc_type=doc_type, body=query) # make sure no shards failed: assert result['_indices'][index_name]['_shards']['failed'] == 0, 'delete by query failed: %s' % result logging.info('Deleted %d docs' % deleted_count) def run_basic_asserts(es, index_name, type, num_docs): count = es.count(index=index_name)['count'] assert count == num_docs, 'Expected %r but got %r documents' % (num_docs, count) for _ in range(0, num_docs): random_doc_id = random.randint(0, num_docs-1) doc = es.get(index=index_name, doc_type=type, 
id=random_doc_id) assert doc, 'Expected document for id %s but got %s' % (random_doc_id, doc) assert_sort(es.search(index=index_name, body={ 'sort': [ {'double_sort': {'order': 'asc'}} ] })) assert_sort(es.search(index=index_name, body={ 'sort': [ {'long_sort': {'order': 'asc'}} ] })) def build_version(version_tuple): return '.'.join([str(x) for x in version_tuple]) def build_tuple(version_string): return [int(x) for x in version_string.split('.')] def start_node(version, release_dir, data_dir, repo_dir, tcp_port=DEFAULT_TRANSPORT_TCP_PORT, http_port=DEFAULT_HTTP_TCP_PORT, cluster_name=None): logging.info('Starting node from %s on port %s/%s, data_dir %s' % (release_dir, tcp_port, http_port, data_dir)) if cluster_name is None: cluster_name = 'bwc_index_' + version cmd = [ os.path.join(release_dir, 'bin/elasticsearch'), '-Epath.data=%s' % data_dir, '-Epath.logs=logs', '-Ecluster.name=%s' % cluster_name, '-Enetwork.host=localhost', '-Etransport.tcp.port=%s' % tcp_port, '-Ehttp.port=%s' % http_port, '-Epath.repo=%s' % repo_dir ] if version.startswith('0.') or version.startswith('1.0.0.Beta'): cmd.append('-f') # versions before 1.0 start in the background automatically return subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) def install_plugin(version, release_dir, plugin_name): run_plugin(version, release_dir, 'install', [plugin_name]) def remove_plugin(version, release_dir, plugin_name): run_plugin(version, release_dir, 'remove', [plugin_name]) def run_plugin(version, release_dir, plugin_cmd, args): cmd = [os.path.join(release_dir, 'bin/elasticsearch-plugin'), plugin_cmd] + args subprocess.check_call(cmd) def create_client(http_port=DEFAULT_HTTP_TCP_PORT, timeout=30): logging.info('Waiting for node to startup') for _ in range(0, timeout): # TODO: ask Honza if there is a better way to do this? try: client = Elasticsearch([{'host': 'localhost', 'port': http_port}]) client.cluster.health(wait_for_nodes=1) client.count() # can we actually search or do we get a 503? 
-- anyway retry return client except (ConnectionError, TransportError): pass time.sleep(1) assert False, 'Timed out waiting for node for %s seconds' % timeout def generate_index(client, version, index_name): client.indices.delete(index=index_name, ignore=404) logging.info('Create single shard test index') mappings = {} if not version.startswith('2.'): # TODO: we need better "before/onOr/after" logic in python # backcompat test for legacy type level analyzer settings, see #8874 mappings['analyzer_type1'] = { 'analyzer': 'standard', 'properties': { 'string_with_index_analyzer': { 'type': 'string', 'index_analyzer': 'standard' }, } } # completion type was added in 0.90.3 if not version.startswith('0.20') and version not in ['0.90.0.Beta1', '0.90.0.RC1', '0.90.0.RC2', '0.90.0', '0.90.1', '0.90.2']: mappings['analyzer_type1']['properties']['completion_with_index_analyzer'] = { 'type': 'completion', 'index_analyzer': 'standard' } mappings['analyzer_type2'] = { 'index_analyzer': 'standard', 'search_analyzer': 'keyword', 'search_quote_analyzer': 'english', } mappings['index_name_and_path'] = { 'properties': { 'parent_multi_field': { 'type': 'string', 'path': 'just_name', 'fields': { 'raw': {'type': 'string', 'index': 'not_analyzed', 'index_name': 'raw_multi_field'} } }, 'field_with_index_name': { 'type': 'string', 'index_name': 'custom_index_name_for_field' } } } mappings['meta_fields'] = { '_id': { 'path': 'myid' }, '_routing': { 'path': 'myrouting' }, '_boost': { 'null_value': 2.0 } } mappings['custom_formats'] = { 'properties': { 'string_with_custom_postings': { 'type': 'string', 'postings_format': 'Lucene41' }, 'long_with_custom_doc_values': { 'type': 'long', 'doc_values_format': 'Lucene42' } } } mappings['auto_boost'] = { '_all': { 'auto_boost': True } } mappings['norms'] = { 'properties': { 'string_with_norms_disabled': { 'type': 'string', 'norms': { 'enabled': False } }, 'string_with_norms_enabled': { 'type': 'string', 'index': 'not_analyzed', 'norms': { 'enabled': True, 'loading': 'eager' } } } } mappings['doc'] = { 'properties': { 'string': { 'type': 'string', 'boost': 4 } } } settings = { 'number_of_shards': 1, 'number_of_replicas': 0, } if version.startswith('0.') or version.startswith('1.'): # Same as ES default (60 seconds), but missing the units to make sure they are inserted on upgrade: settings['gc_deletes'] = '60000' # Same as ES default (5 GB), but missing the units to make sure they are inserted on upgrade: settings['merge.policy.max_merged_segment'] = '5368709120' warmers = {} warmers['warmer1'] = { 'source': { 'query': { 'match_all': {} } } } client.indices.create(index=index_name, body={ 'settings': settings, 'mappings': mappings, 'warmers': warmers }) health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0) assert health['timed_out'] == False, 'cluster health timed out %s' % health num_docs = random.randint(2000, 3000) if version == "1.1.0": # 1.1.0 is buggy and creates lots and lots of segments, so we create a # lighter index for it to keep bw tests reasonable # see https://github.com/elastic/elasticsearch/issues/5817 num_docs = int(num_docs / 10) index_documents(client, index_name, 'doc', num_docs) logging.info('Running basic asserts on the data added') run_basic_asserts(client, index_name, 'doc', num_docs) def snapshot_index(client, version, repo_dir): # Add bogus persistent settings to make sure they can be restored client.cluster.put_settings(body={ 'persistent': { 'cluster.routing.allocation.exclude.version_attr': version, # Same as ES 
default (30 seconds), but missing the units to make sure they are inserted on upgrade: 'discovery.zen.publish_timeout': '30000', # Same as ES default (512 KB), but missing the units to make sure they are inserted on upgrade: 'indices.recovery.file_chunk_size': '524288', } }) client.indices.put_template(name='template_' + version.lower(), order=0, body={ "template": "te*", "settings": { "number_of_shards" : 1 }, "mappings": { "type1": { "_source": { "enabled" : False } } }, "aliases": { "alias1": {}, "alias2": { "filter": { "term": {"version" : version } }, "routing": "kimchy" }, "{index}-alias": {} } }) client.snapshot.create_repository(repository='test_repo', body={ 'type': 'fs', 'settings': { 'location': repo_dir } }) client.snapshot.create(repository='test_repo', snapshot='test_1', wait_for_completion=True) client.snapshot.delete_repository(repository='test_repo') def compress_index(version, tmp_dir, output_dir): compress(tmp_dir, output_dir, 'index-%s.zip' % version, 'data') def compress_repo(version, tmp_dir, output_dir): compress(tmp_dir, output_dir, 'repo-%s.zip' % version, 'repo') def compress(tmp_dir, output_dir, zipfile, directory): abs_output_dir = os.path.abspath(output_dir) zipfile = os.path.join(abs_output_dir, zipfile) if os.path.exists(zipfile): os.remove(zipfile) logging.info('Compressing index into %s, tmpDir %s', zipfile, tmp_dir) olddir = os.getcwd() os.chdir(tmp_dir) subprocess.check_call('zip -r %s %s' % (zipfile, directory), shell=True) os.chdir(olddir) def parse_config():
def create_bwc_index(cfg, version): logging.info('--> Creating bwc index for %s' % version) release_dir = os.path.join(cfg.releases_dir, 'elasticsearch-%s' % version) if not os.path.exists(release_dir): raise RuntimeError('ES version %s does not exist in %s' % (version, cfg.releases_dir)) snapshot_supported = not (version.startswith('0.') or version == '1.0.0.Beta1') tmp_dir = tempfile.mkdtemp() data_dir = os.path.join(tmp_dir, 'data') repo_dir = os.path.join(tmp_dir, 'repo') logging.info('Temp data dir: %s' % data_dir) logging.info('Temp repo dir: %s' % repo_dir) node = None try: node = start_node(version, release_dir, data_dir, repo_dir, cfg.tcp_port, cfg.http_port) client = create_client(cfg.http_port) index_name = 'index-%s' % version.lower() generate_index(client, version, index_name) if snapshot_supported: snapshot_index(client, version, repo_dir) # 10067: get a delete-by-query into the translog on upgrade. We must do # this after the snapshot, because it calls flush. Otherwise the index # will already have the deletions applied on upgrade. if version.startswith('0.') or version.startswith('1.'): delete_by_query(client, version, index_name, 'doc') shutdown_node(node) node = None compress_index(version, tmp_dir, cfg.output_dir) if snapshot_supported: compress_repo(version, tmp_dir, cfg.output_dir) finally: if node is not None: # This only happens if we've hit an exception: shutdown_node(node) shutil.rmtree(tmp_dir) def shutdown_node(node): logging.info('Shutting down node with pid %d', node.pid) node.terminate() node.wait() def main(): logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO, datefmt='%Y-%m-%d %I:%M:%S %p') logging.getLogger('elasticsearch').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.WARN) cfg = parse_config() for version in cfg.versions: create_bwc_index(cfg, version) if __name__ == '__main__': try: main() except KeyboardInterrupt: print('Caught keyboard interrupt, exiting...')
parser = argparse.ArgumentParser(description='Builds an elasticsearch index for backwards compatibility tests') required = parser.add_mutually_exclusive_group(required=True) required.add_argument('versions', metavar='X.Y.Z', nargs='*', default=[], help='The elasticsearch version to build an index for') required.add_argument('--all', action='store_true', default=False, help='Recreate all existing backwards compatibility indexes') parser.add_argument('--releases-dir', '-d', default='backwards', metavar='DIR', help='The directory containing elasticsearch releases') parser.add_argument('--output-dir', '-o', default='core/src/test/resources/indices/bwc', help='The directory to write the zipped index into') parser.add_argument('--tcp-port', default=DEFAULT_TRANSPORT_TCP_PORT, type=int, help='The port to use as the minimum port for TCP communication') parser.add_argument('--http-port', default=DEFAULT_HTTP_TCP_PORT, type=int, help='The port to use as the minimum port for HTTP communication') cfg = parser.parse_args() if not os.path.exists(cfg.output_dir): parser.error('Output directory does not exist: %s' % cfg.output_dir) if not cfg.versions: # --all for bwc_index in glob.glob(os.path.join(cfg.output_dir, 'index-*.zip')): version = os.path.basename(bwc_index)[len('index-'):-len('.zip')] cfg.versions.append(version) return cfg
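A standalone sketch (not part of the original script) of how the --all branch above recovers version strings from existing zip names:

# Sketch: 'index-1.2.3.zip' -> '1.2.3', mirroring the glob loop in parse_config()
import os

bwc_index = 'core/src/test/resources/indices/bwc/index-1.2.3.zip'
version = os.path.basename(bwc_index)[len('index-'):-len('.zip')]
assert version == '1.2.3'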
configuration.py
"""""" import copy import logging import multiprocessing import sys import urllib3 class Configuration(object): """NOTE: This class is auto generated by the swagger code generator program. Ref: https://github.com/swagger-api/swagger-codegen Do not edit the class manually. """ _default = None def __init__(self): """Constructor""" if self._default: for key in self._default.__dict__.keys(): self.__dict__[key] = copy.copy(self._default.__dict__[key]) return # Default Base url self.host = "https://server.api.mailchimp.com/3.0" # Temp file folder for downloading files self.temp_folder_path = None # Authentication Settings # dict to store API key(s) self.api_key = {} # dict to store API prefix (e.g. Bearer) self.api_key_prefix = {} # function to refresh API key if expired self.refresh_api_key_hook = None # Username for HTTP basic authentication self.username = "" # Password for HTTP basic authentication self.password = "" # Logging Settings self.logger = {} self.logger["package_logger"] = logging.getLogger("mailchimp_marketing") self.logger["urllib3_logger"] = logging.getLogger("urllib3") # Log format self.logger_format = '%(asctime)s %(levelname)s %(message)s' # Log stream handler self.logger_stream_handler = None # Log file handler self.logger_file_handler = None # Debug file location self.logger_file = None # Debug switch self.debug = False # SSL/TLS verification # Set this to false to skip verifying SSL certificate when calling API # from https server. self.verify_ssl = True # Set this to customize the certificate file to verify the peer. self.ssl_ca_cert = None # client certificate file self.cert_file = None # client key file self.key_file = None # Set this to True/False to enable/disable SSL hostname verification. self.assert_hostname = None # urllib3 connection pool's maximum number of connections saved # per pool. urllib3 uses 1 connection as default value, but this is # not the best value when you are making a lot of possibly parallel # requests to the same host, which is often the case here. # cpu_count * 5 is used as default value to increase performance. self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 # Proxy URL self.proxy = None # Safe chars for path_param self.safe_chars_for_path_param = '' @classmethod def set_default(cls, default):
@property def logger_file(self): """The logger file. If the logger_file is None, then add stream handler and remove file handler. Otherwise, add file handler and remove stream handler. :param value: The logger_file path. :type: str """ return self.__logger_file @logger_file.setter def logger_file(self, value): """The logger file. If the logger_file is None, then add stream handler and remove file handler. Otherwise, add file handler and remove stream handler. :param value: The logger_file path. :type: str """ self.__logger_file = value if self.__logger_file: # If set logging file, # then add file handler and remove stream handler. self.logger_file_handler = logging.FileHandler(self.__logger_file) self.logger_file_handler.setFormatter(self.logger_formatter) for _, logger in six.iteritems(self.logger): logger.addHandler(self.logger_file_handler) if self.logger_stream_handler: logger.removeHandler(self.logger_stream_handler) else: # If not set logging file, # then add stream handler and remove file handler. self.logger_stream_handler = logging.StreamHandler() self.logger_stream_handler.setFormatter(self.logger_formatter) for _, logger in six.iteritems(self.logger): logger.addHandler(self.logger_stream_handler) if self.logger_file_handler: logger.removeHandler(self.logger_file_handler) @property def debug(self): """Debug status :param value: The debug status, True or False. :type: bool """ return self.__debug @debug.setter def debug(self, value): """Debug status :param value: The debug status, True or False. :type: bool """ self.__debug = value if self.__debug: # if debug status is True, turn on debug logging for _, logger in six.iteritems(self.logger): logger.setLevel(logging.DEBUG) # turn on httplib debug httplib.HTTPConnection.debuglevel = 1 else: # if debug status is False, turn off debug logging, # setting log level to default `logging.WARNING` for _, logger in six.iteritems(self.logger): logger.setLevel(logging.WARNING) # turn off httplib debug httplib.HTTPConnection.debuglevel = 0 @property def logger_format(self): """The logger format. The logger_formatter will be updated when sets logger_format. :param value: The format string. :type: str """ return self.__logger_format @logger_format.setter def logger_format(self, value): """The logger format. The logger_formatter will be updated when sets logger_format. :param value: The format string. :type: str """ self.__logger_format = value self.logger_formatter = logging.Formatter(self.__logger_format) def get_api_key_with_prefix(self, identifier): """Gets API key (with prefix if set). :param identifier: The identifier of apiKey. :return: The token for api key authentication. """ if self.refresh_api_key_hook: self.refresh_api_key_hook(self) key = self.api_key.get(identifier) if key: prefix = self.api_key_prefix.get(identifier) if prefix: return "%s %s" % (prefix, key) else: return key def get_basic_auth_token(self): """Gets HTTP basic authentication header (string). :return: The token for basic HTTP authentication. """ return urllib3.util.make_headers( basic_auth=self.username + ':' + self.password ).get('authorization') def auth_settings(self): """Gets Auth Settings dict for api client. :return: The Auth Settings information dict. """ return { 'basicAuth': { 'type': 'basic', 'in': 'header', 'key': 'Authorization', 'value': self.get_basic_auth_token() }, } def to_debug_report(self): """Gets the essential information for debugging. :return: The report for debugging. 
""" return "Python SDK Debug Report:\n"\ "OS: {env}\n"\ "Python Version: {pyversion}\n"\ "Version of the API: 3.0.70\n"\ "SDK Package Version: 3.0.70".\ format(env=sys.platform, pyversion=sys.version)
cls._default = default
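A minimal usage sketch for the class above; the 'api_key' identifier and the token value are illustrative, not part of the generated client:

cfg = Configuration()
cfg.api_key['api_key'] = 'my-token'        # illustrative token
cfg.api_key_prefix['api_key'] = 'Bearer'
assert cfg.get_api_key_with_prefix('api_key') == 'Bearer my-token'
Configuration.set_default(cfg)  # later Configuration() instances copy these settings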
twoTonePrimaryColor.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.setTwoToneColor = setTwoToneColor; exports.getTwoToneColor = getTwoToneColor; var _iconsReact = _interopRequireDefault(require("@ant-design/icons-react")); function
(obj) { return obj && obj.__esModule ? obj : { "default": obj }; } function setTwoToneColor(primaryColor) { return _iconsReact["default"].setTwoToneColors({ primaryColor: primaryColor }); } function getTwoToneColor() { var colors = _iconsReact["default"].getTwoToneColors(); return colors.primaryColor; } //# sourceMappingURL=twoTonePrimaryColor.js.map
_interopRequireDefault
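A usage sketch for the two exports above; the color value is illustrative, and the round-trip assumes @ant-design/icons-react stores the color as given:

const { setTwoToneColor, getTwoToneColor } = require('./twoTonePrimaryColor');
setTwoToneColor('#1890ff');      // forwards to Icon.setTwoToneColors({ primaryColor })
console.log(getTwoToneColor());  // expected: '#1890ff'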
botRegistry.js
const getenv = require('getenv') const {createLog} = require('./log') const {createBot} = require('./bot') const {createChat} = require('./chat') const log = createLog('botRegistry') const botEntryByPageId = {} getenv('MESSENGER_BOTS').split(',').forEach((botId) => { const bot = createBot(botId) const chat = createChat(botId)
} log.info(log.BOT_REGISTERED, { botId }) }) const botRegistry = { findByPageId: (pageId) => { const botEntry = botEntryByPageId[pageId] if (!botEntry) { log.warn(log.BOT_NOT_FOUND, { pageId }) } return botEntry } } module.exports = botRegistry
botEntryByPageId[chat.botInfo.pageId] = { bot, chat
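A usage sketch for the registry above; the MESSENGER_BOTS value and the page id are illustrative:

// MESSENGER_BOTS=bot1,bot2 node app.js
const registry = require('./botRegistry')
const entry = registry.findByPageId('1234567890')
if (entry) {
  // entry.bot and entry.chat were created at startup for this page id
}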
resource_policyoptions_as_path_group.go
package junos import ( "context" "fmt" "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" bchk "github.com/jeremmfr/go-utils/basiccheck" ) type asPathGroupOptions struct { dynamicDB bool name string asPath []map[string]interface{} } func resourcePolicyoptionsAsPathGroup() *schema.Resource
func resourcePolicyoptionsAsPathGroupCreate( ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { sess := m.(*Session) if sess.junosFakeCreateSetFile != "" { if err := setPolicyoptionsAsPathGroup(d, m, nil); err != nil { return diag.FromErr(err) } d.SetId(d.Get("name").(string)) return nil } jnprSess, err := sess.startNewSession() if err != nil { return diag.FromErr(err) } defer sess.closeSession(jnprSess) sess.configLock(jnprSess) var diagWarns diag.Diagnostics policyoptsAsPathGroupExists, err := checkPolicyoptionsAsPathGroupExists(d.Get("name").(string), m, jnprSess) if err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) } if policyoptsAsPathGroupExists { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(fmt.Errorf("policy-options as-path-group %v already exists", d.Get("name").(string)))...) } if err := setPolicyoptionsAsPathGroup(d, m, jnprSess); err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) } warns, err := sess.commitConf("create resource junos_policyoptions_as_path_group", jnprSess) appendDiagWarns(&diagWarns, warns) if err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) } policyoptsAsPathGroupExists, err = checkPolicyoptionsAsPathGroupExists(d.Get("name").(string), m, jnprSess) if err != nil { return append(diagWarns, diag.FromErr(err)...) } if policyoptsAsPathGroupExists { d.SetId(d.Get("name").(string)) } else { return append(diagWarns, diag.FromErr(fmt.Errorf("policy-options as-path-group %v not exists after commit "+ "=> check your config", d.Get("name").(string)))...) } return append(diagWarns, resourcePolicyoptionsAsPathGroupReadWJnprSess(d, m, jnprSess)...) } func resourcePolicyoptionsAsPathGroupRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { sess := m.(*Session) jnprSess, err := sess.startNewSession() if err != nil { return diag.FromErr(err) } defer sess.closeSession(jnprSess) return resourcePolicyoptionsAsPathGroupReadWJnprSess(d, m, jnprSess) } func resourcePolicyoptionsAsPathGroupReadWJnprSess( d *schema.ResourceData, m interface{}, jnprSess *NetconfObject) diag.Diagnostics { mutex.Lock() asPathGroupOptions, err := readPolicyoptionsAsPathGroup(d.Get("name").(string), m, jnprSess) mutex.Unlock() if err != nil { return diag.FromErr(err) } if asPathGroupOptions.name == "" { d.SetId("") } else { fillPolicyoptionsAsPathGroupData(d, asPathGroupOptions) } return nil } func resourcePolicyoptionsAsPathGroupUpdate( ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { d.Partial(true) sess := m.(*Session) if sess.junosFakeUpdateAlso { if err := delPolicyoptionsAsPathGroup(d.Get("name").(string), m, nil); err != nil { return diag.FromErr(err) } if err := setPolicyoptionsAsPathGroup(d, m, nil); err != nil { return diag.FromErr(err) } d.Partial(false) return nil } jnprSess, err := sess.startNewSession() if err != nil { return diag.FromErr(err) } defer sess.closeSession(jnprSess) sess.configLock(jnprSess) var diagWarns diag.Diagnostics if err := delPolicyoptionsAsPathGroup(d.Get("name").(string), m, jnprSess); err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) 
} if err := setPolicyoptionsAsPathGroup(d, m, jnprSess); err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) } warns, err := sess.commitConf("update resource junos_policyoptions_as_path_group", jnprSess) appendDiagWarns(&diagWarns, warns) if err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) } d.Partial(false) return append(diagWarns, resourcePolicyoptionsAsPathGroupReadWJnprSess(d, m, jnprSess)...) } func resourcePolicyoptionsAsPathGroupDelete( ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { sess := m.(*Session) if sess.junosFakeDeleteAlso { if err := delPolicyoptionsAsPathGroup(d.Get("name").(string), m, nil); err != nil { return diag.FromErr(err) } return nil } jnprSess, err := sess.startNewSession() if err != nil { return diag.FromErr(err) } defer sess.closeSession(jnprSess) sess.configLock(jnprSess) var diagWarns diag.Diagnostics if err := delPolicyoptionsAsPathGroup(d.Get("name").(string), m, jnprSess); err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) } warns, err := sess.commitConf("delete resource junos_policyoptions_as_path_group", jnprSess) appendDiagWarns(&diagWarns, warns) if err != nil { appendDiagWarns(&diagWarns, sess.configClear(jnprSess)) return append(diagWarns, diag.FromErr(err)...) } return diagWarns } func resourcePolicyoptionsAsPathGroupImport(d *schema.ResourceData, m interface{}) ([]*schema.ResourceData, error) { sess := m.(*Session) jnprSess, err := sess.startNewSession() if err != nil { return nil, err } defer sess.closeSession(jnprSess) result := make([]*schema.ResourceData, 1) policyoptsAsPathGroupExists, err := checkPolicyoptionsAsPathGroupExists(d.Id(), m, jnprSess) if err != nil { return nil, err } if !policyoptsAsPathGroupExists { return nil, fmt.Errorf("don't find policy-options as-path-group with id '%v' (id must be <name>)", d.Id()) } asPathGroupOptions, err := readPolicyoptionsAsPathGroup(d.Id(), m, jnprSess) if err != nil { return nil, err } fillPolicyoptionsAsPathGroupData(d, asPathGroupOptions) result[0] = d return result, nil } func checkPolicyoptionsAsPathGroupExists(name string, m interface{}, jnprSess *NetconfObject) (bool, error) { sess := m.(*Session) showConfig, err := sess.command("show configuration"+ " policy-options as-path-group "+name+" | display set", jnprSess) if err != nil { return false, err } if showConfig == emptyWord { return false, nil } return true, nil } func setPolicyoptionsAsPathGroup(d *schema.ResourceData, m interface{}, jnprSess *NetconfObject) error { sess := m.(*Session) configSet := make([]string, 0) setPrefix := "set policy-options as-path-group " + d.Get("name").(string) asPathNameList := make([]string, 0) for _, v := range d.Get("as_path").([]interface{}) { asPath := v.(map[string]interface{}) if bchk.StringInSlice(asPath["name"].(string), asPathNameList) { return fmt.Errorf("multiple blocks as_path with the same name %s", asPath["name"].(string)) } asPathNameList = append(asPathNameList, asPath["name"].(string)) configSet = append(configSet, setPrefix+ " as-path "+asPath["name"].(string)+ " \""+asPath["path"].(string)+"\"") } if d.Get("dynamic_db").(bool) { configSet = append(configSet, setPrefix+" dynamic-db") } return sess.configSet(configSet, jnprSess) } func readPolicyoptionsAsPathGroup(name string, m interface{}, jnprSess *NetconfObject) (asPathGroupOptions, error) { sess := m.(*Session) 
var confRead asPathGroupOptions showConfig, err := sess.command("show configuration"+ " policy-options as-path-group "+name+" | display set relative", jnprSess) if err != nil { return confRead, err } if showConfig != emptyWord { confRead.name = name for _, item := range strings.Split(showConfig, "\n") { if strings.Contains(item, "<configuration-output>") { continue } if strings.Contains(item, "</configuration-output>") { break } itemTrim := strings.TrimPrefix(item, setLineStart) switch { case itemTrim == dynamicDB: confRead.dynamicDB = true case strings.HasPrefix(itemTrim, "as-path "): asPath := map[string]interface{}{ "name": "", "path": "", } itemSplit := strings.Split(strings.TrimPrefix(itemTrim, "as-path "), " ") asPath["name"] = itemSplit[0] asPath["path"] = strings.Trim(strings.TrimPrefix(itemTrim, "as-path "+asPath["name"].(string)+" "), "\"") confRead.asPath = append(confRead.asPath, asPath) } } } return confRead, nil } func delPolicyoptionsAsPathGroup(asPathGroup string, m interface{}, jnprSess *NetconfObject) error { sess := m.(*Session) configSet := make([]string, 0, 1) configSet = append(configSet, "delete policy-options as-path-group "+asPathGroup) return sess.configSet(configSet, jnprSess) } func fillPolicyoptionsAsPathGroupData(d *schema.ResourceData, asPathGroupOptions asPathGroupOptions) { if tfErr := d.Set("name", asPathGroupOptions.name); tfErr != nil { panic(tfErr) } if tfErr := d.Set("as_path", asPathGroupOptions.asPath); tfErr != nil { panic(tfErr) } if tfErr := d.Set("dynamic_db", asPathGroupOptions.dynamicDB); tfErr != nil { panic(tfErr) } }
{ return &schema.Resource{ CreateContext: resourcePolicyoptionsAsPathGroupCreate, ReadContext: resourcePolicyoptionsAsPathGroupRead, UpdateContext: resourcePolicyoptionsAsPathGroupUpdate, DeleteContext: resourcePolicyoptionsAsPathGroupDelete, Importer: &schema.ResourceImporter{ State: resourcePolicyoptionsAsPathGroupImport, }, Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, ForceNew: true, Required: true, ValidateDiagFunc: validateNameObjectJunos([]string{}, 64, formatDefault), }, "as_path": { Type: schema.TypeList, Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, Required: true, ValidateDiagFunc: validateNameObjectJunos([]string{}, 64, formatDefault), }, "path": { Type: schema.TypeString, Required: true, }, }, }, }, "dynamic_db": { Type: schema.TypeBool, Optional: true, }, }, } }
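A hypothetical Terraform configuration exercising the schema above; the group name and AS-path regex are illustrative:

resource "junos_policyoptions_as_path_group" "example" {
  name = "via-as65000"
  as_path {
    name = "origin65000"
    path = ".* 65000"
  }
  dynamic_db = false
}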
lib.rs
#![deny(missing_docs)]

/// A client of the Minecraft protocol.
pub mod client;
/// The error types for binding, accepting, reading and writing.
pub mod error;
/// A player in the play state.
pub mod player;
/// The server that stores all players.
pub mod server;

use log::{error, info};
use tokio::net::TcpListener;

use error::{NetError, Result};

/// Starts the server by taking the host name and the port as arguments.
pub async fn start_server(host: &str, port: u16) -> Result<()> {
    let listener = TcpListener::bind((host, port))
        .await
        .map_err(|e| NetError::ServerBindError(format!("{}", e)))?;
    info!("Started server on {}:{}.", host, port);
    loop {
        let (socket, ip) = listener
            .accept()
            .await
            .map_err(|e| NetError::ClientAcceptError(format!("{}", e)))?;
        info!(
            "Client with ip {} and port {} connected.",
            ip.ip(),
            ip.port()
        );
        tokio::spawn(async move {
            let mut buf = [0];
            socket.peek(&mut buf).await.unwrap();
            if buf[0] == 0xfe {
                error!("Implement legacy server ping.");
            } else
}); } }
{ let client = client::Client::new(socket); client.connect().await; }
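A hypothetical binary entry point for the library above; the crate name `mc_net`, the `env_logger` dependency, and the port are assumptions, not part of this file:

// Sketch of a consumer binary (crate name is made up for illustration).
#[tokio::main]
async fn main() -> mc_net::error::Result<()> {
    env_logger::init(); // the `log` macros need a logger implementation installed
    mc_net::start_server("0.0.0.0", 25565).await
}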
exchange.go
package probit // Copyright (c) 2015-2019 Bitontop Technologies Inc. // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. import ( "fmt" "log" "sort" "strconv" "sync" cmap "github.com/orcaman/concurrent-map" "github.com/chz8494/gored/coin" "github.com/chz8494/gored/exchange" "github.com/chz8494/gored/pair" "github.com/chz8494/gored/utils" ) type Probit struct { ID int Name string `bson:"name"` Website string `bson:"website"` API_KEY string API_SECRET string Source exchange.DataSource // / exchange API / microservicve api 1 / PSQL SourceURI string } var pairConstraintMap cmap.ConcurrentMap var coinConstraintMap cmap.ConcurrentMap var balanceMap cmap.ConcurrentMap var instance *Probit var once sync.Once /***************************************************/ func CreateProbit(config *exchange.Config) *Probit { once.Do(func() { instance = &Probit{ ID: DEFAULT_ID, Name: "Probit", Website: "https://www.probit.com/en-us/", API_KEY: config.API_KEY, API_SECRET: config.API_SECRET, Source: config.Source, SourceURI: config.SourceURI, } balanceMap = cmap.New() coinConstraintMap = cmap.New() pairConstraintMap = cmap.New() if err := instance.InitData(); err != nil { log.Printf("%v", err) instance = nil } }) return instance } func (e *Probit) InitData() error { switch e.Source { case exchange.EXCHANGE_API: if err := e.GetCoinsData(); err != nil { return err } if err := e.GetPairsData(); err != nil { return err } break case exchange.MICROSERVICE_API: break case exchange.JSON_FILE: exchangeData := utils.GetExchangeDataFromJSON(e.SourceURI, e.GetName()) if exchangeData == nil { return fmt.Errorf("%s Initial Data Error.", e.GetName()) } else { coinConstraintMap = exchangeData.CoinConstraint pairConstraintMap = exchangeData.PairConstraint } break case exchange.PSQL: default: return fmt.Errorf("%s Initial Coin: There is not selected data source.", e.GetName()) } return nil } /**************** Exchange Information ****************/ func (e *Probit) GetID() int { return e.ID } func (e *Probit) GetName() exchange.ExchangeName { return exchange.PROBIT } func (e *Probit) GetTradingWebURL(pair *pair.Pair) string { return fmt.Sprintf("https://www.probit.com/app/exchange/%s", e.GetSymbolByPair(pair)) } func (e *Probit) GetBalance(coin *coin.Coin) float64 { if tmp, ok := balanceMap.Get(coin.Code); ok { return tmp.(float64) } else { return 0.0 } } /*************** Coins on the Exchanges ***************/ func (e *Probit) GetCoinConstraint(coin *coin.Coin) *exchange.CoinConstraint { if tmp, ok := coinConstraintMap.Get(fmt.Sprintf("%d", coin.ID)); ok { return tmp.(*exchange.CoinConstraint) } return nil } func (e *Probit) SetCoinConstraint(coinConstraint *exchange.CoinConstraint) { coinConstraintMap.Set(fmt.Sprintf("%d", coinConstraint.CoinID), coinConstraint) } func (e *Probit) GetCoins() []*coin.Coin { coinList := []*coin.Coin{} keySort := []int{} for _, key := range coinConstraintMap.Keys() { id, _ := strconv.Atoi(key) keySort = append(keySort, id) } sort.Ints(keySort) for _, key := range keySort { c := coin.GetCoinByID(key) if c != nil { coinList = append(coinList, c) } } return coinList } func (e *Probit) GetCoinBySymbol(symbol string) *coin.Coin { for _, id := range coinConstraintMap.Keys() { if tmp, ok := coinConstraintMap.Get(id); ok { cc := tmp.(*exchange.CoinConstraint) if cc.ExSymbol == symbol { return cc.Coin } } else { log.Printf("Get ID %s CoinConstraint Err", id) } } return nil } func (e *Probit) GetSymbolByCoin(coin 
*coin.Coin) string { key := fmt.Sprintf("%d", coin.ID) if tmp, ok := coinConstraintMap.Get(key); ok { cc := tmp.(*exchange.CoinConstraint) return cc.ExSymbol } return "" } func (e *Probit) DeleteCoin(coin *coin.Coin) { coinConstraintMap.Remove(fmt.Sprintf("%d", coin.ID)) } /*************** Pairs on the Exchanges ***************/ func (e *Probit) GetPairConstraint(pair *pair.Pair) *exchange.PairConstraint { if pair == nil
if tmp, ok := pairConstraintMap.Get(fmt.Sprintf("%d", pair.ID)); ok { return tmp.(*exchange.PairConstraint) } return nil } func (e *Probit) SetPairConstraint(pairConstraint *exchange.PairConstraint) { pairConstraintMap.Set(fmt.Sprintf("%d", pairConstraint.PairID), pairConstraint) } func (e *Probit) GetPairs() []*pair.Pair { pairList := []*pair.Pair{} keySort := []int{} for _, key := range pairConstraintMap.Keys() { id, _ := strconv.Atoi(key) keySort = append(keySort, id) } sort.Ints(keySort) for _, key := range keySort { p := pair.GetPairByID(key) if p != nil { pairList = append(pairList, p) } } return pairList } func (e *Probit) GetPairBySymbol(symbol string) *pair.Pair { for _, id := range pairConstraintMap.Keys() { if tmp, ok := pairConstraintMap.Get(id); ok { pc := tmp.(*exchange.PairConstraint) if pc.ExSymbol == symbol { return pc.Pair } } } return nil } func (e *Probit) GetSymbolByPair(pair *pair.Pair) string { pairConstraint := e.GetPairConstraint(pair) if pairConstraint != nil { return pairConstraint.ExSymbol } return "" } func (e *Probit) HasPair(pair *pair.Pair) bool { return pairConstraintMap.Has(fmt.Sprintf("%d", pair.ID)) } func (e *Probit) DeletePair(pair *pair.Pair) { pairConstraintMap.Remove(fmt.Sprintf("%d", pair.ID)) } /**************** Exchange Constraint ****************/ func (e *Probit) GetConstraintFetchMethod(pair *pair.Pair) *exchange.ConstrainFetchMethod { constrainFetchMethod := &exchange.ConstrainFetchMethod{} constrainFetchMethod.PublicAPI = true constrainFetchMethod.PrivateAPI = false constrainFetchMethod.HealthAPI = true constrainFetchMethod.HasWithdraw = false constrainFetchMethod.HasTransfer = false constrainFetchMethod.Fee = true constrainFetchMethod.LotSize = true constrainFetchMethod.PriceFilter = true constrainFetchMethod.TxFee = true constrainFetchMethod.Withdraw = true constrainFetchMethod.Deposit = true constrainFetchMethod.Confirmation = true constrainFetchMethod.ConstrainSource = 1 constrainFetchMethod.ApiRestrictIP = false return constrainFetchMethod } func (e *Probit) UpdateConstraint() { e.GetCoinsData() e.GetPairsData() } /**************** Coin Constraint ****************/ func (e *Probit) GetTxFee(coin *coin.Coin) float64 { coinConstraint := e.GetCoinConstraint(coin) if coinConstraint == nil { return 0.0 } return coinConstraint.TxFee } func (e *Probit) CanWithdraw(coin *coin.Coin) bool { coinConstraint := e.GetCoinConstraint(coin) if coinConstraint == nil { return false } return coinConstraint.Withdraw } func (e *Probit) CanDeposit(coin *coin.Coin) bool { coinConstraint := e.GetCoinConstraint(coin) if coinConstraint == nil { return false } return coinConstraint.Deposit } func (e *Probit) GetConfirmation(coin *coin.Coin) int { coinConstraint := e.GetCoinConstraint(coin) if coinConstraint == nil { return 0 } return coinConstraint.Confirmation } /**************** Pair Constraint ****************/ func (e *Probit) GetFee(pair *pair.Pair) float64 { pairConstraint := e.GetPairConstraint(pair) if pairConstraint == nil { return 0.0 } return pairConstraint.TakerFee } func (e *Probit) GetLotSize(pair *pair.Pair) float64 { pairConstraint := e.GetPairConstraint(pair) if pairConstraint == nil { return 0.0 } return pairConstraint.LotSize } func (e *Probit) GetPriceFilter(pair *pair.Pair) float64 { pairConstraint := e.GetPairConstraint(pair) if pairConstraint == nil { return 0.0 } return pairConstraint.PriceFilter }
{ return nil }
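A usage sketch for the constructor and accessors above; the Config values are placeholders and error handling is elided:

// Hypothetical caller inside this package; key/secret are placeholders.
func exampleProbit() {
	cfg := &exchange.Config{
		API_KEY:    "key",
		API_SECRET: "secret",
		Source:     exchange.EXCHANGE_API, // fetch coins/pairs straight from the API
	}
	if e := CreateProbit(cfg); e != nil { // nil when InitData failed
		for _, p := range e.GetPairs() {
			fmt.Println(e.GetSymbolByPair(p), e.GetFee(p))
		}
	}
}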
zu_ZA_test.go
package zu_ZA import ( "testing" "time" "github.com/nicola-spb/locales" "github.com/nicola-spb/locales/currency" ) func TestLocale(t *testing.T) { trans := New() expected := "zu_ZA" if trans.Locale() != expected { t.Errorf("Expected '%s' Got '%s'", expected, trans.Locale()) } } func TestPluralsRange(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsRange() // expected := 1 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestPluralsOrdinal(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOne, // }, // { // expected: locales.PluralRuleTwo, // }, // { // expected: locales.PluralRuleFew, // }, // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsOrdinal() // expected := 4 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestPluralsCardinal(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOne, // }, // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsCardinal() // expected := 2 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestRangePlurals(t *testing.T) { trans := New() tests := []struct { num1 float64 v1 uint64 num2 float64 v2 uint64 expected locales.PluralRule }{ // { // num1: 1, // v1: 1, // num2: 2, // v2: 2, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.RangePluralRule(tt.num1, tt.v1, tt.num2, tt.v2) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestOrdinalPlurals(t *testing.T) {
tests := []struct { num float64 v uint64 expected locales.PluralRule }{ // { // num: 1, // v: 0, // expected: locales.PluralRuleOne, // }, // { // num: 2, // v: 0, // expected: locales.PluralRuleTwo, // }, // { // num: 3, // v: 0, // expected: locales.PluralRuleFew, // }, // { // num: 4, // v: 0, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.OrdinalPluralRule(tt.num, tt.v) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestCardinalPlurals(t *testing.T) { trans := New() tests := []struct { num float64 v uint64 expected locales.PluralRule }{ // { // num: 1, // v: 0, // expected: locales.PluralRuleOne, // }, // { // num: 4, // v: 0, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.CardinalPluralRule(tt.num, tt.v) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestDaysAbbreviated(t *testing.T) { trans := New() days := trans.WeekdaysAbbreviated() for i, day := range days { s := trans.WeekdayAbbreviated(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Sun", // }, // { // idx: 1, // expected: "Mon", // }, // { // idx: 2, // expected: "Tue", // }, // { // idx: 3, // expected: "Wed", // }, // { // idx: 4, // expected: "Thu", // }, // { // idx: 5, // expected: "Fri", // }, // { // idx: 6, // expected: "Sat", // }, } for _, tt := range tests { s := trans.WeekdayAbbreviated(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysNarrow(t *testing.T) { trans := New() days := trans.WeekdaysNarrow() for i, day := range days { s := trans.WeekdayNarrow(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", string(day), s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "S", // }, // { // idx: 1, // expected: "M", // }, // { // idx: 2, // expected: "T", // }, // { // idx: 3, // expected: "W", // }, // { // idx: 4, // expected: "T", // }, // { // idx: 5, // expected: "F", // }, // { // idx: 6, // expected: "S", // }, } for _, tt := range tests { s := trans.WeekdayNarrow(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysShort(t *testing.T) { trans := New() days := trans.WeekdaysShort() for i, day := range days { s := trans.WeekdayShort(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Su", // }, // { // idx: 1, // expected: "Mo", // }, // { // idx: 2, // expected: "Tu", // }, // { // idx: 3, // expected: "We", // }, // { // idx: 4, // expected: "Th", // }, // { // idx: 5, // expected: "Fr", // }, // { // idx: 6, // expected: "Sa", // }, } for _, tt := range tests { s := trans.WeekdayShort(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysWide(t *testing.T) { trans := New() days := trans.WeekdaysWide() for i, day := range days { s := trans.WeekdayWide(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Sunday", // }, // { // idx: 1, // expected: "Monday", // }, // { // idx: 2, // expected: "Tuesday", // }, // { // idx: 3, // expected: "Wednesday", // }, // { // idx: 4, // expected: "Thursday", // }, // { // idx: 5, 
// expected: "Friday", // }, // { // idx: 6, // expected: "Saturday", // }, } for _, tt := range tests { s := trans.WeekdayWide(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsAbbreviated(t *testing.T) { trans := New() months := trans.MonthsAbbreviated() for i, month := range months { s := trans.MonthAbbreviated(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "Jan", // }, // { // idx: 2, // expected: "Feb", // }, // { // idx: 3, // expected: "Mar", // }, // { // idx: 4, // expected: "Apr", // }, // { // idx: 5, // expected: "May", // }, // { // idx: 6, // expected: "Jun", // }, // { // idx: 7, // expected: "Jul", // }, // { // idx: 8, // expected: "Aug", // }, // { // idx: 9, // expected: "Sep", // }, // { // idx: 10, // expected: "Oct", // }, // { // idx: 11, // expected: "Nov", // }, // { // idx: 12, // expected: "Dec", // }, } for _, tt := range tests { s := trans.MonthAbbreviated(time.Month(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsNarrow(t *testing.T) { trans := New() months := trans.MonthsNarrow() for i, month := range months { s := trans.MonthNarrow(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "J", // }, // { // idx: 2, // expected: "F", // }, // { // idx: 3, // expected: "M", // }, // { // idx: 4, // expected: "A", // }, // { // idx: 5, // expected: "M", // }, // { // idx: 6, // expected: "J", // }, // { // idx: 7, // expected: "J", // }, // { // idx: 8, // expected: "A", // }, // { // idx: 9, // expected: "S", // }, // { // idx: 10, // expected: "O", // }, // { // idx: 11, // expected: "N", // }, // { // idx: 12, // expected: "D", // }, } for _, tt := range tests { s := trans.MonthNarrow(time.Month(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsWide(t *testing.T) { trans := New() months := trans.MonthsWide() for i, month := range months { s := trans.MonthWide(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "January", // }, // { // idx: 2, // expected: "February", // }, // { // idx: 3, // expected: "March", // }, // { // idx: 4, // expected: "April", // }, // { // idx: 5, // expected: "May", // }, // { // idx: 6, // expected: "June", // }, // { // idx: 7, // expected: "July", // }, // { // idx: 8, // expected: "August", // }, // { // idx: 9, // expected: "September", // }, // { // idx: 10, // expected: "October", // }, // { // idx: 11, // expected: "November", // }, // { // idx: 12, // expected: "December", // }, } for _, tt := range tests { s := string(trans.MonthWide(time.Month(tt.idx))) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeFull(t *testing.T) { // loc, err := time.LoadLocation("America/Toronto") // if err != nil { // t.Errorf("Expected '<nil>' Got '%s'", err) // } // fixed := time.FixedZone("OTHER", -4) tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, loc), // expected: "9:05:01 am Eastern Standard Time", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, fixed), // expected: "8:05:01 pm OTHER", // }, } trans := New() for _, tt := range tests { s := 
trans.FmtTimeFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeLong(t *testing.T) { // loc, err := time.LoadLocation("America/Toronto") // if err != nil { // t.Errorf("Expected '<nil>' Got '%s'", err) // } tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, loc), // expected: "9:05:01 am EST", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, loc), // expected: "8:05:01 pm EST", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeLong(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeMedium(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, time.UTC), // expected: "9:05:01 am", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, time.UTC), // expected: "8:05:01 pm", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeMedium(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeShort(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, time.UTC), // expected: "9:05 am", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, time.UTC), // expected: "8:05 pm", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeShort(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateFull(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "Wednesday, February 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateLong(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "February 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateLong(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateMedium(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "Feb 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateMedium(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateShort(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "2/3/16", // }, // { // t: time.Date(-500, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "2/3/500", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateShort(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtNumber(t *testing.T) { tests := []struct { num float64 v uint64 expected string }{ // { // num: 1123456.5643, // v: 2, // expected: "1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // expected: "1,123,456.6", // }, // { // num: 221123456.5643, // v: 3, // expected: "221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: 0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // 
}, } trans := New() for _, tt := range tests { s := trans.FmtNumber(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtCurrency(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643, // v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "-$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.CAD, // expected: "-CAD 221,123,456.564", // }, // { // num: 0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, // expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtCurrency(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtAccounting(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643, // v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "($221,123,456.564)", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.CAD, // expected: "(CAD 221,123,456.564)", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, // expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtAccounting(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtPercent(t *testing.T) { tests := []struct { num float64 v uint64 expected string }{ // { // num: 15, // v: 0, // expected: "15%", // }, // { // num: 15, // v: 2, // expected: "15.00%", // }, // { // num: 434.45, // v: 0, // expected: "434%", // }, // { // num: 34.4, // v: 2, // expected: "34.40%", // }, // { // num: -34, // v: 0, // expected: "-34%", // }, } trans := New() for _, tt := range tests { s := trans.FmtPercent(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }
trans := New()
verlet.rs
fn verlet(mut pos: f64, acc: f64, dt: f64) -> f64
fn stormer_verlet(mut pos: f64, acc: f64, dt: f64) -> (f64, f64) { let mut prev_pos = pos; let mut time = 0.0; let mut vel = 0.0; while pos > 0.0 { time += dt; let temp_pos = pos; pos = pos * 2.0 - prev_pos + acc * dt * dt; prev_pos = temp_pos; // Because acceleration is constant, velocity is // straightforward vel += acc * dt; } (time, vel) } fn velocity_verlet(mut pos: f64, acc: f64, dt: f64) -> (f64, f64) { let mut time = 0.0; let mut vel = 0.0; while pos > 0.0 { time += dt; pos += vel * dt + 0.5 * acc * dt * dt; vel += acc * dt; } (time, vel) } fn main() { let time_v = verlet(5.0, -10.0, 0.01); let (time_sv, vel_sv) = stormer_verlet(5.0, -10.0, 0.01); let (time_vv, vel_vv) = velocity_verlet(5.0, -10.0, 0.01); println!("[#] Time for Verlet integration is:"); println!("{}", time_v); println!("[#] Time for Stormer Verlet integration is:"); println!("{}", time_sv); println!("[#] Velocity for Stormer Verlet integration is:"); println!("{}", vel_sv); println!("[#] Time for velocity Verlet integration is:"); println!("{}", time_vv); println!("[#] Velocity for velocity Verlet integration is:"); println!("{}", vel_vv); }
{ let mut prev_pos = pos; let mut time = 0.0; while pos > 0.0 { time += dt; let temp_pos = pos; pos = pos * 2.0 - prev_pos + acc * dt * dt; prev_pos = temp_pos; } time }
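A sanity check for the parameters used in main(), so the three integrators can be compared against the closed form:

// With constant acceleration a = -10 and initial height x0 = 5, the exact
// solution x(t) = x0 + a*t^2/2 reaches the ground at
// t = sqrt(2*x0/|a|) = sqrt(2*5/10) = 1.0 s, with impact velocity
// v = a*t = -10.0. All three integrators above should approach these
// values as dt -> 0.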
class_wrapper.py
""" The class wrapper for the networks """ # Built-in import os import time # Torch import torch from torch import nn from torch.utils.tensorboard import SummaryWriter from torchsummary import summary # Libs import numpy as np # Own module class Network(object): def __init__(self, model_fn, flags, train_loader, test_loader, ckpt_dir=os.path.join(os.path.abspath(''), 'models'), inference_mode=False, saved_model=None): self.model_fn = model_fn # The model maker function self.flags = flags # The Flags containing the specs if inference_mode: # If inference mode, use saved model self.ckpt_dir = os.path.join(ckpt_dir, saved_model) self.saved_model = saved_model else: # training mode, create a new ckpt folder self.ckpt_dir = os.path.join(ckpt_dir, time.strftime('%Y%m%d_%H%M%S', time.localtime())) self.model = self.create_model() # The model itself self.loss = self.make_loss() # The loss function self.optm = self.make_optimizer() # The optimizer self.train_loader = train_loader # The train data loader self.test_loader = test_loader # The test data loader self.log = SummaryWriter(self.ckpt_dir) # Create a summary writer for keeping the summary to the tensor board self.best_validation_loss = float('inf') # Set the BVL to large number def create_model(self): """ Function to create the network module from provided model fn and flags :return: the created nn module """ model = self.model_fn(self.flags) #summary(model, input_size=(128, 8)) print(model) return model def make_loss(self, logit=None, labels=None): """ Create a tensor that represents the loss. This is consistant both at training time \ and inference time for Backward model :param logit: The output of the network :return: the total loss """ if logit is None: return None MSE_loss = nn.functional.mse_loss(logit, labels) # The MSE Loss of the BDY_loss = 0 # Implemenation later return MSE_loss + BDY_loss def make_optimizer(self): """ Make the corresponding optimizer from the flags. Only below optimizers are allowed. Welcome to add more :return: """ if self.flags.optim == 'Adam': op = torch.optim.Adam(self.model.parameters(), lr=self.flags.lr, weight_decay=self.flags.reg_scale) elif self.flags.optim == 'RMSprop': op = torch.optim.RMSprop(self.model.parameters(), lr=self.flags.lr, weight_decay=self.flags.reg_scale) elif self.flags.optim == 'SGD': op = torch.optim.SGD(self.model.parameters(), lr=self.flags.lr, weight_decay=self.flags.reg_scale) else: raise Exception("Your Optimizer is neither Adam, RMSprop or SGD, please change in param or contact Ben")
""" Saving the model to the current check point folder with name best_model.pt :return: None """ #torch.save(self.model.state_dict, os.path.join(self.ckpt_dir, 'best_model_state_dict.pt')) torch.save(self.model, os.path.join(self.ckpt_dir, 'best_model.pt')) def load(self): """ Loading the model from the check point folder with name best_model.pt :return: """ #self.model.load_state_dict(torch.load(os.path.join(self.ckpt_dir, 'best_model_state_dict.pt'))) self.model.load(torch.load(os.path.join(self.ckpt_dir, 'best_model.pt'))) def train(self): """ The major training function. This would start the training using information given in the flags :return: None """ cuda = True if torch.cuda.is_available() else False if cuda: self.model.cuda() for epoch in range(self.flags.train_step): # Set to Training Mode train_loss = 0 self.model.train() for j, (geometry, spectra) in enumerate(self.train_loader): if cuda: geometry = geometry.cuda() # Put data onto GPU spectra = spectra.cuda() # Put data onto GPU self.optm.zero_grad() # Zero the gradient first logit = self.model(geometry) # Get the output loss = self.make_loss(logit, spectra) # Get the loss tensor loss.backward() # Calculate the backward gradients self.optm.step() # Move one step the optimizer train_loss += loss # Aggregate the loss if epoch % self.flags.eval_step: # For eval steps, do the evaluations and tensor board # Record the training loss to the tensorboard train_avg_loss = train_loss.data.numpy() / (j+1) self.log.add_scalar('Loss/train', train_avg_loss, epoch) # Set to Evaluation Mode self.model.eval() print("Doing Evaluation on the model now") test_loss = 0 for j, (geometry, spectra) in enumerate(self.test_loader): # Loop through the eval set if cuda: geometry = geometry.cuda() spectra = spectra.cuda() logit = self.model(geometry) loss = self.make_loss(logit, spectra) # compute the loss test_loss += loss # Aggregate the loss # Record the testing loss to the tensorboard test_avg_loss = test_loss.data.numpy() / (j+1) self.log.add_scalar('Loss/test', test_avg_loss, epoch) print("This is Epoch %d, training loss %.5f, validation loss %.5f" \ % (epoch, train_avg_loss, test_avg_loss )) # Model improving, save the model down if test_avg_loss < self.best_validation_loss: self.best_validation_loss = test_avg_loss self.save() print("Saving the model down...") if self.best_validation_loss < self.flags.stop_threshold: print("Training finished EARLIER at epoch %d, reaching loss of %.5f" %\ (epoch, self.best_validation_loss)) return None def evaluate(self, save_dir='data/'): self.load() self.model.eval() # Evaluation mode # Get the file names Ypred_file = os.path.join(save_dir, 'test_Ypred_{}.csv'.format(self.saved_model)) Xtruth_file = os.path.join(save_dir, 'test_Xtruth_{}.csv'.format(self.saved_model)) Ytruth_file = os.path.join(save_dir, 'test_Ytruth_{}.csv'.format(self.saved_model)) #Xpred_file = os.path.join(save_dir, 'test_Xpred_{}.csv'.format(self.saved_model)) # For pure forward model, there is no Xpred # Open those files to append with open(Xtruth_file,'a') as fxt,open(Ytruth_file, 'a') as fyt, open(Ypred_file,'a') as fyp: # Loop through the eval data and evaluate for ind, (geometry, spectra) in enumerate(self.test_loader): logits = self.model(geometry) np.savetxt(fxt, geometry.numpy(), fmt='%.3f') np.savetxt(fyt, spectra.numpy(), fmt='%.3f') np.savetxt(fyp, logits.numpy(), fmt='%.3f')
return op def save(self):
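A hypothetical flags object satisfying the attributes Network reads above; the values are illustrative, not from the original project:

class Flags:
    optim = 'Adam'           # one of 'Adam', 'RMSprop', 'SGD'
    lr = 1e-3                # learning rate
    reg_scale = 1e-4         # weight decay
    train_step = 300         # number of epochs
    eval_step = 10           # evaluate every eval_step epochs
    stop_threshold = 1e-4    # early-stopping loss threshold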
to_der.rs
use asn1_rs::*; use hex_literal::hex; // use nom::HexDisplay; use std::convert::{TryFrom, TryInto}; #[test] fn to_der_length() { // indefinite length let length = Length::Indefinite; let v = length.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x80]); // definite, short form let length = Length::Definite(3); let v = length.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x03]); // definite, long form let length = Length::Definite(250); let v = length.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x81, 0xfa]); } #[test] fn to_der_length_long() { let s = core::str::from_utf8(&[0x41; 256]).unwrap(); let v = s.to_der_vec().expect("serialization failed"); assert_eq!(&v[..4], &[0x0c, 0x82, 0x01, 0x00]); assert_eq!(&v[4..], s.as_bytes()); } #[test] fn to_der_tag() { // short tag, UNIVERSAL let v = (Class::Universal, false, Tag(0x1a)) .to_der_vec() .expect("serialization failed"); assert_eq!(&v, &[0x1a]); // short tag, APPLICATION let v = (Class::Application, false, Tag(0x1a)) .to_der_vec() .expect("serialization failed"); assert_eq!(&v, &[0x1a | (0b01 << 6)]); // short tag, constructed let v = (Class::Universal, true, Tag(0x10)) .to_der_vec() .expect("serialization failed"); assert_eq!(&v, &[0x30]); // long tag, UNIVERSAL let v = (Class::Universal, false, Tag(0x1a1a)) .to_der_vec() .expect("serialization failed"); assert_eq!(&v, &[0b1_1111, 0x9a, 0x34]); } #[test] fn to_der_header() { // simple header let header = Header::new_simple(Tag::Integer); let v = header.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x2, 0x0]); // indefinite length let header = Header::new(Class::Universal, false, Tag::Integer, Length::Indefinite); let v = header.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x2, 0x80]); } #[test] fn to_der_any() { let header = Header::new_simple(Tag::Integer); let any = Any::new(header, &hex!("02")); let v = any.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x02, 0x01, 0x02]); } #[test] fn to_der_any_raw() { let header = Header::new(Class::Universal, false, Tag::Integer, Length::Definite(3)); let any = Any::new(header, &hex!("02")); // to_vec should compute the length let v = any.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x02, 0x01, 0x02]); // to_vec_raw will use the header as provided let v = any.to_der_vec_raw().expect("serialization failed"); assert_eq!(&v, &[0x02, 0x03, 0x02]); } #[test] fn to_der_bitstring() { let bitstring = BitString::new(6, &hex!("6e 5d c0")); let v = bitstring.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("03 04 06 6e 5d c0")); let (_, result) = BitString::from_der(&v).expect("parsing failed"); assert!(bitstring.eq(&result)); } #[test] fn to_der_bool() { let v = Boolean::new(0xff) .to_der_vec() .expect("serialization failed"); assert_eq!(&v, &[0x01, 0x01, 0xff]); // let v = false.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x01, 0x01, 0x00]); // let v = true.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x01, 0x01, 0xff]); // raw value (not 0 of 0xff) let v = Boolean::new(0x8a) .to_der_vec_raw() .expect("serialization failed"); assert_eq!(&v, &[0x01, 0x01, 0x8a]); } #[test] fn to_der_generalizedtime() { // date without millisecond let dt = ASN1DateTime::new(1999, 12, 31, 23, 59, 59, None, ASN1TimeZone::Z); let time = GeneralizedTime::new(dt); let v = time.to_der_vec().expect("serialization failed"); assert_eq!(&v[..2], &hex!("18 0f")); assert_eq!(&v[2..], b"19991231235959Z"); let (_, time2) = 
GeneralizedTime::from_der(&v).expect("decoding serialized object failed"); assert!(time.eq(&time2)); // // date with millisecond let dt = ASN1DateTime::new(1999, 12, 31, 23, 59, 59, Some(123), ASN1TimeZone::Z); let time = GeneralizedTime::new(dt); let v = time.to_der_vec().expect("serialization failed"); assert_eq!(&v[..2], &hex!("18 13")); assert_eq!(&v[2..], b"19991231235959.123Z"); let (_, time2) = GeneralizedTime::from_der(&v).expect("decoding serialized object failed"); assert!(time.eq(&time2)); } fn encode_decode_assert_int<T>(t: T, expected: &[u8]) where T: ToDer + std::fmt::Debug + Eq, for<'a> T: TryFrom<Integer<'a>, Error = Error>, { let v = t.to_der_vec().expect("serialization failed"); assert_eq!(&v, expected); let (_, obj) = Integer::from_der(&v).expect("decoding serialized object failed"); let t2: T = obj.try_into().unwrap(); assert_eq!(t, t2); } #[test] fn to_der_integer() { let int = Integer::new(&hex!("02")); let v = int.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x02, 0x01, 0x02]); // from_u32 let int = Integer::from_u32(2); let v = int.to_der_vec().expect("serialization failed"); assert_eq!(&v, &[0x02, 0x01, 0x02]); // impl ToDer for primitive types encode_decode_assert_int(2u32, &[0x02, 0x01, 0x02]); // signed i32 (> 0) encode_decode_assert_int(4, &[0x02, 0x01, 0x04]); // signed i32 (< 0) encode_decode_assert_int(-4, &[0x02, 0x05, 0x00, 0xff, 0xff, 0xff, 0xfc]); } #[test] fn
() { let bytes: &[u8] = &hex!("01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f"); let s = OctetString::from(bytes); let v = s.to_der_vec().expect("serialization failed"); assert_eq!(&v[..2], &hex!("04 0f")); assert_eq!(&v[2..], bytes); let (_, s2) = OctetString::from_der(&v).expect("decoding serialized object failed"); assert!(s.eq(&s2)); } #[test] fn to_der_real_binary() { // base = 2, value = 4 let r = Real::binary(2.0, 2, 1); let v = r.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("09 03 80 02 01")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!((r.f64() - result.f64()).abs() < f64::EPSILON); // // base = 2, value = 0.5 let r = Real::binary(0.5, 2, 0); let v = r.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("09 03 80 ff 01")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!((r.f64() - result.f64()).abs() < f64::EPSILON); // // base = 2, value = 3.25, but change encoding base (8) let r = Real::binary(3.25, 2, 0).with_enc_base(8); let v = r.to_der_vec().expect("serialization failed"); // note: this encoding has a scale factor (not DER compliant) assert_eq!(&v, &hex!("09 03 94 ff 0d")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!((r.f64() - result.f64()).abs() < f64::EPSILON); // // base = 2, value = 0.00390625, but change encoding base (16) let r = Real::binary(0.00390625, 2, 0).with_enc_base(16); let v = r.to_der_vec().expect("serialization failed"); // note: this encoding has a scale factor (not DER compliant) assert_eq!(&v, &hex!("09 03 a0 fe 01")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!((r.f64() - result.f64()).abs() < f64::EPSILON); // // 2 octets for exponent, negative exponent and abs(exponent) is all 1's and fills the whole octet(s) let r = Real::binary(3.0, 2, -1020); let v = r.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("09 04 81 fc 04 03")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!((r.f64() - result.f64()).abs() < f64::EPSILON); // // 3 octets for exponent, and // check that first 9 bits for exponent are not all 1's let r = Real::binary(1.0, 2, 262140); let v = r.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("09 05 82 03 ff fc 01")); let (_, result) = Real::from_der(&v).expect("parsing failed"); // XXX value cannot be represented as f64 (inf) assert!(result.f64().is_infinite()); // // >3 octets for exponent, and // mantissa < 0 let r = Real::binary(-1.0, 2, 76354972); let v = r.to_der_vec().expect("serialization failed"); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert_eq!(&v, &hex!("09 07 c3 04 04 8d 15 9c 01")); // XXX value cannot be represented as f64 (-inf) assert!(result.f64().is_infinite()); } #[test] fn to_der_real_special() { // ZERO let r = Real::Zero; let v = r.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("09 00")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!(r.eq(&result)); // INFINITY let r = Real::Infinity; let v = r.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("09 01 40")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!(r.eq(&result)); // MINUS INFINITY let r = Real::NegInfinity; let v = r.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("09 01 41")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!(r.eq(&result)); } #[test] fn to_der_real_string() { // non-zero value, base 10 let r = 
Real::new(1.2345); let v = r.to_der_vec().expect("serialization failed"); // assert_eq!(&v, &hex!("09 00")); let (_, result) = Real::from_der(&v).expect("parsing failed"); assert!(r.eq(&result)); } #[test] fn to_der_sequence() { let it = [2, 3, 4].iter(); let seq = Sequence::from_iter_to_der(it).unwrap(); let v = seq.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("30 09 02 01 02 02 01 03 02 01 04")); let (_, seq2) = Sequence::from_der(&v).expect("decoding serialized object failed"); assert_eq!(seq, seq2); // Vec<T>::ToDer let v = vec![2, 3, 4].to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("30 09 02 01 02 02 01 03 02 01 04")); } #[test] fn to_der_tagged_explicit() { let tagged = TaggedParser::new_explicit(Class::ContextSpecific, 1, 2u32); let v = tagged.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("a1 03 02 01 02")); let (_, t2) = TaggedParser::<Explicit, u32>::from_der(&v).expect("decoding serialized object failed"); assert!(tagged.eq(&t2)); // TaggedValue API let tagged = TaggedValue::explicit(2u32); let v = tagged.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("a1 03 02 01 02")); let (_, t2) = TaggedExplicit::<u32, 1>::from_der(&v).expect("decoding serialized object failed"); assert!(tagged.eq(&t2)); } #[test] fn to_der_tagged_implicit() { let tagged = TaggedParser::new_implicit(Class::ContextSpecific, false, 1, 2u32); let v = tagged.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("81 01 02")); let (_, t2) = TaggedParser::<Implicit, u32>::from_der(&v).expect("decoding serialized object failed"); assert!(tagged.eq(&t2)); // TaggedValue API let tagged = TaggedValue::implicit(2u32); let v = tagged.to_der_vec().expect("serialization failed"); assert_eq!(&v, &hex!("81 01 02")); let (_, t2) = TaggedImplicit::<u32, 1>::from_der(&v).expect("decoding serialized object failed"); assert!(tagged.eq(&t2)); } #[test] fn to_der_utctime() { let dt = ASN1DateTime::new(99, 12, 31, 23, 59, 59, None, ASN1TimeZone::Z); let time = UtcTime::new(dt); let v = time.to_der_vec().expect("serialization failed"); assert_eq!(&v[..2], &hex!("17 0d")); assert_eq!(&v[2..], b"991231235959Z"); let (_, time2) = UtcTime::from_der(&v).expect("decoding serialized object failed"); assert!(time.eq(&time2)); } #[test] fn to_der_utf8string() { let s = Utf8String::from("abcdef"); let v = s.to_der_vec().expect("serialization failed"); assert_eq!(&v[..2], &hex!("0c 06")); assert_eq!(&v[2..], b"abcdef"); let (_, s2) = Utf8String::from_der(&v).expect("decoding serialized object failed"); assert!(s.eq(&s2)); } #[test] fn to_der_universalstring() { let s = UniversalString::from("abcdef"); let v = s.to_der_vec().expect("serialization failed"); assert_eq!( &v, &hex!("1c 18 00000061 00000062 00000063 00000064 00000065 00000066") ); let (_, s2) = UniversalString::from_der(&v).expect("decoding serialized object failed"); assert!(s.eq(&s2)); }
to_der_octetstring
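The length tests above pin down DER's two length forms: a single octet for lengths up to 127, and a long form whose first octet is 0x80 plus the number of following big-endian length octets. A minimal Python sketch of that rule (independent of asn1-rs, added here only for illustration), checked against the same vectors as the tests:

def der_length(n: int) -> bytes:
    # Short form: one octet with the high bit clear, for lengths up to 127.
    if n < 0x80:
        return bytes([n])
    # Long form: first octet is 0x80 | number-of-length-octets,
    # followed by the length itself as a big-endian integer.
    body = n.to_bytes((n.bit_length() + 7) // 8, "big")
    return bytes([0x80 | len(body)]) + body

assert der_length(3) == bytes([0x03])          # definite, short form
assert der_length(250) == bytes([0x81, 0xFA])  # definite, long form
assert der_length(256) == bytes([0x82, 0x01, 0x00])  # as in to_der_length_long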
essay-template.js
import React from "react" import { graphql } from "gatsby" import Bio from "../components/bio" import Layout from "../layouts/layout" const EssayTemplate = ({ data, _pageContext, location }) => { const post = data.markdownRemark // const { _previous, _next } = pageContext return ( <Layout location={location} title={post.frontmatter.title} description={post.frontmatter.description || post.excerpt}> <article> <header> <h1 style={{ textAlign: `center`, marginBottom: 0 }}> {post.frontmatter.title}
</p> </header> <section dangerouslySetInnerHTML={{ __html: post.html }} /> <div> <hr style={{ margin: `30px 0` }} /> <Bio /> </div> </article> </Layout> ) } export default EssayTemplate export const pageQuery = graphql` query EssayBySlug($slug: String!) { markdownRemark(fields: { slug: { eq: $slug } }) { id excerpt(pruneLength: 160) html frontmatter { title date(formatString: "MMMM DD, YYYY") description } } } `
</h1> <p className="sans" style={{ display: `block`, textAlign: `center`, fontSize: 20 }}> {post.frontmatter.date}
ami2text.py
import xml.etree.ElementTree as ET import os import codecs import logging import sys import argparse logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) program = os.path.basename(sys.argv[0]) logger = logging.getLogger(program) def convert_ami(ami_root_dir, txt_output_dir):
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Convert AMI corpus word-level XML files to plain-text transcripts.')
    parser.add_argument('-a', '--ami-root-dir', dest='ami_root_dir',
                        help='AMI root directory; the corpus is read from this directory',
                        type=str, default='./data/ami_raw/words/')
    parser.add_argument('-t', '--txt-output-dir', dest='txt_output_dir',
                        help='Txt output directory', type=str,
                        default='./data/ami_transcripts/')
    args = parser.parse_args()
    logger.info('Using ami directory: ' + args.ami_root_dir)
    logger.info('Output text is saved in: ' + args.txt_output_dir)
    convert_ami(args.ami_root_dir, args.txt_output_dir)
    logger.info('Starting conversion process...')
    os.makedirs(txt_output_dir, exist_ok=True)  # ensure the output directory exists
    for myfile in os.listdir(ami_root_dir):
        if myfile.endswith('.xml'):
            with codecs.open(os.path.join(ami_root_dir, myfile), 'r', encoding='utf-8', errors='replace') as in_file:
                raw = in_file.read()
            tree = ET.fromstring(raw)
            # encoding='unicode' yields str; encoding='utf-8' would yield bytes,
            # which cannot be joined with the unicode separator below
            text = ET.tostring(tree, encoding='unicode', method='text')
            output = u' '.join(text.split())
            filename = os.path.splitext(myfile)[0]
            output_file = os.path.join(txt_output_dir, filename + '.txt')
            with codecs.open(output_file, 'w', encoding='utf-8') as out_file:
                out_file.write(output)
            logger.info(output_file + ' written')
    logger.info('Conversion done.')
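For illustration, a small standalone sketch of what the ElementTree calls in convert_ami do: method='text' drops all tags and keeps only character data, which the join then normalizes to single spaces. The XML snippet here is invented, not a real AMI words file (real ones carry NITE attributes and ids, which text extraction ignores anyway):

import xml.etree.ElementTree as ET

# Invented snippet for illustration only.
sample = "<words><w>so</w> <w>welcome</w> <w>everybody</w></words>"
tree = ET.fromstring(sample)
text = ET.tostring(tree, encoding="unicode", method="text")
print(" ".join(text.split()))  # -> "so welcome everybody"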
histogram_component.ts
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ import { AfterViewInit, ChangeDetectionStrategy, ChangeDetectorRef, Component, ElementRef, Input, OnChanges, OnDestroy, ViewChild, } from '@angular/core'; import {fromEvent, Subject} from 'rxjs'; import {takeUntil} from 'rxjs/operators'; import * as d3 from '../../third_party/d3'; import {HCLColor} from '../../third_party/d3'; import { Bin, HistogramData, HistogramDatum, HistogramMode, TimeProperty, } from './histogram_types'; type BinScale = d3.ScaleLinear<number, number>; type CountScale = d3.ScaleLinear<number, number>; type TemporalScale = | d3.ScaleLinear<number, number> | d3.ScaleTime<number, number>; type D3ColorScale = d3.ScaleLinear<HCLColor, string>; interface Layout { histogramHeight: number; contentClientRect: {width: number; height: number}; } interface Scales { binScale: BinScale; countScale: CountScale; temporalScale: TemporalScale; d3ColorScale: D3ColorScale; } export interface TooltipData { xPositionInBinCoord: number; closestDatum: HistogramDatum; // Bin closest to the cursor in the `closestDatum`. closestBin: Bin; xAxis: { position: number; label: string; }; yAxis: { position: number; label: string; }; value: { position: {x: number; y: number}; label: string; }; } export interface LinkedTime { startStep: number; endStep: number | null; } @Component({ selector: 'tb-histogram', templateUrl: 'histogram_component.ng.html', styleUrls: ['histogram_component.css'], changeDetection: ChangeDetectionStrategy.OnPush, }) export class HistogramComponent implements AfterViewInit, OnChanges, OnDestroy { @ViewChild('main') private readonly main!: ElementRef; @ViewChild('xAxis') private readonly xAxis!: ElementRef; @ViewChild('yAxis') private readonly yAxis!: ElementRef; @ViewChild('content') private readonly content!: ElementRef; @ViewChild('histograms') private readonly histograms!: ElementRef; @Input() mode: HistogramMode = HistogramMode.OFFSET; @Input() timeProperty: TimeProperty = TimeProperty.STEP; @Input() color?: string; @Input() data!: HistogramData; @Input() linkedTime: LinkedTime | null = null; readonly HistogramMode = HistogramMode; readonly TimeProperty = TimeProperty; tooltipData: null | TooltipData = null; private ngUnsubscribe = new Subject<void>(); private readonly layout: Layout = { histogramHeight: 0, contentClientRect: {height: 0, width: 0}, }; scales: Scales | null = null; private formatters = { binNumber: d3.format('.3~s'), count: d3.format('.3n'), // DefinitelyTyped is incorrect that the `timeFormat` only takes `Date` as // an input. Better type it for downstream types. wallTime: d3.timeFormat('%m/%d %X') as unknown as ( dateSinceEpoch: number ) => string, step: d3.format('.0f'), relative: (timeDiffInMs: number): string => { // TODO(tensorboarad-team): this `.1r` drops important information and // needs to be fixed. For example, `24h` would be shown as `20h`. 
This // behavior is a carry over from vz-histogram-timeseries for now. return d3.format('.1r')(timeDiffInMs / 3.6e6) + 'h'; // Convert to hours. }, }; private domVisible = false; constructor(private readonly changeDetector: ChangeDetectorRef) { // `data` and layout are not be available at the constructor time. Since we // recalculate the scales after the view becomes first visible, let's just // initialize `scales` with their default values. // this.scales = this.computeScales([]); } ngOnChanges() { this.updateChartIfVisible(); } ngOnDestroy() { this.ngUnsubscribe.next(); this.ngUnsubscribe.complete(); } ngAfterViewInit() { fromEvent<MouseEvent>(this.main.nativeElement, 'mousemove', { passive: true, }) .pipe(takeUntil(this.ngUnsubscribe)) .subscribe((event) => this.onMouseMove(event)); } getCssTranslatePx(x: number, y: number): string { return `translate(${x}px, ${y}px)`; } getClosestBinFromBinCoordinate( datum: HistogramDatum, xInBinCoord: number ): Bin { if (!datum.bins.length) { return {x: 0, dx: 0, y: 0}; } const firstBin = datum.bins[0]; const lastBin = datum.bins.slice(-1)[0]; if (xInBinCoord < firstBin.x) return firstBin; if (xInBinCoord >= lastBin.x + lastBin.dx) return lastBin; const closestBin = datum.bins.find((bin) => { return bin.x <= xInBinCoord && xInBinCoord < bin.x + bin.dx; })!; return closestBin; } getUiCoordFromBinForContent(bin: Bin): {x: number; y: number} { if (!this.scales) return {x: 0, y: 0}; return { x: this.scales.binScale(getXCentroid(bin)), y: this.scales.countScale(bin.y), }; } getHistogramPath(datum: HistogramDatum): string { // Unlike other methods used in Angular template, if we return non-empty // value before the DOM and everything is initialized, this method can emit // junk (path with NaN) values causing browser to noisily print warnings. if (!this.scales || !datum.bins.length) return ''; const xScale = this.scales.binScale; const yScale = this.scales.countScale; const firstBin = datum.bins[0]; const lastBin = datum.bins.slice(-1)[0]; const pathBuilder = [`M${xScale(getXCentroid(firstBin))},${yScale(0)}`]; for (const bin of datum.bins) { pathBuilder.push(`L${xScale(getXCentroid(bin))},${yScale(bin.y)}`); } pathBuilder.push(`L${xScale(getXCentroid(lastBin))},${yScale(0)}`); return pathBuilder.join(''); } trackByWallTime(datum: HistogramDatum): number { return datum.wallTime; } // translates container for histogram so we can have more sensible coordinate // system for reasoning with the coordinate system of a histogram. getGroupTransform(datum: HistogramDatum): string { // Unlike other methods used in Angular template, if we return non-empty // value before the DOM and everything is initialized, this method can emit // junk (translate with NaN) values causing browser to noisily print // warnings. if (!this.scales || this.mode === HistogramMode.OVERLAY) { return ''; } return this.getCssTranslatePx( 0, this.scales.temporalScale(this.getTimeValue(datum)) ); } isLinkedTimeEnabled(linkedTime: LinkedTime | null): linkedTime is LinkedTime { return Boolean( this.mode === HistogramMode.OFFSET && this.timeProperty === TimeProperty.STEP && this.scales && linkedTime ); } isDatumInLinkedTimeRange(datum: HistogramDatum): boolean { if (!this.isLinkedTimeEnabled(this.linkedTime)) { return true; } if (this.linkedTime.endStep === null) { return this.linkedTime.startStep === datum.step; } return ( this.linkedTime.startStep <= datum.step && this.linkedTime.endStep >= datum.step ); } getHistogramFill(datum: HistogramDatum): string { return this.scales ? 
this.scales.d3ColorScale(this.getTimeValue(datum)) : ''; } getGridTickYLocs(): number[] { if (!this.scales || this.mode === HistogramMode.OFFSET) return []; const yScale = this.scales.countScale; return yScale.ticks().map((tick) => yScale(tick)); } onResize() { this.updateClientRects(); this.updateChartIfVisible(); } onVisibilityChange({visible}: {visible: boolean}) { this.domVisible = visible; if (!visible) return; this.updateClientRects(); this.updateChartIfVisible(); } private getTimeValue(datum: HistogramDatum): number { switch (this.timeProperty) { case TimeProperty.WALL_TIME: return datum.wallTime; case TimeProperty.STEP: return datum.step; case TimeProperty.RELATIVE: return datum.wallTime - this.data[0].wallTime; } } private updateClientRects() { if (this.content) { this.layout.contentClientRect = this.content.nativeElement.getBoundingClientRect(); this.layout.histogramHeight = this.layout.contentClientRect.height / 2.5; } } private updateChartIfVisible() { if (!this.domVisible) return; this.scales = this.computeScales(this.data); // Update axes DOM directly using d3 API. this.renderXAxis(); this.renderYAxis(); // Update Angular rendered part of the histogram. this.changeDetector.detectChanges(); } private computeScales(data: HistogramData): Scales { const {width, height} = this.layout.contentClientRect; // === Get counts from data for calculating domain below. === const {min: binMin, max: binMax} = getMinMax( data, (datum) => getMin(datum.bins, (binVal) => binVal.x), (datum) => getMax(datum.bins, ({x, dx}) => x + dx) ); const countMax = getMax(data, (datum) => { return getMax(datum.bins, ({y}) => y); }); // === Create scale and set the domain. === const binScale = d3.scaleLinear().domain([binMin, binMax]).nice(); const temporalScale = this.mode !== HistogramMode.OVERLAY && this.timeProperty == TimeProperty.WALL_TIME ? d3.scaleTime() : d3.scaleLinear(); const timeValues = data.map((datum) => this.getTimeValue(datum)); const {min: timeMin, max: timeMax} = getMinMax(timeValues, (val) => val); const temporalDomain = [timeMin, timeMax]; temporalScale.domain(temporalDomain); const countScale = d3.scaleLinear(); countScale.domain([0, countMax]); const d3Color = d3.hcl(this.color || '#000'); const d3ColorScale = d3.scaleLinear<HCLColor, string>(); d3ColorScale.domain(temporalDomain); // === Set range on scales. === // x-axis or bin scale does not change depending on a mode. binScale.range([0, width]); d3ColorScale.range([d3Color.brighter(), d3Color.darker()]); d3ColorScale.interpolate(d3.interpolateHcl); // Explanation of the coordinate systems: // When in the offset mode, we render in 2.5D. Y-axis both have temporal // element while some space is allocated to show magnitude of counts. To // make the coordinate system easier for the offset, we use `<g transform>` // to locate the histogram at the correct temporal axis and let `countScale` // only deal with the height of the histogram. // When in overlay mode, we have very simple 2D where temporal axis is not // used meaningfully and `countScale` act as the y-axis and thus spans // `[height, 0]`. if (this.mode === HistogramMode.OVERLAY) { temporalScale.range([height, height]); countScale.range([height, 0]); } else { const offsetAxisHeight = this.mode === HistogramMode.OFFSET ? 
height - this.layout.histogramHeight : 0; temporalScale.range([height - offsetAxisHeight, height]); countScale.range([0, -this.layout.histogramHeight]); } return { binScale, d3ColorScale, countScale, temporalScale, }; } private renderXAxis() { if (!this.scales) return; const {width} = this.layout.contentClientRect; const xAxis = d3 .axisBottom(this.scales.binScale) .ticks(Math.max(2, width / 20)); xAxis.tickFormat(this.formatters.binNumber); xAxis(d3.select(this.xAxis.nativeElement)); } private getYAxisFormatter() { // d3 on DefinitelyTyped is typed incorrectly and it does not allow function // that takes (d: Data) => string to be specified in the parameter unlike // the real d3. if (this.mode === HistogramMode.OVERLAY) { return this.formatters.count; } switch (this.timeProperty) { case TimeProperty.WALL_TIME: return this.formatters.wallTime; case TimeProperty.STEP: { return this.formatters.step; } case TimeProperty.RELATIVE: { return this.formatters.relative; } default: const _ = this.timeProperty as never; throw RangeError(`Y axis formatter for ${_} must be implemented`); } } private renderYAxis() { if (!this.scales) return; const yScale = this.mode === HistogramMode.OVERLAY ? this.scales.countScale : this.scales.temporalScale; const {height} = this.layout.contentClientRect; const yAxis = d3.axisRight(yScale).ticks(Math.max(2, height / 15)); // d3 on DefinitelyTyped is typed incorrectly and it does not allow function // that takes (d: Data) => string to be specified in the parameter unlike // the real d3. const anyYAxis = yAxis as any; anyYAxis.tickFormat(this.getYAxisFormatter()); yAxis(d3.select(this.yAxis.nativeElement)); } private findClosestDatumIndex(mouseEvent: MouseEvent): number { let cursor: Element | null = mouseEvent.target as Element; let child: Element = cursor; while (cursor && cursor !== this.histograms.nativeElement) { child = cursor; cursor = cursor.parentElement; } return !cursor ? -1 : Array.from(cursor.children).indexOf(child); } // This method is hard to precisely test with DOM. Instead of asserting on // DOM, we are exposing this method so it can be tested with a manual // invocation. onMouseMoveForTestOnly(mouseEvent: MouseEvent) { return this.onMouseMove(mouseEvent); } private onMouseMove(mouseEvent: MouseEvent) { if (!this.scales) return; const relativeX = mouseEvent.offsetX; const relativeY = mouseEvent.offsetY; const closestIndex = this.findClosestDatumIndex(mouseEvent); if (closestIndex < 0) return; const binCoord = this.scales.binScale.invert(relativeX); const closestDatum = this.data[closestIndex]; const closestBin = this.getClosestBinFromBinCoordinate( closestDatum, binCoord ); this.tooltipData = { value: { position: {x: relativeX, y: relativeY}, label: this.mode === HistogramMode.OFFSET ? this.formatters.count(closestBin.y) : `Step: ${this.formatters.step(closestDatum.step)}`, }, xAxis: { position: this.getUiCoordFromBinForContent(closestBin).x, label: this.formatters.binNumber(getXCentroid(closestBin)), }, yAxis: { position: this.scales.countScale( this.mode === HistogramMode.OFFSET ? 0 : closestBin.y ), label: this.mode === HistogramMode.OFFSET ? this.getYAxisFormatter()(this.getTimeValue(closestDatum)) : this.formatters.binNumber(closestBin.y), }, xPositionInBinCoord: binCoord, closestDatum, closestBin, }; this.changeDetector.detectChanges(); } } function
<T>(data: T[], valueAccessor: (val: T) => number): number { return data.reduce((prevMin, value) => { return Math.min(prevMin, valueAccessor(value)); }, Infinity); } function getMax<T>(data: T[], valueAccessor: (val: T) => number): number { return data.reduce((prevMax, value) => { return Math.max(prevMax, valueAccessor(value)); }, -Infinity); } /** * Returns min and max at the same time by iterating through data once. */ function getMinMax<T>( data: T[], lowerValueAccessor: (val: T) => number, upperValueAccessor?: (val: T) => number ): {min: number; max: number} { if (!upperValueAccessor) { upperValueAccessor = lowerValueAccessor; } let min = Infinity; let max = -Infinity; for (const datum of data) { min = Math.min(min, lowerValueAccessor(datum)); max = Math.max(max, upperValueAccessor(datum)); } return {min, max}; } function getXCentroid(bin: Bin): number { return bin.x + bin.dx * 0.5; }
getMin
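The getMin/getMax/getMinMax helpers above are plain single-pass folds; a Python transliteration (a sketch, not part of the component) showing the two-accessor form computeScales uses to derive the bin domain:

import math
from typing import Callable, Iterable, Optional, Tuple, TypeVar

T = TypeVar("T")

def get_min_max(
    data: Iterable[T],
    lower: Callable[[T], float],
    upper: Optional[Callable[[T], float]] = None,
) -> Tuple[float, float]:
    # Single pass over the data; `upper` defaults to `lower`,
    # mirroring the optional upperValueAccessor in the TS helper.
    if upper is None:
        upper = lower
    lo, hi = math.inf, -math.inf
    for d in data:
        lo = min(lo, lower(d))
        hi = max(hi, upper(d))
    return lo, hi

# Bin domain as in computeScales: min over bin.x, max over bin.x + bin.dx.
bins = [{"x": 0.0, "dx": 1.0}, {"x": 1.0, "dx": 2.0}]
print(get_min_max(bins, lambda b: b["x"], lambda b: b["x"] + b["dx"]))  # (0.0, 3.0)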
argmax_op_test.py
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.ops.argmax_op.""" import functools import numpy as np from tensorflow.python.framework import dtypes from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.platform import test class ArgMaxTest(test.TestCase): def _testArg(self, method, x, axis, expected_values, use_gpu=False, expected_err_re=None): with self.session(use_gpu=use_gpu): ans = method(x, axis=axis) if expected_err_re is None: tf_ans = self.evaluate(ans) # Defaults to int64 output. self.assertEqual(np.int64, tf_ans.dtype) self.assertAllEqual(tf_ans, expected_values) self.assertShapeEqual(expected_values, ans) else: with self.assertRaisesOpError(expected_err_re): self.evaluate(ans) def _testBothArg(self, method, x, axis, expected_values, expected_err_re=None):
def _testBasic(self, dtype): x = np.arange(200, dtype=np.float32).astype(dtype) np.random.shuffle(x) # Check that argmin and argmax match numpy along the primary axis self._testBothArg(math_ops.argmax, x, 0, x.argmax()) self._testBothArg(math_ops.argmin, x, 0, x.argmin()) def _testTieBreaking(self, dtype): x = np.zeros(200, dtype=dtype) # Check that argmin and argmax match numpy along the primary axis for # breaking ties. self._testBothArg(math_ops.argmax, x, 0, x.argmax()) self._testBothArg(math_ops.argmin, x, 0, x.argmin()) # Check that argmin and argmax match numpy along axis=1 for # breaking ties. x = np.array([[0, 0, 1, 1], [1, 1, 0, 0], [0, 1, 0, 1]], dtype=dtype) self._testBothArg(math_ops.argmax, x, 1, x.argmax(axis=1)) self._testBothArg(math_ops.argmin, x, 1, x.argmin(axis=1)) def _testDim(self, dtype): shape = (3, 2, 4, 5, 6, 3, 7) x = np.arange( functools.reduce(lambda x, y: x * y, shape), dtype=np.float32).astype(dtype) np.random.shuffle(x) x = x.reshape(shape) # Check that argmin and argmax match numpy along all axes for axis in range(-7, 7): self._testBothArg(math_ops.argmax, x, axis, x.argmax(axis)) self._testBothArg(math_ops.argmin, x, axis, x.argmin(axis)) def testFloat(self): self._testBasic(np.float32) self._testTieBreaking(np.float32) self._testDim(np.float32) def testFloatInt32Output(self): x = np.asarray(100 * np.random.randn(200), dtype=np.float32) expected_values = x.argmax() with self.session(): ans = math_ops.argmax(x, axis=0, output_type=dtypes.int32) tf_ans = self.evaluate(ans) self.assertEqual(np.int32, tf_ans.dtype) # The values are equal when comparing int32 to int64 because # the values don't have a range that exceeds 32-bit integers. self.assertAllEqual(tf_ans, expected_values) expected_values = x.argmin() with self.session(): ans = math_ops.argmin(x, axis=0, output_type=dtypes.int32) tf_ans = self.evaluate(ans) self.assertEqual(np.int32, tf_ans.dtype) self.assertAllEqual(tf_ans, expected_values) def testDouble(self): self._testBasic(np.float64) self._testTieBreaking(np.float64) self._testDim(np.float64) def testInt32(self): self._testBasic(np.int32) self._testTieBreaking(np.int32) self._testDim(np.int32) def testInt64(self): self._testBasic(np.int64) self._testTieBreaking(np.int64) self._testDim(np.int64) def testBool(self): self._testBasic(np.bool_) self._testTieBreaking(np.bool_) self._testDim(np.bool_) def testEmpty(self): with self.cached_session(): for op in math_ops.argmin, math_ops.argmax: with self.assertRaisesOpError( r"Reduction axis 0 is empty in shape \[0\]"): op([], 0).eval() @test_util.run_deprecated_v1 def testDefaultAxis(self): with self.cached_session(): for op in math_ops.argmin, math_ops.argmax: ans = op([1]).eval() self.assertAllEqual(ans, 0) @test_util.run_deprecated_v1 def testOutputEmpty(self): with self.cached_session(): for op in math_ops.argmin, math_ops.argmax: ret = op(array_ops.zeros(shape=[1, 0, 2]), axis=-1).eval() self.assertEqual(ret.shape, (1, 0)) if __name__ == "__main__": test.main()
self._testArg(method, x, axis, expected_values, True, expected_err_re) # Compilation time is too large with XLA/CPU autojit. if not test_util.is_xla_enabled(): self._testArg(method, x, axis, expected_values, False, expected_err_re)
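The tie-breaking cases above rely on argmax/argmin returning the first index among equal values, which is the same contract NumPy documents; a quick NumPy check of the exact array used in _testTieBreaking:

import numpy as np

# np.argmax/np.argmin return the FIRST index on ties, the behavior
# the _testTieBreaking cases pin down for the TF ops as well.
x = np.array([[0, 0, 1, 1],
              [1, 1, 0, 0],
              [0, 1, 0, 1]])
print(x.argmax(axis=1))  # [2 0 1] -- first maximal element per row
print(x.argmin(axis=1))  # [0 2 0] -- first minimal element per row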
station_live_board_api_controller_get1_parameters.go
// Code generated by go-swagger; DO NOT EDIT.

package t_r_a

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command

import (
	"context"
	"net/http"
	"time"

	"github.com/go-openapi/errors"
	"github.com/go-openapi/runtime"
	cr "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"
)

// NewStationLiveBoardAPIControllerGet1Params creates a new StationLiveBoardAPIControllerGet1Params object,
// with the default timeout for this client.
//
// Default values are not hydrated, since defaults are normally applied by the API server side.
//
// To enforce default values in parameter, use SetDefaults or WithDefaults.
func NewStationLiveBoardAPIControllerGet1Params() *StationLiveBoardAPIControllerGet1Params {
	return &StationLiveBoardAPIControllerGet1Params{
		timeout: cr.DefaultTimeout,
	}
}

// NewStationLiveBoardAPIControllerGet1ParamsWithTimeout creates a new StationLiveBoardAPIControllerGet1Params object
// with the ability to set a timeout on a request.
func NewStationLiveBoardAPIControllerGet1ParamsWithTimeout(timeout time.Duration) *StationLiveBoardAPIControllerGet1Params {
	return &StationLiveBoardAPIControllerGet1Params{
		timeout: timeout,
	}
}

// NewStationLiveBoardAPIControllerGet1ParamsWithContext creates a new StationLiveBoardAPIControllerGet1Params object
// with the ability to set a context for a request.
func NewStationLiveBoardAPIControllerGet1ParamsWithContext(ctx context.Context) *StationLiveBoardAPIControllerGet1Params {
	return &StationLiveBoardAPIControllerGet1Params{
		Context: ctx,
	}
}

// NewStationLiveBoardAPIControllerGet1ParamsWithHTTPClient creates a new StationLiveBoardAPIControllerGet1Params object
// with the ability to set a custom HTTPClient for a request.
func NewStationLiveBoardAPIControllerGet1ParamsWithHTTPClient(client *http.Client) *StationLiveBoardAPIControllerGet1Params {
	return &StationLiveBoardAPIControllerGet1Params{
		HTTPClient: client,
	}
}

/* StationLiveBoardAPIControllerGet1Params contains all the parameters to send to the API endpoint
   for the station live board Api controller get 1 operation.

   Typically these are written to a http.Request.
*/
type StationLiveBoardAPIControllerGet1Params struct {

	/* DollarCount.

	   Query the number of records
	*/
	DollarCount *bool

	/* DollarFilter.

	   Filter
	*/
	DollarFilter *string

	/* DollarFormat.

	   Specify the source format
	*/
	DollarFormat string

	/* DollarOrderby.

	   Sort order
	*/
	DollarOrderby *string

	/* DollarSelect.

	   Fields to select
	*/
	DollarSelect *string

	/* DollarSkip.

	   Skip the first N records
	*/
	DollarSkip *string

	/* DollarTop.

	   Take the first N records

	   Default: 30
	*/
	DollarTop *int64

	/* StationID.

	   Code of the station to query
	*/
	StationID string

	/* Health.

	   Add the parameter '?health=true' to query the health status of this API service
	*/
	Health *string

	timeout    time.Duration
	Context    context.Context
	HTTPClient *http.Client
}

// WithDefaults hydrates default values in the station live board Api controller get 1 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *StationLiveBoardAPIControllerGet1Params) WithDefaults() *StationLiveBoardAPIControllerGet1Params {
	o.SetDefaults()
	return o
}

// SetDefaults hydrates default values in the station live board Api controller get 1 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *StationLiveBoardAPIControllerGet1Params) SetDefaults() { var ( dollarTopDefault = int64(30) ) val := StationLiveBoardAPIControllerGet1Params{ DollarTop: &dollarTopDefault, } val.timeout = o.timeout val.Context = o.Context val.HTTPClient = o.HTTPClient *o = val } // WithTimeout adds the timeout to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithTimeout(timeout time.Duration) *StationLiveBoardAPIControllerGet1Params { o.SetTimeout(timeout) return o } // SetTimeout adds the timeout to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetTimeout(timeout time.Duration) { o.timeout = timeout } // WithContext adds the context to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithContext(ctx context.Context) *StationLiveBoardAPIControllerGet1Params { o.SetContext(ctx) return o } // SetContext adds the context to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetContext(ctx context.Context) { o.Context = ctx } // WithHTTPClient adds the HTTPClient to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithHTTPClient(client *http.Client) *StationLiveBoardAPIControllerGet1Params { o.SetHTTPClient(client) return o } // SetHTTPClient adds the HTTPClient to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetHTTPClient(client *http.Client) { o.HTTPClient = client } // WithDollarCount adds the dollarCount to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithDollarCount(dollarCount *bool) *StationLiveBoardAPIControllerGet1Params { o.SetDollarCount(dollarCount) return o } // SetDollarCount adds the dollarCount to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetDollarCount(dollarCount *bool) { o.DollarCount = dollarCount } // WithDollarFilter adds the dollarFilter to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithDollarFilter(dollarFilter *string) *StationLiveBoardAPIControllerGet1Params { o.SetDollarFilter(dollarFilter) return o } // SetDollarFilter adds the dollarFilter to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetDollarFilter(dollarFilter *string) { o.DollarFilter = dollarFilter } // WithDollarFormat adds the dollarFormat to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithDollarFormat(dollarFormat string) *StationLiveBoardAPIControllerGet1Params { o.SetDollarFormat(dollarFormat) return o } // SetDollarFormat adds the dollarFormat to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetDollarFormat(dollarFormat string) { o.DollarFormat = dollarFormat } // WithDollarOrderby adds the dollarOrderby to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithDollarOrderby(dollarOrderby *string) *StationLiveBoardAPIControllerGet1Params { o.SetDollarOrderby(dollarOrderby) return o } // SetDollarOrderby adds the dollarOrderby to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetDollarOrderby(dollarOrderby *string) { o.DollarOrderby = dollarOrderby } // 
WithDollarSelect adds the dollarSelect to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithDollarSelect(dollarSelect *string) *StationLiveBoardAPIControllerGet1Params { o.SetDollarSelect(dollarSelect) return o } // SetDollarSelect adds the dollarSelect to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetDollarSelect(dollarSelect *string) { o.DollarSelect = dollarSelect } // WithDollarSkip adds the dollarSkip to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithDollarSkip(dollarSkip *string) *StationLiveBoardAPIControllerGet1Params { o.SetDollarSkip(dollarSkip) return o } // SetDollarSkip adds the dollarSkip to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetDollarSkip(dollarSkip *string) { o.DollarSkip = dollarSkip } // WithDollarTop adds the dollarTop to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithDollarTop(dollarTop *int64) *StationLiveBoardAPIControllerGet1Params { o.SetDollarTop(dollarTop) return o } // SetDollarTop adds the dollarTop to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetDollarTop(dollarTop *int64) { o.DollarTop = dollarTop } // WithStationID adds the stationID to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithStationID(stationID string) *StationLiveBoardAPIControllerGet1Params { o.SetStationID(stationID) return o } // SetStationID adds the stationId to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetStationID(stationID string) { o.StationID = stationID } // WithHealth adds the health to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) WithHealth(health *string) *StationLiveBoardAPIControllerGet1Params { o.SetHealth(health) return o } // SetHealth adds the health to the station live board Api controller get 1 params func (o *StationLiveBoardAPIControllerGet1Params) SetHealth(health *string) { o.Health = health } // WriteToRequest writes these params to a swagger request func (o *StationLiveBoardAPIControllerGet1Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { if err := r.SetTimeout(o.timeout); err != nil { return err } var res []error if o.DollarCount != nil { // query param $count var qrDollarCount bool if o.DollarCount != nil { qrDollarCount = *o.DollarCount } qDollarCount := swag.FormatBool(qrDollarCount) if qDollarCount != "" { if err := r.SetQueryParam("$count", qDollarCount); err != nil { return err } } } if o.DollarFilter != nil { // query param $filter var qrDollarFilter string if o.DollarFilter != nil { qrDollarF
DollarFormat != "" { if err := r.SetQueryParam("$format", qDollarFormat); err != nil { return err } } if o.DollarOrderby != nil { // query param $orderby var qrDollarOrderby string if o.DollarOrderby != nil { qrDollarOrderby = *o.DollarOrderby } qDollarOrderby := qrDollarOrderby if qDollarOrderby != "" { if err := r.SetQueryParam("$orderby", qDollarOrderby); err != nil { return err } } } if o.DollarSelect != nil { // query param $select var qrDollarSelect string if o.DollarSelect != nil { qrDollarSelect = *o.DollarSelect } qDollarSelect := qrDollarSelect if qDollarSelect != "" { if err := r.SetQueryParam("$select", qDollarSelect); err != nil { return err } } } if o.DollarSkip != nil { // query param $skip var qrDollarSkip string if o.DollarSkip != nil { qrDollarSkip = *o.DollarSkip } qDollarSkip := qrDollarSkip if qDollarSkip != "" { if err := r.SetQueryParam("$skip", qDollarSkip); err != nil { return err } } } if o.DollarTop != nil { // query param $top var qrDollarTop int64 if o.DollarTop != nil { qrDollarTop = *o.DollarTop } qDollarTop := swag.FormatInt64(qrDollarTop) if qDollarTop != "" { if err := r.SetQueryParam("$top", qDollarTop); err != nil { return err } } } // path param StationID if err := r.SetPathParam("StationID", o.StationID); err != nil { return err } if o.Health != nil { // query param health var qrHealth string if o.Health != nil { qrHealth = *o.Health } qHealth := qrHealth if qHealth != "" { if err := r.SetQueryParam("health", qHealth); err != nil { return err } } } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil }
ilter = *o.DollarFilter } qDollarFilter := qrDollarFilter if qDollarFilter != "" { if err := r.SetQueryParam("$filter", qDollarFilter); err != nil { return err } } } // query param $format qrDollarFormat := o.DollarFormat qDollarFormat := qrDollarFormat if q
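WriteToRequest ultimately just serializes these fields into an OData-style query string plus the StationID path parameter. A Python sketch of the resulting request URL (the host and route here are hypothetical placeholders; only the parameter names and the $top default of 30 come from the generated code above):

from urllib.parse import urlencode

# Hypothetical base URL; the real host and route come from the swagger spec.
base = "https://example.invalid/v2/Rail/TRA/LiveBoard/Station/{StationID}"
station_id = "1000"

params = {"$top": 30, "$format": "JSON"}  # $top defaults to 30 per SetDefaults
url = base.format(StationID=station_id) + "?" + urlencode(params)
print(url)
# https://example.invalid/v2/Rail/TRA/LiveBoard/Station/1000?%24top=30&%24format=JSON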
factory_test.go
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package factory import ( "net/http" "net/http/httptest" "reflect" "testing" "time" "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/client-go/informers" clientset "k8s.io/client-go/kubernetes" restclient "k8s.io/client-go/rest" "k8s.io/client-go/tools/cache" utiltesting "k8s.io/client-go/util/testing" "k8s.io/kubernetes/pkg/api" apitesting "k8s.io/kubernetes/pkg/api/testing" "k8s.io/kubernetes/plugin/pkg/scheduler/algorithm" schedulerapi "k8s.io/kubernetes/plugin/pkg/scheduler/api" latestschedulerapi "k8s.io/kubernetes/plugin/pkg/scheduler/api/latest" "k8s.io/kubernetes/plugin/pkg/scheduler/schedulercache" "k8s.io/kubernetes/plugin/pkg/scheduler/util" ) const enableEquivalenceCache = true func
(t *testing.T) {
	handler := utiltesting.FakeHandler{
		StatusCode:   500,
		ResponseBody: "",
		T:            t,
	}
	server := httptest.NewServer(&handler)
	defer server.Close()
	client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}})
	informerFactory := informers.NewSharedInformerFactory(client, 0)
	factory := NewConfigFactory(
		v1.DefaultSchedulerName,
		client,
		informerFactory.Core().V1().Nodes(),
		informerFactory.Core().V1().Pods(),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Extensions().V1beta1().ReplicaSets(),
		informerFactory.Apps().V1beta1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		v1.DefaultHardPodAffinitySymmetricWeight,
		enableEquivalenceCache,
	)
	factory.Create()
}

// Tests that a scheduler can be configured from policies defined in a file.
// It combines some configurable predicates/priorities with some pre-defined ones.
func TestCreateFromConfig(t *testing.T) {
	var configData []byte
	var policy schedulerapi.Policy

	handler := utiltesting.FakeHandler{
		StatusCode:   500,
		ResponseBody: "",
		T:            t,
	}
	server := httptest.NewServer(&handler)
	defer server.Close()
	client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}})
	informerFactory := informers.NewSharedInformerFactory(client, 0)
	factory := NewConfigFactory(
		v1.DefaultSchedulerName,
		client,
		informerFactory.Core().V1().Nodes(),
		informerFactory.Core().V1().Pods(),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Extensions().V1beta1().ReplicaSets(),
		informerFactory.Apps().V1beta1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		v1.DefaultHardPodAffinitySymmetricWeight,
		enableEquivalenceCache,
	)

	// Pre-register some predicate and priority functions
	RegisterFitPredicate("PredicateOne", PredicateOne)
	RegisterFitPredicate("PredicateTwo", PredicateTwo)
	RegisterPriorityFunction("PriorityOne", PriorityOne, 1)
	RegisterPriorityFunction("PriorityTwo", PriorityTwo, 1)

	configData = []byte(`{
		"kind" : "Policy",
		"apiVersion" : "v1",
		"predicates" : [
			{"name" : "TestZoneAffinity", "argument" : {"serviceAffinity" : {"labels" : ["zone"]}}},
			{"name" : "TestRequireZone", "argument" : {"labelsPresence" : {"labels" : ["zone"], "presence" : true}}},
			{"name" : "PredicateOne"},
			{"name" : "PredicateTwo"}
		],
		"priorities" : [
			{"name" : "RackSpread", "weight" : 3, "argument" : {"serviceAntiAffinity" : {"label" : "rack"}}},
			{"name" : "PriorityOne", "weight" : 2},
			{"name" : "PriorityTwo", "weight" : 1}
		]
	}`)
	if err := runtime.DecodeInto(latestschedulerapi.Codec, configData, &policy); err != nil {
		t.Errorf("Invalid configuration: %v", err)
	}

	factory.CreateFromConfig(policy)
	hpa := factory.GetHardPodAffinitySymmetricWeight()
	if hpa != v1.DefaultHardPodAffinitySymmetricWeight {
		t.Errorf("Wrong hardPodAffinitySymmetricWeight, expected: %d, got: %d", v1.DefaultHardPodAffinitySymmetricWeight, hpa)
	}
}

func TestCreateFromConfigWithHardPodAffinitySymmetricWeight(t *testing.T) {
	var configData []byte
	var policy schedulerapi.Policy

	handler := utiltesting.FakeHandler{
		StatusCode:   500,
		ResponseBody: "",
		T:            t,
	}
	server := httptest.NewServer(&handler)
	defer server.Close()
	client :=
clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}})
	informerFactory := informers.NewSharedInformerFactory(client, 0)
	factory := NewConfigFactory(
		v1.DefaultSchedulerName,
		client,
		informerFactory.Core().V1().Nodes(),
		informerFactory.Core().V1().Pods(),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Extensions().V1beta1().ReplicaSets(),
		informerFactory.Apps().V1beta1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		v1.DefaultHardPodAffinitySymmetricWeight,
		enableEquivalenceCache,
	)

	// Pre-register some predicate and priority functions
	RegisterFitPredicate("PredicateOne", PredicateOne)
	RegisterFitPredicate("PredicateTwo", PredicateTwo)
	RegisterPriorityFunction("PriorityOne", PriorityOne, 1)
	RegisterPriorityFunction("PriorityTwo", PriorityTwo, 1)

	configData = []byte(`{
		"kind" : "Policy",
		"apiVersion" : "v1",
		"predicates" : [
			{"name" : "TestZoneAffinity", "argument" : {"serviceAffinity" : {"labels" : ["zone"]}}},
			{"name" : "TestRequireZone", "argument" : {"labelsPresence" : {"labels" : ["zone"], "presence" : true}}},
			{"name" : "PredicateOne"},
			{"name" : "PredicateTwo"}
		],
		"priorities" : [
			{"name" : "RackSpread", "weight" : 3, "argument" : {"serviceAntiAffinity" : {"label" : "rack"}}},
			{"name" : "PriorityOne", "weight" : 2},
			{"name" : "PriorityTwo", "weight" : 1}
		],
		"hardPodAffinitySymmetricWeight" : 10
	}`)
	if err := runtime.DecodeInto(latestschedulerapi.Codec, configData, &policy); err != nil {
		t.Errorf("Invalid configuration: %v", err)
	}
	factory.CreateFromConfig(policy)
	hpa := factory.GetHardPodAffinitySymmetricWeight()
	if hpa != 10 {
		t.Errorf("Wrong hardPodAffinitySymmetricWeight, expected: %d, got: %d", 10, hpa)
	}
}

func TestCreateFromEmptyConfig(t *testing.T) {
	var configData []byte
	var policy schedulerapi.Policy

	handler := utiltesting.FakeHandler{
		StatusCode:   500,
		ResponseBody: "",
		T:            t,
	}
	server := httptest.NewServer(&handler)
	defer server.Close()
	client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}})
	informerFactory := informers.NewSharedInformerFactory(client, 0)
	factory := NewConfigFactory(
		v1.DefaultSchedulerName,
		client,
		informerFactory.Core().V1().Nodes(),
		informerFactory.Core().V1().Pods(),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Extensions().V1beta1().ReplicaSets(),
		informerFactory.Apps().V1beta1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		v1.DefaultHardPodAffinitySymmetricWeight,
		enableEquivalenceCache,
	)

	configData = []byte(`{}`)
	if err := runtime.DecodeInto(latestschedulerapi.Codec, configData, &policy); err != nil {
		t.Errorf("Invalid configuration: %v", err)
	}

	factory.CreateFromConfig(policy)
}

func PredicateOne(pod *v1.Pod, meta algorithm.PredicateMetadata, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	return true, nil, nil
}

func PredicateTwo(pod *v1.Pod, meta algorithm.PredicateMetadata, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	return true, nil, nil
}

func PriorityOne(pod *v1.Pod, nodeNameToInfo map[string]*schedulercache.NodeInfo, nodes []*v1.Node)
(schedulerapi.HostPriorityList, error) {
	return []schedulerapi.HostPriority{}, nil
}

func PriorityTwo(pod *v1.Pod, nodeNameToInfo map[string]*schedulercache.NodeInfo, nodes []*v1.Node) (schedulerapi.HostPriorityList, error) {
	return []schedulerapi.HostPriority{}, nil
}

func TestDefaultErrorFunc(t *testing.T) {
	testPod := &v1.Pod{
		ObjectMeta: metav1.ObjectMeta{Name: "foo", Namespace: "bar"},
		Spec:       apitesting.V1DeepEqualSafePodSpec(),
	}
	handler := utiltesting.FakeHandler{
		StatusCode:   200,
		ResponseBody: runtime.EncodeOrDie(util.Test.Codec(), testPod),
		T:            t,
	}
	mux := http.NewServeMux()

	// FakeHandler mustn't be sent requests other than the one you want to test.
	mux.Handle(util.Test.ResourcePath(string(v1.ResourcePods), "bar", "foo"), &handler)
	server := httptest.NewServer(mux)
	defer server.Close()
	client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}})
	informerFactory := informers.NewSharedInformerFactory(client, 0)
	factory := NewConfigFactory(
		v1.DefaultSchedulerName,
		client,
		informerFactory.Core().V1().Nodes(),
		informerFactory.Core().V1().Pods(),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Extensions().V1beta1().ReplicaSets(),
		informerFactory.Apps().V1beta1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		v1.DefaultHardPodAffinitySymmetricWeight,
		enableEquivalenceCache,
	)
	queue := cache.NewFIFO(cache.MetaNamespaceKeyFunc)
	podBackoff := util.CreatePodBackoff(1*time.Millisecond, 1*time.Second)
	errFunc := factory.MakeDefaultErrorFunc(podBackoff, queue)

	errFunc(testPod, nil)
	for {
		// This is a terrible way to do this but I plan on replacing this
		// whole error handling system in the future. The test will time
		// out if something doesn't work.
		time.Sleep(10 * time.Millisecond)
		got, exists, _ := queue.Get(testPod)
		if !exists {
			continue
		}
		handler.ValidateRequest(t, util.Test.ResourcePath(string(v1.ResourcePods), "bar", "foo"), "GET", nil)
		if e, a := testPod, got; !reflect.DeepEqual(e, a) {
			t.Errorf("Expected %v, got %v", e, a)
		}
		break
	}
}

func TestNodeEnumerator(t *testing.T) {
	testList := &v1.NodeList{
		Items: []v1.Node{
			{ObjectMeta: metav1.ObjectMeta{Name: "foo"}},
			{ObjectMeta: metav1.ObjectMeta{Name: "bar"}},
			{ObjectMeta: metav1.ObjectMeta{Name: "baz"}},
		},
	}
	me := nodeEnumerator{testList}

	if e, a := 3, me.Len(); e != a {
		t.Fatalf("expected %v, got %v", e, a)
	}
	for i := range testList.Items {
		gotObj := me.Get(i)
		if e, a := testList.Items[i].Name, gotObj.(*v1.Node).Name; e != a {
			t.Errorf("Expected %v, got %v", e, a)
		}
		if e, a := &testList.Items[i], gotObj; !reflect.DeepEqual(e, a) {
			t.Errorf("Expected %#v, got %#v", e, a)
		}
	}
}

func TestBind(t *testing.T) {
	table := []struct {
		binding *v1.Binding
	}{
		{binding: &v1.Binding{
			ObjectMeta: metav1.ObjectMeta{
				Namespace: metav1.NamespaceDefault,
				Name:      "foo",
			},
			Target: v1.ObjectReference{
				Name: "foohost.kubernetes.mydomain.com",
			},
		}},
	}

	for _, item := range table {
		handler := utiltesting.FakeHandler{
			StatusCode:   200,
			ResponseBody: "",
			T:            t,
		}
		server := httptest.NewServer(&handler)
		defer server.Close()
		client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}})
		b := binder{client}

		if err := b.Bind(item.binding); err != nil {
			t.Errorf("Unexpected error: %v", err)
			continue
		}
		expectedBody := runtime.EncodeOrDie(util.Test.Codec(), item.binding)
		handler.ValidateRequest(t, util.Test.SubResourcePath(string(v1.ResourcePods), metav1.NamespaceDefault, "foo", "binding"), "POST", &expectedBody)
	}
}

// TestResponsibleForPod tests that a pod whose spec.SchedulerName names the
// default scheduler is in fact picked up by the default scheduler.
// Two schedulers are created in the test: the default scheduler and one named
// "foo-scheduler". A pod must be picked up by at most one of the two
// schedulers.
func TestResponsibleForPod(t *testing.T) {
	handler := utiltesting.FakeHandler{
		StatusCode:   500,
		ResponseBody: "",
		T:            t,
	}
	server := httptest.NewServer(&handler)
	defer server.Close()
	client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}})
	// factory of "default-scheduler"
	informerFactory := informers.NewSharedInformerFactory(client, 0)
	factoryDefaultScheduler := NewConfigFactory(
		v1.DefaultSchedulerName,
		client,
		informerFactory.Core().V1().Nodes(),
		informerFactory.Core().V1().Pods(),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Extensions().V1beta1().ReplicaSets(),
		informerFactory.Apps().V1beta1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		v1.DefaultHardPodAffinitySymmetricWeight,
		enableEquivalenceCache,
	)
	// factory of "foo-scheduler"
	factoryFooScheduler := NewConfigFactory(
		"foo-scheduler",
		client,
		informerFactory.Core().V1().Nodes(),
		informerFactory.Core().V1().Pods(),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Extensions().V1beta1().ReplicaSets(),
		informerFactory.Apps().V1beta1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		v1.DefaultHardPodAffinitySymmetricWeight,
		enableEquivalenceCache,
	)
	// scheduler names to be tested
	schedulerFitsDefault := "default-scheduler"
	schedulerFitsFoo := "foo-scheduler"
	schedulerFitsNone := "bar-scheduler"

	tests := []struct {
		pod             *v1.Pod
		pickedByDefault bool
		pickedByFoo     bool
	}{
		{
			// pod with "spec.SchedulerName=default-scheduler" should be picked
			// by the scheduler of name "default-scheduler", NOT by the one of name "foo-scheduler"
			pod:             &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "foo", Namespace: "bar"}, Spec: v1.PodSpec{SchedulerName: schedulerFitsDefault}},
			pickedByDefault: true,
			pickedByFoo:     false,
		},
		{
			// pod with "spec.SchedulerName=foo-scheduler" should NOT be picked
			// by the scheduler of name "default-scheduler", but by the one of name "foo-scheduler"
			pod:             &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "foo", Namespace: "bar"}, Spec: v1.PodSpec{SchedulerName: schedulerFitsFoo}},
			pickedByDefault: false,
			pickedByFoo:     true,
		},
		{
			// pod with "spec.SchedulerName=bar-scheduler" should be picked by
			// neither the scheduler of name "default-scheduler" nor the one of name "foo-scheduler"
			pod:             &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "foo", Namespace: "bar"}, Spec: v1.PodSpec{SchedulerName: schedulerFitsNone}},
			pickedByDefault: false,
			pickedByFoo:     false,
		},
	}

	for _, test := range tests {
		podOfDefault := factoryDefaultScheduler.ResponsibleForPod(test.pod)
		podOfFoo := factoryFooScheduler.ResponsibleForPod(test.pod)
		results := []bool{podOfDefault, podOfFoo}
		expected := []bool{test.pickedByDefault, test.pickedByFoo}
		if !reflect.DeepEqual(results, expected) {
			t.Errorf("expected: {%v, %v}, got {%v, %v}", test.pickedByDefault, test.pickedByFoo, podOfDefault, podOfFoo)
		}
	}
}

func TestInvalidHardPodAffinitySymmetricWeight(t *testing.T) {
	handler := utiltesting.FakeHandler{
		StatusCode:   500,
		ResponseBody: "",
		T:            t,
	}
	server := httptest.NewServer(&handler)
	// TODO: Uncomment when fix #19254
	// defer server.Close()
	client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion:
&api.Registry.GroupOrDie(v1.GroupName).GroupVersion}}) // factory of "default-scheduler" informerFactory := informers.NewSharedInformerFactory(client, 0) factory := NewConfigFactory( v1.DefaultSchedulerName, client, informerFactory.Core().V1().Nodes(), informerFactory.Core().V1().Pods(), informerFactory.Core().V1().PersistentVolumes(), informerFactory.Core().V1().PersistentVolumeClaims(), informerFactory.Core().V1().ReplicationControllers(), informerFactory.Extensions().V1beta1().ReplicaSets(), informerFactory.Apps().V1beta1().StatefulSets(), informerFactory.Core().V1().Services(), -1, enableEquivalenceCache, ) _, err := factory.Create() if err == nil { t.Errorf("expected err: invalid hardPodAffinitySymmetricWeight, got nothing") } } func TestInvalidFactoryArgs(t *testing.T) { handler := utiltesting.FakeHandler{ StatusCode: 500, ResponseBody: "", T: t, } server := httptest.NewServer(&handler) defer server.Close() client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}}) testCases := []struct { hardPodAffinitySymmetricWeight int expectErr string }{ { hardPodAffinitySymmetricWeight: -1, expectErr: "invalid hardPodAffinitySymmetricWeight: -1, must be in the range 0-100", }, { hardPodAffinitySymmetricWeight: 101, expectErr: "invalid hardPodAffinitySymmetricWeight: 101, must be in the range 0-100", }, } for _, test := range testCases { informerFactory := informers.NewSharedInformerFactory(client, 0) factory := NewConfigFactory( v1.DefaultSchedulerName, client, informerFactory.Core().V1().Nodes(), informerFactory.Core().V1().Pods(), informerFactory.Core().V1().PersistentVolumes(), informerFactory.Core().V1().PersistentVolumeClaims(), informerFactory.Core().V1().ReplicationControllers(), informerFactory.Extensions().V1beta1().ReplicaSets(), informerFactory.Apps().V1beta1().StatefulSets(), informerFactory.Core().V1().Services(), test.hardPodAffinitySymmetricWeight, enableEquivalenceCache, ) _, err := factory.Create() if err == nil { t.Errorf("expected err: %s, got nothing", test.expectErr) } } }
TestCreate
test_init_final.py
# -*- coding: utf-8 -*- ################ Server Ver. 19 (2020. 8. 6.) ##################### import sys, os import asyncio, discord, aiohttp import random, re, datetime, time, logging from discord.ext import tasks, commands from discord.ext.commands import CommandNotFound, MissingRequiredArgument from gtts import gTTS from github import Github import base64 import gspread #정산 from oauth2client.service_account import ServiceAccountCredentials #정산 from io import StringIO import urllib.request from math import ceil, floor ##################### 로깅 ########################### log_stream = StringIO() logging.basicConfig(stream=log_stream, level=logging.WARNING) #ilsanglog = logging.getLogger('discord') #ilsanglog.setLevel(level = logging.WARNING) #handler = logging.StreamHandler() #handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) #ilsanglog.addHandler(handler) ##################################################### basicSetting = [] bossData = [] fixed_bossData = [] bossNum = 0 fixed_bossNum = 0 chkvoicechannel = 0 chkrelogin = 0 chflg = 0 LoadChk = 0 bossTime = [] tmp_bossTime = [] fixed_bossTime = [] bossTimeString = [] bossDateString = [] tmp_bossTimeString = [] tmp_bossDateString = [] bossFlag = [] bossFlag0 = [] fixed_bossFlag = [] fixed_bossFlag0 = [] bossMungFlag = [] bossMungCnt = [] channel_info = [] channel_name = [] channel_id = [] channel_voice_name = [] channel_voice_id = [] channel_type = [] FixedBossDateData = [] indexFixedBossname = [] access_token = os.environ["BOT_TOKEN"] git_access_token = os.environ["GIT_TOKEN"] git_access_repo = os.environ["GIT_REPO"] git_access_repo_restart = os.environ["GIT_REPO_RESTART"] g = Github(git_access_token) repo = g.get_repo(git_access_repo) repo_restart = g.get_repo(git_access_repo_restart) def init(): global basicSetting global bossData global fixed_bossData global bossNum global fixed_bossNum global chkvoicechannel global chkrelogin global bossTime global tmp_bossTime global fixed_bossTime global bossTimeString global bossDateString global tmp_bossTimeString global tmp_bossDateString global bossFlag global bossFlag0 global fixed_bossFlag global fixed_bossFlag0 global bossMungFlag global bossMungCnt global voice_client1 global channel_info global channel_name global channel_voice_name global channel_voice_id global channel_id global channel_type global LoadChk global indexFixedBossname global FixedBossDateData global endTime global gc #정산 global credentials #정산 global regenembed global command global kill_Data global kill_Time global item_Data global tmp_racing_unit command = [] tmp_bossData = [] tmp_fixed_bossData = [] FixedBossDateData = [] indexFixedBossname = [] kill_Data = {} tmp_kill_Data = [] item_Data = {} tmp_item_Data = [] f = [] fb = [] fk = [] fc = [] fi = [] tmp_racing_unit = [] inidata = repo.get_contents("test_setting.ini") file_data1 = base64.b64decode(inidata.content) file_data1 = file_data1.decode('utf-8') inputData = file_data1.split('\n') command_inidata = repo.get_contents("command.ini") file_data4 = base64.b64decode(command_inidata.content) file_data4 = file_data4.decode('utf-8') command_inputData = file_data4.split('\n') boss_inidata = repo.get_contents("boss.ini") file_data3 = base64.b64decode(boss_inidata.content) file_data3 = file_data3.decode('utf-8') boss_inputData = file_data3.split('\n') fixed_inidata = repo.get_contents("fixed_boss.ini") file_data2 = base64.b64decode(fixed_inidata.content) file_data2 = file_data2.decode('utf-8') fixed_inputData = file_data2.split('\n') 
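    # (editor's sketch, illustrative only -- read_repo_lines is not part of the
    # original bot) every .ini read in init() repeats the same
    # fetch / base64-decode / split dance; a small helper could factor it out:
    # def read_repo_lines(path):
    #     contents = repo.get_contents(path)
    #     return base64.b64decode(contents.content).decode('utf-8').split('\n')
    # e.g. kill_inputData = read_repo_lines("kill_list.ini") would then replace
    # the block below.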
kill_inidata = repo.get_contents("kill_list.ini") file_data5 = base64.b64decode(kill_inidata.content) file_data5 = file_data5.decode('utf-8') kill_inputData = file_data5.split('\n') item_inidata = repo.get_contents("item_list.ini") file_data6 = base64.b64decode(item_inidata.content) file_data6 = file_data6.decode('utf-8') item_inputData = file_data6.split('\n') for i in range(len(fixed_inputData)): FixedBossDateData.append(fixed_inputData[i]) index_fixed = 0 for value in FixedBossDateData: if value.find('bossname') != -1: indexFixedBossname.append(index_fixed) index_fixed = index_fixed + 1 for i in range(inputData.count('\r')): inputData.remove('\r') for i in range(command_inputData.count('\r')): command_inputData.remove('\r') for i in range(boss_inputData.count('\r')): boss_inputData.remove('\r') for i in range(fixed_inputData.count('\r')): fixed_inputData.remove('\r') for i in range(kill_inputData.count('\r')): kill_inputData.remove('\r') for i in range(item_inputData.count('\r')): item_inputData.remove('\r') del(command_inputData[0]) del(boss_inputData[0]) del(fixed_inputData[0]) del(kill_inputData[0]) del(item_inputData[0]) ############## 보탐봇 초기 설정 리스트 ##################### basicSetting.append(inputData[0][11:]) #basicSetting[0] : timezone basicSetting.append(inputData[8][15:]) #basicSetting[1] : before_alert basicSetting.append(inputData[10][10:]) #basicSetting[2] : mungChk basicSetting.append(inputData[9][16:]) #basicSetting[3] : before_alert1 basicSetting.append(inputData[13][14:16]) #basicSetting[4] : restarttime 시 basicSetting.append(inputData[13][17:]) #basicSetting[5] : restarttime 분 basicSetting.append(inputData[1][15:]) #basicSetting[6] : voice채널 ID basicSetting.append(inputData[2][14:]) #basicSetting[7] : text채널 ID basicSetting.append(inputData[3][16:]) #basicSetting[8] : 사다리 채널 ID basicSetting.append(inputData[12][14:]) #basicSetting[9] : !ㅂ 출력 수 basicSetting.append(inputData[16][11:]) #basicSetting[10] : json 파일명 basicSetting.append(inputData[4][17:]) #basicSetting[11] : 정산 채널 ID basicSetting.append(inputData[15][12:]) #basicSetting[12] : sheet 이름 basicSetting.append(inputData[14][16:]) #basicSetting[13] : restart 주기 basicSetting.append(inputData[17][12:]) #basicSetting[14] : 시트 이름 basicSetting.append(inputData[18][12:]) #basicSetting[15] : 입력 셀 basicSetting.append(inputData[19][13:]) #basicSetting[16] : 출력 셀 basicSetting.append(inputData[11][13:]) #basicSetting[17] : 멍삭제횟수 basicSetting.append(inputData[5][14:]) #basicSetting[18] : kill채널 ID basicSetting.append(inputData[6][16:]) #basicSetting[19] : racing 채널 ID basicSetting.append(inputData[7][14:]) #basicSetting[20] : item 채널 ID ############## 보탐봇 명령어 리스트 ##################### for i in range(len(command_inputData)): tmp_command = command_inputData[i][12:].rstrip('\r') fc = tmp_command.split(', ') command.append(fc) fc = [] #command.append(command_inputData[i][12:].rstrip('\r')) #command[0] ~ [24] : 명령어 ################## 척살 명단 ########################### for i in range(len(kill_inputData)): tmp_kill_Data.append(kill_inputData[i].rstrip('\r')) fk.append(tmp_kill_Data[i][:tmp_kill_Data[i].find(' ')]) fk.append(tmp_kill_Data[i][tmp_kill_Data[i].find(' ')+1:]) try: kill_Data[fk[0]] = int(fk[1]) except: pass fk = [] for i in range(len(item_inputData)): tmp_item_Data.append(item_inputData[i].rstrip('\r')) fi.append(tmp_item_Data[i][:tmp_item_Data[i].find(' ')]) fi.append(tmp_item_Data[i][tmp_item_Data[i].find(' ')+1:]) try: item_Data[fi[0]] = int(fi[1]) except: pass fi = [] tmp_killtime = 
datetime.datetime.now().replace(hour=int(5), minute=int(0), second = int(0)) kill_Time = datetime.datetime.now() if tmp_killtime < kill_Time : kill_Time = tmp_killtime + datetime.timedelta(days=int(1)) else: kill_Time = tmp_killtime for i in range(len(basicSetting)): basicSetting[i] = basicSetting[i].strip() if basicSetting[6] != "": basicSetting[6] = int(basicSetting[6]) if basicSetting[7] != "": basicSetting[7] = int(basicSetting[7]) if basicSetting[8] != "": basicSetting[8] = int(basicSetting[8]) if basicSetting[11] != "": basicSetting[11] = int(basicSetting[11]) if basicSetting[18] != "": basicSetting[18] = int(basicSetting[18]) if basicSetting[19] != "": basicSetting[19] = int(basicSetting[19]) if basicSetting[20] != "": basicSetting[20] = int(basicSetting[20]) tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) if int(basicSetting[13]) == 0 : endTime = tmp_now.replace(hour=int(basicSetting[4]), minute=int(basicSetting[5]), second = int(0)) endTime = endTime + datetime.timedelta(days=int(1000)) else : endTime = tmp_now.replace(hour=int(basicSetting[4]), minute=int(basicSetting[5]), second = int(0)) if endTime < tmp_now : endTime = endTime + datetime.timedelta(days=int(basicSetting[13])) bossNum = int(len(boss_inputData)/5) fixed_bossNum = int(len(fixed_inputData)/6) for i in range(bossNum): tmp_bossData.append(boss_inputData[i*5:i*5+5]) for i in range(fixed_bossNum): tmp_fixed_bossData.append(fixed_inputData[i*6:i*6+6]) #print (tmp_bossData) for j in range(bossNum): for i in range(len(tmp_bossData[j])): tmp_bossData[j][i] = tmp_bossData[j][i].strip() for j in range(fixed_bossNum): for i in range(len(tmp_fixed_bossData[j])): tmp_fixed_bossData[j][i] = tmp_fixed_bossData[j][i].strip() ############## 일반보스 정보 리스트 ##################### for j in range(bossNum): tmp_len = tmp_bossData[j][1].find(':') f.append(tmp_bossData[j][0][11:]) #bossData[0] : 보스명 f.append(tmp_bossData[j][1][10:tmp_len]) #bossData[1] : 시 f.append(tmp_bossData[j][2][13:]) #bossData[2] : 멍/미입력 f.append(tmp_bossData[j][3][20:]) #bossData[3] : 분전 알림멘트 f.append(tmp_bossData[j][4][13:]) #bossData[4] : 젠 알림멘트 f.append(tmp_bossData[j][1][tmp_len+1:]) #bossData[5] : 분 f.append('') #bossData[6] : 메세지 bossData.append(f) f = [] bossTime.append(datetime.datetime.now()+datetime.timedelta(days=365, hours = int(basicSetting[0]))) tmp_bossTime.append(datetime.datetime.now()+datetime.timedelta(days=365, hours = int(basicSetting[0]))) bossTimeString.append('99:99:99') bossDateString.append('9999-99-99') tmp_bossTimeString.append('99:99:99') tmp_bossDateString.append('9999-99-99') bossFlag.append(False) bossFlag0.append(False) bossMungFlag.append(False) bossMungCnt.append(0) tmp_fixed_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) ############## 고정보스 정보 리스트 ##################### for j in range(fixed_bossNum): tmp_fixed_len = tmp_fixed_bossData[j][1].find(':') tmp_fixedGen_len = tmp_fixed_bossData[j][2].find(':') fb.append(tmp_fixed_bossData[j][0][11:]) #fixed_bossData[0] : 보스명 fb.append(tmp_fixed_bossData[j][1][11:tmp_fixed_len]) #fixed_bossData[1] : 시 fb.append(tmp_fixed_bossData[j][1][tmp_fixed_len+1:]) #fixed_bossData[2] : 분 fb.append(tmp_fixed_bossData[j][4][20:]) #fixed_bossData[3] : 분전 알림멘트 fb.append(tmp_fixed_bossData[j][5][13:]) #fixed_bossData[4] : 젠 알림멘트 fb.append(tmp_fixed_bossData[j][2][12:tmp_fixedGen_len]) #fixed_bossData[5] : 젠주기-시 fb.append(tmp_fixed_bossData[j][2][tmp_fixedGen_len+1:]) #fixed_bossData[6] : 젠주기-분 fb.append(tmp_fixed_bossData[j][3][12:16]) 
#fixed_bossData[7] : 시작일-년 fb.append(tmp_fixed_bossData[j][3][17:19]) #fixed_bossData[8] : 시작일-월 fb.append(tmp_fixed_bossData[j][3][20:22]) #fixed_bossData[9] : 시작일-일 fixed_bossData.append(fb) fb = [] fixed_bossFlag.append(False) fixed_bossFlag0.append(False) fixed_bossTime.append(tmp_fixed_now.replace(year = int(fixed_bossData[j][7]), month = int(fixed_bossData[j][8]), day = int(fixed_bossData[j][9]), hour=int(fixed_bossData[j][1]), minute=int(fixed_bossData[j][2]), second = int(0))) if fixed_bossTime[j] < tmp_fixed_now : while fixed_bossTime[j] < tmp_fixed_now : fixed_bossTime[j] = fixed_bossTime[j] + datetime.timedelta(hours=int(fixed_bossData[j][5]), minutes=int(fixed_bossData[j][6]), seconds = int(0)) ################# 이모지 로드 ###################### emo_inidata = repo.get_contents("emoji.ini") emoji_data1 = base64.b64decode(emo_inidata.content) emoji_data1 = emoji_data1.decode('utf-8') emo_inputData = emoji_data1.split('\n') for i in range(len(emo_inputData)): tmp_emo = emo_inputData[i][8:].rstrip('\r') if tmp_emo != "": tmp_racing_unit.append(tmp_emo) ################# 리젠보스 시간 정렬 ###################### regenData = [] regenTime = [] regenbossName = [] outputTimeHour = [] outputTimeMin = [] for i in range(bossNum): if bossData[i][2] == "1": f.append(bossData[i][0] + "R") else: f.append(bossData[i][0]) f.append(bossData[i][1] + bossData[i][5]) regenData.append(f) regenTime.append(bossData[i][1] + bossData[i][5]) f = [] regenTime = sorted(list(set(regenTime))) for j in range(len(regenTime)): for i in range(len(regenData)): if regenTime[j] == regenData[i][1] : f.append(regenData[i][0]) regenbossName.append(f) outputTimeHour.append(int(regenTime[j][:2])) outputTimeMin.append(int(regenTime[j][2:])) f = [] regenembed = discord.Embed( title='----- 보스별 리스폰 시간 -----', description= ' ') for i in range(len(regenTime)): if outputTimeMin[i] == 0 : regenembed.add_field(name=str(outputTimeHour[i]) + '시간', value= '```'+ ', '.join(map(str, sorted(regenbossName[i]))) + '```', inline=False) else : regenembed.add_field(name=str(outputTimeHour[i]) + '시간' + str(outputTimeMin[i]) + '분', value= '```' + ','.join(map(str, sorted(regenbossName[i]))) + '```', inline=False) regenembed.set_footer(text = 'R : 멍 보스') ########################################################## if basicSetting[10] !="": scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive'] #정산 credentials = ServiceAccountCredentials.from_json_keyfile_name(basicSetting[10], scope) #정산 init() channel = '' #mp3 파일 생성함수(gTTS 이용, 남성목소리) async def MakeSound(saveSTR, filename): tts = gTTS(saveSTR, lang = 'ko') tts.save('./' + filename + '.wav') ''' try: encText = urllib.parse.quote(saveSTR) urllib.request.urlretrieve("https://clova.ai/proxy/voice/api/tts?text=" + encText + "%0A&voicefont=1&format=wav",filename + '.wav') except Exception as e: print (e) tts = gTTS(saveSTR, lang = 'ko') tts.save('./' + filename + '.wav') pass ''' #mp3 파일 재생함수 async def PlaySound(voiceclient, filename): source = discord.FFmpegPCMAudio(filename) try: voiceclient.play(source) except discord.errors.ClientException: while voiceclient.is_playing(): await asyncio.sleep(1) while voiceclient.is_playing(): await asyncio.sleep(1) voiceclient.stop() source.cleanup() #my_bot.db 저장하기 async def dbSave(): global bossData global bossNum global bossTime global bossTimeString global bossDateString global bossMungFlag global bossMungCnt for i in range(bossNum): for j in range(bossNum): if bossTimeString[i] and bossTimeString[j] != '99:99:99': if bossTimeString[i] 
== bossTimeString[j] and i != j:
                    tmp_time1 = bossTimeString[j][:6]
                    tmp_time2 = (int(bossTimeString[j][6:]) + 1) % 100
                    if tmp_time2 < 10:
                        tmp_time22 = '0' + str(tmp_time2)
                    elif tmp_time2 == 60:
                        tmp_time22 = '00'
                    else:
                        tmp_time22 = str(tmp_time2)
                    bossTimeString[j] = tmp_time1 + tmp_time22

    datelist1 = bossTime
    datelist = list(set(datelist1))
    information1 = '----- 보스탐 정보 -----\n'
    for timestring in sorted(datelist):
        for i in range(bossNum):
            if timestring == bossTime[i]:
                if bossTimeString[i] != '99:99:99' or bossMungFlag[i] == True:
                    if bossMungFlag[i] == True:
                        if bossData[i][2] == '0':
                            information1 += ' - ' + bossData[i][0] + '(' + bossData[i][1] + '.' + bossData[i][5] + ') : ' + tmp_bossTime[i].strftime('%H:%M:%S') + ' @ ' + tmp_bossTime[i].strftime('%Y-%m-%d') + ' (미입력 ' + str(bossMungCnt[i]) + '회)' + ' * ' + bossData[i][6] + '\n'
                        else:
                            information1 += ' - ' + bossData[i][0] + '(' + bossData[i][1] + '.' + bossData[i][5] + ') : ' + tmp_bossTime[i].strftime('%H:%M:%S') + ' @ ' + tmp_bossTime[i].strftime('%Y-%m-%d') + ' (멍 ' + str(bossMungCnt[i]) + '회)' + ' * ' + bossData[i][6] + '\n'
                    else:
                        if bossData[i][2] == '0':
                            information1 += ' - ' + bossData[i][0] + '(' + bossData[i][1] + '.' + bossData[i][5] + ') : ' + bossTimeString[i] + ' @ ' + bossDateString[i] + ' (미입력 ' + str(bossMungCnt[i]) + '회)' + ' * ' + bossData[i][6] + '\n'
                        else:
                            information1 += ' - ' + bossData[i][0] + '(' + bossData[i][1] + '.' + bossData[i][5] + ') : ' + bossTimeString[i] + ' @ ' + bossDateString[i] + ' (멍 ' + str(bossMungCnt[i]) + '회)' + ' * ' + bossData[i][6] + '\n'

    from github import GithubException  # used by the error handler below
    try:
        contents = repo.get_contents("my_bot.db")
        repo.update_file(contents.path, "bossDB", information1, contents.sha)
    except GithubException as e:
        print('save error!!')
        print(e.args[1]['message'])  # output: This repository is empty.
        errortime = datetime.datetime.now()
        print(errortime)
        pass

# load my_bot.db
async def dbLoad():
    global LoadChk

    contents1 = repo.get_contents("my_bot.db")
    file_data = base64.b64decode(contents1.content)
    file_data = file_data.decode('utf-8')
    beforeBossData = file_data.split('\n')

    if len(beforeBossData) > 1:
        for i in range(len(beforeBossData)-1):
            for j in range(bossNum):
                startPos = beforeBossData[i+1].find('-')
                endPos = beforeBossData[i+1].find('(')
                if beforeBossData[i+1][startPos+2:endPos] == bossData[j][0]:
                    #if beforeBossData[i+1].find(bossData[j][0]) != -1 :
                    tmp_mungcnt = 0
                    tmp_len = beforeBossData[i+1].find(':')
                    tmp_datelen = beforeBossData[i+1].find('@')
                    tmp_msglen = beforeBossData[i+1].find('*')
                    years1 = beforeBossData[i+1][tmp_datelen+2:tmp_datelen+6]
                    months1 = beforeBossData[i+1][tmp_datelen+7:tmp_datelen+9]
                    days1 = beforeBossData[i+1][tmp_datelen+10:tmp_datelen+12]
                    hours1 = beforeBossData[i+1][tmp_len+2:tmp_len+4]
                    minutes1 = beforeBossData[i+1][tmp_len+5:tmp_len+7]
                    seconds1 = beforeBossData[i+1][tmp_len+8:tmp_len+10]
                    now2 = datetime.datetime.now() + datetime.timedelta(hours=int(basicSetting[0]))
                    tmp_now = datetime.datetime.now() + datetime.timedelta(hours=int(basicSetting[0]))
                    tmp_now = tmp_now.replace(year=int(years1), month=int(months1), day=int(days1), hour=int(hours1), minute=int(minutes1), second=int(seconds1))
                    tmp_now_chk = tmp_now + datetime.timedelta(minutes=int(basicSetting[2]))
                    if tmp_now_chk < now2:
                        deltaTime = datetime.timedelta(hours=int(bossData[j][1]), minutes=int(bossData[j][5]))
                        while tmp_now_chk < now2:
                            tmp_now_chk = tmp_now_chk + deltaTime
                            tmp_now = tmp_now + deltaTime
                            tmp_mungcnt = tmp_mungcnt + 1
                    if tmp_now_chk > now2 > tmp_now:  # spawn window currently open.
bossMungFlag[j] = True tmp_bossTime[j] = tmp_now tmp_bossTimeString[j] = tmp_bossTime[j].strftime('%H:%M:%S') tmp_bossDateString[j] = tmp_bossTime[j].strftime('%Y-%m-%d') bossTimeString[j] = '99:99:99' bossDateString[j] = '9999-99-99' bossTime[j] = tmp_bossTime[j] + datetime.timedelta(days=365) else: tmp_bossTime[j] = bossTime[j] = tmp_now tmp_bossTimeString[j] = bossTimeString[j] = bossTime[j].strftime('%H:%M:%S') tmp_bossDateString[j] = bossDateString[j] = bossTime[j].strftime('%Y-%m-%d') bossData[j][6] = beforeBossData[i+1][tmp_msglen+2:len(beforeBossData[i+1])] if beforeBossData[i+1][tmp_msglen-4:tmp_msglen-3] != 0 and beforeBossData[i+1][tmp_msglen-5:tmp_msglen-4] == ' ': bossMungCnt[j] = int(beforeBossData[i+1][tmp_msglen-4:tmp_msglen-3]) + tmp_mungcnt elif beforeBossData[i+1][tmp_msglen-5:tmp_msglen-4] != ' ': bossMungCnt[j] = int(beforeBossData[i+1][tmp_msglen-5:tmp_msglen-4] + beforeBossData[i+1][tmp_msglen-4:tmp_msglen-3]) + tmp_mungcnt else: bossMungCnt[j] = 0 LoadChk = 0 print ("<불러오기 완료>") else: LoadChk = 1 print ("보스타임 정보가 없습니다.") #고정보스 날짜저장 async def FixedBossDateSave(): global fixed_bossData global fixed_bossTime global fixed_bossNum global FixedBossDateData global indexFixedBossname for i in range(fixed_bossNum): FixedBossDateData[indexFixedBossname[i] + 3] = 'startDate = '+ fixed_bossTime[i].strftime('%Y-%m-%d') + '\n' FixedBossDateDataSTR = "" for j in range(len(FixedBossDateData)): pos = len(FixedBossDateData[j]) tmpSTR = FixedBossDateData[j][:pos-1] + '\r\n' FixedBossDateDataSTR += tmpSTR contents = repo.get_contents("fixed_boss.ini") repo.update_file(contents.path, "bossDB", FixedBossDateDataSTR, contents.sha) #사다리함수 async def LadderFunc(number, ladderlist, channelVal): if number < len(ladderlist): result_ladder = random.sample(ladderlist, number) result_ladderSTR = ','.join(map(str, result_ladder)) embed = discord.Embed( title = "----- 당첨! -----", description= '```' + result_ladderSTR + '```', color=0xff00ff ) await channelVal.send(embed=embed, tts=False) else: await channelVal.send('```추첨인원이 총 인원과 같거나 많습니다. 재입력 해주세요```', tts=False) #data초기화 async def init_data_list(filename, first_line : str = "-----------"): try : contents = repo.get_contents(filename) repo.update_file(contents.path, "deleted list " + str(filename), first_line, contents.sha) print ('< 데이터 초기화 >') except GithubException as e : print ('save error!!') print(e.args[1]['message']) # output: This repository is empty. errortime = datetime.datetime.now() print (errortime) pass #data저장 async def data_list_Save(filename, first_line : str = "-----------", save_data : dict = {}): output_list = first_line+ '\n' for key, value in save_data.items(): output_list += str(key) + ' ' + str(value) + '\n' try : contents = repo.get_contents(filename) repo.update_file(contents.path, "updated " + str(filename), output_list, contents.sha) except GithubException as e : print ('save error!!') print(e.args[1]['message']) # output: This repository is empty. 
errortime = datetime.datetime.now()
        print(errortime)
        pass

# server (guild) channel info
async def get_guild_channel_info(bot):
    text_channel_name: list = []
    text_channel_id: list = []
    voice_channel_name: list = []
    voice_channel_id: list = []
    for guild in bot.guilds:
        for text_channel in guild.text_channels:
            text_channel_name.append(text_channel.name)
            text_channel_id.append(str(text_channel.id))
        for voice_channel in guild.voice_channels:
            voice_channel_name.append(voice_channel.name)
            voice_channel_id.append(str(voice_channel.id))
    return text_channel_name, text_channel_id, voice_channel_name, voice_channel_id

# Hangul initial-consonant (초성) extraction function
def convertToInitialLetters(text):
    CHOSUNG_START_LETTER = 4352
    JAMO_START_LETTER = 44032
    JAMO_END_LETTER = 55203
    JAMO_CYCLE = 588

    def isHangul(ch):
        return ord(ch) >= JAMO_START_LETTER and ord(ch) <= JAMO_END_LETTER

    def isBlankOrNumber(ch):
        return ord(ch) == 32 or ord(ch) >= 48 and ord(ch) <= 57

    def convertNormalInitialLetter(ch):
        dic_InitialLetter = {4352: "ㄱ", 4353: "ㄲ", 4354: "ㄴ", 4355: "ㄷ", 4356: "ㄸ",
                             4357: "ㄹ", 4358: "ㅁ", 4359: "ㅂ", 4360: "ㅃ", 4361: "ㅅ",
                             4362: "ㅆ", 4363: "ㅇ", 4364: "ㅈ", 4365: "ㅉ", 4366: "ㅊ",
                             4367: "ㅋ", 4368: "ㅌ", 4369: "ㅍ", 4370: "ㅎ",
                             32: " ",
                             48: "0", 49: "1", 50: "2", 51: "3", 52: "4",
                             53: "5", 54: "6", 55: "7", 56: "8", 57: "9"}
        return dic_InitialLetter[ord(ch)]

    result = ""
    for ch in text:
        if isHangul(ch):  # characters that are not Hangul syllables are filtered out
            result += convertNormalInitialLetter(chr((int((ord(ch) - JAMO_START_LETTER) / JAMO_CYCLE)) + CHOSUNG_START_LETTER))
        elif isBlankOrNumber(ch):
            result += convertNormalInitialLetter(chr(int(ord(ch))))
    return result

class taskCog(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.main_task.start()

    @tasks.loop(seconds=1.0, count=1)
    async def main_task(self):
        boss_task = asyncio.get_event_loop().create_task(self.boss_check())
        await boss_task

    @main_task.before_loop
    async def before_task(self):
        await self.bot.wait_until_ready()

    ################ force restart ("명치") ################
    @commands.command(name=command[8][0], aliases=command[8][1:])
    async def command_task_list(self, ctx: commands.Context):
        if ctx.message.channel.id != basicSetting[7]:
            return
        for t in asyncio.Task.all_tasks():
            # print(t._coro.__name__)
            if t._coro.__name__ == "boss_check":
                if t.done():
                    try:
                        t.exception()
                    except asyncio.CancelledError:
                        continue
                    continue
                t.cancel()
        await ctx.send('< 보탐봇 명치 맞고 숨 고르기 중! 잠시만요!
>', tts=False) print("명치!") await dbSave() await data_list_Save("kill_list.ini", "-----척살명단-----", kill_Data) await data_list_Save("item_list.ini", "-----아이템목록-----", item_Data) if ctx.voice_client is not None: if ctx.voice_client.is_playing(): ctx.voice_client.stop() await ctx.voice_client.disconnect() boss_task = asyncio.Task(self.boss_check()) async def boss_check(self): await self.bot.wait_until_ready() global channel global endTime global basicSetting global bossData global fixed_bossData global bossNum global fixed_bossNum global chkvoicechannel global chkrelogin global bossTime global tmp_bossTime global fixed_bossTime global bossTimeString global bossDateString global tmp_bossTimeString global tmp_bossDateString global bossFlag global bossFlag0 global fixed_bossFlag global fixed_bossFlag0 global bossMungFlag global bossMungCnt global voice_client1 global channel_info global channel_name global channel_id global channel_voice_name global channel_voice_id global channel_type global endTime global kill_Time if chflg == 1 : if voice_client1.is_connected() == False : voice_client1 = await self.bot.get_channel(basicSetting[6]).connect(reconnect=True) if voice_client1.is_connected() : await dbLoad() await self.bot.get_channel(channel).send( '< 다시 왔습니다! >', tts=False) print("명치복구완료!") while not self.bot.is_closed(): ############ 워닝잡자! ############ if log_stream.getvalue().find("Awaiting") != -1: log_stream.truncate(0) log_stream.seek(0) await self.bot.get_channel(channel).send( '< 디코접속에러! 잠깐 나갔다 올께요! >', tts=False) await dbSave() break log_stream.truncate(0) log_stream.seek(0) ################################## now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) priv0 = now+datetime.timedelta(minutes=int(basicSetting[3])) priv = now+datetime.timedelta(minutes=int(basicSetting[1])) aftr = now+datetime.timedelta(minutes=int(0-int(basicSetting[2]))) if channel != '': ################ 보탐봇 재시작 ################ if endTime.strftime('%Y-%m-%d ') + endTime.strftime('%H:%M:%S') == now.strftime('%Y-%m-%d ') + now.strftime('%H:%M:%S'): await dbSave() await FixedBossDateSave() await data_list_Save("kill_list.ini", "-----척살명단-----", kill_Data) await data_list_Save("item_list.ini", "-----아이템목록-----", item_Data) print("보탐봇재시작!") endTime = endTime + datetime.timedelta(days = int(basicSetting[13])) await voice_client1.disconnect() await asyncio.sleep(2) inidata_restart = repo_restart.get_contents("restart.txt") file_data_restart = base64.b64decode(inidata_restart.content) file_data_restart = file_data_restart.decode('utf-8') inputData_restart = file_data_restart.split('\n') if len(inputData_restart) < 3: contents12 = repo_restart.get_contents("restart.txt") repo_restart.update_file(contents12.path, "restart_0", "restart\nrestart\nrestrat\n", contents12.sha) else: contents12 = repo_restart.get_contents("restart.txt") repo_restart.update_file(contents12.path, "restart_1", "", contents12.sha) ################ 킬 목록 초기화 ################ if kill_Time.strftime('%Y-%m-%d ') + kill_Time.strftime('%H:%M') == now.strftime('%Y-%m-%d ') + now.strftime('%H:%M'): kill_Time = kill_Time + datetime.timedelta(days=int(1)) await init_data_list('kill_list.ini', '-----척살명단-----') ################ 고정 보스 확인 ################ for i in range(fixed_bossNum): ################ before_alert1 ################ if fixed_bossTime[i] <= priv0 and fixed_bossTime[i] > priv: if basicSetting[3] != '0': if fixed_bossFlag0[i] == False: fixed_bossFlag0[i] = True await self.bot.get_channel(channel).send("```" + 
fixed_bossData[i][0] + ' ' + basicSetting[3] + '분 전 ' + fixed_bossData[i][3] +' [' + fixed_bossTime[i].strftime('%H:%M:%S') + ']```', tts=False) await PlaySound(voice_client1, './sound/' + fixed_bossData[i][0] + '알림1.mp3') ################ before_alert ################ if fixed_bossTime[i] <= priv and fixed_bossTime[i] > now: if basicSetting[1] != '0' : if fixed_bossFlag[i] == False: fixed_bossFlag[i] = True await self.bot.get_channel(channel).send("```" + fixed_bossData[i][0] + ' ' + basicSetting[1] + '분 전 ' + fixed_bossData[i][3] +' [' + fixed_bossTime[i].strftime('%H:%M:%S') + ']```', tts=False) await PlaySound(voice_client1, './sound/' + fixed_bossData[i][0] + '알림.mp3') ################ 보스 젠 시간 확인 ################ if fixed_bossTime[i] <= now : fixed_bossTime[i] = fixed_bossTime[i]+datetime.timedelta(hours=int(fixed_bossData[i][5]), minutes=int(fixed_bossData[i][6]), seconds = int(0)) fixed_bossFlag0[i] = False fixed_bossFlag[i] = False embed = discord.Embed( description= "```" + fixed_bossData[i][0] + fixed_bossData[i][4] + "```" , color=0x00ff00 ) await self.bot.get_channel(channel).send(embed=embed, tts=False) await PlaySound(voice_client1, './sound/' + fixed_bossData[i][0] + '젠.mp3') ################ 일반 보스 확인 ################ for i in range(bossNum): ################ before_alert1 ################ if bossTime[i] <= priv0 and bossTime[i] > priv: if basicSetting[3] != '0': if bossFlag0[i] == False: bossFlag0[i] = True if bossData[i][6] != '' : await self.bot.get_channel(channel).send("```" + bossData[i][0] + ' ' + basicSetting[3] + '분 전 ' + bossData[i][3] + " [" + bossTimeString[i] + "]" + '\n<' + bossData[i][6] + '>```', tts=False) else : await self.bot.get_channel(channel).send("```" + bossData[i][0] + ' ' + basicSetting[3] + '분 전 ' + bossData[i][3] + " [" + bossTimeString[i] + "]```", tts=False) await PlaySound(voice_client1, './sound/' + bossData[i][0] + '알림1.mp3') ################ before_alert ################ if bossTime[i] <= priv and bossTime[i] > now: if basicSetting[1] != '0' : if bossFlag[i] == False: bossFlag[i] = True if bossData[i][6] != '' : await self.bot.get_channel(channel).send("```" + bossData[i][0] + ' ' + basicSetting[1] + '분 전 ' + bossData[i][3] + " [" + bossTimeString[i] + "]" + '\n<' + bossData[i][6] + '>```', tts=False) else : await self.bot.get_channel(channel).send("```" + bossData[i][0] + ' ' + basicSetting[1] + '분 전 ' + bossData[i][3] + " [" + bossTimeString[i] + "]```", tts=False) await PlaySound(voice_client1, './sound/' + bossData[i][0] + '알림.mp3') ################ 보스 젠 시간 확인 ################ if bossTime[i] <= now : #print ('if ', bossTime[i]) bossMungFlag[i] = True tmp_bossTime[i] = bossTime[i] tmp_bossTimeString[i] = tmp_bossTime[i].strftime('%H:%M:%S') tmp_bossDateString[i] = tmp_bossTime[i].strftime('%Y-%m-%d') bossTimeString[i] = '99:99:99' bossDateString[i] = '9999-99-99' bossTime[i] = now+datetime.timedelta(days=365) if bossData[i][6] != '' : embed = discord.Embed( description= "```" + bossData[i][0] + bossData[i][4] + '\n<' + bossData[i][6] + '>```' , color=0x00ff00 ) else : embed = discord.Embed( description= "```" + bossData[i][0] + bossData[i][4] + "```" , color=0x00ff00 ) await self.bot.get_channel(channel).send(embed=embed, tts=False) await PlaySound(voice_client1, './sound/' + bossData[i][0] + '젠.mp3') ################ 보스 자동 멍 처리 ################ if bossMungFlag[i] == True: if (bossTime[i]+datetime.timedelta(days=-365)) <= aftr: if basicSetting[2] != '0': if int(basicSetting[17]) <= bossMungCnt[i] and int(basicSetting[17]) != 0: 
bossTime[i] = datetime.datetime.now()+datetime.timedelta(days=365, hours = int(basicSetting[0])) tmp_bossTime[i] = datetime.datetime.now()+datetime.timedelta(days=365, hours = int(basicSetting[0])) bossTimeString[i] = '99:99:99' bossDateString[i] = '9999-99-99' tmp_bossTimeString[i] = '99:99:99' tmp_bossDateString[i] = '9999-99-99' bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = 0 if bossData[i][2] == '0': await self.bot.get_channel(channel).send(f'```자동 미입력 횟수 {basicSetting[17]}회 초과! [{bossData[i][0]}] 삭제!```', tts=False) print ('자동미입력 횟수초과 <' + bossData[i][0] + ' 삭제완료>') else: await self.bot.get_channel(channel).send(f'```자동 멍처리 횟수 {basicSetting[17]}회 초과! [{bossData[i][0]}] 삭제!```', tts=False) print ('자동멍처리 횟수초과 <' + bossData[i][0] + ' 삭제완료>') #await dbSave() else: ################ 미입력 보스 ################ if bossData[i][2] == '0': bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = bossMungCnt[i] + 1 tmp_bossTime[i] = bossTime[i] = nextTime = tmp_bossTime[i]+datetime.timedelta(hours=int(bossData[i][1]), minutes=int(bossData[i][5])) tmp_bossTimeString[i] = bossTimeString[i] = nextTime.strftime('%H:%M:%S') tmp_bossDateString[i] = bossDateString[i] = nextTime.strftime('%Y-%m-%d') await self.bot.get_channel(channel).send("```" + bossData[i][0] + ' 미입력 됐습니다.```', tts=False) embed = discord.Embed( description= '```다음 ' + bossData[i][0] + ' ' + bossTimeString[i] + '입니다.```', color=0xff0000 ) await self.bot.get_channel(channel).send(embed=embed, tts=False) await PlaySound(voice_client1, './sound/' + bossData[i][0] + '미입력.mp3') ################ 멍 보스 ################ else : bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = bossMungCnt[i] + 1 tmp_bossTime[i] = bossTime[i] = nextTime = tmp_bossTime[i]+datetime.timedelta(hours=int(bossData[i][1]), minutes=int(bossData[i][5])) tmp_bossTimeString[i] = bossTimeString[i] = nextTime.strftime('%H:%M:%S') tmp_bossDateString[i] = bossDateString[i] = nextTime.strftime('%Y-%m-%d') await self.bot.get_channel(channel).send("```" + bossData[i][0] + ' 멍 입니다.```') embed = discord.Embed( description= '```다음 ' + bossData[i][0] + ' ' + bossTimeString[i] + '입니다.```', color=0xff0000 ) await self.bot.get_channel(channel).send(embed=embed, tts=False) await PlaySound(voice_client1, './sound/' + bossData[i][0] + '멍.mp3') await asyncio.sleep(1) # task runs every 60 seconds if voice_client1 is not None: if voice_client1.is_playing(): voice_client1.stop() await voice_client1.disconnect() for t in asyncio.Task.all_tasks(): if t._coro.__name__ == f"boss_check": print("-------------") if t.done(): try: t.exception() except asyncio.CancelledError: continue continue t.cancel() await dbSave() await data_list_Save("kill_list.ini", "-----척살명단-----", kill_Data) await data_list_Save("item_list.ini", "-----아이템목록-----", item_Data) boss_task = asyncio.Task(self.boss_check()) class mainCog(commands.Cog): def __init__(self, bot): self.bot = bot ################ 보탐봇 입장 ################ @commands.has_permissions(manage_messages=True) @commands.command(name=command[0][0], aliases=command[0][1:]) async def join_(self, ctx): global basicSetting global chflg global voice_client1 if basicSetting[7] == "": channel = ctx.message.channel.id #메세지가 들어온 채널 ID print ('[ ', basicSetting[7], ' ]') print ('] ', ctx.message.channel.name, ' [') inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') 
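            # (editor's note, names illustrative) the block below persists the
            # chosen text channel by rewriting the "textchannel =" line of
            # test_setting.ini and pushing the file back to GitHub; the same
            # read-modify-write recurs for "voicechannel =" in connectVoice_,
            # so a shared hypothetical helper such as
            #   update_setting_line(key, value)
            # would keep the two paths consistent.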
inputData_textCH = file_data_textCH.split('\n')

            for i in range(len(inputData_textCH)):
                if inputData_textCH[i].startswith("textchannel ="):
                    inputData_textCH[i] = 'textchannel = ' + str(channel) + '\r'
                    basicSetting[7] = channel
                    #print ('======', inputData_text[i])

            result_textCH = '\n'.join(inputData_textCH)
            #print (result_textCH)
            contents = repo.get_contents("test_setting.ini")
            repo.update_file(contents.path, "test_setting", result_textCH, contents.sha)
            await ctx.send(f"< 텍스트채널 [{ctx.message.channel.name}] 접속완료 >\n< 음성채널 접속 후 [{command[5][0]}] 명령을 사용 하세요 >", tts=False)
            print('< 텍스트채널 [' + self.bot.get_channel(basicSetting[7]).name + '] 접속완료>')
            if basicSetting[6] != "":
                voice_client1 = await self.bot.get_channel(basicSetting[6]).connect(reconnect=True)
                print('< 음성채널 [' + self.bot.get_channel(basicSetting[6]).name + '] 접속완료>')
            if basicSetting[8] != "":
                if str(basicSetting[8]) in channel_id:
                    print('< 사다리채널 [' + self.bot.get_channel(int(basicSetting[8])).name + '] 접속완료 >')
                else:
                    basicSetting[8] = ""
                    print(f"사다리채널 ID 오류! [{command[28][0]} 사다리] 명령으로 재설정 바랍니다.")
            if basicSetting[11] != "":
                if str(basicSetting[11]) in channel_id:
                    print('< 정산채널 [' + self.bot.get_channel(int(basicSetting[11])).name + '] 접속완료>')
                else:
                    basicSetting[11] = ""
                    print(f"정산채널 ID 오류! [{command[28][0]} 정산] 명령으로 재설정 바랍니다.")
            if basicSetting[18] != "":
                if str(basicSetting[18]) in channel_id:
                    print('< 척살채널 [' + self.bot.get_channel(int(basicSetting[18])).name + '] 접속완료>')
                else:
                    basicSetting[18] = ""
                    print(f"척살채널 ID 오류! [{command[28][0]} 척살] 명령으로 재설정 바랍니다.")
            if basicSetting[19] != "":
                if str(basicSetting[19]) in channel_id:
                    print('< 경주채널 [' + self.bot.get_channel(int(basicSetting[19])).name + '] 접속완료>')
                else:
                    basicSetting[19] = ""
                    print(f"경주채널 ID 오류! [{command[28][0]} 경주] 명령으로 재설정 바랍니다.")
            if basicSetting[20] != "":
                if str(basicSetting[20]) in channel_id:
                    print('< 아이템채널 [' + self.bot.get_channel(int(basicSetting[20])).name + '] 접속완료>')
                else:
                    basicSetting[20] = ""
                    print(f"아이템채널 ID 오류! [{command[28][0]} 아이템] 명령으로 재설정 바랍니다.")
            if int(basicSetting[13]) != 0:
                print('< 보탐봇 재시작 시간 ' + endTime.strftime('%Y-%m-%d ') + endTime.strftime('%H:%M:%S') + ' >')
                print('< 보탐봇 재시작 주기 ' + basicSetting[13] + '일 >')
            else:
                print('< 보탐봇 재시작 설정안됨 >')
            chflg = 1
        else:
            for guild in self.bot.guilds:
                for text_channel in guild.text_channels:
                    if basicSetting[7] == text_channel.id:
                        curr_guild_info = guild
                        setting_channel_name = text_channel.name
            emoji_list: list = ["⭕", "❌"]
            guild_error_message = await ctx.send(f"이미 **[{curr_guild_info.name}]** 서버 **[{setting_channel_name}]** 채널이 명령어 채널로 설정되어 있습니다.\n해당 채널로 명령어 채널을 변경 하시려면 ⭕ 그대로 사용하시려면 ❌ 를 눌러주세요.\n(10초이내 미입력시 기존 설정 그대로 설정됩니다.)", tts=False)
            for emoji in emoji_list:
                await guild_error_message.add_reaction(emoji)

            def reaction_check(reaction, user):
                return (reaction.message.id == guild_error_message.id) and (user.id == ctx.author.id) and (str(reaction) in emoji_list)

            try:
                reaction, user = await self.bot.wait_for('reaction_add', check=reaction_check, timeout=10)
            except asyncio.TimeoutError:
                return await ctx.send(f"시간이 초과됐습니다.
**[{curr_guild_info.name}]** 서버 **[{setting_channel_name}]** 채널에서 사용해주세요!") if str(reaction) == "⭕": await voice_client1.disconnect() basicSetting[6] = "" basicSetting[7] = int(ctx.message.channel.id) print ('[ ', basicSetting[7], ' ]') print ('] ', ctx.message.channel.name, ' [') inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') inputData_textCH = file_data_textCH.split('\n') for i in range(len(inputData_textCH)): if inputData_textCH[i].startswith("textchannel ="): inputData_textCH[i] = 'textchannel = ' + str(basicSetting[7]) + '\r' result_textCH = '\n'.join(inputData_textCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_textCH, contents.sha) return await ctx.send(f"명령어 채널이 **[{ctx.author.guild.name}]** 서버 **[{ctx.message.channel.name}]** 채널로 새로 설정되었습니다.\n< 음성채널 접속 후 [{command[5][0]}] 명령을 사용 하세요 >") else: return await ctx.send(f"명령어 채널 설정이 취소되었습니다.\n**[{curr_guild_info.name}]** 서버 **[{setting_channel_name}]** 채널에서 사용해주세요!") ################ 보탐봇 메뉴 출력 ################ @commands.command(name=command[1][0], aliases=command[1][1:]) async def menu_(self, ctx): if ctx.message.channel.id == basicSetting[7]: command_list = '' command_list += ','.join(command[2]) + '\n' #!설정확인 command_list += ','.join(command[3]) + '\n' #!채널확인 command_list += ','.join(command[4]) + ' [채널명]\n' #!채널이동 command_list += ','.join(command[5]) + ' ※ 관리자만 실행 가능\n' #!소환 command_list += ','.join(command[6]) + '\n' #!불러오기 command_list += ','.join(command[7]) + '\n' #!초기화 command_list += ','.join(command[8]) + '\n' #!명치 command_list += ','.join(command[9]) + '\n' #!재시작 command_list += ','.join(command[10]) + '\n' #!미예약 command_list += ','.join(command[11]) + ' [인원] [금액]\n' #!분배 command_list += ','.join(command[12]) + ' [뽑을인원수] [아이디1] [아이디2]...\n' #!사다리 command_list += ','.join(command[27]) + ' [아이디1] [아이디2]...(최대 12명)\n' #!경주 command_list += ','.join(command[35]) + ' [판매금액] (거래소세금)\n' #!수수료 command_list += ','.join(command[36]) + ' [거래소금액] [실거래금액] (거래소세금)\n' #!페이백 command_list += ','.join(command[13]) + ' [아이디]\n' #!정산 command_list += ','.join(command[14]) + ' 또는 ' + ','.join(command[14]) + ' 0000, 00:00\n' #!보스일괄 command_list += ','.join(command[15]) + '\n' #!q command_list += ','.join(command[16]) + ' [할말]\n' #!v command_list += ','.join(command[17]) + '\n' #!리젠 command_list += ','.join(command[18]) + '\n' #!현재시간 command_list += ','.join(command[24]) + '\n' #!킬초기화 command_list += ','.join(command[25]) + '\n' #!킬횟수 확인 command_list += ','.join(command[25]) + ' [아이디]\n' #!킬 command_list += ','.join(command[26]) + ' [아이디]\n' #!킬삭제 command_list += ','.join(command[33]) + ' [아이디] 또는 ' + ','.join(command[33]) + ' [아이디] [횟수]\n' #!킬차감 command_list += ','.join(command[29]) + '\n' #!아이템 목록 초기화 command_list += ','.join(command[30]) + '\n' #!아이템 목록 확인 command_list += ','.join(command[30]) + ' [아이템] 또는 ' + ','.join(command[30]) + ' [아이템] [개수]\n' #!아이템 목록 입력 command_list += ','.join(command[31]) + ' [아이템]\n' #!아이템 목록에서 삭제 command_list += ','.join(command[32]) + ' [아이템] 또는 ' + ','.join(command[32]) + ' [아이템] [개수]\n' #!아이템 차감 command_list += ','.join(command[19]) + '\n' #!공지 command_list += ','.join(command[19]) + ' [공지내용]\n' #!공지 command_list += ','.join(command[20]) + '\n' #!공지삭제 command_list += ','.join(command[21]) + ' [할말]\n' #!상태 command_list += ','.join(command[28]) + ' 사다리, 정산, 척살, 경주, 아이템\n' #!채널설정 command_list += ','.join(command[34]) + ' ※ 관리자만 실행 가능\n\n' 
#서버나가기 command_list += ','.join(command[22]) + '\n' #보스탐 command_list += ','.join(command[23]) + '\n' #!보스탐 command_list += '[보스명]컷 또는 [보스명]컷 0000, 00:00\n' command_list += '[보스명] 컷 또는 [보스명] 컷 0000, 00:00\n' command_list += '[보스명]멍 또는 [보스명]멍 0000, 00:00\n' command_list += '[보스명]예상 또는 [보스명]예상 0000, 00:00\n' command_list += '[보스명]삭제\n' command_list += '[보스명]메모 [할말]\n' embed = discord.Embed( title = "----- 명령어 -----", description= '```' + command_list + '```', color=0xff00ff ) embed.add_field( name="----- 추가기능 -----", value= '```- [보스명]컷/멍/예상 [할말] : 보스시간 입력 후 빈칸 두번!! 메모 가능\n- [보스명]컷 명령어는 초성으로 입력가능합니다.\n ex)' + bossData[0][0] + '컷 => ' + convertToInitialLetters(bossData[0][0] +'컷') + ', ' + bossData[0][0] + ' 컷 => ' + convertToInitialLetters(bossData[0][0] +' 컷') + '```' ) await ctx.send( embed=embed, tts=False) else: return ################ 보탐봇 기본 설정확인 ################ @commands.command(name=command[2][0], aliases=command[2][1:]) async def setting_(self, ctx): #print (ctx.message.channel.id) if ctx.message.channel.id == basicSetting[7]: setting_val = '보탐봇버전 : Server Ver. 19 (2020. 8. 6.)\n' setting_val += '음성채널 : ' + self.bot.get_channel(basicSetting[6]).name + '\n' setting_val += '텍스트채널 : ' + self.bot.get_channel(basicSetting[7]).name +'\n' if basicSetting[8] != "" : setting_val += '사다리채널 : ' + self.bot.get_channel(int(basicSetting[8])).name + '\n' if basicSetting[11] != "" : setting_val += '정산채널 : ' + self.bot.get_channel(int(basicSetting[11])).name + '\n' if basicSetting[18] != "" : setting_val += '척살채널 : ' + self.bot.get_channel(int(basicSetting[18])).name + '\n' if basicSetting[19] != "" : setting_val += '경주채널 : ' + self.bot.get_channel(int(basicSetting[19])).name + '\n' if basicSetting[20] != "" : setting_val += '아이템채널 : ' + self.bot.get_channel(int(basicSetting[20])).name + '\n' setting_val += '보스젠알림시간1 : ' + basicSetting[1] + ' 분 전\n' setting_val += '보스젠알림시간2 : ' + basicSetting[3] + ' 분 전\n' setting_val += '보스멍확인시간 : ' + basicSetting[2] + ' 분 후\n' embed = discord.Embed( title = "----- 설정내용 -----", description= f'```{setting_val}```', color=0xff00ff ) embed.add_field( name="----- Special Thanks to. 
-----", value= '```총무님, 옹님```' ) await ctx.send(embed=embed, tts=False) else: return ################ 서버 채널 확인 ################ @commands.command(name=command[3][0], aliases=command[3][1:]) async def chChk_(self, ctx): if ctx.message.channel.id == basicSetting[7]: channel_name, channel_id, channel_voice_name, channel_voice_id = await get_guild_channel_info(self.bot) ch_information = [] cnt = 0 ch_information.append("") ch_voice_information = [] cntV = 0 ch_voice_information.append("") for guild in self.bot.guilds: ch_information[cnt] = f"{ch_information[cnt]}👑 {guild.name} 👑\n" for i in range(len(channel_name)): for text_channel in guild.text_channels: if channel_id[i] == str(text_channel.id): if len(ch_information[cnt]) > 900 : ch_information.append("") cnt += 1 ch_information[cnt] = f"{ch_information[cnt]}[{channel_id[i]}] {channel_name[i]}\n" ch_voice_information[cntV] = f"{ch_voice_information[cntV]}👑 {guild.name} 👑\n" for i in range(len(channel_voice_name)): for voice_channel in guild.voice_channels: if channel_voice_id[i] == str(voice_channel.id): if len(ch_voice_information[cntV]) > 900 : ch_voice_information.append("") cntV += 1 ch_voice_information[cntV] = f"{ch_voice_information[cntV]}[{channel_voice_id[i]}] {channel_voice_name[i]}\n" ###################### if len(ch_information) == 1 and len(ch_voice_information) == 1: embed = discord.Embed( title = "----- 채널 정보 -----", description= '', color=0xff00ff ) embed.add_field( name="< 택스트 채널 >", value= '```' + ch_information[0] + '```', inline = False ) embed.add_field( name="< 보이스 채널 >", value= '```' + ch_voice_information[0] + '```', inline = False ) await ctx.send( embed=embed, tts=False) else : embed = discord.Embed( title = "----- 채널 정보 -----\n< 택스트 채널 >", description= '```' + ch_information[0] + '```', color=0xff00ff ) await ctx.send( embed=embed, tts=False) for i in range(len(ch_information)-1): embed = discord.Embed( title = '', description= '```' + ch_information[i+1] + '```', color=0xff00ff ) await ctx.send( embed=embed, tts=False) embed = discord.Embed( title = "< 음성 채널 >", description= '```' + ch_voice_information[0] + '```', color=0xff00ff ) await ctx.send( embed=embed, tts=False) for i in range(len(ch_voice_information)-1): embed = discord.Embed( title = '', description= '```' + ch_voice_information[i+1] + '```', color=0xff00ff ) await ctx.send( embed=embed, tts=False) else: return ################ 텍스트채널이동 ################ @commands.command(name=command[4][0], aliases=command[4][1:]) async def chMove_(self, ctx): global basicSetting if ctx.message.channel.id == basicSetting[7]: msg = ctx.message.content[len(ctx.invoked_with)+1:] for i in range(len(channel_name)): if channel_name[i] == msg: channel = int(channel_id[i]) inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') inputData_textCH = file_data_textCH.split('\n') for i in range(len(inputData_textCH)): if inputData_textCH[i].startswith('textchannel ='): inputData_textCH[i] = 'textchannel = ' + str(channel) + '\r' basicSetting[7] = int(channel) result_textCH = '\n'.join(inputData_textCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_textCH, contents.sha) await ctx.send( f"명령어 채널이 < {ctx.message.channel.name} >에서 < {self.bot.get_channel(channel).name} > 로 이동되었습니다.", tts=False) await self.bot.get_channel(channel).send( f"< {self.bot.get_channel(channel).name} 이동완료 >", tts=False) else: return 
################ 보탐봇 음성채널 소환 ################ @commands.has_permissions(manage_messages=True) @commands.command(name=command[5][0], aliases=command[5][1:]) async def connectVoice_(self, ctx): global voice_client1 global basicSetting if ctx.message.channel.id == basicSetting[7]: if ctx.voice_client is None: if ctx.author.voice: voice_client1 = await ctx.author.voice.channel.connect(reconnect = True) else: await ctx.send('음성채널에 먼저 들어가주세요.', tts=False) return else: if ctx.voice_client.is_playing(): ctx.voice_client.stop() await ctx.voice_client.move_to(ctx.author.voice.channel) voice_channel = ctx.author.voice.channel print ('< ', basicSetting[6], ' >') print ('> ', self.bot.get_channel(voice_channel.id).name, ' <') if basicSetting[6] == "": inidata_voiceCH = repo.get_contents("test_setting.ini") file_data_voiceCH = base64.b64decode(inidata_voiceCH.content) file_data_voiceCH = file_data_voiceCH.decode('utf-8') inputData_voiceCH = file_data_voiceCH.split('\n') for i in range(len(inputData_voiceCH)): if inputData_voiceCH[i].startswith('voicechannel ='): inputData_voiceCH[i] = 'voicechannel = ' + str(voice_channel.id) + '\r' basicSetting[6] = int(voice_channel.id) result_voiceCH = '\n'.join(inputData_voiceCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_voiceCH, contents.sha) elif basicSetting[6] != int(voice_channel.id): inidata_voiceCH = repo.get_contents("test_setting.ini") file_data_voiceCH = base64.b64decode(inidata_voiceCH.content) file_data_voiceCH = file_data_voiceCH.decode('utf-8') inputData_voiceCH = file_data_voiceCH.split('\n') for i in range(len(inputData_voiceCH)): if inputData_voiceCH[i].startswith('voicechannel ='): inputData_voiceCH[i] = 'voicechannel = ' + str(voice_channel.id) + '\r' basicSetting[6] = int(voice_channel.id) result_voiceCH = '\n'.join(inputData_voiceCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_voiceCH, contents.sha) await ctx.send('< 음성채널 [' + self.bot.get_channel(voice_channel.id).name + '] 접속완료>', tts=False) else: return ################ my_bot.db에 저장된 보스타임 불러오기 ################ @commands.command(name=command[6][0], aliases=command[6][1:]) async def loadDB_(self, ctx): if ctx.message.channel.id == basicSetting[7]: await dbLoad() if LoadChk == 0: await ctx.send('<불러오기 완료>', tts=False) else: await ctx.send('<보스타임 정보가 없습니다.>', tts=False) else: return ################ 저장된 정보 초기화 ################ @commands.command(name=command[7][0], aliases=command[7][1:]) async def initVal_(self, ctx): global basicSetting global bossData global fixed_bossData global bossTime global tmp_bossTime global fixed_bossTime global bossTimeString global bossDateString global tmp_bossTimeString global tmp_bossDateString global bossFlag global bossFlag0 global fixed_bossFlag global fixed_bossFlag0 global bossMungFlag global bossMungCnt global FixedBossDateData global indexFixedBossname if ctx.message.channel.id == basicSetting[7]: basicSetting = [] bossData = [] fixed_bossData = [] bossTime = [] tmp_bossTime = [] fixed_bossTime = [] bossTimeString = [] bossDateString = [] tmp_bossTimeString = [] tmp_bossDateString = [] bossFlag = [] bossFlag0 = [] fixed_bossFlag = [] fixed_bossFlag0 = [] bossMungFlag = [] bossMungCnt = [] FixedBossDateData = [] indexFixedBossname = [] init() await dbSave() await ctx.send('< 초기화 완료 >', tts=False) print ("< 초기화 완료 >") else: return ################ 보탐봇 재시작 ################ @commands.command(name=command[9][0], aliases=command[9][1:]) async 
def restart_(self, ctx): global basicSetting global bossTimeString global bossDateString if ctx.message.channel.id == basicSetting[7]: if basicSetting[2] != '0': for i in range(bossNum): if bossMungFlag[i] == True: bossTimeString[i] = tmp_bossTime[i].strftime('%H:%M:%S') bossDateString[i] = tmp_bossTime[i].strftime('%Y-%m-%d') await dbSave() await data_list_Save("kill_list.ini", "-----척살명단-----", kill_Data) await data_list_Save("item_list.ini", "-----아이템목록-----", item_Data) await voice_client1.disconnect() print("보탐봇강제재시작!") await asyncio.sleep(2) inidata_restart = repo_restart.get_contents("restart.txt") file_data_restart = base64.b64decode(inidata_restart.content) file_data_restart = file_data_restart.decode('utf-8') inputData_restart = file_data_restart.split('\n') if len(inputData_restart) < 3: contents12 = repo_restart.get_contents("restart.txt") repo_restart.update_file(contents12.path, "restart_0", "restart\nrestart\nrestrat\n", contents12.sha) else: contents12 = repo_restart.get_contents("restart.txt") repo_restart.update_file(contents12.path, "restart_1", "", contents12.sha) else: return ################ 미예약 보스타임 출력 ################ @commands.command(name=command[10][0], aliases=command[10][1:]) async def nocheckBoss_(self, ctx): if ctx.message.channel.id == basicSetting[7]: tmp_boss_information = [] tmp_cnt = 0 tmp_boss_information.append('') for i in range(bossNum): if bossTimeString[i] == '99:99:99' and bossMungFlag[i] != True : if len(tmp_boss_information[tmp_cnt]) > 1800 : tmp_boss_information.append('') tmp_cnt += 1 tmp_boss_information[tmp_cnt] = tmp_boss_information[tmp_cnt] + bossData[i][0] + ',' if len(tmp_boss_information) == 1: if len(tmp_boss_information[0]) != 0: tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0][:len(tmp_boss_information[0])-1] + "\n```" else : tmp_boss_information[0] = '``` ```' embed = discord.Embed( title = "----- 미예약 보스 -----", description= tmp_boss_information[0], color=0x0000ff ) await ctx.send( embed=embed, tts=False) else: if len(tmp_boss_information[0]) != 0: if len(tmp_boss_information) == 1 : tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0][:len(tmp_boss_information[0])-1] + "\n```" else: tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0] + "\n```" else : tmp_boss_information[0] = '``` ```' embed = discord.Embed( title = "----- 미예약 보스 -----", description= tmp_boss_information[0], color=0x0000ff ) await ctx.send( embed=embed, tts=False) for i in range(len(tmp_boss_information)-1): if len(tmp_boss_information[i+1]) != 0: if i == len(tmp_boss_information)-2: tmp_boss_information[i+1] = "```fix\n" + tmp_boss_information[i+1][:len(tmp_boss_information[i+1])-1] + "\n```" else: tmp_boss_information[i+1] = "```fix\n" + tmp_boss_information[i+1] + "\n```" else : tmp_boss_information[i+1] = '``` ```' embed = discord.Embed( title = '', description= tmp_boss_information[i+1], color=0x0000ff ) await ctx.send( embed=embed, tts=False) else: return ################ 분배 결과 출력 ################ @commands.command(name=command[11][0], aliases=command[11][1:]) async def bunbae_(self, ctx): if ctx.message.channel.id == basicSetting[7]: msg = ctx.message.content[len(ctx.invoked_with)+1:] separate_money = [] separate_money = msg.split(" ") num_sep = floor(int(separate_money[0])) cal_tax1 = floor(float(separate_money[1])*0.05) real_money = floor(floor(float(separate_money[1])) - cal_tax1) cal_tax2 = floor(real_money/num_sep) - floor(float(floor(real_money/num_sep))*0.95) if num_sep == 0 : await ctx.send('```분배 인원이 0입니다. 
재입력 해주세요.```', tts=False) else : embed = discord.Embed( title = "----- 분배결과! -----", description= '```1차 세금 : ' + str(cal_tax1) + '\n1차 수령액 : ' + str(real_money) + '\n분배자 거래소등록금액 : ' + str(floor(real_money/num_sep)) + '\n2차 세금 : ' + str(cal_tax2) + '\n인당 실수령액 : ' + str(floor(float(floor(real_money/num_sep))*0.95)) + '```', color=0xff00ff ) await ctx.send(embed=embed, tts=False) else: return ################ 사다리 결과 출력 ################ @commands.command(name=command[12][0], aliases=command[12][1:]) async def ladder_(self, ctx): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[8]: msg = ctx.message.content[len(ctx.invoked_with)+1:] ladder = [] ladder = msg.split(" ") try: num_cong = int(ladder[0]) del(ladder[0]) except ValueError: return await ctx.send('```뽑을 인원은 숫자로 입력바랍니다\nex)!사다리 1 가 나 다 ...```') await LadderFunc(num_cong, ladder, ctx) else: return ################ 정산확인 ################ @commands.command(name=command[13][0], aliases=command[13][1:]) async def jungsan_(self, ctx): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[11]: msg = ctx.message.content[len(ctx.invoked_with)+1:] if basicSetting[10] !="" and basicSetting[12] !="" and basicSetting[14] !="" and basicSetting[15] !="" and basicSetting[16] !="" : SearchID = msg gc = gspread.authorize(credentials) wks = gc.open(basicSetting[12]).worksheet(basicSetting[14]) wks.update_acell(basicSetting[15], SearchID) result = wks.acell(basicSetting[16]).value embed = discord.Embed( description= '```' + SearchID + ' 님이 받을 다이야는 ' + result + ' 다이야 입니다.```', color=0xff00ff ) await ctx.send(embed=embed, tts=False) else: return ################ 보스타임 일괄 설정 ################ @commands.command(name=command[14][0], aliases=command[14][1:]) async def allBossInput_(self, ctx): global basicSetting global bossData global fixed_bossData global bossTime global tmp_bossTime global fixed_bossTime global bossTimeString global bossDateString global tmp_bossTimeString global tmp_bossDateString global bossFlag global bossFlag0 global bossMungFlag global bossMungCnt if ctx.message.channel.id == basicSetting[7]: msg = ctx.message.content[len(ctx.invoked_with)+1:] for i in range(bossNum): tmp_msg = msg if len(tmp_msg) > 3 : if tmp_msg.find(':') != -1 : chkpos = tmp_msg.find(':') hours1 = tmp_msg[chkpos-2:chkpos] minutes1 = tmp_msg[chkpos+1:chkpos+3] now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) else: chkpos = len(tmp_msg)-2 hours1 = tmp_msg[chkpos-2:chkpos] minutes1 = tmp_msg[chkpos:chkpos+2] now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) else: now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = now2 bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = 1 if tmp_now > now2 : tmp_now = tmp_now + datetime.timedelta(days=int(-1)) if tmp_now < now2 : deltaTime = datetime.timedelta(hours = int(bossData[i][1]), minutes = int(bossData[i][5])) while now2 > tmp_now : tmp_now = tmp_now + deltaTime bossMungCnt[i] = bossMungCnt[i] + 1 now2 = tmp_now bossMungCnt[i] = bossMungCnt[i] - 1 else : now2 = now2 + datetime.timedelta(hours = int(bossData[i][1]), minutes = 
int(bossData[i][5])) tmp_bossTime[i] = bossTime[i] = nextTime = now2 tmp_bossTimeString[i] = bossTimeString[i] = nextTime.strftime('%H:%M:%S') tmp_bossDateString[i] = bossDateString[i] = nextTime.strftime('%Y-%m-%d') await dbSave() await dbLoad() await dbSave() await ctx.send('<보스 일괄 입력 완료>', tts=False) print ("<보스 일괄 입력 완료>") else: return ################ 가장 근접한 보스타임 출력 ################ @commands.command(name=command[15][0], aliases=command[15][1:]) async def nearTimeBoss_(self, ctx): if ctx.message.channel.id == basicSetting[7]: checkTime = datetime.datetime.now() + datetime.timedelta(days=1, hours = int(basicSetting[0])) datelist = [] datelist2 = [] ouput_bossData = [] aa = [] sorted_datelist = [] for i in range(bossNum): if bossMungFlag[i] != True and bossTimeString[i] != '99:99:99' : datelist2.append(bossTime[i]) for i in range(fixed_bossNum): if fixed_bossTime[i] < datetime.datetime.now() + datetime.timedelta(hours=int(basicSetting[0])+3): datelist2.append(fixed_bossTime[i]) datelist = list(set(datelist2)) for i in range(bossNum): if bossMungFlag[i] != True : aa.append(bossData[i][0]) #output_bossData[0] : 보스명 aa.append(bossTime[i]) #output_bossData[1] : 시간 aa.append(bossTime[i].strftime('%H:%M:%S')) #output_bossData[2] : 시간(00:00:00) ouput_bossData.append(aa) aa = [] for i in range(fixed_bossNum): aa.append(fixed_bossData[i][0]) #output_bossData[0] : 보스명 aa.append(fixed_bossTime[i]) #output_bossData[1] : 시간 aa.append(fixed_bossTime[i].strftime('%H:%M:%S')) #output_bossData[2] : 시간(00:00:00) ouput_bossData.append(aa) aa = [] tmp_sorted_datelist = sorted(datelist) for i in range(len(tmp_sorted_datelist)): if checkTime > tmp_sorted_datelist[i]: sorted_datelist.append(tmp_sorted_datelist[i]) if len(sorted_datelist) == 0: await ctx.send( '<보스타임 정보가 없습니다.>', tts=False) else : result_lefttime = '' if len(sorted_datelist) > int(basicSetting[9]): for j in range(int(basicSetting[9])): for i in range(len(ouput_bossData)): if sorted_datelist[j] == ouput_bossData[i][1]: leftTime = ouput_bossData[i][1] - (datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0]))) total_seconds = int(leftTime.total_seconds()) hours, remainder = divmod(total_seconds,60*60) minutes, seconds = divmod(remainder,60) result_lefttime += '다음 ' + ouput_bossData[i][0] + '탐까지 %02d:%02d:%02d 남았습니다. ' % (hours,minutes,seconds) + '[' + ouput_bossData[i][2] + ']\n' else : for j in range(len(sorted_datelist)): for i in range(len(ouput_bossData)): if sorted_datelist[j] == ouput_bossData[i][1]: leftTime = ouput_bossData[i][1] - (datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0]))) total_seconds = int(leftTime.total_seconds()) hours, remainder = divmod(total_seconds,60*60) minutes, seconds = divmod(remainder,60) result_lefttime += '다음 ' + ouput_bossData[i][0] + '탐까지 %02d:%02d:%02d 남았습니다. 
' % (hours,minutes,seconds) + '[' + ouput_bossData[i][2] + ']\n' embed = discord.Embed( description= result_lefttime, color=0xff0000 ) await ctx.send( embed=embed, tts=False) else: return ################ 음성파일 생성 후 재생 ################ @commands.command(name=command[16][0], aliases=command[16][1:]) async def playText_(self, ctx): if ctx.message.channel.id == basicSetting[7]: msg = ctx.message.content[len(ctx.invoked_with)+1:] sayMessage = msg await MakeSound(ctx.message.author.display_name +'님이, ' + sayMessage, './sound/say') await ctx.send("```< " + ctx.author.display_name + " >님이 \"" + sayMessage + "\"```", tts=False) await PlaySound(voice_client1, './sound/say.wav') else: return ################ 리젠시간 출력 ################ @commands.command(name=command[17][0], aliases=command[17][1:]) async def regenTime_(self, ctx): if ctx.message.channel.id == basicSetting[7]: await ctx.send(embed=regenembed, tts=False) else: return ################ 현재시간 확인 ################ @commands.command(name=command[18][0], aliases=command[18][1:]) async def currentTime_(self, ctx): if ctx.message.channel.id == basicSetting[7]: curruntTime = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) embed = discord.Embed( title = '현재시간은 ' + curruntTime.strftime('%H') + '시 ' + curruntTime.strftime('%M') + '분 ' + curruntTime.strftime('%S')+ '초 입니다.', color=0xff00ff ) await ctx.send( embed=embed, tts=False) else: return ################ 공지 등록/확인 ################ @commands.command(name=command[19][0], aliases=command[19][1:]) async def notice_(self, ctx): if ctx.message.channel.id == basicSetting[7]: msg = ctx.message.content.split(" ") if len(msg) > 1: sayMessage = " ".join(msg[1:]) contents = repo.get_contents("notice.ini") repo.update_file(contents.path, "notice 등록", sayMessage, contents.sha) await ctx.send( '< 공지 등록완료 >', tts=False) else: notice_initdata = repo.get_contents("notice.ini") notice = base64.b64decode(notice_initdata.content) notice = notice.decode('utf-8') if notice != '' : embed = discord.Embed( description= str(notice), color=0xff00ff ) else : embed = discord.Embed( description= '```등록된 공지가 없습니다.```', color=0xff00ff ) await ctx.send(embed=embed, tts=False) else: return ################ 공지 삭제 ################ @commands.command(name=command[20][0], aliases=command[20][1:]) async def noticeDel_(self, ctx): if ctx.message.channel.id == basicSetting[7]: contents = repo.get_contents("notice.ini") repo.update_file(contents.path, "notice 삭제", '', contents.sha) await ctx.send( '< 공지 삭제완료 >', tts=False) else: return ################ 봇 상태메세지 변경 ################ @commands.command(name=command[21][0], aliases=command[21][1:]) async def botStatus_(self, ctx): if ctx.message.channel.id == basicSetting[7]: msg = ctx.message.content[len(ctx.invoked_with)+1:] sayMessage = msg await self.bot.change_presence(status=discord.Status.dnd, activity=discord.Game(name=sayMessage, type=1), afk = False) await ctx.send( '< 상태메세지 변경완료 >', tts=False) else: return ################ 보스타임 출력 ################ @commands.command(name=command[22][0], aliases=command[22][1:]) async def bossTime_(self, ctx): if ctx.message.channel.id == basicSetting[7]: datelist = [] datelist2 = [] ouput_bossData = [] aa = [] for i in range(bossNum): if bossMungFlag[i] == True : datelist2.append(tmp_bossTime[i]) else : datelist2.append(bossTime[i]) for i in range(fixed_bossNum): if fixed_bossTime[i] < datetime.datetime.now() + datetime.timedelta(hours=int(basicSetting[0])+3): datelist2.append(fixed_bossTime[i]) datelist = list(set(datelist2)) 
tmp_boss_information = [] tmp_cnt = 0 tmp_boss_information.append('') for i in range(bossNum): if bossTimeString[i] == '99:99:99' and bossMungFlag[i] != True : if len(tmp_boss_information[tmp_cnt]) > 1000 : tmp_boss_information.append('') tmp_cnt += 1 tmp_boss_information[tmp_cnt] = tmp_boss_information[tmp_cnt] + bossData[i][0] + ',' else : aa.append(bossData[i][0]) #output_bossData[0] : 보스명 if bossMungFlag[i] == True : aa.append(tmp_bossTime[i]) #output_bossData[1] : 시간 aa.append(tmp_bossTime[i].strftime('%H:%M:%S')) #output_bossData[2] : 시간(00:00:00) -> 초빼기 : aa.append(tmp_bossTime[i].strftime('%H:%M')) aa.append('-') #output_bossData[3] : - else : aa.append(bossTime[i]) #output_bossData[1] : 시간 aa.append(bossTime[i].strftime('%H:%M:%S')) #output_bossData[2] : 시간(00:00:00) -> 초빼기 : aa.append(bossTime[i].strftime('%H:%M')) aa.append('+') #output_bossData[3] : + aa.append(bossData[i][2]) #output_bossData[4] : 멍/미입력 보스 aa.append(bossMungCnt[i]) #output_bossData[5] : 멍/미입력횟수 aa.append(bossData[i][6]) #output_bossData[6] : 메세지 ouput_bossData.append(aa) aa = [] for i in range(fixed_bossNum): aa.append(fixed_bossData[i][0]) #output_bossData[0] : 보스명 aa.append(fixed_bossTime[i]) #output_bossData[1] : 시간 aa.append(fixed_bossTime[i].strftime('%H:%M:%S')) #output_bossData[2] : 시간(00:00:00) -> 초빼기 : aa.append(fixed_bossTime[i].strftime('%H:%M')) aa.append('@') #output_bossData[3] : @ aa.append(0) #output_bossData[4] : 멍/미입력 보스 aa.append(0) #output_bossData[5] : 멍/미입력횟수 aa.append("") #output_bossData[6] : 메세지 ouput_bossData.append(aa) aa = [] boss_information = [] cnt = 0 boss_information.append('') for timestring in sorted(datelist): if len(boss_information[cnt]) > 1800 : boss_information.append('') cnt += 1 for i in range(len(ouput_bossData)): if timestring == ouput_bossData[i][1]: if ouput_bossData[i][4] == '0' : if ouput_bossData[i][5] == 0 : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' ' + ouput_bossData[i][6] + '\n' else : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' (미 ' + str(ouput_bossData[i][5]) + '회)' + ' ' + ouput_bossData[i][6] + '\n' else : if ouput_bossData[i][5] == 0 : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' ' + ouput_bossData[i][6] + '\n' else : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' (멍 ' + str(ouput_bossData[i][5]) + '회)' + ' ' + ouput_bossData[i][6] + '\n' if len(boss_information) == 1 and len(tmp_boss_information) == 1: ########################### if len(boss_information[0]) != 0: boss_information[0] = "```diff\n" + boss_information[0] + "\n```" else : boss_information[0] = '``` ```' if len(tmp_boss_information[0]) != 0: tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0][:len(tmp_boss_information[0])-1] + "\n```" else : tmp_boss_information[0] = '``` ```' embed = discord.Embed( title = "----- 보스탐 정보 -----", description= boss_information[0], color=0x0000ff ) embed.add_field( name="----- 미예약 보스 -----", value= tmp_boss_information[0], inline = False ) await ctx.send( embed=embed, tts=False) else : ###########################일반보스출력 if len(boss_information[0]) != 0: boss_information[0] = "```diff\n" + boss_information[0] + "\n```" else : boss_information[0] = '``` ```' embed = discord.Embed( title = "----- 보스탐 정보 
-----", description= boss_information[0], color=0x0000ff ) await ctx.send( embed=embed, tts=False) for i in range(len(boss_information)-1): if len(boss_information[i+1]) != 0: boss_information[i+1] = "```diff\n" + boss_information[i+1] + "\n```" else : boss_information[i+1] = '``` ```' embed = discord.Embed( title = '', description= boss_information[i+1], color=0x0000ff ) await ctx.send( embed=embed, tts=False) ###########################미예약보스출력 if len(tmp_boss_information[0]) != 0: if len(tmp_boss_information) == 1 : tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0][:len(tmp_boss_information[0])-1] + "\n```" else: tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0] + "\n```" else : tmp_boss_information[0] = '``` ```' embed = discord.Embed( title = "----- 미예약 보스 -----", description= tmp_boss_information[0], color=0x0000ff ) await ctx.send( embed=embed, tts=False) for i in range(len(tmp_boss_information)-1): if len(tmp_boss_information[i+1]) != 0: if i == len(tmp_boss_information)-2: tmp_boss_information[i+1] = "```fix\n" + tmp_boss_information[i+1][:len(tmp_boss_information[i+1])-1] + "\n```" else: tmp_boss_information[i+1] = "```fix\n" + tmp_boss_information[i+1] + "\n```" else : tmp_boss_information[i+1] = '``` ```' embed = discord.Embed( title = '', description= tmp_boss_information[i+1], color=0x0000ff ) await ctx.send( embed=embed, tts=False) await dbSave() await data_list_Save("kill_list.ini", "-----척살명단-----", kill_Data) await data_list_Save("item_list.ini", "-----아이템목록-----", item_Data) else: return ################ 보스타임 출력(고정보스포함) ################ @commands.command(name=command[23][0], aliases=command[23][1:]) async def bossTime_fixed_(self, ctx): if ctx.message.channel.id == basicSetting[7]: datelist = [] datelist2 = [] ouput_bossData = [] aa = [] fixed_datelist = [] for i in range(bossNum): if bossMungFlag[i] == True : datelist2.append(tmp_bossTime[i]) else : datelist2.append(bossTime[i]) datelist = list(set(datelist2)) tmp_boss_information = [] tmp_cnt = 0 tmp_boss_information.append('') for i in range(bossNum): if bossTimeString[i] == '99:99:99' and bossMungFlag[i] != True : if len(tmp_boss_information[tmp_cnt]) > 1800 : tmp_boss_information.append('') tmp_cnt += 1 tmp_boss_information[tmp_cnt] = tmp_boss_information[tmp_cnt] + bossData[i][0] + ',' else : aa.append(bossData[i][0]) #output_bossData[0] : 보스명 if bossMungFlag[i] == True : aa.append(tmp_bossTime[i]) #output_bossData[1] : 시간 aa.append(tmp_bossTime[i].strftime('%H:%M:%S')) #output_bossData[2] : 시간(00:00:00) -> 초빼기 : aa.append(tmp_bossTime[i].strftime('%H:%M')) aa.append('-') #output_bossData[3] : - else : aa.append(bossTime[i]) #output_bossData[1] : 시간 aa.append(bossTime[i].strftime('%H:%M:%S')) #output_bossData[2] : 시간(00:00:00) -> 초빼기 : aa.append(bossTime[i].strftime('%H:%M')) aa.append('+') #output_bossData[3] : + aa.append(bossData[i][2]) #output_bossData[4] : 멍/미입력 보스 aa.append(bossMungCnt[i]) #output_bossData[5] : 멍/미입력횟수 aa.append(bossData[i][6]) #output_bossData[6] : 메세지 ouput_bossData.append(aa) aa = [] for i in range(fixed_bossNum): fixed_datelist.append(fixed_bossTime[i]) fixed_datelist = list(set(fixed_datelist)) fixedboss_information = [] cntF = 0 fixedboss_information.append('') for timestring1 in sorted(fixed_datelist): if len(fixedboss_information[cntF]) > 1800 : fixedboss_information.append('') cntF += 1 for i in range(fixed_bossNum): if timestring1 == fixed_bossTime[i]: if (datetime.datetime.now() + datetime.timedelta(hours=int(basicSetting[0]))).strftime('%Y-%m-%d') == 
fixed_bossTime[i].strftime('%Y-%m-%d'): tmp_timeSTR = fixed_bossTime[i].strftime('%H:%M:%S') #초빼기 : tmp_timeSTR = fixed_bossTime[i].strftime('%H:%M') else: tmp_timeSTR = '[' + fixed_bossTime[i].strftime('%Y-%m-%d') + '] ' + fixed_bossTime[i].strftime('%H:%M:%S') #초빼기 : tmp_timeSTR = '[' + fixed_bossTime[i].strftime('%Y-%m-%d') + '] ' + fixed_bossTime[i].strftime('%H:%M') fixedboss_information[cntF] = fixedboss_information[cntF] + tmp_timeSTR + ' : ' + fixed_bossData[i][0] + '\n' boss_information = [] cnt = 0 boss_information.append('') for timestring in sorted(datelist): if len(boss_information[cnt]) > 1800 : boss_information.append('') cnt += 1 for i in range(len(ouput_bossData)): if timestring == ouput_bossData[i][1]: if ouput_bossData[i][4] == '0' : if ouput_bossData[i][5] == 0 : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' ' + ouput_bossData[i][6] + '\n' else : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' (미 ' + str(ouput_bossData[i][5]) + '회)' + ' ' + ouput_bossData[i][6] + '\n' else : if ouput_bossData[i][5] == 0 : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' ' + ouput_bossData[i][6] + '\n' else : boss_information[cnt] = boss_information[cnt] + ouput_bossData[i][3] + ' ' + ouput_bossData[i][2] + ' : ' + ouput_bossData[i][0] + ' (멍 ' + str(ouput_bossData[i][5]) + '회)' + ' ' + ouput_bossData[i][6] + '\n' ###########################고정보스출력 if len(fixedboss_information[0]) != 0: fixedboss_information[0] = "```diff\n" + fixedboss_information[0] + "\n```" else : fixedboss_information[0] = '``` ```' embed = discord.Embed( title = "----- 고 정 보 스 -----", description= fixedboss_information[0], color=0x0000ff ) await ctx.send( embed=embed, tts=False) for i in range(len(fixedboss_information)-1): if len(fixedboss_information[i+1]) != 0: fixedboss_information[i+1] = "```diff\n" + fixedboss_information[i+1] + "\n```" else : fixedboss_information[i+1] = '``` ```' embed = discord.Embed( title = '', description= fixedboss_information[i+1], color=0x0000ff ) await ctx.send( embed=embed, tts=False) ###########################일반보스출력 if len(boss_information[0]) != 0: boss_information[0] = "```diff\n" + boss_information[0] + "\n```" else : boss_information[0] = '``` ```' embed = discord.Embed( title = "----- 보스탐 정보 -----", description= boss_information[0], color=0x0000ff ) await ctx.send( embed=embed, tts=False) for i in range(len(boss_information)-1): if len(boss_information[i+1]) != 0: boss_information[i+1] = "```diff\n" + boss_information[i+1] + "\n```" else : boss_information[i+1] = '``` ```' embed = discord.Embed( title = '', description= boss_information[i+1], color=0x0000ff ) await ctx.send( embed=embed, tts=False) ###########################미예약보스출력 if len(tmp_boss_information[0]) != 0: if len(tmp_boss_information) == 1 : tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0][:len(tmp_boss_information[0])-1] + "\n```" else: tmp_boss_information[0] = "```fix\n" + tmp_boss_information[0] + "\n```" else : tmp_boss_information[0] = '``` ```' embed = discord.Embed( title = "----- 미예약 보스 -----", description= tmp_boss_information[0], color=0x0000ff ) await ctx.send( embed=embed, tts=False) for i in range(len(tmp_boss_information)-1): if len(tmp_boss_information[i+1]) != 0: if i == len(tmp_boss_information)-2: tmp_boss_information[i+1] = 
"```fix\n" + tmp_boss_information[i+1][:len(tmp_boss_information[i+1])-1] + "\n```" else: tmp_boss_information[i+1] = "```fix\n" + tmp_boss_information[i+1] + "\n```" else : tmp_boss_information[i+1] = '``` ```' embed = discord.Embed( title = '', description= tmp_boss_information[i+1], color=0x0000ff ) await ctx.send( embed=embed, tts=False) await dbSave() await data_list_Save("kill_list.ini", "-----척살명단-----", kill_Data) await data_list_Save("item_list.ini", "-----아이템목록-----", item_Data) else: return ################ 킬초기화 ################ @commands.command(name=command[24][0], aliases=command[24][1:]) async def killInit_(self, ctx): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[18]: global kill_Data kill_Data = {} await init_data_list('kill_list.ini', '-----척살명단-----') return await ctx.send( '< 킬 목록 초기화완료 >', tts=False) else: return ################ 킬명단 확인 및 추가################ @commands.command(name=command[25][0], aliases=command[25][1:]) async def killList_(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[18]: global kill_Data if not args: kill_output = '' for key, value in kill_Data.items(): kill_output += ':skull_crossbones: ' + str(key) + ' : ' + str(value) + '번 따히!\n' if kill_output != '' : embed = discord.Embed( description= str(kill_output), color=0xff00ff ) else : embed = discord.Embed( description= '등록된 킬 목록이 없습니다. 분발하세요!', color=0xff00ff ) return await ctx.send(embed=embed, tts=False) if args in kill_Data: kill_Data[args] += 1 else: kill_Data[args] = 1 embed = discord.Embed( description= ':skull_crossbones: ' + args + ' 따히! [' + str(kill_Data[args]) + '번]\n', color=0xff00ff ) return await ctx.send(embed=embed, tts=False) else: return ################ 킬삭제 ################ @commands.command(name=command[26][0], aliases=command[26][1:]) async def killDel_(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[18]: global kill_Data if not args: return await ctx.send( '```제대로 된 아이디를 입력해주세요!\n```', tts=False) if args in kill_Data: del kill_Data[args] return await ctx.send( ':angel: ' + args + ' 삭제완료!', tts=False) else : return await ctx.send( '```킬 목록에 등록되어 있지 않습니다!\n```', tts=False) else: return ################ 킬 차감 ################ @commands.command(name=command[33][0], aliases=command[33][1:]) async def killSubtract_(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[18]: global kill_Data if not args: return await ctx.send(f'{command[33][0]} [아이디] 혹은 {command[33][0]} [아이디] [횟수] 양식에 맞춰 입력해주세요!', tts = False) input_data = args.split() if len(input_data) == 1: kill_name = args count = 1 elif len(input_data) == 2: kill_name = input_data[0] try: count = int(input_data[1]) except ValueError: return await ctx.send(f'[횟수]는 숫자로 입력바랍니다') else: return await ctx.send(f'{command[33][0]} [아이디] 혹은 {command[33][0]} [아이디] [횟수] 양식에 맞춰 입력해주세요!', tts = False) if kill_name in kill_Data: if kill_Data[kill_name] < int(count): return await ctx.send( f"등록된 킬 횟수[{str(kill_Data[kill_name])}번]보다 차감 횟수[{str(count)}번]가 많습니다. 킬 횟수에 맞게 재입력 바랍니다.", tts=False) else: kill_Data[kill_name] -= int(count) else: return await ctx.send( '```킬 목록에 등록되어 있지 않습니다!\n```', tts=False) embed = discord.Embed( description= f':angel: [{kill_name}] [{str(count)}번] 차감 완료! 
[잔여 : {str(kill_Data[kill_name])}번]\n', color=0xff00ff ) if kill_Data[kill_name] == 0: del kill_Data[kill_name] return await ctx.send(embed=embed, tts=False) else: return ################ 경주 ################ @commands.command(name=command[27][0], aliases=command[27][1:]) async def race_(self, ctx): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[19]: msg = ctx.message.content[len(ctx.invoked_with)+1:] race_info = [] fr = [] racing_field = [] str_racing_field = [] cur_pos = [] race_val = [] random_pos = [] racing_result = [] output = ':camera: :camera: :camera: 신나는 레이싱! :camera: :camera: :camera:\n' #racing_unit = [':giraffe:', ':elephant:', ':tiger2:', ':hippopotamus:', ':crocodile:',':leopard:',':ox:', ':sheep:', ':pig2:',':dromedary_camel:',':dragon:',':rabbit2:'] #동물스킨 #racing_unit = [':red_car:', ':taxi:', ':bus:', ':trolleybus:', ':race_car:', ':police_car:', ':ambulance:', ':fire_engine:', ':minibus:', ':truck:', ':articulated_lorry:', ':tractor:', ':scooter:', ':manual_wheelchair:', ':motor_scooter:', ':auto_rickshaw:', ':blue_car:', ':bike:', ':helicopter:', ':steam_locomotive:'] #탈것스킨 #random.shuffle(racing_unit) racing_member = msg.split(" ") racing_unit = [] emoji = discord.Emoji emoji = ctx.message.guild.emojis for j in range(len(tmp_racing_unit)): racing_unit.append(':' + tmp_racing_unit[j] + ':') for i in range(len(emoji)): if emoji[i].name == tmp_racing_unit[j].strip(":"): racing_unit[j] = '<:' + tmp_racing_unit[j] + ':' + str(emoji[i].id) + '>' random.shuffle(racing_unit) field_size = 60 tmp_race_tab = 35 - len(racing_member) if len(racing_member) <= 1: await ctx.send('레이스 인원이 2명보다 작습니다.') return elif len(racing_member) >= 13: await ctx.send('레이스 인원이 12명 초과입니다.') return else : race_val = random.sample(range(tmp_race_tab, tmp_race_tab+len(racing_member)), len(racing_member)) random.shuffle(race_val) for i in range(len(racing_member)): fr.append(racing_member[i]) fr.append(racing_unit[i]) fr.append(race_val[i]) race_info.append(fr) fr = [] for i in range(field_size): fr.append(" ") racing_field.append(fr) fr = [] for i in range(len(racing_member)): racing_field[i][0] = "|" racing_field[i][field_size-2] = race_info[i][1] if len(race_info[i][0]) > 5: racing_field[i][field_size-1] = "| " + race_info[i][0][:5] + '..' else: racing_field[i][field_size-1] = "| " + race_info[i][0] str_racing_field.append("".join(racing_field[i])) cur_pos.append(field_size-2) for i in range(len(racing_member)): output += str_racing_field[i] + '\n' result_race = await ctx.send(output + ':traffic_light: 3초 후 경주가 시작됩니다!') await asyncio.sleep(1) await result_race.edit(content = output + ':traffic_light: 2초 후 경주가 시작됩니다!') await asyncio.sleep(1) await result_race.edit(content = output + ':traffic_light: 1초 후 경주가 시작됩니다!') await asyncio.sleep(1) await result_race.edit(content = output + ':checkered_flag: 경주 시작!') for i in range(len(racing_member)): test = random.sample(range(2,field_size-2), race_info[i][2]) while len(test) != tmp_race_tab + len(racing_member)-1 : test.append(1) test.append(1) test.sort(reverse=True) random_pos.append(test) for j in range(len(random_pos[0])): if j%2 == 0: output = ':camera: :camera_with_flash: :camera: 신나는 레이싱! :camera_with_flash: :camera: :camera_with_flash:\n' else : output = ':camera_with_flash: :camera: :camera_with_flash: 신나는 레이싱! 
:camera: :camera_with_flash: :camera:\n' str_racing_field = [] for i in range(len(racing_member)): temp_pos = cur_pos[i] racing_field[i][random_pos[i][j]], racing_field[i][temp_pos] = racing_field[i][temp_pos], racing_field[i][random_pos[i][j]] cur_pos[i] = random_pos[i][j] str_racing_field.append("".join(racing_field[i])) await asyncio.sleep(1) for i in range(len(racing_member)): output += str_racing_field[i] + '\n' await result_race.edit(content = output + ':checkered_flag: 경주 시작!') for i in range(len(racing_field)): fr.append(race_info[i][0]) fr.append((race_info[i][2]) - tmp_race_tab + 1) racing_result.append(fr) fr = [] result = sorted(racing_result, key=lambda x: x[1]) result_str = '' for i in range(len(result)): if result[i][1] == 1: result[i][1] = ':first_place:' elif result[i][1] == 2: result[i][1] = ':second_place:' elif result[i][1] == 3: result[i][1] = ':third_place:' elif result[i][1] == 4: result[i][1] = ':four:' elif result[i][1] == 5: result[i][1] = ':five:' elif result[i][1] == 6: result[i][1] = ':six:' elif result[i][1] == 7: result[i][1] = ':seven:' elif result[i][1] == 8: result[i][1] = ':eight:' elif result[i][1] == 9: result[i][1] = ':nine:' elif result[i][1] == 10: result[i][1] = ':keycap_ten:' else: result[i][1] = ':x:' result_str += result[i][1] + " " + result[i][0] + " " #print(result) await asyncio.sleep(1) return await result_race.edit(content = output + ':tada: 경주 종료!\n' + result_str) else: return ################ 보탐봇 입장 ################ @commands.command(name=command[28][0], aliases=command[28][1:]) async def set_channel_(self, ctx): global basicSetting msg = ctx.message.content[len(ctx.invoked_with)+1:] channel = ctx.message.channel.id #메세지가 들어온 채널 ID if msg == '사다리' : #사다리 채널 설정 inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') inputData_textCH = file_data_textCH.split('\n') for i in range(len(inputData_textCH)): if inputData_textCH[i].startswith('ladderchannel'): inputData_textCH[i] = 'ladderchannel = ' + str(channel) + '\r' basicSetting[8] = channel result_textCH = '\n'.join(inputData_textCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_textCH, contents.sha) print(f'< 사다리채널 [{ctx.message.channel.name}] 설정완료 >') return await ctx.send(f'< 사다리채널 [{ctx.message.channel.name}] 설정완료 >', tts=False) elif msg == '정산' : inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') inputData_textCH = file_data_textCH.split('\n') for i in range(len(inputData_textCH)): if inputData_textCH[i].startswith('jungsanchannel'): inputData_textCH[i] = 'jungsanchannel = ' + str(channel) + '\r' basicSetting[11] = channel result_textCH = '\n'.join(inputData_textCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_textCH, contents.sha) print(f'< 정산채널 [{ctx.message.channel.name}] 설정완료 >') return await ctx.send(f'< 정산채널 [{ctx.message.channel.name}] 설정완료 >', tts=False) elif msg == '척살' : inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') inputData_textCH = file_data_textCH.split('\n') for i in range(len(inputData_textCH)): if inputData_textCH[i].startswith('killchannel'): inputData_textCH[i] = 'killchannel = ' + str(channel) + '\r' 
basicSetting[18] = channel result_textCH = '\n'.join(inputData_textCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_textCH, contents.sha) print(f'< 척살채널 [{ctx.message.channel.name}] 설정완료 >') return await ctx.send(f'< 척살채널 [{ctx.message.channel.name}] 설정완료 >', tts=False) elif msg == '경주' : inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') inputData_textCH = file_data_textCH.split('\n') for i in range(len(inputData_textCH)): if inputData_textCH[i].startswith('racingchannel'): inputData_textCH[i] = 'racingchannel = ' + str(channel) + '\r' basicSetting[19] = channel result_textCH = '\n'.join(inputData_textCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_textCH, contents.sha) print(f'< 경주채널 [{ctx.message.channel.name}] 설정완료 >') return await ctx.send(f'< 경주채널 [{ctx.message.channel.name}] 설정완료 >', tts=False) elif msg == '아이템' : inidata_textCH = repo.get_contents("test_setting.ini") file_data_textCH = base64.b64decode(inidata_textCH.content) file_data_textCH = file_data_textCH.decode('utf-8') inputData_textCH = file_data_textCH.split('\n') for i in range(len(inputData_textCH)): if inputData_textCH[i].startswith('itemchannel'): inputData_textCH[i] = 'itemchannel = ' + str(channel) + '\r' basicSetting[20] = channel result_textCH = '\n'.join(inputData_textCH) contents = repo.get_contents("test_setting.ini") repo.update_file(contents.path, "test_setting", result_textCH, contents.sha) print(f'< 아이템채널 [{ctx.message.channel.name}] 설정완료 >') return await ctx.send(f'< 아이템채널 [{ctx.message.channel.name}] 설정완료 >', tts=False) else : return await ctx.send(f'```올바른 명령어를 입력해주세요.```', tts=False) ################ 아이템초기화 확인 ################ @commands.command(name=command[29][0], aliases=command[29][1:]) async def itemInit_(self, ctx): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[20]: global item_Data item_Data = {} await init_data_list('item_list.ini', '-----아이템 목록-----') return await ctx.send( '< 아이템 목록 초기화완료 >', tts=False) else: return ################ 아이템 목록 확인 및 추가 ################ @commands.command(name=command[30][0], aliases=command[30][1:]) async def itemList_(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[20]: global item_Data if not args: sorted_item_list = sorted(item_Data.items(), key=lambda x: x[0]) embed_list : list = [] embed_index : int = 0 embed_cnt : int = 0 embed = discord.Embed(title = '', description = f'`{self.bot.user.name}\'s 창고`', color = 0x00ff00) embed_list.append(embed) if len(sorted_item_list) > 0 : for item_id, count in sorted_item_list: embed_cnt += 1 if embed_cnt > 24 : embed_cnt = 0 embed_index += 1 tmp_embed = discord.Embed( title = "", description = "", color=0x00ff00 ) embed_list.append(tmp_embed) embed_list[embed_index].add_field(name = item_id, value = count) embed_list[len(embed_list)-1].set_footer(text = f"전체 아이템 종류 : {len(item_Data)}개") if len(embed_list) > 1: for embed_data in embed_list: await asyncio.sleep(0.1) await ctx.send(embed = embed_data) return else: return await ctx.send(embed=embed, tts=False) else : embed.add_field(name = '\u200b\n', value = '창고가 비었습니다.\n\u200b') return await ctx.send(embed=embed, tts=False) input_data = args.split() if len(input_data) == 1: item_name = args count = 1 elif len(input_data) == 2: 
item_name = input_data[0] try: count = int(input_data[1]) except ValueError: return await ctx.send(f'아이템 [개수]는 숫자로 입력바랍니다') else: return await ctx.send(f'{command[30][0]} [아이템명] 혹은 {command[30][0]} [아이템명] [개수] 양식에 맞춰 입력해주세요!', tts = False) if item_name in item_Data: item_Data[item_name] += int(count) else: item_Data[item_name] = int(count) embed = discord.Embed( description= f':inbox_tray: **[{item_name}] [{str(count)}개]** 등록 완료! [잔여 : {str(item_Data[item_name])}개]\n', color=0xff00ff ) return await ctx.send(embed=embed, tts=False) else: return ################ 아이템 삭제 ################ @commands.command(name=command[31][0], aliases=command[31][1:]) async def itemDel_(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[20]: global item_Data if not args: return await ctx.send( f'{command[31][0]} [아이템명] 양식에 맞춰 입력해주세요!', tts = False) if args in item_Data: del item_Data[args] embed = discord.Embed( description= ':outbox_tray: ' + args + ' 삭제완료!', color=0xff00ff ) return await ctx.send(embed=embed, tts=False) else : return await ctx.send( '```아이템 목록에 등록되어 있지 않습니다!\n```', tts=False) else: return ################ 아이템 차감 ################ @commands.command(name=command[32][0], aliases=command[32][1:]) async def itemSubtract_(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[20]: global item_Data if not args: return await ctx.send(f'{command[32][0]} [아이템명] 혹은 {command[32][0]} [아이템명] [개수] 양식에 맞춰 입력해주세요!', tts = False) input_data = args.split() if len(input_data) == 1: item_name = args count = 1 elif len(input_data) == 2: item_name = input_data[0] try: count = int(input_data[1]) except ValueError: return await ctx.send(f'아이템 [개수]는 숫자로 입력바랍니다') else: return await ctx.send(f'{command[32][0]} [아이템명] 혹은 {command[32][0]} [아이템명] [개수] 양식에 맞춰 입력해주세요!', tts = False) if item_name in item_Data: if item_Data[item_name] < int(count): return await ctx.send( f"등록된 아이템 개수[{str(item_Data[item_name])}개]보다 차감 개수[{str(count)}개]가 많습니다. 등록 개수에 맞게 재입력 바랍니다.", tts=False) else: item_Data[item_name] -= int(count) else: return await ctx.send( '```아이템 목록에 등록되어 있지 않습니다!\n```', tts=False) embed = discord.Embed( description= f':outbox_tray: **[{item_name}] [{str(count)}개]** 차감 완료! [잔여 : {str(item_Data[item_name])}개]\n', color=0xff00ff ) if item_Data[item_name] == 0: del item_Data[item_name] return await ctx.send(embed=embed, tts=False) else: return ################ 서버 나가기 ################ @commands.has_permissions(manage_messages=True) @commands.command(name=command[34][0], aliases=command[34][1:]) async def leaveGuild_(self, ctx): if ctx.message.channel.id == basicSetting[7]: guild_list : str = "" guild_name : str = "" for i, gulid_name in enumerate(self.bot.guilds): guild_list += f"`{i+1}.` {gulid_name}\n" embed = discord.Embed( title = "----- 서버 목록 -----", description = guild_list, color=0x00ff00 ) await ctx.send(embed = embed) try: await ctx.send(f"```떠나고 싶은 서버의 [숫자]를 입력하여 선택해 주세요```") message_result : discord.Message = await self.bot.wait_for("message", timeout = 10, check=(lambda message: message.channel == ctx.message.channel and message.author == ctx.message.author)) except asyncio.TimeoutError: return await ctx.send(f"```서버 선택 시간이 초과됐습니다! 
필요시 명령어를 재입력해 주세요```") try: guild_name = self.bot.guilds[int(message_result.content)-1].name await self.bot.get_guild(self.bot.guilds[int(message_result.content)-1].id).leave() return await ctx.send(f"```[{guild_name}] 서버에서 떠났습니다.!```") except ValueError: return ################ 수수료 계산기 ################ @commands.command(name=command[35][0], aliases=command[35][1:]) async def tax_check(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[22]: if not args: return await ctx.send(f"**{command[35][0]} [판매금액] (거래소세금)** 양식으로 입력 해주세요\n※ 거래소세금은 미입력시 5%입니다.") input_money_data : list = args.split() len_input_money_data = len(input_money_data) try: for i in range(len_input_money_data): input_money_data[i] = int(input_money_data[i]) except ValueError: return await ctx.send(f"**[판매금액] (거래소세금)**은 숫자로 입력 해주세요.") if len_input_money_data < 1 or len_input_money_data > 3: return await ctx.send(f"**{command[35][0]} [판매금액] (거래소세금)** 양식으로 입력 해주세요\n※ 거래소세금은 미입력시 5%입니다.") elif len_input_money_data == 2: tax = input_money_data[1] else: tax = 5 price_first_tax = int(input_money_data[0] * ((100-tax)/100)) price_second_tax = int(price_first_tax * ((100-tax)/100)) price_rev_tax = int((input_money_data[0] * 100)/(100-tax)+0.5) embed = discord.Embed( title = f"🧮 수수료 계산결과 (세율 {tax}% 기준) ", description = f"", color=0x00ff00 ) embed.add_field(name = "⚖️ 수수료 지원", value = f"```등록가 : {price_rev_tax}\n수령가 : {input_money_data[0]}\n세 금 : {price_rev_tax-input_money_data[0]}```") embed.add_field(name = "⚖️ 1차 거래", value = f"```등록가 : {input_money_data[0]}\n정산가 : {price_first_tax}\n세 금 : {input_money_data[0]-price_first_tax}```") embed.add_field(name = "⚖️ 2차 거래", value = f"```등록가 : {price_first_tax}\n정산가 : {price_second_tax}\n세 금 : {price_first_tax-price_second_tax}```") return await ctx.send(embed = embed) else: return ################ 페이백 계산기 ################ @commands.command(name=command[36][0], aliases=command[36][1:]) async def payback_check(self, ctx, *, args : str = None): if ctx.message.channel.id == basicSetting[7] or ctx.message.channel.id == basicSetting[22]: if not args: return await ctx.send(f"**{command[36][0]} [거래소가격] [실거래가] (거래소세금)** 양식으로 입력 해주세요\n※ 거래소세금은 미입력시 5%입니다.") input_money_data : list = args.split() len_input_money_data = len(input_money_data) try: for i in range(len_input_money_data): input_money_data[i] = int(input_money_data[i]) except ValueError: return await ctx.send(f"**[판매금액] (거래소세금)**은 숫자로 입력 해주세요.") if len_input_money_data < 2 or len_input_money_data > 4: return await ctx.send(f"**{command[36][0]} [거래소가격] [실거래가] (거래소세금)** 양식으로 입력 해주세요\n※ 거래소세금은 미입력시 5%입니다.") elif len_input_money_data == 3: tax = input_money_data[2] else: tax = 5 price_reg_tax = int(input_money_data[0] * ((100-tax)/100)) price_real_tax = int(input_money_data[1] * ((100-tax)/100)) reault_payback = price_reg_tax - price_real_tax reault_payback1= price_reg_tax - input_money_data[1] embed = discord.Embed( title = f"🧮 페이백 계산결과1 (세율 {tax}% 기준) ", description = f"**```fix\n{reault_payback}```**", color=0x00ff00 ) embed.add_field(name = "⚖️ 거래소", value = f"```등록가 : {input_money_data[0]}\n정산가 : {price_reg_tax}\n세 금 : {input_money_data[0]-price_reg_tax}```") embed.add_field(name = "🕵️ 실거래", value = f"```등록가 : {input_money_data[1]}\n정산가 : {price_real_tax}\n세 금 : {input_money_data[1]-price_real_tax}```") await ctx.send(embed = embed) embed2 = discord.Embed( title = f"🧮 페이백 계산결과2 (세율 {tax}% 기준) ", description = f"**```fix\n{reault_payback1}```**", color=0x00ff00 ) 
embed2.add_field(name = "⚖️ 거래소", value = f"```등록가 : {input_money_data[0]}\n정산가 : {price_reg_tax}\n세 금 : {input_money_data[0]-price_reg_tax}```") embed2.add_field(name = "🕵️ 실거래", value = f"```내판가 : {input_money_data[1]}```") return await ctx.send(embed = embed2) else: return ################ ?????????????? ################ @commands.command(name='!오빠') async def brother1_(self, ctx): await PlaySound(voice_client1, './sound/오빠.mp3') @commands.command(name='!언니') async def sister_(self, ctx): await PlaySound(voice_client1, './sound/언니.mp3') @commands.command(name='!형') async def brother2_(self, ctx): await PlaySound(voice_client1, './sound/형.mp3') @commands.command(name='!TJ', aliases=['!tj']) async def TJ_(self, ctx): resultTJ = random.randrange(1,9) await PlaySound(voice_client1, './sound/TJ' + str(resultTJ) +'.mp3') class IlsangDistributionBot(commands.AutoShardedBot): def __init__(self): super().__init__(command_prefix=[""], help_command=None) self.session = aiohttp.ClientSession(loop=self.loop) def run(self): super().run(access_token, reconnect=True) async def on_ready(self): global basicSetting global channel global voice_client1 global channel_info global channel_name global channel_id global channel_voice_name global channel_voice_id global channel_type global chkvoicechannel global chflg global endTime global setting_channel_name print("Logged in as ") #화면에 봇의 아이디, 닉네임이 출력됩니다. print(self.user.name) print(self.user.id) print("===========") channel_name, channel_id, channel_voice_name, channel_voice_id = await get_guild_channel_info(self) await dbLoad() if str(basicSetting[6]) in channel_voice_id and str(basicSetting[7]) in channel_id: voice_client1 = await self.get_channel(basicSetting[6]).connect(reconnect=True) channel = basicSetting[7] setting_channel_name = self.get_channel(basicSetting[7]).name now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) print('< 접속시간 [' + now.strftime('%Y-%m-%d ') + now.strftime('%H:%M:%S') + '] >') print('< 텍스트채널 [' + self.get_channel(basicSetting[7]).name + '] 접속완료>') print('< 음성채널 [' + self.get_channel(basicSetting[6]).name + '] 접속완료>') if basicSetting[8] != "": if str(basicSetting[8]) in channel_id: print('< 사다리채널 [' + self.get_channel(int(basicSetting[8])).name + '] 접속완료 >') else: basicSetting[8] = "" print(f"사다리채널 ID 오류! [{command[28][0]} 사다리] 명령으로 재설정 바랍니다.") if basicSetting[11] != "": if str(basicSetting[11]) in channel_id: print('< 정산채널 [' + self.get_channel(int(basicSetting[11])).name + '] 접속완료>') else: basicSetting[11] = "" print(f"정산채널 ID 오류! [{command[28][0]} 정산] 명령으로 재설정 바랍니다.") if basicSetting[18] != "": if str(basicSetting[18]) in channel_id: print('< 척살채널 [' + self.get_channel(int(basicSetting[18])).name + '] 접속완료>') else: basicSetting[18] = "" print(f"척살채널 ID 오류! [{command[28][0]} 척살] 명령으로 재설정 바랍니다.") if basicSetting[19] != "": if str(basicSetting[19]) in channel_id: print('< 경주채널 [' + self.get_channel(int(basicSetting[19])).name + '] 접속완료>') else: basicSetting[19] = "" print(f"경주채널 ID 오류! [{command[28][0]} 경주] 명령으로 재설정 바랍니다.") if basicSetting[20] != "": if str(basicSetting[20]) in channel_id: print('< 아이템채널 [' + self.get_channel(int(basicSetting[20])).name + '] 접속완료>') else: basicSetting[20] = "" print(f"아이템채널 ID 오류! 
[{command[28][0]} 아이템] 명령으로 재설정 바랍니다.") if int(basicSetting[13]) != 0 : print('< 보탐봇 재시작 시간 ' + endTime.strftime('%Y-%m-%d ') + endTime.strftime('%H:%M:%S') + ' >') print('< 보탐봇 재시작 주기 ' + basicSetting[13] + '일 >') else : print('< 보탐봇 재시작 설정안됨 >') chflg = 1 else: basicSetting[6] = "" basicSetting[7] = "" print(f"설정된 채널 값이 없거나 잘못 됐습니다. **[{command[0][0]}]** 명령어 먼저 입력하여 사용해주시기 바랍니다.") # 디스코드에는 현재 본인이 어떤 게임을 플레이하는지 보여주는 기능이 있습니다. # 이 기능을 사용하여 봇의 상태를 간단하게 출력해줄 수 있습니다. await self.change_presence(status=discord.Status.dnd, activity=discord.Game(name=command[1][0], type=1), afk=False) async def on_message(self, msg): await self.wait_until_ready() if msg.author.bot: #만약 메시지를 보낸사람이 봇일 경우에는 return None #동작하지 않고 무시합니다. ori_msg = msg global channel global basicSetting global bossData global fixed_bossData global bossNum global fixed_bossNum global chkvoicechannel global chkrelogin global bossTime global tmp_bossTime global fixed_bossTime global bossTimeString global bossDateString global tmp_bossTimeString global tmp_bossDateString global bossFlag global bossFlag0 global bossMungFlag global bossMungCnt global voice_client1 global channel_info global channel_name global channel_id global channel_voice_name global channel_voice_id global channel_type global chflg global LoadChk global indexFixedBossname global FixedBossDateData global gc #정산 global credentials #정산 global regenembed global command global kill_Data id = msg.author.id #id라는 변수에는 메시지를 보낸사람의 ID를 담습니다. if chflg == 1 : if self.get_channel(basicSetting[7]).id == msg.channel.id: channel = basicSetting[7] message = msg hello = message.content for i in range(bossNum): ################ 보스 컷처리 ################ if message.content.startswith(bossData[i][0] +'컷') or message.content.startswith(convertToInitialLetters(bossData[i][0] +'컷')) or message.content.startswith(bossData[i][0] +' 컷') or message.content.startswith(convertToInitialLetters(bossData[i][0] +' 컷')): if hello.find(' ') != -1 : bossData[i][6] = hello[hello.find(' ')+2:] hello = hello[:hello.find(' ')] else: bossData[i][6] = '' tmp_msg = bossData[i][0] +'컷' if len(hello) > len(tmp_msg) + 3 : if hello.find(':') != -1 : chkpos = hello.find(':') hours1 = hello[chkpos-2:chkpos] minutes1 = hello[chkpos+1:chkpos+3] now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) else: chkpos = len(hello)-2 hours1 = hello[chkpos-2:chkpos] minutes1 = hello[chkpos:chkpos+2] now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) else: now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = now2 bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = 0 if tmp_now > now2 : tmp_now = tmp_now + datetime.timedelta(days=int(-1)) if tmp_now < now2 : del
taTime =
datetime.timedelta(hours = int(bossData[i][1]), minutes = int(bossData[i][5])) while now2 > tmp_now : tmp_now = tmp_now + deltaTime bossMungCnt[i] = bossMungCnt[i] + 1 now2 = tmp_now bossMungCnt[i] = bossMungCnt[i] - 1 else : now2 = now2 + datetime.timedelta(hours = int(bossData[i][1]), minutes = int(bossData[i][5])) tmp_bossTime[i] = bossTime[i] = nextTime = now2 tmp_bossTimeString[i] = bossTimeString[i] = nextTime.strftime('%H:%M:%S') tmp_bossDateString[i] = bossDateString[i] = nextTime.strftime('%Y-%m-%d') embed = discord.Embed( description= '```다음 ' + bossData[i][0] + ' ' + bossTimeString[i] + '입니다.```', color=0xff0000 ) await self.get_channel(channel).send(embed=embed, tts=False) ################ 보스 멍 처리 ################ if message.content.startswith(bossData[i][0] +'멍') or message.content.startswith(bossData[i][0] +' 멍'): if hello.find(' ') != -1 : bossData[i][6] = hello[hello.find(' ')+2:] hello = hello[:hello.find(' ')] else: bossData[i][6] = '' tmp_msg = bossData[i][0] +'멍' tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) if len(hello) > len(tmp_msg) + 3 : temptime = tmp_now if hello.find(':') != -1 : chkpos = hello.find(':') hours1 = hello[chkpos-2:chkpos] minutes1 = hello[chkpos+1:chkpos+3] temptime = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) else: chkpos = len(hello)-2 hours1 = hello[chkpos-2:chkpos] minutes1 = hello[chkpos:chkpos+2] temptime = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) bossMungCnt[i] = 0 bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False if temptime > tmp_now : temptime = temptime + datetime.timedelta(days=int(-1)) if temptime < tmp_now : deltaTime = datetime.timedelta(hours = int(bossData[i][1]), minutes = int(bossData[i][5])) while temptime < tmp_now : temptime = temptime + deltaTime bossMungCnt[i] = bossMungCnt[i] + 1 tmp_bossTime[i] = bossTime[i] = temptime tmp_bossTimeString[i] = bossTimeString[i] = temptime.strftime('%H:%M:%S') tmp_bossDateString[i] = bossDateString[i] = temptime.strftime('%Y-%m-%d') embed = discord.Embed( description= '```다음 ' + bossData[i][0] + ' ' + bossTimeString[i] + '입니다.```', color=0xff0000 ) await self.get_channel(channel).send(embed=embed, tts=False) else: if tmp_bossTime[i] < tmp_now : nextTime = tmp_bossTime[i] + datetime.timedelta(hours = int(bossData[i][1]), minutes = int(bossData[i][5])) bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = bossMungCnt[i] + 1 tmp_bossTime[i] = bossTime[i] = nextTime tmp_bossTimeString[i] = bossTimeString[i] = nextTime.strftime('%H:%M:%S') tmp_bossDateString[i] = bossDateString[i] = nextTime.strftime('%Y-%m-%d') embed = discord.Embed( description= '```다음 ' + bossData[i][0] + ' ' + bossTimeString[i] + '입니다.```', color=0xff0000 ) await self.get_channel(channel).send(embed=embed, tts=False) else: await self.get_channel(channel).send('```' + bossData[i][0] + '탐이 아직 안됐습니다. 
다음 ' + bossData[i][0] + '탐 [' + tmp_bossTimeString[i] + '] 입니다```', tts=False) ################ 예상 보스 타임 입력 ################ if message.content.startswith(bossData[i][0] +'예상') or message.content.startswith(bossData[i][0] +' 예상'): if hello.find(' ') != -1 : bossData[i][6] = hello[hello.find(' ')+2:] hello = hello[:hello.find(' ')] else: bossData[i][6] = '' tmp_msg = bossData[i][0] +'예상' if len(hello) > len(tmp_msg) + 4 : if hello.find(':') != -1 : chkpos = hello.find(':') hours1 = hello[chkpos-2:chkpos] minutes1 = hello[chkpos+1:chkpos+3] now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) else: chkpos = len(hello)-2 hours1 = hello[chkpos-2:chkpos] minutes1 = hello[chkpos:chkpos+2] now2 = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = datetime.datetime.now() + datetime.timedelta(hours = int(basicSetting[0])) tmp_now = tmp_now.replace(hour=int(hours1), minute=int(minutes1)) bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = 0 if tmp_now < now2 : tmp_now = tmp_now + datetime.timedelta(days=int(1)) tmp_bossTime[i] = bossTime[i] = nextTime = tmp_now tmp_bossTimeString[i] = bossTimeString[i] = nextTime.strftime('%H:%M:%S') tmp_bossDateString[i] = bossDateString[i] = nextTime.strftime('%Y-%m-%d') embed = discord.Embed( description= '```다음 ' + bossData[i][0] + ' ' + bossTimeString[i] + '입니다.```', color=0xff0000 ) await self.get_channel(channel).send(embed=embed, tts=False) else: await self.get_channel(channel).send('```' + bossData[i][0] +' 예상 시간을 입력해주세요.```', tts=False) ################ 보스타임 삭제 ################ if message.content == bossData[i][0] +'삭제' or message.content == bossData[i][0] +' 삭제': bossTime[i] = datetime.datetime.now()+datetime.timedelta(days=365, hours = int(basicSetting[0])) tmp_bossTime[i] = datetime.datetime.now()+datetime.timedelta(days=365, hours = int(basicSetting[0])) bossTimeString[i] = '99:99:99' bossDateString[i] = '9999-99-99' tmp_bossTimeString[i] = '99:99:99' tmp_bossDateString[i] = '9999-99-99' bossFlag[i] = False bossFlag0[i] = False bossMungFlag[i] = False bossMungCnt[i] = 0 await self.get_channel(channel).send('<' + bossData[i][0] + ' 삭제완료>', tts=False) await dbSave() print ('<' + bossData[i][0] + ' 삭제완료>') ################ 보스별 메모 ################ if message.content.startswith(bossData[i][0] +'메모 '): tmp_msg = bossData[i][0] +'메모 ' bossData[i][6] = hello[len(tmp_msg):] await self.get_channel(channel).send('< ' + bossData[i][0] + ' [ ' + bossData[i][6] + ' ] 메모등록 완료>', tts=False) if message.content.startswith(bossData[i][0] +'메모삭제'): bossData[i][6] = '' await self.get_channel(channel).send('< ' + bossData[i][0] + ' 메모삭제 완료>', tts=False) await self.process_commands(ori_msg) async def on_command_error(self, ctx : commands.Context, error : commands.CommandError): if isinstance(error, CommandNotFound): return elif isinstance(error, MissingRequiredArgument): return elif isinstance(error, discord.ext.commands.MissingPermissions): return await ctx.send(f"**[{ctx.message.content.split()[0]}]** 명령을 사용할 권한이 없습니다.!") elif isinstance(error, discord.ext.commands.CheckFailure): return await ctx.send(f"**[{ctx.message.content.split()[0]}]** 명령을 사용할 권한이 없습니다.!") raise error async def close(self): await self.session.close() await super().close() print("일상디코봇 종료 완료.") ilsang_distribution_bot : IlsangDistributionBot = 
IlsangDistributionBot() ilsang_distribution_bot.add_cog(mainCog(ilsang_distribution_bot)) ilsang_distribution_bot.add_cog(taskCog(ilsang_distribution_bot)) ilsang_distribution_bot.run()
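# A minimal, self-contained sketch (hypothetical helper, not part of the cog
# above) of the respawn catch-up logic the boss commands repeat inline:
# starting from the last confirmed kill time, keep adding the boss's respawn
# interval until the result passes "now", counting the missed windows along
# the way. The names next_spawn and last_kill are illustrative only.
import datetime

def next_spawn(last_kill: datetime.datetime,
               respawn_hours: int, respawn_minutes: int,
               now: datetime.datetime):
    delta = datetime.timedelta(hours=respawn_hours, minutes=respawn_minutes)
    spawn = last_kill + delta
    missed = 0
    while spawn <= now:       # window already passed: count it as missed
        spawn += delta
        missed += 1
    return spawn, missed      # next spawn time, number of missed windows

# Example: a 4-hour boss killed at 01:00 and checked at 10:30 yields a next
# spawn of 13:00 with 2 missed windows.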
updater.go
package trip

import (
	"context"

	carpb "happy-car/car/api/gen/v1"
	"happy-car/car/mq"
	rentalpb "happy-car/rental/api/gen/v1"
	"happy-car/shared/auth"
	"happy-car/shared/id"

	"go.uber.org/zap"
	"google.golang.org/grpc"
)

// RunUpdater runs a trip updater: it subscribes to car updates and, for each
// unlocked car with an ongoing trip, pushes the car's current position to the
// rental service on behalf of the trip's driver.
func RunUpdater(sub mq.Subscriber, ts rentalpb.TripServiceClient, logger *zap.Logger) {
	ch, cleanUp, err := sub.Subscribe(context.Background())
	if err != nil {
		logger.Fatal("cannot subscribe", zap.Error(err))
	}
	// Register the cleanup only after the error check, so a failed
	// Subscribe cannot leave us deferring a nil function.
	defer cleanUp()

	for car := range ch {
		if car.Car.Status == carpb.CarStatus_UNLOCKED &&
			car.Car.TripId != "" && car.Car.Driver.Id != "" {
			_, err := ts.UpdateTrip(context.Background(), &rentalpb.UpdateTripRequest{
				Id: car.Car.TripId,
				Current: &rentalpb.Location{
					Latitude:  car.Car.Position.Latitude,
					Longitude: car.Car.Position.Longitude,
				},
			}, grpc.PerRPCCredentials(&impersonation{
				AccountID: id.AccountID(car.Car.Driver.Id),
			}))
			if err != nil {
				logger.Error("cannot update trip",
					zap.String("trip_id", car.Car.TripId),
					zap.Error(err))
			}
		}
	}
}

// impersonation is a per-RPC credential that marks outgoing requests as made
// on behalf of the given account.
type impersonation struct {
	AccountID id.AccountID
}

func (i *impersonation) GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) {
	return map[string]string{
		auth.ImpersonateAccountHeader: i.AccountID.String(),
	}, nil
}

func (i *impersonation) RequireTransportSecurity() bool {
	return false
}
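// A minimal sketch (hypothetical helper, not part of the original service) of
// an in-memory mq.Subscriber for exercising RunUpdater without a real broker.
// It assumes, as the range loop in RunUpdater suggests, that Subscribe yields
// *carpb.CarEntity values; adjust the channel element type if the actual
// mq.Subscriber interface differs.
package trip

import (
	"context"

	carpb "happy-car/car/api/gen/v1"
)

type fakeSubscriber struct {
	events []*carpb.CarEntity
}

// Subscribe replays the canned events on a fresh channel and closes it, so
// RunUpdater's range loop terminates once every event has been consumed.
func (f *fakeSubscriber) Subscribe(ctx context.Context) (chan *carpb.CarEntity, func(), error) {
	ch := make(chan *carpb.CarEntity)
	go func() {
		defer close(ch)
		for _, e := range f.events {
			ch <- e
		}
	}()
	return ch, func() {}, nil
}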
class-implements-multiple-traits.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // xfail-test FIXME #7305 extern mod extra; use extra::oldmap::*; use vec::*; use dvec::{dvec, extensions}; enum furniture { chair, couch, bed } enum body_part { finger, toe, nose, ear } trait noisy { fn speak() -> int; } trait scratchy { fn scratch() -> Option<furniture>; } trait bitey { fn bite() -> body_part; } fn vec_includes<T>(xs: ~[T], x: T) -> bool
{ for each(xs) |y| { if y == x { return true; }} return false; }
// vtables other than the 1st one don't seem to work class cat : noisy, scratchy, bitey { priv { let meows : @mut uint; let scratched : dvec<furniture>; let bite_counts : hashmap<body_part, uint>; fn meow() -> uint { info!("Meow: %u", *self.meows); *self.meows += 1u; if *self.meows % 5u == 0u { *self.how_hungry += 1; } *self.meows } } let how_hungry : @mut int; let name : str; new(in_x : uint, in_y : int, in_name: str) { self.meows = @mut in_x; self.how_hungry = @mut in_y; self.name = in_name; self.scratched = dvec(); let hsher: hashfn<body_part> = |p| int::hash(p as int); let eqer : eqfn<body_part> = |p, q| p == q; let t : hashmap<body_part, uint> = hashmap::<body_part, uint>(hsher, eqer); self.bite_counts = t; do iter(~[finger, toe, nose, ear]) |p| { self.bite_counts.insert(p, 0u); }; } fn speak() -> int { self.meow() as int } fn meow_count() -> uint { *self.meows } fn scratch() -> Option<furniture> { let all = ~[chair, couch, bed]; log(error, self.scratched); let mut rslt = None; for each(all) |thing| { if !self.scratched.contains(thing) { self.scratched.push(thing); return Some(thing); }} rslt } fn bite() -> body_part { error!("In bite()"); let all = ~[toe, nose, ear]; let mut min = finger; do iter(all) |next| { info!("min = %?", min); if self.bite_counts.get(next) < self.bite_counts.get(min) { min = next; }}; self.bite_counts.insert(min, self.bite_counts.get(min) + 1u); info!("Bit %?", min); min } } fn annoy_neighbors<T:noisy>(critter: T) { for uint::range(0u, 10u) |i| { let what = critter.speak(); info!("%u %d", i, what); } } fn bite_everything<T:bitey>(critter: T) -> bool { let mut left : ~[body_part] = ~[finger, toe, nose, ear]; while left.len() > 0u { let part = critter.bite(); info!("%? %?", left, part); if vec_includes(left, part) { left = vec::filter(left, |p| p != part ); } else { return false; } } true } fn scratched_something<T:scratchy>(critter: T) -> bool { option::is_some(critter.scratch()) } pub fn main() { let nyan : cat = cat(0u, 2, "nyan"); annoy_neighbors(nyan as noisy); assert_eq!(nyan.meow_count(), 10u); assert!((bite_everything(nyan as bitey))); assert!((scratched_something(nyan as scratchy))); }
main.rs
// Copyright (c) 2020 ESRLabs // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{anyhow, Context, Error, Result}; use api::{client::Client, model::Message}; use futures::{sink::SinkExt, StreamExt}; use northstar::{ api::{ self, model::{Container, NonNullString, Request}, }, common::version::Version, }; use std::{ collections::HashMap, convert::{TryFrom, TryInto}, path::PathBuf, process, str::FromStr, }; use structopt::{clap, clap::AppSettings, StructOpt}; use tokio::{ fs, io::{copy, AsyncBufReadExt, AsyncRead, AsyncWrite, BufReader}, net::{TcpStream, UnixStream}, time, }; pub trait N: AsyncRead + AsyncWrite + Send + Unpin {} impl<T> N for T where T: AsyncRead + AsyncWrite + Send + Unpin {} mod pretty; /// Default nstar address const DEFAULT_HOST: &str = "tcp://localhost:4200"; /// About string for CLI fn about() -> &'static str { Box::leak(Box::new(format!( "Northstar API version {}", api::model::version() ))) } /// Subcommands #[derive(StructOpt, Clone)] #[structopt(name = "nstar", author, about = about(), global_setting(AppSettings::ColoredHelp))] pub enum Subcommand { /// List available containers #[structopt(alias = "ls", alias = "list")] Containers, /// List configured repositories #[structopt(alias = "repos")] Repositories, /// Mount a container Mount { /// Container name name: String, /// Container version version: Version, }, /// Umount a container Umount { /// Container name name: String, /// Container version version: Version, }, /// Start a container Start { /// Container name name: String, /// Container version version: Version, /// Command line arguments #[structopt(short, long)] args: Option<Vec<String>>, /// Environment variables in KEY=VALUE format #[structopt(short, long)] env: Option<Vec<String>>, }, /// Stop a container Kill { /// Container name name: String, /// Container version version: Version, /// Signal signal: Option<i32>, }, /// Install a npk Install { /// Path to the .npk file npk: PathBuf, /// Target repository repository: String, }, /// Uninstall a container Uninstall { /// Container name name: String, /// Container version version: Version, }, /// Shutdown Northstar Shutdown, /// Notifications Notifications { /// Exit after n notifications #[structopt(short, long)] number: Option<usize>, }, /// Shell completion script generation Completion { /// Output directory where to generate completions into #[structopt(short, long)] output: PathBuf, /// Generate completions for shell type #[structopt(short, long)] shell: clap::Shell, }, ContainerStats { /// Container name name: String, /// Container version version: Version, }, } /// CLI #[derive(StructOpt)] pub struct Opt { /// Northstar address #[structopt(short, long, default_value = DEFAULT_HOST)] pub url: url::Url, /// Output json #[structopt(short, long)] pub json: bool, /// Connect timeout in seconds #[structopt(short, long, default_value = "10", parse(try_from_str = parse_secs))] pub timeout: time::Duration, /// Command #[structopt(subcommand)] pub command: Subcommand, } /// Parse a str containing a u64 
into a `std::time::Duration` and take the value /// as seconds fn parse_secs(src: &str) -> Result<time::Duration, anyhow::Error> { u64::from_str(src) .map(time::Duration::from_secs) .map_err(Into::into) } impl TryFrom<Subcommand> for Request { type Error = Error; fn try_from(command: Subcommand) -> Result<Self, Self::Error> { match command { Subcommand::Containers => Ok(Request::Containers), Subcommand::Repositories => Ok(Request::Repositories), Subcommand::Mount { name, version } => Ok(Request::Mount(vec![Container::new( name.try_into()?, version, )])), Subcommand::Umount { name, version } => { Ok(Request::Umount(Container::new(name.try_into()?, version))) } Subcommand::Start { name, version, args, env, } => { // Convert args let args = if let Some(args) = args { let mut non_null = Vec::with_capacity(args.len()); for arg in args { non_null .push(NonNullString::try_from(arg.as_str()).context("Invalid arg")?); } Some(non_null) } else { None }; // Convert env let env = if let Some(env) = env { let mut non_null = HashMap::with_capacity(env.len()); for env in env { let mut split = env.split('='); let key = split .next() .ok_or_else(|| anyhow!("Invalid env")) .and_then(|s| NonNullString::try_from(s).context("Invalid key"))?; let value = split .next() .ok_or_else(|| anyhow!("Invalid env")) .and_then(|s| NonNullString::try_from(s).context("Invalid value"))?; non_null.insert(key, value); } Some(non_null) } else { None }; Ok(Request::Start( Container::new(name.try_into()?, version), args, env, )) } Subcommand::Kill { name, version, signal, } => Ok(Request::Kill( Container::new(name.try_into()?, version), signal.unwrap_or(15), )), Subcommand::Install { npk, repository: repo_id, } => { let size = npk.metadata().map(|m| m.len())?; Ok(Request::Install(repo_id, size)) } Subcommand::Uninstall { name, version } => Ok(Request::Uninstall(Container::new( name.try_into()?, version, ))), Subcommand::Shutdown => Ok(Request::Shutdown), Subcommand::ContainerStats { name, version } => Ok(Request::ContainerStats( Container::new(name.try_into()?, version), )), Subcommand::Notifications { .. } | Subcommand::Completion { .. } => unreachable!(), } } } #[tokio::main(flavor = "current_thread")] async fn
main
() -> Result<()> { let opt = Opt::from_args(); let timeout = time::Duration::from_secs(5); let io = match opt.url.scheme() { "tcp" => { let addresses = opt.url.socket_addrs(|| Some(4200))?; let address = addresses .first() .ok_or_else(|| anyhow!("Failed to resolve {}", opt.url))?; let stream = time::timeout(timeout, TcpStream::connect(address)) .await .context("Failed to connect")??; Box::new(stream) as Box<dyn N> } "unix" => { let stream = time::timeout(timeout, UnixStream::connect(opt.url.path())) .await .context("Failed to connect")??; Box::new(stream) as Box<dyn N> } _ => return Err(anyhow!("Invalid url")), }; match opt.command { // Generate shell completions and exit on give subcommand Subcommand::Completion { output, shell } => { println!("Generating {} completions to {}", shell, output.display()); Opt::clap().gen_completions(env!("CARGO_PKG_NAME"), shell, output); process::exit(0); } // Subscribe to notifications and print them Subcommand::Notifications { number } => { if opt.json { let framed = Client::new(io, Some(100), opt.timeout) .await .with_context(|| format!("Failed to connect to {}", opt.url))? .framed(); let mut lines = BufReader::new(framed).lines(); for _ in 0..number.unwrap_or(usize::MAX) { match lines.next_line().await.context("Failed to read stream")? { Some(line) => println!("{}", line), None => break, } } } else { let client = Client::new(io, Some(100), opt.timeout) .await .with_context(|| format!("Failed to connect to {}", opt.url))?; let mut notifications = client.take(number.unwrap_or(usize::MAX)); while let Some(notification) = notifications.next().await { let notification = notification.context("Failed to receive notification")?; pretty::notification(&notification); } process::exit(0); } } // Request response mode command => { // Connect let mut framed = Client::new(io, None, opt.timeout) .await .context("Failed to connect")? .framed(); // Request let request = Request::try_from(command.clone()) .context("Failed to convert command into request")?; framed .send(Message::new_request(request)) .await .context("Failed to send request")?; // Extra file transfer for install hack if let Subcommand::Install { npk, .. } = command { copy( &mut fs::File::open(npk).await.context("Failed to open npk")?, &mut framed, ) .await .context("Failed to stream npk")?; } if opt.json { let response = BufReader::new(framed) .lines() .next_line() .await .context("Failed to receive response")? .ok_or_else(|| anyhow!("Failed to receive response"))?; println!("{}", response); process::exit(0); } else { // Read next deserialized response and pretty print let exit = match framed .next() .await .ok_or_else(|| anyhow!("Failed to receive response"))?? { api::model::Message::Response(response) => pretty::response(&response), _ => unreachable!(), }; process::exit(exit); } } }; Ok(()) }
main
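// A minimal sketch (not part of nstar; the helper name is hypothetical) of the
// "KEY=VALUE" env parsing performed in the Start branch above. Note the
// original splits on every '=', so a value containing '=' would be truncated;
// splitn(2, '=') keeps the remainder intact.
use std::collections::HashMap;

fn parse_env_pairs(pairs: &[&str]) -> Result<HashMap<String, String>, String> {
    let mut map = HashMap::with_capacity(pairs.len());
    for pair in pairs {
        let mut split = pair.splitn(2, '=');
        // `next()` on a split always yields at least the key part.
        let key = split.next().unwrap_or_default().to_string();
        let value = split
            .next()
            .ok_or_else(|| format!("Invalid env: {}", pair))?
            .to_string();
        map.insert(key, value);
    }
    Ok(map)
}

// e.g. parse_env_pairs(&["RUST_LOG=debug"]) -> Ok({"RUST_LOG": "debug"})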
BitrueClient.ts
/* eslint-disable @typescript-eslint/member-ordering */ /* eslint-disable @typescript-eslint/no-unsafe-call */ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/unbound-method */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ import { BasicClient } from "../BasicClient"; import { ClientOptions } from "../ClientOptions"; import { NotImplementedFn } from "../NotImplementedFn"; import { Ticker } from "../Ticker"; import { Trade } from "../Trade"; import * as zlib from "zlib"; /** * Implements the exchange according to API specifications: * */ export class
extends BasicClient { public id: number; public _pingInterval: NodeJS.Timeout; constructor({ wssPath = "wss://ws.bitrue.com/kline-api/ws", watcherMs, retryTimeoutMs }: ClientOptions = {}) { super(wssPath, "Bitrue", undefined, watcherMs, retryTimeoutMs); this.hasTickers = true; this.hasTrades = true; this.id = 0; this._onMessageInf = this._onMessageInf.bind(this); this._sendPing = this._sendPing.bind(this); } protected _beforeConnect() { this._wss.on("connected", this._startPing.bind(this)); this._wss.on("disconnected", this._stopPing.bind(this)); this._wss.on("closed", this._stopPing.bind(this)); } protected _startPing() { clearInterval(this._pingInterval); this._pingInterval = setInterval(this._sendPing, 60000); } protected _stopPing() { clearInterval(this._pingInterval); } protected _sendPing() { if (this._wss) { this._wss.send( JSON.stringify({ "pong": Date.now() }) ); } } protected _sendSubTicker(remote_id: string) { this._wss.send( JSON.stringify({ event: "sub", params: { cb_id: `${remote_id.toLowerCase()}`, channel: `market_${remote_id.toLowerCase()}_ticker` }, id: ++this.id, }), ); } protected _sendUnsubTicker(remote_id: string) { this._wss.send( JSON.stringify({ event: "unsub", params: { cb_id: `${remote_id.toLowerCase()}`, channel: `market_${remote_id.toLowerCase()}_ticker` }, id: ++this.id, }), ); } protected _sendSubTrades(remote_id: string) { this._wss.send( JSON.stringify({ event: "sub", params: { cb_id: `${remote_id.toLowerCase()}`, channel: `market_${remote_id.toLowerCase()}_trade_ticker` }, id: ++this.id, }), ); } protected _sendUnsubTrades(remote_id) { this._wss.send( JSON.stringify({ method: "unsub", params: `market_${remote_id.toLowerCase()}_trade_ticker`, id: ++this.id, }), ); } protected _sendSubLevel2Updates = NotImplementedFn; protected _sendUnsubLevel2Updates = NotImplementedFn; protected _sendSubCandles = NotImplementedFn; protected _sendUnsubCandles = NotImplementedFn; protected _sendSubLevel2Snapshots = NotImplementedFn; protected _sendUnsubLevel2Snapshots = NotImplementedFn; protected _sendSubLevel3Snapshots = NotImplementedFn; protected _sendUnsubLevel3Snapshots = NotImplementedFn; protected _sendSubLevel3Updates = NotImplementedFn; protected _sendUnsubLevel3Updates = NotImplementedFn; protected _onMessage(raw) { zlib.gunzip(raw, this._onMessageInf); } protected _onMessageInf(err, raw) { // handle inflate error if (err) { this.emit("error", err); return; } // handle parse error let msg; try { msg = JSON.parse(raw.toString("utf8")); } catch (err) { this.emit("error", err, raw); return; } // handle subscription success if (msg.event_rep === "subed" && msg.status === "ok") { return; } // handle errors if (msg.error) { this.emit("error", msg.error); return; } // handle ping if (msg.ping) { this._sendPing(); return; } // handle trades if (msg.channel.includes("_trade_ticker")) { const remote_id = msg.channel.replace("market_", "").replace("_trade_ticker", ""); const market = this._tradeSubs.get(remote_id.toUpperCase()) || this._tradeSubs.get(remote_id.toLowerCase()); if (!market) return; // trades arrive newest first for (const datum of msg.tick.data.reverse()) { const trade = this._constructTrade({ ...datum, ts: msg.ts }, market); this.emit("trade", trade, market); } return; } // handle ticker if (msg.channel.includes("_ticker") && !msg.channel.includes("_trade_ticker")) { const data = { ...msg.tick, ts: msg.ts }; const remote_id = msg.channel.replace("market_", "").replace("_ticker", ""); const market = this._tickerSubs.get(remote_id.toUpperCase()) || 
this._tickerSubs.get(remote_id.toLowerCase()); if (!market) return; const ticker = this._constructTicker(data, market); this.emit("ticker", ticker, market); return; } } /** { tick: { amount: 375936776.909343, rose: 0.015, close: 47495.02, vol: 7942.2064, high: 48189.84, low: 46741.86, open: 46790.69 }, channel: 'market_btcusdt_ticker', ts: 1648518029349 } */ protected _constructTicker(data, market) { const change = Number(data.close) - Number(data.open); const changePercent = (change / Number(data.open)) * 100; return new Ticker({ exchange: this.name, base: market.base, quote: market.quote, timestamp: data.ts, last: data.close.toString(), open: data.open.toString(), high: data.high.toString(), low: data.low.toString(), volume: data.vol.toString(), quoteVolume: data.amount.toString(), change: change.toFixed(8), changePercent: changePercent.toFixed(2), ask: data.close.toString(), askVolume: undefined, bid: data.close.toString(), bidVolume: undefined, }); } /** { "method": "trades.update", "params": [ true, [ { id: 3282939928, time: 1597419159, amount: '0.1', price: '11687.04', type: 'sell' } ], "ETH_USDT" ], "id": null } */ protected _constructTrade(datum, market) { const { id, ts, price, amount, side } = datum; return new Trade({ exchange: this.name, base: market.base, quote: market.quote, tradeId: id.toString(), side: side?.toLowerCase(), unix: ts, price: price?.toString(), amount: amount?.toString(), }); } }
BitrueClient
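// A standalone sketch (hypothetical helper, not part of the client above) of
// how _constructTicker derives change and changePercent from the tick's
// open/close fields.
function tickerChange(open: number, close: number): { change: string; changePercent: string } {
    const change = close - open;
    const changePercent = (change / open) * 100;
    return { change: change.toFixed(8), changePercent: changePercent.toFixed(2) };
}

// Using the sample payload shown in the comment above:
// tickerChange(46790.69, 47495.02) -> { change: "704.33000000", changePercent: "1.51" }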
test_utils.py
# Copyright (c) 2010 Robert Mela # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # try: import unittest2 as unittest except ImportError: import unittest import datetime import hashlib import hmac import locale import mock import thread import time import boto.utils from boto.utils import Password from boto.utils import pythonize_name from boto.utils import _build_instance_metadata_url from boto.utils import get_instance_userdata from boto.utils import retry_url from boto.utils import LazyLoadMetadata from boto.compat import json @unittest.skip("http://bugs.python.org/issue7980") class TestThreadImport(unittest.TestCase): def test_strptime(self): def f(): for m in xrange(1, 13): for d in xrange(1,29): boto.utils.parse_ts('2013-01-01T00:00:00Z') for _ in xrange(10): thread.start_new_thread(f, ()) time.sleep(3) class TestPassword(unittest.TestCase): """Test basic password functionality""" def clstest(self, cls): """Insure that password.__eq__ hashes test value before compare.""" password = cls('foo') self.assertNotEquals(password, 'foo') password.set('foo') hashed = str(password) self.assertEquals(password, 'foo') self.assertEquals(password.str, hashed) password = cls(hashed) self.assertNotEquals(password.str, 'foo') self.assertEquals(password, 'foo') self.assertEquals(password.str, hashed) def test_aaa_version_1_9_default_behavior(self): self.clstest(Password) def test_custom_hashclass(self): class SHA224Password(Password): hashfunc = hashlib.sha224 password = SHA224Password() password.set('foo') self.assertEquals(hashlib.sha224('foo').hexdigest(), str(password)) def test_hmac(self): def hmac_hashfunc(cls, msg): return hmac.new('mysecretkey', msg) class HMACPassword(Password): hashfunc = hmac_hashfunc self.clstest(HMACPassword) password = HMACPassword() password.set('foo') self.assertEquals(str(password), hmac.new('mysecretkey', 'foo').hexdigest()) def test_constructor(self): hmac_hashfunc = lambda msg: hmac.new('mysecretkey', msg) password = Password(hashfunc=hmac_hashfunc) password.set('foo') self.assertEquals(password.str, hmac.new('mysecretkey', 'foo').hexdigest()) class TestPythonizeName(unittest.TestCase): def test_empty_string(self): self.assertEqual(pythonize_name(''), '') def test_all_lower_case(self): self.assertEqual(pythonize_name('lowercase'), 'lowercase') def test_all_upper_case(self): self.assertEqual(pythonize_name('UPPERCASE'), 'uppercase') def test_camel_case(self): self.assertEqual(pythonize_name('OriginallyCamelCased'), 'originally_camel_cased') def test_already_pythonized(self): 
self.assertEqual(pythonize_name('already_pythonized'), 'already_pythonized') def test_multiple_upper_cased_letters(self): self.assertEqual(pythonize_name('HTTPRequest'), 'http_request') self.assertEqual(pythonize_name('RequestForHTTP'), 'request_for_http') def test_string_with_numbers(self): self.assertEqual(pythonize_name('HTTPStatus200Ok'), 'http_status_200_ok') class TestBuildInstanceMetadataURL(unittest.TestCase): def test_normal(self): # This is the all-defaults case. self.assertEqual(_build_instance_metadata_url( 'http://169.254.169.254', 'latest', 'meta-data/' ), 'http://169.254.169.254/latest/meta-data/' ) def test_custom_path(self): self.assertEqual(_build_instance_metadata_url( 'http://169.254.169.254', 'latest', 'dynamic/' ), 'http://169.254.169.254/latest/dynamic/' ) def test_custom_version(self): self.assertEqual(_build_instance_metadata_url( 'http://169.254.169.254', '1.0', 'meta-data/' ), 'http://169.254.169.254/1.0/meta-data/' ) def test_custom_url(self): self.assertEqual(_build_instance_metadata_url( 'http://10.0.1.5', 'latest', 'meta-data/' ), 'http://10.0.1.5/latest/meta-data/' ) def test_all_custom(self): self.assertEqual(_build_instance_metadata_url( 'http://10.0.1.5', '2013-03-22', 'user-data' ), 'http://10.0.1.5/2013-03-22/user-data' ) class TestRetryURL(unittest.TestCase): def setUp(self): self.urlopen_patch = mock.patch('urllib2.urlopen') self.opener_patch = mock.patch('urllib2.build_opener') self.urlopen = self.urlopen_patch.start() self.opener = self.opener_patch.start() def tearDown(self): self.urlopen_patch.stop() self.opener_patch.stop() def set_normal_response(self, response): fake_response = mock.Mock() fake_response.read.return_value = response self.urlopen.return_value = fake_response def set_no_proxy_allowed_response(self, response): fake_response = mock.Mock() fake_response.read.return_value = response self.opener.return_value.open.return_value = fake_response def test_retry_url_uses_proxy(self): self.set_normal_response('normal response') self.set_no_proxy_allowed_response('no proxy response') response = retry_url('http://10.10.10.10/foo', num_retries=1) self.assertEqual(response, 'no proxy response') class TestLazyLoadMetadata(unittest.TestCase): def setUp(self): self.retry_url_patch = mock.patch('boto.utils.retry_url') boto.utils.retry_url = self.retry_url_patch.start() def tearDown(self): self.retry_url_patch.stop() def set_normal_response(self, data): # here "data" should be a list of return values in some order fake_response = mock.Mock() fake_response.side_effect = data boto.utils.retry_url = fake_response def test_meta_data_with_invalid_json_format_happened_once(self): # here "key_data" will be stored in the "self._leaves" # when the class "LazyLoadMetadata" initialized key_data = "test" invalid_data = '{"invalid_json_format" : true,}' valid_data = '{ "%s" : {"valid_json_format": true}}' % key_data url = "/".join(["http://169.254.169.254", key_data]) num_retries = 2 self.set_normal_response([key_data, invalid_data, valid_data]) response = LazyLoadMetadata(url, num_retries) self.assertEqual(response.values()[0], json.loads(valid_data)) def test_meta_data_with_invalid_json_format_happened_twice(self): key_data = "test" invalid_data = '{"invalid_json_format" : true,}' valid_data = '{ "%s" : {"valid_json_format": true}}' % key_data url = "/".join(["http://169.254.169.254", key_data]) num_retries = 2 self.set_normal_response([key_data, invalid_data, invalid_data]) response = LazyLoadMetadata(url, num_retries) with self.assertRaises(ValueError): 
response.values()[0] def
(self):
        self.set_normal_response(['foo'])

        userdata = get_instance_userdata()

        self.assertEqual('foo', userdata)
        boto.utils.retry_url.assert_called_with(
            'http://169.254.169.254/latest/user-data',
            retry_on_404=False)


class TestStringToDatetimeParsing(unittest.TestCase):
    """ Test string to datetime parsing """

    def setUp(self):
        self._saved = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')

    def tearDown(self):
        locale.setlocale(locale.LC_ALL, self._saved)

    def test_nonus_locale(self):
        test_string = 'Thu, 15 May 2014 09:06:03 GMT'

        # Default strptime should fail
        with self.assertRaises(ValueError):
            datetime.datetime.strptime(test_string, boto.utils.RFC1123)

        # Our parser should succeed
        result = boto.utils.parse_ts(test_string)

        self.assertEqual(2014, result.year)
        self.assertEqual(5, result.month)
        self.assertEqual(15, result.day)
        self.assertEqual(9, result.hour)
        self.assertEqual(6, result.minute)


if __name__ == '__main__':
    unittest.main()
test_user_data
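# A hypothetical reimplementation (not boto's actual code) satisfying the
# pythonize_name contract pinned down by the tests above: CamelCase words,
# acronym runs, and digit groups all become snake_case.
import re

def pythonize_name_sketch(name):
    # Insert an underscore before any capital that starts a new word...
    s = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    # ...and between a lowercase/digit character and a following capital.
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s).lower()

assert pythonize_name_sketch('HTTPRequest') == 'http_request'
assert pythonize_name_sketch('RequestForHTTP') == 'request_for_http'
assert pythonize_name_sketch('HTTPStatus200Ok') == 'http_status_200_ok'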
is._string.js
/** * ----------------------------------------------------------------------------- * VITALS UNIT TESTS: vitals.is._string * ----------------------------------------------------------------------------- * @section base * @see [vitals.is docs](https://github.com/imaginate/vitals/wiki/vitals.is) * @see [test api](https://github.com/imaginate/vitals/blob/master/test/setup/interface.js) * @see [test helpers](https://github.com/imaginate/vitals/blob/master/test/setup/helpers.js) * * @author Adam Smith <[email protected]> (https://github.com/imaginate) * @copyright 2017 Adam A Smith <[email protected]> (https://github.com/imaginate) * * Annotations: * @see [JSDoc3](http://usejsdoc.org) * @see [Closure Compiler JSDoc Syntax](https://developers.google.com/closure/compiler/docs/js-for-compiler) */ method('is._string', 'is._str', function() { should('return true', function() { test('str', function() { var result = vitals.is._str('str'); assert( result === true ); }); test('str', 'str', 'str', function() { var result = vitals.is._str('str', 'str', 'str'); assert( result === true ); }); }); should('return false', function() { test('', function() { var result = vitals.is._str(''); assert( result === false ); }); test('<String>', 'str', 'str', function() { var result = vitals.is._str(new String('str'), 'str', 'str'); assert( result === false ); }); }); should('throw an error', function() { test(function() { assert.throws(function() { vitals.is._str(); }, validErr);
}); }); });
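// A hypothetical equivalent (not vitals source) of the behavior the tests
// above pin down: true only when every argument is a non-empty primitive
// string, and an error when called with no arguments.
function isNonEmptyPrimitiveString() {
  if (!arguments.length) throw new Error('at least one value is required');
  for (var i = 0; i < arguments.length; i++) {
    if (typeof arguments[i] !== 'string' || arguments[i] === '') return false;
  }
  return true;
}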
openapi.go
package openapi // placeholder to satisfy go.mod - this will be generated.
button.rs
use serde::{Deserialize, Serialize}; use winit::{MouseButton, VirtualKeyCode}; use super::{controller::ControllerButton, scroll_direction::ScrollDirection}; /// A Button is any kind of digital input that the engine supports. #[derive(Eq, PartialEq, Debug, Copy, Clone, Hash, Serialize, Deserialize)] pub enum Button { /// Virtual Keyboard keys, use this when the letter on the key matters /// more than the position of the key. Key(VirtualKeyCode), /// Scan code from keyboard, use this when the position of the key matters /// more than the letter on the key. ScanCode(u32), /// Mouse buttons Mouse(MouseButton), /// Mouse wheel (Do not use these with an emulated axis, instead use the MouseWheel axis.) MouseWheel(ScrollDirection), /// Controller buttons matching SDL controller model. /// A tuple of sequential controller_id in order of connection /// and specific type of used controller button. Controller(u32, ControllerButton), } impl From<VirtualKeyCode> for Button { fn from(keycode: VirtualKeyCode) -> Self { Button::Key(keycode) } } impl From<MouseButton> for Button { fn
(mouse_button: MouseButton) -> Self { Button::Mouse(mouse_button) } }
from
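// A usage sketch (assumes the winit types above are in scope): the From impls
// let call sites pass raw input types wherever `impl Into<Button>` is accepted.
fn to_button(input: impl Into<Button>) -> Button {
    input.into()
}

// to_button(VirtualKeyCode::A)  -> Button::Key(VirtualKeyCode::A)
// to_button(MouseButton::Left)  -> Button::Mouse(MouseButton::Left)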
command.go
package session import ( "github.com/df-mc/dragonfly/server/cmd" "github.com/go-gl/mathgl/mgl64" "github.com/sandertv/gophertunnel/minecraft/protocol" "github.com/sandertv/gophertunnel/minecraft/protocol/packet" ) // SendCommandOutput sends the output of a command to the player. It will be shown to the caller of the // command, which might be the player or a websocket server. func (s *Session) SendCommandOutput(output *cmd.Output) { if s == Nop { return } messages := make([]protocol.CommandOutputMessage, 0, output.MessageCount()+output.ErrorCount()) for _, message := range output.Messages() { messages = append(messages, protocol.CommandOutputMessage{ Success: true, Message: message, }) } for _, err := range output.Errors() { messages = append(messages, protocol.CommandOutputMessage{ Success: false, Message: err.Error(), }) } s.writePacket(&packet.CommandOutput{ CommandOrigin: s.handlers[packet.IDCommandRequest].(*CommandRequestHandler).origin, OutputType: packet.CommandOutputTypeAllOutput, SuccessCount: uint32(output.MessageCount()), OutputMessages: messages, }) } // sendAvailableCommands sends all available commands of the server. Once sent, they will be visible in the // /help list and will be auto-completed. func (s *Session) sendAvailableCommands() map[string]map[int]cmd.Runnable { commands := cmd.Commands() m := make(map[string]map[int]cmd.Runnable, len(commands)) pk := &packet.AvailableCommands{} for alias, c := range commands { if c.Name() != alias { // Don't add duplicate entries for aliases. continue } m[alias] = c.Runnables(s.c) params := c.Params(s.c) overloads := make([]protocol.CommandOverload, len(params)) for i, params := range params { for _, paramInfo := range params { t, enum := valueToParamType(paramInfo.Value, s.c) t |= protocol.CommandArgValid opt := byte(0) if _, ok := paramInfo.Value.(bool); ok { opt |= protocol.ParamOptionCollapseEnum } overloads[i].Parameters = append(overloads[i].Parameters, protocol.CommandParameter{ Name: paramInfo.Name, Type: t, Optional: paramInfo.Optional, Options: opt, Enum: enum, Suffix: paramInfo.Suffix, }) } } if len(params) > 0 { pk.Commands = append(pk.Commands, protocol.Command{ Name: c.Name(), Description: c.Description(), Aliases: c.Aliases(), Overloads: overloads, }) } } s.writePacket(pk) return m } // valueToParamType finds the command argument type of the value passed and returns it, in addition to creating // an enum if applicable. func valueToParamType(i any, source cmd.Source) (t uint32, enum protocol.CommandEnum)
// resendCommands resends all commands that a Session has access to if the map of runnable commands passed does not // match with the commands that the Session is currently allowed to execute. // True is returned if the commands were resent. func (s *Session) resendCommands(before map[string]map[int]cmd.Runnable) (map[string]map[int]cmd.Runnable, bool) { commands := cmd.Commands() m := make(map[string]map[int]cmd.Runnable, len(commands)) for alias, c := range commands { if c.Name() == alias { m[alias] = c.Runnables(s.c) } } if len(before) != len(m) { return s.sendAvailableCommands(), true } for name, r := range m { for k := range r { if _, ok := before[name][k]; !ok { return s.sendAvailableCommands(), true } } } return m, false } // enums returns a map of all enums exposed to the Session and records the values those enums currently hold. func (s *Session) enums() (map[string]cmd.Enum, map[string][]string) { enums, enumValues := make(map[string]cmd.Enum), make(map[string][]string) for alias, c := range cmd.Commands() { if c.Name() == alias { for _, params := range c.Params(s.c) { for _, paramInfo := range params { if enum, ok := paramInfo.Value.(cmd.Enum); ok { enums[enum.Type()] = enum enumValues[enum.Type()] = enum.Options(s.c) } } } } } return enums, enumValues } // resendEnums checks the options of the enums passed against the values that were previously recorded. If they do not // match, the enum is resent to the client and the values are updated in the before map. func (s *Session) resendEnums(enums map[string]cmd.Enum, before map[string][]string) { for name, enum := range enums { valuesBefore := before[name] values := enum.Options(s.c) before[name] = values if len(valuesBefore) != len(values) { s.writePacket(&packet.UpdateSoftEnum{EnumType: name, Options: values, ActionType: packet.SoftEnumActionSet}) continue } for k, v := range values { if valuesBefore[k] != v { s.writePacket(&packet.UpdateSoftEnum{EnumType: name, Options: values, ActionType: packet.SoftEnumActionSet}) break } } } }
{ switch i.(type) { case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64: return protocol.CommandArgTypeInt, enum case float32, float64: return protocol.CommandArgTypeFloat, enum case string: return protocol.CommandArgTypeString, enum case cmd.Varargs: return protocol.CommandArgTypeRawText, enum case cmd.Target, []cmd.Target: return protocol.CommandArgTypeTarget, enum case bool: return 0, protocol.CommandEnum{ Type: "bool", Options: []string{"true", "1", "false", "0"}, } case mgl64.Vec3: return protocol.CommandArgTypePosition, enum } if sub, ok := i.(cmd.SubCommand); ok { return 0, protocol.CommandEnum{ Type: "SubCommand" + sub.SubName(), Options: []string{sub.SubName()}, } } if enum, ok := i.(cmd.Enum); ok { return 0, protocol.CommandEnum{ Type: enum.Type(), Options: enum.Options(source), Dynamic: true, } } return protocol.CommandArgTypeValue, enum }
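// A reduced, standalone illustration (hypothetical, outside dragonfly) of the
// type-switch idiom valueToParamType uses above to classify parameter values.
package main

import "fmt"

func classify(v any) string {
	switch v.(type) {
	case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		return "int"
	case float32, float64:
		return "float"
	case string:
		return "string"
	case bool:
		return "enum(true/false)"
	default:
		return "value"
	}
}

func main() {
	fmt.Println(classify(3), classify(2.5), classify("x"), classify(true))
}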
main.go
// Copyright 2016-2020, Pulumi Corporation. All rights reserved. package main import ( "encoding/base64" "github.com/pulumi/pulumi-azure-native/sdk/go/azure/containerservice" "github.com/pulumi/pulumi-azure-native/sdk/go/azure/resources"
) func main() { pulumi.Run(func(ctx *pulumi.Context) error { // Create an Azure Resource Group resourceGroup, err := resources.NewResourceGroup(ctx, "azure-go-aks", nil) if err != nil { return err } // Create an AD service principal. adApp, err := azuread.NewApplication(ctx, "aks", &azuread.ApplicationArgs{ DisplayName: pulumi.String("aks"), }) if err != nil { return err } adSp, err := azuread.NewServicePrincipal(ctx, "aksSp", &azuread.ServicePrincipalArgs{ ApplicationId: adApp.ApplicationId, }) if err != nil { return err } // Generate a random password. password, err := random.NewRandomPassword(ctx, "password", &random.RandomPasswordArgs{ Length: pulumi.Int(20), Special: pulumi.Bool(true), }) if err != nil { return err } // Create the Service Principal Password. adSpPassword, err := azuread.NewServicePrincipalPassword(ctx, "aksSpPassword", &azuread.ServicePrincipalPasswordArgs{ ServicePrincipalId: adSp.ID(), Value: password.Result, EndDate: pulumi.String("2099-01-01T00:00:00Z"), }) if err != nil { return err } // Generate an SSH key. sshArgs := tls.PrivateKeyArgs{ Algorithm: pulumi.String("RSA"), RsaBits: pulumi.Int(4096), } sshKey, err := tls.NewPrivateKey(ctx, "ssh-key", &sshArgs) if err != nil { return err } // Create the Azure Kubernetes Service cluster. cluster, err := containerservice.NewManagedCluster(ctx, "go-aks", &containerservice.ManagedClusterArgs{ ResourceGroupName: resourceGroup.Name, AgentPoolProfiles: containerservice.ManagedClusterAgentPoolProfileArray{ &containerservice.ManagedClusterAgentPoolProfileArgs{ Name: pulumi.String("agentpool"), Mode: pulumi.String("System"), OsDiskSizeGB: pulumi.Int(30), Count: pulumi.Int(3), VmSize: pulumi.String("Standard_DS2_v2"), OsType: pulumi.String("Linux"), }, }, LinuxProfile: &containerservice.ContainerServiceLinuxProfileArgs{ AdminUsername: pulumi.String("testuser"), Ssh: containerservice.ContainerServiceSshConfigurationArgs{ PublicKeys: containerservice.ContainerServiceSshPublicKeyArray{ containerservice.ContainerServiceSshPublicKeyArgs{ KeyData: sshKey.PublicKeyOpenssh, }, }, }, }, DnsPrefix: resourceGroup.Name, ServicePrincipalProfile: &containerservice.ManagedClusterServicePrincipalProfileArgs{ ClientId: adApp.ApplicationId, Secret: adSpPassword.Value, }, KubernetesVersion: pulumi.String("1.18.14"), }) if err != nil { return err } ctx.Export("kubeconfig", pulumi.All(cluster.Name, resourceGroup.Name, resourceGroup.ID()).ApplyT(func(args interface{}) (string, error) { clusterName := args.([]interface{})[0].(string) resourceGroupName := args.([]interface{})[1].(string) creds, err := containerservice.ListManagedClusterUserCredentials(ctx, &containerservice.ListManagedClusterUserCredentialsArgs{ ResourceGroupName: resourceGroupName, ResourceName: clusterName, }) if err != nil { return "", err } encoded := creds.Kubeconfigs[0].Value kubeconfig, err := base64.StdEncoding.DecodeString(encoded) if err != nil { return "", err } return string(kubeconfig), nil })) return nil }) }
"github.com/pulumi/pulumi-azuread/sdk/v4/go/azuread" "github.com/pulumi/pulumi-random/sdk/v4/go/random" "github.com/pulumi/pulumi-tls/sdk/v4/go/tls" "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
myfilter.py
# -*- coding: utf8 -*- from pandocfilters import toJSONFilter, Link, Str def
(key, value, form, meta): if key == 'Link': return Str("replaced_text") if __name__ == "__main__": toJSONFilter(myfilter)
myfilter
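# A quick check of the filter's contract (assumes the action above is
# importable as `myfilter`): it returns a replacement node for 'Link' keys and
# None -- meaning "keep the node unchanged" -- for everything else.
from pandocfilters import Str

assert myfilter('Link', [], 'html', {}) == Str('replaced_text')
assert myfilter('Para', [], 'html', {}) is None

# Typical invocation: pandoc streams its AST through the filter over
# stdin/stdout, e.g. `pandoc input.md --filter ./myfilter.py -o output.md`.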
memory.rs
//! The memory subsystem. //! //! Generally, we use `Pointer` to denote memory addresses. However, some operations //! have a "size"-like parameter, and they take `Scalar` for the address because //! if the size is 0, then the pointer can also be a (properly aligned, non-null) //! integer. It is crucial that these operations call `check_align` *before* //! short-circuiting the empty case! use std::assert_matches::assert_matches; use std::borrow::Cow; use std::collections::VecDeque; use std::convert::TryFrom; use std::fmt; use std::ptr; use rustc_ast::Mutability; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_middle::mir::display_allocation; use rustc_middle::ty::{Instance, ParamEnv, TyCtxt}; use rustc_target::abi::{Align, HasDataLayout, Size}; use super::{ alloc_range, AllocId, AllocMap, AllocRange, Allocation, CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak, Pointer, PointerArithmetic, Provenance, Scalar, ScalarMaybeUninit, }; #[derive(Debug, PartialEq, Copy, Clone)] pub enum MemoryKind<T> { /// Stack memory. Error if deallocated except during a stack pop. Stack, /// Memory allocated by `caller_location` intrinsic. Error if ever deallocated. CallerLocation, /// Additional memory kinds a machine wishes to distinguish from the builtin ones. Machine(T), } impl<T: MayLeak> MayLeak for MemoryKind<T> { #[inline] fn may_leak(self) -> bool { match self { MemoryKind::Stack => false, MemoryKind::CallerLocation => true, MemoryKind::Machine(k) => k.may_leak(), } } } impl<T: fmt::Display> fmt::Display for MemoryKind<T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { MemoryKind::Stack => write!(f, "stack variable"), MemoryKind::CallerLocation => write!(f, "caller location"), MemoryKind::Machine(m) => write!(f, "{}", m), } } } /// Used by `get_size_and_align` to indicate whether the allocation needs to be live. #[derive(Debug, Copy, Clone)] pub enum AllocCheck { /// Allocation must be live and not a function pointer. Dereferenceable, /// Allocations needs to be live, but may be a function pointer. Live, /// Allocation may be dead. MaybeDead, } /// The value of a function pointer. #[derive(Debug, Copy, Clone)] pub enum FnVal<'tcx, Other> { Instance(Instance<'tcx>), Other(Other), } impl<'tcx, Other> FnVal<'tcx, Other> { pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> { match self { FnVal::Instance(instance) => Ok(instance), FnVal::Other(_) => { throw_unsup_format!("'foreign' function pointers are not supported in this context") } } } } // `Memory` has to depend on the `Machine` because some of its operations // (e.g., `get`) call a `Machine` hook. pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> { /// Allocations local to this instance of the miri engine. The kind /// helps ensure that the same mechanism is used for allocation and /// deallocation. When an allocation is not found here, it is a /// global and looked up in the `tcx` for read access. Some machines may /// have to mutate this map even on a read-only access to a global (because /// they do pointer provenance tracking and the allocations in `tcx` have /// the wrong type), so we let the machine override this type. /// Either way, if the machine allows writing to a global, doing so will /// create a copy of the global allocation here. // FIXME: this should not be public, but interning currently needs access to it pub(super) alloc_map: M::MemoryMap, /// Map for "extra" function pointers. 
extra_fn_ptr_map: FxHashMap<AllocId, M::ExtraFnVal>, /// To be able to compare pointers with null, and to check alignment for accesses /// to ZSTs (where pointers may dangle), we keep track of the size even for allocations /// that do not exist any more. // FIXME: this should not be public, but interning currently needs access to it pub(super) dead_alloc_map: FxHashMap<AllocId, (Size, Align)>, } /// A reference to some allocation that was already bounds-checked for the given region /// and had the on-access machine hooks run. #[derive(Copy, Clone)] pub struct AllocRef<'a, 'tcx, Tag, Extra> { alloc: &'a Allocation<Tag, Extra>, range: AllocRange, tcx: TyCtxt<'tcx>, alloc_id: AllocId, } /// A reference to some allocation that was already bounds-checked for the given region /// and had the on-access machine hooks run. pub struct AllocRefMut<'a, 'tcx, Tag, Extra> { alloc: &'a mut Allocation<Tag, Extra>, range: AllocRange, tcx: TyCtxt<'tcx>, alloc_id: AllocId, } impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> { pub fn new() -> Self { Memory { alloc_map: M::MemoryMap::default(), extra_fn_ptr_map: FxHashMap::default(), dead_alloc_map: FxHashMap::default(), } } /// This is used by [priroda](https://github.com/oli-obk/priroda) pub fn alloc_map(&self) -> &M::MemoryMap { &self.alloc_map } } impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { /// Call this to turn untagged "global" pointers (obtained via `tcx`) into /// the machine pointer to the allocation. Must never be used /// for any other pointers, nor for TLS statics. /// /// Using the resulting pointer represents a *direct* access to that memory /// (e.g. by directly using a `static`), /// as opposed to access through a pointer that was created by the program. /// /// This function can fail only if `ptr` points to an `extern static`. #[inline] pub fn global_base_pointer( &self, ptr: Pointer<AllocId>, ) -> InterpResult<'tcx, Pointer<M::PointerTag>> { let alloc_id = ptr.provenance; // We need to handle `extern static`. match self.tcx.get_global_alloc(alloc_id) { Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => { bug!("global memory cannot point to thread-local static") } Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => { return M::extern_static_base_pointer(self, def_id); } _ => {} } // And we need to get the tag. Ok(M::tag_alloc_base_pointer(self, ptr)) } pub fn create_fn_alloc_ptr( &mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>, ) -> Pointer<M::PointerTag> { let id = match fn_val { FnVal::Instance(instance) => self.tcx.create_fn_alloc(instance), FnVal::Other(extra) => { // FIXME(RalfJung): Should we have a cache here? let id = self.tcx.reserve_alloc_id(); let old = self.memory.extra_fn_ptr_map.insert(id, extra); assert!(old.is_none()); id } }; // Functions are global allocations, so make sure we get the right base pointer. // We know this is not an `extern static` so this cannot fail. 
self.global_base_pointer(Pointer::from(id)).unwrap() } pub fn allocate_ptr( &mut self, size: Size, align: Align, kind: MemoryKind<M::MemoryKind>, ) -> InterpResult<'tcx, Pointer<M::PointerTag>> { let alloc = Allocation::uninit(size, align, M::PANIC_ON_ALLOC_FAIL)?; Ok(self.allocate_raw_ptr(alloc, kind)) } pub fn allocate_bytes_ptr( &mut self, bytes: &[u8], align: Align, kind: MemoryKind<M::MemoryKind>, mutability: Mutability, ) -> Pointer<M::PointerTag> { let alloc = Allocation::from_bytes(bytes, align, mutability); self.allocate_raw_ptr(alloc, kind) } pub fn allocate_raw_ptr( &mut self, alloc: Allocation, kind: MemoryKind<M::MemoryKind>, ) -> Pointer<M::PointerTag> { let id = self.tcx.reserve_alloc_id(); debug_assert_ne!( Some(kind), M::GLOBAL_KIND.map(MemoryKind::Machine), "dynamically allocating global memory" ); let alloc = M::init_allocation_extra(self, id, Cow::Owned(alloc), Some(kind)); self.memory.alloc_map.insert(id, (kind, alloc.into_owned())); M::tag_alloc_base_pointer(self, Pointer::from(id)) } pub fn reallocate_ptr( &mut self, ptr: Pointer<Option<M::PointerTag>>, old_size_and_align: Option<(Size, Align)>, new_size: Size, new_align: Align, kind: MemoryKind<M::MemoryKind>, ) -> InterpResult<'tcx, Pointer<M::PointerTag>> { let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr)?; if offset.bytes() != 0 { throw_ub_format!( "reallocating {:?} which does not point to the beginning of an object", ptr ); } // For simplicities' sake, we implement reallocate as "alloc, copy, dealloc". // This happens so rarely, the perf advantage is outweighed by the maintenance cost. let new_ptr = self.allocate_ptr(new_size, new_align, kind)?; let old_size = match old_size_and_align { Some((size, _align)) => size, None => self.get_alloc_raw(alloc_id)?.size(), }; // This will also call the access hooks. self.mem_copy( ptr, Align::ONE, new_ptr.into(), Align::ONE, old_size.min(new_size), /*nonoverlapping*/ true, )?; self.deallocate_ptr(ptr, old_size_and_align, kind)?; Ok(new_ptr) } #[instrument(skip(self), level = "debug")] pub fn deallocate_ptr( &mut self, ptr: Pointer<Option<M::PointerTag>>, old_size_and_align: Option<(Size, Align)>, kind: MemoryKind<M::MemoryKind>, ) -> InterpResult<'tcx> { let (alloc_id, offset, tag) = self.ptr_get_alloc_id(ptr)?; trace!("deallocating: {}", alloc_id); if offset.bytes() != 0 { throw_ub_format!( "deallocating {:?} which does not point to the beginning of an object", ptr ); } let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else { // Deallocating global memory -- always an error return Err(match self.tcx.get_global_alloc(alloc_id) { Some(GlobalAlloc::Function(..)) => { err_ub_format!("deallocating {}, which is a function", alloc_id) } Some(GlobalAlloc::Static(..) 
| GlobalAlloc::Memory(..)) => { err_ub_format!("deallocating {}, which is static memory", alloc_id) } None => err_ub!(PointerUseAfterFree(alloc_id)), } .into()); }; debug!(?alloc); if alloc.mutability == Mutability::Not { throw_ub_format!("deallocating immutable allocation {}", alloc_id); } if alloc_kind != kind { throw_ub_format!( "deallocating {}, which is {} memory, using {} deallocation operation", alloc_id, alloc_kind, kind ); } if let Some((size, align)) = old_size_and_align { if size != alloc.size() || align != alloc.align { throw_ub_format!( "incorrect layout on deallocation: {} has size {} and alignment {}, but gave size {} and alignment {}", alloc_id, alloc.size().bytes(), alloc.align.bytes(), size.bytes(), align.bytes(), ) } } // Let the machine take some extra action let size = alloc.size(); M::memory_deallocated( *self.tcx, &mut self.machine, &mut alloc.extra, (alloc_id, tag), alloc_range(Size::ZERO, size), )?; // Don't forget to remember size and align of this now-dead allocation let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align)); if old.is_some() { bug!("Nothing can be deallocated twice"); } Ok(()) } /// Internal helper function to determine the allocation and offset of a pointer (if any). #[inline(always)] fn get_ptr_access( &self, ptr: Pointer<Option<M::PointerTag>>, size: Size, align: Align, ) -> InterpResult<'tcx, Option<(AllocId, Size, M::TagExtra)>> { let align = M::enforce_alignment(&self).then_some(align); self.check_and_deref_ptr( ptr, size, align, CheckInAllocMsg::MemoryAccessTest, |alloc_id, offset, tag| { let (size, align) = self.get_alloc_size_and_align(alloc_id, AllocCheck::Dereferenceable)?; Ok((size, align, (alloc_id, offset, tag))) }, ) } /// Check if the given pointer points to live memory of given `size` and `align` /// (ignoring `M::enforce_alignment`). The caller can control the error message for the /// out-of-bounds case. #[inline(always)] pub fn check_ptr_access_align( &self, ptr: Pointer<Option<M::PointerTag>>, size: Size, align: Align, msg: CheckInAllocMsg, ) -> InterpResult<'tcx> { self.check_and_deref_ptr(ptr, size, Some(align), msg, |alloc_id, _, _| { let check = match msg { CheckInAllocMsg::DerefTest | CheckInAllocMsg::MemoryAccessTest => { AllocCheck::Dereferenceable } CheckInAllocMsg::PointerArithmeticTest | CheckInAllocMsg::OffsetFromTest | CheckInAllocMsg::InboundsTest => AllocCheck::Live, }; let (size, align) = self.get_alloc_size_and_align(alloc_id, check)?; Ok((size, align, ())) })?; Ok(()) } /// Low-level helper function to check if a ptr is in-bounds and potentially return a reference /// to the allocation it points to. Supports both shared and mutable references, as the actual /// checking is offloaded to a helper closure. `align` defines whether and which alignment check /// is done. Returns `None` for size 0, and otherwise `Some` of what `alloc_size` returned. fn check_and_deref_ptr<T>( &self, ptr: Pointer<Option<M::PointerTag>>, size: Size, align: Option<Align>, msg: CheckInAllocMsg, alloc_size: impl FnOnce(AllocId, Size, M::TagExtra) -> InterpResult<'tcx, (Size, Align, T)>, ) -> InterpResult<'tcx, Option<T>> { fn check_offset_align<'tcx>(offset: u64, align: Align) -> InterpResult<'tcx> { if offset % align.bytes() == 0 { Ok(()) } else { // The biggest power of two through which `offset` is divisible. 
let offset_pow2 = 1 << offset.trailing_zeros(); throw_ub!(AlignmentCheckFailed { has: Align::from_bytes(offset_pow2).unwrap(), required: align, }) } } Ok(match self.ptr_try_get_alloc_id(ptr) { Err(addr) => { // We couldn't get a proper allocation. This is only okay if the access size is 0, // and the address is not null. if size.bytes() > 0 || addr == 0 { throw_ub!(DanglingIntPointer(addr, msg)); } // Must be aligned. if let Some(align) = align { check_offset_align(addr, align)?; } None } Ok((alloc_id, offset, tag)) => { let (alloc_size, alloc_align, ret_val) = alloc_size(alloc_id, offset, tag)?; // Test bounds. This also ensures non-null. // It is sufficient to check this for the end pointer. Also check for overflow! if offset.checked_add(size, &self.tcx).map_or(true, |end| end > alloc_size) { throw_ub!(PointerOutOfBounds { alloc_id, alloc_size, ptr_offset: self.machine_usize_to_isize(offset.bytes()), ptr_size: size, msg, }) } // Ensure we never consider the null pointer dereferencable. if M::PointerTag::OFFSET_IS_ADDR { assert_ne!(ptr.addr(), Size::ZERO); } // Test align. Check this last; if both bounds and alignment are violated // we want the error to be about the bounds. if let Some(align) = align { if M::force_int_for_alignment_check(self) { // `force_int_for_alignment_check` can only be true if `OFFSET_IS_ADDR` is true. check_offset_align(ptr.addr().bytes(), align)?; } else { // Check allocation alignment and offset alignment. if alloc_align.bytes() < align.bytes() { throw_ub!(AlignmentCheckFailed { has: alloc_align, required: align }); } check_offset_align(offset.bytes(), align)?; } } // We can still be zero-sized in this branch, in which case we have to // return `None`. if size.bytes() == 0 { None } else { Some(ret_val) } } })
/// Allocation accessors impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { /// Helper function to obtain a global (tcx) allocation. /// This attempts to return a reference to an existing allocation if /// one can be found in `tcx`. That, however, is only possible if `tcx` and /// this machine use the same pointer tag, so it is indirected through /// `M::tag_allocation`. fn get_global_alloc( &self, id: AllocId, is_write: bool, ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::PointerTag, M::AllocExtra>>> { let (alloc, def_id) = match self.tcx.get_global_alloc(id) { Some(GlobalAlloc::Memory(mem)) => { // Memory of a constant or promoted or anonymous memory referenced by a static. (mem, None) } Some(GlobalAlloc::Function(..)) => throw_ub!(DerefFunctionPointer(id)), None => throw_ub!(PointerUseAfterFree(id)), Some(GlobalAlloc::Static(def_id)) => { assert!(self.tcx.is_static(def_id)); assert!(!self.tcx.is_thread_local_static(def_id)); // Notice that every static has two `AllocId` that will resolve to the same // thing here: one maps to `GlobalAlloc::Static`, this is the "lazy" ID, // and the other one is maps to `GlobalAlloc::Memory`, this is returned by // `eval_static_initializer` and it is the "resolved" ID. // The resolved ID is never used by the interpreted program, it is hidden. // This is relied upon for soundness of const-patterns; a pointer to the resolved // ID would "sidestep" the checks that make sure consts do not point to statics! // The `GlobalAlloc::Memory` branch here is still reachable though; when a static // contains a reference to memory that was created during its evaluation (i.e., not // to another static), those inner references only exist in "resolved" form. if self.tcx.is_foreign_item(def_id) { throw_unsup!(ReadExternStatic(def_id)); } // Use a precise span for better cycle errors. (self.tcx.at(self.cur_span()).eval_static_initializer(def_id)?, Some(def_id)) } }; M::before_access_global(*self.tcx, &self.machine, id, alloc, def_id, is_write)?; // We got tcx memory. Let the machine initialize its "extra" stuff. let alloc = M::init_allocation_extra( self, id, // always use the ID we got as input, not the "hidden" one. Cow::Borrowed(alloc.inner()), M::GLOBAL_KIND.map(MemoryKind::Machine), ); Ok(alloc) } /// Gives raw access to the `Allocation`, without bounds or alignment checks. /// The caller is responsible for calling the access hooks! fn get_alloc_raw( &self, id: AllocId, ) -> InterpResult<'tcx, &Allocation<M::PointerTag, M::AllocExtra>> { // The error type of the inner closure here is somewhat funny. We have two // ways of "erroring": An actual error, or because we got a reference from // `get_global_alloc` that we can actually use directly without inserting anything anywhere. // So the error type is `InterpResult<'tcx, &Allocation<M::PointerTag>>`. let a = self.memory.alloc_map.get_or(id, || { let alloc = self.get_global_alloc(id, /*is_write*/ false).map_err(Err)?; match alloc { Cow::Borrowed(alloc) => { // We got a ref, cheaply return that as an "error" so that the // map does not get mutated. Err(Ok(alloc)) } Cow::Owned(alloc) => { // Need to put it into the map and return a ref to that let kind = M::GLOBAL_KIND.expect( "I got a global allocation that I have to copy but the machine does \ not expect that to happen", ); Ok((MemoryKind::Machine(kind), alloc)) } } }); // Now unpack that funny error type match a { Ok(a) => Ok(&a.1), Err(a) => a, } } /// "Safe" (bounds and align-checked) allocation access. 
pub fn get_ptr_alloc<'a>( &'a self, ptr: Pointer<Option<M::PointerTag>>, size: Size, align: Align, ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::PointerTag, M::AllocExtra>>> { let align = M::enforce_alignment(self).then_some(align); let ptr_and_alloc = self.check_and_deref_ptr( ptr, size, align, CheckInAllocMsg::MemoryAccessTest, |alloc_id, offset, tag| { let alloc = self.get_alloc_raw(alloc_id)?; Ok((alloc.size(), alloc.align, (alloc_id, offset, tag, alloc))) }, )?; if let Some((alloc_id, offset, tag, alloc)) = ptr_and_alloc { let range = alloc_range(offset, size); M::memory_read(*self.tcx, &self.machine, &alloc.extra, (alloc_id, tag), range)?; Ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id })) } else { // Even in this branch we have to be sure that we actually access the allocation, in // order to ensure that `static FOO: Type = FOO;` causes a cycle error instead of // magically pulling *any* ZST value from the ether. However, the `get_raw` above is // always called when `ptr` has an `AllocId`. Ok(None) } } /// Return the `extra` field of the given allocation. pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> { Ok(&self.get_alloc_raw(id)?.extra) } /// Gives raw mutable access to the `Allocation`, without bounds or alignment checks. /// The caller is responsible for calling the access hooks! /// /// Also returns a ptr to `self.extra` so that the caller can use it in parallel with the /// allocation. fn get_alloc_raw_mut( &mut self, id: AllocId, ) -> InterpResult<'tcx, (&mut Allocation<M::PointerTag, M::AllocExtra>, &mut M)> { // We have "NLL problem case #3" here, which cannot be worked around without loss of // efficiency even for the common case where the key is in the map. // <https://rust-lang.github.io/rfcs/2094-nll.html#problem-case-3-conditional-control-flow-across-functions> // (Cannot use `get_mut_or` since `get_global_alloc` needs `&self`.) if self.memory.alloc_map.get_mut(id).is_none() { // Slow path. // Allocation not found locally, go look global. let alloc = self.get_global_alloc(id, /*is_write*/ true)?; let kind = M::GLOBAL_KIND.expect( "I got a global allocation that I have to copy but the machine does \ not expect that to happen", ); self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned())); } let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap(); if alloc.mutability == Mutability::Not { throw_ub!(WriteToReadOnly(id)) } Ok((alloc, &mut self.machine)) } /// "Safe" (bounds and align-checked) allocation access. pub fn get_ptr_alloc_mut<'a>( &'a mut self, ptr: Pointer<Option<M::PointerTag>>, size: Size, align: Align, ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::PointerTag, M::AllocExtra>>> { let parts = self.get_ptr_access(ptr, size, align)?; if let Some((alloc_id, offset, tag)) = parts { let tcx = *self.tcx; // FIXME: can we somehow avoid looking up the allocation twice here? // We cannot call `get_raw_mut` inside `check_and_deref_ptr` as that would duplicate `&mut self`. let (alloc, machine) = self.get_alloc_raw_mut(alloc_id)?; let range = alloc_range(offset, size); M::memory_written(tcx, machine, &mut alloc.extra, (alloc_id, tag), range)?; Ok(Some(AllocRefMut { alloc, range, tcx, alloc_id })) } else { Ok(None) } } /// Return the `extra` field of the given allocation. 
pub fn get_alloc_extra_mut<'a>( &'a mut self, id: AllocId, ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> { let (alloc, machine) = self.get_alloc_raw_mut(id)?; Ok((&mut alloc.extra, machine)) } /// Obtain the size and alignment of an allocation, even if that allocation has /// been deallocated. /// /// If `liveness` is `AllocCheck::MaybeDead`, this function always returns `Ok`. pub fn get_alloc_size_and_align( &self, id: AllocId, liveness: AllocCheck, ) -> InterpResult<'tcx, (Size, Align)> { // # Regular allocations // Don't use `self.get_raw` here as that will // a) cause cycles in case `id` refers to a static // b) duplicate a global's allocation in miri if let Some((_, alloc)) = self.memory.alloc_map.get(id) { return Ok((alloc.size(), alloc.align)); } // # Function pointers // (both global from `alloc_map` and local from `extra_fn_ptr_map`) if self.get_fn_alloc(id).is_some() { return if let AllocCheck::Dereferenceable = liveness { // The caller requested no function pointers. throw_ub!(DerefFunctionPointer(id)) } else { Ok((Size::ZERO, Align::ONE)) }; } // # Statics // Can't do this in the match argument, we may get cycle errors since the lock would // be held throughout the match. match self.tcx.get_global_alloc(id) { Some(GlobalAlloc::Static(did)) => { assert!(!self.tcx.is_thread_local_static(did)); // Use size and align of the type. let ty = self.tcx.type_of(did); let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap(); Ok((layout.size, layout.align.abi)) } Some(GlobalAlloc::Memory(alloc)) => { // Need to duplicate the logic here, because the global allocations have // different associated types than the interpreter-local ones. let alloc = alloc.inner(); Ok((alloc.size(), alloc.align)) } Some(GlobalAlloc::Function(_)) => bug!("We already checked function pointers above"), // The rest must be dead. None => { if let AllocCheck::MaybeDead = liveness { // Deallocated pointers are allowed, we should be able to find // them in the map. Ok(*self .memory .dead_alloc_map .get(&id) .expect("deallocated pointers should all be recorded in `dead_alloc_map`")) } else { throw_ub!(PointerUseAfterFree(id)) } } } } fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> { if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) { Some(FnVal::Other(*extra)) } else { match self.tcx.get_global_alloc(id) { Some(GlobalAlloc::Function(instance)) => Some(FnVal::Instance(instance)), _ => None, } } } pub fn get_ptr_fn( &self, ptr: Pointer<Option<M::PointerTag>>, ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> { trace!("get_fn({:?})", ptr); let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr)?; if offset.bytes() != 0 { throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))) } self.get_fn_alloc(alloc_id) .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))).into()) } pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> { self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not; Ok(()) } /// Create a lazy debug printer that prints the given allocation and all allocations it points /// to, recursively. #[must_use] pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'mir, 'tcx, M> { self.dump_allocs(vec![id]) } /// Create a lazy debug printer for a list of allocations and all allocations they point to, /// recursively. 
#[must_use] pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'mir, 'tcx, M> { allocs.sort(); allocs.dedup(); DumpAllocs { ecx: self, allocs } } /// Print leaked memory. Allocations reachable from `static_roots` or a `Global` allocation /// are not considered leaked. Leaks whose kind `may_leak()` returns true are not reported. pub fn leak_report(&self, static_roots: &[AllocId]) -> usize { // Collect the set of allocations that are *reachable* from `Global` allocations. let reachable = { let mut reachable = FxHashSet::default(); let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine); let mut todo: Vec<_> = self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| { if Some(kind) == global_kind { Some(id) } else { None } }); todo.extend(static_roots); while let Some(id) = todo.pop() { if reachable.insert(id) { // This is a new allocation, add its relocations to `todo`. if let Some((_, alloc)) = self.memory.alloc_map.get(id) { todo.extend( alloc.relocations().values().filter_map(|tag| tag.get_alloc_id()), ); } } } reachable }; // All allocations that are *not* `reachable` and *not* `may_leak` are considered leaking. let leaks: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| { if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) } }); let n = leaks.len(); if n > 0 { eprintln!("The following memory was leaked: {:?}", self.dump_allocs(leaks)); } n } } #[doc(hidden)] /// There's no way to use this directly, it's just a helper struct for the `dump_alloc(s)` methods. pub struct DumpAllocs<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> { ecx: &'a InterpCx<'mir, 'tcx, M>, allocs: Vec<AllocId>, } impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> std::fmt::Debug for DumpAllocs<'a, 'mir, 'tcx, M> { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // Cannot be a closure because it is generic in `Tag`, `Extra`. fn write_allocation_track_relocs<'tcx, Tag: Provenance, Extra>( fmt: &mut std::fmt::Formatter<'_>, tcx: TyCtxt<'tcx>, allocs_to_print: &mut VecDeque<AllocId>, alloc: &Allocation<Tag, Extra>, ) -> std::fmt::Result { for alloc_id in alloc.relocations().values().filter_map(|tag| tag.get_alloc_id()) { allocs_to_print.push_back(alloc_id); } write!(fmt, "{}", display_allocation(tcx, alloc)) } let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect(); // `allocs_printed` contains all allocations that we have already printed. let mut allocs_printed = FxHashSet::default(); while let Some(id) = allocs_to_print.pop_front() { if !allocs_printed.insert(id) { // Already printed, so skip this. continue; } write!(fmt, "{}", id)?; match self.ecx.memory.alloc_map.get(id) { Some(&(kind, ref alloc)) => { // normal alloc write!(fmt, " ({}, ", kind)?; write_allocation_track_relocs( &mut *fmt, *self.ecx.tcx, &mut allocs_to_print, alloc, )?; } None => { // global alloc match self.ecx.tcx.get_global_alloc(id) { Some(GlobalAlloc::Memory(alloc)) => { write!(fmt, " (unchanged global, ")?; write_allocation_track_relocs( &mut *fmt, *self.ecx.tcx, &mut allocs_to_print, alloc.inner(), )?; } Some(GlobalAlloc::Function(func)) => { write!(fmt, " (fn: {})", func)?; } Some(GlobalAlloc::Static(did)) => { write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?; } None => { write!(fmt, " (deallocated)")?; } } } } writeln!(fmt)?; } Ok(()) } } /// Reading and writing. 
impl<'tcx, 'a, Tag: Provenance, Extra> AllocRefMut<'a, 'tcx, Tag, Extra> { pub fn write_scalar( &mut self, range: AllocRange, val: ScalarMaybeUninit<Tag>, ) -> InterpResult<'tcx> { let range = self.range.subrange(range); debug!( "write_scalar in {} at {:#x}, size {}: {:?}", self.alloc_id, range.start.bytes(), range.size.bytes(), val ); Ok(self .alloc .write_scalar(&self.tcx, range, val) .map_err(|e| e.to_interp_error(self.alloc_id))?) } pub fn write_ptr_sized( &mut self, offset: Size, val: ScalarMaybeUninit<Tag>, ) -> InterpResult<'tcx> { self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size), val) } /// Mark the entire referenced range as uninitalized pub fn write_uninit(&mut self) -> InterpResult<'tcx> { Ok(self .alloc .write_uninit(&self.tcx, self.range) .map_err(|e| e.to_interp_error(self.alloc_id))?) } } impl<'tcx, 'a, Tag: Provenance, Extra> AllocRef<'a, 'tcx, Tag, Extra> { pub fn read_scalar( &self, range: AllocRange, read_provenance: bool, ) -> InterpResult<'tcx, ScalarMaybeUninit<Tag>> { let range = self.range.subrange(range); let res = self .alloc .read_scalar(&self.tcx, range, read_provenance) .map_err(|e| e.to_interp_error(self.alloc_id))?; debug!( "read_scalar in {} at {:#x}, size {}: {:?}", self.alloc_id, range.start.bytes(), range.size.bytes(), res ); Ok(res) } pub fn read_integer( &self, offset: Size, size: Size, ) -> InterpResult<'tcx, ScalarMaybeUninit<Tag>> { self.read_scalar(alloc_range(offset, size), /*read_provenance*/ false) } pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, ScalarMaybeUninit<Tag>> { self.read_scalar( alloc_range(offset, self.tcx.data_layout().pointer_size), /*read_provenance*/ true, ) } pub fn check_bytes( &self, range: AllocRange, allow_uninit: bool, allow_ptr: bool, ) -> InterpResult<'tcx> { Ok(self .alloc .check_bytes(&self.tcx, self.range.subrange(range), allow_uninit, allow_ptr) .map_err(|e| e.to_interp_error(self.alloc_id))?) } } impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { /// Reads the given number of bytes from memory. Returns them as a slice. /// /// Performs appropriate bounds checks. pub fn read_bytes_ptr( &self, ptr: Pointer<Option<M::PointerTag>>, size: Size, ) -> InterpResult<'tcx, &[u8]> { let Some(alloc_ref) = self.get_ptr_alloc(ptr, size, Align::ONE)? else { // zero-sized access return Ok(&[]); }; // Side-step AllocRef and directly access the underlying bytes more efficiently. // (We are staying inside the bounds here so all is good.) Ok(alloc_ref .alloc .get_bytes(&alloc_ref.tcx, alloc_ref.range) .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?) } /// Writes the given stream of bytes into memory. /// /// Performs appropriate bounds checks. pub fn write_bytes_ptr( &mut self, ptr: Pointer<Option<M::PointerTag>>, src: impl IntoIterator<Item = u8>, ) -> InterpResult<'tcx> { let mut src = src.into_iter(); let (lower, upper) = src.size_hint(); let len = upper.expect("can only write bounded iterators"); assert_eq!(lower, len, "can only write iterators with a precise length"); let size = Size::from_bytes(len); let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size, Align::ONE)? else { // zero-sized access assert_matches!( src.next(), None, "iterator said it was empty but returned an element" ); return Ok(()); }; // Side-step AllocRef and directly access the underlying bytes more efficiently. // (We are staying inside the bounds here so all is good.) 
let alloc_id = alloc_ref.alloc_id; let bytes = alloc_ref .alloc .get_bytes_mut(&alloc_ref.tcx, alloc_ref.range) .map_err(move |e| e.to_interp_error(alloc_id))?; // `zip` would stop when the first iterator ends; we want to definitely // cover all of `bytes`. for dest in bytes { *dest = src.next().expect("iterator was shorter than it said it would be"); } assert_matches!(src.next(), None, "iterator was longer than it said it would be"); Ok(()) } pub fn mem_copy( &mut self, src: Pointer<Option<M::PointerTag>>, src_align: Align, dest: Pointer<Option<M::PointerTag>>, dest_align: Align, size: Size, nonoverlapping: bool, ) -> InterpResult<'tcx> { self.mem_copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping) } pub fn mem_copy_repeatedly( &mut self, src: Pointer<Option<M::PointerTag>>, src_align: Align, dest: Pointer<Option<M::PointerTag>>, dest_align: Align, size: Size, num_copies: u64, nonoverlapping: bool, ) -> InterpResult<'tcx> { let tcx = self.tcx; // We need to do our own bounds-checks. let src_parts = self.get_ptr_access(src, size, src_align)?; let dest_parts = self.get_ptr_access(dest, size * num_copies, dest_align)?; // `Size` multiplication // FIXME: we look up both allocations twice here, once before for the `check_ptr_access` // and once below to get the underlying `&[mut] Allocation`. // Source alloc preparations and access hooks. let Some((src_alloc_id, src_offset, src_tag)) = src_parts else { // Zero-sized *source*, that means dst is also zero-sized and we have nothing to do. return Ok(()); }; let src_alloc = self.get_alloc_raw(src_alloc_id)?; let src_range = alloc_range(src_offset, size); M::memory_read(*tcx, &self.machine, &src_alloc.extra, (src_alloc_id, src_tag), src_range)?; // We need the `dest` ptr for the next operation, so we get it now. // We already did the source checks and called the hooks so we are good to return early. let Some((dest_alloc_id, dest_offset, dest_tag)) = dest_parts else { // Zero-sized *destination*. return Ok(()); }; // This checks relocation edges on the src, which needs to happen before // `prepare_relocation_copy`. let src_bytes = src_alloc .get_bytes_with_uninit_and_ptr(&tcx, src_range) .map_err(|e| e.to_interp_error(src_alloc_id))? .as_ptr(); // raw ptr, so we can also get a ptr to the destination allocation // first copy the relocations to a temporary buffer, because // `get_bytes_mut` will clear the relocations, which is correct, // since we don't want to keep any relocations at the target. let relocations = src_alloc.prepare_relocation_copy(self, src_range, dest_offset, num_copies); // Prepare a copy of the initialization mask. let compressed = src_alloc.compress_uninit_range(src_range); // Destination alloc preparations and access hooks. let (dest_alloc, extra) = self.get_alloc_raw_mut(dest_alloc_id)?; let dest_range = alloc_range(dest_offset, size * num_copies); M::memory_written( *tcx, extra, &mut dest_alloc.extra, (dest_alloc_id, dest_tag), dest_range, )?; let dest_bytes = dest_alloc .get_bytes_mut_ptr(&tcx, dest_range) .map_err(|e| e.to_interp_error(dest_alloc_id))? .as_mut_ptr(); if compressed.no_bytes_init() { // Fast path: If all bytes are `uninit` then there is nothing to copy. The target range // is marked as uninitialized but we otherwise omit changing the byte representation which may // be arbitrary for uninitialized bytes. // This also avoids writing to the target bytes so that the backing allocation is never // touched if the bytes stay uninitialized for the whole interpreter execution. 
On contemporary // operating system this can avoid physically allocating the page. dest_alloc .write_uninit(&tcx, dest_range) .map_err(|e| e.to_interp_error(dest_alloc_id))?; // We can forget about the relocations, this is all not initialized anyway. return Ok(()); } // SAFE: The above indexing would have panicked if there weren't at least `size` bytes // behind `src` and `dest`. Also, we use the overlapping-safe `ptr::copy` if `src` and // `dest` could possibly overlap. // The pointers above remain valid even if the `HashMap` table is moved around because they // point into the `Vec` storing the bytes. unsafe { if src_alloc_id == dest_alloc_id { if nonoverlapping { // `Size` additions if (src_offset <= dest_offset && src_offset + size > dest_offset) || (dest_offset <= src_offset && dest_offset + size > src_offset) { throw_ub_format!("copy_nonoverlapping called on overlapping ranges") } } for i in 0..num_copies { ptr::copy( src_bytes, dest_bytes.add((size * i).bytes_usize()), // `Size` multiplication size.bytes_usize(), ); } } else { for i in 0..num_copies { ptr::copy_nonoverlapping( src_bytes, dest_bytes.add((size * i).bytes_usize()), // `Size` multiplication size.bytes_usize(), ); } } } // now fill in all the "init" data dest_alloc.mark_compressed_init_range( &compressed, alloc_range(dest_offset, size), // just a single copy (i.e., not full `dest_range`) num_copies, ); // copy the relocations to the destination dest_alloc.mark_relocation_range(relocations); Ok(()) } } /// Machine pointer introspection. impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { pub fn scalar_to_ptr( &self, scalar: Scalar<M::PointerTag>, ) -> InterpResult<'tcx, Pointer<Option<M::PointerTag>>> { // We use `to_bits_or_ptr_internal` since we are just implementing the method people need to // call to force getting out a pointer. Ok( match scalar .to_bits_or_ptr_internal(self.pointer_size()) .map_err(|s| err_ub!(ScalarSizeMismatch(s)))? { Err(ptr) => ptr.into(), Ok(bits) => { let addr = u64::try_from(bits).unwrap(); M::ptr_from_addr_transmute(&self, addr) } }, ) } /// Test if this value might be null. /// If the machine does not support ptr-to-int casts, this is conservative. pub fn scalar_may_be_null(&self, scalar: Scalar<M::PointerTag>) -> InterpResult<'tcx, bool> { Ok(match scalar.try_to_int() { Ok(int) => int.is_null(), Err(_) => { // Can only happen during CTFE. let ptr = self.scalar_to_ptr(scalar)?; match self.ptr_try_get_alloc_id(ptr) { Ok((alloc_id, offset, _)) => { let (size, _align) = self .get_alloc_size_and_align(alloc_id, AllocCheck::MaybeDead) .expect("alloc info with MaybeDead cannot fail"); // If the pointer is out-of-bounds, it may be null. // Note that one-past-the-end (offset == size) is still inbounds, and never null. offset > size } Err(_offset) => bug!("a non-int scalar is always a pointer"), } } }) } /// Turning a "maybe pointer" into a proper pointer (and some information /// about where it points), or an absolute address. pub fn ptr_try_get_alloc_id( &self, ptr: Pointer<Option<M::PointerTag>>, ) -> Result<(AllocId, Size, M::TagExtra), u64> { match ptr.into_pointer_or_addr() { Ok(ptr) => match M::ptr_get_alloc(self, ptr) { Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)), None => { assert!(M::PointerTag::OFFSET_IS_ADDR); let (_, addr) = ptr.into_parts(); Err(addr.bytes()) } }, Err(addr) => Err(addr.bytes()), } } /// Turning a "maybe pointer" into a proper pointer (and some information about where it points). 
#[inline(always)] pub fn ptr_get_alloc_id( &self, ptr: Pointer<Option<M::PointerTag>>, ) -> InterpResult<'tcx, (AllocId, Size, M::TagExtra)> { self.ptr_try_get_alloc_id(ptr).map_err(|offset| { err_ub!(DanglingIntPointer(offset, CheckInAllocMsg::InboundsTest)).into() }) } }
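// The reachability pass in `leak_report` above is a plain worklist search over
// the allocation graph. Below is a minimal standalone sketch of the same idea
// (not rustc code; the ids and the edge map are invented for illustration):

use std::collections::{HashMap, HashSet};

/// Returns the ids in `edges` that are not reachable from any root.
fn find_leaks(edges: &HashMap<u32, Vec<u32>>, roots: &[u32]) -> Vec<u32> {
    let mut reachable: HashSet<u32> = HashSet::new();
    let mut todo: Vec<u32> = roots.to_vec();
    while let Some(id) = todo.pop() {
        // `insert` returns true only the first time we see `id`, which is how
        // `leak_report` avoids re-walking already-visited allocations.
        if reachable.insert(id) {
            if let Some(targets) = edges.get(&id) {
                todo.extend(targets);
            }
        }
    }
    let mut leaks: Vec<u32> =
        edges.keys().copied().filter(|id| !reachable.contains(id)).collect();
    leaks.sort();
    leaks
}

fn main() {
    let mut edges = HashMap::new();
    edges.insert(1, vec![2]); // root 1 references 2
    edges.insert(2, vec![]);
    edges.insert(3, vec![]); // referenced by nothing: reported as leaked
    assert_eq!(find_leaks(&edges, &[1]), vec![3]);
}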
session.go
package main

import (
	"golang.org/x/crypto/ssh"
	"golang.org/x/term"
)

func handleSessionChannel(channel ssh.Channel, channelInput chan<- string) error {
	terminal := term.NewTerminal(channel, "$ ")
	for {
		line, err := terminal.ReadLine()
		if err != nil {
			if line != "" {
				channelInput <- line
			}
			return err
		}
		channelInput <- line
	}
}
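// Hypothetical wiring for handleSessionChannel; the ssh.Channel would come
// from an accepted "session" channel in a real server loop, and the names
// here are illustrative, not part of the original program:
//
//	input := make(chan string, 16)
//	go func() {
//		for line := range input {
//			log.Println("command:", line)
//		}
//	}()
//	// terminal.ReadLine returns io.EOF once the client closes the channel,
//	// so EOF is the normal end-of-session signal rather than a failure.
//	if err := handleSessionChannel(channel, input); err != nil && err != io.EOF {
//		log.Println("session error:", err)
//	}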
configuration.go
package configuration

import (
	"gopkg.in/yaml.v2"
	"io/ioutil"
	"time"
)

type ApplicationProperties struct {
	Server struct {
		Port string
		Mode string
	}
	Database struct {
		Server       string
		Port         int
		User         string
		Password     string
		DatabaseName string `yaml:"database-name"`
		Pool         struct {
			MaxConnection     int `yaml:"max-connection"`
			MaxIdleConnection int `yaml:"max-idle-connection"`
		}
	}
	Redis struct {
		Host     string
		Port     int
		Password string
		DB       int
		Pool     struct {
			MaxIdle     int           `yaml:"max-idle"`
			IdleTimeout time.Duration `yaml:"idle-timeout"`
		}
	}
	Rabbit struct {
		Host     string
		Port     int
		Username string
		Password string
	}
}

var Properties *ApplicationProperties

func getProperties() *ApplicationProperties {
	properties := new(ApplicationProperties)
	file, err := ioutil.ReadFile("./configuration/application.yaml")
	if err != nil {
		panic("read application.yaml error!")
	}
	err = yaml.Unmarshal(file, properties)
	if err != nil {
		panic("parse application.yaml error!")
	}
	return properties
}

func init() {
	properties := getProperties()
	Properties = properties
}
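// A matching application.yaml would look roughly like the sketch below. With
// gopkg.in/yaml.v2, untagged fields map to their lowercased names, so only the
// kebab-case keys need explicit tags, and yaml.v2 special-cases time.Duration
// so a value like "240s" parses via time.ParseDuration. All values here are
// illustrative assumptions:
//
//	server:
//	  port: "8080"
//	  mode: debug
//	database:
//	  server: localhost
//	  port: 3306
//	  user: root
//	  password: secret
//	  database-name: app
//	  pool:
//	    max-connection: 10
//	    max-idle-connection: 5
//	redis:
//	  host: localhost
//	  port: 6379
//	  password: ""
//	  db: 0
//	  pool:
//	    max-idle: 10
//	    idle-timeout: 240s
//	rabbit:
//	  host: localhost
//	  port: 5672
//	  username: guest
//	  password: guest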
fake_filtered_factory.go
/*
Copyright 2020 The Knative Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by injection-gen. DO NOT EDIT.

package fakeFilteredFactory

import (
	context "context"

	v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	externalversions "knative.dev/eventing-redis/source/pkg/client/informers/externalversions"
	fake "knative.dev/eventing-redis/source/pkg/client/injection/client/fake"
	filtered "knative.dev/eventing-redis/source/pkg/client/injection/informers/factory/filtered"
	controller "knative.dev/pkg/controller"
	injection "knative.dev/pkg/injection"
	logging "knative.dev/pkg/logging"
)

var Get = filtered.Get

func init() {
	injection.Fake.RegisterInformerFactory(withInformerFactory)
}

func withInformerFactory(ctx context.Context) context.Context {
	c := fake.Get(ctx)
	untyped := ctx.Value(filtered.LabelKey{})
	if untyped == nil {
		logging.FromContext(ctx).Panic(
			"Unable to fetch labelkey from context.")
	}
	labelSelectors := untyped.([]string)
	for _, selector := range labelSelectors {
		opts := []externalversions.SharedInformerOption{}
		if injection.HasNamespaceScope(ctx) {
			opts = append(opts, externalversions.WithNamespace(injection.GetNamespaceScope(ctx)))
		}
		opts = append(opts, externalversions.WithTweakListOptions(func(l *v1.ListOptions) {
			l.LabelSelector = selector
		}))
		ctx = context.WithValue(ctx, filtered.Key{Selector: selector},
			externalversions.NewSharedInformerFactoryWithOptions(c, controller.GetResyncPeriod(ctx), opts...))
	}
	return ctx
}
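// For the factory above to find its selectors, something upstream must have
// stored them under filtered.LabelKey{} before this startup hook runs, e.g.
// (a sketch; the exact setup helper in knative.dev/pkg may differ):
//
//	ctx = context.WithValue(ctx, filtered.LabelKey{}, []string{"app=redis"})
//
// Each selector then gets its own fake SharedInformerFactory, retrievable via
// the filtered.Key{Selector: ...} context key that the loop writes.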
main.py
from inputs import get_gamepad
from asyncio import run
from bleak import BleakClient

from bluetooth_telescope import BluetoothTelescope


async def main():
    bluetoothClient = BleakClient('D8:A9:8B:7E:1E:D2')
    is_connected = await bluetoothClient.connect()
    if not is_connected:
        raise Exception('Device not connected')

    telescope = BluetoothTelescope(
        bluetoothClient,
        '0000ffe1-0000-1000-8000-00805f9b34fb',
        isEquatorial=True,
        lookAt=[[0, 0], [0, 0], [0, 0]],
        destination=None
    )

    maxInt = 2**15  # signed int16

    debug = True
    while True:
        events = get_gamepad()
        if debug:
            print(str(events))
        for event in events:
            if debug:
                print(event.timestamp, event.ev_type, event.code, event.state)
            if event.ev_type == 'Key':
                if debug:
                    print('Key event')
                if event.code == 'BTN_THUMBL':
                    if debug:
                        print('BTN_THUMBL')
                    await telescope.emergencyStop(0)
                elif event.code == 'BTN_THUMBR':
                    if debug:
                        print('BTN_THUMBR')
                    await telescope.emergencyStop(1)
                elif event.code == 'BTN_TL':
                    if debug:
                        print('BTN_TL')
                    await telescope.changeDir(2, 0)
                elif event.code == 'BTN_TR':
                    if debug:
                        print('BTN_TR')
                    await telescope.changeDir(2, 1)
            elif event.ev_type == 'Absolute':
                if debug:
                    print('Absolute event')
                if event.code == 'ABS_X':
                    if debug:
                        print('ABS_X')
                    await telescope.move(0, 0, event.state / maxInt)
                elif event.code == 'ABS_Y':
                    if debug:
                        print('ABS_Y')
                    await telescope.move(0, 1, event.state / maxInt)
                elif event.code == 'ABS_Z':
                    if debug:
                        print('ABS_Z')
                    await telescope.move(2, 0, event.state / 1024)
                elif event.code == 'ABS_RX':
                    if debug:
                        print('ABS_RX')
                    await telescope.move(1, 0, event.state / maxInt)
                elif event.code == 'ABS_RY':
                    if debug:
                        print('ABS_RY')
                    await telescope.move(1, 1, event.state / maxInt)
                elif event.code == 'ABS_RZ':
                    if debug:
                        print('ABS_RZ')
                    await telescope.move(2, 1, event.state / 1024)


if __name__ == "__main__":
    run(main())
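# A note on the divisors above: the sticks report signed 16-bit values, so
# event.state / 2**15 lands in roughly [-1.0, 1.0) (16384 -> 0.5,
# -32768 -> -1.0), while the triggers (ABS_Z / ABS_RZ on this pad) report
# 10-bit values, so event.state / 1024 lands in [0.0, 1.0) (512 -> 0.5).
# The 10-bit trigger width is an assumption about this particular controller,
# not something the inputs library guarantees.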
NavbarDropdownMobile.tsx
import clsx from "clsx";
import React, { useEffect } from "react";
import {
  isSamePath,
  useCollapsible,
  Collapsible,
  useLocalPathname,
} from "@docusaurus/theme-common";
import Link from "@docusaurus/Link";
import NavbarItem from "@theme/NavbarItem";
import type { NavbarLink } from "@theme/hooks/useThemeConfig";
import type { Props } from "@theme/NavbarItem/NavbarDropdownMobile";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";

function isItemActive(item: NavbarLink, localPathname: string): boolean {
  if (isSamePath(item.to, localPathname)) {
    return true;
  }
  // if (
  //   item.activeBaseRegex &&
  //   new RegExp(item.activeBaseRegex).test(localPathname)
  // ) {
  //   return true;
  // }
  // if (item.activeBasePath && localPathname.startsWith(item.activeBasePath)) {
  //   return true;
  // }
  return false;
}

function containsActiveItems(
  items: readonly NavbarLink[],
  localPathname: string
): boolean {
  return items.some((item) => isItemActive(item, localPathname));
}

function NavbarDropdownMobile(props: Props): JSX.Element {
  const { label, icon, className, items } = props;
  const localPathname = useLocalPathname();
  const containsActive = containsActiveItems(items, localPathname);
  const { collapsed, toggleCollapsed, setCollapsed } = useCollapsible({
    initialState: () => !containsActive,
  });

  // Expand/collapse if any item active after a navigation
  useEffect(() => {
    if (containsActive) {
      setCollapsed(!containsActive);
    }
  }, [localPathname, containsActive]);

  return (
    <li className={className}>
      <Link
        role="button"
        className="flex items-center justify-between px-3 py-3 rounded-md hover:bg-light-nonepress-200 dark:hover:bg-dark-nonepress-200 dark:hover:opacity-100 text-base font-medium uppercase"
        onClick={(e) => {
          e.preventDefault();
          toggleCollapsed();
        }}
      >
        <span className="truncate">
          {icon && (
            <FontAwesomeIcon className="mr-2 align-middle" icon={icon} />
          )}
          {label}
        </span>
        <span
          className={clsx("transform ease-in-out duration-100", {
            "-rotate-90": collapsed,
          })}
        >
          <FontAwesomeIcon
            className="transform text-xl"
            icon={["fas", "angle-down"]}
          />
        </span>
      </Link>
      <Collapsible lazy as="ul" className="block ml-4" collapsed={collapsed}>
        {items.map((item, i) => (
          <NavbarItem key={i} item={item} isMobile />
        ))}
      </Collapsible>
    </li>
  );
}

export default NavbarDropdownMobile;
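// An entry that renders through this component might look like the sketch
// below; the shape is inferred from the Props/NavbarLink usage above and is
// an assumption, not the documented theme config:
//
// {
//   label: "Community",
//   icon: ["fas", "users"],
//   className: "navbar-dropdown",
//   items: [
//     { label: "GitHub", to: "/github" },
//     { label: "Discord", to: "/discord" },
//   ],
// }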
operations.rs
#![doc = "generated by AutoRust"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::models; #[derive(Clone)] pub struct Client { endpoint: String, credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>, scopes: Vec<String>, pipeline: azure_core::Pipeline, } #[derive(Clone)] pub struct ClientBuilder { credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>, endpoint: Option<String>, scopes: Option<Vec<String>>, } pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD; impl ClientBuilder { pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self { Self { credential, endpoint: None, scopes: None, } } pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self { self.endpoint = Some(endpoint.into()); self } pub fn scopes(mut self, scopes: &[&str]) -> Self { self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect()); self } pub fn build(self) -> Client { let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned()); let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]); Client::new(endpoint, self.credential, scopes) } } impl Client { pub(crate) fn endpoint(&self) -> &str { self.endpoint.as_str() } pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential { self.credential.as_ref() } pub(crate) fn scopes(&self) -> Vec<&str> { self.scopes.iter().map(String::as_str).collect() } pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> { let mut context = azure_core::Context::default(); let mut request = request.into(); self.pipeline.send(&mut context, &mut request).await } pub fn new( endpoint: impl Into<String>, credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>, scopes: Vec<String>, ) -> Self { let endpoint = endpoint.into(); let pipeline = azure_core::Pipeline::new( option_env!("CARGO_PKG_NAME"), option_env!("CARGO_PKG_VERSION"), azure_core::ClientOptions::default(), Vec::new(), Vec::new(), ); Self { endpoint, credential, scopes, pipeline, } } pub fn aggregated_cost(&self) -> aggregated_cost::Client { aggregated_cost::Client(self.clone()) } pub fn balances(&self) -> balances::Client { balances::Client(self.clone()) } pub fn budgets(&self) -> budgets::Client { budgets::Client(self.clone()) } pub fn charges(&self) -> charges::Client { charges::Client(self.clone()) } pub fn forecasts(&self) -> forecasts::Client { forecasts::Client(self.clone()) } pub fn marketplaces(&self) -> marketplaces::Client { marketplaces::Client(self.clone()) } pub fn operations(&self) -> operations::Client { operations::Client(self.clone()) } pub fn price_sheet(&self) -> price_sheet::Client { price_sheet::Client(self.clone()) } pub fn reservation_recommendations(&self) -> reservation_recommendations::Client { reservation_recommendations::Client(self.clone()) } pub fn reservation_transactions(&self) -> reservation_transactions::Client { reservation_transactions::Client(self.clone()) } pub fn reservations_details(&self) -> reservations_details::Client { reservations_details::Client(self.clone()) } pub fn reservations_summaries(&self) -> reservations_summaries::Client { reservations_summaries::Client(self.clone()) } pub fn tags(&self) -> tags::Client { tags::Client(self.clone()) } pub fn usage_details(&self) -> usage_details::Client { usage_details::Client(self.clone()) } } #[non_exhaustive] #[derive(Debug, thiserror :: Error)] 
#[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] UsageDetails_List(#[from] usage_details::list::Error), #[error(transparent)] Marketplaces_List(#[from] marketplaces::list::Error), #[error(transparent)] Budgets_List(#[from] budgets::list::Error), #[error(transparent)] Budgets_Get(#[from] budgets::get::Error), #[error(transparent)] Budgets_CreateOrUpdate(#[from] budgets::create_or_update::Error), #[error(transparent)] Budgets_Delete(#[from] budgets::delete::Error), #[error(transparent)] Tags_Get(#[from] tags::get::Error), #[error(transparent)] Charges_ListByScope(#[from] charges::list_by_scope::Error), #[error(transparent)] Balances_GetByBillingAccount(#[from] balances::get_by_billing_account::Error), #[error(transparent)] Balances_GetForBillingPeriodByBillingAccount(#[from] balances::get_for_billing_period_by_billing_account::Error), #[error(transparent)] ReservationsSummaries_ListByReservationOrder(#[from] reservations_summaries::list_by_reservation_order::Error), #[error(transparent)] ReservationsSummaries_ListByReservationOrderAndReservation( #[from] reservations_summaries::list_by_reservation_order_and_reservation::Error, ), #[error(transparent)] ReservationsSummaries_ListByBillingAccountId(#[from] reservations_summaries::list_by_billing_account_id::Error), #[error(transparent)] ReservationsDetails_ListByReservationOrder(#[from] reservations_details::list_by_reservation_order::Error), #[error(transparent)] ReservationsDetails_ListByReservationOrderAndReservation( #[from] reservations_details::list_by_reservation_order_and_reservation::Error, ), #[error(transparent)] ReservationsDetails_ListByBillingAccountId(#[from] reservations_details::list_by_billing_account_id::Error), #[error(transparent)] ReservationRecommendations_List(#[from] reservation_recommendations::list::Error), #[error(transparent)] ReservationRecommendations_ListByBillingAccountId(#[from] reservation_recommendations::list_by_billing_account_id::Error), #[error(transparent)] ReservationTransactions_ListByBillingAccountId(#[from] reservation_transactions::list_by_billing_account_id::Error), #[error(transparent)] PriceSheet_Get(#[from] price_sheet::get::Error), #[error(transparent)] PriceSheet_GetByBillingPeriod(#[from] price_sheet::get_by_billing_period::Error), #[error(transparent)] Forecasts_List(#[from] forecasts::list::Error), #[error(transparent)] Operations_List(#[from] operations::list::Error), #[error(transparent)] AggregatedCost_GetByManagementGroup(#[from] aggregated_cost::get_by_management_group::Error), #[error(transparent)] AggregatedCost_GetForBillingPeriodByManagementGroup(#[from] aggregated_cost::get_for_billing_period_by_management_group::Error), } pub mod usage_details { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self, scope: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), scope: scope.into(), expand: None, filter: None, skiptoken: None, top: None, metric: None, } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to 
execute request")]
ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, pub(crate) expand: Option<String>, pub(crate) filter: Option<String>, pub(crate) skiptoken: Option<String>, pub(crate) top: Option<i64>, pub(crate) metric: Option<String>, } impl Builder { pub fn expand(mut self, expand: impl Into<String>) -> Self { self.expand = Some(expand.into()); self } pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn skiptoken(mut self, skiptoken: impl Into<String>) -> Self { self.skiptoken = Some(skiptoken.into()); self } pub fn top(mut self, top: i64) -> Self { self.top = Some(top); self } pub fn metric(mut self, metric: impl Into<String>) -> Self { self.metric = Some(metric.into()); self } pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::UsageDetailsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/{}/providers/Microsoft.Consumption/usageDetails", self.client.endpoint(), &self.scope ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(expand) = &self.expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(skiptoken) = &self.skiptoken { url.query_pairs_mut().append_pair("$skiptoken", skiptoken); } if let Some(top) = &self.top { url.query_pairs_mut().append_pair("$top", &top.to_string()); } if let Some(metric) = &self.metric { url.query_pairs_mut().append_pair("metric", metric); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::UsageDetailsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod marketplaces { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self, scope: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), scope: scope.into(), filter: None, top: None, skiptoken: None, } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { 
status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, pub(crate) filter: Option<String>, pub(crate) top: Option<i64>, pub(crate) skiptoken: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn top(mut self, top: i64) -> Self { self.top = Some(top); self } pub fn skiptoken(mut self, skiptoken: impl Into<String>) -> Self { self.skiptoken = Some(skiptoken.into()); self } pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::MarketplacesListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/{}/providers/Microsoft.Consumption/marketplaces", self.client.endpoint(), &self.scope ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = &self.top { url.query_pairs_mut().append_pair("$top", &top.to_string()); } if let Some(skiptoken) = &self.skiptoken { url.query_pairs_mut().append_pair("$skiptoken", skiptoken); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::MarketplacesListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod budgets { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self, scope: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), scope: scope.into(), } } pub fn get(&self, scope: impl Into<String>, budget_name: impl Into<String>) -> get::Builder { get::Builder { client: self.0.clone(), scope: scope.into(), 
budget_name: budget_name.into(), } } pub fn create_or_update( &self, scope: impl Into<String>, budget_name: impl Into<String>, parameters: impl Into<models::Budget>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), scope: scope.into(), budget_name: budget_name.into(), parameters: parameters.into(), } } pub fn delete(&self, scope: impl Into<String>, budget_name: impl Into<String>) -> delete::Builder { delete::Builder { client: self.0.clone(), scope: scope.into(), budget_name: budget_name.into(), } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BudgetsListResult, Error>> { Box::pin(async move { let url_str = &format!("{}/{}/providers/Microsoft.Consumption/budgets", self.client.endpoint(), &self.scope); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::BudgetsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize 
request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, pub(crate) budget_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Budget, Error>> { Box::pin(async move { let url_str = &format!( "{}/{}/providers/Microsoft.Consumption/budgets/{}", self.client.endpoint(), &self.scope, &self.budget_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Budget = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug)] pub enum Response { Ok200(models::Budget), Created201(models::Budget), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, pub(crate) budget_name: String, pub(crate) parameters: models::Budget, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( 
"{}/{}/providers/Microsoft.Consumption/budgets/{}", self.client.endpoint(), &self.scope, &self.budget_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Budget = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Budget = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, pub(crate) budget_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> { Box::pin(async move { let url_str = &format!( "{}/{}/providers/Microsoft.Consumption/budgets/{}", self.client.endpoint(), &self.scope, &self.budget_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = 
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod tags { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn get(&self, scope: impl Into<String>) -> get::Builder { get::Builder { client: self.0.clone(), scope: scope.into(), } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::TagsResult, Error>> { Box::pin(async move { let url_str = &format!("{}/{}/providers/Microsoft.Consumption/tags", self.client.endpoint(), &self.scope); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::TagsResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, 
rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod charges { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_by_scope(&self, scope: impl Into<String>) -> list_by_scope::Builder { list_by_scope::Builder { client: self.0.clone(), scope: scope.into(), filter: None, } } } pub mod list_by_scope { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) scope: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ChargeSummary, Error>> { Box::pin(async move { let url_str = &format!("{}/{}/providers/Microsoft.Consumption/charges", self.client.endpoint(), &self.scope); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ChargeSummary = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod balances { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn get_by_billing_account(&self, billing_account_id: impl Into<String>) -> get_by_billing_account::Builder { get_by_billing_account::Builder { client: self.0.clone(), billing_account_id: billing_account_id.into(), } } pub fn 
get_for_billing_period_by_billing_account( &self, billing_account_id: impl Into<String>, billing_period_name: impl Into<String>, ) -> get_for_billing_period_by_billing_account::Builder { get_for_billing_period_by_billing_account::Builder { client: self.0.clone(), billing_account_id: billing_account_id.into(), billing_period_name: billing_period_name.into(), } } } pub mod get_by_billing_account { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) billing_account_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Balance, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Billing/billingAccounts/{}/providers/Microsoft.Consumption/balances", self.client.endpoint(), &self.billing_account_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Balance = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get_for_billing_period_by_billing_account { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed 
to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) billing_account_id: String, pub(crate) billing_period_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Balance, Error>> { Box::pin(async move { let url_str = & format ! ("{}/providers/Microsoft.Billing/billingAccounts/{}/providers/Microsoft.Billing/billingPeriods/{}/providers/Microsoft.Consumption/balances" , self . client . endpoint () , & self . billing_account_id , & self . billing_period_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Balance = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod reservations_summaries { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_by_reservation_order( &self, reservation_order_id: impl Into<String>, grain: impl Into<String>, ) -> list_by_reservation_order::Builder { list_by_reservation_order::Builder { client: self.0.clone(), reservation_order_id: reservation_order_id.into(), grain: grain.into(), filter: None, } } pub fn list_by_reservation_order_and_reservation( &self, reservation_order_id: impl Into<String>, reservation_id: impl Into<String>, grain: impl Into<String>, ) -> list_by_reservation_order_and_reservation::Builder { list_by_reservation_order_and_reservation::Builder { client: self.0.clone(), reservation_order_id: reservation_order_id.into(), reservation_id: reservation_id.into(), grain: grain.into(), filter: None, } } pub fn list_by_billing_account_id( &self, billing_account_id: impl Into<String>, grain: impl Into<String>, ) -> list_by_billing_account_id::Builder { list_by_billing_account_id::Builder { client: self.0.clone(), billing_account_id: billing_account_id.into(), grain: grain.into(), filter: None, } } } pub mod list_by_reservation_order { use super::models; #[derive(Debug, thiserror :: 
Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) reservation_order_id: String, pub(crate) grain: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationSummariesListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Capacity/reservationorders/{}/providers/Microsoft.Consumption/reservationSummaries", self.client.endpoint(), &self.reservation_order_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let grain = &self.grain; url.query_pairs_mut().append_pair("grain", grain); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationSummariesListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list_by_reservation_order_and_reservation { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] 
azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) reservation_order_id: String, pub(crate) reservation_id: String, pub(crate) grain: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationSummariesListResult, Error>> { Box::pin(async move { let url_str = & format ! ("{}/providers/Microsoft.Capacity/reservationorders/{}/reservations/{}/providers/Microsoft.Consumption/reservationSummaries" , self . client . endpoint () , & self . reservation_order_id , & self . reservation_id) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let grain = &self.grain; url.query_pairs_mut().append_pair("grain", grain); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationSummariesListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list_by_billing_account_id { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: 
super::super::Client, pub(crate) billing_account_id: String, pub(crate) grain: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationSummariesListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Billing/billingAccounts/{}/providers/Microsoft.Consumption/reservationSummaries", self.client.endpoint(), &self.billing_account_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let grain = &self.grain; url.query_pairs_mut().append_pair("grain", grain); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationSummariesListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod reservations_details { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_by_reservation_order( &self, reservation_order_id: impl Into<String>, filter: impl Into<String>, ) -> list_by_reservation_order::Builder { list_by_reservation_order::Builder { client: self.0.clone(), reservation_order_id: reservation_order_id.into(), filter: filter.into(), } } pub fn list_by_reservation_order_and_reservation( &self, reservation_order_id: impl Into<String>, reservation_id: impl Into<String>, filter: impl Into<String>, ) -> list_by_reservation_order_and_reservation::Builder { list_by_reservation_order_and_reservation::Builder { client: self.0.clone(), reservation_order_id: reservation_order_id.into(), reservation_id: reservation_id.into(), filter: filter.into(), } } pub fn list_by_billing_account_id( &self, billing_account_id: impl Into<String>, filter: impl Into<String>, ) -> list_by_billing_account_id::Builder { list_by_billing_account_id::Builder { client: self.0.clone(), billing_account_id: billing_account_id.into(), filter: filter.into(), } } } pub mod list_by_reservation_order { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse 
request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) reservation_order_id: String, pub(crate) filter: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationDetailsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Capacity/reservationorders/{}/providers/Microsoft.Consumption/reservationDetails", self.client.endpoint(), &self.reservation_order_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let filter = &self.filter; url.query_pairs_mut().append_pair("$filter", filter); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationDetailsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list_by_reservation_order_and_reservation { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) 
reservation_order_id: String, pub(crate) reservation_id: String, pub(crate) filter: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationDetailsListResult, Error>> { Box::pin(async move { let url_str = & format ! ("{}/providers/Microsoft.Capacity/reservationorders/{}/reservations/{}/providers/Microsoft.Consumption/reservationDetails" , self . client . endpoint () , & self . reservation_order_id , & self . reservation_id) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let filter = &self.filter; url.query_pairs_mut().append_pair("$filter", filter); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationDetailsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list_by_billing_account_id { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) billing_account_id: String, pub(crate) filter: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationDetailsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Billing/billingAccounts/{}/providers/Microsoft.Consumption/reservationDetails", self.client.endpoint(), &self.billing_account_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = 
self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let filter = &self.filter; url.query_pairs_mut().append_pair("$filter", filter); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationDetailsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod reservation_recommendations { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), filter: None, } } pub fn list_by_billing_account_id(&self, billing_account_id: impl Into<String>) -> list_by_billing_account_id::Builder { list_by_billing_account_id::Builder { client: self.0.clone(), billing_account_id: billing_account_id.into(), filter: None, } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationRecommendationsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.Consumption/reservationRecommendations", self.client.endpoint(), &self.subscription_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential 
.get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationRecommendationsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list_by_billing_account_id { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) billing_account_id: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationRecommendationsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Billing/billingAccounts/{}/providers/microsoft.consumption/ReservationRecommendations", self.client.endpoint(), &self.billing_account_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = 
self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationRecommendationsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod reservation_transactions { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_by_billing_account_id(&self, billing_account_id: impl Into<String>) -> list_by_billing_account_id::Builder { list_by_billing_account_id::Builder { client: self.0.clone(), billing_account_id: billing_account_id.into(), filter: None, } } } pub mod list_by_billing_account_id { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) billing_account_id: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ReservationTransactionsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Billing/billingAccounts/{}/providers/Microsoft.Consumption/reservationTransactions", self.client.endpoint(), &self.billing_account_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = 
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ReservationTransactionsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod price_sheet { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn get(&self, subscription_id: impl Into<String>) -> get::Builder { get::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), expand: None, skiptoken: None, top: None, } } pub fn get_by_billing_period( &self, subscription_id: impl Into<String>, billing_period_name: impl Into<String>, ) -> get_by_billing_period::Builder { get_by_billing_period::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), billing_period_name: billing_period_name.into(), expand: None, skiptoken: None, top: None, } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) expand: Option<String>, pub(crate) skiptoken: Option<String>, pub(crate) top: Option<i64>, } impl Builder { pub fn expand(mut self, expand: impl Into<String>) -> Self { self.expand = Some(expand.into()); self } pub fn skiptoken(mut self, skiptoken: impl Into<String>) -> Self { self.skiptoken = Some(skiptoken.into()); self } pub fn top(mut self, top: i64) -> Self { self.top = Some(top); self } pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PriceSheetResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.Consumption/pricesheets/default", self.client.endpoint(), &self.subscription_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(expand) = &self.expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(skiptoken) = &self.skiptoken { url.query_pairs_mut().append_pair("$skiptoken", 
skiptoken); } if let Some(top) = &self.top { url.query_pairs_mut().append_pair("$top", &top.to_string()); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::PriceSheetResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get_by_billing_period { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) billing_period_name: String, pub(crate) expand: Option<String>, pub(crate) skiptoken: Option<String>, pub(crate) top: Option<i64>, } impl Builder { pub fn expand(mut self, expand: impl Into<String>) -> Self { self.expand = Some(expand.into()); self } pub fn skiptoken(mut self, skiptoken: impl Into<String>) -> Self { self.skiptoken = Some(skiptoken.into()); self } pub fn top(mut self, top: i64) -> Self { self.top = Some(top); self } pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PriceSheetResult, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/providers/Microsoft.Billing/billingPeriods/{}/providers/Microsoft.Consumption/pricesheets/default" , self . client . endpoint () , & self . subscription_id , & self . 
billing_period_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(expand) = &self.expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(skiptoken) = &self.skiptoken { url.query_pairs_mut().append_pair("$skiptoken", skiptoken); } if let Some(top) = &self.top { url.query_pairs_mut().append_pair("$top", &top.to_string()); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::PriceSheetResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod forecasts { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), filter: None, } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ForecastsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.Consumption/forecasts", self.client.endpoint(), &self.subscription_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); 
let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ForecastsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod operations { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self) -> list::Builder { list::Builder { client: self.0.clone() } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> { Box::pin(async move { let url_str = &format!("{}/providers/Microsoft.Consumption/operations", self.client.endpoint(),); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = 
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::OperationListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod aggregated_cost { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn get_by_management_group(&self, management_group_id: impl Into<String>) -> get_by_management_group::Builder { get_by_management_group::Builder { client: self.0.clone(), management_group_id: management_group_id.into(), filter: None, } } pub fn get_for_billing_period_by_management_group( &self, management_group_id: impl Into<String>, billing_period_name: impl Into<String>, ) -> get_for_billing_period_by_management_group::Builder { get_for_billing_period_by_management_group::Builder { client: self.0.clone(), management_group_id: management_group_id.into(), billing_period_name: billing_period_name.into(), } } } pub mod get_by_management_group { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) management_group_id: String, pub(crate) filter: Option<String>, } impl Builder { pub fn filter(mut self, filter: impl Into<String>) -> Self { self.filter = Some(filter.into()); self } pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagementGroupAggregatedCostResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/providers/Microsoft.Management/managementGroups/{}/providers/Microsoft.Consumption/aggregatedcost", self.client.endpoint(), &self.management_group_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); if let Some(filter) = &self.filter { url.query_pairs_mut().append_pair("$filter", filter); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = 
self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ManagementGroupAggregatedCostResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get_for_billing_period_by_management_group { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::StreamError), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) management_group_id: String, pub(crate) billing_period_name: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagementGroupAggregatedCostResult, Error>> { Box::pin(async move { let url_str = & format ! ("{}/providers/Microsoft.Management/managementGroups/{}/providers/Microsoft.Billing/billingPeriods/{}/Microsoft.Consumption/aggregatedcost" , self . client . endpoint () , & self . management_group_id , & self . 
billing_period_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2019-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ManagementGroupAggregatedCostResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } }
SendRequest(#[source] azure_core::Error), #[error("Failed to get response bytes")]
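// Illustrative usage sketch (not part of the generated crate): how a caller
// might drive one of the fluent builders defined above, here
// `price_sheet::Client::get`. Constructing a configured `Client` (endpoint,
// credential, scopes) happens elsewhere and is assumed; `print_price_sheet`
// is a hypothetical helper name introduced only for this example.
async fn print_price_sheet(
    client: &price_sheet::Client,
    subscription_id: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    // Optional query parameters ($expand, $skiptoken, $top) are set through
    // the builder methods; `into_future()` issues the GET request and
    // deserializes the body into the typed model.
    let _sheet = client
        .get(subscription_id)
        .top(100)
        .into_future()
        .await?;
    Ok(())
}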
admission_test.go
/*
Copyright 2017 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package authsarcheck

import (
	"testing"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/apiserver/pkg/admission"
	"k8s.io/apiserver/pkg/authentication/user"

	authorizationapi "k8s.io/api/authorization/v1"
	kubeinformers "k8s.io/client-go/informers"
	kubeclientset "k8s.io/client-go/kubernetes"
	kubefake "k8s.io/client-go/kubernetes/fake"
	core "k8s.io/client-go/testing"

	"github.com/kubernetes-incubator/service-catalog/pkg/apis/servicecatalog"
	scadmission "github.com/kubernetes-incubator/service-catalog/pkg/apiserver/admission"
)

// newHandlerForTest returns a configured handler for testing.
func newHandlerForTest(kubeClient kubeclientset.Interface) (admission.Interface, kubeinformers.SharedInformerFactory, error) {
	kf := kubeinformers.NewSharedInformerFactory(kubeClient, 5*time.Minute)
	handler, err := NewSARCheck()
	if err != nil
	pluginInitializer := scadmission.NewPluginInitializer(nil, nil, kubeClient, kf)
	pluginInitializer.Initialize(handler)
	err = admission.Validate(handler)
	return handler, kf, err
}

// newMockKubeClientForTest creates a mock kubernetes client that is configured
// to allow any SAR creations.
func newMockKubeClientForTest(userInfo *user.DefaultInfo) *kubefake.Clientset {
	mockClient := &kubefake.Clientset{}
	allowed := true
	if userInfo.GetName() == "system:serviceaccount:test-ns:forbidden" {
		allowed = false
	}
	mockClient.AddReactor("create", "subjectaccessreviews", func(action core.Action) (bool, runtime.Object, error) {
		mysar := &authorizationapi.SubjectAccessReview{
			Status: authorizationapi.SubjectAccessReviewStatus{
				Allowed: allowed,
				Reason:  "seemed friendly enough",
			},
		}
		return true, mysar, nil
	})
	return mockClient
}

// TestAdmissionBroker tests Admit to ensure that the result from the SAR check
// is properly checked.
func TestAdmissionBroker(t *testing.T) {
	// Anonymous struct fields:
	// name: short description of the testing
	// broker: a fake broker object
	// allowed: flag for whether or not the broker should be admitted
	cases := []struct {
		name     string
		broker   *servicecatalog.ClusterServiceBroker
		userInfo *user.DefaultInfo
		allowed  bool
	}{
		{
			name: "broker with no auth",
			broker: &servicecatalog.ClusterServiceBroker{
				ObjectMeta: metav1.ObjectMeta{
					Name: "test-broker",
				},
				Spec: servicecatalog.ClusterServiceBrokerSpec{
					URL: "http://example.com",
				},
			},
			userInfo: &user.DefaultInfo{
				Name:   "system:serviceaccount:test-ns:catalog",
				Groups: []string{"system:serviceaccount", "system:serviceaccounts:test-ns"},
			},
			allowed: true,
		},
		{
			name: "broker with basic auth, user authenticated",
			broker: &servicecatalog.ClusterServiceBroker{
				ObjectMeta: metav1.ObjectMeta{
					Name: "test-broker",
				},
				Spec: servicecatalog.ClusterServiceBrokerSpec{
					URL: "http://example.com",
					AuthInfo: &servicecatalog.ServiceBrokerAuthInfo{
						Basic: &servicecatalog.BasicAuthConfig{
							SecretRef: &servicecatalog.ObjectReference{
								Namespace: "test-ns",
								Name:      "test-secret",
							},
						},
					},
				},
			},
			userInfo: &user.DefaultInfo{
				Name:   "system:serviceaccount:test-ns:catalog",
				Groups: []string{"system:serviceaccount", "system:serviceaccounts:test-ns"},
			},
			allowed: true,
		},
		{
			name: "broker with bearer token, user authenticated",
			broker: &servicecatalog.ClusterServiceBroker{
				ObjectMeta: metav1.ObjectMeta{
					Name: "test-broker",
				},
				Spec: servicecatalog.ClusterServiceBrokerSpec{
					URL: "http://example.com",
					AuthInfo: &servicecatalog.ServiceBrokerAuthInfo{
						Bearer: &servicecatalog.BearerTokenAuthConfig{
							SecretRef: &servicecatalog.ObjectReference{
								Namespace: "test-ns",
								Name:      "test-secret",
							},
						},
					},
				},
			},
			userInfo: &user.DefaultInfo{
				Name:   "system:serviceaccount:test-ns:catalog",
				Groups: []string{"system:serviceaccount", "system:serviceaccounts:test-ns"},
			},
			allowed: true,
		},
		{
			name: "broker with bearer token, unauthenticated user",
			broker: &servicecatalog.ClusterServiceBroker{
				ObjectMeta: metav1.ObjectMeta{
					Name: "test-broker",
				},
				Spec: servicecatalog.ClusterServiceBrokerSpec{
					URL: "http://example.com",
					AuthInfo: &servicecatalog.ServiceBrokerAuthInfo{
						Bearer: &servicecatalog.BearerTokenAuthConfig{
							SecretRef: &servicecatalog.ObjectReference{
								Namespace: "test-ns",
								Name:      "test-secret",
							},
						},
					},
				},
			},
			userInfo: &user.DefaultInfo{
				Name:   "system:serviceaccount:test-ns:forbidden",
				Groups: []string{"system:serviceaccount", "system:serviceaccounts:test-ns"},
			},
			allowed: false,
		},
	}
	for _, tc := range cases {
		mockKubeClient := newMockKubeClientForTest(tc.userInfo)
		handler, kubeInformerFactory, err := newHandlerForTest(mockKubeClient)
		if err != nil {
			t.Errorf("unexpected error initializing handler: %v", err)
		}
		kubeInformerFactory.Start(wait.NeverStop)
		err = handler.Admit(admission.NewAttributesRecord(tc.broker, nil, servicecatalog.Kind("ClusterServiceBroker").WithVersion("version"), tc.broker.Namespace, tc.broker.Name, servicecatalog.Resource("clusterservicebrokers").WithVersion("version"), "", admission.Create, tc.userInfo))
		if err != nil && tc.allowed || err == nil && !tc.allowed {
			t.Errorf("Create test '%s' reports: Unexpected error returned from admission handler: %v", tc.name, err)
		}
	}
}
{ return nil, kf, err }
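// Illustrative sketch (not part of the original test file): the fake
// clientset's reactor can also inspect the incoming SubjectAccessReview
// before answering, which is handy when a test must assert on the exact
// user the admission plugin asked about. `newInspectingKubeClientForTest`
// is a hypothetical helper; the types are those already imported above.
func newInspectingKubeClientForTest(expectedUser string) *kubefake.Clientset {
	mockClient := &kubefake.Clientset{}
	mockClient.AddReactor("create", "subjectaccessreviews", func(action core.Action) (bool, runtime.Object, error) {
		// The create action carries the SAR object built by the plugin.
		sar := action.(core.CreateAction).GetObject().(*authorizationapi.SubjectAccessReview)
		return true, &authorizationapi.SubjectAccessReview{
			Status: authorizationapi.SubjectAccessReviewStatus{
				Allowed: sar.Spec.User == expectedUser,
				Reason:  "user matched the expectation",
			},
		}, nil
	})
	return mockClient
}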
shared_arbor_corner_90_s01.py
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES

from swgpy.object import *

def create(kernel):
	result = Static()

	result.template = "object/static/structure/naboo/shared_arbor_corner_90_s01.iff"
	result.attribute_template_id = -1
	result.stfName("obj_n","unknown_object")

	#### BEGIN MODIFICATIONS ####

	#### END MODIFICATIONS ####

	return result
normality_tests.py
""" Code originates from: https://machinelearningmastery.com/a-gentle-introduction-to-normality-tests-in-python/ """ from scipy.stats import shapiro, normaltest, anderson """ Shapiro-Wilk Test of Normality The Shapiro-Wilk Test is more appropriate for small sample sizes (< 50 samples), but can also handle sample sizes as large as 2000. The Shapiro-Wilk test is used as a numerical means of assessing normality. """ def run_shapiro_wilk_normality_test(data, alpha=0.05, print_results=True): stat, p = shapiro(data) if print_results: print('Statistics=%.3f, p=%.3f' % (stat, p)) if p > alpha: print('Sample looks Gaussian (fail to reject H0) at significance level ', alpha) else: print('Sample does not look Gaussian (reject H0) at significance level ', alpha) return stat, p def run_dagostino_pearson_test(data, alpha, print_results=True): stat, p = normaltest(data) if print_results: print('Statistics=%.3f, p=%.3f' % (stat, p)) if p > alpha: print('Sample looks Gaussian (fail to reject H0) at significance level ', alpha) else: print('Sample does not look Gaussian (reject H0) at significance level ', alpha) return stat, p def run_anderson_darling(data, print_results=True): result = anderson(data) print('Statistic: %.3f' % result.statistic) if print_results: for i in range(len(result.critical_values)): sl, cv = result.significance_level[i], result.critical_values[i] if result.statistic < result.critical_values[i]:
                print('%.3f: %.3f, data looks normal (fail to reject H0)' % (sl, cv))
            else:
                print('%.3f: %.3f, data does not look normal (reject H0)' % (sl, cv))
    return result
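# Example usage (illustrative, not part of the original module): running all
# three tests on synthetic data. numpy is an extra dependency assumed here
# purely to generate a sample.
if __name__ == '__main__':
    import numpy as np

    rng = np.random.default_rng(seed=1)
    sample = rng.normal(loc=50.0, scale=5.0, size=100)

    run_shapiro_wilk_normality_test(sample, alpha=0.05)
    run_dagostino_pearson_test(sample, alpha=0.05)
    run_anderson_darling(sample)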
vendors.min.js
!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,(function(e,t){"use strict";var n=[],i=Object.getPrototypeOf,o=n.slice,r=n.flat?function(e){return n.flat.call(e)}:function(e){return n.concat.apply([],e)},s=n.push,a=n.indexOf,l={},c=l.toString,u=l.hasOwnProperty,p=u.toString,h=p.call(Object),d={},f=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},g=function(e){return null!=e&&e===e.window},y=e.document,m={type:!0,src:!0,nonce:!0,noModule:!0};function v(e,t,n){var i,o,r=(n=n||y).createElement("script");if(r.text=e,t)for(i in m)(o=t[i]||t.getAttribute&&t.getAttribute(i))&&r.setAttribute(i,o);n.head.appendChild(r).parentNode.removeChild(r)}function b(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[c.call(e)]||"object":typeof e}var x="3.6.0",w=function(e,t){return new w.fn.init(e,t)};function _(e){var t=!!e&&"length"in e&&e.length,n=b(e);return!f(e)&&!g(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}w.fn=w.prototype={jquery:x,constructor:w,length:0,toArray:function(){return o.call(this)},get:function(e){return null==e?o.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=w.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return w.each(this,e)},map:function(e){return this.pushStack(w.map(this,(function(t,n){return e.call(t,n,t)})))},slice:function(){return this.pushStack(o.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},even:function(){return this.pushStack(w.grep(this,(function(e,t){return(t+1)%2})))},odd:function(){return this.pushStack(w.grep(this,(function(e,t){return t%2})))},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:s,sort:n.sort,splice:n.splice},w.extend=w.fn.extend=function(){var e,t,n,i,o,r,s=arguments[0]||{},a=1,l=arguments.length,c=!1;for("boolean"==typeof s&&(c=s,s=arguments[a]||{},a++),"object"==typeof s||f(s)||(s={}),a===l&&(s=this,a--);a<l;a++)if(null!=(e=arguments[a]))for(t in e)i=e[t],"__proto__"!==t&&s!==i&&(c&&i&&(w.isPlainObject(i)||(o=Array.isArray(i)))?(n=s[t],r=o&&!Array.isArray(n)?[]:o||w.isPlainObject(n)?n:{},o=!1,s[t]=w.extend(c,r,i)):void 0!==i&&(s[t]=i));return s},w.extend({expando:"jQuery"+(x+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==c.call(e)||(t=i(e))&&("function"!=typeof(n=u.call(t,"constructor")&&t.constructor)||p.call(n)!==h))},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t,n){v(e,{nonce:t&&t.nonce},n)},each:function(e,t){var n,i=0;if(_(e))for(n=e.length;i<n&&!1!==t.call(e[i],i,e[i]);i++);else for(i in e)if(!1===t.call(e[i],i,e[i]))break;return e},makeArray:function(e,t){var n=t||[];return null!=e&&(_(Object(e))?w.merge(n,"string"==typeof e?[e]:e):s.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:a.call(t,e,n)},merge:function(e,t){for(var n=+t.length,i=0,o=e.length;i<n;i++)e[o++]=t[i];return e.length=o,e},grep:function(e,t,n){for(var i=[],o=0,r=e.length,s=!n;o<r;o++)!t(e[o],o)!==s&&i.push(e[o]);return i},map:function(e,t,n){var 
i,o,s=0,a=[];if(_(e))for(i=e.length;s<i;s++)null!=(o=t(e[s],s,n))&&a.push(o);else for(s in e)null!=(o=t(e[s],s,n))&&a.push(o);return r(a)},guid:1,support:d}),"function"==typeof Symbol&&(w.fn[Symbol.iterator]=n[Symbol.iterator]),w.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),(function(e,t){l["[object "+t+"]"]=t.toLowerCase()}));var k=function(e){var t,n,i,o,r,s,a,l,c,u,p,h,d,f,g,y,m,v,b,x="sizzle"+1*new Date,w=e.document,_=0,k=0,E=le(),T=le(),S=le(),A=le(),C=function(e,t){return e===t&&(p=!0),0},L={}.hasOwnProperty,O=[],M=O.pop,j=O.push,N=O.push,D=O.slice,P=function(e,t){for(var n=0,i=e.length;n<i;n++)if(e[n]===t)return n;return-1},I="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",H="[\\x20\\t\\r\\n\\f]",R="(?:\\\\[\\da-fA-F]{1,6}"+H+"?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+",q="\\["+H+"*("+R+")(?:"+H+"*([*^$|!~]?=)"+H+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+R+"))|)"+H+"*\\]",z=":("+R+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+q+")*)|.*)\\)|)",W=new RegExp(H+"+","g"),F=new RegExp("^"+H+"+|((?:^|[^\\\\])(?:\\\\.)*)"+H+"+$","g"),U=new RegExp("^"+H+"*,"+H+"*"),X=new RegExp("^"+H+"*([>+~]|"+H+")"+H+"*"),Y=new RegExp(H+"|>"),B=new RegExp(z),V=new RegExp("^"+R+"$"),$={ID:new RegExp("^#("+R+")"),CLASS:new RegExp("^\\.("+R+")"),TAG:new RegExp("^("+R+"|[*])"),ATTR:new RegExp("^"+q),PSEUDO:new RegExp("^"+z),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+H+"*(even|odd|(([+-]|)(\\d*)n|)"+H+"*(?:([+-]|)"+H+"*(\\d+)|))"+H+"*\\)|)","i"),bool:new RegExp("^(?:"+I+")$","i"),needsContext:new RegExp("^"+H+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+H+"*((?:-\\d)?\\d*)"+H+"*\\)|)(?=[^-]|$)","i")},K=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,G=/^h\d$/i,J=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+H+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},ie=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,oe=function(e,t){return t?"\0"===e?"�":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},re=function(){h()},se=xe((function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()}),{dir:"parentNode",next:"legend"});try{N.apply(O=D.call(w.childNodes),w.childNodes),O[w.childNodes.length].nodeType}catch(t){N={apply:O.length?function(e,t){j.apply(e,D.call(t))}:function(e,t){for(var n=e.length,i=0;e[n++]=t[i++];);e.length=n-1}}}function ae(e,t,i,o){var r,a,c,u,p,f,m,v=t&&t.ownerDocument,w=t?t.nodeType:9;if(i=i||[],"string"!=typeof e||!e||1!==w&&9!==w&&11!==w)return i;if(!o&&(h(t),t=t||d,g)){if(11!==w&&(p=Z.exec(e)))if(r=p[1]){if(9===w){if(!(c=t.getElementById(r)))return i;if(c.id===r)return i.push(c),i}else if(v&&(c=v.getElementById(r))&&b(t,c)&&c.id===r)return i.push(c),i}else{if(p[2])return N.apply(i,t.getElementsByTagName(e)),i;if((r=p[3])&&n.getElementsByClassName&&t.getElementsByClassName)return N.apply(i,t.getElementsByClassName(r)),i}if(n.qsa&&!A[e+" "]&&(!y||!y.test(e))&&(1!==w||"object"!==t.nodeName.toLowerCase())){if(m=e,v=t,1===w&&(Y.test(e)||X.test(e))){for((v=ee.test(e)&&me(t.parentNode)||t)===t&&n.scope||((u=t.getAttribute("id"))?u=u.replace(ie,oe):t.setAttribute("id",u=x)),a=(f=s(e)).length;a--;)f[a]=(u?"#"+u:":scope")+" "+be(f[a]);m=f.join(",")}try{return 
N.apply(i,v.querySelectorAll(m)),i}catch(t){A(e,!0)}finally{u===x&&t.removeAttribute("id")}}}return l(e.replace(F,"$1"),t,i,o)}function le(){var e=[];return function t(n,o){return e.push(n+" ")>i.cacheLength&&delete t[e.shift()],t[n+" "]=o}}function ce(e){return e[x]=!0,e}function ue(e){var t=d.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function pe(e,t){for(var n=e.split("|"),o=n.length;o--;)i.attrHandle[n[o]]=t}function he(e,t){var n=t&&e,i=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(i)return i;if(n)for(;n=n.nextSibling;)if(n===t)return-1;return e?1:-1}function de(e){return function(t){return"input"===t.nodeName.toLowerCase()&&t.type===e}}function fe(e){return function(t){var n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function ge(e){return function(t){return"form"in t?t.parentNode&&!1===t.disabled?"label"in t?"label"in t.parentNode?t.parentNode.disabled===e:t.disabled===e:t.isDisabled===e||t.isDisabled!==!e&&se(t)===e:t.disabled===e:"label"in t&&t.disabled===e}}function ye(e){return ce((function(t){return t=+t,ce((function(n,i){for(var o,r=e([],n.length,t),s=r.length;s--;)n[o=r[s]]&&(n[o]=!(i[o]=n[o]))}))}))}function me(e){return e&&void 0!==e.getElementsByTagName&&e}for(t in n=ae.support={},r=ae.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!K.test(t||n&&n.nodeName||"HTML")},h=ae.setDocument=function(e){var t,o,s=e?e.ownerDocument||e:w;return s!=d&&9===s.nodeType&&s.documentElement&&(f=(d=s).documentElement,g=!r(d),w!=d&&(o=d.defaultView)&&o.top!==o&&(o.addEventListener?o.addEventListener("unload",re,!1):o.attachEvent&&o.attachEvent("onunload",re)),n.scope=ue((function(e){return f.appendChild(e).appendChild(d.createElement("div")),void 0!==e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length})),n.attributes=ue((function(e){return e.className="i",!e.getAttribute("className")})),n.getElementsByTagName=ue((function(e){return e.appendChild(d.createComment("")),!e.getElementsByTagName("*").length})),n.getElementsByClassName=J.test(d.getElementsByClassName),n.getById=ue((function(e){return f.appendChild(e).id=x,!d.getElementsByName||!d.getElementsByName(x).length})),n.getById?(i.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},i.find.ID=function(e,t){if(void 0!==t.getElementById&&g){var n=t.getElementById(e);return n?[n]:[]}}):(i.filter.ID=function(e){var t=e.replace(te,ne);return function(e){var n=void 0!==e.getAttributeNode&&e.getAttributeNode("id");return n&&n.value===t}},i.find.ID=function(e,t){if(void 0!==t.getElementById&&g){var n,i,o,r=t.getElementById(e);if(r){if((n=r.getAttributeNode("id"))&&n.value===e)return[r];for(o=t.getElementsByName(e),i=0;r=o[i++];)if((n=r.getAttributeNode("id"))&&n.value===e)return[r]}return[]}}),i.find.TAG=n.getElementsByTagName?function(e,t){return void 0!==t.getElementsByTagName?t.getElementsByTagName(e):n.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,i=[],o=0,r=t.getElementsByTagName(e);if("*"===e){for(;n=r[o++];)1===n.nodeType&&i.push(n);return i}return r},i.find.CLASS=n.getElementsByClassName&&function(e,t){if(void 0!==t.getElementsByClassName&&g)return t.getElementsByClassName(e)},m=[],y=[],(n.qsa=J.test(d.querySelectorAll))&&(ue((function(e){var t;f.appendChild(e).innerHTML="<a id='"+x+"'></a><select id='"+x+"-\r\\' msallowcapture=''><option 
selected=''></option></select>",e.querySelectorAll("[msallowcapture^='']").length&&y.push("[*^$]="+H+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||y.push("\\["+H+"*(?:value|"+I+")"),e.querySelectorAll("[id~="+x+"-]").length||y.push("~="),(t=d.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||y.push("\\["+H+"*name"+H+"*="+H+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||y.push(":checked"),e.querySelectorAll("a#"+x+"+*").length||y.push(".#.+[+~]"),e.querySelectorAll("\\\f"),y.push("[\\r\\n\\f]")})),ue((function(e){e.innerHTML="<a href='' disabled='disabled'></a><select disabled='disabled'><option/></select>";var t=d.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&y.push("name"+H+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&y.push(":enabled",":disabled"),f.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&y.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),y.push(",.*:")}))),(n.matchesSelector=J.test(v=f.matches||f.webkitMatchesSelector||f.mozMatchesSelector||f.oMatchesSelector||f.msMatchesSelector))&&ue((function(e){n.disconnectedMatch=v.call(e,"*"),v.call(e,"[s!='']:x"),m.push("!=",z)})),y=y.length&&new RegExp(y.join("|")),m=m.length&&new RegExp(m.join("|")),t=J.test(f.compareDocumentPosition),b=t||J.test(f.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,i=t&&t.parentNode;return e===i||!(!i||1!==i.nodeType||!(n.contains?n.contains(i):e.compareDocumentPosition&&16&e.compareDocumentPosition(i)))}:function(e,t){if(t)for(;t=t.parentNode;)if(t===e)return!0;return!1},C=t?function(e,t){if(e===t)return p=!0,0;var i=!e.compareDocumentPosition-!t.compareDocumentPosition;return i||(1&(i=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!n.sortDetached&&t.compareDocumentPosition(e)===i?e==d||e.ownerDocument==w&&b(w,e)?-1:t==d||t.ownerDocument==w&&b(w,t)?1:u?P(u,e)-P(u,t):0:4&i?-1:1)}:function(e,t){if(e===t)return p=!0,0;var n,i=0,o=e.parentNode,r=t.parentNode,s=[e],a=[t];if(!o||!r)return e==d?-1:t==d?1:o?-1:r?1:u?P(u,e)-P(u,t):0;if(o===r)return he(e,t);for(n=e;n=n.parentNode;)s.unshift(n);for(n=t;n=n.parentNode;)a.unshift(n);for(;s[i]===a[i];)i++;return i?he(s[i],a[i]):s[i]==w?-1:a[i]==w?1:0}),d},ae.matches=function(e,t){return ae(e,null,null,t)},ae.matchesSelector=function(e,t){if(h(e),n.matchesSelector&&g&&!A[t+" "]&&(!m||!m.test(t))&&(!y||!y.test(t)))try{var i=v.call(e,t);if(i||n.disconnectedMatch||e.document&&11!==e.document.nodeType)return i}catch(e){A(t,!0)}return 0<ae(t,d,null,[e]).length},ae.contains=function(e,t){return(e.ownerDocument||e)!=d&&h(e),b(e,t)},ae.attr=function(e,t){(e.ownerDocument||e)!=d&&h(e);var o=i.attrHandle[t.toLowerCase()],r=o&&L.call(i.attrHandle,t.toLowerCase())?o(e,t,!g):void 0;return void 0!==r?r:n.attributes||!g?e.getAttribute(t):(r=e.getAttributeNode(t))&&r.specified?r.value:null},ae.escape=function(e){return(e+"").replace(ie,oe)},ae.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},ae.uniqueSort=function(e){var t,i=[],o=0,r=0;if(p=!n.detectDuplicates,u=!n.sortStable&&e.slice(0),e.sort(C),p){for(;t=e[r++];)t===e[r]&&(o=i.push(r));for(;o--;)e.splice(i[o],1)}return u=null,e},o=ae.getText=function(e){var t,n="",i=0,r=e.nodeType;if(r){if(1===r||9===r||11===r){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=o(e)}else if(3===r||4===r)return e.nodeValue}else 
for(;t=e[i++];)n+=o(t);return n},(i=ae.selectors={cacheLength:50,createPseudo:ce,match:$,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||ae.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&ae.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return $.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&B.test(n)&&(t=s(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=E[e+" "];return t||(t=new RegExp("(^|"+H+")"+e+"("+H+"|$)"))&&E(e,(function(e){return t.test("string"==typeof e.className&&e.className||void 0!==e.getAttribute&&e.getAttribute("class")||"")}))},ATTR:function(e,t,n){return function(i){var o=ae.attr(i,e);return null==o?"!="===t:!t||(o+="","="===t?o===n:"!="===t?o!==n:"^="===t?n&&0===o.indexOf(n):"*="===t?n&&-1<o.indexOf(n):"$="===t?n&&o.slice(-n.length)===n:"~="===t?-1<(" "+o.replace(W," ")+" ").indexOf(n):"|="===t&&(o===n||o.slice(0,n.length+1)===n+"-"))}},CHILD:function(e,t,n,i,o){var r="nth"!==e.slice(0,3),s="last"!==e.slice(-4),a="of-type"===t;return 1===i&&0===o?function(e){return!!e.parentNode}:function(t,n,l){var c,u,p,h,d,f,g=r!==s?"nextSibling":"previousSibling",y=t.parentNode,m=a&&t.nodeName.toLowerCase(),v=!l&&!a,b=!1;if(y){if(r){for(;g;){for(h=t;h=h[g];)if(a?h.nodeName.toLowerCase()===m:1===h.nodeType)return!1;f=g="only"===e&&!f&&"nextSibling"}return!0}if(f=[s?y.firstChild:y.lastChild],s&&v){for(b=(d=(c=(u=(p=(h=y)[x]||(h[x]={}))[h.uniqueID]||(p[h.uniqueID]={}))[e]||[])[0]===_&&c[1])&&c[2],h=d&&y.childNodes[d];h=++d&&h&&h[g]||(b=d=0)||f.pop();)if(1===h.nodeType&&++b&&h===t){u[e]=[_,d,b];break}}else if(v&&(b=d=(c=(u=(p=(h=t)[x]||(h[x]={}))[h.uniqueID]||(p[h.uniqueID]={}))[e]||[])[0]===_&&c[1]),!1===b)for(;(h=++d&&h&&h[g]||(b=d=0)||f.pop())&&((a?h.nodeName.toLowerCase()!==m:1!==h.nodeType)||!++b||(v&&((u=(p=h[x]||(h[x]={}))[h.uniqueID]||(p[h.uniqueID]={}))[e]=[_,b]),h!==t)););return(b-=o)===i||b%i==0&&0<=b/i}}},PSEUDO:function(e,t){var n,o=i.pseudos[e]||i.setFilters[e.toLowerCase()]||ae.error("unsupported pseudo: "+e);return o[x]?o(t):1<o.length?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?ce((function(e,n){for(var i,r=o(e,t),s=r.length;s--;)e[i=P(e,r[s])]=!(n[i]=r[s])})):function(e){return o(e,0,n)}):o}},pseudos:{not:ce((function(e){var t=[],n=[],i=a(e.replace(F,"$1"));return i[x]?ce((function(e,t,n,o){for(var r,s=i(e,null,o,[]),a=e.length;a--;)(r=s[a])&&(e[a]=!(t[a]=r))})):function(e,o,r){return t[0]=e,i(t,null,r,n),t[0]=null,!n.pop()}})),has:ce((function(e){return function(t){return 0<ae(e,t).length}})),contains:ce((function(e){return e=e.replace(te,ne),function(t){return-1<(t.textContent||o(t)).indexOf(e)}})),lang:ce((function(e){return V.test(e||"")||ae.error("unsupported lang: "+e),e=e.replace(te,ne).toLowerCase(),function(t){var n;do{if(n=g?t.lang:t.getAttribute("xml:lang")||t.getAttribute("lang"))return(n=n.toLowerCase())===e||0===n.indexOf(e+"-")}while((t=t.parentNode)&&1===t.nodeType);return!1}})),target:function(t){var 
n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===f},focus:function(e){return e===d.activeElement&&(!d.hasFocus||d.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:ge(!1),disabled:ge(!0),checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!i.pseudos.empty(e)},header:function(e){return G.test(e.nodeName)},input:function(e){return Q.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:ye((function(){return[0]})),last:ye((function(e,t){return[t-1]})),eq:ye((function(e,t,n){return[n<0?n+t:n]})),even:ye((function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e})),odd:ye((function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e})),lt:ye((function(e,t,n){for(var i=n<0?n+t:t<n?t:n;0<=--i;)e.push(i);return e})),gt:ye((function(e,t,n){for(var i=n<0?n+t:n;++i<t;)e.push(i);return e}))}}).pseudos.nth=i.pseudos.eq,{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})i.pseudos[t]=de(t);for(t in{submit:!0,reset:!0})i.pseudos[t]=fe(t);function ve(){}function be(e){for(var t=0,n=e.length,i="";t<n;t++)i+=e[t].value;return i}function xe(e,t,n){var i=t.dir,o=t.next,r=o||i,s=n&&"parentNode"===r,a=k++;return t.first?function(t,n,o){for(;t=t[i];)if(1===t.nodeType||s)return e(t,n,o);return!1}:function(t,n,l){var c,u,p,h=[_,a];if(l){for(;t=t[i];)if((1===t.nodeType||s)&&e(t,n,l))return!0}else for(;t=t[i];)if(1===t.nodeType||s)if(u=(p=t[x]||(t[x]={}))[t.uniqueID]||(p[t.uniqueID]={}),o&&o===t.nodeName.toLowerCase())t=t[i]||t;else{if((c=u[r])&&c[0]===_&&c[1]===a)return h[2]=c[2];if((u[r]=h)[2]=e(t,n,l))return!0}return!1}}function we(e){return 1<e.length?function(t,n,i){for(var o=e.length;o--;)if(!e[o](t,n,i))return!1;return!0}:e[0]}function _e(e,t,n,i,o){for(var r,s=[],a=0,l=e.length,c=null!=t;a<l;a++)(r=e[a])&&(n&&!n(r,i,o)||(s.push(r),c&&t.push(a)));return s}function ke(e,t,n,i,o,r){return i&&!i[x]&&(i=ke(i)),o&&!o[x]&&(o=ke(o,r)),ce((function(r,s,a,l){var c,u,p,h=[],d=[],f=s.length,g=r||function(e,t,n){for(var i=0,o=t.length;i<o;i++)ae(e,t[i],n);return n}(t||"*",a.nodeType?[a]:a,[]),y=!e||!r&&t?g:_e(g,h,e,a,l),m=n?o||(r?e:f||i)?[]:s:y;if(n&&n(y,m,a,l),i)for(c=_e(m,d),i(c,[],a,l),u=c.length;u--;)(p=c[u])&&(m[d[u]]=!(y[d[u]]=p));if(r){if(o||e){if(o){for(c=[],u=m.length;u--;)(p=m[u])&&c.push(y[u]=p);o(null,m=[],c,l)}for(u=m.length;u--;)(p=m[u])&&-1<(c=o?P(r,p):h[u])&&(r[c]=!(s[c]=p))}}else m=_e(m===s?m.splice(f,m.length):m),o?o(null,s,m,l):N.apply(s,m)}))}function Ee(e){for(var t,n,o,r=e.length,s=i.relative[e[0].type],a=s||i.relative[" "],l=s?1:0,u=xe((function(e){return e===t}),a,!0),p=xe((function(e){return-1<P(t,e)}),a,!0),h=[function(e,n,i){var o=!s&&(i||n!==c)||((t=n).nodeType?u(e,n,i):p(e,n,i));return t=null,o}];l<r;l++)if(n=i.relative[e[l].type])h=[xe(we(h),n)];else{if((n=i.filter[e[l].type].apply(null,e[l].matches))[x]){for(o=++l;o<r&&!i.relative[e[o].type];o++);return ke(1<l&&we(h),1<l&&be(e.slice(0,l-1).concat({value:" "===e[l-2].type?"*":""})).replace(F,"$1"),n,l<o&&Ee(e.slice(l,o)),o<r&&Ee(e=e.slice(o)),o<r&&be(e))}h.push(n)}return we(h)}return ve.prototype=i.filters=i.pseudos,i.setFilters=new 
ve,s=ae.tokenize=function(e,t){var n,o,r,s,a,l,c,u=T[e+" "];if(u)return t?0:u.slice(0);for(a=e,l=[],c=i.preFilter;a;){for(s in n&&!(o=U.exec(a))||(o&&(a=a.slice(o[0].length)||a),l.push(r=[])),n=!1,(o=X.exec(a))&&(n=o.shift(),r.push({value:n,type:o[0].replace(F," ")}),a=a.slice(n.length)),i.filter)!(o=$[s].exec(a))||c[s]&&!(o=c[s](o))||(n=o.shift(),r.push({value:n,type:s,matches:o}),a=a.slice(n.length));if(!n)break}return t?a.length:a?ae.error(e):T(e,l).slice(0)},a=ae.compile=function(e,t){var n,o,r,a,l,u,p=[],f=[],y=S[e+" "];if(!y){for(t||(t=s(e)),n=t.length;n--;)(y=Ee(t[n]))[x]?p.push(y):f.push(y);(y=S(e,(o=f,a=0<(r=p).length,l=0<o.length,u=function(e,t,n,s,u){var p,f,y,m=0,v="0",b=e&&[],x=[],w=c,k=e||l&&i.find.TAG("*",u),E=_+=null==w?1:Math.random()||.1,T=k.length;for(u&&(c=t==d||t||u);v!==T&&null!=(p=k[v]);v++){if(l&&p){for(f=0,t||p.ownerDocument==d||(h(p),n=!g);y=o[f++];)if(y(p,t||d,n)){s.push(p);break}u&&(_=E)}a&&((p=!y&&p)&&m--,e&&b.push(p))}if(m+=v,a&&v!==m){for(f=0;y=r[f++];)y(b,x,t,n);if(e){if(0<m)for(;v--;)b[v]||x[v]||(x[v]=M.call(s));x=_e(x)}N.apply(s,x),u&&!e&&0<x.length&&1<m+r.length&&ae.uniqueSort(s)}return u&&(_=E,c=w),b},a?ce(u):u))).selector=e}return y},l=ae.select=function(e,t,n,o){var r,l,c,u,p,h="function"==typeof e&&e,d=!o&&s(e=h.selector||e);if(n=n||[],1===d.length){if(2<(l=d[0]=d[0].slice(0)).length&&"ID"===(c=l[0]).type&&9===t.nodeType&&g&&i.relative[l[1].type]){if(!(t=(i.find.ID(c.matches[0].replace(te,ne),t)||[])[0]))return n;h&&(t=t.parentNode),e=e.slice(l.shift().value.length)}for(r=$.needsContext.test(e)?0:l.length;r--&&(c=l[r],!i.relative[u=c.type]);)if((p=i.find[u])&&(o=p(c.matches[0].replace(te,ne),ee.test(l[0].type)&&me(t.parentNode)||t))){if(l.splice(r,1),!(e=o.length&&be(l)))return N.apply(n,o),n;break}}return(h||a(e,d))(o,t,!g,n,!t||ee.test(e)&&me(t.parentNode)||t),n},n.sortStable=x.split("").sort(C).join("")===x,n.detectDuplicates=!!p,h(),n.sortDetached=ue((function(e){return 1&e.compareDocumentPosition(d.createElement("fieldset"))})),ue((function(e){return e.innerHTML="<a href='#'></a>","#"===e.firstChild.getAttribute("href")}))||pe("type|href|height|width",(function(e,t,n){if(!n)return e.getAttribute(t,"type"===t.toLowerCase()?1:2)})),n.attributes&&ue((function(e){return e.innerHTML="<input/>",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")}))||pe("value",(function(e,t,n){if(!n&&"input"===e.nodeName.toLowerCase())return e.defaultValue})),ue((function(e){return null==e.getAttribute("disabled")}))||pe(I,(function(e,t,n){var i;if(!n)return!0===e[t]?t.toLowerCase():(i=e.getAttributeNode(t))&&i.specified?i.value:null})),ae}(e);w.find=k,w.expr=k.selectors,w.expr[":"]=w.expr.pseudos,w.uniqueSort=w.unique=k.uniqueSort,w.text=k.getText,w.isXMLDoc=k.isXML,w.contains=k.contains,w.escapeSelector=k.escape;var E=function(e,t,n){for(var i=[],o=void 0!==n;(e=e[t])&&9!==e.nodeType;)if(1===e.nodeType){if(o&&w(e).is(n))break;i.push(e)}return i},T=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},S=w.expr.match.needsContext;function A(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}var C=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function L(e,t,n){return f(t)?w.grep(e,(function(e,i){return!!t.call(e,i,e)!==n})):t.nodeType?w.grep(e,(function(e){return e===t!==n})):"string"!=typeof t?w.grep(e,(function(e){return-1<a.call(t,e)!==n})):w.filter(t,e,n)}w.filter=function(e,t,n){var i=t[0];return 
n&&(e=":not("+e+")"),1===t.length&&1===i.nodeType?w.find.matchesSelector(i,e)?[i]:[]:w.find.matches(e,w.grep(t,(function(e){return 1===e.nodeType})))},w.fn.extend({find:function(e){var t,n,i=this.length,o=this;if("string"!=typeof e)return this.pushStack(w(e).filter((function(){for(t=0;t<i;t++)if(w.contains(o[t],this))return!0})));for(n=this.pushStack([]),t=0;t<i;t++)w.find(e,o[t],n);return 1<i?w.uniqueSort(n):n},filter:function(e){return this.pushStack(L(this,e||[],!1))},not:function(e){return this.pushStack(L(this,e||[],!0))},is:function(e){return!!L(this,"string"==typeof e&&S.test(e)?w(e):e||[],!1).length}});var O,M=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(w.fn.init=function(e,t,n){var i,o;if(!e)return this;if(n=n||O,"string"==typeof e){if(!(i="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:M.exec(e))||!i[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(i[1]){if(t=t instanceof w?t[0]:t,w.merge(this,w.parseHTML(i[1],t&&t.nodeType?t.ownerDocument||t:y,!0)),C.test(i[1])&&w.isPlainObject(t))for(i in t)f(this[i])?this[i](t[i]):this.attr(i,t[i]);return this}return(o=y.getElementById(i[2]))&&(this[0]=o,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):f(e)?void 0!==n.ready?n.ready(e):e(w):w.makeArray(e,this)}).prototype=w.fn,O=w(y);var j=/^(?:parents|prev(?:Until|All))/,N={children:!0,contents:!0,next:!0,prev:!0};function D(e,t){for(;(e=e[t])&&1!==e.nodeType;);return e}w.fn.extend({has:function(e){var t=w(e,this),n=t.length;return this.filter((function(){for(var e=0;e<n;e++)if(w.contains(this,t[e]))return!0}))},closest:function(e,t){var n,i=0,o=this.length,r=[],s="string"!=typeof e&&w(e);if(!S.test(e))for(;i<o;i++)for(n=this[i];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(s?-1<s.index(n):1===n.nodeType&&w.find.matchesSelector(n,e))){r.push(n);break}return this.pushStack(1<r.length?w.uniqueSort(r):r)},index:function(e){return e?"string"==typeof e?a.call(w(e),this[0]):a.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(w.uniqueSort(w.merge(this.get(),w(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),w.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return E(e,"parentNode")},parentsUntil:function(e,t,n){return E(e,"parentNode",n)},next:function(e){return D(e,"nextSibling")},prev:function(e){return D(e,"previousSibling")},nextAll:function(e){return E(e,"nextSibling")},prevAll:function(e){return E(e,"previousSibling")},nextUntil:function(e,t,n){return E(e,"nextSibling",n)},prevUntil:function(e,t,n){return E(e,"previousSibling",n)},siblings:function(e){return T((e.parentNode||{}).firstChild,e)},children:function(e){return T(e.firstChild)},contents:function(e){return null!=e.contentDocument&&i(e.contentDocument)?e.contentDocument:(A(e,"template")&&(e=e.content||e),w.merge([],e.childNodes))}},(function(e,t){w.fn[e]=function(n,i){var o=w.map(this,t,n);return"Until"!==e.slice(-5)&&(i=n),i&&"string"==typeof i&&(o=w.filter(i,o)),1<this.length&&(N[e]||w.uniqueSort(o),j.test(e)&&o.reverse()),this.pushStack(o)}}));var P=/[^\x20\t\r\n\f]+/g;function I(e){return e}function H(e){throw e}function R(e,t,n,i){var o;try{e&&f(o=e.promise)?o.call(e).done(t).fail(n):e&&f(o=e.then)?o.call(e,t,n):t.apply(void 0,[e].slice(i))}catch(e){n.apply(void 0,[e])}}w.Callbacks=function(e){var t,n;e="string"==typeof e?(t=e,n={},w.each(t.match(P)||[],(function(e,t){n[t]=!0})),n):w.extend({},e);var 
i,o,r,s,a=[],l=[],c=-1,u=function(){for(s=s||e.once,r=i=!0;l.length;c=-1)for(o=l.shift();++c<a.length;)!1===a[c].apply(o[0],o[1])&&e.stopOnFalse&&(c=a.length,o=!1);e.memory||(o=!1),i=!1,s&&(a=o?[]:"")},p={add:function(){return a&&(o&&!i&&(c=a.length-1,l.push(o)),function t(n){w.each(n,(function(n,i){f(i)?e.unique&&p.has(i)||a.push(i):i&&i.length&&"string"!==b(i)&&t(i)}))}(arguments),o&&!i&&u()),this},remove:function(){return w.each(arguments,(function(e,t){for(var n;-1<(n=w.inArray(t,a,n));)a.splice(n,1),n<=c&&c--})),this},has:function(e){return e?-1<w.inArray(e,a):0<a.length},empty:function(){return a&&(a=[]),this},disable:function(){return s=l=[],a=o="",this},disabled:function(){return!a},lock:function(){return s=l=[],o||i||(a=o=""),this},locked:function(){return!!s},fireWith:function(e,t){return s||(t=[e,(t=t||[]).slice?t.slice():t],l.push(t),i||u()),this},fire:function(){return p.fireWith(this,arguments),this},fired:function(){return!!r}};return p},w.extend({Deferred:function(t){var n=[["notify","progress",w.Callbacks("memory"),w.Callbacks("memory"),2],["resolve","done",w.Callbacks("once memory"),w.Callbacks("once memory"),0,"resolved"],["reject","fail",w.Callbacks("once memory"),w.Callbacks("once memory"),1,"rejected"]],i="pending",o={state:function(){return i},always:function(){return r.done(arguments).fail(arguments),this},catch:function(e){return o.then(null,e)},pipe:function(){var e=arguments;return w.Deferred((function(t){w.each(n,(function(n,i){var o=f(e[i[4]])&&e[i[4]];r[i[1]]((function(){var e=o&&o.apply(this,arguments);e&&f(e.promise)?e.promise().progress(t.notify).done(t.resolve).fail(t.reject):t[i[0]+"With"](this,o?[e]:arguments)}))})),e=null})).promise()},then:function(t,i,o){var r=0;function s(t,n,i,o){return function(){var a=this,l=arguments,c=function(){var e,c;if(!(t<r)){if((e=i.apply(a,l))===n.promise())throw new TypeError("Thenable self-resolution");c=e&&("object"==typeof e||"function"==typeof e)&&e.then,f(c)?o?c.call(e,s(r,n,I,o),s(r,n,H,o)):(r++,c.call(e,s(r,n,I,o),s(r,n,H,o),s(r,n,I,n.notifyWith))):(i!==I&&(a=void 0,l=[e]),(o||n.resolveWith)(a,l))}},u=o?c:function(){try{c()}catch(e){w.Deferred.exceptionHook&&w.Deferred.exceptionHook(e,u.stackTrace),r<=t+1&&(i!==H&&(a=void 0,l=[e]),n.rejectWith(a,l))}};t?u():(w.Deferred.getStackHook&&(u.stackTrace=w.Deferred.getStackHook()),e.setTimeout(u))}}return w.Deferred((function(e){n[0][3].add(s(0,e,f(o)?o:I,e.notifyWith)),n[1][3].add(s(0,e,f(t)?t:I)),n[2][3].add(s(0,e,f(i)?i:H))})).promise()},promise:function(e){return null!=e?w.extend(e,o):o}},r={};return w.each(n,(function(e,t){var s=t[2],a=t[5];o[t[1]]=s.add,a&&s.add((function(){i=a}),n[3-e][2].disable,n[3-e][3].disable,n[0][2].lock,n[0][3].lock),s.add(t[3].fire),r[t[0]]=function(){return r[t[0]+"With"](this===r?void 0:this,arguments),this},r[t[0]+"With"]=s.fireWith})),o.promise(r),t&&t.call(r,r),r},when:function(e){var t=arguments.length,n=t,i=Array(n),r=o.call(arguments),s=w.Deferred(),a=function(e){return function(n){i[e]=this,r[e]=1<arguments.length?o.call(arguments):n,--t||s.resolveWith(i,r)}};if(t<=1&&(R(e,s.done(a(n)).resolve,s.reject,!t),"pending"===s.state()||f(r[n]&&r[n].then)))return s.then();for(;n--;)R(r[n],a(n),s.reject);return s.promise()}});var q=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;w.Deferred.exceptionHook=function(t,n){e.console&&e.console.warn&&t&&q.test(t.name)&&e.console.warn("jQuery.Deferred exception: "+t.message,t.stack,n)},w.readyException=function(t){e.setTimeout((function(){throw t}))};var z=w.Deferred();function 
W(){y.removeEventListener("DOMContentLoaded",W),e.removeEventListener("load",W),w.ready()}w.fn.ready=function(e){return z.then(e).catch((function(e){w.readyException(e)})),this},w.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--w.readyWait:w.isReady)||(w.isReady=!0)!==e&&0<--w.readyWait||z.resolveWith(y,[w])}}),w.ready.then=z.then,"complete"===y.readyState||"loading"!==y.readyState&&!y.documentElement.doScroll?e.setTimeout(w.ready):(y.addEventListener("DOMContentLoaded",W),e.addEventListener("load",W));var F=function(e,t,n,i,o,r,s){var a=0,l=e.length,c=null==n;if("object"===b(n))for(a in o=!0,n)F(e,t,a,n[a],!0,r,s);else if(void 0!==i&&(o=!0,f(i)||(s=!0),c&&(s?(t.call(e,i),t=null):(c=t,t=function(e,t,n){return c.call(w(e),n)})),t))for(;a<l;a++)t(e[a],n,s?i:i.call(e[a],a,t(e[a],n)));return o?e:c?t.call(e):l?t(e[0],n):r},U=/^-ms-/,X=/-([a-z])/g;function Y(e,t){return t.toUpperCase()}function B(e){return e.replace(U,"ms-").replace(X,Y)}var V=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function $(){this.expando=w.expando+$.uid++}$.uid=1,$.prototype={cache:function(e){var t=e[this.expando];return t||(t={},V(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var i,o=this.cache(e);if("string"==typeof t)o[B(t)]=n;else for(i in t)o[B(i)]=t[i];return o},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][B(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,i=e[this.expando];if(void 0!==i){if(void 0!==t){n=(t=Array.isArray(t)?t.map(B):(t=B(t))in i?[t]:t.match(P)||[]).length;for(;n--;)delete i[t[n]]}(void 0===t||w.isEmptyObject(i))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!w.isEmptyObject(t)}};var K=new $,Q=new $,G=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,J=/[A-Z]/g;function Z(e,t,n){var i,o;if(void 0===n&&1===e.nodeType)if(i="data-"+t.replace(J,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(i))){try{n="true"===(o=n)||"false"!==o&&("null"===o?null:o===+o+""?+o:G.test(o)?JSON.parse(o):o)}catch(e){}Q.set(e,t,n)}else n=void 0;return n}w.extend({hasData:function(e){return Q.hasData(e)||K.hasData(e)},data:function(e,t,n){return Q.access(e,t,n)},removeData:function(e,t){Q.remove(e,t)},_data:function(e,t,n){return K.access(e,t,n)},_removeData:function(e,t){K.remove(e,t)}}),w.fn.extend({data:function(e,t){var n,i,o,r=this[0],s=r&&r.attributes;if(void 0===e){if(this.length&&(o=Q.get(r),1===r.nodeType&&!K.get(r,"hasDataAttrs"))){for(n=s.length;n--;)s[n]&&0===(i=s[n].name).indexOf("data-")&&(i=B(i.slice(5)),Z(r,i,o[i]));K.set(r,"hasDataAttrs",!0)}return o}return"object"==typeof e?this.each((function(){Q.set(this,e)})):F(this,(function(t){var n;if(r&&void 0===t)return void 0!==(n=Q.get(r,e))||void 0!==(n=Z(r,e))?n:void 0;this.each((function(){Q.set(this,e,t)}))}),null,t,1<arguments.length,null,!0)},removeData:function(e){return this.each((function(){Q.remove(this,e)}))}}),w.extend({queue:function(e,t,n){var i;if(e)return t=(t||"fx")+"queue",i=K.get(e,t),n&&(!i||Array.isArray(n)?i=K.access(e,t,w.makeArray(n)):i.push(n)),i||[]},dequeue:function(e,t){t=t||"fx";var n=w.queue(e,t),i=n.length,o=n.shift(),r=w._queueHooks(e,t);"inprogress"===o&&(o=n.shift(),i--),o&&("fx"===t&&n.unshift("inprogress"),delete r.stop,o.call(e,(function(){w.dequeue(e,t)}),r)),!i&&r&&r.empty.fire()},_queueHooks:function(e,t){var 
n=t+"queueHooks";return K.get(e,n)||K.access(e,n,{empty:w.Callbacks("once memory").add((function(){K.remove(e,[t+"queue",n])}))})}}),w.fn.extend({queue:function(e,t){var n=2;return"string"!=typeof e&&(t=e,e="fx",n--),arguments.length<n?w.queue(this[0],e):void 0===t?this:this.each((function(){var n=w.queue(this,e,t);w._queueHooks(this,e),"fx"===e&&"inprogress"!==n[0]&&w.dequeue(this,e)}))},dequeue:function(e){return this.each((function(){w.dequeue(this,e)}))},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,i=1,o=w.Deferred(),r=this,s=this.length,a=function(){--i||o.resolveWith(r,[r])};for("string"!=typeof e&&(t=e,e=void 0),e=e||"fx";s--;)(n=K.get(r[s],e+"queueHooks"))&&n.empty&&(i++,n.empty.add(a));return a(),o.promise(t)}});var ee=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,te=new RegExp("^(?:([+-])=|)("+ee+")([a-z%]*)$","i"),ne=["Top","Right","Bottom","Left"],ie=y.documentElement,oe=function(e){return w.contains(e.ownerDocument,e)},re={composed:!0};ie.getRootNode&&(oe=function(e){return w.contains(e.ownerDocument,e)||e.getRootNode(re)===e.ownerDocument});var se=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&oe(e)&&"none"===w.css(e,"display")};function ae(e,t,n,i){var o,r,s=20,a=i?function(){return i.cur()}:function(){return w.css(e,t,"")},l=a(),c=n&&n[3]||(w.cssNumber[t]?"":"px"),u=e.nodeType&&(w.cssNumber[t]||"px"!==c&&+l)&&te.exec(w.css(e,t));if(u&&u[3]!==c){for(l/=2,c=c||u[3],u=+l||1;s--;)w.style(e,t,u+c),(1-r)*(1-(r=a()/l||.5))<=0&&(s=0),u/=r;u*=2,w.style(e,t,u+c),n=n||[]}return n&&(u=+u||+l||0,o=n[1]?u+(n[1]+1)*n[2]:+n[2],i&&(i.unit=c,i.start=u,i.end=o)),o}var le={};function ce(e,t){for(var n,i,o,r,s,a,l,c=[],u=0,p=e.length;u<p;u++)(i=e[u]).style&&(n=i.style.display,t?("none"===n&&(c[u]=K.get(i,"display")||null,c[u]||(i.style.display="")),""===i.style.display&&se(i)&&(c[u]=(l=s=r=void 0,s=(o=i).ownerDocument,a=o.nodeName,(l=le[a])||(r=s.body.appendChild(s.createElement(a)),l=w.css(r,"display"),r.parentNode.removeChild(r),"none"===l&&(l="block"),le[a]=l)))):"none"!==n&&(c[u]="none",K.set(i,"display",n)));for(u=0;u<p;u++)null!=c[u]&&(e[u].style.display=c[u]);return e}w.fn.extend({show:function(){return ce(this,!0)},hide:function(){return ce(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each((function(){se(this)?w(this).show():w(this).hide()}))}});var ue,pe,he=/^(?:checkbox|radio)$/i,de=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i,fe=/^$|^module$|\/(?:java|ecma)script/i;ue=y.createDocumentFragment().appendChild(y.createElement("div")),(pe=y.createElement("input")).setAttribute("type","radio"),pe.setAttribute("checked","checked"),pe.setAttribute("name","t"),ue.appendChild(pe),d.checkClone=ue.cloneNode(!0).cloneNode(!0).lastChild.checked,ue.innerHTML="<textarea>x</textarea>",d.noCloneChecked=!!ue.cloneNode(!0).lastChild.defaultValue,ue.innerHTML="<option></option>",d.option=!!ue.lastChild;var ge={thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};function ye(e,t){var n;return n=void 0!==e.getElementsByTagName?e.getElementsByTagName(t||"*"):void 0!==e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?w.merge([e],n):n}function me(e,t){for(var n=0,i=e.length;n<i;n++)K.set(e[n],"globalEval",!t||K.get(t[n],"globalEval"))}ge.tbody=ge.tfoot=ge.colgroup=ge.caption=ge.thead,ge.th=ge.td,d.option||(ge.optgroup=ge.option=[1,"<select 
multiple='multiple'>","</select>"]);var ve=/<|&#?\w+;/;function be(e,t,n,i,o){for(var r,s,a,l,c,u,p=t.createDocumentFragment(),h=[],d=0,f=e.length;d<f;d++)if((r=e[d])||0===r)if("object"===b(r))w.merge(h,r.nodeType?[r]:r);else if(ve.test(r)){for(s=s||p.appendChild(t.createElement("div")),a=(de.exec(r)||["",""])[1].toLowerCase(),l=ge[a]||ge._default,s.innerHTML=l[1]+w.htmlPrefilter(r)+l[2],u=l[0];u--;)s=s.lastChild;w.merge(h,s.childNodes),(s=p.firstChild).textContent=""}else h.push(t.createTextNode(r));for(p.textContent="",d=0;r=h[d++];)if(i&&-1<w.inArray(r,i))o&&o.push(r);else if(c=oe(r),s=ye(p.appendChild(r),"script"),c&&me(s),n)for(u=0;r=s[u++];)fe.test(r.type||"")&&n.push(r);return p}var xe=/^([^.]*)(?:\.(.+)|)/;function we(){return!0}function _e(){return!1}function ke(e,t){return e===function(){try{return y.activeElement}catch(e){}}()==("focus"===t)}function Ee(e,t,n,i,o,r){var s,a;if("object"==typeof t){for(a in"string"!=typeof n&&(i=i||n,n=void 0),t)Ee(e,a,n,i,t[a],r);return e}if(null==i&&null==o?(o=n,i=n=void 0):null==o&&("string"==typeof n?(o=i,i=void 0):(o=i,i=n,n=void 0)),!1===o)o=_e;else if(!o)return e;return 1===r&&(s=o,(o=function(e){return w().off(e),s.apply(this,arguments)}).guid=s.guid||(s.guid=w.guid++)),e.each((function(){w.event.add(this,t,o,i,n)}))}function Te(e,t,n){n?(K.set(e,t,!1),w.event.add(e,t,{namespace:!1,handler:function(e){var i,r,s=K.get(this,t);if(1&e.isTrigger&&this[t]){if(s.length)(w.event.special[t]||{}).delegateType&&e.stopPropagation();else if(s=o.call(arguments),K.set(this,t,s),i=n(this,t),this[t](),s!==(r=K.get(this,t))||i?K.set(this,t,!1):r={},s!==r)return e.stopImmediatePropagation(),e.preventDefault(),r&&r.value}else s.length&&(K.set(this,t,{value:w.event.trigger(w.extend(s[0],w.Event.prototype),s.slice(1),this)}),e.stopImmediatePropagation())}})):void 0===K.get(e,t)&&w.event.add(e,t,we)}w.event={global:{},add:function(e,t,n,i,o){var r,s,a,l,c,u,p,h,d,f,g,y=K.get(e);if(V(e))for(n.handler&&(n=(r=n).handler,o=r.selector),o&&w.find.matchesSelector(ie,o),n.guid||(n.guid=w.guid++),(l=y.events)||(l=y.events=Object.create(null)),(s=y.handle)||(s=y.handle=function(t){return void 0!==w&&w.event.triggered!==t.type?w.event.dispatch.apply(e,arguments):void 0}),c=(t=(t||"").match(P)||[""]).length;c--;)d=g=(a=xe.exec(t[c])||[])[1],f=(a[2]||"").split(".").sort(),d&&(p=w.event.special[d]||{},d=(o?p.delegateType:p.bindType)||d,p=w.event.special[d]||{},u=w.extend({type:d,origType:g,data:i,handler:n,guid:n.guid,selector:o,needsContext:o&&w.expr.match.needsContext.test(o),namespace:f.join(".")},r),(h=l[d])||((h=l[d]=[]).delegateCount=0,p.setup&&!1!==p.setup.call(e,i,f,s)||e.addEventListener&&e.addEventListener(d,s)),p.add&&(p.add.call(e,u),u.handler.guid||(u.handler.guid=n.guid)),o?h.splice(h.delegateCount++,0,u):h.push(u),w.event.global[d]=!0)},remove:function(e,t,n,i,o){var r,s,a,l,c,u,p,h,d,f,g,y=K.hasData(e)&&K.get(e);if(y&&(l=y.events)){for(c=(t=(t||"").match(P)||[""]).length;c--;)if(d=g=(a=xe.exec(t[c])||[])[1],f=(a[2]||"").split(".").sort(),d){for(p=w.event.special[d]||{},h=l[d=(i?p.delegateType:p.bindType)||d]||[],a=a[2]&&new RegExp("(^|\\.)"+f.join("\\.(?:.*\\.|)")+"(\\.|$)"),s=r=h.length;r--;)u=h[r],!o&&g!==u.origType||n&&n.guid!==u.guid||a&&!a.test(u.namespace)||i&&i!==u.selector&&("**"!==i||!u.selector)||(h.splice(r,1),u.selector&&h.delegateCount--,p.remove&&p.remove.call(e,u));s&&!h.length&&(p.teardown&&!1!==p.teardown.call(e,f,y.handle)||w.removeEvent(e,d,y.handle),delete l[d])}else for(d in 
l)w.event.remove(e,d+t[c],n,i,!0);w.isEmptyObject(l)&&K.remove(e,"handle events")}},dispatch:function(e){var t,n,i,o,r,s,a=new Array(arguments.length),l=w.event.fix(e),c=(K.get(this,"events")||Object.create(null))[l.type]||[],u=w.event.special[l.type]||{};for(a[0]=l,t=1;t<arguments.length;t++)a[t]=arguments[t];if(l.delegateTarget=this,!u.preDispatch||!1!==u.preDispatch.call(this,l)){for(s=w.event.handlers.call(this,l,c),t=0;(o=s[t++])&&!l.isPropagationStopped();)for(l.currentTarget=o.elem,n=0;(r=o.handlers[n++])&&!l.isImmediatePropagationStopped();)l.rnamespace&&!1!==r.namespace&&!l.rnamespace.test(r.namespace)||(l.handleObj=r,l.data=r.data,void 0!==(i=((w.event.special[r.origType]||{}).handle||r.handler).apply(o.elem,a))&&!1===(l.result=i)&&(l.preventDefault(),l.stopPropagation()));return u.postDispatch&&u.postDispatch.call(this,l),l.result}},handlers:function(e,t){var n,i,o,r,s,a=[],l=t.delegateCount,c=e.target;if(l&&c.nodeType&&!("click"===e.type&&1<=e.button))for(;c!==this;c=c.parentNode||this)if(1===c.nodeType&&("click"!==e.type||!0!==c.disabled)){for(r=[],s={},n=0;n<l;n++)void 0===s[o=(i=t[n]).selector+" "]&&(s[o]=i.needsContext?-1<w(o,this).index(c):w.find(o,this,null,[c]).length),s[o]&&r.push(i);r.length&&a.push({elem:c,handlers:r})}return c=this,l<t.length&&a.push({elem:c,handlers:t.slice(l)}),a},addProp:function(e,t){Object.defineProperty(w.Event.prototype,e,{enumerable:!0,configurable:!0,get:f(t)?function(){if(this.originalEvent)return t(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[e]},set:function(t){Object.defineProperty(this,e,{enumerable:!0,configurable:!0,writable:!0,value:t})}})},fix:function(e){return e[w.expando]?e:new w.Event(e)},special:{load:{noBubble:!0},click:{setup:function(e){var t=this||e;return he.test(t.type)&&t.click&&A(t,"input")&&Te(t,"click",we),!1},trigger:function(e){var t=this||e;return he.test(t.type)&&t.click&&A(t,"input")&&Te(t,"click"),!0},_default:function(e){var t=e.target;return he.test(t.type)&&t.click&&A(t,"input")&&K.get(t,"click")||A(t,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},w.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},w.Event=function(e,t){if(!(this instanceof w.Event))return new w.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 0===e.defaultPrevented&&!1===e.returnValue?we:_e,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&w.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[w.expando]=!0},w.Event.prototype={constructor:w.Event,isDefaultPrevented:_e,isPropagationStopped:_e,isImmediatePropagationStopped:_e,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=we,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=we,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var 
e=this.originalEvent;this.isImmediatePropagationStopped=we,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},w.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,char:!0,code:!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:!0},w.event.addProp),w.each({focus:"focusin",blur:"focusout"},(function(e,t){w.event.special[e]={setup:function(){return Te(this,e,ke),!1},trigger:function(){return Te(this,e),!0},_default:function(){return!0},delegateType:t}})),w.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},(function(e,t){w.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,i=e.relatedTarget,o=e.handleObj;return i&&(i===this||w.contains(this,i))||(e.type=o.origType,n=o.handler.apply(this,arguments),e.type=t),n}}})),w.fn.extend({on:function(e,t,n,i){return Ee(this,e,t,n,i)},one:function(e,t,n,i){return Ee(this,e,t,n,i,1)},off:function(e,t,n){var i,o;if(e&&e.preventDefault&&e.handleObj)return i=e.handleObj,w(e.delegateTarget).off(i.namespace?i.origType+"."+i.namespace:i.origType,i.selector,i.handler),this;if("object"==typeof e){for(o in e)this.off(o,t,e[o]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=_e),this.each((function(){w.event.remove(this,e,n,t)}))}});var Se=/<script|<style|<link/i,Ae=/checked\s*(?:[^=]|=\s*.checked.)/i,Ce=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;function Le(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&w(e).children("tbody")[0]||e}function Oe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Me(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function je(e,t){var n,i,o,r,s,a;if(1===t.nodeType){if(K.hasData(e)&&(a=K.get(e).events))for(o in K.remove(t,"handle events"),a)for(n=0,i=a[o].length;n<i;n++)w.event.add(t,o,a[o][n]);Q.hasData(e)&&(r=Q.access(e),s=w.extend({},r),Q.set(t,s))}}function Ne(e,t,n,i){t=r(t);var o,s,a,l,c,u,p=0,h=e.length,g=h-1,y=t[0],m=f(y);if(m||1<h&&"string"==typeof y&&!d.checkClone&&Ae.test(y))return e.each((function(o){var r=e.eq(o);m&&(t[0]=y.call(this,o,r.html())),Ne(r,t,n,i)}));if(h&&(s=(o=be(t,e[0].ownerDocument,!1,e,i)).firstChild,1===o.childNodes.length&&(o=s),s||i)){for(l=(a=w.map(ye(o,"script"),Oe)).length;p<h;p++)c=o,p!==g&&(c=w.clone(c,!0,!0),l&&w.merge(a,ye(c,"script"))),n.call(e[p],c,p);if(l)for(u=a[a.length-1].ownerDocument,w.map(a,Me),p=0;p<l;p++)c=a[p],fe.test(c.type||"")&&!K.access(c,"globalEval")&&w.contains(u,c)&&(c.src&&"module"!==(c.type||"").toLowerCase()?w._evalUrl&&!c.noModule&&w._evalUrl(c.src,{nonce:c.nonce||c.getAttribute("nonce")},u):v(c.textContent.replace(Ce,""),c,u))}return e}function De(e,t,n){for(var i,o=t?w.filter(t,e):e,r=0;null!=(i=o[r]);r++)n||1!==i.nodeType||w.cleanData(ye(i)),i.parentNode&&(n&&oe(i)&&me(ye(i,"script")),i.parentNode.removeChild(i));return e}w.extend({htmlPrefilter:function(e){return e},clone:function(e,t,n){var 
i,o,r,s,a,l,c,u=e.cloneNode(!0),p=oe(e);if(!(d.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||w.isXMLDoc(e)))for(s=ye(u),i=0,o=(r=ye(e)).length;i<o;i++)a=r[i],"input"===(c=(l=s[i]).nodeName.toLowerCase())&&he.test(a.type)?l.checked=a.checked:"input"!==c&&"textarea"!==c||(l.defaultValue=a.defaultValue);if(t)if(n)for(r=r||ye(e),s=s||ye(u),i=0,o=r.length;i<o;i++)je(r[i],s[i]);else je(e,u);return 0<(s=ye(u,"script")).length&&me(s,!p&&ye(e,"script")),u},cleanData:function(e){for(var t,n,i,o=w.event.special,r=0;void 0!==(n=e[r]);r++)if(V(n)){if(t=n[K.expando]){if(t.events)for(i in t.events)o[i]?w.event.remove(n,i):w.removeEvent(n,i,t.handle);n[K.expando]=void 0}n[Q.expando]&&(n[Q.expando]=void 0)}}}),w.fn.extend({detach:function(e){return De(this,e,!0)},remove:function(e){return De(this,e)},text:function(e){return F(this,(function(e){return void 0===e?w.text(this):this.empty().each((function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)}))}),null,e,arguments.length)},append:function(){return Ne(this,arguments,(function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Le(this,e).appendChild(e)}))},prepend:function(){return Ne(this,arguments,(function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Le(this,e);t.insertBefore(e,t.firstChild)}}))},before:function(){return Ne(this,arguments,(function(e){this.parentNode&&this.parentNode.insertBefore(e,this)}))},after:function(){return Ne(this,arguments,(function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)}))},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(w.cleanData(ye(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null!=e&&e,t=null==t?e:t,this.map((function(){return w.clone(this,e,t)}))},html:function(e){return F(this,(function(e){var t=this[0]||{},n=0,i=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!Se.test(e)&&!ge[(de.exec(e)||["",""])[1].toLowerCase()]){e=w.htmlPrefilter(e);try{for(;n<i;n++)1===(t=this[n]||{}).nodeType&&(w.cleanData(ye(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)}),null,e,arguments.length)},replaceWith:function(){var e=[];return Ne(this,arguments,(function(t){var n=this.parentNode;w.inArray(this,e)<0&&(w.cleanData(ye(this)),n&&n.replaceChild(t,this))}),e)}}),w.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},(function(e,t){w.fn[e]=function(e){for(var n,i=[],o=w(e),r=o.length-1,a=0;a<=r;a++)n=a===r?this:this.clone(!0),w(o[a])[t](n),s.apply(i,n.get());return this.pushStack(i)}}));var Pe=new RegExp("^("+ee+")(?!px)[a-z%]+$","i"),Ie=function(t){var n=t.ownerDocument.defaultView;return n&&n.opener||(n=e),n.getComputedStyle(t)},He=function(e,t,n){var i,o,r={};for(o in t)r[o]=e.style[o],e.style[o]=t[o];for(o in i=n.call(e),t)e.style[o]=r[o];return i},Re=new RegExp(ne.join("|"),"i");function qe(e,t,n){var i,o,r,s,a=e.style;return(n=n||Ie(e))&&(""!==(s=n.getPropertyValue(t)||n[t])||oe(e)||(s=w.style(e,t)),!d.pixelBoxStyles()&&Pe.test(s)&&Re.test(t)&&(i=a.width,o=a.minWidth,r=a.maxWidth,a.minWidth=a.maxWidth=a.width=s,s=n.width,a.width=i,a.minWidth=o,a.maxWidth=r)),void 0!==s?s+"":s}function ze(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}!function(){function 
t(){if(u){c.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",u.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",ie.appendChild(c).appendChild(u);var t=e.getComputedStyle(u);i="1%"!==t.top,l=12===n(t.marginLeft),u.style.right="60%",s=36===n(t.right),o=36===n(t.width),u.style.position="absolute",r=12===n(u.offsetWidth/3),ie.removeChild(c),u=null}}function n(e){return Math.round(parseFloat(e))}var i,o,r,s,a,l,c=y.createElement("div"),u=y.createElement("div");u.style&&(u.style.backgroundClip="content-box",u.cloneNode(!0).style.backgroundClip="",d.clearCloneStyle="content-box"===u.style.backgroundClip,w.extend(d,{boxSizingReliable:function(){return t(),o},pixelBoxStyles:function(){return t(),s},pixelPosition:function(){return t(),i},reliableMarginLeft:function(){return t(),l},scrollboxSize:function(){return t(),r},reliableTrDimensions:function(){var t,n,i,o;return null==a&&(t=y.createElement("table"),n=y.createElement("tr"),i=y.createElement("div"),t.style.cssText="position:absolute;left:-11111px;border-collapse:separate",n.style.cssText="border:1px solid",n.style.height="1px",i.style.height="9px",i.style.display="block",ie.appendChild(t).appendChild(n).appendChild(i),o=e.getComputedStyle(n),a=parseInt(o.height,10)+parseInt(o.borderTopWidth,10)+parseInt(o.borderBottomWidth,10)===n.offsetHeight,ie.removeChild(t)),a}}))}();var We=["Webkit","Moz","ms"],Fe=y.createElement("div").style,Ue={};function Xe(e){return w.cssProps[e]||Ue[e]||(e in Fe?e:Ue[e]=function(e){for(var t=e[0].toUpperCase()+e.slice(1),n=We.length;n--;)if((e=We[n]+t)in Fe)return e}(e)||e)}var Ye=/^(none|table(?!-c[ea]).+)/,Be=/^--/,Ve={position:"absolute",visibility:"hidden",display:"block"},$e={letterSpacing:"0",fontWeight:"400"};function Ke(e,t,n){var i=te.exec(t);return i?Math.max(0,i[2]-(n||0))+(i[3]||"px"):t}function Qe(e,t,n,i,o,r){var s="width"===t?1:0,a=0,l=0;if(n===(i?"border":"content"))return 0;for(;s<4;s+=2)"margin"===n&&(l+=w.css(e,n+ne[s],!0,o)),i?("content"===n&&(l-=w.css(e,"padding"+ne[s],!0,o)),"margin"!==n&&(l-=w.css(e,"border"+ne[s]+"Width",!0,o))):(l+=w.css(e,"padding"+ne[s],!0,o),"padding"!==n?l+=w.css(e,"border"+ne[s]+"Width",!0,o):a+=w.css(e,"border"+ne[s]+"Width",!0,o));return!i&&0<=r&&(l+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-r-l-a-.5))||0),l}function Ge(e,t,n){var i=Ie(e),o=(!d.boxSizingReliable()||n)&&"border-box"===w.css(e,"boxSizing",!1,i),r=o,s=qe(e,t,i),a="offset"+t[0].toUpperCase()+t.slice(1);if(Pe.test(s)){if(!n)return s;s="auto"}return(!d.boxSizingReliable()&&o||!d.reliableTrDimensions()&&A(e,"tr")||"auto"===s||!parseFloat(s)&&"inline"===w.css(e,"display",!1,i))&&e.getClientRects().length&&(o="border-box"===w.css(e,"boxSizing",!1,i),(r=a in e)&&(s=e[a])),(s=parseFloat(s)||0)+Qe(e,t,n||(o?"border":"content"),r,i,s)+"px"}function Je(e,t,n,i,o){return new Je.prototype.init(e,t,n,i,o)}w.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=qe(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,gridArea:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnStart:!0,gridRow:!0,gridRowEnd:!0,gridRowStart:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{},style:function(e,t,n,i){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var o,r,s,a=B(t),l=Be.test(t),c=e.style;if(l||(t=Xe(a)),s=w.cssHooks[t]||w.cssHooks[a],void 0===n)return 
s&&"get"in s&&void 0!==(o=s.get(e,!1,i))?o:c[t];"string"==(r=typeof n)&&(o=te.exec(n))&&o[1]&&(n=ae(e,t,o),r="number"),null!=n&&n==n&&("number"!==r||l||(n+=o&&o[3]||(w.cssNumber[a]?"":"px")),d.clearCloneStyle||""!==n||0!==t.indexOf("background")||(c[t]="inherit"),s&&"set"in s&&void 0===(n=s.set(e,n,i))||(l?c.setProperty(t,n):c[t]=n))}},css:function(e,t,n,i){var o,r,s,a=B(t);return Be.test(t)||(t=Xe(a)),(s=w.cssHooks[t]||w.cssHooks[a])&&"get"in s&&(o=s.get(e,!0,n)),void 0===o&&(o=qe(e,t,i)),"normal"===o&&t in $e&&(o=$e[t]),""===n||n?(r=parseFloat(o),!0===n||isFinite(r)?r||0:o):o}}),w.each(["height","width"],(function(e,t){w.cssHooks[t]={get:function(e,n,i){if(n)return!Ye.test(w.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?Ge(e,t,i):He(e,Ve,(function(){return Ge(e,t,i)}))},set:function(e,n,i){var o,r=Ie(e),s=!d.scrollboxSize()&&"absolute"===r.position,a=(s||i)&&"border-box"===w.css(e,"boxSizing",!1,r),l=i?Qe(e,t,i,a,r):0;return a&&s&&(l-=Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-parseFloat(r[t])-Qe(e,t,"border",!1,r)-.5)),l&&(o=te.exec(n))&&"px"!==(o[3]||"px")&&(e.style[t]=n,n=w.css(e,t)),Ke(0,n,l)}}})),w.cssHooks.marginLeft=ze(d.reliableMarginLeft,(function(e,t){if(t)return(parseFloat(qe(e,"marginLeft"))||e.getBoundingClientRect().left-He(e,{marginLeft:0},(function(){return e.getBoundingClientRect().left})))+"px"})),w.each({margin:"",padding:"",border:"Width"},(function(e,t){w.cssHooks[e+t]={expand:function(n){for(var i=0,o={},r="string"==typeof n?n.split(" "):[n];i<4;i++)o[e+ne[i]+t]=r[i]||r[i-2]||r[0];return o}},"margin"!==e&&(w.cssHooks[e+t].set=Ke)})),w.fn.extend({css:function(e,t){return F(this,(function(e,t,n){var i,o,r={},s=0;if(Array.isArray(t)){for(i=Ie(e),o=t.length;s<o;s++)r[t[s]]=w.css(e,t[s],!1,i);return r}return void 0!==n?w.style(e,t,n):w.css(e,t)}),e,t,1<arguments.length)}}),((w.Tween=Je).prototype={constructor:Je,init:function(e,t,n,i,o,r){this.elem=e,this.prop=n,this.easing=o||w.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=i,this.unit=r||(w.cssNumber[n]?"":"px")},cur:function(){var e=Je.propHooks[this.prop];return e&&e.get?e.get(this):Je.propHooks._default.get(this)},run:function(e){var t,n=Je.propHooks[this.prop];return this.options.duration?this.pos=t=w.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):Je.propHooks._default.set(this),this}}).init.prototype=Je.prototype,(Je.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=w.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){w.fx.step[e.prop]?w.fx.step[e.prop](e):1!==e.elem.nodeType||!w.cssHooks[e.prop]&&null==e.elem.style[Xe(e.prop)]?e.elem[e.prop]=e.now:w.style(e.elem,e.prop,e.now+e.unit)}}}).scrollTop=Je.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},w.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},w.fx=Je.prototype.init,w.fx.step={};var Ze,et,tt,nt,it=/^(?:toggle|show|hide)$/,ot=/queueHooks$/;function rt(){et&&(!1===y.hidden&&e.requestAnimationFrame?e.requestAnimationFrame(rt):e.setTimeout(rt,w.fx.interval),w.fx.tick())}function st(){return e.setTimeout((function(){Ze=void 0})),Ze=Date.now()}function at(e,t){var 
n,i=0,o={height:e};for(t=t?1:0;i<4;i+=2-t)o["margin"+(n=ne[i])]=o["padding"+n]=e;return t&&(o.opacity=o.width=e),o}function lt(e,t,n){for(var i,o=(ct.tweeners[t]||[]).concat(ct.tweeners["*"]),r=0,s=o.length;r<s;r++)if(i=o[r].call(n,t,e))return i}function ct(e,t,n){var i,o,r=0,s=ct.prefilters.length,a=w.Deferred().always((function(){delete l.elem})),l=function(){if(o)return!1;for(var t=Ze||st(),n=Math.max(0,c.startTime+c.duration-t),i=1-(n/c.duration||0),r=0,s=c.tweens.length;r<s;r++)c.tweens[r].run(i);return a.notifyWith(e,[c,i,n]),i<1&&s?n:(s||a.notifyWith(e,[c,1,0]),a.resolveWith(e,[c]),!1)},c=a.promise({elem:e,props:w.extend({},t),opts:w.extend(!0,{specialEasing:{},easing:w.easing._default},n),originalProperties:t,originalOptions:n,startTime:Ze||st(),duration:n.duration,tweens:[],createTween:function(t,n){var i=w.Tween(e,c.opts,t,n,c.opts.specialEasing[t]||c.opts.easing);return c.tweens.push(i),i},stop:function(t){var n=0,i=t?c.tweens.length:0;if(o)return this;for(o=!0;n<i;n++)c.tweens[n].run(1);return t?(a.notifyWith(e,[c,1,0]),a.resolveWith(e,[c,t])):a.rejectWith(e,[c,t]),this}}),u=c.props;for(function(e,t){var n,i,o,r,s;for(n in e)if(o=t[i=B(n)],r=e[n],Array.isArray(r)&&(o=r[1],r=e[n]=r[0]),n!==i&&(e[i]=r,delete e[n]),(s=w.cssHooks[i])&&"expand"in s)for(n in r=s.expand(r),delete e[i],r)n in e||(e[n]=r[n],t[n]=o);else t[i]=o}(u,c.opts.specialEasing);r<s;r++)if(i=ct.prefilters[r].call(c,e,u,c.opts))return f(i.stop)&&(w._queueHooks(c.elem,c.opts.queue).stop=i.stop.bind(i)),i;return w.map(u,lt,c),f(c.opts.start)&&c.opts.start.call(e,c),c.progress(c.opts.progress).done(c.opts.done,c.opts.complete).fail(c.opts.fail).always(c.opts.always),w.fx.timer(w.extend(l,{elem:e,anim:c,queue:c.opts.queue})),c}w.Animation=w.extend(ct,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return ae(n.elem,e,te.exec(t),n),n}]},tweener:function(e,t){f(e)?(t=e,e=["*"]):e=e.match(P);for(var n,i=0,o=e.length;i<o;i++)n=e[i],ct.tweeners[n]=ct.tweeners[n]||[],ct.tweeners[n].unshift(t)},prefilters:[function(e,t,n){var i,o,r,s,a,l,c,u,p="width"in t||"height"in t,h=this,d={},f=e.style,g=e.nodeType&&se(e),y=K.get(e,"fxshow");for(i in n.queue||(null==(s=w._queueHooks(e,"fx")).unqueued&&(s.unqueued=0,a=s.empty.fire,s.empty.fire=function(){s.unqueued||a()}),s.unqueued++,h.always((function(){h.always((function(){s.unqueued--,w.queue(e,"fx").length||s.empty.fire()}))}))),t)if(o=t[i],it.test(o)){if(delete t[i],r=r||"toggle"===o,o===(g?"hide":"show")){if("show"!==o||!y||void 0===y[i])continue;g=!0}d[i]=y&&y[i]||w.style(e,i)}if((l=!w.isEmptyObject(t))||!w.isEmptyObject(d))for(i in p&&1===e.nodeType&&(n.overflow=[f.overflow,f.overflowX,f.overflowY],null==(c=y&&y.display)&&(c=K.get(e,"display")),"none"===(u=w.css(e,"display"))&&(c?u=c:(ce([e],!0),c=e.style.display||c,u=w.css(e,"display"),ce([e]))),("inline"===u||"inline-block"===u&&null!=c)&&"none"===w.css(e,"float")&&(l||(h.done((function(){f.display=c})),null==c&&(u=f.display,c="none"===u?"":u)),f.display="inline-block")),n.overflow&&(f.overflow="hidden",h.always((function(){f.overflow=n.overflow[0],f.overflowX=n.overflow[1],f.overflowY=n.overflow[2]}))),l=!1,d)l||(y?"hidden"in y&&(g=y.hidden):y=K.access(e,"fxshow",{display:c}),r&&(y.hidden=!g),g&&ce([e],!0),h.done((function(){for(i in g||ce([e]),K.remove(e,"fxshow"),d)w.style(e,i,d[i])}))),l=lt(g?y[i]:0,i,h),i in y||(y[i]=l.start,g&&(l.end=l.start,l.start=0))}],prefilter:function(e,t){t?ct.prefilters.unshift(e):ct.prefilters.push(e)}}),w.speed=function(e,t,n){var i=e&&"object"==typeof 
e?w.extend({},e):{complete:n||!n&&t||f(e)&&e,duration:e,easing:n&&t||t&&!f(t)&&t};return w.fx.off?i.duration=0:"number"!=typeof i.duration&&(i.duration in w.fx.speeds?i.duration=w.fx.speeds[i.duration]:i.duration=w.fx.speeds._default),null!=i.queue&&!0!==i.queue||(i.queue="fx"),i.old=i.complete,i.complete=function(){f(i.old)&&i.old.call(this),i.queue&&w.dequeue(this,i.queue)},i},w.fn.extend({fadeTo:function(e,t,n,i){return this.filter(se).css("opacity",0).show().end().animate({opacity:t},e,n,i)},animate:function(e,t,n,i){var o=w.isEmptyObject(e),r=w.speed(t,n,i),s=function(){var t=ct(this,w.extend({},e),r);(o||K.get(this,"finish"))&&t.stop(!0)};return s.finish=s,o||!1===r.queue?this.each(s):this.queue(r.queue,s)},stop:function(e,t,n){var i=function(e){var t=e.stop;delete e.stop,t(n)};return"string"!=typeof e&&(n=t,t=e,e=void 0),t&&this.queue(e||"fx",[]),this.each((function(){var t=!0,o=null!=e&&e+"queueHooks",r=w.timers,s=K.get(this);if(o)s[o]&&s[o].stop&&i(s[o]);else for(o in s)s[o]&&s[o].stop&&ot.test(o)&&i(s[o]);for(o=r.length;o--;)r[o].elem!==this||null!=e&&r[o].queue!==e||(r[o].anim.stop(n),t=!1,r.splice(o,1));!t&&n||w.dequeue(this,e)}))},finish:function(e){return!1!==e&&(e=e||"fx"),this.each((function(){var t,n=K.get(this),i=n[e+"queue"],o=n[e+"queueHooks"],r=w.timers,s=i?i.length:0;for(n.finish=!0,w.queue(this,e,[]),o&&o.stop&&o.stop.call(this,!0),t=r.length;t--;)r[t].elem===this&&r[t].queue===e&&(r[t].anim.stop(!0),r.splice(t,1));for(t=0;t<s;t++)i[t]&&i[t].finish&&i[t].finish.call(this);delete n.finish}))}}),w.each(["toggle","show","hide"],(function(e,t){var n=w.fn[t];w.fn[t]=function(e,i,o){return null==e||"boolean"==typeof e?n.apply(this,arguments):this.animate(at(t,!0),e,i,o)}})),w.each({slideDown:at("show"),slideUp:at("hide"),slideToggle:at("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},(function(e,t){w.fn[e]=function(e,n,i){return this.animate(t,e,n,i)}})),w.timers=[],w.fx.tick=function(){var e,t=0,n=w.timers;for(Ze=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||w.fx.stop(),Ze=void 0},w.fx.timer=function(e){w.timers.push(e),w.fx.start()},w.fx.interval=13,w.fx.start=function(){et||(et=!0,rt())},w.fx.stop=function(){et=null},w.fx.speeds={slow:600,fast:200,_default:400},w.fn.delay=function(t,n){return t=w.fx&&w.fx.speeds[t]||t,n=n||"fx",this.queue(n,(function(n,i){var o=e.setTimeout(n,t);i.stop=function(){e.clearTimeout(o)}}))},tt=y.createElement("input"),nt=y.createElement("select").appendChild(y.createElement("option")),tt.type="checkbox",d.checkOn=""!==tt.value,d.optSelected=nt.selected,(tt=y.createElement("input")).value="t",tt.type="radio",d.radioValue="t"===tt.value;var ut,pt=w.expr.attrHandle;w.fn.extend({attr:function(e,t){return F(this,w.attr,e,t,1<arguments.length)},removeAttr:function(e){return this.each((function(){w.removeAttr(this,e)}))}}),w.extend({attr:function(e,t,n){var i,o,r=e.nodeType;if(3!==r&&8!==r&&2!==r)return void 0===e.getAttribute?w.prop(e,t,n):(1===r&&w.isXMLDoc(e)||(o=w.attrHooks[t.toLowerCase()]||(w.expr.match.bool.test(t)?ut:void 0)),void 0!==n?null===n?void w.removeAttr(e,t):o&&"set"in o&&void 0!==(i=o.set(e,n,t))?i:(e.setAttribute(t,n+""),n):o&&"get"in o&&null!==(i=o.get(e,t))?i:null==(i=w.find.attr(e,t))?void 0:i)},attrHooks:{type:{set:function(e,t){if(!d.radioValue&&"radio"===t&&A(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var 
n,i=0,o=t&&t.match(P);if(o&&1===e.nodeType)for(;n=o[i++];)e.removeAttribute(n)}}),ut={set:function(e,t,n){return!1===t?w.removeAttr(e,n):e.setAttribute(n,n),n}},w.each(w.expr.match.bool.source.match(/\w+/g),(function(e,t){var n=pt[t]||w.find.attr;pt[t]=function(e,t,i){var o,r,s=t.toLowerCase();return i||(r=pt[s],pt[s]=o,o=null!=n(e,t,i)?s:null,pt[s]=r),o}}));var ht=/^(?:input|select|textarea|button)$/i,dt=/^(?:a|area)$/i;function ft(e){return(e.match(P)||[]).join(" ")}function gt(e){return e.getAttribute&&e.getAttribute("class")||""}function yt(e){return Array.isArray(e)?e:"string"==typeof e&&e.match(P)||[]}w.fn.extend({prop:function(e,t){return F(this,w.prop,e,t,1<arguments.length)},removeProp:function(e){return this.each((function(){delete this[w.propFix[e]||e]}))}}),w.extend({prop:function(e,t,n){var i,o,r=e.nodeType;if(3!==r&&8!==r&&2!==r)return 1===r&&w.isXMLDoc(e)||(t=w.propFix[t]||t,o=w.propHooks[t]),void 0!==n?o&&"set"in o&&void 0!==(i=o.set(e,n,t))?i:e[t]=n:o&&"get"in o&&null!==(i=o.get(e,t))?i:e[t]},propHooks:{tabIndex:{get:function(e){var t=w.find.attr(e,"tabindex");return t?parseInt(t,10):ht.test(e.nodeName)||dt.test(e.nodeName)&&e.href?0:-1}}},propFix:{for:"htmlFor",class:"className"}}),d.optSelected||(w.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),w.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],(function(){w.propFix[this.toLowerCase()]=this})),w.fn.extend({addClass:function(e){var t,n,i,o,r,s,a,l=0;if(f(e))return this.each((function(t){w(this).addClass(e.call(this,t,gt(this)))}));if((t=yt(e)).length)for(;n=this[l++];)if(o=gt(n),i=1===n.nodeType&&" "+ft(o)+" "){for(s=0;r=t[s++];)i.indexOf(" "+r+" ")<0&&(i+=r+" ");o!==(a=ft(i))&&n.setAttribute("class",a)}return this},removeClass:function(e){var t,n,i,o,r,s,a,l=0;if(f(e))return this.each((function(t){w(this).removeClass(e.call(this,t,gt(this)))}));if(!arguments.length)return this.attr("class","");if((t=yt(e)).length)for(;n=this[l++];)if(o=gt(n),i=1===n.nodeType&&" "+ft(o)+" "){for(s=0;r=t[s++];)for(;-1<i.indexOf(" "+r+" ");)i=i.replace(" "+r+" "," ");o!==(a=ft(i))&&n.setAttribute("class",a)}return this},toggleClass:function(e,t){var n=typeof e,i="string"===n||Array.isArray(e);return"boolean"==typeof t&&i?t?this.addClass(e):this.removeClass(e):f(e)?this.each((function(n){w(this).toggleClass(e.call(this,n,gt(this),t),t)})):this.each((function(){var t,o,r,s;if(i)for(o=0,r=w(this),s=yt(e);t=s[o++];)r.hasClass(t)?r.removeClass(t):r.addClass(t);else void 0!==e&&"boolean"!==n||((t=gt(this))&&K.set(this,"__className__",t),this.setAttribute&&this.setAttribute("class",t||!1===e?"":K.get(this,"__className__")||""))}))},hasClass:function(e){var t,n,i=0;for(t=" "+e+" ";n=this[i++];)if(1===n.nodeType&&-1<(" "+ft(gt(n))+" ").indexOf(t))return!0;return!1}});var mt=/\r/g;w.fn.extend({val:function(e){var t,n,i,o=this[0];return arguments.length?(i=f(e),this.each((function(n){var o;1===this.nodeType&&(null==(o=i?e.call(this,n,w(this).val()):e)?o="":"number"==typeof o?o+="":Array.isArray(o)&&(o=w.map(o,(function(e){return null==e?"":e+""}))),(t=w.valHooks[this.type]||w.valHooks[this.nodeName.toLowerCase()])&&"set"in t&&void 0!==t.set(this,o,"value")||(this.value=o))}))):o?(t=w.valHooks[o.type]||w.valHooks[o.nodeName.toLowerCase()])&&"get"in t&&void 
0!==(n=t.get(o,"value"))?n:"string"==typeof(n=o.value)?n.replace(mt,""):null==n?"":n:void 0}}),w.extend({valHooks:{option:{get:function(e){var t=w.find.attr(e,"value");return null!=t?t:ft(w.text(e))}},select:{get:function(e){var t,n,i,o=e.options,r=e.selectedIndex,s="select-one"===e.type,a=s?null:[],l=s?r+1:o.length;for(i=r<0?l:s?r:0;i<l;i++)if(((n=o[i]).selected||i===r)&&!n.disabled&&(!n.parentNode.disabled||!A(n.parentNode,"optgroup"))){if(t=w(n).val(),s)return t;a.push(t)}return a},set:function(e,t){for(var n,i,o=e.options,r=w.makeArray(t),s=o.length;s--;)((i=o[s]).selected=-1<w.inArray(w.valHooks.option.get(i),r))&&(n=!0);return n||(e.selectedIndex=-1),r}}}}),w.each(["radio","checkbox"],(function(){w.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=-1<w.inArray(w(e).val(),t)}},d.checkOn||(w.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})})),d.focusin="onfocusin"in e;var vt=/^(?:focusinfocus|focusoutblur)$/,bt=function(e){e.stopPropagation()};w.extend(w.event,{trigger:function(t,n,i,o){var r,s,a,l,c,p,h,d,m=[i||y],v=u.call(t,"type")?t.type:t,b=u.call(t,"namespace")?t.namespace.split("."):[];if(s=d=a=i=i||y,3!==i.nodeType&&8!==i.nodeType&&!vt.test(v+w.event.triggered)&&(-1<v.indexOf(".")&&(v=(b=v.split(".")).shift(),b.sort()),c=v.indexOf(":")<0&&"on"+v,(t=t[w.expando]?t:new w.Event(v,"object"==typeof t&&t)).isTrigger=o?2:3,t.namespace=b.join("."),t.rnamespace=t.namespace?new RegExp("(^|\\.)"+b.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,t.result=void 0,t.target||(t.target=i),n=null==n?[t]:w.makeArray(n,[t]),h=w.event.special[v]||{},o||!h.trigger||!1!==h.trigger.apply(i,n))){if(!o&&!h.noBubble&&!g(i)){for(l=h.delegateType||v,vt.test(l+v)||(s=s.parentNode);s;s=s.parentNode)m.push(s),a=s;a===(i.ownerDocument||y)&&m.push(a.defaultView||a.parentWindow||e)}for(r=0;(s=m[r++])&&!t.isPropagationStopped();)d=s,t.type=1<r?l:h.bindType||v,(p=(K.get(s,"events")||Object.create(null))[t.type]&&K.get(s,"handle"))&&p.apply(s,n),(p=c&&s[c])&&p.apply&&V(s)&&(t.result=p.apply(s,n),!1===t.result&&t.preventDefault());return t.type=v,o||t.isDefaultPrevented()||h._default&&!1!==h._default.apply(m.pop(),n)||!V(i)||c&&f(i[v])&&!g(i)&&((a=i[c])&&(i[c]=null),w.event.triggered=v,t.isPropagationStopped()&&d.addEventListener(v,bt),i[v](),t.isPropagationStopped()&&d.removeEventListener(v,bt),w.event.triggered=void 0,a&&(i[c]=a)),t.result}},simulate:function(e,t,n){var i=w.extend(new w.Event,n,{type:e,isSimulated:!0});w.event.trigger(i,null,t)}}),w.fn.extend({trigger:function(e,t){return this.each((function(){w.event.trigger(e,t,this)}))},triggerHandler:function(e,t){var n=this[0];if(n)return w.event.trigger(e,t,n,!0)}}),d.focusin||w.each({focus:"focusin",blur:"focusout"},(function(e,t){var n=function(e){w.event.simulate(t,e.target,w.event.fix(e))};w.event.special[t]={setup:function(){var i=this.ownerDocument||this.document||this,o=K.access(i,t);o||i.addEventListener(e,n,!0),K.access(i,t,(o||0)+1)},teardown:function(){var i=this.ownerDocument||this.document||this,o=K.access(i,t)-1;o?K.access(i,t,o):(i.removeEventListener(e,n,!0),K.remove(i,t))}}}));var xt=e.location,wt={guid:Date.now()},_t=/\?/;w.parseXML=function(t){var n,i;if(!t||"string"!=typeof t)return null;try{n=(new e.DOMParser).parseFromString(t,"text/xml")}catch(t){}return i=n&&n.getElementsByTagName("parsererror")[0],n&&!i||w.error("Invalid XML: "+(i?w.map(i.childNodes,(function(e){return e.textContent})).join("\n"):t)),n};var 
kt=/\[\]$/,Et=/\r?\n/g,Tt=/^(?:submit|button|image|reset|file)$/i,St=/^(?:input|select|textarea|keygen)/i;function At(e,t,n,i){var o;if(Array.isArray(t))w.each(t,(function(t,o){n||kt.test(e)?i(e,o):At(e+"["+("object"==typeof o&&null!=o?t:"")+"]",o,n,i)}));else if(n||"object"!==b(t))i(e,t);else for(o in t)At(e+"["+o+"]",t[o],n,i)}w.param=function(e,t){var n,i=[],o=function(e,t){var n=f(t)?t():t;i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(null==e)return"";if(Array.isArray(e)||e.jquery&&!w.isPlainObject(e))w.each(e,(function(){o(this.name,this.value)}));else for(n in e)At(n,e[n],t,o);return i.join("&")},w.fn.extend({serialize:function(){return w.param(this.serializeArray())},serializeArray:function(){return this.map((function(){var e=w.prop(this,"elements");return e?w.makeArray(e):this})).filter((function(){var e=this.type;return this.name&&!w(this).is(":disabled")&&St.test(this.nodeName)&&!Tt.test(e)&&(this.checked||!he.test(e))})).map((function(e,t){var n=w(this).val();return null==n?null:Array.isArray(n)?w.map(n,(function(e){return{name:t.name,value:e.replace(Et,"\r\n")}})):{name:t.name,value:n.replace(Et,"\r\n")}})).get()}});var Ct=/%20/g,Lt=/#.*$/,Ot=/([?&])_=[^&]*/,Mt=/^(.*?):[ \t]*([^\r\n]*)$/gm,jt=/^(?:GET|HEAD)$/,Nt=/^\/\//,Dt={},Pt={},It="*/".concat("*"),Ht=y.createElement("a");function Rt(e){return function(t,n){"string"!=typeof t&&(n=t,t="*");var i,o=0,r=t.toLowerCase().match(P)||[];if(f(n))for(;i=r[o++];)"+"===i[0]?(i=i.slice(1)||"*",(e[i]=e[i]||[]).unshift(n)):(e[i]=e[i]||[]).push(n)}}function qt(e,t,n,i){var o={},r=e===Pt;function s(a){var l;return o[a]=!0,w.each(e[a]||[],(function(e,a){var c=a(t,n,i);return"string"!=typeof c||r||o[c]?r?!(l=c):void 0:(t.dataTypes.unshift(c),s(c),!1)})),l}return s(t.dataTypes[0])||!o["*"]&&s("*")}function zt(e,t){var n,i,o=w.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((o[n]?e:i||(i={}))[n]=t[n]);return i&&w.extend(!0,e,i),e}Ht.href=xt.href,w.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:xt.href,type:"GET",isLocal:/^(?:about|app|app-storage|.+-extension|file|res|widget):$/.test(xt.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":It,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":w.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?zt(zt(e,w.ajaxSettings),t):zt(w.ajaxSettings,e)},ajaxPrefilter:Rt(Dt),ajaxTransport:Rt(Pt),ajax:function(t,n){"object"==typeof t&&(n=t,t=void 0),n=n||{};var i,o,r,s,a,l,c,u,p,h,d=w.ajaxSetup({},n),f=d.context||d,g=d.context&&(f.nodeType||f.jquery)?w(f):w.event,m=w.Deferred(),v=w.Callbacks("once memory"),b=d.statusCode||{},x={},_={},k="canceled",E={readyState:0,getResponseHeader:function(e){var t;if(c){if(!s)for(s={};t=Mt.exec(r);)s[t[1].toLowerCase()+" "]=(s[t[1].toLowerCase()+" "]||[]).concat(t[2]);t=s[e.toLowerCase()+" "]}return null==t?null:t.join(", ")},getAllResponseHeaders:function(){return c?r:null},setRequestHeader:function(e,t){return null==c&&(e=_[e.toLowerCase()]=_[e.toLowerCase()]||e,x[e]=t),this},overrideMimeType:function(e){return null==c&&(d.mimeType=e),this},statusCode:function(e){var t;if(e)if(c)E.always(e[E.status]);else for(t in e)b[t]=[b[t],e[t]];return this},abort:function(e){var 
t=e||k;return i&&i.abort(t),T(0,t),this}};if(m.promise(E),d.url=((t||d.url||xt.href)+"").replace(Nt,xt.protocol+"//"),d.type=n.method||n.type||d.method||d.type,d.dataTypes=(d.dataType||"*").toLowerCase().match(P)||[""],null==d.crossDomain){l=y.createElement("a");try{l.href=d.url,l.href=l.href,d.crossDomain=Ht.protocol+"//"+Ht.host!=l.protocol+"//"+l.host}catch(t){d.crossDomain=!0}}if(d.data&&d.processData&&"string"!=typeof d.data&&(d.data=w.param(d.data,d.traditional)),qt(Dt,d,n,E),c)return E;for(p in(u=w.event&&d.global)&&0==w.active++&&w.event.trigger("ajaxStart"),d.type=d.type.toUpperCase(),d.hasContent=!jt.test(d.type),o=d.url.replace(Lt,""),d.hasContent?d.data&&d.processData&&0===(d.contentType||"").indexOf("application/x-www-form-urlencoded")&&(d.data=d.data.replace(Ct,"+")):(h=d.url.slice(o.length),d.data&&(d.processData||"string"==typeof d.data)&&(o+=(_t.test(o)?"&":"?")+d.data,delete d.data),!1===d.cache&&(o=o.replace(Ot,"$1"),h=(_t.test(o)?"&":"?")+"_="+wt.guid+++h),d.url=o+h),d.ifModified&&(w.lastModified[o]&&E.setRequestHeader("If-Modified-Since",w.lastModified[o]),w.etag[o]&&E.setRequestHeader("If-None-Match",w.etag[o])),(d.data&&d.hasContent&&!1!==d.contentType||n.contentType)&&E.setRequestHeader("Content-Type",d.contentType),E.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+("*"!==d.dataTypes[0]?", "+It+"; q=0.01":""):d.accepts["*"]),d.headers)E.setRequestHeader(p,d.headers[p]);if(d.beforeSend&&(!1===d.beforeSend.call(f,E,d)||c))return E.abort();if(k="abort",v.add(d.complete),E.done(d.success),E.fail(d.error),i=qt(Pt,d,n,E)){if(E.readyState=1,u&&g.trigger("ajaxSend",[E,d]),c)return E;d.async&&0<d.timeout&&(a=e.setTimeout((function(){E.abort("timeout")}),d.timeout));try{c=!1,i.send(x,T)}catch(t){if(c)throw t;T(-1,t)}}else T(-1,"No Transport");function T(t,n,s,l){var p,h,y,x,_,k=n;c||(c=!0,a&&e.clearTimeout(a),i=void 0,r=l||"",E.readyState=0<t?4:0,p=200<=t&&t<300||304===t,s&&(x=function(e,t,n){for(var i,o,r,s,a=e.contents,l=e.dataTypes;"*"===l[0];)l.shift(),void 0===i&&(i=e.mimeType||t.getResponseHeader("Content-Type"));if(i)for(o in a)if(a[o]&&a[o].test(i)){l.unshift(o);break}if(l[0]in n)r=l[0];else{for(o in n){if(!l[0]||e.converters[o+" "+l[0]]){r=o;break}s||(s=o)}r=r||s}if(r)return r!==l[0]&&l.unshift(r),n[r]}(d,E,s)),!p&&-1<w.inArray("script",d.dataTypes)&&w.inArray("json",d.dataTypes)<0&&(d.converters["text script"]=function(){}),x=function(e,t,n,i){var o,r,s,a,l,c={},u=e.dataTypes.slice();if(u[1])for(s in e.converters)c[s.toLowerCase()]=e.converters[s];for(r=u.shift();r;)if(e.responseFields[r]&&(n[e.responseFields[r]]=t),!l&&i&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),l=r,r=u.shift())if("*"===r)r=l;else if("*"!==l&&l!==r){if(!(s=c[l+" "+r]||c["* "+r]))for(o in c)if((a=o.split(" "))[1]===r&&(s=c[l+" "+a[0]]||c["* "+a[0]])){!0===s?s=c[o]:!0!==c[o]&&(r=a[0],u.unshift(a[1]));break}if(!0!==s)if(s&&e.throws)t=s(t);else try{t=s(t)}catch(e){return{state:"parsererror",error:s?e:"No conversion from "+l+" to "+r}}}return{state:"success",data:t}}(d,x,E,p),p?(d.ifModified&&((_=E.getResponseHeader("Last-Modified"))&&(w.lastModified[o]=_),(_=E.getResponseHeader("etag"))&&(w.etag[o]=_)),204===t||"HEAD"===d.type?k="nocontent":304===t?k="notmodified":(k=x.state,h=x.data,p=!(y=x.error))):(y=k,!t&&k||(k="error",t<0&&(t=0))),E.status=t,E.statusText=(n||k)+"",p?m.resolveWith(f,[h,k,E]):m.rejectWith(f,[E,k,y]),E.statusCode(b),b=void 
0,u&&g.trigger(p?"ajaxSuccess":"ajaxError",[E,d,p?h:y]),v.fireWith(f,[E,k]),u&&(g.trigger("ajaxComplete",[E,d]),--w.active||w.event.trigger("ajaxStop")))}return E},getJSON:function(e,t,n){return w.get(e,t,n,"json")},getScript:function(e,t){return w.get(e,void 0,t,"script")}}),w.each(["get","post"],(function(e,t){w[t]=function(e,n,i,o){return f(n)&&(o=o||i,i=n,n=void 0),w.ajax(w.extend({url:e,type:t,dataType:o,data:n,success:i},w.isPlainObject(e)&&e))}})),w.ajaxPrefilter((function(e){var t;for(t in e.headers)"content-type"===t.toLowerCase()&&(e.contentType=e.headers[t]||"")})),w._evalUrl=function(e,t,n){return w.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,converters:{"text script":function(){}},dataFilter:function(e){w.globalEval(e,t,n)}})},w.fn.extend({wrapAll:function(e){var t;return this[0]&&(f(e)&&(e=e.call(this[0])),t=w(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map((function(){for(var e=this;e.firstElementChild;)e=e.firstElementChild;return e})).append(this)),this},wrapInner:function(e){return f(e)?this.each((function(t){w(this).wrapInner(e.call(this,t))})):this.each((function(){var t=w(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)}))},wrap:function(e){var t=f(e);return this.each((function(n){w(this).wrapAll(t?e.call(this,n):e)}))},unwrap:function(e){return this.parent(e).not("body").each((function(){w(this).replaceWith(this.childNodes)})),this}}),w.expr.pseudos.hidden=function(e){return!w.expr.pseudos.visible(e)},w.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},w.ajaxSettings.xhr=function(){try{return new e.XMLHttpRequest}catch(e){}};var Wt={0:200,1223:204},Ft=w.ajaxSettings.xhr();d.cors=!!Ft&&"withCredentials"in Ft,d.ajax=Ft=!!Ft,w.ajaxTransport((function(t){var n,i;if(d.cors||Ft&&!t.crossDomain)return{send:function(o,r){var s,a=t.xhr();if(a.open(t.type,t.url,t.async,t.username,t.password),t.xhrFields)for(s in t.xhrFields)a[s]=t.xhrFields[s];for(s in t.mimeType&&a.overrideMimeType&&a.overrideMimeType(t.mimeType),t.crossDomain||o["X-Requested-With"]||(o["X-Requested-With"]="XMLHttpRequest"),o)a.setRequestHeader(s,o[s]);n=function(e){return function(){n&&(n=i=a.onload=a.onerror=a.onabort=a.ontimeout=a.onreadystatechange=null,"abort"===e?a.abort():"error"===e?"number"!=typeof a.status?r(0,"error"):r(a.status,a.statusText):r(Wt[a.status]||a.status,a.statusText,"text"!==(a.responseType||"text")||"string"!=typeof a.responseText?{binary:a.response}:{text:a.responseText},a.getAllResponseHeaders()))}},a.onload=n(),i=a.onerror=a.ontimeout=n("error"),void 0!==a.onabort?a.onabort=i:a.onreadystatechange=function(){4===a.readyState&&e.setTimeout((function(){n&&i()}))},n=n("abort");try{a.send(t.hasContent&&t.data||null)}catch(o){if(n)throw o}},abort:function(){n&&n()}}})),w.ajaxPrefilter((function(e){e.crossDomain&&(e.contents.script=!1)})),w.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return w.globalEval(e),e}}}),w.ajaxPrefilter("script",(function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")})),w.ajaxTransport("script",(function(e){var t,n;if(e.crossDomain||e.scriptAttrs)return{send:function(i,o){t=w("<script>").attr(e.scriptAttrs||{}).prop({charset:e.scriptCharset,src:e.url}).on("load 
error",n=function(e){t.remove(),n=null,e&&o("error"===e.type?404:200,e.type)}),y.head.appendChild(t[0])},abort:function(){n&&n()}}}));var Ut,Xt=[],Yt=/(=)\?(?=&|$)|\?\?/;w.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Xt.pop()||w.expando+"_"+wt.guid++;return this[e]=!0,e}}),w.ajaxPrefilter("json jsonp",(function(t,n,i){var o,r,s,a=!1!==t.jsonp&&(Yt.test(t.url)?"url":"string"==typeof t.data&&0===(t.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(t.data)&&"data");if(a||"jsonp"===t.dataTypes[0])return o=t.jsonpCallback=f(t.jsonpCallback)?t.jsonpCallback():t.jsonpCallback,a?t[a]=t[a].replace(Yt,"$1"+o):!1!==t.jsonp&&(t.url+=(_t.test(t.url)?"&":"?")+t.jsonp+"="+o),t.converters["script json"]=function(){return s||w.error(o+" was not called"),s[0]},t.dataTypes[0]="json",r=e[o],e[o]=function(){s=arguments},i.always((function(){void 0===r?w(e).removeProp(o):e[o]=r,t[o]&&(t.jsonpCallback=n.jsonpCallback,Xt.push(o)),s&&f(r)&&r(s[0]),s=r=void 0})),"script"})),d.createHTMLDocument=((Ut=y.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Ut.childNodes.length),w.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(d.createHTMLDocument?((i=(t=y.implementation.createHTMLDocument("")).createElement("base")).href=y.location.href,t.head.appendChild(i)):t=y),r=!n&&[],(o=C.exec(e))?[t.createElement(o[1])]:(o=be([e],t,r),r&&r.length&&w(r).remove(),w.merge([],o.childNodes)));var i,o,r},w.fn.load=function(e,t,n){var i,o,r,s=this,a=e.indexOf(" ");return-1<a&&(i=ft(e.slice(a)),e=e.slice(0,a)),f(t)?(n=t,t=void 0):t&&"object"==typeof t&&(o="POST"),0<s.length&&w.ajax({url:e,type:o||"GET",dataType:"html",data:t}).done((function(e){r=arguments,s.html(i?w("<div>").append(w.parseHTML(e)).find(i):e)})).always(n&&function(e,t){s.each((function(){n.apply(this,r||[e.responseText,t,e])}))}),this},w.expr.pseudos.animated=function(e){return w.grep(w.timers,(function(t){return e===t.elem})).length},w.offset={setOffset:function(e,t,n){var i,o,r,s,a,l,c=w.css(e,"position"),u=w(e),p={};"static"===c&&(e.style.position="relative"),a=u.offset(),r=w.css(e,"top"),l=w.css(e,"left"),("absolute"===c||"fixed"===c)&&-1<(r+l).indexOf("auto")?(s=(i=u.position()).top,o=i.left):(s=parseFloat(r)||0,o=parseFloat(l)||0),f(t)&&(t=t.call(e,n,w.extend({},a))),null!=t.top&&(p.top=t.top-a.top+s),null!=t.left&&(p.left=t.left-a.left+o),"using"in t?t.using.call(e,p):u.css(p)}},w.fn.extend({offset:function(e){if(arguments.length)return void 0===e?this:this.each((function(t){w.offset.setOffset(this,e,t)}));var t,n,i=this[0];return i?i.getClientRects().length?(t=i.getBoundingClientRect(),n=i.ownerDocument.defaultView,{top:t.top+n.pageYOffset,left:t.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,i=this[0],o={top:0,left:0};if("fixed"===w.css(i,"position"))t=i.getBoundingClientRect();else{for(t=this.offset(),n=i.ownerDocument,e=i.offsetParent||n.documentElement;e&&(e===n.body||e===n.documentElement)&&"static"===w.css(e,"position");)e=e.parentNode;e&&e!==i&&1===e.nodeType&&((o=w(e).offset()).top+=w.css(e,"borderTopWidth",!0),o.left+=w.css(e,"borderLeftWidth",!0))}return{top:t.top-o.top-w.css(i,"marginTop",!0),left:t.left-o.left-w.css(i,"marginLeft",!0)}}},offsetParent:function(){return this.map((function(){for(var e=this.offsetParent;e&&"static"===w.css(e,"position");)e=e.offsetParent;return e||ie}))}}),w.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},(function(e,t){var 
n="pageYOffset"===t;w.fn[e]=function(i){return F(this,(function(e,i,o){var r;if(g(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===o)return r?r[t]:e[i];r?r.scrollTo(n?r.pageXOffset:o,n?o:r.pageYOffset):e[i]=o}),e,i,arguments.length)}})),w.each(["top","left"],(function(e,t){w.cssHooks[t]=ze(d.pixelPosition,(function(e,n){if(n)return n=qe(e,t),Pe.test(n)?w(e).position()[t]+"px":n}))})),w.each({Height:"height",Width:"width"},(function(e,t){w.each({padding:"inner"+e,content:t,"":"outer"+e},(function(n,i){w.fn[i]=function(o,r){var s=arguments.length&&(n||"boolean"!=typeof o),a=n||(!0===o||!0===r?"margin":"border");return F(this,(function(t,n,o){var r;return g(t)?0===i.indexOf("outer")?t["inner"+e]:t.document.documentElement["client"+e]:9===t.nodeType?(r=t.documentElement,Math.max(t.body["scroll"+e],r["scroll"+e],t.body["offset"+e],r["offset"+e],r["client"+e])):void 0===o?w.css(t,n,a):w.style(t,n,o,a)}),t,s?o:void 0,s)}}))})),w.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],(function(e,t){w.fn[t]=function(e){return this.on(t,e)}})),w.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,i){return this.on(t,e,n,i)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),w.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),(function(e,t){w.fn[t]=function(e,n){return 0<arguments.length?this.on(t,null,e,n):this.trigger(t)}}));var Bt=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;w.proxy=function(e,t){var n,i,r;if("string"==typeof t&&(n=e[t],t=e,e=n),f(e))return i=o.call(arguments,2),(r=function(){return e.apply(t||this,i.concat(o.call(arguments)))}).guid=e.guid=e.guid||w.guid++,r},w.holdReady=function(e){e?w.readyWait++:w.ready(!0)},w.isArray=Array.isArray,w.parseJSON=JSON.parse,w.nodeName=A,w.isFunction=f,w.isWindow=g,w.camelCase=B,w.type=b,w.now=Date.now,w.isNumeric=function(e){var t=w.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},w.trim=function(e){return null==e?"":(e+"").replace(Bt,"")},"function"==typeof define&&define.amd&&define("jquery",[],(function(){return w}));var Vt=e.jQuery,$t=e.$;return w.noConflict=function(t){return e.$===w&&(e.$=$t),t&&e.jQuery===w&&(e.jQuery=Vt),w},void 0===t&&(e.jQuery=e.$=w),w})),function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).Popper={})}(this,(function(e){function t(e){return{width:(e=e.getBoundingClientRect()).width,height:e.height,top:e.top,right:e.right,bottom:e.bottom,left:e.left,x:e.left,y:e.top}}function n(e){return null==e?window:"[object Window]"!==e.toString()?(e=e.ownerDocument)&&e.defaultView||window:e}function i(e){return{scrollLeft:(e=n(e)).pageXOffset,scrollTop:e.pageYOffset}}function o(e){return e instanceof n(e).Element||e instanceof Element}function r(e){return e instanceof n(e).HTMLElement||e instanceof HTMLElement}function s(e){return"undefined"!=typeof ShadowRoot&&(e instanceof n(e).ShadowRoot||e instanceof ShadowRoot)}function a(e){return e?(e.nodeName||"").toLowerCase():null}function l(e){return((o(e)?e.ownerDocument:e.document)||window.document).documentElement}function c(e){return t(l(e)).left+i(e).scrollLeft}function 
u(e){return n(e).getComputedStyle(e)}function p(e){return e=u(e),/auto|scroll|overlay|hidden/.test(e.overflow+e.overflowY+e.overflowX)}function h(e,o,s){void 0===s&&(s=!1);var u=l(o);e=t(e);var h=r(o),d={scrollLeft:0,scrollTop:0},f={x:0,y:0};return(h||!h&&!s)&&(("body"!==a(o)||p(u))&&(d=o!==n(o)&&r(o)?{scrollLeft:o.scrollLeft,scrollTop:o.scrollTop}:i(o)),r(o)?((f=t(o)).x+=o.clientLeft,f.y+=o.clientTop):u&&(f.x=c(u))),{x:e.left+d.scrollLeft-f.x,y:e.top+d.scrollTop-f.y,width:e.width,height:e.height}}function d(e){var n=t(e),i=e.offsetWidth,o=e.offsetHeight;return 1>=Math.abs(n.width-i)&&(i=n.width),1>=Math.abs(n.height-o)&&(o=n.height),{x:e.offsetLeft,y:e.offsetTop,width:i,height:o}}function f(e){return"html"===a(e)?e:e.assignedSlot||e.parentNode||(s(e)?e.host:null)||l(e)}function g(e){return 0<=["html","body","#document"].indexOf(a(e))?e.ownerDocument.body:r(e)&&p(e)?e:g(f(e))}function y(e,t){var i;void 0===t&&(t=[]);var o=g(e);return e=o===(null==(i=e.ownerDocument)?void 0:i.body),i=n(o),o=e?[i].concat(i.visualViewport||[],p(o)?o:[]):o,t=t.concat(o),e?t:t.concat(y(f(o)))}function m(e){return r(e)&&"fixed"!==u(e).position?e.offsetParent:null}function v(e){for(var t=n(e),i=m(e);i&&0<=["table","td","th"].indexOf(a(i))&&"static"===u(i).position;)i=m(i);if(i&&("html"===a(i)||"body"===a(i)&&"static"===u(i).position))return t;if(!i)e:{if(i=-1!==navigator.userAgent.toLowerCase().indexOf("firefox"),-1===navigator.userAgent.indexOf("Trident")||!r(e)||"fixed"!==u(e).position)for(e=f(e);r(e)&&0>["html","body"].indexOf(a(e));){var o=u(e);if("none"!==o.transform||"none"!==o.perspective||"paint"===o.contain||-1!==["transform","perspective"].indexOf(o.willChange)||i&&"filter"===o.willChange||i&&o.filter&&"none"!==o.filter){i=e;break e}e=e.parentNode}i=null}return i||t}function b(e){function t(e){i.add(e.name),[].concat(e.requires||[],e.requiresIfExists||[]).forEach((function(e){i.has(e)||(e=n.get(e))&&t(e)})),o.push(e)}var n=new Map,i=new Set,o=[];return e.forEach((function(e){n.set(e.name,e)})),e.forEach((function(e){i.has(e.name)||t(e)})),o}function x(e){var t;return function(){return t||(t=new Promise((function(n){Promise.resolve().then((function(){t=void 0,n(e())}))}))),t}}function w(e){return e.split("-")[0]}function _(e,t){var n=t.getRootNode&&t.getRootNode();if(e.contains(t))return!0;if(n&&s(n))do{if(t&&e.isSameNode(t))return!0;t=t.parentNode||t.host}while(t);return!1}function k(e){return Object.assign({},e,{left:e.x,top:e.y,right:e.x+e.width,bottom:e.y+e.height})}function E(e,o){if("viewport"===o){o=n(e);var s=l(e);o=o.visualViewport;var a=s.clientWidth;s=s.clientHeight;var p=0,h=0;o&&(a=o.width,s=o.height,/^((?!chrome|android).)*safari/i.test(navigator.userAgent)||(p=o.offsetLeft,h=o.offsetTop)),e=k(e={width:a,height:s,x:p+c(e),y:h})}else r(o)?((e=t(o)).top+=o.clientTop,e.left+=o.clientLeft,e.bottom=e.top+o.clientHeight,e.right=e.left+o.clientWidth,e.width=o.clientWidth,e.height=o.clientHeight,e.x=e.left,e.y=e.top):(h=l(e),e=l(h),a=i(h),o=null==(s=h.ownerDocument)?void 0:s.body,s=F(e.scrollWidth,e.clientWidth,o?o.scrollWidth:0,o?o.clientWidth:0),p=F(e.scrollHeight,e.clientHeight,o?o.scrollHeight:0,o?o.clientHeight:0),h=-a.scrollLeft+c(h),a=-a.scrollTop,"rtl"===u(o||e).direction&&(h+=F(e.clientWidth,o?o.clientWidth:0)-s),e=k({width:s,height:p,x:h,y:a}));return e}function T(e,t,n){return t="clippingParents"===t?function(e){var t=y(f(e)),n=0<=["absolute","fixed"].indexOf(u(e).position)&&r(e)?v(e):e;return o(n)?t.filter((function(e){return 
o(e)&&_(e,n)&&"body"!==a(e)})):[]}(e):[].concat(t),(n=(n=[].concat(t,[n])).reduce((function(t,n){return n=E(e,n),t.top=F(n.top,t.top),t.right=U(n.right,t.right),t.bottom=U(n.bottom,t.bottom),t.left=F(n.left,t.left),t}),E(e,n[0]))).width=n.right-n.left,n.height=n.bottom-n.top,n.x=n.left,n.y=n.top,n}function S(e){return 0<=["top","bottom"].indexOf(e)?"x":"y"}function A(e){var t=e.reference,n=e.element,i=(e=e.placement)?w(e):null;e=e?e.split("-")[1]:null;var o=t.x+t.width/2-n.width/2,r=t.y+t.height/2-n.height/2;switch(i){case"top":o={x:o,y:t.y-n.height};break;case"bottom":o={x:o,y:t.y+t.height};break;case"right":o={x:t.x+t.width,y:r};break;case"left":o={x:t.x-n.width,y:r};break;default:o={x:t.x,y:t.y}}if(null!=(i=i?S(i):null))switch(r="y"===i?"height":"width",e){case"start":o[i]-=t[r]/2-n[r]/2;break;case"end":o[i]+=t[r]/2-n[r]/2}return o}function C(e){return Object.assign({},{top:0,right:0,bottom:0,left:0},e)}function L(e,t){return t.reduce((function(t,n){return t[n]=e,t}),{})}function O(e,n){void 0===n&&(n={});var i=n;n=void 0===(n=i.placement)?e.placement:n;var r=i.boundary,s=void 0===r?"clippingParents":r,a=void 0===(r=i.rootBoundary)?"viewport":r;r=void 0===(r=i.elementContext)?"popper":r;var c=i.altBoundary,u=void 0!==c&&c;i=C("number"!=typeof(i=void 0===(i=i.padding)?0:i)?i:L(i,R));var p=e.elements.reference;c=e.rects.popper,s=T(o(u=e.elements[u?"popper"===r?"reference":"popper":r])?u:u.contextElement||l(e.elements.popper),s,a),u=A({reference:a=t(p),element:c,strategy:"absolute",placement:n}),c=k(Object.assign({},c,u)),a="popper"===r?c:a;var h={top:s.top-a.top+i.top,bottom:a.bottom-s.bottom+i.bottom,left:s.left-a.left+i.left,right:a.right-s.right+i.right};if(e=e.modifiersData.offset,"popper"===r&&e){var d=e[n];Object.keys(h).forEach((function(e){var t=0<=["right","bottom"].indexOf(e)?1:-1,n=0<=["top","bottom"].indexOf(e)?"y":"x";h[e]+=d[n]*t}))}return h}function M(){for(var e=arguments.length,t=Array(e),n=0;n<e;n++)t[n]=arguments[n];return!t.some((function(e){return!(e&&"function"==typeof e.getBoundingClientRect)}))}function j(e){void 0===e&&(e={});var t=e.defaultModifiers,n=void 0===t?[]:t,i=void 0===(e=e.defaultOptions)?Y:e;return function(e,t,r){function s(){l.forEach((function(e){return e()})),l=[]}void 0===r&&(r=i);var a={placement:"bottom",orderedModifiers:[],options:Object.assign({},Y,i),modifiersData:{},elements:{reference:e,popper:t},attributes:{},styles:{}},l=[],c=!1,u={state:a,setOptions:function(r){return s(),a.options=Object.assign({},i,a.options,r),a.scrollParents={reference:o(e)?y(e):e.contextElement?y(e.contextElement):[],popper:y(t)},r=function(e){var t=b(e);return W.reduce((function(e,n){return e.concat(t.filter((function(e){return e.phase===n})))}),[])}(function(e){var t=e.reduce((function(e,t){var n=e[t.name];return e[t.name]=n?Object.assign({},n,t,{options:Object.assign({},n.options,t.options),data:Object.assign({},n.data,t.data)}):t,e}),{});return Object.keys(t).map((function(e){return t[e]}))}([].concat(n,a.options.modifiers))),a.orderedModifiers=r.filter((function(e){return e.enabled})),a.orderedModifiers.forEach((function(e){var t=e.name,n=e.options;n=void 0===n?{}:n,"function"==typeof(e=e.effect)&&(t=e({state:a,name:t,instance:u,options:n}),l.push(t||function(){}))})),u.update()},forceUpdate:function(){if(!c){var e=a.elements,t=e.reference;if(M(t,e=e.popper))for(a.rects={reference:h(t,v(e),"fixed"===a.options.strategy),popper:d(e)},a.reset=!1,a.placement=a.options.placement,a.orderedModifiers.forEach((function(e){return 
a.modifiersData[e.name]=Object.assign({},e.data)})),t=0;t<a.orderedModifiers.length;t++)if(!0===a.reset)a.reset=!1,t=-1;else{var n=a.orderedModifiers[t];e=n.fn;var i=n.options;i=void 0===i?{}:i,n=n.name,"function"==typeof e&&(a=e({state:a,options:i,name:n,instance:u})||a)}}},update:x((function(){return new Promise((function(e){u.forceUpdate(),e(a)}))})),destroy:function(){s(),c=!0}};return M(e,t)?(u.setOptions(r).then((function(e){!c&&r.onFirstUpdate&&r.onFirstUpdate(e)})),u):u}}function N(e){var t,i=e.popper,o=e.popperRect,r=e.placement,s=e.offsets,a=e.position,c=e.gpuAcceleration,p=e.adaptive;if(!0===(e=e.roundOffsets)){e=s.y;var h=window.devicePixelRatio||1;e={x:X(X(s.x*h)/h)||0,y:X(X(e*h)/h)||0}}else e="function"==typeof e?e(s):s;e=void 0===(e=(h=e).x)?0:e,h=void 0===(h=h.y)?0:h;var d=s.hasOwnProperty("x");s=s.hasOwnProperty("y");var f,g="left",y="top",m=window;if(p){var b=v(i),x="clientHeight",w="clientWidth";b===n(i)&&"static"!==u(b=l(i)).position&&(x="scrollHeight",w="scrollWidth"),"top"===r&&(y="bottom",h-=b[x]-o.height,h*=c?1:-1),"left"===r&&(g="right",e-=b[w]-o.width,e*=c?1:-1)}return i=Object.assign({position:a},p&&K),c?Object.assign({},i,((f={})[y]=s?"0":"",f[g]=d?"0":"",f.transform=2>(m.devicePixelRatio||1)?"translate("+e+"px, "+h+"px)":"translate3d("+e+"px, "+h+"px, 0)",f)):Object.assign({},i,((t={})[y]=s?h+"px":"",t[g]=d?e+"px":"",t.transform="",t))}function D(e){return e.replace(/left|right|bottom|top/g,(function(e){return Z[e]}))}function P(e){return e.replace(/start|end/g,(function(e){return ee[e]}))}function I(e,t,n){return void 0===n&&(n={x:0,y:0}),{top:e.top-t.height-n.y,right:e.right-t.width+n.x,bottom:e.bottom-t.height+n.y,left:e.left-t.width-n.x}}function H(e){return["top","right","bottom","left"].some((function(t){return 0<=e[t]}))}var R=["top","bottom","right","left"],q=R.reduce((function(e,t){return e.concat([t+"-start",t+"-end"])}),[]),z=[].concat(R,["auto"]).reduce((function(e,t){return e.concat([t,t+"-start",t+"-end"])}),[]),W="beforeRead read afterRead beforeMain main afterMain beforeWrite write afterWrite".split(" "),F=Math.max,U=Math.min,X=Math.round,Y={placement:"bottom",modifiers:[],strategy:"absolute"},B={passive:!0},V={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(e){var t=e.state,i=e.instance,o=(e=e.options).scroll,r=void 0===o||o,s=void 0===(e=e.resize)||e,a=n(t.elements.popper),l=[].concat(t.scrollParents.reference,t.scrollParents.popper);return r&&l.forEach((function(e){e.addEventListener("scroll",i.update,B)})),s&&a.addEventListener("resize",i.update,B),function(){r&&l.forEach((function(e){e.removeEventListener("scroll",i.update,B)})),s&&a.removeEventListener("resize",i.update,B)}},data:{}},$={name:"popperOffsets",enabled:!0,phase:"read",fn:function(e){var t=e.state;t.modifiersData[e.name]=A({reference:t.rects.reference,element:t.rects.popper,strategy:"absolute",placement:t.placement})},data:{}},K={top:"auto",right:"auto",bottom:"auto",left:"auto"},Q={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(e){var t=e.state,n=e.options;e=void 0===(e=n.gpuAcceleration)||e;var i=n.adaptive;i=void 0===i||i,n=void 
0===(n=n.roundOffsets)||n,e={placement:w(t.placement),popper:t.elements.popper,popperRect:t.rects.popper,gpuAcceleration:e},null!=t.modifiersData.popperOffsets&&(t.styles.popper=Object.assign({},t.styles.popper,N(Object.assign({},e,{offsets:t.modifiersData.popperOffsets,position:t.options.strategy,adaptive:i,roundOffsets:n})))),null!=t.modifiersData.arrow&&(t.styles.arrow=Object.assign({},t.styles.arrow,N(Object.assign({},e,{offsets:t.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:n})))),t.attributes.popper=Object.assign({},t.attributes.popper,{"data-popper-placement":t.placement})},data:{}},G={name:"applyStyles",enabled:!0,phase:"write",fn:function(e){var t=e.state;Object.keys(t.elements).forEach((function(e){var n=t.styles[e]||{},i=t.attributes[e]||{},o=t.elements[e];r(o)&&a(o)&&(Object.assign(o.style,n),Object.keys(i).forEach((function(e){var t=i[e];!1===t?o.removeAttribute(e):o.setAttribute(e,!0===t?"":t)})))}))},effect:function(e){var t=e.state,n={popper:{position:t.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(t.elements.popper.style,n.popper),t.styles=n,t.elements.arrow&&Object.assign(t.elements.arrow.style,n.arrow),function(){Object.keys(t.elements).forEach((function(e){var i=t.elements[e],o=t.attributes[e]||{};e=Object.keys(t.styles.hasOwnProperty(e)?t.styles[e]:n[e]).reduce((function(e,t){return e[t]="",e}),{}),r(i)&&a(i)&&(Object.assign(i.style,e),Object.keys(o).forEach((function(e){i.removeAttribute(e)})))}))}},requires:["computeStyles"]},J={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(e){var t=e.state,n=e.name,i=void 0===(e=e.options.offset)?[0,0]:e,o=(e=z.reduce((function(e,n){var o=t.rects,r=w(n),s=0<=["left","top"].indexOf(r)?-1:1,a="function"==typeof i?i(Object.assign({},o,{placement:n})):i;return o=(o=a[0])||0,a=((a=a[1])||0)*s,r=0<=["left","right"].indexOf(r)?{x:a,y:o}:{x:o,y:a},e[n]=r,e}),{}))[t.placement],r=o.x;o=o.y,null!=t.modifiersData.popperOffsets&&(t.modifiersData.popperOffsets.x+=r,t.modifiersData.popperOffsets.y+=o),t.modifiersData[n]=e}},Z={left:"right",right:"left",bottom:"top",top:"bottom"},ee={start:"end",end:"start"},te={name:"flip",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options;if(e=e.name,!t.modifiersData[e]._skip){var i=n.mainAxis;i=void 0===i||i;var o=n.altAxis;o=void 0===o||o;var r=n.fallbackPlacements,s=n.padding,a=n.boundary,l=n.rootBoundary,c=n.altBoundary,u=n.flipVariations,p=void 0===u||u,h=n.allowedAutoPlacements;u=w(n=t.options.placement),r=r||(u!==n&&p?function(e){if("auto"===w(e))return[];var t=D(e);return[P(e),t,P(t)]}(n):[D(n)]);var d=[n].concat(r).reduce((function(e,n){return e.concat("auto"===w(n)?function(e,t){void 0===t&&(t={});var n=t.boundary,i=t.rootBoundary,o=t.padding,r=t.flipVariations,s=t.allowedAutoPlacements,a=void 0===s?z:s,l=t.placement.split("-")[1];0===(r=(t=l?r?q:q.filter((function(e){return e.split("-")[1]===l})):R).filter((function(e){return 0<=a.indexOf(e)}))).length&&(r=t);var c=r.reduce((function(t,r){return t[r]=O(e,{placement:r,boundary:n,rootBoundary:i,padding:o})[w(r)],t}),{});return Object.keys(c).sort((function(e,t){return c[e]-c[t]}))}(t,{placement:n,boundary:a,rootBoundary:l,padding:s,flipVariations:p,allowedAutoPlacements:h}):n)}),[]);n=t.rects.reference,r=t.rects.popper;var f=new Map;u=!0;for(var g=d[0],y=0;y<d.length;y++){var 
m=d[y],v=w(m),b="start"===m.split("-")[1],x=0<=["top","bottom"].indexOf(v),_=x?"width":"height",k=O(t,{placement:m,boundary:a,rootBoundary:l,altBoundary:c,padding:s});if(b=x?b?"right":"left":b?"bottom":"top",n[_]>r[_]&&(b=D(b)),_=D(b),x=[],i&&x.push(0>=k[v]),o&&x.push(0>=k[b],0>=k[_]),x.every((function(e){return e}))){g=m,u=!1;break}f.set(m,x)}if(u)for(i=function(e){var t=d.find((function(t){if(t=f.get(t))return t.slice(0,e).every((function(e){return e}))}));if(t)return g=t,"break"},o=p?3:1;0<o&&"break"!==i(o);o--);t.placement!==g&&(t.modifiersData[e]._skip=!0,t.placement=g,t.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}},ne={name:"preventOverflow",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options;e=e.name;var i=n.mainAxis,o=void 0===i||i,r=void 0!==(i=n.altAxis)&&i;i=void 0===(i=n.tether)||i;var s=n.tetherOffset,a=void 0===s?0:s,l=O(t,{boundary:n.boundary,rootBoundary:n.rootBoundary,padding:n.padding,altBoundary:n.altBoundary});n=w(t.placement);var c=t.placement.split("-")[1],u=!c,p=S(n);n="x"===p?"y":"x",s=t.modifiersData.popperOffsets;var h=t.rects.reference,f=t.rects.popper,g="function"==typeof a?a(Object.assign({},t.rects,{placement:t.placement})):a;if(a={x:0,y:0},s){if(o||r){var y="y"===p?"top":"left",m="y"===p?"bottom":"right",b="y"===p?"height":"width",x=s[p],_=s[p]+l[y],k=s[p]-l[m],E=i?-f[b]/2:0,T="start"===c?h[b]:f[b];c="start"===c?-f[b]:-h[b],f=t.elements.arrow,f=i&&f?d(f):{width:0,height:0};var A=t.modifiersData["arrow#persistent"]?t.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0};y=A[y],m=A[m],f=F(0,U(h[b],f[b])),T=u?h[b]/2-E-f-y-g:T-f-y-g,h=u?-h[b]/2+E+f+m+g:c+f+m+g,u=t.elements.arrow&&v(t.elements.arrow),g=t.modifiersData.offset?t.modifiersData.offset[t.placement][p]:0,u=s[p]+T-g-(u?"y"===p?u.clientTop||0:u.clientLeft||0:0),h=s[p]+h-g,o&&(o=i?U(_,u):_,k=i?F(k,h):k,o=F(o,U(x,k)),s[p]=o,a[p]=o-x),r&&(o=(r=s[n])+l["x"===p?"top":"left"],l=r-l["x"===p?"bottom":"right"],o=i?U(o,u):o,i=i?F(l,h):l,i=F(o,U(r,i)),s[n]=i,a[n]=i-r)}t.modifiersData[e]=a}},requiresIfExists:["offset"]},ie={name:"arrow",enabled:!0,phase:"main",fn:function(e){var t,n=e.state,i=e.name,o=e.options,r=n.elements.arrow,s=n.modifiersData.popperOffsets,a=w(n.placement);if(e=S(a),a=0<=["left","right"].indexOf(a)?"height":"width",r&&s){o=C("number"!=typeof(o="function"==typeof(o=o.padding)?o(Object.assign({},n.rects,{placement:n.placement})):o)?o:L(o,R));var l=d(r),c="y"===e?"top":"left",u="y"===e?"bottom":"right",p=n.rects.reference[a]+n.rects.reference[e]-s[e]-n.rects.popper[a];s=s[e]-n.rects.reference[e],s=(r=(r=v(r))?"y"===e?r.clientHeight||0:r.clientWidth||0:0)/2-l[a]/2+(p/2-s/2),a=F(o[c],U(s,r-l[a]-o[u])),n.modifiersData[i]=((t={})[e]=a,t.centerOffset=a-s,t)}},effect:function(e){var t=e.state;if(null!=(e=void 0===(e=e.options.element)?"[data-popper-arrow]":e)){if("string"==typeof e&&!(e=t.elements.popper.querySelector(e)))return;_(t.elements.popper,e)&&(t.elements.arrow=e)}},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]},oe={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(e){var t=e.state;e=e.name;var 
n=t.rects.reference,i=t.rects.popper,o=t.modifiersData.preventOverflow,r=O(t,{elementContext:"reference"}),s=O(t,{altBoundary:!0});n=I(r,n),i=I(s,i,o),o=H(n),s=H(i),t.modifiersData[e]={referenceClippingOffsets:n,popperEscapeOffsets:i,isReferenceHidden:o,hasPopperEscaped:s},t.attributes.popper=Object.assign({},t.attributes.popper,{"data-popper-reference-hidden":o,"data-popper-escaped":s})}},re=j({defaultModifiers:[V,$,Q,G]}),se=[V,$,Q,G,J,te,ne,ie,oe],ae=j({defaultModifiers:se});e.applyStyles=G,e.arrow=ie,e.computeStyles=Q,e.createPopper=ae,e.createPopperLite=re,e.defaultModifiers=se,e.detectOverflow=O,e.eventListeners=V,e.flip=te,e.hide=oe,e.offset=J,e.popperGenerator=j,e.popperOffsets=$,e.preventOverflow=ne,Object.defineProperty(e,"__esModule",{value:!0})})),function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t(require("@popperjs/core")):"function"==typeof define&&define.amd?define(["@popperjs/core"],t):(e="undefined"!=typeof globalThis?globalThis:e||self).bootstrap=t(e.Popper)}(this,(function(e){"use strict";var t=function(e){if(e&&e.__esModule)return e;var t=Object.create(null);return e&&Object.keys(e).forEach((function(n){if("default"!==n){var i=Object.getOwnPropertyDescriptor(e,n);Object.defineProperty(t,n,i.get?i:{enumerable:!0,get:function(){return e[n]}})}})),t.default=e,Object.freeze(t)}(e);const n={find:(e,t=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(t,e)),findOne:(e,t=document.documentElement)=>Element.prototype.querySelector.call(t,e),children:(e,t)=>[].concat(...e.children).filter((e=>e.matches(t))),parents(e,t){const n=[];let i=e.parentNode;for(;i&&i.nodeType===Node.ELEMENT_NODE&&3!==i.nodeType;)i.matches(t)&&n.push(i),i=i.parentNode;return n},prev(e,t){let n=e.previousElementSibling;for(;n;){if(n.matches(t))return[n];n=n.previousElementSibling}return[]},next(e,t){let n=e.nextElementSibling;for(;n;){if(n.matches(t))return[n];n=n.nextElementSibling}return[]}},i=e=>{do{e+=Math.floor(1e6*Math.random())}while(document.getElementById(e));return e},o=e=>{let t=e.getAttribute("data-bs-target");if(!t||"#"===t){let n=e.getAttribute("href");if(!n||!n.includes("#")&&!n.startsWith("."))return null;n.includes("#")&&!n.startsWith("#")&&(n="#"+n.split("#")[1]),t=n&&"#"!==n?n.trim():null}return t},r=e=>{const t=o(e);return t&&document.querySelector(t)?t:null},s=e=>{const t=o(e);return t?document.querySelector(t):null},a=e=>{if(!e)return 0;let{transitionDuration:t,transitionDelay:n}=window.getComputedStyle(e);const i=Number.parseFloat(t),o=Number.parseFloat(n);return i||o?(t=t.split(",")[0],n=n.split(",")[0],1e3*(Number.parseFloat(t)+Number.parseFloat(n))):0},l=e=>{e.dispatchEvent(new Event("transitionend"))},c=e=>!(!e||"object"!=typeof e)&&(void 0!==e.jquery&&(e=e[0]),void 0!==e.nodeType),u=e=>c(e)?e.jquery?e[0]:e:"string"==typeof e&&e.length>0?n.findOne(e):null,p=(e,t)=>{let n=!1;const i=t+5;e.addEventListener("transitionend",(function t(){n=!0,e.removeEventListener("transitionend",t)})),setTimeout((()=>{n||l(e)}),i)},h=(e,t,n)=>{Object.keys(n).forEach((i=>{const o=n[i],r=t[i],s=r&&c(r)?"element":null==(a=r)?""+a:{}.toString.call(a).match(/\s([a-z]+)/i)[1].toLowerCase();var a;if(!new RegExp(o).test(s))throw new TypeError(`${e.toUpperCase()}: Option "${i}" provided type "${s}" but expected type "${o}".`)}))},d=e=>{if(!e)return!1;if(e.style&&e.parentNode&&e.parentNode.style){const 
t=getComputedStyle(e),n=getComputedStyle(e.parentNode);return"none"!==t.display&&"none"!==n.display&&"hidden"!==t.visibility}return!1},f=e=>!e||e.nodeType!==Node.ELEMENT_NODE||!!e.classList.contains("disabled")||(void 0!==e.disabled?e.disabled:e.hasAttribute("disabled")&&"false"!==e.getAttribute("disabled")),g=e=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof e.getRootNode){const t=e.getRootNode();return t instanceof ShadowRoot?t:null}return e instanceof ShadowRoot?e:e.parentNode?g(e.parentNode):null},y=()=>{},m=e=>e.offsetHeight,v=()=>{const{jQuery:e}=window;return e&&!document.body.hasAttribute("data-bs-no-jquery")?e:null},b=()=>"rtl"===document.documentElement.dir,x=e=>{var t;t=()=>{const t=v();if(t){const n=e.NAME,i=t.fn[n];t.fn[n]=e.jQueryInterface,t.fn[n].Constructor=e,t.fn[n].noConflict=()=>(t.fn[n]=i,e.jQueryInterface)}},"loading"===document.readyState?document.addEventListener("DOMContentLoaded",t):t()},w=e=>{"function"==typeof e&&e()},_=new Map;var k={set(e,t,n){_.has(e)||_.set(e,new Map);const i=_.get(e);i.has(t)||0===i.size?i.set(t,n):console.error(`Bootstrap doesn't allow more than one instance per element. Bound instance: ${Array.from(i.keys())[0]}.`)},get:(e,t)=>_.has(e)&&_.get(e).get(t)||null,remove(e,t){if(!_.has(e))return;const n=_.get(e);n.delete(t),0===n.size&&_.delete(e)}};const E=/[^.]*(?=\..*)\.|.*/,T=/\..*/,S=/::\d+$/,A={};let C=1;const L={mouseenter:"mouseover",mouseleave:"mouseout"},O=/^(mouseenter|mouseleave)/i,M=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function j(e,t){return t&&`${t}::${C++}`||e.uidEvent||C++}function N(e){const t=j(e);return e.uidEvent=t,A[t]=A[t]||{},A[t]}function D(e,t,n=null){const i=Object.keys(e);for(let o=0,r=i.length;o<r;o++){const r=e[i[o]];if(r.originalHandler===t&&r.delegationSelector===n)return r}return null}function P(e,t,n){const i="string"==typeof t,o=i?n:t;let r=R(e);return M.has(r)||(r=e),[i,o,r]}function I(e,t,n,i,o){if("string"!=typeof t||!e)return;if(n||(n=i,i=null),O.test(t)){const e=e=>function(t){if(!t.relatedTarget||t.relatedTarget!==t.delegateTarget&&!t.delegateTarget.contains(t.relatedTarget))return e.call(this,t)};i?i=e(i):n=e(n)}const[r,s,a]=P(t,n,i),l=N(e),c=l[a]||(l[a]={}),u=D(c,s,r?n:null);if(u)return void(u.oneOff=u.oneOff&&o);const p=j(s,t.replace(E,"")),h=r?function(e,t,n){return function i(o){const r=e.querySelectorAll(t);for(let{target:s}=o;s&&s!==this;s=s.parentNode)for(let a=r.length;a--;)if(r[a]===s)return o.delegateTarget=s,i.oneOff&&q.off(e,o.type,t,n),n.apply(s,[o]);return null}}(e,n,i):function(e,t){return function n(i){return i.delegateTarget=e,n.oneOff&&q.off(e,i.type,t),t.apply(e,[i])}}(e,n);h.delegationSelector=r?n:null,h.originalHandler=s,h.oneOff=o,h.uidEvent=p,c[p]=h,e.addEventListener(a,h,r)}function H(e,t,n,i,o){const r=D(t[n],i,o);r&&(e.removeEventListener(n,r,Boolean(o)),delete t[n][r.uidEvent])}function R(e){return e=e.replace(T,""),L[e]||e}const q={on(e,t,n,i){I(e,t,n,i,!1)},one(e,t,n,i){I(e,t,n,i,!0)},off(e,t,n,i){if("string"!=typeof 
t||!e)return;const[o,r,s]=P(t,n,i),a=s!==t,l=N(e),c=t.startsWith(".");if(void 0!==r){if(!l||!l[s])return;return void H(e,l,s,r,o?n:null)}c&&Object.keys(l).forEach((n=>{!function(e,t,n,i){const o=t[n]||{};Object.keys(o).forEach((r=>{if(r.includes(i)){const i=o[r];H(e,t,n,i.originalHandler,i.delegationSelector)}}))}(e,l,n,t.slice(1))}));const u=l[s]||{};Object.keys(u).forEach((n=>{const i=n.replace(S,"");if(!a||t.includes(i)){const t=u[n];H(e,l,s,t.originalHandler,t.delegationSelector)}}))},trigger(e,t,n){if("string"!=typeof t||!e)return null;const i=v(),o=R(t),r=t!==o,s=M.has(o);let a,l=!0,c=!0,u=!1,p=null;return r&&i&&(a=i.Event(t,n),i(e).trigger(a),l=!a.isPropagationStopped(),c=!a.isImmediatePropagationStopped(),u=a.isDefaultPrevented()),s?(p=document.createEvent("HTMLEvents"),p.initEvent(o,l,!0)):p=new CustomEvent(t,{bubbles:l,cancelable:!0}),void 0!==n&&Object.keys(n).forEach((e=>{Object.defineProperty(p,e,{get:()=>n[e]})})),u&&p.preventDefault(),c&&e.dispatchEvent(p),p.defaultPrevented&&void 0!==a&&a.preventDefault(),p}};class z{constructor(e){(e=u(e))&&(this._element=e,k.set(this._element,this.constructor.DATA_KEY,this))}dispose(){k.remove(this._element,this.constructor.DATA_KEY),q.off(this._element,this.constructor.EVENT_KEY),Object.getOwnPropertyNames(this).forEach((e=>{this[e]=null}))}_queueCallback(e,t,n=!0){if(!n)return void w(e);const i=a(t);q.one(t,"transitionend",(()=>w(e))),p(t,i)}static getInstance(e){return k.get(e,this.DATA_KEY)}static get VERSION(){return"5.0.1"}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}static get DATA_KEY(){return"bs."+this.NAME}static get EVENT_KEY(){return"."+this.DATA_KEY}}class W extends z{static get NAME(){return"alert"}close(e){const t=e?this._getRootElement(e):this._element,n=this._triggerCloseEvent(t);null===n||n.defaultPrevented||this._removeElement(t)}_getRootElement(e){return s(e)||e.closest(".alert")}_triggerCloseEvent(e){return q.trigger(e,"close.bs.alert")}_removeElement(e){e.classList.remove("show");const t=e.classList.contains("fade");this._queueCallback((()=>this._destroyElement(e)),e,t)}_destroyElement(e){e.parentNode&&e.parentNode.removeChild(e),q.trigger(e,"closed.bs.alert")}static jQueryInterface(e){return this.each((function(){let t=k.get(this,"bs.alert");t||(t=new W(this)),"close"===e&&t[e](this)}))}static handleDismiss(e){return function(t){t&&t.preventDefault(),e.close(this)}}}q.on(document,"click.bs.alert.data-api",'[data-bs-dismiss="alert"]',W.handleDismiss(new W)),x(W);class F extends z{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(e){return this.each((function(){let t=k.get(this,"bs.button");t||(t=new F(this)),"toggle"===e&&t[e]()}))}}function U(e){return"true"===e||"false"!==e&&(e===Number(e).toString()?Number(e):""===e||"null"===e?null:e)}function X(e){return e.replace(/[A-Z]/g,(e=>"-"+e.toLowerCase()))}q.on(document,"click.bs.button.data-api",'[data-bs-toggle="button"]',(e=>{e.preventDefault();const t=e.target.closest('[data-bs-toggle="button"]');let n=k.get(t,"bs.button");n||(n=new F(t)),n.toggle()})),x(F);const Y={setDataAttribute(e,t,n){e.setAttribute("data-bs-"+X(t),n)},removeDataAttribute(e,t){e.removeAttribute("data-bs-"+X(t))},getDataAttributes(e){if(!e)return{};const t={};return Object.keys(e.dataset).filter((e=>e.startsWith("bs"))).forEach((n=>{let 
i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),t[i]=U(e.dataset[n])})),t},getDataAttribute:(e,t)=>U(e.getAttribute("data-bs-"+X(t))),offset(e){const t=e.getBoundingClientRect();return{top:t.top+document.body.scrollTop,left:t.left+document.body.scrollLeft}},position:e=>({top:e.offsetTop,left:e.offsetLeft})},B={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},V={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},$="next",K="prev",Q="left",G="right";class J extends z{constructor(e,t){super(e),this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(t),this._indicatorsElement=n.findOne(".carousel-indicators",this._element),this._touchSupported="ontouchstart"in document.documentElement||navigator.maxTouchPoints>0,this._pointerEvent=Boolean(window.PointerEvent),this._addEventListeners()}static get Default(){return B}static get NAME(){return"carousel"}next(){this._isSliding||this._slide($)}nextWhenVisible(){!document.hidden&&d(this._element)&&this.next()}prev(){this._isSliding||this._slide(K)}pause(e){e||(this._isPaused=!0),n.findOne(".carousel-item-next, .carousel-item-prev",this._element)&&(l(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null}cycle(e){e||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config&&this._config.interval&&!this._isPaused&&(this._updateInterval(),this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))}to(e){this._activeElement=n.findOne(".active.carousel-item",this._element);const t=this._getItemIndex(this._activeElement);if(e>this._items.length-1||e<0)return;if(this._isSliding)return void q.one(this._element,"slid.bs.carousel",(()=>this.to(e)));if(t===e)return this.pause(),void this.cycle();const i=e>t?$:K;this._slide(i,this._items[e])}_getConfig(e){return e={...B,...e},h("carousel",e,V),e}_handleSwipe(){const e=Math.abs(this.touchDeltaX);if(e<=40)return;const t=e/this.touchDeltaX;this.touchDeltaX=0,t&&this._slide(t>0?G:Q)}_addEventListeners(){this._config.keyboard&&q.on(this._element,"keydown.bs.carousel",(e=>this._keydown(e))),"hover"===this._config.pause&&(q.on(this._element,"mouseenter.bs.carousel",(e=>this.pause(e))),q.on(this._element,"mouseleave.bs.carousel",(e=>this.cycle(e)))),this._config.touch&&this._touchSupported&&this._addTouchEventListeners()}_addTouchEventListeners(){const e=e=>{!this._pointerEvent||"pen"!==e.pointerType&&"touch"!==e.pointerType?this._pointerEvent||(this.touchStartX=e.touches[0].clientX):this.touchStartX=e.clientX},t=e=>{this.touchDeltaX=e.touches&&e.touches.length>1?0:e.touches[0].clientX-this.touchStartX},i=e=>{!this._pointerEvent||"pen"!==e.pointerType&&"touch"!==e.pointerType||(this.touchDeltaX=e.clientX-this.touchStartX),this._handleSwipe(),"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((e=>this.cycle(e)),500+this._config.interval))};n.find(".carousel-item 
img",this._element).forEach((e=>{q.on(e,"dragstart.bs.carousel",(e=>e.preventDefault()))})),this._pointerEvent?(q.on(this._element,"pointerdown.bs.carousel",(t=>e(t))),q.on(this._element,"pointerup.bs.carousel",(e=>i(e))),this._element.classList.add("pointer-event")):(q.on(this._element,"touchstart.bs.carousel",(t=>e(t))),q.on(this._element,"touchmove.bs.carousel",(e=>t(e))),q.on(this._element,"touchend.bs.carousel",(e=>i(e))))}_keydown(e){/input|textarea/i.test(e.target.tagName)||("ArrowLeft"===e.key?(e.preventDefault(),this._slide(G)):"ArrowRight"===e.key&&(e.preventDefault(),this._slide(Q)))}_getItemIndex(e){return this._items=e&&e.parentNode?n.find(".carousel-item",e.parentNode):[],this._items.indexOf(e)}_getItemByOrder(e,t){const n=e===$,i=e===K,o=this._getItemIndex(t),r=this._items.length-1;if((i&&0===o||n&&o===r)&&!this._config.wrap)return t;const s=(o+(i?-1:1))%this._items.length;return-1===s?this._items[this._items.length-1]:this._items[s]}_triggerSlideEvent(e,t){const i=this._getItemIndex(e),o=this._getItemIndex(n.findOne(".active.carousel-item",this._element));return q.trigger(this._element,"slide.bs.carousel",{relatedTarget:e,direction:t,from:o,to:i})}_setActiveIndicatorElement(e){if(this._indicatorsElement){const t=n.findOne(".active",this._indicatorsElement);t.classList.remove("active"),t.removeAttribute("aria-current");const i=n.find("[data-bs-target]",this._indicatorsElement);for(let t=0;t<i.length;t++)if(Number.parseInt(i[t].getAttribute("data-bs-slide-to"),10)===this._getItemIndex(e)){i[t].classList.add("active"),i[t].setAttribute("aria-current","true");break}}}_updateInterval(){const e=this._activeElement||n.findOne(".active.carousel-item",this._element);if(!e)return;const t=Number.parseInt(e.getAttribute("data-bs-interval"),10);t?(this._config.defaultInterval=this._config.defaultInterval||this._config.interval,this._config.interval=t):this._config.interval=this._config.defaultInterval||this._config.interval}_slide(e,t){const i=this._directionToOrder(e),o=n.findOne(".active.carousel-item",this._element),r=this._getItemIndex(o),s=t||this._getItemByOrder(i,o),a=this._getItemIndex(s),l=Boolean(this._interval),c=i===$,u=c?"carousel-item-start":"carousel-item-end",p=c?"carousel-item-next":"carousel-item-prev",h=this._orderToDirection(i);if(s&&s.classList.contains("active"))return void(this._isSliding=!1);if(this._triggerSlideEvent(s,h).defaultPrevented)return;if(!o||!s)return;this._isSliding=!0,l&&this.pause(),this._setActiveIndicatorElement(s),this._activeElement=s;const d=()=>{q.trigger(this._element,"slid.bs.carousel",{relatedTarget:s,direction:h,from:r,to:a})};if(this._element.classList.contains("slide")){s.classList.add(p),m(s),o.classList.add(u),s.classList.add(u);const e=()=>{s.classList.remove(u,p),s.classList.add("active"),o.classList.remove("active",p,u),this._isSliding=!1,setTimeout(d,0)};this._queueCallback(e,o,!0)}else o.classList.remove("active"),s.classList.add("active"),this._isSliding=!1,d();l&&this.cycle()}_directionToOrder(e){return[G,Q].includes(e)?b()?e===Q?K:$:e===Q?$:K:e}_orderToDirection(e){return[$,K].includes(e)?b()?e===K?Q:G:e===K?G:Q:e}static carouselInterface(e,t){let n=k.get(e,"bs.carousel"),i={...B,...Y.getDataAttributes(e)};"object"==typeof t&&(i={...i,...t});const o="string"==typeof t?t:i.slide;if(n||(n=new J(e,i)),"number"==typeof t)n.to(t);else if("string"==typeof o){if(void 0===n[o])throw new TypeError(`No method named "${o}"`);n[o]()}else i.interval&&i.ride&&(n.pause(),n.cycle())}static jQueryInterface(e){return 
this.each((function(){J.carouselInterface(this,e)}))}static dataApiClickHandler(e){const t=s(this);if(!t||!t.classList.contains("carousel"))return;const n={...Y.getDataAttributes(t),...Y.getDataAttributes(this)},i=this.getAttribute("data-bs-slide-to");i&&(n.interval=!1),J.carouselInterface(t,n),i&&k.get(t,"bs.carousel").to(i),e.preventDefault()}}q.on(document,"click.bs.carousel.data-api","[data-bs-slide], [data-bs-slide-to]",J.dataApiClickHandler),q.on(window,"load.bs.carousel.data-api",(()=>{const e=n.find('[data-bs-ride="carousel"]');for(let t=0,n=e.length;t<n;t++)J.carouselInterface(e[t],k.get(e[t],"bs.carousel"))})),x(J);const Z={toggle:!0,parent:""},ee={toggle:"boolean",parent:"(string|element)"};class te extends z{constructor(e,t){super(e),this._isTransitioning=!1,this._config=this._getConfig(t),this._triggerArray=n.find(`[data-bs-toggle="collapse"][href="#${this._element.id}"],[data-bs-toggle="collapse"][data-bs-target="#${this._element.id}"]`);const i=n.find('[data-bs-toggle="collapse"]');for(let e=0,t=i.length;e<t;e++){const t=i[e],o=r(t),s=n.find(o).filter((e=>e===this._element));null!==o&&s.length&&(this._selector=o,this._triggerArray.push(t))}this._parent=this._config.parent?this._getParent():null,this._config.parent||this._addAriaAndCollapsedClass(this._element,this._triggerArray),this._config.toggle&&this.toggle()}static get Default(){return Z}static get NAME(){return"collapse"}toggle(){this._element.classList.contains("show")?this.hide():this.show()}show(){if(this._isTransitioning||this._element.classList.contains("show"))return;let e,t;this._parent&&(e=n.find(".show, .collapsing",this._parent).filter((e=>"string"==typeof this._config.parent?e.getAttribute("data-bs-parent")===this._config.parent:e.classList.contains("collapse"))),0===e.length&&(e=null));const i=n.findOne(this._selector);if(e){const n=e.find((e=>i!==e));if(t=n?k.get(n,"bs.collapse"):null,t&&t._isTransitioning)return}if(q.trigger(this._element,"show.bs.collapse").defaultPrevented)return;e&&e.forEach((e=>{i!==e&&te.collapseInterface(e,"hide"),t||k.set(e,"bs.collapse",null)}));const o=this._getDimension();this._element.classList.remove("collapse"),this._element.classList.add("collapsing"),this._element.style[o]=0,this._triggerArray.length&&this._triggerArray.forEach((e=>{e.classList.remove("collapsed"),e.setAttribute("aria-expanded",!0)})),this.setTransitioning(!0);const r="scroll"+(o[0].toUpperCase()+o.slice(1));this._queueCallback((()=>{this._element.classList.remove("collapsing"),this._element.classList.add("collapse","show"),this._element.style[o]="",this.setTransitioning(!1),q.trigger(this._element,"shown.bs.collapse")}),this._element,!0),this._element.style[o]=this._element[r]+"px"}hide(){if(this._isTransitioning||!this._element.classList.contains("show"))return;if(q.trigger(this._element,"hide.bs.collapse").defaultPrevented)return;const e=this._getDimension();this._element.style[e]=this._element.getBoundingClientRect()[e]+"px",m(this._element),this._element.classList.add("collapsing"),this._element.classList.remove("collapse","show");const t=this._triggerArray.length;if(t>0)for(let e=0;e<t;e++){const 
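/* Collapse.hide(), continued: each trigger whose target is no longer shown gets the
   "collapsed" class and aria-expanded="false" re-applied before the height transition runs. */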
t=this._triggerArray[e],n=s(t);n&&!n.classList.contains("show")&&(t.classList.add("collapsed"),t.setAttribute("aria-expanded",!1))}this.setTransitioning(!0),this._element.style[e]="",this._queueCallback((()=>{this.setTransitioning(!1),this._element.classList.remove("collapsing"),this._element.classList.add("collapse"),q.trigger(this._element,"hidden.bs.collapse")}),this._element,!0)}setTransitioning(e){this._isTransitioning=e}_getConfig(e){return(e={...Z,...e}).toggle=Boolean(e.toggle),h("collapse",e,ee),e}_getDimension(){return this._element.classList.contains("width")?"width":"height"}_getParent(){let{parent:e}=this._config;e=u(e);const t=`[data-bs-toggle="collapse"][data-bs-parent="${e}"]`;return n.find(t,e).forEach((e=>{const t=s(e);this._addAriaAndCollapsedClass(t,[e])})),e}_addAriaAndCollapsedClass(e,t){if(!e||!t.length)return;const n=e.classList.contains("show");t.forEach((e=>{n?e.classList.remove("collapsed"):e.classList.add("collapsed"),e.setAttribute("aria-expanded",n)}))}static collapseInterface(e,t){let n=k.get(e,"bs.collapse");const i={...Z,...Y.getDataAttributes(e),..."object"==typeof t&&t?t:{}};if(!n&&i.toggle&&"string"==typeof t&&/show|hide/.test(t)&&(i.toggle=!1),n||(n=new te(e,i)),"string"==typeof t){if(void 0===n[t])throw new TypeError(`No method named "${t}"`);n[t]()}}static jQueryInterface(e){return this.each((function(){te.collapseInterface(this,e)}))}}q.on(document,"click.bs.collapse.data-api",'[data-bs-toggle="collapse"]',(function(e){("A"===e.target.tagName||e.delegateTarget&&"A"===e.delegateTarget.tagName)&&e.preventDefault();const t=Y.getDataAttributes(this),i=r(this);n.find(i).forEach((e=>{const n=k.get(e,"bs.collapse");let i;n?(null===n._parent&&"string"==typeof t.parent&&(n._config.parent=t.parent,n._parent=n._getParent()),i="toggle"):i=t,te.collapseInterface(e,i)}))})),x(te);const ne=new RegExp("ArrowUp|ArrowDown|Escape"),ie=b()?"top-end":"top-start",oe=b()?"top-start":"top-end",re=b()?"bottom-end":"bottom-start",se=b()?"bottom-start":"bottom-end",ae=b()?"left-start":"right-start",le=b()?"right-start":"left-start",ce={offset:[0,2],boundary:"clippingParents",reference:"toggle",display:"dynamic",popperConfig:null,autoClose:!0},ue={offset:"(array|string|function)",boundary:"(string|element)",reference:"(string|element|object)",display:"string",popperConfig:"(null|object|function)",autoClose:"(boolean|string)"};class pe extends z{constructor(e,t){super(e),this._popper=null,this._config=this._getConfig(t),this._menu=this._getMenuElement(),this._inNavbar=this._detectNavbar(),this._addEventListeners()}static get Default(){return ce}static get DefaultType(){return ue}static get NAME(){return"dropdown"}toggle(){f(this._element)||(this._element.classList.contains("show")?this.hide():this.show())}show(){if(f(this._element)||this._menu.classList.contains("show"))return;const e=pe.getParentFromElement(this._element),n={relatedTarget:this._element};if(!q.trigger(this._element,"show.bs.dropdown",n).defaultPrevented){if(this._inNavbar)Y.setDataAttribute(this._menu,"popper","none");else{if(void 0===t)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let n=this._element;"parent"===this._config.reference?n=e:c(this._config.reference)?n=u(this._config.reference):"object"==typeof this._config.reference&&(n=this._config.reference);const i=this._getPopperConfig(),o=i.modifiers.find((e=>"applyStyles"===e.name&&!1===e.enabled));this._popper=t.createPopper(n,this._menu,i),o&&Y.setDataAttribute(this._menu,"popper","static")}"ontouchstart"in 
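/* iOS quirk: when "ontouchstart" is in documentElement, empty mouseover handlers (the
   no-op y) are attached to every <body> child so taps outside the open dropdown still
   produce the click event that closes it. */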
document.documentElement&&!e.closest(".navbar-nav")&&[].concat(...document.body.children).forEach((e=>q.on(e,"mouseover",y))),this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.toggle("show"),this._element.classList.toggle("show"),q.trigger(this._element,"shown.bs.dropdown",n)}}hide(){if(f(this._element)||!this._menu.classList.contains("show"))return;const e={relatedTarget:this._element};this._completeHide(e)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_addEventListeners(){q.on(this._element,"click.bs.dropdown",(e=>{e.preventDefault(),this.toggle()}))}_completeHide(e){q.trigger(this._element,"hide.bs.dropdown",e).defaultPrevented||("ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((e=>q.off(e,"mouseover",y))),this._popper&&this._popper.destroy(),this._menu.classList.remove("show"),this._element.classList.remove("show"),this._element.setAttribute("aria-expanded","false"),Y.removeDataAttribute(this._menu,"popper"),q.trigger(this._element,"hidden.bs.dropdown",e))}_getConfig(e){if(e={...this.constructor.Default,...Y.getDataAttributes(this._element),...e},h("dropdown",e,this.constructor.DefaultType),"object"==typeof e.reference&&!c(e.reference)&&"function"!=typeof e.reference.getBoundingClientRect)throw new TypeError("dropdown".toUpperCase()+': Option "reference" provided type "object" without a required "getBoundingClientRect" method.');return e}_getMenuElement(){return n.next(this._element,".dropdown-menu")[0]}_getPlacement(){const e=this._element.parentNode;if(e.classList.contains("dropend"))return ae;if(e.classList.contains("dropstart"))return le;const t="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return e.classList.contains("dropup")?t?oe:ie:t?se:re}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:e}=this._config;return"string"==typeof e?e.split(",").map((e=>Number.parseInt(e,10))):"function"==typeof e?t=>e(t,this._element):e}_getPopperConfig(){const e={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return"static"===this._config.display&&(e.modifiers=[{name:"applyStyles",enabled:!1}]),{...e,..."function"==typeof this._config.popperConfig?this._config.popperConfig(e):this._config.popperConfig}}_selectMenuItem(e){const t=n.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter(d);if(!t.length)return;let i=t.indexOf(e.target);"ArrowUp"===e.key&&i>0&&i--,"ArrowDown"===e.key&&i<t.length-1&&i++,i=-1===i?0:i,t[i].focus()}static dropdownInterface(e,t){let n=k.get(e,"bs.dropdown");if(n||(n=new pe(e,"object"==typeof t?t:null)),"string"==typeof t){if(void 0===n[t])throw new TypeError(`No method named "${t}"`);n[t]()}}static jQueryInterface(e){return this.each((function(){pe.dropdownInterface(this,e)}))}static clearMenus(e){if(e&&(2===e.button||"keyup"===e.type&&"Tab"!==e.key))return;const t=n.find('[data-bs-toggle="dropdown"]');for(let n=0,i=t.length;n<i;n++){const i=k.get(t[n],"bs.dropdown");if(!i||!1===i._config.autoClose)continue;if(!i._element.classList.contains("show"))continue;const o={relatedTarget:i._element};if(e){const 
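/* clearMenus(): composedPath() decides whether the click landed inside the menu, so the
   autoClose option ("inside" | "outside" | true | false) can veto the hide. */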
t=e.composedPath(),n=t.includes(i._menu);if(t.includes(i._element)||"inside"===i._config.autoClose&&!n||"outside"===i._config.autoClose&&n)continue;if(i._menu.contains(e.target)&&("keyup"===e.type&&"Tab"===e.key||/input|select|option|textarea|form/i.test(e.target.tagName)))continue;"click"===e.type&&(o.clickEvent=e)}i._completeHide(o)}}static getParentFromElement(e){return s(e)||e.parentNode}static dataApiKeydownHandler(e){if(/input|textarea/i.test(e.target.tagName)?"Space"===e.key||"Escape"!==e.key&&("ArrowDown"!==e.key&&"ArrowUp"!==e.key||e.target.closest(".dropdown-menu")):!ne.test(e.key))return;const t=this.classList.contains("show");if(!t&&"Escape"===e.key)return;if(e.preventDefault(),e.stopPropagation(),f(this))return;const i=()=>this.matches('[data-bs-toggle="dropdown"]')?this:n.prev(this,'[data-bs-toggle="dropdown"]')[0];if("Escape"===e.key)return i().focus(),void pe.clearMenus();t||"ArrowUp"!==e.key&&"ArrowDown"!==e.key?t&&"Space"!==e.key?pe.getInstance(i())._selectMenuItem(e):pe.clearMenus():i().click()}}q.on(document,"keydown.bs.dropdown.data-api",'[data-bs-toggle="dropdown"]',pe.dataApiKeydownHandler),q.on(document,"keydown.bs.dropdown.data-api",".dropdown-menu",pe.dataApiKeydownHandler),q.on(document,"click.bs.dropdown.data-api",pe.clearMenus),q.on(document,"keyup.bs.dropdown.data-api",pe.clearMenus),q.on(document,"click.bs.dropdown.data-api",'[data-bs-toggle="dropdown"]',(function(e){e.preventDefault(),pe.dropdownInterface(this)})),x(pe);const he=()=>{const e=document.documentElement.clientWidth;return Math.abs(window.innerWidth-e)},de=(e=he())=>{fe(),ge("body","paddingRight",(t=>t+e)),ge(".fixed-top, .fixed-bottom, .is-fixed, .sticky-top","paddingRight",(t=>t+e)),ge(".sticky-top","marginRight",(t=>t-e))},fe=()=>{const e=document.body.style.overflow;e&&Y.setDataAttribute(document.body,"overflow",e),document.body.style.overflow="hidden"},ge=(e,t,i)=>{const o=he();n.find(e).forEach((e=>{if(e!==document.body&&window.innerWidth>e.clientWidth+o)return;const n=e.style[t],r=window.getComputedStyle(e)[t];Y.setDataAttribute(e,t,n),e.style[t]=i(Number.parseFloat(r))+"px"}))},ye=()=>{me("body","overflow"),me("body","paddingRight"),me(".fixed-top, .fixed-bottom, .is-fixed, .sticky-top","paddingRight"),me(".sticky-top","marginRight")},me=(e,t)=>{n.find(e).forEach((e=>{const n=Y.getDataAttribute(e,t);void 0===n?e.style.removeProperty(t):(Y.removeDataAttribute(e,t),e.style[t]=n)}))},ve={isVisible:!0,isAnimated:!1,rootElement:document.body,clickCallback:null},be={isVisible:"boolean",isAnimated:"boolean",rootElement:"element",clickCallback:"(function|null)"};class xe{constructor(e){this._config=this._getConfig(e),this._isAppended=!1,this._element=null}show(e){this._config.isVisible?(this._append(),this._config.isAnimated&&m(this._getElement()),this._getElement().classList.add("show"),this._emulateAnimation((()=>{w(e)}))):w(e)}hide(e){this._config.isVisible?(this._getElement().classList.remove("show"),this._emulateAnimation((()=>{this.dispose(),w(e)}))):w(e)}_getElement(){if(!this._element){const e=document.createElement("div");e.className="modal-backdrop",this._config.isAnimated&&e.classList.add("fade"),this._element=e}return this._element}_getConfig(e){return(e={...ve,..."object"==typeof 
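/* xe is the shared backdrop helper used by Modal and Offcanvas; he() measures the
   scrollbar width, and de()/ye() lock and restore body/fixed-element padding while
   page scrolling is suppressed. */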
e?e:{}}).rootElement=e.rootElement||document.body,h("backdrop",e,be),e}_append(){this._isAppended||(this._config.rootElement.appendChild(this._getElement()),q.on(this._getElement(),"mousedown.bs.backdrop",(()=>{w(this._config.clickCallback)})),this._isAppended=!0)}dispose(){this._isAppended&&(q.off(this._element,"mousedown.bs.backdrop"),this._getElement().parentNode.removeChild(this._element),this._isAppended=!1)}_emulateAnimation(e){if(!this._config.isAnimated)return void w(e);const t=a(this._getElement());q.one(this._getElement(),"transitionend",(()=>w(e))),p(this._getElement(),t)}}const we={backdrop:!0,keyboard:!0,focus:!0},_e={backdrop:"(boolean|string)",keyboard:"boolean",focus:"boolean"};class ke extends z{constructor(e,t){super(e),this._config=this._getConfig(t),this._dialog=n.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._isShown=!1,this._ignoreBackdropClick=!1,this._isTransitioning=!1}static get Default(){return we}static get NAME(){return"modal"}toggle(e){return this._isShown?this.hide():this.show(e)}show(e){if(this._isShown||this._isTransitioning)return;this._isAnimated()&&(this._isTransitioning=!0);const t=q.trigger(this._element,"show.bs.modal",{relatedTarget:e});this._isShown||t.defaultPrevented||(this._isShown=!0,de(),document.body.classList.add("modal-open"),this._adjustDialog(),this._setEscapeEvent(),this._setResizeEvent(),q.on(this._element,"click.dismiss.bs.modal",'[data-bs-dismiss="modal"]',(e=>this.hide(e))),q.on(this._dialog,"mousedown.dismiss.bs.modal",(()=>{q.one(this._element,"mouseup.dismiss.bs.modal",(e=>{e.target===this._element&&(this._ignoreBackdropClick=!0)}))})),this._showBackdrop((()=>this._showElement(e))))}hide(e){if(e&&e.preventDefault(),!this._isShown||this._isTransitioning)return;if(q.trigger(this._element,"hide.bs.modal").defaultPrevented)return;this._isShown=!1;const t=this._isAnimated();t&&(this._isTransitioning=!0),this._setEscapeEvent(),this._setResizeEvent(),q.off(document,"focusin.bs.modal"),this._element.classList.remove("show"),q.off(this._element,"click.dismiss.bs.modal"),q.off(this._dialog,"mousedown.dismiss.bs.modal"),this._queueCallback((()=>this._hideModal()),this._element,t)}dispose(){[window,this._dialog].forEach((e=>q.off(e,".bs.modal"))),this._backdrop.dispose(),super.dispose(),q.off(document,"focusin.bs.modal")}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new xe({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_getConfig(e){return e={...we,...Y.getDataAttributes(this._element),...e},h("modal",e,_e),e}_showElement(e){const 
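/* Modal._showElement(): re-attach a detached modal to <body>, set role="dialog" and
   aria-modal, reset scroll positions, then fire "shown.bs.modal" after the fade. */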
t=this._isAnimated(),i=n.findOne(".modal-body",this._dialog);this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE||document.body.appendChild(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0,i&&(i.scrollTop=0),t&&m(this._element),this._element.classList.add("show"),this._config.focus&&this._enforceFocus(),this._queueCallback((()=>{this._config.focus&&this._element.focus(),this._isTransitioning=!1,q.trigger(this._element,"shown.bs.modal",{relatedTarget:e})}),this._dialog,t)}_enforceFocus(){q.off(document,"focusin.bs.modal"),q.on(document,"focusin.bs.modal",(e=>{document===e.target||this._element===e.target||this._element.contains(e.target)||this._element.focus()}))}_setEscapeEvent(){this._isShown?q.on(this._element,"keydown.dismiss.bs.modal",(e=>{this._config.keyboard&&"Escape"===e.key?(e.preventDefault(),this.hide()):this._config.keyboard||"Escape"!==e.key||this._triggerBackdropTransition()})):q.off(this._element,"keydown.dismiss.bs.modal")}_setResizeEvent(){this._isShown?q.on(window,"resize.bs.modal",(()=>this._adjustDialog())):q.off(window,"resize.bs.modal")}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove("modal-open"),this._resetAdjustments(),ye(),q.trigger(this._element,"hidden.bs.modal")}))}_showBackdrop(e){q.on(this._element,"click.dismiss.bs.modal",(e=>{this._ignoreBackdropClick?this._ignoreBackdropClick=!1:e.target===e.currentTarget&&(!0===this._config.backdrop?this.hide():"static"===this._config.backdrop&&this._triggerBackdropTransition())})),this._backdrop.show(e)}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(q.trigger(this._element,"hidePrevented.bs.modal").defaultPrevented)return;const e=this._element.scrollHeight>document.documentElement.clientHeight;e||(this._element.style.overflowY="hidden"),this._element.classList.add("modal-static");const t=a(this._dialog);q.off(this._element,"transitionend"),q.one(this._element,"transitionend",(()=>{this._element.classList.remove("modal-static"),e||(q.one(this._element,"transitionend",(()=>{this._element.style.overflowY=""})),p(this._element,t))})),p(this._element,t),this._element.focus()}_adjustDialog(){const e=this._element.scrollHeight>document.documentElement.clientHeight,t=he(),n=t>0;(!n&&e&&!b()||n&&!e&&b())&&(this._element.style.paddingLeft=t+"px"),(n&&!e&&!b()||!n&&e&&b())&&(this._element.style.paddingRight=t+"px")}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(e,t){return this.each((function(){const n=ke.getInstance(this)||new ke(this,"object"==typeof e?e:{});if("string"==typeof e){if(void 0===n[e])throw new TypeError(`No method named "${e}"`);n[e](t)}}))}}q.on(document,"click.bs.modal.data-api",'[data-bs-toggle="modal"]',(function(e){const t=s(this);["A","AREA"].includes(this.tagName)&&e.preventDefault(),q.one(t,"show.bs.modal",(e=>{e.defaultPrevented||q.one(t,"hidden.bs.modal",(()=>{d(this)&&this.focus()}))})),(ke.getInstance(t)||new ke(t)).toggle(this)})),x(ke);const Ee={backdrop:!0,keyboard:!0,scroll:!1},Te={backdrop:"boolean",keyboard:"boolean",scroll:"boolean"};class Se extends 
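/* Se = Offcanvas. Same lifecycle as Modal, but it toggles style.visibility instead of
   display and always animates its backdrop (isAnimated: !0 in _initializeBackDrop). */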
z{constructor(e,t){super(e),this._config=this._getConfig(t),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._addEventListeners()}static get NAME(){return"offcanvas"}static get Default(){return Ee}toggle(e){return this._isShown?this.hide():this.show(e)}show(e){this._isShown||q.trigger(this._element,"show.bs.offcanvas",{relatedTarget:e}).defaultPrevented||(this._isShown=!0,this._element.style.visibility="visible",this._backdrop.show(),this._config.scroll||(de(),this._enforceFocusOnElement(this._element)),this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add("show"),this._queueCallback((()=>{q.trigger(this._element,"shown.bs.offcanvas",{relatedTarget:e})}),this._element,!0))}hide(){this._isShown&&(q.trigger(this._element,"hide.bs.offcanvas").defaultPrevented||(q.off(document,"focusin.bs.offcanvas"),this._element.blur(),this._isShown=!1,this._element.classList.remove("show"),this._backdrop.hide(),this._queueCallback((()=>{this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._element.style.visibility="hidden",this._config.scroll||ye(),q.trigger(this._element,"hidden.bs.offcanvas")}),this._element,!0)))}dispose(){this._backdrop.dispose(),super.dispose(),q.off(document,"focusin.bs.offcanvas")}_getConfig(e){return e={...Ee,...Y.getDataAttributes(this._element),..."object"==typeof e?e:{}},h("offcanvas",e,Te),e}_initializeBackDrop(){return new xe({isVisible:this._config.backdrop,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:()=>this.hide()})}_enforceFocusOnElement(e){q.off(document,"focusin.bs.offcanvas"),q.on(document,"focusin.bs.offcanvas",(t=>{document===t.target||e===t.target||e.contains(t.target)||e.focus()})),e.focus()}_addEventListeners(){q.on(this._element,"click.dismiss.bs.offcanvas",'[data-bs-dismiss="offcanvas"]',(()=>this.hide())),q.on(this._element,"keydown.dismiss.bs.offcanvas",(e=>{this._config.keyboard&&"Escape"===e.key&&this.hide()}))}static jQueryInterface(e){return this.each((function(){const t=k.get(this,"bs.offcanvas")||new Se(this,"object"==typeof e?e:{});if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError(`No method named "${e}"`);t[e](this)}}))}}q.on(document,"click.bs.offcanvas.data-api",'[data-bs-toggle="offcanvas"]',(function(e){const t=s(this);if(["A","AREA"].includes(this.tagName)&&e.preventDefault(),f(this))return;q.one(t,"hidden.bs.offcanvas",(()=>{d(this)&&this.focus()}));const i=n.findOne(".offcanvas.show");i&&i!==t&&Se.getInstance(i).hide(),(k.get(t,"bs.offcanvas")||new Se(t)).toggle(this)})),q.on(window,"load.bs.offcanvas.data-api",(()=>{n.find(".offcanvas.show").forEach((e=>(k.get(e,"bs.offcanvas")||new Se(e)).show()))})),x(Se);const Ae=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Ce=/^(?:(?:https?|mailto|ftp|tel|file):|[^#&/:?]*(?:[#/?]|$))/i,Le=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[\d+/a-z]+=*$/i,Oe=(e,t)=>{const n=e.nodeName.toLowerCase();if(t.includes(n))return!Ae.has(n)||Boolean(Ce.test(e.nodeValue)||Le.test(e.nodeValue));const i=t.filter((e=>e instanceof RegExp));for(let e=0,t=i.length;e<t;e++)if(i[e].test(n))return!0;return!1};function Me(e,t,n){if(!e.length)return e;if(n&&"function"==typeof n)return n(e);const i=(new 
window.DOMParser).parseFromString(e,"text/html"),o=Object.keys(t),r=[].concat(...i.body.querySelectorAll("*"));for(let e=0,n=r.length;e<n;e++){const n=r[e],i=n.nodeName.toLowerCase();if(!o.includes(i)){n.parentNode.removeChild(n);continue}const s=[].concat(...n.attributes),a=[].concat(t["*"]||[],t[i]||[]);s.forEach((e=>{Oe(e,a)||n.removeAttribute(e.nodeName)}))}return i.body.innerHTML}const je=new RegExp("(^|\\s)bs-tooltip\\S+","g"),Ne=new Set(["sanitize","allowList","sanitizeFn"]),De={animation:"boolean",template:"string",title:"(string|element|function)",trigger:"string",delay:"(number|object)",html:"boolean",selector:"(string|boolean)",placement:"(string|function)",offset:"(array|string|function)",container:"(string|element|boolean)",fallbackPlacements:"array",boundary:"(string|element)",customClass:"(string|function)",sanitize:"boolean",sanitizeFn:"(null|function)",allowList:"object",popperConfig:"(null|object|function)"},Pe={AUTO:"auto",TOP:"top",RIGHT:b()?"left":"right",BOTTOM:"bottom",LEFT:b()?"right":"left"},Ie={animation:!0,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,selector:!1,placement:"top",offset:[0,0],container:!1,fallbackPlacements:["top","right","bottom","left"],boundary:"clippingParents",customClass:"",sanitize:!0,sanitizeFn:null,allowList:{"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},popperConfig:null},He={HIDE:"hide.bs.tooltip",HIDDEN:"hidden.bs.tooltip",SHOW:"show.bs.tooltip",SHOWN:"shown.bs.tooltip",INSERTED:"inserted.bs.tooltip",CLICK:"click.bs.tooltip",FOCUSIN:"focusin.bs.tooltip",FOCUSOUT:"focusout.bs.tooltip",MOUSEENTER:"mouseenter.bs.tooltip",MOUSELEAVE:"mouseleave.bs.tooltip"};class Re extends z{constructor(e,n){if(void 0===t)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(e),this._isEnabled=!0,this._timeout=0,this._hoverState="",this._activeTrigger={},this._popper=null,this._config=this._getConfig(n),this.tip=null,this._setListeners()}static get Default(){return Ie}static get NAME(){return"tooltip"}static get Event(){return He}static get DefaultType(){return De}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(e){if(this._isEnabled)if(e){const t=this._initializeOnDelegatedTarget(e);t._activeTrigger.click=!t._activeTrigger.click,t._isWithActiveTrigger()?t._enter(null,t):t._leave(null,t)}else{if(this.getTipElement().classList.contains("show"))return void this._leave(null,this);this._enter(null,this)}}dispose(){clearTimeout(this._timeout),q.off(this._element.closest(".modal"),"hide.bs.modal",this._hideModalHandler),this.tip&&this.tip.parentNode&&this.tip.parentNode.removeChild(this.tip),this._popper&&this._popper.destroy(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this.isWithContent()||!this._isEnabled)return;const e=q.trigger(this._element,this.constructor.Event.SHOW),n=g(this._element),o=null===n?this._element.ownerDocument.documentElement.contains(this._element):n.contains(this._element);if(e.defaultPrevented||!o)return;const 
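/* Tooltip.show(): assign a generated id, wire it up via aria-describedby, mount the tip
   in the configured container, then create or update the Popper instance. */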
r=this.getTipElement(),s=i(this.constructor.NAME);r.setAttribute("id",s),this._element.setAttribute("aria-describedby",s),this.setContent(),this._config.animation&&r.classList.add("fade");const a="function"==typeof this._config.placement?this._config.placement.call(this,r,this._element):this._config.placement,l=this._getAttachment(a);this._addAttachmentClass(l);const{container:c}=this._config;k.set(r,this.constructor.DATA_KEY,this),this._element.ownerDocument.documentElement.contains(this.tip)||(c.appendChild(r),q.trigger(this._element,this.constructor.Event.INSERTED)),this._popper?this._popper.update():this._popper=t.createPopper(this._element,r,this._getPopperConfig(l)),r.classList.add("show");const u="function"==typeof this._config.customClass?this._config.customClass():this._config.customClass;u&&r.classList.add(...u.split(" ")),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((e=>{q.on(e,"mouseover",y)}));const p=this.tip.classList.contains("fade");this._queueCallback((()=>{const e=this._hoverState;this._hoverState=null,q.trigger(this._element,this.constructor.Event.SHOWN),"out"===e&&this._leave(null,this)}),this.tip,p)}hide(){if(!this._popper)return;const e=this.getTipElement();if(q.trigger(this._element,this.constructor.Event.HIDE).defaultPrevented)return;e.classList.remove("show"),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((e=>q.off(e,"mouseover",y))),this._activeTrigger.click=!1,this._activeTrigger.focus=!1,this._activeTrigger.hover=!1;const t=this.tip.classList.contains("fade");this._queueCallback((()=>{this._isWithActiveTrigger()||("show"!==this._hoverState&&e.parentNode&&e.parentNode.removeChild(e),this._cleanTipClass(),this._element.removeAttribute("aria-describedby"),q.trigger(this._element,this.constructor.Event.HIDDEN),this._popper&&(this._popper.destroy(),this._popper=null))}),this.tip,t),this._hoverState=""}update(){null!==this._popper&&this._popper.update()}isWithContent(){return Boolean(this.getTitle())}getTipElement(){if(this.tip)return this.tip;const e=document.createElement("div");return e.innerHTML=this._config.template,this.tip=e.children[0],this.tip}setContent(){const e=this.getTipElement();this.setElementContent(n.findOne(".tooltip-inner",e),this.getTitle()),e.classList.remove("fade","show")}setElementContent(e,t){if(null!==e)return c(t)?(t=u(t),void(this._config.html?t.parentNode!==e&&(e.innerHTML="",e.appendChild(t)):e.textContent=t.textContent)):void(this._config.html?(this._config.sanitize&&(t=Me(t,this._config.allowList,this._config.sanitizeFn)),e.innerHTML=t):e.textContent=t)}getTitle(){let e=this._element.getAttribute("data-bs-original-title");return e||(e="function"==typeof this._config.title?this._config.title.call(this._element):this._config.title),e}updateAttachment(e){return"right"===e?"end":"left"===e?"start":e}_initializeOnDelegatedTarget(e,t){const n=this.constructor.DATA_KEY;return(t=t||k.get(e.delegateTarget,n))||(t=new this.constructor(e.delegateTarget,this._getDelegateConfig()),k.set(e.delegateTarget,n,t)),t}_getOffset(){const{offset:e}=this._config;return"string"==typeof e?e.split(",").map((e=>Number.parseInt(e,10))):"function"==typeof e?t=>e(t,this._element):e}_getPopperConfig(e){const 
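/* Popper config: flip/offset/preventOverflow/arrow modifiers plus an afterWrite hook
   (_handlePopperPlacementChange) so the bs-tooltip-* class tracks the real placement. */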
t={placement:e,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"onChange",enabled:!0,phase:"afterWrite",fn:e=>this._handlePopperPlacementChange(e)}],onFirstUpdate:e=>{e.options.placement!==e.placement&&this._handlePopperPlacementChange(e)}};return{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_addAttachmentClass(e){this.getTipElement().classList.add("bs-tooltip-"+this.updateAttachment(e))}_getAttachment(e){return Pe[e.toUpperCase()]}_setListeners(){this._config.trigger.split(" ").forEach((e=>{if("click"===e)q.on(this._element,this.constructor.Event.CLICK,this._config.selector,(e=>this.toggle(e)));else if("manual"!==e){const t="hover"===e?this.constructor.Event.MOUSEENTER:this.constructor.Event.FOCUSIN,n="hover"===e?this.constructor.Event.MOUSELEAVE:this.constructor.Event.FOCUSOUT;q.on(this._element,t,this._config.selector,(e=>this._enter(e))),q.on(this._element,n,this._config.selector,(e=>this._leave(e)))}})),this._hideModalHandler=()=>{this._element&&this.hide()},q.on(this._element.closest(".modal"),"hide.bs.modal",this._hideModalHandler),this._config.selector?this._config={...this._config,trigger:"manual",selector:""}:this._fixTitle()}_fixTitle(){const e=this._element.getAttribute("title"),t=typeof this._element.getAttribute("data-bs-original-title");(e||"string"!==t)&&(this._element.setAttribute("data-bs-original-title",e||""),!e||this._element.getAttribute("aria-label")||this._element.textContent||this._element.setAttribute("aria-label",e),this._element.setAttribute("title",""))}_enter(e,t){t=this._initializeOnDelegatedTarget(e,t),e&&(t._activeTrigger["focusin"===e.type?"focus":"hover"]=!0),t.getTipElement().classList.contains("show")||"show"===t._hoverState?t._hoverState="show":(clearTimeout(t._timeout),t._hoverState="show",t._config.delay&&t._config.delay.show?t._timeout=setTimeout((()=>{"show"===t._hoverState&&t.show()}),t._config.delay.show):t.show())}_leave(e,t){t=this._initializeOnDelegatedTarget(e,t),e&&(t._activeTrigger["focusout"===e.type?"focus":"hover"]=t._element.contains(e.relatedTarget)),t._isWithActiveTrigger()||(clearTimeout(t._timeout),t._hoverState="out",t._config.delay&&t._config.delay.hide?t._timeout=setTimeout((()=>{"out"===t._hoverState&&t.hide()}),t._config.delay.hide):t.hide())}_isWithActiveTrigger(){for(const e in this._activeTrigger)if(this._activeTrigger[e])return!0;return!1}_getConfig(e){const t=Y.getDataAttributes(this._element);return Object.keys(t).forEach((e=>{Ne.has(e)&&delete t[e]})),(e={...this.constructor.Default,...t,..."object"==typeof e&&e?e:{}}).container=!1===e.container?document.body:u(e.container),"number"==typeof e.delay&&(e.delay={show:e.delay,hide:e.delay}),"number"==typeof e.title&&(e.title=e.title.toString()),"number"==typeof e.content&&(e.content=e.content.toString()),h("tooltip",e,this.constructor.DefaultType),e.sanitize&&(e.template=Me(e.template,e.allowList,e.sanitizeFn)),e}_getDelegateConfig(){const e={};if(this._config)for(const t in this._config)this.constructor.Default[t]!==this._config[t]&&(e[t]=this._config[t]);return e}_cleanTipClass(){const 
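/* _cleanTipClass(): the regex je (/(^|\s)bs-tooltip\S+/g) strips stale placement
   classes before _addAttachmentClass applies the new one. */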
e=this.getTipElement(),t=e.getAttribute("class").match(je);null!==t&&t.length>0&&t.map((e=>e.trim())).forEach((t=>e.classList.remove(t)))}_handlePopperPlacementChange(e){const{state:t}=e;t&&(this.tip=t.elements.popper,this._cleanTipClass(),this._addAttachmentClass(this._getAttachment(t.placement)))}static jQueryInterface(e){return this.each((function(){let t=k.get(this,"bs.tooltip");const n="object"==typeof e&&e;if((t||!/dispose|hide/.test(e))&&(t||(t=new Re(this,n)),"string"==typeof e)){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}x(Re);const qe=new RegExp("(^|\\s)bs-popover\\S+","g"),ze={...Re.Default,placement:"right",offset:[0,8],trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="popover-arrow"></div><h3 class="popover-header"></h3><div class="popover-body"></div></div>'},We={...Re.DefaultType,content:"(string|element|function)"},Fe={HIDE:"hide.bs.popover",HIDDEN:"hidden.bs.popover",SHOW:"show.bs.popover",SHOWN:"shown.bs.popover",INSERTED:"inserted.bs.popover",CLICK:"click.bs.popover",FOCUSIN:"focusin.bs.popover",FOCUSOUT:"focusout.bs.popover",MOUSEENTER:"mouseenter.bs.popover",MOUSELEAVE:"mouseleave.bs.popover"};class Ue extends Re{static get Default(){return ze}static get NAME(){return"popover"}static get Event(){return Fe}static get DefaultType(){return We}isWithContent(){return this.getTitle()||this._getContent()}setContent(){const e=this.getTipElement();this.setElementContent(n.findOne(".popover-header",e),this.getTitle());let t=this._getContent();"function"==typeof t&&(t=t.call(this._element)),this.setElementContent(n.findOne(".popover-body",e),t),e.classList.remove("fade","show")}_addAttachmentClass(e){this.getTipElement().classList.add("bs-popover-"+this.updateAttachment(e))}_getContent(){return this._element.getAttribute("data-bs-content")||this._config.content}_cleanTipClass(){const e=this.getTipElement(),t=e.getAttribute("class").match(qe);null!==t&&t.length>0&&t.map((e=>e.trim())).forEach((t=>e.classList.remove(t)))}static jQueryInterface(e){return this.each((function(){let t=k.get(this,"bs.popover");const n="object"==typeof e?e:null;if((t||!/dispose|hide/.test(e))&&(t||(t=new Ue(this,n),k.set(this,"bs.popover",t)),"string"==typeof e)){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}x(Ue);const Xe={offset:10,method:"auto",target:""},Ye={offset:"number",method:"string",target:"(string|element)"};class Be extends z{constructor(e,t){super(e),this._scrollElement="BODY"===this._element.tagName?window:this._element,this._config=this._getConfig(t),this._selector=`${this._config.target} .nav-link, ${this._config.target} .list-group-item, ${this._config.target} .dropdown-item`,this._offsets=[],this._targets=[],this._activeTarget=null,this._scrollHeight=0,q.on(this._scrollElement,"scroll.bs.scrollspy",(()=>this._process())),this.refresh(),this._process()}static get Default(){return Xe}static get NAME(){return"scrollspy"}refresh(){const e=this._scrollElement===this._scrollElement.window?"offset":"position",t="auto"===this._config.method?e:this._config.method,i="position"===t?this._getScrollTop():0;this._offsets=[],this._targets=[],this._scrollHeight=this._getScrollHeight(),n.find(this._selector).map((e=>{const o=r(e),s=o?n.findOne(o):null;if(s){const e=s.getBoundingClientRect();if(e.width||e.height)return[Y[t](s).top+i,o]}return 
null})).filter((e=>e)).sort(((e,t)=>e[0]-t[0])).forEach((e=>{this._offsets.push(e[0]),this._targets.push(e[1])}))}dispose(){q.off(this._scrollElement,".bs.scrollspy"),super.dispose()}_getConfig(e){if("string"!=typeof(e={...Xe,...Y.getDataAttributes(this._element),..."object"==typeof e&&e?e:{}}).target&&c(e.target)){let{id:t}=e.target;t||(t=i("scrollspy"),e.target.id=t),e.target="#"+t}return h("scrollspy",e,Ye),e}_getScrollTop(){return this._scrollElement===window?this._scrollElement.pageYOffset:this._scrollElement.scrollTop}_getScrollHeight(){return this._scrollElement.scrollHeight||Math.max(document.body.scrollHeight,document.documentElement.scrollHeight)}_getOffsetHeight(){return this._scrollElement===window?window.innerHeight:this._scrollElement.getBoundingClientRect().height}_process(){const e=this._getScrollTop()+this._config.offset,t=this._getScrollHeight(),n=this._config.offset+t-this._getOffsetHeight();if(this._scrollHeight!==t&&this.refresh(),e>=n){const e=this._targets[this._targets.length-1];this._activeTarget!==e&&this._activate(e)}else{if(this._activeTarget&&e<this._offsets[0]&&this._offsets[0]>0)return this._activeTarget=null,void this._clear();for(let t=this._offsets.length;t--;)this._activeTarget!==this._targets[t]&&e>=this._offsets[t]&&(void 0===this._offsets[t+1]||e<this._offsets[t+1])&&this._activate(this._targets[t])}}_activate(e){this._activeTarget=e,this._clear();const t=this._selector.split(",").map((t=>`${t}[data-bs-target="${e}"],${t}[href="${e}"]`)),i=n.findOne(t.join(","));i.classList.contains("dropdown-item")?(n.findOne(".dropdown-toggle",i.closest(".dropdown")).classList.add("active"),i.classList.add("active")):(i.classList.add("active"),n.parents(i,".nav, .list-group").forEach((e=>{n.prev(e,".nav-link, .list-group-item").forEach((e=>e.classList.add("active"))),n.prev(e,".nav-item").forEach((e=>{n.children(e,".nav-link").forEach((e=>e.classList.add("active")))}))}))),q.trigger(this._scrollElement,"activate.bs.scrollspy",{relatedTarget:e})}_clear(){n.find(this._selector).filter((e=>e.classList.contains("active"))).forEach((e=>e.classList.remove("active")))}static jQueryInterface(e){return this.each((function(){const t=Be.getInstance(this)||new Be(this,"object"==typeof e?e:{});if("string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}q.on(window,"load.bs.scrollspy.data-api",(()=>{n.find('[data-bs-spy="scroll"]').forEach((e=>new Be(e)))})),x(Be);class Ve extends z{static get NAME(){return"tab"}show(){if(this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE&&this._element.classList.contains("active"))return;let e;const t=s(this._element),i=this._element.closest(".nav, .list-group");if(i){const t="UL"===i.nodeName||"OL"===i.nodeName?":scope > li > .active":".active";e=n.find(t,i),e=e[e.length-1]}const o=e?q.trigger(e,"hide.bs.tab",{relatedTarget:this._element}):null;if(q.trigger(this._element,"show.bs.tab",{relatedTarget:e}).defaultPrevented||null!==o&&o.defaultPrevented)return;this._activate(this._element,i);const r=()=>{q.trigger(e,"hidden.bs.tab",{relatedTarget:this._element}),q.trigger(this._element,"shown.bs.tab",{relatedTarget:e})};t?this._activate(t,t.parentNode,r):r()}_activate(e,t,i){const o=(!t||"UL"!==t.nodeName&&"OL"!==t.nodeName?n.children(t,".active"):n.find(":scope > li > 
.active",t))[0],r=i&&o&&o.classList.contains("fade"),s=()=>this._transitionComplete(e,o,i);o&&r?(o.classList.remove("show"),this._queueCallback(s,e,!0)):s()}_transitionComplete(e,t,i){if(t){t.classList.remove("active");const e=n.findOne(":scope > .dropdown-menu .active",t.parentNode);e&&e.classList.remove("active"),"tab"===t.getAttribute("role")&&t.setAttribute("aria-selected",!1)}e.classList.add("active"),"tab"===e.getAttribute("role")&&e.setAttribute("aria-selected",!0),m(e),e.classList.contains("fade")&&e.classList.add("show");let o=e.parentNode;if(o&&"LI"===o.nodeName&&(o=o.parentNode),o&&o.classList.contains("dropdown-menu")){const t=e.closest(".dropdown");t&&n.find(".dropdown-toggle",t).forEach((e=>e.classList.add("active"))),e.setAttribute("aria-expanded",!0)}i&&i()}static jQueryInterface(e){return this.each((function(){const t=k.get(this,"bs.tab")||new Ve(this);if("string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e]()}}))}}q.on(document,"click.bs.tab.data-api",'[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',(function(e){["A","AREA"].includes(this.tagName)&&e.preventDefault(),f(this)||(k.get(this,"bs.tab")||new Ve(this)).show()})),x(Ve);const $e={animation:"boolean",autohide:"boolean",delay:"number"},Ke={animation:!0,autohide:!0,delay:5e3};class Qe extends z{constructor(e,t){super(e),this._config=this._getConfig(t),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get DefaultType(){return $e}static get Default(){return Ke}static get NAME(){return"toast"}show(){q.trigger(this._element,"show.bs.toast").defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove("hide"),m(this._element),this._element.classList.add("showing"),this._queueCallback((()=>{this._element.classList.remove("showing"),this._element.classList.add("show"),q.trigger(this._element,"shown.bs.toast"),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this._element.classList.contains("show")&&(q.trigger(this._element,"hide.bs.toast").defaultPrevented||(this._element.classList.remove("show"),this._queueCallback((()=>{this._element.classList.add("hide"),q.trigger(this._element,"hidden.bs.toast")}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this._element.classList.contains("show")&&this._element.classList.remove("show"),super.dispose()}_getConfig(e){return e={...Ke,...Y.getDataAttributes(this._element),..."object"==typeof e&&e?e:{}},h("toast",e,this.constructor.DefaultType),e}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(e,t){switch(e.type){case"mouseover":case"mouseout":this._hasMouseInteraction=t;break;case"focusin":case"focusout":this._hasKeyboardInteraction=t}if(t)return void this._clearTimeout();const n=e.relatedTarget;this._element===n||this._element.contains(n)||this._maybeScheduleHide()}_setListeners(){q.on(this._element,"click.dismiss.bs.toast",'[data-bs-dismiss="toast"]',(()=>this.hide())),q.on(this._element,"mouseover.bs.toast",(e=>this._onInteraction(e,!0))),q.on(this._element,"mouseout.bs.toast",(e=>this._onInteraction(e,!1))),q.on(this._element,"focusin.bs.toast",(e=>this._onInteraction(e,!0))),q.on(this._element,"focusout.bs.toast",(e=>this._onInteraction(e,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static 
jQueryInterface(e){return this.each((function(){let t=k.get(this,"bs.toast");if(t||(t=new Qe(this,"object"==typeof e&&e)),"string"==typeof e){if(void 0===t[e])throw new TypeError(`No method named "${e}"`);t[e](this)}}))}}return x(Qe),{Alert:W,Button:F,Carousel:J,Collapse:te,Dropdown:pe,Modal:ke,Offcanvas:Se,Popover:Ue,ScrollSpy:Be,Tab:Ve,Toast:Qe,Tooltip:Re}})),function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e=e||self).PerfectScrollbar=t()}(this,(function(){"use strict";var e=Math.abs,t=Math.floor;function n(e){return getComputedStyle(e)}function i(e,t){for(var n in t){var i=t[n];"number"==typeof i&&(i+="px"),e.style[n]=i}return e}function o(e){var t=document.createElement("div");return t.className=e,t}function r(e,t){if(!b)throw new Error("No element matching method supported");return b.call(e,t)}function s(e){e.remove?e.remove():e.parentNode&&e.parentNode.removeChild(e)}function a(e,t){return Array.prototype.filter.call(e.children,(function(e){return r(e,t)}))}function l(e,t){var n=e.element.classList,i=x.state.scrolling(t);n.contains(i)?clearTimeout(w[t]):n.add(i)}function c(e,t){w[t]=setTimeout((function(){return e.isAlive&&e.element.classList.remove(x.state.scrolling(t))}),e.settings.scrollingThreshold)}function u(e,t){l(e,t),c(e,t)}function p(e){if("function"==typeof window.CustomEvent)return new CustomEvent(e);var t=document.createEvent("CustomEvent");return t.initCustomEvent(e,!1,!1,void 0),t}function h(e,t,n,i,o){var r;if(void 0===i&&(i=!0),void 0===o&&(o=!1),"top"===t)r=["contentHeight","containerHeight","scrollTop","y","up","down"];else{if("left"!==t)throw new Error("A proper axis should be provided");r=["contentWidth","containerWidth","scrollLeft","x","left","right"]}!function(e,t,n,i,o){var r=n[0],s=n[1],a=n[2],l=n[3],c=n[4],h=n[5];void 0===i&&(i=!0),void 0===o&&(o=!1);var d=e.element;e.reach[l]=null,1>d[a]&&(e.reach[l]="start"),d[a]>e[r]-e[s]-1&&(e.reach[l]="end"),t&&(d.dispatchEvent(p("ps-scroll-"+l)),0>t?d.dispatchEvent(p("ps-scroll-"+c)):0<t&&d.dispatchEvent(p("ps-scroll-"+h)),i&&u(e,l)),e.reach[l]&&(t||o)&&d.dispatchEvent(p("ps-"+l+"-reach-"+e.reach[l]))}(e,n,r,i,o)}function d(e){return parseInt(e,10)||0}function f(e){return r(e,"input,[contenteditable]")||r(e,"select,[contenteditable]")||r(e,"textarea,[contenteditable]")||r(e,"button,[contenteditable]")}function g(e){var n=Math.ceil,i=e.element,o=t(i.scrollTop),r=i.getBoundingClientRect();e.containerWidth=n(r.width),e.containerHeight=n(r.height),e.contentWidth=i.scrollWidth,e.contentHeight=i.scrollHeight,i.contains(e.scrollbarXRail)||(a(i,x.element.rail("x")).forEach((function(e){return s(e)})),i.appendChild(e.scrollbarXRail)),i.contains(e.scrollbarYRail)||(a(i,x.element.rail("y")).forEach((function(e){return 
s(e)})),i.appendChild(e.scrollbarYRail)),!e.settings.suppressScrollX&&e.containerWidth+e.settings.scrollXMarginOffset<e.contentWidth?(e.scrollbarXActive=!0,e.railXWidth=e.containerWidth-e.railXMarginWidth,e.railXRatio=e.containerWidth/e.railXWidth,e.scrollbarXWidth=y(e,d(e.railXWidth*e.containerWidth/e.contentWidth)),e.scrollbarXLeft=d((e.negativeScrollAdjustment+i.scrollLeft)*(e.railXWidth-e.scrollbarXWidth)/(e.contentWidth-e.containerWidth))):e.scrollbarXActive=!1,!e.settings.suppressScrollY&&e.containerHeight+e.settings.scrollYMarginOffset<e.contentHeight?(e.scrollbarYActive=!0,e.railYHeight=e.containerHeight-e.railYMarginHeight,e.railYRatio=e.containerHeight/e.railYHeight,e.scrollbarYHeight=y(e,d(e.railYHeight*e.containerHeight/e.contentHeight)),e.scrollbarYTop=d(o*(e.railYHeight-e.scrollbarYHeight)/(e.contentHeight-e.containerHeight))):e.scrollbarYActive=!1,e.scrollbarXLeft>=e.railXWidth-e.scrollbarXWidth&&(e.scrollbarXLeft=e.railXWidth-e.scrollbarXWidth),e.scrollbarYTop>=e.railYHeight-e.scrollbarYHeight&&(e.scrollbarYTop=e.railYHeight-e.scrollbarYHeight),m(i,e),e.scrollbarXActive?i.classList.add(x.state.active("x")):(i.classList.remove(x.state.active("x")),e.scrollbarXWidth=0,e.scrollbarXLeft=0,i.scrollLeft=!0===e.isRtl?e.contentWidth:0),e.scrollbarYActive?i.classList.add(x.state.active("y")):(i.classList.remove(x.state.active("y")),e.scrollbarYHeight=0,e.scrollbarYTop=0,i.scrollTop=0)}function y(e,t){var n=Math.min,i=Math.max;return e.settings.minScrollbarLength&&(t=i(t,e.settings.minScrollbarLength)),e.settings.maxScrollbarLength&&(t=n(t,e.settings.maxScrollbarLength)),t}function m(e,n){var o={width:n.railXWidth},r=t(e.scrollTop);o.left=n.isRtl?n.negativeScrollAdjustment+e.scrollLeft+n.containerWidth-n.contentWidth:e.scrollLeft,n.isScrollbarXUsingBottom?o.bottom=n.scrollbarXBottom-r:o.top=n.scrollbarXTop+r,i(n.scrollbarXRail,o);var s={top:r,height:n.railYHeight};n.isScrollbarYUsingRight?n.isRtl?s.right=n.contentWidth-(n.negativeScrollAdjustment+e.scrollLeft)-n.scrollbarYRight-n.scrollbarYOuterWidth-9:s.right=n.scrollbarYRight-e.scrollLeft:n.isRtl?s.left=n.negativeScrollAdjustment+e.scrollLeft+2*n.containerWidth-n.contentWidth-n.scrollbarYLeft-n.scrollbarYOuterWidth:s.left=n.scrollbarYLeft+e.scrollLeft,i(n.scrollbarYRail,s),i(n.scrollbarX,{left:n.scrollbarXLeft,width:n.scrollbarXWidth-n.railBorderXWidth}),i(n.scrollbarY,{top:n.scrollbarYTop,height:n.scrollbarYHeight-n.railBorderYWidth})}function v(e,t){function n(t){t.touches&&t.touches[0]&&(t[a]=t.touches[0].pageY),m[d]=v+w*(t[a]-b),l(e,f),g(e),t.stopPropagation(),t.preventDefault()}function i(){c(e,f),e[y].classList.remove(x.state.clicking),e.event.unbind(e.ownerDocument,"mousemove",n)}function o(t,o){v=m[d],o&&t.touches&&(t[a]=t.touches[0].pageY),b=t[a],w=(e[s]-e[r])/(e[u]-e[h]),o?e.event.bind(e.ownerDocument,"touchmove",n):(e.event.bind(e.ownerDocument,"mousemove",n),e.event.once(e.ownerDocument,"mouseup",i),t.preventDefault()),e[y].classList.add(x.state.clicking),t.stopPropagation()}var r=t[0],s=t[1],a=t[2],u=t[3],p=t[4],h=t[5],d=t[6],f=t[7],y=t[8],m=e.element,v=null,b=null,w=null;e.event.bind(e[p],"mousedown",(function(e){o(e)})),e.event.bind(e[p],"touchstart",(function(e){o(e,!0)}))}var b="undefined"!=typeof 
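/* Second bundled module: PerfectScrollbar. b is a vendor-prefixed Element#matches shim;
   x maps the ps__* class names; _ and E are small per-element event registries. */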
Element&&(Element.prototype.matches||Element.prototype.webkitMatchesSelector||Element.prototype.mozMatchesSelector||Element.prototype.msMatchesSelector),x={main:"ps",rtl:"ps__rtl",element:{thumb:function(e){return"ps__thumb-"+e},rail:function(e){return"ps__rail-"+e},consuming:"ps__child--consume"},state:{focus:"ps--focus",clicking:"ps--clicking",active:function(e){return"ps--active-"+e},scrolling:function(e){return"ps--scrolling-"+e}}},w={x:null,y:null},_=function(e){this.element=e,this.handlers={}},k={isEmpty:{configurable:!0}};_.prototype.bind=function(e,t){void 0===this.handlers[e]&&(this.handlers[e]=[]),this.handlers[e].push(t),this.element.addEventListener(e,t,!1)},_.prototype.unbind=function(e,t){var n=this;this.handlers[e]=this.handlers[e].filter((function(i){return!(!t||i===t)||(n.element.removeEventListener(e,i,!1),!1)}))},_.prototype.unbindAll=function(){for(var e in this.handlers)this.unbind(e)},k.isEmpty.get=function(){var e=this;return Object.keys(this.handlers).every((function(t){return 0===e.handlers[t].length}))},Object.defineProperties(_.prototype,k);var E=function(){this.eventElements=[]};E.prototype.eventElement=function(e){var t=this.eventElements.filter((function(t){return t.element===e}))[0];return t||(t=new _(e),this.eventElements.push(t)),t},E.prototype.bind=function(e,t,n){this.eventElement(e).bind(t,n)},E.prototype.unbind=function(e,t,n){var i=this.eventElement(e);i.unbind(t,n),i.isEmpty&&this.eventElements.splice(this.eventElements.indexOf(i),1)},E.prototype.unbindAll=function(){this.eventElements.forEach((function(e){return e.unbindAll()})),this.eventElements=[]},E.prototype.once=function(e,t,n){var i=this.eventElement(e),o=function(e){i.unbind(t,o),n(e)};i.bind(t,o)};var T={isWebKit:"undefined"!=typeof document&&"WebkitAppearance"in document.documentElement.style,supportsTouch:"undefined"!=typeof window&&("ontouchstart"in window||"maxTouchPoints"in window.navigator&&0<window.navigator.maxTouchPoints||window.DocumentTouch&&document instanceof window.DocumentTouch),supportsIePointer:"undefined"!=typeof navigator&&navigator.msMaxTouchPoints,isChrome:"undefined"!=typeof navigator&&/Chrome/i.test(navigator&&navigator.userAgent)},S={"click-rail":function(e){e.element,e.event.bind(e.scrollbarY,"mousedown",(function(e){return e.stopPropagation()})),e.event.bind(e.scrollbarYRail,"mousedown",(function(t){var n=t.pageY-window.pageYOffset-e.scrollbarYRail.getBoundingClientRect().top>e.scrollbarYTop?1:-1;e.element.scrollTop+=n*e.containerHeight,g(e),t.stopPropagation()})),e.event.bind(e.scrollbarX,"mousedown",(function(e){return e.stopPropagation()})),e.event.bind(e.scrollbarXRail,"mousedown",(function(t){var n=t.pageX-window.pageXOffset-e.scrollbarXRail.getBoundingClientRect().left>e.scrollbarXLeft?1:-1;e.element.scrollLeft+=n*e.containerWidth,g(e),t.stopPropagation()}))},"drag-thumb":function(e){v(e,["containerWidth","contentWidth","pageX","railXWidth","scrollbarX","scrollbarXWidth","scrollLeft","x","scrollbarXRail"]),v(e,["containerHeight","contentHeight","pageY","railYHeight","scrollbarY","scrollbarYHeight","scrollTop","y","scrollbarYRail"])},keyboard:function(e){var n=e.element,i=function(){return r(n,":hover")},o=function(){return r(e.scrollbarX,":focus")||r(e.scrollbarY,":focus")};e.event.bind(e.ownerDocument,"keydown",(function(r){if(!(r.isDefaultPrevented&&r.isDefaultPrevented()||r.defaultPrevented)&&(i()||o())){var s=document.activeElement?document.activeElement:e.ownerDocument.activeElement;if(s){if("IFRAME"===s.tagName)s=s.contentDocument.activeElement;else 
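/* keyboard handler: resolve the true active element through IFRAMEs and shadow roots
   before deciding whether this instance should consume the keystroke. */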
for(;s.shadowRoot;)s=s.shadowRoot.activeElement;if(f(s))return}var a=0,l=0;switch(r.which){case 37:a=r.metaKey?-e.contentWidth:r.altKey?-e.containerWidth:-30;break;case 38:l=r.metaKey?e.contentHeight:r.altKey?e.containerHeight:30;break;case 39:a=r.metaKey?e.contentWidth:r.altKey?e.containerWidth:30;break;case 40:l=r.metaKey?-e.contentHeight:r.altKey?-e.containerHeight:-30;break;case 32:l=r.shiftKey?e.containerHeight:-e.containerHeight;break;case 33:l=e.containerHeight;break;case 34:l=-e.containerHeight;break;case 36:l=e.contentHeight;break;case 35:l=-e.contentHeight;break;default:return}e.settings.suppressScrollX&&0!==a||e.settings.suppressScrollY&&0!==l||(n.scrollTop-=l,n.scrollLeft+=a,g(e),function(i,o){var r=t(n.scrollTop);if(0===i){if(!e.scrollbarYActive)return!1;if(0===r&&0<o||r>=e.contentHeight-e.containerHeight&&0>o)return!e.settings.wheelPropagation}var s=n.scrollLeft;if(0===o){if(!e.scrollbarXActive)return!1;if(0===s&&0>i||s>=e.contentWidth-e.containerWidth&&0<i)return!e.settings.wheelPropagation}return!0}(a,l)&&r.preventDefault())}}))},wheel:function(i){function o(e,t,i){if(!T.isWebKit&&s.querySelector("select:focus"))return!0;if(!s.contains(e))return!1;for(var o=e;o&&o!==s;){if(o.classList.contains(x.element.consuming))return!0;var r=n(o);if(i&&r.overflowY.match(/(scroll|auto)/)){var a=o.scrollHeight-o.clientHeight;if(0<a&&(0<o.scrollTop&&0>i||o.scrollTop<a&&0<i))return!0}if(t&&r.overflowX.match(/(scroll|auto)/)){var l=o.scrollWidth-o.clientWidth;if(0<l&&(0<o.scrollLeft&&0>t||o.scrollLeft<l&&0<t))return!0}o=o.parentNode}return!1}function r(n){var r=function(e){var t=e.deltaX,n=-1*e.deltaY;return(void 0===t||void 0===n)&&(t=-1*e.wheelDeltaX/6,n=e.wheelDeltaY/6),e.deltaMode&&1===e.deltaMode&&(t*=10,n*=10),t!=t&&n!=n&&(t=0,n=e.wheelDelta),e.shiftKey?[-n,-t]:[t,n]}(n),a=r[0],l=r[1];if(!o(n.target,a,l)){var c=!1;i.settings.useBothWheelAxes?i.scrollbarYActive&&!i.scrollbarXActive?(l?s.scrollTop-=l*i.settings.wheelSpeed:s.scrollTop+=a*i.settings.wheelSpeed,c=!0):i.scrollbarXActive&&!i.scrollbarYActive&&(a?s.scrollLeft+=a*i.settings.wheelSpeed:s.scrollLeft-=l*i.settings.wheelSpeed,c=!0):(s.scrollTop-=l*i.settings.wheelSpeed,s.scrollLeft+=a*i.settings.wheelSpeed),g(i),(c=c||function(n,o){var r=t(s.scrollTop),a=0===s.scrollTop,l=r+s.offsetHeight===s.scrollHeight,c=0===s.scrollLeft,u=s.scrollLeft+s.offsetWidth===s.scrollWidth;return!(e(o)>e(n)?a||l:c||u)||!i.settings.wheelPropagation}(a,l))&&!n.ctrlKey&&(n.stopPropagation(),n.preventDefault())}}var s=i.element;void 0===window.onwheel?void 0!==window.onmousewheel&&i.event.bind(s,"mousewheel",r):i.event.bind(s,"wheel",r)},touch:function(i){function o(n,o){var r=t(h.scrollTop),s=h.scrollLeft,a=e(n),l=e(o);if(l>a){if(0>o&&r===i.contentHeight-i.containerHeight||0<o&&0===r)return 0===window.scrollY&&0<o&&T.isChrome}else if(a>l&&(0>n&&s===i.contentWidth-i.containerWidth||0<n&&0===s))return!0;return!0}function r(e,t){h.scrollTop-=t,h.scrollLeft-=e,g(i)}function s(e){return e.targetTouches?e.targetTouches[0]:e}function a(e){return!(e.pointerType&&"pen"===e.pointerType&&0===e.buttons||(!e.targetTouches||1!==e.targetTouches.length)&&(!e.pointerType||"mouse"===e.pointerType||e.pointerType===e.MSPOINTER_TYPE_MOUSE))}function l(e){if(a(e)){var t=s(e);d.pageX=t.pageX,d.pageY=t.pageY,f=(new Date).getTime(),null!==m&&clearInterval(m)}}function c(e,t,i){if(!h.contains(e))return!1;for(var o=e;o&&o!==h;){if(o.classList.contains(x.element.consuming))return!0;var r=n(o);if(i&&r.overflowY.match(/(scroll|auto)/)){var 
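/* An ancestor with overflow auto/scroll that can still move in the event's direction
   "consumes" the wheel/touch delta, so PerfectScrollbar lets it scroll natively. */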
s=o.scrollHeight-o.clientHeight;if(0<s&&(0<o.scrollTop&&0>i||o.scrollTop<s&&0<i))return!0}if(t&&r.overflowX.match(/(scroll|auto)/)){var a=o.scrollWidth-o.clientWidth;if(0<a&&(0<o.scrollLeft&&0>t||o.scrollLeft<a&&0<t))return!0}o=o.parentNode}return!1}function u(e){if(a(e)){var t=s(e),n={pageX:t.pageX,pageY:t.pageY},i=n.pageX-d.pageX,l=n.pageY-d.pageY;if(c(e.target,i,l))return;r(i,l),d=n;var u=(new Date).getTime(),p=u-f;0<p&&(y.x=i/p,y.y=l/p,f=u),o(i,l)&&e.preventDefault()}}function p(){i.settings.swipeEasing&&(clearInterval(m),m=setInterval((function(){return i.isInitialized?void clearInterval(m):y.x||y.y?.01>e(y.x)&&.01>e(y.y)?void clearInterval(m):(r(30*y.x,30*y.y),y.x*=.8,void(y.y*=.8)):void clearInterval(m)}),10))}if(T.supportsTouch||T.supportsIePointer){var h=i.element,d={},f=0,y={},m=null;T.supportsTouch?(i.event.bind(h,"touchstart",l),i.event.bind(h,"touchmove",u),i.event.bind(h,"touchend",p)):T.supportsIePointer&&(window.PointerEvent?(i.event.bind(h,"pointerdown",l),i.event.bind(h,"pointermove",u),i.event.bind(h,"pointerup",p)):window.MSPointerEvent&&(i.event.bind(h,"MSPointerDown",l),i.event.bind(h,"MSPointerMove",u),i.event.bind(h,"MSPointerUp",p)))}}},A=function(e,r){var s=this;if(void 0===r&&(r={}),"string"==typeof e&&(e=document.querySelector(e)),!e||!e.nodeName)throw new Error("no element is specified to initialize PerfectScrollbar");for(var a in this.element=e,e.classList.add(x.main),this.settings={handlers:["click-rail","drag-thumb","keyboard","wheel","touch"],maxScrollbarLength:null,minScrollbarLength:null,scrollingThreshold:1e3,scrollXMarginOffset:0,scrollYMarginOffset:0,suppressScrollX:!1,suppressScrollY:!1,swipeEasing:!0,useBothWheelAxes:!1,wheelPropagation:!0,wheelSpeed:1},r)this.settings[a]=r[a];this.containerWidth=null,this.containerHeight=null,this.contentWidth=null,this.contentHeight=null;var l=function(){return e.classList.add(x.state.focus)},c=function(){return e.classList.remove(x.state.focus)};this.isRtl="rtl"===n(e).direction,!0===this.isRtl&&e.classList.add(x.rtl),this.isNegativeScroll=function(){var t,n=e.scrollLeft;return e.scrollLeft=-1,t=0>e.scrollLeft,e.scrollLeft=n,t}(),this.negativeScrollAdjustment=this.isNegativeScroll?e.scrollWidth-e.clientWidth:0,this.event=new E,this.ownerDocument=e.ownerDocument||document,this.scrollbarXRail=o(x.element.rail("x")),e.appendChild(this.scrollbarXRail),this.scrollbarX=o(x.element.thumb("x")),this.scrollbarXRail.appendChild(this.scrollbarX),this.scrollbarX.setAttribute("tabindex",0),this.event.bind(this.scrollbarX,"focus",l),this.event.bind(this.scrollbarX,"blur",c),this.scrollbarXActive=null,this.scrollbarXWidth=null,this.scrollbarXLeft=null;var u=n(this.scrollbarXRail);this.scrollbarXBottom=parseInt(u.bottom,10),isNaN(this.scrollbarXBottom)?(this.isScrollbarXUsingBottom=!1,this.scrollbarXTop=d(u.top)):this.isScrollbarXUsingBottom=!0,this.railBorderXWidth=d(u.borderLeftWidth)+d(u.borderRightWidth),i(this.scrollbarXRail,{display:"block"}),this.railXMarginWidth=d(u.marginLeft)+d(u.marginRight),i(this.scrollbarXRail,{display:""}),this.railXWidth=null,this.railXRatio=null,this.scrollbarYRail=o(x.element.rail("y")),e.appendChild(this.scrollbarYRail),this.scrollbarY=o(x.element.thumb("y")),this.scrollbarYRail.appendChild(this.scrollbarY),this.scrollbarY.setAttribute("tabindex",0),this.event.bind(this.scrollbarY,"focus",l),this.event.bind(this.scrollbarY,"blur",c),this.scrollbarYActive=null,this.scrollbarYHeight=null,this.scrollbarYTop=null;var 
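/* Constructor, continued: computed styles of the Y rail decide right- vs left-anchoring
   (isScrollbarYUsingRight), mirroring the bottom/top logic used for the X rail above. */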
p=n(this.scrollbarYRail);this.scrollbarYRight=parseInt(p.right,10),isNaN(this.scrollbarYRight)?(this.isScrollbarYUsingRight=!1,this.scrollbarYLeft=d(p.left)):this.isScrollbarYUsingRight=!0,this.scrollbarYOuterWidth=this.isRtl?function(e){var t=n(e);return d(t.width)+d(t.paddingLeft)+d(t.paddingRight)+d(t.borderLeftWidth)+d(t.borderRightWidth)}(this.scrollbarY):null,this.railBorderYWidth=d(p.borderTopWidth)+d(p.borderBottomWidth),i(this.scrollbarYRail,{display:"block"}),this.railYMarginHeight=d(p.marginTop)+d(p.marginBottom),i(this.scrollbarYRail,{display:""}),this.railYHeight=null,this.railYRatio=null,this.reach={x:0>=e.scrollLeft?"start":e.scrollLeft>=this.contentWidth-this.containerWidth?"end":null,y:0>=e.scrollTop?"start":e.scrollTop>=this.contentHeight-this.containerHeight?"end":null},this.isAlive=!0,this.settings.handlers.forEach((function(e){return S[e](s)})),this.lastScrollTop=t(e.scrollTop),this.lastScrollLeft=e.scrollLeft,this.event.bind(this.element,"scroll",(function(e){return s.onScroll(e)})),g(this)};return A.prototype.update=function(){this.isAlive&&(this.negativeScrollAdjustment=this.isNegativeScroll?this.element.scrollWidth-this.element.clientWidth:0,i(this.scrollbarXRail,{display:"block"}),i(this.scrollbarYRail,{display:"block"}),this.railXMarginWidth=d(n(this.scrollbarXRail).marginLeft)+d(n(this.scrollbarXRail).marginRight),this.railYMarginHeight=d(n(this.scrollbarYRail).marginTop)+d(n(this.scrollbarYRail).marginBottom),i(this.scrollbarXRail,{display:"none"}),i(this.scrollbarYRail,{display:"none"}),g(this),h(this,"top",0,!1,!0),h(this,"left",0,!1,!0),i(this.scrollbarXRail,{display:""}),i(this.scrollbarYRail,{display:""}))},A.prototype.onScroll=function(){this.isAlive&&(g(this),h(this,"top",this.element.scrollTop-this.lastScrollTop),h(this,"left",this.element.scrollLeft-this.lastScrollLeft),this.lastScrollTop=t(this.element.scrollTop),this.lastScrollLeft=this.element.scrollLeft)},A.prototype.destroy=function(){this.isAlive&&(this.event.unbindAll(),s(this.scrollbarX),s(this.scrollbarY),s(this.scrollbarXRail),s(this.scrollbarYRail),this.removePsClasses(),this.element=null,this.scrollbarX=null,this.scrollbarY=null,this.scrollbarXRail=null,this.scrollbarYRail=null,this.isAlive=!1)},A.prototype.removePsClasses=function(){this.element.className=this.element.className.split(" ").filter((function(e){return!e.match(/^ps([-_].+|)$/)})).join(" ")},A})),function(e,t,n,i){"use strict";function o(e,t,n){return setTimeout(c(e,n),t)}function r(e,t,n){return!!Array.isArray(e)&&(s(e,n[t],n),!0)}function s(e,t,n){var o;if(e)if(e.forEach)e.forEach(t,n);else if(e.length!==i)for(o=0;o<e.length;)t.call(n,e[o],o,e),o++;else for(o in e)e.hasOwnProperty(o)&&t.call(n,e[o],o,e)}function a(t,n,i){var o="DEPRECATED METHOD: "+n+"\n"+i+" AT \n";return function(){var n=new Error("get-stack-trace"),i=n&&n.stack?n.stack.replace(/^[^\(]+?[\n$]/gm,"").replace(/^\s+at\s+/gm,"").replace(/^Object.<anonymous>\s*\(/gm,"{anonymous}()@"):"Unknown Stack Trace",r=e.console&&(e.console.warn||e.console.log);return r&&r.call(e.console,o,i),t.apply(this,arguments)}}function l(e,t,n){var i,o=t.prototype;(i=e.prototype=Object.create(o)).constructor=e,i._super=o,n&&ie(i,n)}function c(e,t){return function(){return e.apply(t,arguments)}}function u(e,t){return typeof e==se?e.apply(t&&t[0]||i,t):e}function p(e,t){return e===i?t:e}function h(e,t,n){s(y(t),(function(t){e.addEventListener(t,n,!1)}))}function d(e,t,n){s(y(t),(function(t){e.removeEventListener(t,n,!1)}))}function 
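// ---- Editorial annotation (not part of the original minified bundle) ----
// The UMD factory that just closed returns the perfect-scrollbar v1.x
// constructor (minified as `A`). A minimal usage sketch, assuming the build
// exposes it under the conventional global `PerfectScrollbar`; every option
// shown maps to a default listed in the constructor above:
function perfectScrollbarUsageSketch() {       // illustration only, never called
  var ps = new PerfectScrollbar('#container', {
    wheelSpeed: 2,                             // multiplier in the wheel handler
    wheelPropagation: false,                   // swallow events at the edges
    suppressScrollX: true,                     // no horizontal rail at all
    minScrollbarLength: 20                     // clamp the thumb size (px)
  });
  ps.update();                                 // re-measure after content changes
  ps.destroy();                                // unbind handlers, remove rails
}
// (The code continuing below is the start of the Hammer.js IIFE.)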
f(e,t){for(;e;){if(e==t)return!0;e=e.parentNode}return!1}function g(e,t){return e.indexOf(t)>-1}function y(e){return e.trim().split(/\s+/g)}function m(e,t,n){if(e.indexOf&&!n)return e.indexOf(t);for(var i=0;i<e.length;){if(n&&e[i][n]==t||!n&&e[i]===t)return i;i++}return-1}function v(e){return Array.prototype.slice.call(e,0)}function b(e,t,n){for(var i=[],o=[],r=0;r<e.length;){var s=t?e[r][t]:e[r];m(o,s)<0&&i.push(e[r]),o[r]=s,r++}return n&&(i=t?i.sort((function(e,n){return e[t]>n[t]})):i.sort()),i}function x(e,t){for(var n,o,r=t[0].toUpperCase()+t.slice(1),s=0;s<oe.length;){if((o=(n=oe[s])?n+r:t)in e)return o;s++}return i}function w(t){var n=t.ownerDocument||t;return n.defaultView||n.parentWindow||e}function _(e,t){var n=this;this.manager=e,this.callback=t,this.element=e.element,this.target=e.options.inputTarget,this.domHandler=function(t){u(e.options.enable,[e])&&n.handler(t)},this.init()}function k(e,t,n){var i=n.pointers.length,o=n.changedPointers.length,r=t&be&&i-o==0,s=t&(we|_e)&&i-o==0;n.isFirst=!!r,n.isFinal=!!s,r&&(e.session={}),n.eventType=t,function(e,t){var n=e.session,i=t.pointers,o=i.length;n.firstInput||(n.firstInput=T(t)),o>1&&!n.firstMultiple?n.firstMultiple=T(t):1===o&&(n.firstMultiple=!1);var r=n.firstInput,s=n.firstMultiple,a=s?s.center:r.center,l=t.center=S(i);t.timeStamp=ce(),t.deltaTime=t.timeStamp-r.timeStamp,t.angle=O(a,l),t.distance=L(a,l),function(e,t){var n=t.center,i=e.offsetDelta||{},o=e.prevDelta||{},r=e.prevInput||{};t.eventType!==be&&r.eventType!==we||(o=e.prevDelta={x:r.deltaX||0,y:r.deltaY||0},i=e.offsetDelta={x:n.x,y:n.y}),t.deltaX=o.x+(n.x-i.x),t.deltaY=o.y+(n.y-i.y)}(n,t),t.offsetDirection=C(t.deltaX,t.deltaY);var c=A(t.deltaTime,t.deltaX,t.deltaY);t.overallVelocityX=c.x,t.overallVelocityY=c.y,t.overallVelocity=le(c.x)>le(c.y)?c.x:c.y,t.scale=s?function(e,t){return L(t[0],t[1],je)/L(e[0],e[1],je)}(s.pointers,i):1,t.rotation=s?function(e,t){return O(t[1],t[0],je)+O(e[1],e[0],je)}(s.pointers,i):0,t.maxPointers=n.prevInput?t.pointers.length>n.prevInput.maxPointers?t.pointers.length:n.prevInput.maxPointers:t.pointers.length,E(n,t);var u=e.element;f(t.srcEvent.target,u)&&(u=t.srcEvent.target),t.target=u}(e,n),e.emit("hammer.input",n),e.recognize(n),e.session.prevInput=n}function E(e,t){var n,o,r,s,a=e.lastInterval||t,l=t.timeStamp-a.timeStamp;if(t.eventType!=_e&&(l>ve||a.velocity===i)){var c=t.deltaX-a.deltaX,u=t.deltaY-a.deltaY,p=A(l,c,u);o=p.x,r=p.y,n=le(p.x)>le(p.y)?p.x:p.y,s=C(c,u),e.lastInterval=t}else n=a.velocity,o=a.velocityX,r=a.velocityY,s=a.direction;t.velocity=n,t.velocityX=o,t.velocityY=r,t.direction=s}function T(e){for(var t=[],n=0;n<e.pointers.length;)t[n]={clientX:ae(e.pointers[n].clientX),clientY:ae(e.pointers[n].clientY)},n++;return{timeStamp:ce(),pointers:t,center:S(t),deltaX:e.deltaX,deltaY:e.deltaY}}function S(e){var t=e.length;if(1===t)return{x:ae(e[0].clientX),y:ae(e[0].clientY)};for(var n=0,i=0,o=0;t>o;)n+=e[o].clientX,i+=e[o].clientY,o++;return{x:ae(n/t),y:ae(i/t)}}function A(e,t,n){return{x:t/e||0,y:n/e||0}}function C(e,t){return e===t?ke:le(e)>=le(t)?0>e?Ee:Te:0>t?Se:Ae}function L(e,t,n){n||(n=Me);var i=t[n[0]]-e[n[0]],o=t[n[1]]-e[n[1]];return Math.sqrt(i*i+o*o)}function O(e,t,n){n||(n=Me);var i=t[n[0]]-e[n[0]],o=t[n[1]]-e[n[1]];return 180*Math.atan2(o,i)/Math.PI}function M(){this.evEl=De,this.evWin=Pe,this.pressed=!1,_.apply(this,arguments)}function j(){this.evEl=Re,this.evWin=qe,_.apply(this,arguments),this.store=this.manager.session.pointerEvents=[]}function 
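// ---- Editorial annotation (not part of the original minified bundle) ----
// The minified helpers A, C, L and O above are Hammer.js's velocity,
// direction, distance and angle computations. Equivalent readable forms
// (names ours; the original C returns direction bit-flags rather than
// strings, and L/O default `props` to ['x','y']):
function getVelocitySketch(deltaTime, dx, dy) {
  return { x: dx / deltaTime || 0, y: dy / deltaTime || 0 };
}
function getDirectionSketch(dx, dy) {
  if (dx === dy) return 'none';
  if (Math.abs(dx) >= Math.abs(dy)) return dx < 0 ? 'left' : 'right';
  return dy < 0 ? 'up' : 'down';
}
function getDistanceSketch(p1, p2, props) {
  props = props || ['x', 'y'];
  var dx = p2[props[0]] - p1[props[0]];
  var dy = p2[props[1]] - p1[props[1]];
  return Math.sqrt(dx * dx + dy * dy);
}
function getAngleSketch(p1, p2, props) {
  props = props || ['x', 'y'];
  var dx = p2[props[0]] - p1[props[0]];
  var dy = p2[props[1]] - p1[props[1]];
  return (180 * Math.atan2(dy, dx)) / Math.PI;  // degrees
}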
N(){this.evTarget=We,this.evWin=Fe,this.started=!1,_.apply(this,arguments)}function D(e,t){var n=v(e.touches),i=v(e.changedTouches);return t&(we|_e)&&(n=b(n.concat(i),"identifier",!0)),[n,i]}function P(){this.evTarget=Xe,this.targetIds={},_.apply(this,arguments)}function I(e,t){var n=v(e.touches),i=this.targetIds;if(t&(be|xe)&&1===n.length)return i[n[0].identifier]=!0,[n,n];var o,r,s=v(e.changedTouches),a=[],l=this.target;if(r=n.filter((function(e){return f(e.target,l)})),t===be)for(o=0;o<r.length;)i[r[o].identifier]=!0,o++;for(o=0;o<s.length;)i[s[o].identifier]&&a.push(s[o]),t&(we|_e)&&delete i[s[o].identifier],o++;return a.length?[b(r.concat(a),"identifier",!0),a]:void 0}function H(){_.apply(this,arguments);var e=c(this.handler,this);this.touch=new P(this.manager,e),this.mouse=new M(this.manager,e),this.primaryTouch=null,this.lastTouches=[]}function R(e,t){e&be?(this.primaryTouch=t.changedPointers[0].identifier,q.call(this,t)):e&(we|_e)&&q.call(this,t)}function q(e){var t=e.changedPointers[0];if(t.identifier===this.primaryTouch){var n={x:t.clientX,y:t.clientY};this.lastTouches.push(n);var i=this.lastTouches;setTimeout((function(){var e=i.indexOf(n);e>-1&&i.splice(e,1)}),Ye)}}function z(e){for(var t=e.srcEvent.clientX,n=e.srcEvent.clientY,i=0;i<this.lastTouches.length;i++){var o=this.lastTouches[i],r=Math.abs(t-o.x),s=Math.abs(n-o.y);if(Be>=r&&Be>=s)return!0}return!1}function W(e,t){this.manager=e,this.set(t)}function F(e){this.options=ie({},this.defaults,e||{}),this.id=he++,this.manager=null,this.options.enable=p(this.options.enable,!0),this.state=nt,this.simultaneous={},this.requireFail=[]}function U(e){return e&at?"cancel":e&rt?"end":e&ot?"move":e&it?"start":""}function X(e){return e==Ae?"down":e==Se?"up":e==Ee?"left":e==Te?"right":""}function Y(e,t){var n=t.manager;return n?n.get(e):e}function B(){F.apply(this,arguments)}function V(){B.apply(this,arguments),this.pX=null,this.pY=null}function $(){B.apply(this,arguments)}function K(){F.apply(this,arguments),this._timer=null,this._input=null}function Q(){B.apply(this,arguments)}function G(){B.apply(this,arguments)}function J(){F.apply(this,arguments),this.pTime=!1,this.pCenter=!1,this._timer=null,this._input=null,this.count=0}function Z(e,t){return(t=t||{}).recognizers=p(t.recognizers,Z.defaults.preset),new ee(e,t)}function ee(e,t){this.options=ie({},Z.defaults,t||{}),this.options.inputTarget=this.options.inputTarget||e,this.handlers={},this.session={},this.recognizers=[],this.oldCssProps={},this.element=e,this.input=function(e){return new(e.options.inputClass||(fe?j:ge?P:de?H:M))(e,k)}(this),this.touchAction=new W(this,this.options.touchAction),te(this,!0),s(this.options.recognizers,(function(e){var t=this.add(new e[0](e[1]));e[2]&&t.recognizeWith(e[2]),e[3]&&t.requireFailure(e[3])}),this)}function te(e,t){var n,i=e.element;i.style&&(s(e.options.cssProps,(function(o,r){n=x(i.style,r),t?(e.oldCssProps[n]=i.style[n],i.style[n]=o):i.style[n]=e.oldCssProps[n]||""})),t||(e.oldCssProps={}))}function ne(e,n){var i=t.createEvent("Event");i.initEvent(e,!0,!0),i.gesture=n,n.target.dispatchEvent(i)}var ie,oe=["","webkit","Moz","MS","ms","o"],re=t.createElement("div"),se="function",ae=Math.round,le=Math.abs,ce=Date.now;ie="function"!=typeof Object.assign?function(e){if(e===i||null===e)throw new TypeError("Cannot convert undefined or null to object");for(var t=Object(e),n=1;n<arguments.length;n++){var o=arguments[n];if(o!==i&&null!==o)for(var r in o)o.hasOwnProperty(r)&&(t[r]=o[r])}return t}:Object.assign;var ue=a((function(e,t,n){for(var 
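// ---- Editorial annotation (not part of the original minified bundle) ----
// TouchMouseInput (the H/R/q/z group above) merges touch and mouse streams
// while filtering the synthetic mouse events browsers fire after a tap: each
// touchend position is remembered for 2500 ms (Ye) and any mouse event within
// 25 px (Be) of a remembered point is dropped. Readable form (names ours):
function isSyntheticMouseEventSketch(lastTouches, clientX, clientY) {
  for (var i = 0; i < lastTouches.length; i++) {
    var dx = Math.abs(clientX - lastTouches[i].x);
    var dy = Math.abs(clientY - lastTouches[i].y);
    if (dx <= 25 && dy <= 25) return true;     // too close to a recent touch
  }
  return false;
}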
o=Object.keys(t),r=0;r<o.length;)(!n||n&&e[o[r]]===i)&&(e[o[r]]=t[o[r]]),r++;return e}),"extend","Use `assign`."),pe=a((function(e,t){return ue(e,t,!0)}),"merge","Use `assign`."),he=1,de="ontouchstart"in e,fe=x(e,"PointerEvent")!==i,ge=de&&/mobile|tablet|ip(ad|hone|od)|android/i.test(navigator.userAgent),ye="touch",me="mouse",ve=25,be=1,xe=2,we=4,_e=8,ke=1,Ee=2,Te=4,Se=8,Ae=16,Ce=Ee|Te,Le=Se|Ae,Oe=Ce|Le,Me=["x","y"],je=["clientX","clientY"];_.prototype={handler:function(){},init:function(){this.evEl&&h(this.element,this.evEl,this.domHandler),this.evTarget&&h(this.target,this.evTarget,this.domHandler),this.evWin&&h(w(this.element),this.evWin,this.domHandler)},destroy:function(){this.evEl&&d(this.element,this.evEl,this.domHandler),this.evTarget&&d(this.target,this.evTarget,this.domHandler),this.evWin&&d(w(this.element),this.evWin,this.domHandler)}};var Ne={mousedown:be,mousemove:xe,mouseup:we},De="mousedown",Pe="mousemove mouseup";l(M,_,{handler:function(e){var t=Ne[e.type];t&be&&0===e.button&&(this.pressed=!0),t&xe&&1!==e.which&&(t=we),this.pressed&&(t&we&&(this.pressed=!1),this.callback(this.manager,t,{pointers:[e],changedPointers:[e],pointerType:me,srcEvent:e}))}});var Ie={pointerdown:be,pointermove:xe,pointerup:we,pointercancel:_e,pointerout:_e},He={2:ye,3:"pen",4:me,5:"kinect"},Re="pointerdown",qe="pointermove pointerup pointercancel";e.MSPointerEvent&&!e.PointerEvent&&(Re="MSPointerDown",qe="MSPointerMove MSPointerUp MSPointerCancel"),l(j,_,{handler:function(e){var t=this.store,n=!1,i=e.type.toLowerCase().replace("ms",""),o=Ie[i],r=He[e.pointerType]||e.pointerType,s=r==ye,a=m(t,e.pointerId,"pointerId");o&be&&(0===e.button||s)?0>a&&(t.push(e),a=t.length-1):o&(we|_e)&&(n=!0),0>a||(t[a]=e,this.callback(this.manager,o,{pointers:t,changedPointers:[e],pointerType:r,srcEvent:e}),n&&t.splice(a,1))}});var ze={touchstart:be,touchmove:xe,touchend:we,touchcancel:_e},We="touchstart",Fe="touchstart touchmove touchend touchcancel";l(N,_,{handler:function(e){var t=ze[e.type];if(t===be&&(this.started=!0),this.started){var n=D.call(this,e,t);t&(we|_e)&&n[0].length-n[1].length==0&&(this.started=!1),this.callback(this.manager,t,{pointers:n[0],changedPointers:n[1],pointerType:ye,srcEvent:e})}}});var Ue={touchstart:be,touchmove:xe,touchend:we,touchcancel:_e},Xe="touchstart touchmove touchend touchcancel";l(P,_,{handler:function(e){var t=Ue[e.type],n=I.call(this,e,t);n&&this.callback(this.manager,t,{pointers:n[0],changedPointers:n[1],pointerType:ye,srcEvent:e})}});var Ye=2500,Be=25;l(H,_,{handler:function(e,t,n){var i=n.pointerType==ye,o=n.pointerType==me;if(!(o&&n.sourceCapabilities&&n.sourceCapabilities.firesTouchEvents)){if(i)R.call(this,t,n);else if(o&&z.call(this,n))return;this.callback(e,t,n)}},destroy:function(){this.touch.destroy(),this.mouse.destroy()}});var Ve=x(re.style,"touchAction"),$e=Ve!==i,Ke="compute",Qe="auto",Ge="manipulation",Je="none",Ze="pan-x",et="pan-y",tt=function(){if(!$e)return!1;var t={},n=e.CSS&&e.CSS.supports;return["auto","manipulation","pan-y","pan-x","pan-x pan-y","none"].forEach((function(i){t[i]=!n||e.CSS.supports("touch-action",i)})),t}();W.prototype={set:function(e){e==Ke&&(e=this.compute()),$e&&this.manager.element.style&&tt[e]&&(this.manager.element.style[Ve]=e),this.actions=e.toLowerCase().trim()},update:function(){this.set(this.manager.options.touchAction)},compute:function(){var e=[];return s(this.manager.recognizers,(function(t){u(t.options.enable,[t])&&(e=e.concat(t.getTouchAction()))})),function(e){if(g(e,Je))return Je;var t=g(e,Ze),n=g(e,et);return 
t&&n?Je:t||n?t?Ze:et:g(e,Ge)?Ge:Qe}(e.join(" "))},preventDefaults:function(e){var t=e.srcEvent,n=e.offsetDirection;if(!this.manager.session.prevented){var i=this.actions,o=g(i,Je)&&!tt[Je],r=g(i,et)&&!tt[et],s=g(i,Ze)&&!tt[Ze];if(o){var a=1===e.pointers.length,l=e.distance<2,c=e.deltaTime<250;if(a&&l&&c)return}return s&&r?void 0:o||r&&n&Ce||s&&n&Le?this.preventSrc(t):void 0}t.preventDefault()},preventSrc:function(e){this.manager.session.prevented=!0,e.preventDefault()}};var nt=1,it=2,ot=4,rt=8,st=rt,at=16,lt=32;F.prototype={defaults:{},set:function(e){return ie(this.options,e),this.manager&&this.manager.touchAction.update(),this},recognizeWith:function(e){if(r(e,"recognizeWith",this))return this;var t=this.simultaneous;return t[(e=Y(e,this)).id]||(t[e.id]=e,e.recognizeWith(this)),this},dropRecognizeWith:function(e){return r(e,"dropRecognizeWith",this)||(e=Y(e,this),delete this.simultaneous[e.id]),this},requireFailure:function(e){if(r(e,"requireFailure",this))return this;var t=this.requireFail;return-1===m(t,e=Y(e,this))&&(t.push(e),e.requireFailure(this)),this},dropRequireFailure:function(e){if(r(e,"dropRequireFailure",this))return this;e=Y(e,this);var t=m(this.requireFail,e);return t>-1&&this.requireFail.splice(t,1),this},hasRequireFailures:function(){return this.requireFail.length>0},canRecognizeWith:function(e){return!!this.simultaneous[e.id]},emit:function(e){function t(t){n.manager.emit(t,e)}var n=this,i=this.state;rt>i&&t(n.options.event+U(i)),t(n.options.event),e.additionalEvent&&t(e.additionalEvent),i>=rt&&t(n.options.event+U(i))},tryEmit:function(e){return this.canEmit()?this.emit(e):void(this.state=lt)},canEmit:function(){for(var e=0;e<this.requireFail.length;){if(!(this.requireFail[e].state&(lt|nt)))return!1;e++}return!0},recognize:function(e){var t=ie({},e);return u(this.options.enable,[this,t])?(this.state&(st|at|lt)&&(this.state=nt),this.state=this.process(t),void(this.state&(it|ot|rt|at)&&this.tryEmit(t))):(this.reset(),void(this.state=lt))},process:function(e){},getTouchAction:function(){},reset:function(){}},l(B,F,{defaults:{pointers:1},attrTest:function(e){var t=this.options.pointers;return 0===t||e.pointers.length===t},process:function(e){var t=this.state,n=e.eventType,i=t&(it|ot),o=this.attrTest(e);return i&&(n&_e||!o)?t|at:i||o?n&we?t|rt:t&it?t|ot:it:lt}}),l(V,B,{defaults:{event:"pan",threshold:10,pointers:1,direction:Oe},getTouchAction:function(){var e=this.options.direction,t=[];return e&Ce&&t.push(et),e&Le&&t.push(Ze),t},directionTest:function(e){var t=this.options,n=!0,i=e.distance,o=e.direction,r=e.deltaX,s=e.deltaY;return o&t.direction||(t.direction&Ce?(o=0===r?ke:0>r?Ee:Te,n=r!=this.pX,i=Math.abs(e.deltaX)):(o=0===s?ke:0>s?Se:Ae,n=s!=this.pY,i=Math.abs(e.deltaY))),e.direction=o,n&&i>t.threshold&&o&t.direction},attrTest:function(e){return B.prototype.attrTest.call(this,e)&&(this.state&it||!(this.state&it)&&this.directionTest(e))},emit:function(e){this.pX=e.deltaX,this.pY=e.deltaY;var t=X(e.direction);t&&(e.additionalEvent=this.options.event+t),this._super.emit.call(this,e)}}),l($,B,{defaults:{event:"pinch",threshold:0,pointers:2},getTouchAction:function(){return[Je]},attrTest:function(e){return this._super.attrTest.call(this,e)&&(Math.abs(e.scale-1)>this.options.threshold||this.state&it)},emit:function(e){if(1!==e.scale){var t=e.scale<1?"in":"out";e.additionalEvent=this.options.event+t}this._super.emit.call(this,e)}}),l(K,F,{defaults:{event:"press",pointers:1,time:251,threshold:9},getTouchAction:function(){return[Qe]},process:function(e){var 
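// ---- Editorial annotation (not part of the original minified bundle) ----
// The constants nt..lt just defined are Hammer's recognizer state machine:
// POSSIBLE(1) → BEGAN(2) → CHANGED(4) → ENDED/RECOGNIZED(8), with
// CANCELLED(16) and FAILED(32) as terminal states. The helper U earlier maps
// a state to the emitted event suffix; readable form (name ours):
function stateToEventSuffixSketch(state) {
  if (state & 16) return 'cancel';             // STATE_CANCELLED
  if (state & 8) return 'end';                 // STATE_ENDED / RECOGNIZED
  if (state & 4) return 'move';                // STATE_CHANGED
  if (state & 2) return 'start';               // STATE_BEGAN
  return '';                                   // POSSIBLE / FAILED: no suffix
}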
t=this.options,n=e.pointers.length===t.pointers,i=e.distance<t.threshold,r=e.deltaTime>t.time;if(this._input=e,!i||!n||e.eventType&(we|_e)&&!r)this.reset();else if(e.eventType&be)this.reset(),this._timer=o((function(){this.state=st,this.tryEmit()}),t.time,this);else if(e.eventType&we)return st;return lt},reset:function(){clearTimeout(this._timer)},emit:function(e){this.state===st&&(e&&e.eventType&we?this.manager.emit(this.options.event+"up",e):(this._input.timeStamp=ce(),this.manager.emit(this.options.event,this._input)))}}),l(Q,B,{defaults:{event:"rotate",threshold:0,pointers:2},getTouchAction:function(){return[Je]},attrTest:function(e){return this._super.attrTest.call(this,e)&&(Math.abs(e.rotation)>this.options.threshold||this.state&it)}}),l(G,B,{defaults:{event:"swipe",threshold:10,velocity:.3,direction:Ce|Le,pointers:1},getTouchAction:function(){return V.prototype.getTouchAction.call(this)},attrTest:function(e){var t,n=this.options.direction;return n&(Ce|Le)?t=e.overallVelocity:n&Ce?t=e.overallVelocityX:n&Le&&(t=e.overallVelocityY),this._super.attrTest.call(this,e)&&n&e.offsetDirection&&e.distance>this.options.threshold&&e.maxPointers==this.options.pointers&&le(t)>this.options.velocity&&e.eventType&we},emit:function(e){var t=X(e.offsetDirection);t&&this.manager.emit(this.options.event+t,e),this.manager.emit(this.options.event,e)}}),l(J,F,{defaults:{event:"tap",pointers:1,taps:1,interval:300,time:250,threshold:9,posThreshold:10},getTouchAction:function(){return[Ge]},process:function(e){var t=this.options,n=e.pointers.length===t.pointers,i=e.distance<t.threshold,r=e.deltaTime<t.time;if(this.reset(),e.eventType&be&&0===this.count)return this.failTimeout();if(i&&r&&n){if(e.eventType!=we)return this.failTimeout();var s=!this.pTime||e.timeStamp-this.pTime<t.interval,a=!this.pCenter||L(this.pCenter,e.center)<t.posThreshold;if(this.pTime=e.timeStamp,this.pCenter=e.center,a&&s?this.count+=1:this.count=1,this._input=e,0==this.count%t.taps)return this.hasRequireFailures()?(this._timer=o((function(){this.state=st,this.tryEmit()}),t.interval,this),it):st}return lt},failTimeout:function(){return this._timer=o((function(){this.state=lt}),this.options.interval,this),lt},reset:function(){clearTimeout(this._timer)},emit:function(){this.state==st&&(this._input.tapCount=this.count,this.manager.emit(this.options.event,this._input))}}),Z.VERSION="2.0.8",Z.defaults={domEvents:!1,touchAction:Ke,enable:!0,inputTarget:null,inputClass:null,preset:[[Q,{enable:!1}],[$,{enable:!1},["rotate"]],[G,{direction:Ce}],[V,{direction:Ce},["swipe"]],[J],[J,{event:"doubletap",taps:2},["tap"]],[K]],cssProps:{userSelect:"none",touchSelect:"none",touchCallout:"none",contentZooming:"none",userDrag:"none",tapHighlightColor:"rgba(0,0,0,0)"}},ee.prototype={set:function(e){return ie(this.options,e),e.touchAction&&this.touchAction.update(),e.inputTarget&&(this.input.destroy(),this.input.target=e.inputTarget,this.input.init()),this},stop:function(e){this.session.stopped=e?2:1},recognize:function(e){var t=this.session;if(!t.stopped){this.touchAction.preventDefaults(e);var n,i=this.recognizers,o=t.curRecognizer;(!o||o&&o.state&st)&&(o=t.curRecognizer=null);for(var r=0;r<i.length;)n=i[r],2===t.stopped||o&&n!=o&&!n.canRecognizeWith(o)?n.reset():n.recognize(e),!o&&n.state&(it|ot|rt)&&(o=t.curRecognizer=n),r++}},get:function(e){if(e instanceof F)return e;for(var t=this.recognizers,n=0;n<t.length;n++)if(t[n].options.event==e)return t[n];return null},add:function(e){if(r(e,"add",this))return this;var t=this.get(e.options.event);return 
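// ---- Editorial annotation (not part of the original minified bundle) ----
// This section is Hammer.js 2.0.8 (Z.VERSION above). Z(el, opts) builds a
// Manager with the default preset: rotate and pinch start disabled, while
// swipe, pan (both horizontal by default), tap, doubletap and press are live.
// A usage sketch against the global `Hammer` assigned at the end of this UMD
// block:
function hammerUsageSketch() {                 // illustration only, never called
  var mc = new Hammer(document.querySelector('#stage'));
  mc.get('pinch').set({ enable: true });       // opt in to a disabled preset entry
  mc.on('swipeleft swiperight', function (ev) {
    console.log(ev.type, ev.deltaX, ev.velocityX);
  });
  mc.on('doubletap', function (ev) {
    console.log('taps:', ev.tapCount);         // set by the Tap recognizer's emit
  });
}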
t&&this.remove(t),this.recognizers.push(e),e.manager=this,this.touchAction.update(),e},remove:function(e){if(r(e,"remove",this))return this;if(e=this.get(e)){var t=this.recognizers,n=m(t,e);-1!==n&&(t.splice(n,1),this.touchAction.update())}return this},on:function(e,t){if(e!==i&&t!==i){var n=this.handlers;return s(y(e),(function(e){n[e]=n[e]||[],n[e].push(t)})),this}},off:function(e,t){if(e!==i){var n=this.handlers;return s(y(e),(function(e){t?n[e]&&n[e].splice(m(n[e],t),1):delete n[e]})),this}},emit:function(e,t){this.options.domEvents&&ne(e,t);var n=this.handlers[e]&&this.handlers[e].slice();if(n&&n.length){t.type=e,t.preventDefault=function(){t.srcEvent.preventDefault()};for(var i=0;i<n.length;)n[i](t),i++}},destroy:function(){this.element&&te(this,!1),this.handlers={},this.session={},this.input.destroy(),this.element=null}},ie(Z,{INPUT_START:be,INPUT_MOVE:xe,INPUT_END:we,INPUT_CANCEL:_e,STATE_POSSIBLE:nt,STATE_BEGAN:it,STATE_CHANGED:ot,STATE_ENDED:rt,STATE_RECOGNIZED:st,STATE_CANCELLED:at,STATE_FAILED:lt,DIRECTION_NONE:ke,DIRECTION_LEFT:Ee,DIRECTION_RIGHT:Te,DIRECTION_UP:Se,DIRECTION_DOWN:Ae,DIRECTION_HORIZONTAL:Ce,DIRECTION_VERTICAL:Le,DIRECTION_ALL:Oe,Manager:ee,Input:_,TouchAction:W,TouchInput:P,MouseInput:M,PointerEventInput:j,TouchMouseInput:H,SingleTouchInput:N,Recognizer:F,AttrRecognizer:B,Tap:J,Pan:V,Swipe:G,Pinch:$,Rotate:Q,Press:K,on:h,off:d,each:s,merge:pe,extend:ue,assign:ie,inherit:l,bindFn:c,prefixed:x}),(void 0!==e?e:"undefined"!=typeof self?self:{}).Hammer=Z,"function"==typeof define&&define.amd?define((function(){return Z})):"undefined"!=typeof module&&module.exports?module.exports=Z:e.Hammer=Z}(window,document),Unison=function(){"use strict";var e,t=window,n=document,i=n.head,o={},r=!1,s={parseMQ:function(e){return t.getComputedStyle(e,null).getPropertyValue("font-family").replace(/"/g,"").replace(/'/g,"")},debounce:function(e,t,n){var i;return function(){var o=this,r=arguments;clearTimeout(i),i=setTimeout((function(){i=null,n||e.apply(o,r)}),t),n&&!i&&e.apply(o,r)}},isObject:function(e){return"object"==typeof e},isUndefined:function(e){return void 0===e}},a={on:function(e,t){s.isObject(o[e])||(o[e]=[]),o[e].push(t)},emit:function(e,t){if(s.isObject(o[e]))for(var n=o[e].slice(),i=0;i<n.length;i++)n[i].call(this,t)}},l={all:function(){for(var e={},t=s.parseMQ(n.querySelector("title")).split(","),i=0;i<t.length;i++){var o=t[i].trim().split(" ");e[o[0]]=o[1]}return r?e:null},now:function(e){var t=s.parseMQ(i).split(" "),n={name:t[0],width:t[1]};return r?s.isUndefined(e)?n:e(n):null},update:function(){l.now((function(t){t.name!==e&&(a.emit(t.name),a.emit("change",t),e=t.name)}))}};return t.onresize=s.debounce(l.update,100),n.addEventListener("DOMContentLoaded",(function(){r="none"!==t.getComputedStyle(i,null).getPropertyValue("clear"),l.update()})),{fetch:{all:l.all,now:l.now},on:a.on,emit:a.emit,util:{debounce:s.debounce,isObject:s.isObject}}}(),function(){"use strict";function e(e){function t(t,i){var r,f,g=t==window,y=i&&void 0!==i.message?i.message:void 0;if(!(i=e.extend({},e.blockUI.defaults,i||{})).ignoreIfBlocked||!e(t).data("blockUI.isBlocked")){if(i.overlayCSS=e.extend({},e.blockUI.defaults.overlayCSS,i.overlayCSS||{}),r=e.extend({},e.blockUI.defaults.css,i.css||{}),i.onOverlayClick&&(i.overlayCSS.cursor="pointer"),f=e.extend({},e.blockUI.defaults.themedCSS,i.themedCSS||{}),y=void 0===y?i.message:y,g&&h&&n(window,{fadeOut:0}),y&&"string"!=typeof y&&(y.parentNode||y.jquery)){var 
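// ---- Editorial annotation (not part of the original minified bundle) ----
// `Unison` (defined just above) reads named breakpoints that a companion
// stylesheet encodes into the font-family of <title> (all breakpoints) and
// <head> (the active one), re-checking on a 100 ms debounced resize. Usage
// sketch — the breakpoint names/values are examples, not from this file:
function unisonUsageSketch() {                 // illustration only, never called
  var current = Unison.fetch.now();            // e.g. { name: 'medium', width: '768px' }
  var all = Unison.fetch.all();                // e.g. { small: '480px', medium: '768px' }
  Unison.on('change', function (bp) {          // fires once per breakpoint switch
    console.log('breakpoint is now', bp.name, bp.width);
  });
}
// (The code continuing below is the start of the jQuery blockUI plugin.)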
m=y.jquery?y[0]:y,v={};e(t).data("blockUI.history",v),v.el=m,v.parent=m.parentNode,v.display=m.style.display,v.position=m.style.position,v.parent&&v.parent.removeChild(m)}e(t).data("blockUI.onUnblock",i.onUnblock);var b,x,w,_,k=i.baseZ;b=e(c||i.forceIframe?'<iframe class="blockUI" style="z-index:'+k+++';display:none;border:none;margin:0;padding:0;position:absolute;width:100%;height:100%;top:0;left:0" src="'+i.iframeSrc+'"></iframe>':'<div class="blockUI" style="display:none"></div>'),x=e(i.theme?'<div class="blockUI blockOverlay ui-widget-overlay" style="z-index:'+k+++';display:none"></div>':'<div class="blockUI blockOverlay" style="z-index:'+k+++';display:none;border:none;margin:0;padding:0;width:100%;height:100%;top:0;left:0"></div>'),i.theme&&g?(_='<div class="blockUI '+i.blockMsgClass+' blockPage ui-dialog ui-widget ui-corner-all" style="z-index:'+(k+10)+';display:none;position:fixed">',i.title&&(_+='<div class="ui-widget-header ui-dialog-titlebar ui-corner-all blockTitle">'+(i.title||"&nbsp;")+"</div>"),_+='<div class="ui-widget-content ui-dialog-content"></div>',_+="</div>"):i.theme?(_='<div class="blockUI '+i.blockMsgClass+' blockElement ui-dialog ui-widget ui-corner-all" style="z-index:'+(k+10)+';display:none;position:absolute">',i.title&&(_+='<div class="ui-widget-header ui-dialog-titlebar ui-corner-all blockTitle">'+(i.title||"&nbsp;")+"</div>"),_+='<div class="ui-widget-content ui-dialog-content"></div>',_+="</div>"):_=g?'<div class="blockUI '+i.blockMsgClass+' blockPage" style="z-index:'+(k+10)+';display:none;position:fixed"></div>':'<div class="blockUI '+i.blockMsgClass+' blockElement" style="z-index:'+(k+10)+';display:none;position:absolute"></div>',w=e(_),y&&(i.theme?(w.css(f),w.addClass("ui-widget-content")):w.css(r)),i.theme||x.css(i.overlayCSS),x.css("position",g?"fixed":"absolute"),(c||i.forceIframe)&&b.css("opacity",0);var E=[b,x,w],T=e(g?"body":t);e.each(E,(function(){this.appendTo(T)})),i.theme&&i.draggable&&e.fn.draggable&&w.draggable({handle:".ui-dialog-titlebar",cancel:"li"});var S=p&&(!e.support.boxModel||e("object,embed",g?null:t).length>0);if(u||S){if(g&&i.allowBodyStretch&&e.support.boxModel&&e("html,body").css("height","100%"),(u||!e.support.boxModel)&&!g)var A=a(t,"borderTopWidth"),C=a(t,"borderLeftWidth"),L=A?"(0 - "+A+")":0,O=C?"(0 - "+C+")":0;e.each(E,(function(e,t){var n=t[0].style;if(n.position="absolute",2>e)g?n.setExpression("height","Math.max(document.body.scrollHeight, document.body.offsetHeight) - (jQuery.support.boxModel?0:"+i.quirksmodeOffsetHack+') + "px"'):n.setExpression("height",'this.parentNode.offsetHeight + "px"'),g?n.setExpression("width",'jQuery.support.boxModel && document.documentElement.clientWidth || document.body.clientWidth + "px"'):n.setExpression("width",'this.parentNode.offsetWidth + "px"'),O&&n.setExpression("left",O),L&&n.setExpression("top",L);else if(i.centerY)g&&n.setExpression("top",'(document.documentElement.clientHeight || document.body.clientHeight) / 2 - (this.offsetHeight / 2) + (blah = document.documentElement.scrollTop ? document.documentElement.scrollTop : document.body.scrollTop) + "px"'),n.marginTop=0;else if(!i.centerY&&g){var o="((document.documentElement.scrollTop ? 
document.documentElement.scrollTop : document.body.scrollTop) + "+(i.css&&i.css.top?parseInt(i.css.top,10):0)+') + "px"';n.setExpression("top",o)}}))}if(y&&(i.theme?w.find(".ui-widget-content").append(y):w.append(y),(y.jquery||y.nodeType)&&e(y).show()),(c||i.forceIframe)&&i.showOverlay&&b.show(),i.fadeIn){var M=i.onBlock?i.onBlock:l,j=i.showOverlay&&!y?M:l,N=y?M:l;i.showOverlay&&x._fadeIn(i.fadeIn,j),y&&w._fadeIn(i.fadeIn,N)}else i.showOverlay&&x.show(),y&&w.show(),i.onBlock&&i.onBlock.bind(w)();if(o(1,t,i),g?(h=w[0],d=e(i.focusableElements,h),i.focusInput&&setTimeout(s,20)):function(e,t,n){var i=e.parentNode,o=e.style,r=(i.offsetWidth-e.offsetWidth)/2-a(i,"borderLeftWidth"),s=(i.offsetHeight-e.offsetHeight)/2-a(i,"borderTopWidth");t&&(o.left=r>0?r+"px":"0"),n&&(o.top=s>0?s+"px":"0")}(w[0],i.centerX,i.centerY),i.timeout){var D=setTimeout((function(){g?e.unblockUI(i):e(t).unblock(i)}),i.timeout);e(t).data("blockUI.timeout",D)}}}function n(t,n){var r,s,a=t==window,l=e(t),c=l.data("blockUI.history"),u=l.data("blockUI.timeout");u&&(clearTimeout(u),l.removeData("blockUI.timeout")),n=e.extend({},e.blockUI.defaults,n||{}),o(0,t,n),null===n.onUnblock&&(n.onUnblock=l.data("blockUI.onUnblock"),l.removeData("blockUI.onUnblock")),s=a?e("body").children().filter(".blockUI").add("body > .blockUI"):l.find(">.blockUI"),n.cursorReset&&(s.length>1&&(s[1].style.cursor=n.cursorReset),s.length>2&&(s[2].style.cursor=n.cursorReset)),a&&(h=d=null),n.fadeOut?(r=s.length,s.stop().fadeOut(n.fadeOut,(function(){0==--r&&i(s,c,n,t)}))):i(s,c,n,t)}function i(t,n,i,o){var r=e(o);if(!r.data("blockUI.isBlocked")){t.each((function(e,t){this.parentNode&&this.parentNode.removeChild(this)})),n&&n.el&&(n.el.style.display=n.display,n.el.style.position=n.position,n.el.style.cursor="default",n.parent&&n.parent.appendChild(n.el),r.removeData("blockUI.history")),r.data("blockUI.static")&&r.css("position","static"),"function"==typeof i.onUnblock&&i.onUnblock(o,i);var s=e(document.body),a=s.width(),l=s[0].style.width;s.width(a-1).width(a),s[0].style.width=l}}function o(t,n,i){var o=n==window,s=e(n);if((t||(!o||h)&&(o||s.data("blockUI.isBlocked")))&&(s.data("blockUI.isBlocked",t),o&&i.bindEvents&&(!t||i.showOverlay))){var a="mousedown mouseup keydown keypress keyup touchstart touchend touchmove";t?e(document).bind(a,i,r):e(document).unbind(a,r)}}function r(t){if("keydown"===t.type&&t.keyCode&&9==t.keyCode&&h&&t.data.constrainTabKey){var n=d,i=!t.shiftKey&&t.target===n[n.length-1],o=t.shiftKey&&t.target===n[0];if(i||o)return setTimeout((function(){s(o)}),10),!1}var r=t.data,a=e(t.target);return a.hasClass("blockOverlay")&&r.onOverlayClick&&r.onOverlayClick(t),a.parents("div."+r.blockMsgClass).length>0||0===a.parents().children().filter("div.blockUI").length}function s(e){if(d){var t=d[!0===e?d.length-1:0];t&&t.focus()}}function a(t,n){return parseInt(e.css(t,n),10)||0}e.fn._fadeIn=e.fn.fadeIn;var l=e.noop||function(){},c=/MSIE/.test(navigator.userAgent),u=/MSIE 6.0/.test(navigator.userAgent)&&!/MSIE 8.0/.test(navigator.userAgent),p=(document.documentMode,e.isFunction(document.createElement("div").style.setExpression));e.blockUI=function(e){t(window,e)},e.unblockUI=function(e){n(window,e)},e.growlUI=function(t,n,i,o){var r=e('<div class="growlUI"></div>');t&&r.append("<h1>"+t+"</h1>"),n&&r.append("<h2>"+n+"</h2>"),void 0===i&&(i=3e3);var s=function(t){t=t||{},e.blockUI({message:r,fadeIn:void 0!==t.fadeIn?t.fadeIn:700,fadeOut:void 0!==t.fadeOut?t.fadeOut:1e3,timeout:void 
0!==t.timeout?t.timeout:i,centerY:!1,showOverlay:!1,onUnblock:o,css:e.blockUI.defaults.growlCSS})};s(),r.css("opacity"),r.mouseover((function(){s({fadeIn:0,timeout:3e4});var t=e(".blockMsg");t.stop(),t.fadeTo(300,1)})).mouseout((function(){e(".blockMsg").fadeOut(1e3)}))},e.fn.block=function(n){if(this[0]===window)return e.blockUI(n),this;var i=e.extend({},e.blockUI.defaults,n||{});return this.each((function(){var t=e(this);i.ignoreIfBlocked&&t.data("blockUI.isBlocked")||t.unblock({fadeOut:0})})),this.each((function(){"static"==e.css(this,"position")&&(this.style.position="relative",e(this).data("blockUI.static",!0)),this.style.zoom=1,t(this,n)}))},e.fn.unblock=function(t){return this[0]===window?(e.unblockUI(t),this):this.each((function(){n(this,t)}))},e.blockUI.version=2.7,e.blockUI.defaults={message:"<h1>Please wait...</h1>",title:null,draggable:!0,theme:!1,css:{padding:0,margin:0,width:"30%",top:"40%",left:"35%",textAlign:"center",color:"#000",border:"3px solid #aaa",backgroundColor:"#fff",cursor:"wait"},themedCSS:{width:"30%",top:"40%",left:"35%"},overlayCSS:{backgroundColor:"#000",opacity:.6,cursor:"wait"},cursorReset:"default",growlCSS:{width:"350px",top:"10px",left:"",right:"10px",border:"none",padding:"5px",opacity:.6,cursor:"default",color:"#fff",backgroundColor:"#000","-webkit-border-radius":"10px","-moz-border-radius":"10px","border-radius":"10px"},iframeSrc:/^https/i.test(window.location.href||"")?"javascript:false":"about:blank",forceIframe:!1,baseZ:1e3,centerX:!0,centerY:!0,allowBodyStretch:!0,bindEvents:!0,constrainTabKey:!0,fadeIn:200,fadeOut:400,timeout:0,showOverlay:!0,focusInput:!0,focusableElements:":input:enabled:visible",onBlock:null,onUnblock:null,onOverlayClick:null,quirksmodeOffsetHack:4,blockMsgClass:"blockMsg",ignoreIfBlocked:!1};var h=null,d=[]}"function"==typeof define&&define.amd&&define.amd.jQuery?define(["jquery"],e):e(jQuery)}(),function(){function e(e,t){return function(){return e.apply(t,arguments)}}var t,n,i,o,r,s,a,l,c,u,p,h,d,f,g,y,m,v,b,x,w,_,k,E,T,S,A,C,L,O,M,j,N,D,P,I,H,R,q,z,W,F,U,X,Y,B,V=[].slice,$={}.hasOwnProperty,K=function(e,t){for(var n in t)$.call(t,n)&&(e[n]=t[n]);function i(){this.constructor=e}return i.prototype=t.prototype,e.prototype=new i,e.__super__=t.prototype,e},Q=[].indexOf||function(e){for(var t=0,n=this.length;t<n;t++)if(t in this&&this[t]===e)return t;return-1};function G(){}for(v={className:"",catchupTime:100,initialRate:.03,minTime:250,ghostTime:100,maxProgressPerFrame:20,easeFactor:1.25,startOnPageLoad:!0,restartOnPushState:!0,restartOnRequestAfter:500,target:"body",elements:{checkInterval:100,selectors:["body"]},eventLag:{minSamples:10,sampleCount:3,lagThreshold:3},ajax:{trackMethods:["GET"],trackWebSockets:!0,ignoreURLs:[]}},S=function(){var e;return null!=(e="undefined"!=typeof performance&&null!==performance&&"function"==typeof performance.now?performance.now():void 0)?e:+new Date},C=window.requestAnimationFrame||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame||window.msRequestAnimationFrame,m=window.cancelAnimationFrame||window.mozCancelAnimationFrame,h=function(e,t,n){if("function"==typeof e.addEventListener)return e.addEventListener(t,n,!1);var i;"function"!=typeof e["on"+t]||"object"!=typeof e["on"+t].eventListeners?(i=new o,"function"==typeof e["on"+t]&&i.on(t,e["on"+t]),e["on"+t]=function(e){return i.trigger(t,e)},e["on"+t].eventListeners=i):i=e["on"+t].eventListeners,i.on(t,n)},null==C&&(C=function(e){return setTimeout(e,50)},m=function(e){return clearTimeout(e)}),O=function(e){var 
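// ---- Editorial annotation (not part of the original minified bundle) ----
// The section that just closed is jQuery blockUI v2.7 ($.blockUI.version
// above): it covers the page or a single element with an overlay plus an
// optional message box. Usage sketch against the API defined above:
function blockUiUsageSketch() {                // illustration only, never called
  $.blockUI({ message: '<h1>Please wait...</h1>', fadeIn: 200 }); // whole page
  setTimeout($.unblockUI, 2000);               // release it later
  $('#panel').block({ overlayCSS: { opacity: 0.6 } });            // one element
  $('#panel').unblock();
  $.growlUI('Saved', 'Your changes were stored', 3000);           // toast-style
}
// (The code continuing below is the start of Pace, the progress-bar library.)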
t=S(),n=function(){var i=S()-t;return 33<=i?(t=S(),e(i,(function(){return C(n)}))):setTimeout(n,33-i)};return n()},L=function(){var e=arguments[0],t=arguments[1],n=3<=arguments.length?V.call(arguments,2):[];return"function"==typeof e[t]?e[t].apply(e,n):e[t]},b=function(){for(var e,t,n,i=arguments[0],o=2<=arguments.length?V.call(arguments,1):[],r=0,s=o.length;r<s;r++)if(t=o[r])for(e in t)$.call(t,e)&&(n=t[e],null!=i[e]&&"object"==typeof i[e]&&null!=n&&"object"==typeof n?b(i[e],n):i[e]=n);return i},f=function(e){for(var t,n,i=t=0,o=0,r=e.length;o<r;o++)n=e[o],i+=Math.abs(n),t++;return i/t},w=function(e,t){var n,i;if(null==e&&(e="options"),null==t&&(t=!0),i=document.querySelector("[data-pace-"+e+"]")){if(n=i.getAttribute("data-pace-"+e),!t)return n;try{return JSON.parse(n)}catch(e){return"undefined"!=typeof console&&null!==console?console.error("Error parsing inline pace options",e):void 0}}},G.prototype.on=function(e,t,n,i){var o;return null==i&&(i=!1),null==this.bindings&&(this.bindings={}),null==(o=this.bindings)[e]&&(o[e]=[]),this.bindings[e].push({handler:t,ctx:n,once:i})},G.prototype.once=function(e,t,n){return this.on(e,t,n,!0)},G.prototype.off=function(e,t){var n,i,o;if(null!=(null!=(i=this.bindings)?i[e]:void 0)){if(null==t)return delete this.bindings[e];for(n=0,o=[];n<this.bindings[e].length;)this.bindings[e][n].handler===t?o.push(this.bindings[e].splice(n,1)):o.push(n++);return o}},G.prototype.trigger=function(){var e,t,n,i,o,r,s=arguments[0],a=2<=arguments.length?V.call(arguments,1):[];if(null!=(i=this.bindings)&&i[s]){for(n=0,r=[];n<this.bindings[s].length;)t=(o=this.bindings[s][n]).handler,e=o.ctx,o=o.once,t.apply(null!=e?e:this,a),o?r.push(this.bindings[s].splice(n,1)):r.push(n++);return r}},B=G,s=window.Pace||{},window.Pace=s,b(s,B.prototype),A=s.options=b({},v,window.paceOptions,w()),z=0,F=(X=["ajax","document","eventLag","elements"]).length;z<F;z++)!0===A[D=X[z]]&&(A[D]=v[D]);function J(){return J.__super__.constructor.apply(this,arguments)}function Z(){this.progress=0}function ee(){this.bindings={}}function te(){var e,t=this;te.__super__.constructor.apply(this,arguments),e=function(e){var n=e.open;return e.open=function(i,o,r){return N(i)&&t.trigger("request",{type:i,url:o,request:e}),n.apply(e,arguments)}},window.XMLHttpRequest=function(t){return t=new q(t),e(t),t};try{x(window.XMLHttpRequest,q)}catch(e){}if(null!=R){window.XDomainRequest=function(){var t=new R;return e(t),t};try{x(window.XDomainRequest,R)}catch(e){}}if(null!=H&&A.ajax.trackWebSockets){window.WebSocket=function(e,n){var i=null!=n?new H(e,n):new H(e);return N("socket")&&t.trigger("request",{type:"socket",url:e,protocols:n,request:i}),i};try{x(window.WebSocket,H)}catch(e){}}}function ne(){this.complete=e(this.complete,this);var t=this;this.elements=[],_().on("request",(function(){return t.watch.apply(t,arguments)}))}function ie(t){var n,o,r,s;for(null==t&&(t={}),this.complete=e(this.complete,this),this.elements=[],null==t.selectors&&(t.selectors=[]),o=0,r=(s=t.selectors).length;o<r;o++)n=s[o],this.elements.push(new i(n,this.complete))}function oe(e,t){this.selector=e,this.completeCallback=t,this.progress=0,this.check()}function re(){var e,t,n=this;this.progress=null!=(t=this.states[document.readyState])?t:100,e=document.onreadystatechange,document.onreadystatechange=function(){return null!=n.states[document.readyState]&&(n.progress=n.states[document.readyState]),"function"==typeof e?e.apply(null,arguments):void 0}}function 
se(e){this.source=e,this.last=this.sinceLastUpdate=0,this.rate=A.initialRate,this.catchup=0,this.progress=this.lastProgress=0,null!=this.source&&(this.progress=L(this.source,"progress"))}K(J,B=Error),r=J,Z.prototype.getElement=function(){var e;if(null==this.el){if(!(e=document.querySelector(A.target)))throw new r;this.el=document.createElement("div"),this.el.className="pace pace-active",document.body.className=document.body.className.replace(/(pace-done )|/,"pace-running ");var t=""!==A.className?" "+A.className:"";this.el.innerHTML='<div class="pace-progress'+t+'">\n <div class="pace-progress-inner"></div>\n</div>\n<div class="pace-activity"></div>',null!=e.firstChild?e.insertBefore(this.el,e.firstChild):e.appendChild(this.el)}return this.el},Z.prototype.finish=function(){var e=this.getElement();return e.className=e.className.replace("pace-active","pace-inactive"),document.body.className=document.body.className.replace("pace-running ","pace-done ")},Z.prototype.update=function(e){return this.progress=e,s.trigger("progress",e),this.render()},Z.prototype.destroy=function(){try{this.getElement().parentNode.removeChild(this.getElement())}catch(e){r=e}return this.el=void 0},Z.prototype.render=function(){var e,t,n,i,o,r,a;if(null==document.querySelector(A.target))return!1;for(e=this.getElement(),i="translate3d("+this.progress+"%, 0, 0)",o=0,r=(a=["webkitTransform","msTransform","transform"]).length;o<r;o++)t=a[o],e.children[0].style[t]=i;return(!this.lastRenderedProgress||this.lastRenderedProgress|0!==this.progress|0)&&(e.children[0].setAttribute("data-progress-text",(0|this.progress)+"%"),100<=this.progress?n="99":(n=this.progress<10?"0":"",n+=0|this.progress),e.children[0].setAttribute("data-progress",""+n)),s.trigger("change",this.progress),this.lastRenderedProgress=this.progress},Z.prototype.done=function(){return 100<=this.progress},n=Z,ee.prototype.trigger=function(e,t){var n,i,o,r,s;if(null!=this.bindings[e]){for(s=[],i=0,o=(r=this.bindings[e]).length;i<o;i++)n=r[i],s.push(n.call(this,t));return s}},ee.prototype.on=function(e,t){var n;return null==(n=this.bindings)[e]&&(n[e]=[]),this.bindings[e].push(t)},o=ee,q=window.XMLHttpRequest,R=window.XDomainRequest,H=window.WebSocket,x=function(e,t){var n,i=[];for(n in t.prototype)try{null==e[n]&&"function"!=typeof t[n]?"function"==typeof Object.defineProperty?i.push(Object.defineProperty(e,n,{get:function(e){return function(){return t.prototype[e]}}(n),configurable:!0,enumerable:!0})):i.push(e[n]=t.prototype[n]):i.push(void 0)}catch(e){}return i},E=[],s.ignore=function(){var e=arguments[0],t=2<=arguments.length?V.call(arguments,1):[];return E.unshift("ignore"),t=e.apply(null,t),E.shift(),t},s.track=function(){var e=arguments[0],t=2<=arguments.length?V.call(arguments,1):[];return E.unshift("track"),t=e.apply(null,t),E.shift(),t},N=function(e){if(null==e&&(e="GET"),"track"===E[0])return"force";if(!E.length&&A.ajax){if("socket"===e&&A.ajax.trackWebSockets)return!0;if(e=e.toUpperCase(),0<=Q.call(A.ajax.trackMethods,e))return!0}return!1},K(te,o),a=te,W=null,j=function(e){for(var t,n=A.ajax.ignoreURLs,i=0,o=n.length;i<o;i++)if("string"==typeof(t=n[i])){if(-1!==e.indexOf(t))return!0}else if(t.test(e))return!0;return!1},(_=function(){return W=null==W?new a:W})().on("request",(function(e){var n,i=e.type,o=e.request,r=e.url;if(!j(r))return s.running||!1===A.restartOnRequestAfter&&"force"!==N(i)?void 0:(n=arguments,"boolean"==typeof(r=A.restartOnRequestAfter||0)&&(r=0),setTimeout((function(){var 
e,r,a,l,c="socket"===i?o.readyState<1:0<(c=o.readyState)&&c<4;if(c){for(s.restart(),l=[],e=0,r=(a=s.sources).length;e<r;e++){if((D=a[e])instanceof t){D.watch.apply(D,n);break}l.push(void 0)}return l}}),r))})),ne.prototype.watch=function(e){var t=e.type,n=e.request;if(e=e.url,!j(e))return n=new("socket"===t?u:p)(n,this.complete),this.elements.push(n)},ne.prototype.complete=function(e){return this.elements=this.elements.filter((function(t){return t!==e}))},t=ne,p=function(e,t){var n,i,o,r,s=this;if(this.progress=0,null!=window.ProgressEvent)for(h(e,"progress",(function(e){return e.lengthComputable?s.progress=100*e.loaded/e.total:s.progress=s.progress+(100-s.progress)/2})),n=0,i=(r=["load","abort","timeout","error"]).length;n<i;n++)h(e,r[n],(function(){return t(s),s.progress=100}));else o=e.onreadystatechange,e.onreadystatechange=function(){var n;return 0===(n=e.readyState)||4===n?(t(s),s.progress=100):3===e.readyState&&(s.progress=50),"function"==typeof o?o.apply(null,arguments):void 0}},u=function(e,t){for(var n,i=this,o=this.progress=0,r=(n=["error","open"]).length;o<r;o++)h(e,n[o],(function(){return t(i),i.progress=100}))},ie.prototype.complete=function(e){return this.elements=this.elements.filter((function(t){return t!==e}))},w=ie,oe.prototype.check=function(){var e=this;return document.querySelector(this.selector)?this.done():setTimeout((function(){return e.check()}),A.elements.checkInterval)},oe.prototype.done=function(){return this.completeCallback(this),this.completeCallback=null,this.progress=100},i=oe,re.prototype.states={loading:0,interactive:50,complete:100},B=re,K=function(){var e,t,n,i,o,r=this;this.progress=0,o=[],i=0,n=S(),t=setInterval((function(){var s=S()-n-50;return n=S(),o.push(s),o.length>A.eventLag.sampleCount&&o.shift(),e=f(o),++i>=A.eventLag.minSamples&&e<A.eventLag.lagThreshold?(r.progress=100,clearInterval(t)):r.progress=3/(e+3)*100}),50)},se.prototype.tick=function(e,t){return 100<=(t=null==t?L(this.source,"progress"):t)&&(this.done=!0),t===this.last?this.sinceLastUpdate+=e:(this.sinceLastUpdate&&(this.rate=(t-this.last)/this.sinceLastUpdate),this.catchup=(t-this.progress)/A.catchupTime,this.sinceLastUpdate=0,this.last=t),t>this.progress&&(this.progress+=this.catchup*e),t=1-Math.pow(this.progress/100,A.easeFactor),this.progress+=t*this.rate*e,this.progress=Math.min(this.lastProgress+A.maxProgressPerFrame,this.progress),this.progress=Math.max(0,this.progress),this.progress=Math.min(100,this.progress),this.lastProgress=this.progress,this.progress},c=se,y=d=I=g=M=P=null,s.running=!1,k=function(){if(A.restartOnPushState)return s.restart()},null!=window.history.pushState&&(U=window.history.pushState,window.history.pushState=function(){return k(),U.apply(window.history,arguments)}),null!=window.history.replaceState&&(Y=window.history.replaceState,window.history.replaceState=function(){return k(),Y.apply(window.history,arguments)}),l={ajax:t,elements:w,document:B,eventLag:K},(T=function(){var e,t,i,o,r,a,u,p;for(s.sources=P=[],t=0,o=(a=["ajax","elements","document","eventLag"]).length;t<o;t++)!1!==A[e=a[t]]&&P.push(new l[e](A[e]));for(i=0,r=(p=null!=(u=A.extraSources)?u:[]).length;i<r;i++)D=p[i],P.push(new D(A));return s.bar=g=new n,M=[],I=new c})(),s.stop=function(){return s.trigger("stop"),s.running=!1,g.destroy(),y=!0,null!=d&&("function"==typeof m&&m(d),d=null),T()},s.restart=function(){return s.trigger("restart"),s.stop(),s.start()},s.go=function(){var e;return s.running=!0,g.render(),e=S(),y=!1,d=O((function(t,n){g.progress;for(var 
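// ---- Editorial annotation (not part of the original minified bundle) ----
// Pace's "eventLag" progress source (K above) estimates main-thread
// responsiveness: every 50 ms it records how late the interval fired, keeps a
// rolling window of eventLag.sampleCount samples, and finishes once at least
// eventLag.minSamples ticks have stayed under lagThreshold. Its progress
// formula, restated (name ours):
function eventLagProgressSketch(avgLagMs) {
  return (3 / (avgLagMs + 3)) * 100;           // 0 ms lag → 100%, 3 ms lag → 50%
}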
i,o,r,a,l,u,p,h,d,f,m=u=0,v=!0,b=p=0,x=P.length;p<x;b=++p)for(D=P[b],l=null!=M[b]?M[b]:M[b]=[],r=h=0,d=(o=null!=(f=D.elements)?f:[D]).length;h<d;r=++h)a=o[r],v&=(a=null!=l[r]?l[r]:l[r]=new c(a)).done,a.done||(m++,u+=a.tick(t));return i=u/m,g.update(I.tick(t,i)),g.done()||v||y?(g.update(100),s.trigger("done"),setTimeout((function(){return g.finish(),s.running=!1,s.trigger("hide")}),Math.max(A.ghostTime,Math.max(A.minTime-(S()-e),0)))):n()}))},s.start=function(e){b(A,e),s.running=!0;try{g.render()}catch(e){r=e}return document.querySelector(".pace")?(s.trigger("start"),s.go()):setTimeout(s.start,50)},"function"==typeof define&&define.amd?define((function(){return s})):"object"==typeof exports?module.exports=s:A.startOnPageLoad&&s.start()}.call(this),function(e,t){"use strict";"function"==typeof define&&define.amd?define([],(function(){return e.Waves=t.call(e),e.Waves})):"object"==typeof exports?module.exports=t.call(e):e.Waves=t.call(e)}("object"==typeof global?global:this,(function(){"use strict";var e=e||{},t=document.querySelectorAll.bind(document),n=Object.prototype.toString,i="ontouchstart"in window;function o(e){var t=typeof e;return"function"==t||"object"==t&&!!e}function r(e){var i,r=n.call(e);return"[object String]"===r?t(e):o(e)&&/^\[object (Array|HTMLCollection|NodeList|Object)\]$/.test(r)&&e.hasOwnProperty("length")?e:o(i=e)&&0<i.nodeType?[e]:[]}function s(e){var t,n,i,o={top:0,left:0},r=e&&e.ownerDocument,s=r.documentElement;return void 0!==e.getBoundingClientRect&&(o=e.getBoundingClientRect()),t=null!==(i=n=r)&&i===i.window?n:9===n.nodeType&&n.defaultView,{top:o.top+t.pageYOffset-s.clientTop,left:o.left+t.pageXOffset-s.clientLeft}}function a(e){var t="";for(var n in e)e.hasOwnProperty(n)&&(t+=n+":"+e[n]+";");return t}var l={duration:750,delay:200,show:function(e,t,n){if(2===e.button)return!1;t=t||this;var i=document.createElement("div");i.className="waves-ripple waves-rippling",t.appendChild(i);var o=s(t),r=0,c=0;c=0<=(c="touches"in e&&e.touches.length?(r=e.touches[0].pageY-o.top,e.touches[0].pageX-o.left):(r=e.pageY-o.top,e.pageX-o.left))?c:0,r=0<=r?r:0;var u="scale("+t.clientWidth/100*3+")",p="translate(0,0)";n&&(p="translate("+n.x+"px, "+n.y+"px)"),i.setAttribute("data-hold",Date.now()),i.setAttribute("data-x",c),i.setAttribute("data-y",r),i.setAttribute("data-scale",u),i.setAttribute("data-translate",p);var h={top:r+"px",left:c+"px"};i.classList.add("waves-notransition"),i.setAttribute("style",a(h)),i.classList.remove("waves-notransition"),h["-webkit-transform"]=u+" "+p,h["-moz-transform"]=u+" "+p,h["-ms-transform"]=u+" "+p,h["-o-transform"]=u+" "+p,h.transform=u+" "+p,h.opacity="1";var d="mousemove"===e.type?2500:l.duration;h["-webkit-transition-duration"]=d+"ms",h["-moz-transition-duration"]=d+"ms",h["-o-transition-duration"]=d+"ms",h["transition-duration"]=d+"ms",i.setAttribute("style",a(h))},hide:function(e,t){for(var n=(t=t||this).getElementsByClassName("waves-rippling"),o=0,r=n.length;o<r;o++)u(e,t,n[o]);i&&(t.removeEventListener("touchend",l.hide),t.removeEventListener("touchcancel",l.hide)),t.removeEventListener("mouseup",l.hide),t.removeEventListener("mouseleave",l.hide)}},c={input:function(e){var t,n,i,o,r=e.parentNode;"i"===r.tagName.toLowerCase()&&r.classList.contains("waves-effect")||((t=document.createElement("i")).className=e.className+" 
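// ---- Editorial annotation (not part of the original minified bundle) ----
// The IIFE that just closed is Pace. It patches XMLHttpRequest, WebSocket and
// history.pushState/replaceState, watches document.readyState, and calls
// Pace.start() itself while startOnPageLoad stays true. Configuration is read
// from window.paceOptions once, when the script first runs, so in real use it
// must be assigned *before* this bundle loads:
function paceUsageSketch() {                   // illustration only, never called
  window.paceOptions = {
    restartOnPushState: true,                  // re-run the bar on SPA navigation
    ajax: { trackMethods: ['GET', 'POST'], trackWebSockets: false }
  };
  Pace.on('done', function () { console.log('page settled'); });
  Pace.restart();                              // force the bar to run again now
}
// (The code continuing below is the start of Waves, the ripple-effect library.)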
waves-input-wrapper",e.className="waves-button-input",r.replaceChild(t,e),t.appendChild(e),i=(n=window.getComputedStyle(e,null)).color,o=n.backgroundColor,t.setAttribute("style","color:"+i+";background:"+o),e.setAttribute("style","background-color:rgba(0,0,0,0);"))},img:function(e){var t,n=e.parentNode;"i"===n.tagName.toLowerCase()&&n.classList.contains("waves-effect")||(t=document.createElement("i"),n.replaceChild(t,e),t.appendChild(e))}};function u(e,t,n){var i,o,r,s,c,u;n&&(n.classList.remove("waves-rippling"),i=n.getAttribute("data-x"),o=n.getAttribute("data-y"),r=n.getAttribute("data-scale"),s=n.getAttribute("data-translate"),(c=350-(Date.now()-Number(n.getAttribute("data-hold"))))<0&&(c=0),"mousemove"===e.type&&(c=150),u="mousemove"===e.type?2500:l.duration,setTimeout((function(){var e={top:o+"px",left:i+"px",opacity:"0","-webkit-transition-duration":u+"ms","-moz-transition-duration":u+"ms","-o-transition-duration":u+"ms","transition-duration":u+"ms","-webkit-transform":r+" "+s,"-moz-transform":r+" "+s,"-ms-transform":r+" "+s,"-o-transform":r+" "+s,transform:r+" "+s};n.setAttribute("style",a(e)),setTimeout((function(){try{t.removeChild(n)}catch(e){return!1}}),u)}),c))}var p={touches:0,allowEvent:function(e){var t=!0;return/^(mousedown|mousemove)$/.test(e.type)&&p.touches&&(t=!1),t},registerEvent:function(e){var t=e.type;"touchstart"===t?p.touches+=1:/^(touchend|touchcancel)$/.test(t)&&setTimeout((function(){p.touches&&--p.touches}),500)}};function h(e){var t,n,o,r,s,a=function(e){if(!1===p.allowEvent(e))return null;for(var t=null,n=e.target||e.srcElement;n.parentElement;){if(!(n instanceof SVGElement)&&n.classList.contains("waves-effect")){t=n;break}n=n.parentElement}return t}(e);if(null!==a){if(a.disabled||a.getAttribute("disabled")||a.classList.contains("disabled"))return;p.registerEvent(e),"touchstart"===e.type&&l.delay?(t=!1,n=setTimeout((function(){n=null,l.show(e,a)}),l.delay),o=function(i){n&&(clearTimeout(n),n=null,l.show(e,a)),t||(t=!0,l.hide(i,a)),s()},r=function(e){n&&(clearTimeout(n),n=null),o(e),s()},a.addEventListener("touchmove",r,!1),a.addEventListener("touchend",o,!1),a.addEventListener("touchcancel",o,!1),s=function(){a.removeEventListener("touchmove",r),a.removeEventListener("touchend",o),a.removeEventListener("touchcancel",o)}):(l.show(e,a),i&&(a.addEventListener("touchend",l.hide,!1),a.addEventListener("touchcancel",l.hide,!1)),a.addEventListener("mouseup",l.hide,!1),a.addEventListener("mouseleave",l.hide,!1))}}return e.init=function(e){var t=document.body;"duration"in(e=e||{})&&(l.duration=e.duration),"delay"in e&&(l.delay=e.delay),i&&(t.addEventListener("touchstart",h,!1),t.addEventListener("touchcancel",p.registerEvent,!1),t.addEventListener("touchend",p.registerEvent,!1)),t.addEventListener("mousedown",h,!1)},e.attach=function(e,t){var i,o;e=r(e),"[object Array]"===n.call(t)&&(t=t.join(" ")),t=t?" 
"+t:"";for(var s=0,a=e.length;s<a;s++)o=(i=e[s]).tagName.toLowerCase(),-1!==["input","img"].indexOf(o)&&(c[o](i),i=i.parentElement),-1===i.className.indexOf("waves-effect")&&(i.className+=" waves-effect"+t)},e.ripple=function(e,t){var n=(e=r(e)).length;if((t=t||{}).wait=t.wait||0,t.position=t.position||null,n)for(var i={},o=0,a={type:"mousedown",button:1},c=function(e,t){return function(){l.hide(e,t)}};o<n;o++){var u=e[o],p=t.position||{x:u.clientWidth/2,y:u.clientHeight/2},h=s(u);i.x=h.left+p.x,i.y=h.top+p.y,a.pageX=i.x,a.pageY=i.y,l.show(a,u),0<=t.wait&&null!==t.wait&&setTimeout(c({type:"mouseup",button:1},u),t.wait)}},e.calm=function(e){for(var t={type:"mouseup",button:1},n=0,i=(e=r(e)).length;n<i;n++)l.hide(t,e[n])},e.displayEffect=function(t){e.init(t)},e})),function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e=e||self).i18next=t()}(this,(function(){"use strict";function e(t){return(e="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(t)}function t(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function n(e){for(var n=1;n<arguments.length;n++){var i=null!=arguments[n]?Object(arguments[n]):{},o=Object.keys(i);"function"==typeof Object.getOwnPropertySymbols&&(o=o.concat(Object.getOwnPropertySymbols(i).filter((function(e){return Object.getOwnPropertyDescriptor(i,e).enumerable})))),o.forEach((function(n){t(e,n,i[n])}))}return e}function i(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function o(e,t){for(var n=0;n<t.length;n++){var i=t[n];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(e,i.key,i)}}function r(e,t,n){return t&&o(e.prototype,t),n&&o(e,n),e}function s(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function a(t,n){return!n||"object"!==e(n)&&"function"!=typeof n?s(t):n}function l(e){return(l=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function c(e,t){return(c=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function u(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&c(e,t)}var p={type:"logger",log:function(e){this.output("log",e)},warn:function(e){this.output("warn",e)},error:function(e){this.output("error",e)},output:function(e,t){console&&console[e]&&console[e].apply(console,t)}},h=new(function(){function e(
){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};i(this,e),this.init(t,n)}return r(e,[{key:"init",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};this.prefix=t.prefix||"i18next:",this.logger=e||p,this.options=t,this.debug=t.debug}},{key:"setDebug",value:function(e){this.debug=e}},{key:"log",value:function(){for(var e=arguments.length,t=new Array(e),n=0;n<e;n++)t[n]=arguments[n];return this.forward(t,"log","",!0)}},{key:"warn",value:function(){for(var e=arguments.length,t=new Array(e),n=0;n<e;n++)t[n]=arguments[n];return this.forward(t,"warn","",!0)}},{key:"error",value:function(){for(var e=arguments.length,t=new Array(e),n=0;n<e;n++)t[n]=arguments[n];return this.forward(t,"error","")}},{key:"deprecate",value:function(){for(var e=arguments.length,t=new Array(e),n=0;n<e;n++)t[n]=arguments[n];return this.forward(t,"warn","WARNING DEPRECATED: ",!0)}},{key:"forward",value:function(e,t,n,i){return i&&!this.debug?null:("string"==typeof e[0]&&(e[0]="".concat(n).concat(this.prefix," ").concat(e[0])),this.logger[t](e))}},{key:"create",value:function(t){return new e(this.logger,n({},{prefix:"".concat(this.prefix,":").concat(t,":")},this.options))}}]),e}()),d=function(){function e(){i(this,e),this.observers={}}return r(e,[{key:"on",value:function(e,t){var n=this;return e.split(" ").forEach((function(e){n.observers[e]=n.observers[e]||[],n.observers[e].push(t)})),this}},{key:"off",value:function(e,t){this.observers[e]&&(t?this.observers[e]=this.observers[e].filter((function(e){return e!==t})):delete this.observers[e])}},{key:"emit",value:function(e){for(var t=arguments.length,n=new Array(t>1?t-1:0),i=1;i<t;i++)n[i-1]=arguments[i];this.observers[e]&&[].concat(this.observers[e]).forEach((function(e){e.apply(void 0,n)})),this.observers["*"]&&[].concat(this.observers["*"]).forEach((function(t){t.apply(t,[e].concat(n))}))}}]),e}();function f(){var e,t,n=new Promise((function(n,i){e=n,t=i}));return n.resolve=e,n.reject=t,n}function g(e){return null==e?"":""+e}function y(e,t,n){function i(e){return e&&e.indexOf("###")>-1?e.replace(/###/g,"."):e}function o(){return!e||"string"==typeof e}for(var r="string"!=typeof t?[].concat(t):t.split(".");r.length>1;){if(o())return{};var s=i(r.shift());!e[s]&&n&&(e[s]=new n),e=e[s]}return o()?{}:{obj:e,k:i(r.shift())}}function m(e,t,n){var i=y(e,t,Object);i.obj[i.k]=n}function v(e,t){var n=y(e,t),i=n.obj,o=n.k;if(i)return i[o]}function b(e,t,n){var i=v(e,n);return void 0!==i?i:v(t,n)}function x(e){return e.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g,"\\$&")}var w={"&":"&amp;","<":"&lt;",">":"&gt;",'"':"&quot;","'":"&#39;","/":"&#x2F;"};function _(e){return"string"==typeof e?e.replace(/[&<>"'\/]/g,(function(e){return w[e]})):e}var k="undefined"!=typeof window&&window.navigator&&window.navigator.userAgent&&window.navigator.userAgent.indexOf("MSIE")>-1,E=function(e){function t(e){var n,o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{ns:["translation"],defaultNS:"translation"};return i(this,t),n=a(this,l(t).call(this)),k&&d.call(s(n)),n.data=e||{},n.options=o,void 0===n.options.keySeparator&&(n.options.keySeparator="."),n}return u(t,d),r(t,[{key:"addNamespaces",value:function(e){this.options.ns.indexOf(e)<0&&this.options.ns.push(e)}},{key:"removeNamespaces",value:function(e){var t=this.options.ns.indexOf(e);t>-1&&this.options.ns.splice(t,1)}},{key:"getResource",value:function(e,t,n){var i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},o=void 
0!==i.keySeparator?i.keySeparator:this.options.keySeparator,r=[e,t];return n&&"string"!=typeof n&&(r=r.concat(n)),n&&"string"==typeof n&&(r=r.concat(o?n.split(o):n)),e.indexOf(".")>-1&&(r=e.split(".")),v(this.data,r)}},{key:"addResource",value:function(e,t,n,i){var o=arguments.length>4&&void 0!==arguments[4]?arguments[4]:{silent:!1},r=this.options.keySeparator;void 0===r&&(r=".");var s=[e,t];n&&(s=s.concat(r?n.split(r):n)),e.indexOf(".")>-1&&(i=t,t=(s=e.split("."))[1]),this.addNamespaces(t),m(this.data,s,i),o.silent||this.emit("added",e,t,n,i)}},{key:"addResources",value:function(e,t,n){var i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{silent:!1};for(var o in n)"string"!=typeof n[o]&&"[object Array]"!==Object.prototype.toString.apply(n[o])||this.addResource(e,t,o,n[o],{silent:!0});i.silent||this.emit("added",e,t,n)}},{key:"addResourceBundle",value:function(e,t,i,o,r){var s=arguments.length>5&&void 0!==arguments[5]?arguments[5]:{silent:!1},a=[e,t];e.indexOf(".")>-1&&(o=i,i=t,t=(a=e.split("."))[1]),this.addNamespaces(t);var l=v(this.data,a)||{};o?function e(t,n,i){for(var o in n)"__proto__"!==o&&(o in t?"string"==typeof t[o]||t[o]instanceof String||"string"==typeof n[o]||n[o]instanceof String?i&&(t[o]=n[o]):e(t[o],n[o],i):t[o]=n[o]);return t}(l,i,r):l=n({},l,i),m(this.data,a,l),s.silent||this.emit("added",e,t,i)}},{key:"removeResourceBundle",value:function(e,t){this.hasResourceBundle(e,t)&&delete this.data[e][t],this.removeNamespaces(t),this.emit("removed",e,t)}},{key:"hasResourceBundle",value:function(e,t){return void 0!==this.getResource(e,t)}},{key:"getResourceBundle",value:function(e,t){return t||(t=this.options.defaultNS),"v1"===this.options.compatibilityAPI?n({},{},this.getResource(e,t)):this.getResource(e,t)}},{key:"getDataByLanguage",value:function(e){return this.data[e]}},{key:"toJSON",value:function(){return this.data}}]),t}(),T={processors:{},addPostProcessor:function(e){this.processors[e.name]=e},handle:function(e,t,n,i,o){var r=this;return e.forEach((function(e){r.processors[e]&&(t=r.processors[e].process(t,n,i,o))})),t}},S={},A=function(t){function o(e){var t,n,r,c,u=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return i(this,o),t=a(this,l(o).call(this)),k&&d.call(s(t)),n=["resourceStore","languageUtils","pluralResolver","interpolator","backendConnector","i18nFormat","utils"],r=e,c=s(t),n.forEach((function(e){r[e]&&(c[e]=r[e])})),t.options=u,void 0===t.options.keySeparator&&(t.options.keySeparator="."),t.logger=h.create("translator"),t}return u(o,d),r(o,[{key:"changeLanguage",value:function(e){e&&(this.language=e)}},{key:"exists",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{interpolation:{}},n=this.resolve(e,t);return n&&void 0!==n.res}},{key:"extractFromKey",value:function(e,t){var n=void 0!==t.nsSeparator?t.nsSeparator:this.options.nsSeparator;void 0===n&&(n=":");var i=void 0!==t.keySeparator?t.keySeparator:this.options.keySeparator,o=t.ns||this.options.defaultNS;if(n&&e.indexOf(n)>-1){var r=e.match(this.interpolator.nestingRegexp);if(r&&r.length>0)return{key:e,namespaces:o};var s=e.split(n);(n!==i||n===i&&this.options.ns.indexOf(s[0])>-1)&&(o=s.shift()),e=s.join(i)}return"string"==typeof o&&(o=[o]),{key:e,namespaces:o}}},{key:"translate",value:function(t,i,o){var r=this;if("object"!==e(i)&&this.options.overloadTranslationOptionHandler&&(i=this.options.overloadTranslationOptionHandler(arguments)),i||(i={}),null==t)return"";Array.isArray(t)||(t=[String(t)]);var s=void 
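/* Translator.prototype.translate: extracts key/namespace, short-circuits for the "cimode" pseudo-language, resolves the translation, and handles returnObjects, joinArrays, plural defaultValues and missing-key reporting below. */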
0!==i.keySeparator?i.keySeparator:this.options.keySeparator,a=this.extractFromKey(t[t.length-1],i),l=a.key,c=a.namespaces,u=c[c.length-1],p=i.lng||this.language,h=i.appendNamespaceToCIMode||this.options.appendNamespaceToCIMode;if(p&&"cimode"===p.toLowerCase()){if(h){var d=i.nsSeparator||this.options.nsSeparator;return u+d+l}return l}var f=this.resolve(t,i),g=f&&f.res,y=f&&f.usedKey||l,m=f&&f.exactUsedKey||l,v=Object.prototype.toString.apply(g),b=void 0!==i.joinArrays?i.joinArrays:this.options.joinArrays,x=!this.i18nFormat||this.i18nFormat.handleAsObject;if(x&&g&&"string"!=typeof g&&"boolean"!=typeof g&&"number"!=typeof g&&["[object Number]","[object Function]","[object RegExp]"].indexOf(v)<0&&("string"!=typeof b||"[object Array]"!==v)){if(!i.returnObjects&&!this.options.returnObjects)return this.logger.warn("accessing an object - but returnObjects options is not enabled!"),this.options.returnedObjectHandler?this.options.returnedObjectHandler(y,g,i):"key '".concat(l," (").concat(this.language,")' returned an object instead of string.");if(s){var w="[object Array]"===v,_=w?[]:{},k=w?m:y;for(var E in g)if(Object.prototype.hasOwnProperty.call(g,E)){var T="".concat(k).concat(s).concat(E);_[E]=this.translate(T,n({},i,{joinArrays:!1,ns:c})),_[E]===T&&(_[E]=g[E])}g=_}}else if(x&&"string"==typeof b&&"[object Array]"===v)(g=g.join(b))&&(g=this.extendTranslation(g,t,i,o));else{var S=!1,A=!1;if(!this.isValidLookup(g)&&void 0!==i.defaultValue){if(S=!0,void 0!==i.count){var C=this.pluralResolver.getSuffix(p,i.count);g=i["defaultValue".concat(C)]}g||(g=i.defaultValue)}this.isValidLookup(g)||(A=!0,g=l);var L=i.defaultValue&&i.defaultValue!==g&&this.options.updateMissing;if(A||S||L){if(this.logger.log(L?"updateKey":"missingKey",p,u,l,L?i.defaultValue:g),s){var O=this.resolve(l,n({},i,{keySeparator:!1}));O&&O.res&&this.logger.warn("Seems the loaded translations were in flat JSON format instead of nested. Either set keySeparator: false on init or make sure your translations are published in nested format.")}var M=[],j=this.languageUtils.getFallbackCodes(this.options.fallbackLng,i.lng||this.language);if("fallback"===this.options.saveMissingTo&&j&&j[0])for(var N=0;N<j.length;N++)M.push(j[N]);else"all"===this.options.saveMissingTo?M=this.languageUtils.toResolveHierarchy(i.lng||this.language):M.push(i.lng||this.language);var D=function(e,t){r.options.missingKeyHandler?r.options.missingKeyHandler(e,u,t,L?i.defaultValue:g,L,i):r.backendConnector&&r.backendConnector.saveMissing&&r.backendConnector.saveMissing(e,u,t,L?i.defaultValue:g,L,i),r.emit("missingKey",e,u,t,g)};if(this.options.saveMissing){var P=void 0!==i.count&&"string"!=typeof i.count;this.options.saveMissingPlurals&&P?M.forEach((function(e){r.pluralResolver.getPluralFormsOfKey(e,l).forEach((function(t){return D([e],t)}))})):D(M,l)}}g=this.extendTranslation(g,t,i,f,o),A&&g===l&&this.options.appendNamespaceToMissingKey&&(g="".concat(u,":").concat(l)),A&&this.options.parseMissingKeyHandler&&(g=this.options.parseMissingKeyHandler(g))}return g}},{key:"extendTranslation",value:function(e,t,i,o,r){var s=this;if(this.i18nFormat&&this.i18nFormat.parse)e=this.i18nFormat.parse(e,i,o.usedLng,o.usedNS,o.usedKey,{resolved:o});else if(!i.skipInterpolation){i.interpolation&&this.interpolator.init(n({},i,{interpolation:n({},this.options.interpolation,i.interpolation)}));var a,l=i.interpolation&&i.interpolation.skipOnVariables||this.options.interpolation.skipOnVariables;if(l){var c=e.match(this.interpolator.nestingRegexp);a=c&&c.length}var u=i.replace&&"string"!=typeof i.replace?i.replace:i;if(this.options.interpolation.defaultVariables&&(u=n({},this.options.interpolation.defaultVariables,u)),e=this.interpolator.interpolate(e,u,i.lng||this.language,i),l){var p=e.match(this.interpolator.nestingRegexp);a<(p&&p.length)&&(i.nest=!1)}!1!==i.nest&&(e=this.interpolator.nest(e,(function(){for(var e=arguments.length,n=new Array(e),i=0;i<e;i++)n[i]=arguments[i];return r&&r[0]===n[0]?(s.logger.warn("It seems you are nesting recursively key: ".concat(n[0]," in key: ").concat(t[0])),null):s.translate.apply(s,n.concat([t]))}),i)),i.interpolation&&this.interpolator.reset()}var h=i.postProcess||this.options.postProcess,d="string"==typeof h?[h]:h;return null!=e&&d&&d.length&&!1!==i.applyPostProcessor&&(e=T.handle(d,e,t,this.options&&this.options.postProcessPassResolved?n({i18nResolved:o},i):i,this)),e}},{key:"resolve",value:function(e){var t,n,i,o,r,s=this,a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return"string"==typeof e&&(e=[e]),e.forEach((function(e){if(!s.isValidLookup(t)){var l=s.extractFromKey(e,a),c=l.key;n=c;var u=l.namespaces;s.options.fallbackNS&&(u=u.concat(s.options.fallbackNS));var p=void 0!==a.count&&"string"!=typeof a.count,h=void 0!==a.context&&"string"==typeof a.context&&""!==a.context,d=a.lngs?a.lngs:s.languageUtils.toResolveHierarchy(a.lng||s.language,a.fallbackLng);u.forEach((function(e){s.isValidLookup(t)||(r=e,!S["".concat(d[0],"-").concat(e)]&&s.utils&&s.utils.hasLoadedNamespace&&!s.utils.hasLoadedNamespace(r)&&(S["".concat(d[0],"-").concat(e)]=!0,s.logger.warn('key "'.concat(n,'" for languages "').concat(d.join(", "),'" won\'t get resolved as namespace "').concat(r,'" was not yet loaded'),"This means something IS WRONG in your setup. You access the t function before i18next.init / i18next.loadNamespace / i18next.changeLanguage was done. Wait for the callback or Promise to resolve before accessing it!!!")),d.forEach((function(n){if(!s.isValidLookup(t)){o=n;var r,l,u=c,d=[u];for(s.i18nFormat&&s.i18nFormat.addLookupKeys?s.i18nFormat.addLookupKeys(d,c,n,e,a):(p&&(r=s.pluralResolver.getSuffix(n,a.count)),p&&h&&d.push(u+r),h&&d.push(u+="".concat(s.options.contextSeparator).concat(a.context)),p&&d.push(u+=r));l=d.pop();)s.isValidLookup(t)||(i=l,t=s.getResource(n,e,l,a))}})))}))}})),{res:t,usedKey:n,exactUsedKey:i,usedLng:o,usedNS:r}}},{key:"isValidLookup",value:function(e){return!(void 0===e||!this.options.returnNull&&null===e||!this.options.returnEmptyString&&""===e)}},{key:"getResource",value:function(e,t,n){var i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};return this.i18nFormat&&this.i18nFormat.getResource?this.i18nFormat.getResource(e,t,n,i):this.resourceStore.getResource(e,t,n,i)}}]),o}();function C(e){return e.charAt(0).toUpperCase()+e.slice(1)}var L=function(){function e(t){i(this,e),this.options=t,this.whitelist=this.options.supportedLngs||!1,this.supportedLngs=this.options.supportedLngs||!1,this.logger=h.create("languageUtils")}return r(e,[{key:"getScriptPartFromCode",value:function(e){if(!e||e.indexOf("-")<0)return null;var t=e.split("-");return 2===t.length?null:(t.pop(),"x"===t[t.length-1].toLowerCase()?null:this.formatLanguageCode(t.join("-")))}},{key:"getLanguagePartFromCode",value:function(e){if(!e||e.indexOf("-")<0)return e;var t=e.split("-");return this.formatLanguageCode(t[0])}},{key:"formatLanguageCode",value:function(e){if("string"==typeof e&&e.indexOf("-")>-1){var t=["hans","hant","latn","cyrl","cans","mong","arab"],n=e.split("-");return this.options.lowerCaseLng?n=n.map((function(e){return e.toLowerCase()})):2===n.length?(n[0]=n[0].toLowerCase(),n[1]=n[1].toUpperCase(),t.indexOf(n[1].toLowerCase())>-1&&(n[1]=C(n[1].toLowerCase()))):3===n.length&&(n[0]=n[0].toLowerCase(),2===n[1].length&&(n[1]=n[1].toUpperCase()),"sgn"!==n[0]&&2===n[2].length&&(n[2]=n[2].toUpperCase()),t.indexOf(n[1].toLowerCase())>-1&&(n[1]=C(n[1].toLowerCase())),t.indexOf(n[2].toLowerCase())>-1&&(n[2]=C(n[2].toLowerCase()))),n.join("-")}return this.options.cleanCode||this.options.lowerCaseLng?e.toLowerCase():e}},{key:"isWhitelisted",value:function(e){return this.logger.deprecate("languageUtils.isWhitelisted",'function "isWhitelisted" will be renamed to "isSupportedCode" in the next major - please make sure to rename it\'s usage asap.'),this.isSupportedCode(e)}},{key:"isSupportedCode",value:function(e){return("languageOnly"===this.options.load||this.options.nonExplicitSupportedLngs)&&(e=this.getLanguagePartFromCode(e)),!this.supportedLngs||!this.supportedLngs.length||this.supportedLngs.indexOf(e)>-1}},{key:"getBestMatchFromCodes",value:function(e){var t,n=this;return e?(e.forEach((function(e){if(!t){var i=n.formatLanguageCode(e);n.options.supportedLngs&&!n.isSupportedCode(i)||(t=i)}})),!t&&this.options.supportedLngs&&e.forEach((function(e){if(!t){var i=n.getLanguagePartFromCode(e);if(n.isSupportedCode(i))return t=i;t=n.options.supportedLngs.find((function(e){if(0===e.indexOf(i))return e}))}})),t||(t=this.getFallbackCodes(this.options.fallbackLng)[0]),t):null}},{key:"getFallbackCodes",value:function(e,t){if(!e)return[];if("function"==typeof e&&(e=e(t)),"string"==typeof e&&(e=[e]),"[object Array]"===Object.prototype.toString.apply(e))return e;if(!t)return e.default||[];var n=e[t];return n||(n=e[this.getScriptPartFromCode(t)]),n||(n=e[this.formatLanguageCode(t)]),n||(n=e[this.getLanguagePartFromCode(t)]),n||(n=e.default),n||[]}},{key:"toResolveHierarchy",value:function(e,t){var n=this,i=this.getFallbackCodes(t||this.options.fallbackLng||[],e),o=[],r=function(e){e&&(n.isSupportedCode(e)?o.push(e):n.logger.warn("rejecting language code not found in supportedLngs: ".concat(e)))};return"string"==typeof e&&e.indexOf("-")>-1?("languageOnly"!==this.options.load&&r(this.formatLanguageCode(e)),"languageOnly"!==this.options.load&&"currentOnly"!==this.options.load&&r(this.getScriptPartFromCode(e)),"currentOnly"!==this.options.load&&r(this.getLanguagePartFromCode(e))):"string"==typeof e&&r(this.formatLanguageCode(e)),i.forEach((function(e){o.indexOf(e)<0&&r(n.formatLanguageCode(e))})),o}}]),e}(),O=[{lngs:["ach","ak","am","arn","br","fil","gun","ln","mfe","mg","mi","oc","pt","pt-BR","tg","ti","tr","uz","wa"],nr:[1,2],fc:1},{lngs:["af","an","ast","az","bg","bn","ca","da","de","dev","el","en","eo","es","et","eu","fi","fo","fur","fy","gl","gu","ha","hi","hu","hy","ia","it","kn","ku","lb","mai","ml","mn","mr","nah","nap","nb","ne","nl","nn","no","nso","pa","pap","pms","ps","pt-PT","rm","sco","se","si","so","son","sq","sv","sw","ta","te","tk","ur","yo"],nr:[1,2],fc:2},{lngs:["ay","bo","cgg","fa","ht","id","ja","jbo","ka","kk","km","ko","ky","lo","ms","sah","su","th","tt","ug","vi","wo","zh"],nr:[1],fc:3},{lngs:["be","bs","cnr","dz","hr","ru","sr","uk"],nr:[1,2,5],fc:4},{lngs:["ar"],nr:[0,1,2,3,11,100],fc:5},{lngs:["cs","sk"],nr:[1,2,5],fc:6},{lngs:["csb","pl"],nr:[1,2,5],fc:7},{lngs:["cy"],nr:[1,2,3,8],fc:8},{lngs:["fr"],nr:[1,2],fc:9},{lngs:["ga"],nr:[1,2,3,7,11],fc:10},{lngs:["gd"],nr:[1,2,3,20],fc:11},{lngs:["is"],nr:[1,2],fc:12},{lngs:["jv"],nr:[0,1],fc:13},{lngs:["kw"],nr:[1,2,3,4],fc:14},{lngs:["lt"],nr:[1,2,10],fc:15},{lngs:["lv"],nr:[1,2,0],fc:16},{lngs:["mk"],nr:[1,2],fc:17},{lngs:["mnk"],nr:[0,1,2],fc:18},{lngs:["mt"],nr:[1,2,11,20],fc:19},{lngs:["or"],nr:[2,1],fc:2},{lngs:["ro"],nr:[1,2,20],fc:20},{lngs:["sl"],nr:[5,1,2,3],fc:21},{lngs:["he","iw"],nr:[1,2,20,21],fc:22}],M={1:function(e){return Number(e>1)},2:function(e){return Number(1!=e)},3:function(e){return 0},4:function(e){return Number(e%10==1&&e%100!=11?0:e%10>=2&&e%10<=4&&(e%100<10||e%100>=20)?1:2)},5:function(e){return Number(0==e?0:1==e?1:2==e?2:e%100>=3&&e%100<=10?3:e%100>=11?4:5)},6:function(e){return Number(1==e?0:e>=2&&e<=4?1:2)},7:function(e){return Number(1==e?0:e%10>=2&&e%10<=4&&(e%100<10||e%100>=20)?1:2)},8:function(e){return Number(1==e?0:2==e?1:8!=e&&11!=e?2:3)},9:function(e){return Number(e>=2)},10:function(e){return Number(1==e?0:2==e?1:e<7?2:e<11?3:4)},11:function(e){return Number(1==e||11==e?0:2==e||12==e?1:e>2&&e<20?2:3)},12:function(e){return Number(e%10!=1||e%100==11)},13:function(e){return Number(0!==e)},14:function(e){return Number(1==e?0:2==e?1:3==e?2:3)},15:function(e){return Number(e%10==1&&e%100!=11?0:e%10>=2&&(e%100<10||e%100>=20)?1:2)},16:function(e){return Number(e%10==1&&e%100!=11?0:0!==e?1:2)},17:function(e){return Number(1==e||e%10==1&&e%100!=11?0:1)},18:function(e){return Number(0==e?0:1==e?1:2)},19:function(e){return Number(1==e?0:0==e||e%100>1&&e%100<11?1:e%100>10&&e%100<20?2:3)},20:function(e){return Number(1==e?0:0==e||e%100>0&&e%100<20?1:2)},21:function(e){return Number(e%100==1?1:e%100==2?2:e%100==3||e%100==4?3:0)},22:function(e){return Number(1==e?0:2==e?1:(e<0||e>10)&&e%10==0?2:3)}},j=function(){function e(t){var n,o=arguments.length>1&&void 
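/* PluralResolver: the O table above maps language codes to plural-number sets, the M functions compute the plural-form index, and getSuffix below turns a count into a key suffix. */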
0!==arguments[1]?arguments[1]:{};i(this,e),this.languageUtils=t,this.options=o,this.logger=h.create("pluralResolver"),this.rules=(n={},O.forEach((function(e){e.lngs.forEach((function(t){n[t]={numbers:e.nr,plurals:M[e.fc]}}))})),n)}return r(e,[{key:"addRule",value:function(e,t){this.rules[e]=t}},{key:"getRule",value:function(e){return this.rules[e]||this.rules[this.languageUtils.getLanguagePartFromCode(e)]}},{key:"needsPlural",value:function(e){var t=this.getRule(e);return t&&t.numbers.length>1}},{key:"getPluralFormsOfKey",value:function(e,t){var n=this,i=[],o=this.getRule(e);return o?(o.numbers.forEach((function(o){var r=n.getSuffix(e,o);i.push("".concat(t).concat(r))})),i):i}},{key:"getSuffix",value:function(e,t){var n=this,i=this.getRule(e);if(i){var o=i.noAbs?i.plurals(t):i.plurals(Math.abs(t)),r=i.numbers[o];this.options.simplifyPluralSuffix&&2===i.numbers.length&&1===i.numbers[0]&&(2===r?r="plural":1===r&&(r=""));var s=function(){return n.options.prepend&&r.toString()?n.options.prepend+r.toString():r.toString()};return"v1"===this.options.compatibilityJSON?1===r?"":"number"==typeof r?"_plural_".concat(r.toString()):s():"v2"===this.options.compatibilityJSON||this.options.simplifyPluralSuffix&&2===i.numbers.length&&1===i.numbers[0]?s():this.options.prepend&&o.toString()?this.options.prepend+o.toString():o.toString()}return this.logger.warn("no plural rule found for: ".concat(e)),""}}]),e}(),N=function(){function e(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};i(this,e),this.logger=h.create("interpolator"),this.options=t,this.format=t.interpolation&&t.interpolation.format||function(e){return e},this.init(t)}return r(e,[{key:"init",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};e.interpolation||(e.interpolation={escapeValue:!0});var t=e.interpolation;this.escape=void 0!==t.escape?t.escape:_,this.escapeValue=void 0===t.escapeValue||t.escapeValue,this.useRawValueToEscape=void 0!==t.useRawValueToEscape&&t.useRawValueToEscape,this.prefix=t.prefix?x(t.prefix):t.prefixEscaped||"{{",this.suffix=t.suffix?x(t.suffix):t.suffixEscaped||"}}",this.formatSeparator=t.formatSeparator?t.formatSeparator:t.formatSeparator||",",this.unescapePrefix=t.unescapeSuffix?"":t.unescapePrefix||"-",this.unescapeSuffix=this.unescapePrefix?"":t.unescapeSuffix||"",this.nestingPrefix=t.nestingPrefix?x(t.nestingPrefix):t.nestingPrefixEscaped||x("$t("),this.nestingSuffix=t.nestingSuffix?x(t.nestingSuffix):t.nestingSuffixEscaped||x(")"),this.nestingOptionsSeparator=t.nestingOptionsSeparator?t.nestingOptionsSeparator:t.nestingOptionsSeparator||",",this.maxReplaces=t.maxReplaces?t.maxReplaces:1e3,this.alwaysFormat=void 0!==t.alwaysFormat&&t.alwaysFormat,this.resetRegExp()}},{key:"reset",value:function(){this.options&&this.init(this.options)}},{key:"resetRegExp",value:function(){var e="".concat(this.prefix,"(.+?)").concat(this.suffix);this.regexp=new RegExp(e,"g");var t="".concat(this.prefix).concat(this.unescapePrefix,"(.+?)").concat(this.unescapeSuffix).concat(this.suffix);this.regexpUnescape=new RegExp(t,"g");var n="".concat(this.nestingPrefix,"(.+?)").concat(this.nestingSuffix);this.nestingRegexp=new RegExp(n,"g")}},{key:"interpolate",value:function(e,t,n,i){var o,r,s,a=this,l=this.options&&this.options.interpolation&&this.options.interpolation.defaultVariables||{};function c(e){return e.replace(/\$/g,"$$$$")}var u=function(e){if(e.indexOf(a.formatSeparator)<0){var o=b(t,l,e);return a.alwaysFormat?a.format(o,void 0,n):o}var 
r=e.split(a.formatSeparator),s=r.shift().trim(),c=r.join(a.formatSeparator).trim();return a.format(b(t,l,s),c,n,i)};this.resetRegExp();var p=i&&i.missingInterpolationHandler||this.options.missingInterpolationHandler,h=i&&i.interpolation&&i.interpolation.skipOnVariables||this.options.interpolation.skipOnVariables;return[{regex:this.regexpUnescape,safeValue:function(e){return c(e)}},{regex:this.regexp,safeValue:function(e){return a.escapeValue?c(a.escape(e)):c(e)}}].forEach((function(t){for(s=0;o=t.regex.exec(e);){if(void 0===(r=u(o[1].trim())))if("function"==typeof p){var n=p(e,o,i);r="string"==typeof n?n:""}else{if(h){r=o[0];continue}a.logger.warn("missed to pass in variable ".concat(o[1]," for interpolating ").concat(e)),r=""}else"string"==typeof r||a.useRawValueToEscape||(r=g(r));if(e=e.replace(o[0],t.safeValue(r)),t.regex.lastIndex=0,++s>=a.maxReplaces)break}})),e}},{key:"nest",value:function(e,t){var i,o,r=this,s=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},a=n({},s);function l(e,t){var i=this.nestingOptionsSeparator;if(e.indexOf(i)<0)return e;var o=e.split(new RegExp("".concat(i,"[ ]*{"))),r="{".concat(o[1]);e=o[0],r=(r=this.interpolate(r,a)).replace(/'/g,'"');try{a=JSON.parse(r),t&&(a=n({},t,a))}catch(t){return this.logger.warn("failed parsing options string in nesting for key ".concat(e),t),"".concat(e).concat(i).concat(r)}return delete a.defaultValue,e}for(a.applyPostProcessor=!1,delete a.defaultValue;i=this.nestingRegexp.exec(e);){var c=[],u=!1;if(i[0].includes(this.formatSeparator)&&!/{.*}/.test(i[1])){var p=i[1].split(this.formatSeparator).map((function(e){return e.trim()}));i[1]=p.shift(),c=p,u=!0}if((o=t(l.call(this,i[1].trim(),a),a))&&i[0]===e&&"string"!=typeof o)return o;"string"!=typeof o&&(o=g(o)),o||(this.logger.warn("missed to resolve ".concat(i[1]," for nesting ").concat(e)),o=""),u&&(o=c.reduce((function(e,t){return r.format(e,t,s.lng,s)}),o.trim())),e=e.replace(i[0],o),this.regexp.lastIndex=0}return e}}]),e}(),D=function(e){function t(e,n,o){var r,c=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};return i(this,t),r=a(this,l(t).call(this)),k&&d.call(s(r)),r.backend=e,r.store=n,r.services=o,r.languageUtils=o.languageUtils,r.options=c,r.logger=h.create("backendConnector"),r.state={},r.queue=[],r.backend&&r.backend.init&&r.backend.init(o,c.backend,c),r}return u(t,d),r(t,[{key:"queueLoad",value:function(e,t,n,i){var o=this,r=[],s=[],a=[],l=[];return e.forEach((function(e){var i=!0;t.forEach((function(t){var a="".concat(e,"|").concat(t);!n.reload&&o.store.hasResourceBundle(e,t)?o.state[a]=2:o.state[a]<0||(1===o.state[a]?s.indexOf(a)<0&&s.push(a):(o.state[a]=1,i=!1,s.indexOf(a)<0&&s.push(a),r.indexOf(a)<0&&r.push(a),l.indexOf(t)<0&&l.push(t)))})),i||a.push(e)})),(r.length||s.length)&&this.queue.push({pending:s,loaded:{},errors:[],callback:i}),{toLoad:r,pending:s,toLoadLanguages:a,toLoadNamespaces:l}}},{key:"loaded",value:function(e,t,n){var i=e.split("|"),o=i[0],r=i[1];t&&this.emit("failedLoading",o,r,t),n&&this.store.addResourceBundle(o,r,n),this.state[e]=t?-1:2;var s={};this.queue.forEach((function(n){var i,a,l,c,u;i=n.loaded,a=r,(c=(l=y(i,[o],Object)).obj)[u=l.k]=c[u]||[],c[u].push(a),function(e,t){for(var 
n=e.indexOf(t);-1!==n;)e.splice(n,1),n=e.indexOf(t)}(n.pending,e),t&&n.errors.push(t),0!==n.pending.length||n.done||(Object.keys(n.loaded).forEach((function(e){s[e]||(s[e]=[]),n.loaded[e].length&&n.loaded[e].forEach((function(t){s[e].indexOf(t)<0&&s[e].push(t)}))})),n.done=!0,n.errors.length?n.callback(n.errors):n.callback())})),this.emit("loaded",s),this.queue=this.queue.filter((function(e){return!e.done}))}},{key:"read",value:function(e,t,n){var i=this,o=arguments.length>3&&void 0!==arguments[3]?arguments[3]:0,r=arguments.length>4&&void 0!==arguments[4]?arguments[4]:350,s=arguments.length>5?arguments[5]:void 0;return e.length?this.backend[n](e,t,(function(a,l){a&&l&&o<5?setTimeout((function(){i.read.call(i,e,t,n,o+1,2*r,s)}),r):s(a,l)})):s(null,{})}},{key:"prepareLoading",value:function(e,t){var n=this,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},o=arguments.length>3?arguments[3]:void 0;if(!this.backend)return this.logger.warn("No backend was added via i18next.use. Will not load resources."),o&&o();"string"==typeof e&&(e=this.languageUtils.toResolveHierarchy(e)),"string"==typeof t&&(t=[t]);var r=this.queueLoad(e,t,i,o);if(!r.toLoad.length)return r.pending.length||o(),null;r.toLoad.forEach((function(e){n.loadOne(e)}))}},{key:"load",value:function(e,t,n){this.prepareLoading(e,t,{},n)}},{key:"reload",value:function(e,t,n){this.prepareLoading(e,t,{reload:!0},n)}},{key:"loadOne",value:function(e){var t=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",i=e.split("|"),o=i[0],r=i[1];this.read(o,r,"read",void 0,void 0,(function(i,s){i&&t.logger.warn("".concat(n,"loading namespace ").concat(r," for language ").concat(o," failed"),i),!i&&s&&t.logger.log("".concat(n,"loaded namespace ").concat(r," for language ").concat(o),s),t.loaded(e,i,s)}))}},{key:"saveMissing",value:function(e,t,i,o,r){var s=arguments.length>5&&void 0!==arguments[5]?arguments[5]:{};this.services.utils&&this.services.utils.hasLoadedNamespace&&!this.services.utils.hasLoadedNamespace(t)?this.logger.warn('did not save key "'.concat(i,'" as the namespace "').concat(t,'" was not yet loaded'),"This means something IS WRONG in your setup. You access the t function before i18next.init / i18next.loadNamespace / i18next.changeLanguage was done. Wait for the callback or Promise to resolve before accessing it!!!"):null!=i&&""!==i&&(this.backend&&this.backend.create&&this.backend.create(e,t,i,o,null,n({},s,{isUpdate:r})),e&&e[0]&&this.store.addResource(e[0],t,i,o))}}]),t}();function P(e){return"string"==typeof e.ns&&(e.ns=[e.ns]),"string"==typeof e.fallbackLng&&(e.fallbackLng=[e.fallbackLng]),"string"==typeof e.fallbackNS&&(e.fallbackNS=[e.fallbackNS]),e.whitelist&&(e.whitelist&&e.whitelist.indexOf("cimode")<0&&(e.whitelist=e.whitelist.concat(["cimode"])),e.supportedLngs=e.whitelist),e.nonExplicitWhitelist&&(e.nonExplicitSupportedLngs=e.nonExplicitWhitelist),e.supportedLngs&&e.supportedLngs.indexOf("cimode")<0&&(e.supportedLngs=e.supportedLngs.concat(["cimode"])),e}function I(){}return new(function(t){function o(){var e,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},n=arguments.length>1?arguments[1]:void 0;if(i(this,o),e=a(this,l(o).call(this)),k&&d.call(s(e)),e.options=P(t),e.services={},e.logger=h,e.modules={external:[]},n&&!e.isInitialized&&!t.isClone){if(!e.options.initImmediate)return e.init(t,n),a(e,s(e));setTimeout((function(){e.init(t,n)}),0)}return e}return u(o,d),r(o,[{key:"init",value:function(){var t=this,i=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},o=arguments.length>1?arguments[1]:void 0;function r(e){return e?"function"==typeof e?new e:e:null}if("function"==typeof i&&(o=i,i={}),i.whitelist&&!i.supportedLngs&&this.logger.deprecate("whitelist",'option "whitelist" will be renamed to "supportedLngs" in the next major - please make sure to rename this option asap.'),i.nonExplicitWhitelist&&!i.nonExplicitSupportedLngs&&this.logger.deprecate("whitelist",'options "nonExplicitWhitelist" will be renamed to "nonExplicitSupportedLngs" in the next major - please make sure to rename this option asap.'),this.options=n({},{debug:!1,initImmediate:!0,ns:["translation"],defaultNS:["translation"],fallbackLng:["dev"],fallbackNS:!1,whitelist:!1,nonExplicitWhitelist:!1,supportedLngs:!1,nonExplicitSupportedLngs:!1,load:"all",preload:!1,simplifyPluralSuffix:!0,keySeparator:".",nsSeparator:":",pluralSeparator:"_",contextSeparator:"_",partialBundledLanguages:!1,saveMissing:!1,updateMissing:!1,saveMissingTo:"fallback",saveMissingPlurals:!0,missingKeyHandler:!1,missingInterpolationHandler:!1,postProcess:!1,postProcessPassResolved:!1,returnNull:!0,returnEmptyString:!0,returnObjects:!1,joinArrays:!1,returnedObjectHandler:!1,parseMissingKeyHandler:!1,appendNamespaceToMissingKey:!1,appendNamespaceToCIMode:!1,overloadTranslationOptionHandler:function(t){var n={};if("object"===e(t[1])&&(n=t[1]),"string"==typeof t[1]&&(n.defaultValue=t[1]),"string"==typeof t[2]&&(n.tDescription=t[2]),"object"===e(t[2])||"object"===e(t[3])){var i=t[3]||t[2];Object.keys(i).forEach((function(e){n[e]=i[e]}))}return n},interpolation:{escapeValue:!0,format:function(e,t,n,i){return e},prefix:"{{",suffix:"}}",formatSeparator:",",unescapePrefix:"-",nestingPrefix:"$t(",nestingSuffix:")",nestingOptionsSeparator:",",maxReplaces:1e3,skipOnVariables:!1}},this.options,P(i)),this.format=this.options.interpolation.format,o||(o=I),!this.options.isClone){this.modules.logger?h.init(r(this.modules.logger),this.options):h.init(null,this.options);var s=new L(this.options);this.store=new E(this.options.resources,this.options);var a=this.services;a.logger=h,a.resourceStore=this.store,a.languageUtils=s,a.pluralResolver=new 
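/* Service wiring during i18next.init: j is the PluralResolver, N the Interpolator, and D the backend Connector defined above. */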
j(s,{prepend:this.options.pluralSeparator,compatibilityJSON:this.options.compatibilityJSON,simplifyPluralSuffix:this.options.simplifyPluralSuffix}),a.interpolator=new N(this.options),a.utils={hasLoadedNamespace:this.hasLoadedNamespace.bind(this)},a.backendConnector=new D(r(this.modules.backend),a.resourceStore,a,this.options),a.backendConnector.on("*",(function(e){for(var n=arguments.length,i=new Array(n>1?n-1:0),o=1;o<n;o++)i[o-1]=arguments[o];t.emit.apply(t,[e].concat(i))})),this.modules.languageDetector&&(a.languageDetector=r(this.modules.languageDetector),a.languageDetector.init(a,this.options.detection,this.options)),this.modules.i18nFormat&&(a.i18nFormat=r(this.modules.i18nFormat),a.i18nFormat.init&&a.i18nFormat.init(this)),this.translator=new A(this.services,this.options),this.translator.on("*",(function(e){for(var n=arguments.length,i=new Array(n>1?n-1:0),o=1;o<n;o++)i[o-1]=arguments[o];t.emit.apply(t,[e].concat(i))})),this.modules.external.forEach((function(e){e.init&&e.init(t)}))}this.services.languageDetector||this.options.lng||this.logger.warn("init: no languageDetector is used and no lng is defined"),["getResource","hasResourceBundle","getResourceBundle","getDataByLanguage"].forEach((function(e){t[e]=function(){var n;return(n=t.store)[e].apply(n,arguments)}})),["addResource","addResources","addResourceBundle","removeResourceBundle"].forEach((function(e){t[e]=function(){var n;return(n=t.store)[e].apply(n,arguments),t}}));var l=f(),c=function(){t.changeLanguage(t.options.lng,(function(e,n){t.isInitialized=!0,t.options.isClone||t.logger.log("initialized",t.options),t.emit("initialized",t.options),l.resolve(n),o(e,n)}))};return this.options.resources||!this.options.initImmediate?c():setTimeout(c,0),l}},{key:"loadResources",value:function(e){var t=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:I,i="string"==typeof e?e:this.language;if("function"==typeof e&&(n=e),!this.options.resources||this.options.partialBundledLanguages){if(i&&"cimode"===i.toLowerCase())return n();var o=[],r=function(e){e&&t.services.languageUtils.toResolveHierarchy(e).forEach((function(e){o.indexOf(e)<0&&o.push(e)}))};i?r(i):this.services.languageUtils.getFallbackCodes(this.options.fallbackLng).forEach((function(e){return r(e)})),this.options.preload&&this.options.preload.forEach((function(e){return r(e)})),this.services.backendConnector.load(o,this.options.ns,n)}else n(null)}},{key:"reloadResources",value:function(e,t,n){var i=f();return e||(e=this.languages),t||(t=this.options.ns),n||(n=I),this.services.backendConnector.reload(e,t,(function(e){i.resolve(),n(e)})),i}},{key:"use",value:function(e){if(!e)throw new Error("You are passing an undefined module! Please check the object you are passing to i18next.use()");if(!e.type)throw new Error("You are passing a wrong module! Please check the object you are passing to i18next.use()");return"backend"===e.type&&(this.modules.backend=e),("logger"===e.type||e.log&&e.warn&&e.error)&&(this.modules.logger=e),"languageDetector"===e.type&&(this.modules.languageDetector=e),"i18nFormat"===e.type&&(this.modules.i18nFormat=e),"postProcessor"===e.type&&T.addPostProcessor(e),"3rdParty"===e.type&&this.modules.external.push(e),this}},{key:"changeLanguage",value:function(e,t){var n=this;this.isLanguageChangingTo=e;var i=f();this.emit("languageChanging",e);var o=function(e){var o="string"==typeof e?e:n.services.languageUtils.getBestMatchFromCodes(e);o&&(n.language||(n.language=o,n.languages=n.services.languageUtils.toResolveHierarchy(o)),n.translator.language||n.translator.changeLanguage(o),n.services.languageDetector&&n.services.languageDetector.cacheUserLanguage(o)),n.loadResources(o,(function(e){!function(e,o){o?(n.language=o,n.languages=n.services.languageUtils.toResolveHierarchy(o),n.translator.changeLanguage(o),n.isLanguageChangingTo=void 0,n.emit("languageChanged",o),n.logger.log("languageChanged",o)):n.isLanguageChangingTo=void 0,i.resolve((function(){return n.t.apply(n,arguments)})),t&&t(e,(function(){return n.t.apply(n,arguments)}))}(e,o)}))};return e||!this.services.languageDetector||this.services.languageDetector.async?!e&&this.services.languageDetector&&this.services.languageDetector.async?this.services.languageDetector.detect(o):o(e):o(this.services.languageDetector.detect()),i}},{key:"getFixedT",value:function(t,i){var o=this,r=function t(i,r){var s;if("object"!==e(r)){for(var a=arguments.length,l=new Array(a>2?a-2:0),c=2;c<a;c++)l[c-2]=arguments[c];s=o.options.overloadTranslationOptionHandler([i,r].concat(l))}else s=n({},r);return s.lng=s.lng||t.lng,s.lngs=s.lngs||t.lngs,s.ns=s.ns||t.ns,o.t(i,s)};return"string"==typeof t?r.lng=t:r.lngs=t,r.ns=i,r}},{key:"t",value:function(){var e;return this.translator&&(e=this.translator).translate.apply(e,arguments)}},{key:"exists",value:function(){var e;return this.translator&&(e=this.translator).exists.apply(e,arguments)}},{key:"setDefaultNamespace",value:function(e){this.options.defaultNS=e}},{key:"hasLoadedNamespace",value:function(e){var t=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(!this.isInitialized)return this.logger.warn("hasLoadedNamespace: i18next was not initialized",this.languages),!1;if(!this.languages||!this.languages.length)return this.logger.warn("hasLoadedNamespace: i18n.languages were undefined or empty",this.languages),!1;var i=this.languages[0],o=!!this.options&&this.options.fallbackLng,r=this.languages[this.languages.length-1];if("cimode"===i.toLowerCase())return!0;var s=function(e,n){var i=t.services.backendConnector.state["".concat(e,"|").concat(n)];return-1===i||2===i};if(n.precheck){var a=n.precheck(this,s);if(void 0!==a)return a}return!!this.hasResourceBundle(i,e)||!this.services.backendConnector.backend||!(!s(i,e)||o&&!s(r,e))}},{key:"loadNamespaces",value:function(e,t){var n=this,i=f();return this.options.ns?("string"==typeof e&&(e=[e]),e.forEach((function(e){n.options.ns.indexOf(e)<0&&n.options.ns.push(e)})),this.loadResources((function(e){i.resolve(),t&&t(e)})),i):(t&&t(),Promise.resolve())}},{key:"loadLanguages",value:function(e,t){var n=f();"string"==typeof e&&(e=[e]);var i=this.options.preload||[],o=e.filter((function(e){return i.indexOf(e)<0}));return o.length?(this.options.preload=i.concat(o),this.loadResources((function(e){n.resolve(),t&&t(e)})),n):(t&&t(),Promise.resolve())}},{key:"dir",value:function(e){return e||(e=this.languages&&this.languages.length>0?this.languages[0]:this.language),e?["ar","shu","sqr","ssh","xaa","yhd","yud","aao","abh","abv","acm","acq","acw","acx","acy","adf","ads","aeb","aec","afb","ajp","apc","apd","arb","arq","ars","ary","arz","auz","avl","ayh","ayl","ayn","ayp","bbz","pga","he","iw","ps","pbt","pbu","pst","prp","prd","ug","ur","ydd","yds","yih","ji","yi","hbo","men","xmn","fa","jpr","peo","pes","prs","dv","sam"].indexOf(this.services.languageUtils.getLanguagePartFromCode(e))>=0?"rtl":"ltr":"rtl"}},{key:"createInstance",value:function(){return new o(arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},arguments.length>1?arguments[1]:void 0)}},{key:"cloneInstance",value:function(){var e=this,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},i=arguments.length>1&&void 0!==arguments[1]?arguments[1]:I,r=n({},this.options,t,{isClone:!0}),s=new o(r);return["store","services","language"].forEach((function(t){s[t]=e[t]})),s.services=n({},this.services),s.services.utils={hasLoadedNamespace:s.hasLoadedNamespace.bind(s)},s.translator=new A(s.services,s.options),s.translator.on("*",(function(e){for(var t=arguments.length,n=new Array(t>1?t-1:0),i=1;i<t;i++)n[i-1]=arguments[i];s.emit.apply(s,[e].concat(n))})),s.init(r,i),s.translator.options=s.options,s.translator.backendConnector.services.utils={hasLoadedNamespace:s.hasLoadedNamespace.bind(s)},s}}]),o}())})),function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e=e||self).i18nextXHRBackend=t()}(this,(function(){"use strict";var e=[],t=e.forEach,n=e.slice;function i(e){return t.call(n.call(arguments,1),(function(t){if(t)for(var n in t)void 0===e[n]&&(e[n]=t[n])})),e}function o(e){return(o="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function r(e){return(r="function"==typeof Symbol&&"symbol"===o(Symbol.iterator)?function(e){return o(e)}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":o(e)})(e)}function s(e,t){if(t&&"object"===r(t)){var n="",i=encodeURIComponent;for(var o in t)n+="&"+i(o)+"="+i(t[o]);if(!n)return e;e=e+(-1!==e.indexOf("?")?"&":"?")+n.slice(1)}return e}function a(e,t,n,i,o){i&&"object"===r(i)&&(o||(i._t=new Date),i=s("",i).slice(1)),t.queryStringParams&&(e=s(e,t.queryStringParams));try{var a;(a=XMLHttpRequest?new XMLHttpRequest:new ActiveXObject("MSXML2.XMLHTTP.3.0")).open(i?"POST":"GET",e,1),t.crossDomain||a.setRequestHeader("X-Requested-With","XMLHttpRequest"),a.withCredentials=!!t.withCredentials,i&&a.setRequestHeader("Content-type","application/x-www-form-urlencoded"),a.overrideMimeType&&a.overrideMimeType("application/json");var l=t.customHeaders;if(l="function"==typeof l?l():l)for(var c in l)a.setRequestHeader(c,l[c]);a.onreadystatechange=function(){a.readyState>3&&n&&n(a.responseText,a)},a.send(i)}catch(e){console&&console.log(e)}}function l(){return{loadPath:"/locales/{{lng}}/{{ns}}.json",addPath:"/locales/add/{{lng}}/{{ns}}",allowMultiLoading:!1,parse:JSON.parse,parsePayload:function(e,t,n){return function(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}({},t,n||"")},crossDomain:!1,ajax:a}}var c=function(){function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};!function(e,t){if(!(e instanceof t))throw new 
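/* Bundled vendor code (minified): i18next-xhr-backend. Its defaults above load "/locales/{{lng}}/{{ns}}.json" and post missing keys to "/locales/add/{{lng}}/{{ns}}" via XMLHttpRequest. */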
TypeError("Cannot call a class as a function")}(this,e),this.init(t,n),this.type="backend"}var t;return(t=[{key:"init",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};this.services=e,this.options=i(t,this.options||{},l())}},{key:"readMulti",value:function(e,t,n){var i=this.options.loadPath;"function"==typeof this.options.loadPath&&(i=this.options.loadPath(e,t));var o=this.services.interpolator.interpolate(i,{lng:e.join("+"),ns:t.join("+")});this.loadUrl(o,n)}},{key:"read",value:function(e,t,n){var i=this.options.loadPath;"function"==typeof this.options.loadPath&&(i=this.options.loadPath([e],[t]));var o=this.services.interpolator.interpolate(i,{lng:e,ns:t});this.loadUrl(o,n)}},{key:"loadUrl",value:function(e,t){var n=this;this.options.ajax(e,this.options,(function(i,o){if(o.status>=500&&o.status<600)return t("failed loading "+e,!0);if(o.status>=400&&o.status<500)return t("failed loading "+e,!1);var r,s;try{r=n.options.parse(i,e)}catch(t){s="failed parsing "+e+" to json"}if(s)return t(s,!1);t(null,r)}))}},{key:"create",value:function(e,t,n,i){var o=this;"string"==typeof e&&(e=[e]);var r=this.options.parsePayload(t,n,i);e.forEach((function(e){var n=o.services.interpolator.interpolate(o.options.addPath,{lng:e,ns:t});o.options.ajax(n,o.options,(function(e,t){}),r)}))}}])&&function(e,t){for(var n=0;n<t.length;n++){var i=t[n];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(e,i.key,i)}}(e.prototype,t),e}();return c.type="backend",c})),function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e=e||self).i18nextBrowserLanguageDetector=t()}(this,(function(){"use strict";var e=[],t=e.forEach,n=e.slice;function i(e){return t.call(n.call(arguments,1),(function(t){if(t)for(var n in t)void 0===e[n]&&(e[n]=t[n])})),e}var o=/^[\u0009\u0020-\u007e\u0080-\u00ff]+$/,r=function(e,t,n){var i=n||{};i.path=i.path||"/";var r=e+"="+encodeURIComponent(t);if(i.maxAge>0){var s=i.maxAge-0;if(isNaN(s))throw new Error("maxAge should be a Number");r+="; Max-Age="+Math.floor(s)}if(i.domain){if(!o.test(i.domain))throw new TypeError("option domain is invalid");r+="; Domain="+i.domain}if(i.path){if(!o.test(i.path))throw new TypeError("option path is invalid");r+="; Path="+i.path}if(i.expires){if("function"!=typeof i.expires.toUTCString)throw new TypeError("option expires is invalid");r+="; Expires="+i.expires.toUTCString()}if(i.httpOnly&&(r+="; HttpOnly"),i.secure&&(r+="; Secure"),i.sameSite)switch("string"==typeof i.sameSite?i.sameSite.toLowerCase():i.sameSite){case!0:r+="; SameSite=Strict";break;case"lax":r+="; SameSite=Lax";break;case"strict":r+="; SameSite=Strict";break;case"none":r+="; SameSite=None";break;default:throw new TypeError("option sameSite is invalid")}return r},s={name:"cookie",lookup:function(e){var t;if(e.lookupCookie&&"undefined"!=typeof document){var n=function(e){for(var t=e+"=",n=document.cookie.split(";"),i=0;i<n.length;i++){for(var o=n[i];" "===o.charAt(0);)o=o.substring(1,o.length);if(0===o.indexOf(t))return o.substring(t.length,o.length)}return null}(e.lookupCookie);n&&(t=n)}return t},cacheUserLanguage:function(e,t){t.lookupCookie&&"undefined"!=typeof document&&function(e,t,n,i){var o=arguments.length>4&&void 0!==arguments[4]?arguments[4]:{path:"/",sameSite:"strict"};n&&(o.expires=new 
Date,o.expires.setTime(o.expires.getTime()+60*n*1e3)),i&&(o.domain=i),document.cookie=r(e,encodeURIComponent(t),o)}(t.lookupCookie,e,t.cookieMinutes,t.cookieDomain,t.cookieOptions)}},a={name:"querystring",lookup:function(e){var t;if("undefined"!=typeof window)for(var n=window.location.search.substring(1).split("&"),i=0;i<n.length;i++){var o=n[i].indexOf("=");o>0&&n[i].substring(0,o)===e.lookupQuerystring&&(t=n[i].substring(o+1))}return t}},l=null,c=function(){if(null!==l)return l;try{l="undefined"!==window&&null!==window.localStorage,window.localStorage.setItem("i18next.translate.boo","foo"),window.localStorage.removeItem("i18next.translate.boo")}catch(e){l=!1}return l},u={name:"localStorage",lookup:function(e){var t;if(e.lookupLocalStorage&&c()){var n=window.localStorage.getItem(e.lookupLocalStorage);n&&(t=n)}return t},cacheUserLanguage:function(e,t){t.lookupLocalStorage&&c()&&window.localStorage.setItem(t.lookupLocalStorage,e)}},p=null,h=function(){if(null!==p)return p;try{p="undefined"!==window&&null!==window.sessionStorage,window.sessionStorage.setItem("i18next.translate.boo","foo"),window.sessionStorage.removeItem("i18next.translate.boo")}catch(e){p=!1}return p},d={name:"sessionStorage",lookup:function(e){var t;if(e.lookupSessionStorage&&h()){var n=window.sessionStorage.getItem(e.lookupSessionStorage);n&&(t=n)}return t},cacheUserLanguage:function(e,t){t.lookupSessionStorage&&h()&&window.sessionStorage.setItem(t.lookupSessionStorage,e)}},f={name:"navigator",lookup:function(e){var t=[];if("undefined"!=typeof navigator){if(navigator.languages)for(var n=0;n<navigator.languages.length;n++)t.push(navigator.languages[n]);navigator.userLanguage&&t.push(navigator.userLanguage),navigator.language&&t.push(navigator.language)}return t.length>0?t:void 0}},g={name:"htmlTag",lookup:function(e){var t,n=e.htmlTag||("undefined"!=typeof document?document.documentElement:null);return n&&"function"==typeof n.getAttribute&&(t=n.getAttribute("lang")),t}},y={name:"path",lookup:function(e){var t;if("undefined"!=typeof window){var n=window.location.pathname.match(/\/([a-zA-Z-]*)/g);if(n instanceof Array)if("number"==typeof e.lookupFromPathIndex){if("string"!=typeof n[e.lookupFromPathIndex])return;t=n[e.lookupFromPathIndex].replace("/","")}else t=n[0].replace("/","")}return t}},m={name:"subdomain",lookup:function(e){var t;if("undefined"!=typeof window){var n=window.location.href.match(/(?:http[s]*\:\/\/)*(.*?)\.(?=[^\/]*\..{2,5})/gi);n instanceof Array&&(t="number"==typeof e.lookupFromSubdomainIndex?n[e.lookupFromSubdomainIndex].replace("http://","").replace("https://","").replace(".",""):n[0].replace("http://","").replace("https://","").replace(".",""))}return t}},v=function(){function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};!function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}(this,e),this.type="languageDetector",this.detectors={},this.init(t,n)}var t;return(t=[{key:"init",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 
0!==arguments[2]?arguments[2]:{};this.services=e,this.options=i(t,this.options||{},{order:["querystring","cookie","localStorage","sessionStorage","navigator","htmlTag"],lookupQuerystring:"lng",lookupCookie:"i18next",lookupLocalStorage:"i18nextLng",lookupSessionStorage:"i18nextLng",caches:["localStorage"],excludeCacheFor:["cimode"]}),this.options.lookupFromUrlIndex&&(this.options.lookupFromPathIndex=this.options.lookupFromUrlIndex),this.i18nOptions=n,this.addDetector(s),this.addDetector(a),this.addDetector(u),this.addDetector(d),this.addDetector(f),this.addDetector(g),this.addDetector(y),this.addDetector(m)}},{key:"addDetector",value:function(e){this.detectors[e.name]=e}},{key:"detect",value:function(e){var t=this;e||(e=this.options.order);var n=[];return e.forEach((function(e){if(t.detectors[e]){var i=t.detectors[e].lookup(t.options);i&&"string"==typeof i&&(i=[i]),i&&(n=n.concat(i))}})),this.services.languageUtils.getBestMatchFromCodes?n:n.length>0?n[0]:null}},{key:"cacheUserLanguage",value:function(e,t){var n=this;t||(t=this.options.caches),t&&(this.options.excludeCacheFor&&this.options.excludeCacheFor.indexOf(e)>-1||t.forEach((function(t){n.detectors[t]&&n.detectors[t].cacheUserLanguage(e,n.options)})))}}])&&function(e,t){for(var n=0;n<t.length;n++){var i=t[n];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(e,i.key,i)}}(e.prototype,t),e}();return v.type="languageDetector",v})),function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):e.jqueryI18next=t()}(this,(function(){"use strict";var e=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var i in n)Object.prototype.hasOwnProperty.call(n,i)&&(e[i]=n[i])}return e},t={tName:"t",i18nName:"i18n",handleName:"localize",selectorAttr:"data-i18n",targetAttr:"i18n-target",optionsAttr:"i18n-options",useOptionsAttr:!1,parseDefaultValueFromContent:!0};return{init:function(n,i){function o(t,i,o){function r(t,n){return a.parseDefaultValueFromContent?e({},t,{defaultValue:n}):t}if(0!==i.length){var s="text";if(0===i.indexOf("[")){var l=i.split("]");i=l[1],s=l[0].substr(1,l[0].length-1)}if(i.indexOf(";")===i.length-1&&(i=i.substr(0,i.length-2)),"html"===s)t.html(n.t(i,r(o,t.html())));else if("text"===s)t.text(n.t(i,r(o,t.text())));else if("prepend"===s)t.prepend(n.t(i,r(o,t.html())));else if("append"===s)t.append(n.t(i,r(o,t.html())));else if(0===s.indexOf("data-")){var c=s.substr("data-".length),u=n.t(i,r(o,t.data(c)));t.data(c,u),t.attr(s,u)}else t.attr(s,n.t(i,r(o,t.attr(s))))}}function r(t,n){var r=t.attr(a.selectorAttr);if(r||void 0===r||!1===r||(r=t.text()||t.val()),r){var s=t,l=t.data(a.targetAttr);if(l&&(s=t.find(l)||t),n||!0!==a.useOptionsAttr||(n=t.data(a.optionsAttr)),n=n||{},r.indexOf(";")>=0){var c=r.split(";");i.each(c,(function(e,t){""!==t&&o(s,t.trim(),n)}))}else o(s,r,n);if(!0===a.useOptionsAttr){var u={};delete(u=e({clone:u},n)).lng,t.data(a.optionsAttr,u)}}}function s(e){return this.each((function(){r(i(this),e),i(this).find("["+a.selectorAttr+"]").each((function(){r(i(this),e)}))}))}var a=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};a=e({},t,a),i[a.tName]=n.t.bind(n),i[a.i18nName]=n,i.fn[a.handleName]=s}}})),function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.feather=t():e.feather=t()}("undefined"!=typeof self?self:this,(function(){return function(e){var t={};function n(i){if(t[i])return t[i].exports;var o=t[i]={i:i,l:!1,exports:{}};return e[i].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,i){n.o(e,t)||Object.defineProperty(e,t,{configurable:!1,enumerable:!0,get:i})},n.r=function(e){Object.defineProperty(e,"__esModule",{value:!0})},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=80)}([function(e,t,n){(function(t){var n="object",i=function(e){return e&&e.Math==Math&&e};e.exports=i(typeof globalThis==n&&globalThis)||i(typeof window==n&&window)||i(typeof self==n&&self)||i(typeof t==n&&t)||Function("return this")()}).call(this,n(75))},function(e,t){var n={}.hasOwnProperty;e.exports=function(e,t){return n.call(e,t)}},function(e,t,n){var i=n(0),o=n(11),r=n(33),s=n(62),a=i.Symbol,l=o("wks");e.exports=function(e){return l[e]||(l[e]=s&&a[e]||(s?a:r)("Symbol."+e))}},function(e,t,n){var i=n(6);e.exports=function(e){if(!i(e))throw TypeError(String(e)+" is not an object");return e}},function(e,t){e.exports=function(e){try{return!!e()}catch(e){return!0}}},function(e,t,n){var i=n(8),o=n(7),r=n(10);e.exports=i?function(e,t,n){return o.f(e,t,r(1,n))}:function(e,t,n){return e[t]=n,e}},function(e,t){e.exports=function(e){return"object"==typeof e?null!==e:"function"==typeof e}},function(e,t,n){var i=n(8),o=n(35),r=n(3),s=n(18),a=Object.defineProperty;t.f=i?a:function(e,t,n){if(r(e),t=s(t,!0),r(n),o)try{return a(e,t,n)}catch(e){}if("get"in n||"set"in n)throw TypeError("Accessors not supported");return"value"in n&&(e[t]=n.value),e}},function(e,t,n){var i=n(4);e.exports=!i((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(e,t){e.exports={}},function(e,t){e.exports=function(e,t){return{enumerable:!(1&e),configurable:!(2&e),writable:!(4&e),value:t}}},function(e,t,n){var i=n(0),o=n(19),r=n(17),s=i["__core-js_shared__"]||o("__core-js_shared__",{});(e.exports=function(e,t){return s[e]||(s[e]=void 0!==t?t:{})})("versions",[]).push({version:"3.1.3",mode:r?"pure":"global",copyright:"© 2019 Denis Pushkarev (zloirock.ru)"})},function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0});var i=s(n(43)),o=s(n(41)),r=s(n(40));function s(e){return e&&e.__esModule?e:{default:e}}t.default=Object.keys(o.default).map((function(e){return new i.default(e,o.default[e],r.default[e])})).reduce((function(e,t){return e[t.name]=t,e}),{})},function(e,t){e.exports=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"]},function(e,t,n){var i=n(72),o=n(20);e.exports=function(e){return i(o(e))}},function(e,t){e.exports={}},function(e,t,n){var i=n(11),o=n(33),r=i("keys");e.exports=function(e){return r[e]||(r[e]=o(e))}},function(e,t){e.exports=!1},function(e,t,n){var i=n(6);e.exports=function(e,t){if(!i(e))return e;var n,o;if(t&&"function"==typeof(n=e.toString)&&!i(o=n.call(e)))return o;if("function"==typeof(n=e.valueOf)&&!i(o=n.call(e)))return o;if(!t&&"function"==typeof(n=e.toString)&&!i(o=n.call(e)))return o;throw TypeError("Can't convert object to primitive value")}},function(e,t,n){var i=n(0),o=n(5);e.exports=function(e,t){try{o(i,e,t)}catch(n){i[e]=t}return t}},function(e,t){e.exports=function(e){if(null==e)throw TypeError("Can't call method on "+e);return e}},function(e,t){var n=Math.ceil,i=Math.floor;e.exports=function(e){return isNaN(e=+e)?0:(e>0?i:n)(e)}},function(e,t,n){var 
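/* Bundled vendor code (minified): jquery-i18next (localizes elements via the data-i18n attribute) followed by the feather icon toolkit's webpack bundle, whose module table continues below. */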
i;!function(){"use strict";var n=function(){function e(){}function t(e,t){for(var n=t.length,i=0;i<n;++i)o(e,t[i])}e.prototype=Object.create(null);var n={}.hasOwnProperty,i=/\s+/;function o(e,o){if(o){var r=typeof o;"string"===r?function(e,t){for(var n=t.split(i),o=n.length,r=0;r<o;++r)e[n[r]]=!0}(e,o):Array.isArray(o)?t(e,o):"object"===r?function(e,t){for(var i in t)n.call(t,i)&&(e[i]=!!t[i])}(e,o):"number"===r&&function(e,t){e[t]=!0}(e,o)}}return function(){for(var n=arguments.length,i=Array(n),o=0;o<n;o++)i[o]=arguments[o];var r=new e;t(r,i);var s=[];for(var a in r)r[a]&&s.push(a);return s.join(" ")}}();void 0!==e&&e.exports?e.exports=n:void 0===(i=function(){return n}.apply(t,[]))||(e.exports=i)}()},function(e,t,n){var i=n(7).f,o=n(1),r=n(2)("toStringTag");e.exports=function(e,t,n){e&&!o(e=n?e:e.prototype,r)&&i(e,r,{configurable:!0,value:t})}},function(e,t,n){var i=n(20);e.exports=function(e){return Object(i(e))}},function(e,t,n){var i=n(1),o=n(24),r=n(16),s=n(63),a=r("IE_PROTO"),l=Object.prototype;e.exports=s?Object.getPrototypeOf:function(e){return e=o(e),i(e,a)?e[a]:"function"==typeof e.constructor&&e instanceof e.constructor?e.constructor.prototype:e instanceof Object?l:null}},function(e,t,n){"use strict";var i,o,r,s=n(25),a=n(5),l=n(1),c=n(2),u=n(17),p=c("iterator"),h=!1;[].keys&&("next"in(r=[].keys())?(o=s(s(r)))!==Object.prototype&&(i=o):h=!0),null==i&&(i={}),u||l(i,p)||a(i,p,(function(){return this})),e.exports={IteratorPrototype:i,BUGGY_SAFARI_ITERATORS:h}},function(e,t,n){var i=n(21),o=Math.min;e.exports=function(e){return e>0?o(i(e),9007199254740991):0}},function(e,t,n){var i=n(1),o=n(14),r=n(68),s=n(15),a=r(!1);e.exports=function(e,t){var n,r=o(e),l=0,c=[];for(n in r)!i(s,n)&&i(r,n)&&c.push(n);for(;t.length>l;)i(r,n=t[l++])&&(~a(c,n)||c.push(n));return c}},function(e,t,n){var i=n(0),o=n(11),r=n(5),s=n(1),a=n(19),l=n(36),c=n(37),u=c.get,p=c.enforce,h=String(l).split("toString");o("inspectSource",(function(e){return l.call(e)})),(e.exports=function(e,t,n,o){var l=!!o&&!!o.unsafe,c=!!o&&!!o.enumerable,u=!!o&&!!o.noTargetGet;"function"==typeof n&&("string"!=typeof t||s(n,"name")||r(n,"name",t),p(n).source=h.join("string"==typeof t?t:"")),e!==i?(l?!u&&e[t]&&(c=!0):delete e[t],c?e[t]=n:r(e,t,n)):c?e[t]=n:a(t,n)})(Function.prototype,"toString",(function(){return"function"==typeof this&&u(this).source||l.call(this)}))},function(e,t){var n={}.toString;e.exports=function(e){return n.call(e).slice(8,-1)}},function(e,t,n){var i=n(8),o=n(73),r=n(10),s=n(14),a=n(18),l=n(1),c=n(35),u=Object.getOwnPropertyDescriptor;t.f=i?u:function(e,t){if(e=s(e),t=a(t,!0),c)try{return u(e,t)}catch(e){}if(l(e,t))return r(!o.f.call(e,t),e[t])}},function(e,t,n){var i=n(0),o=n(31).f,r=n(5),s=n(29),a=n(19),l=n(71),c=n(65);e.exports=function(e,t){var n,u,p,h,d,f=e.target,g=e.global,y=e.stat;if(n=g?i:y?i[f]||a(f,{}):(i[f]||{}).prototype)for(u in t){if(h=t[u],p=e.noTargetGet?(d=o(n,u))&&d.value:n[u],!c(g?u:f+(y?".":"#")+u,e.forced)&&void 0!==p){if(typeof h==typeof p)continue;l(h,p)}(e.sham||p&&p.sham)&&r(h,"sham",!0),s(n,u,h,e)}}},function(e,t){var n=0,i=Math.random();e.exports=function(e){return"Symbol(".concat(void 0===e?"":e,")_",(++n+i).toString(36))}},function(e,t,n){var i=n(0),o=n(6),r=i.document,s=o(r)&&o(r.createElement);e.exports=function(e){return s?r.createElement(e):{}}},function(e,t,n){var i=n(8),o=n(4),r=n(34);e.exports=!i&&!o((function(){return 7!=Object.defineProperty(r("div"),"a",{get:function(){return 7}}).a}))},function(e,t,n){var 
i=n(11);e.exports=i("native-function-to-string",Function.toString)},function(e,t,n){var i,o,r,s=n(76),a=n(0),l=n(6),c=n(5),u=n(1),p=n(16),h=n(15),d=a.WeakMap;if(s){var f=new d,g=f.get,y=f.has,m=f.set;i=function(e,t){return m.call(f,e,t),t},o=function(e){return g.call(f,e)||{}},r=function(e){return y.call(f,e)}}else{var v=p("state");h[v]=!0,i=function(e,t){return c(e,v,t),t},o=function(e){return u(e,v)?e[v]:{}},r=function(e){return u(e,v)}}e.exports={set:i,get:o,has:r,enforce:function(e){return r(e)?o(e):i(e,{})},getterFor:function(e){return function(t){var n;if(!l(t)||(n=o(t)).type!==e)throw TypeError("Incompatible receiver, "+e+" required");return n}}}},function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0});var i=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var i in n)Object.prototype.hasOwnProperty.call(n,i)&&(e[i]=n[i])}return e},o=s(n(22)),r=s(n(12));function s(e){return e&&e.__esModule?e:{default:e}}t.default=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};if("undefined"==typeof document)throw new Error("`feather.replace()` only works in a browser environment.");var t=document.querySelectorAll("[data-feather]");Array.from(t).forEach((function(t){return function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=function(e){return Array.from(e.attributes).reduce((function(e,t){return e[t.name]=t.value,e}),{})}(e),s=n["data-feather"];delete n["data-feather"];var a=r.default[s].toSvg(i({},t,n,{class:(0,o.default)(t.class,n.class)})),l=(new DOMParser).parseFromString(a,"image/svg+xml").querySelector("svg");e.parentNode.replaceChild(l,e)}(t,e)}))}},function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0});var i,o=(i=n(12))&&i.__esModule?i:{default:i};t.default=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(console.warn("feather.toSvg() is deprecated. Please use feather.icons[name].toSvg() instead."),!e)throw new Error("The required `key` (icon name) parameter is missing.");if(!o.default[e])throw new Error("No icon matching '"+e+"'. See the complete list of icons at https://feathericons.com");return o.default[e].toSvg(t)}},function(e){e.exports={activity:["pulse","health","action","motion"],airplay:["stream","cast","mirroring"],"alert-circle":["warning","alert","danger"],"alert-octagon":["warning","alert","danger"],"alert-triangle":["warning","alert","danger"],"align-center":["text alignment","center"],"align-justify":["text alignment","justified"],"align-left":["text alignment","left"],"align-right":["text alignment","right"],anchor:[],archive:["index","box"],"at-sign":["mention","at","email","message"],award:["achievement","badge"],aperture:["camera","photo"],"bar-chart":["statistics","diagram","graph"],"bar-chart-2":["statistics","diagram","graph"],battery:["power","electricity"],"battery-charging":["power","electricity"],bell:["alarm","notification","sound"],"bell-off":["alarm","notification","silent"],bluetooth:["wireless"],"book-open":["read","library"],book:["read","dictionary","booklet","magazine","library"],bookmark:["read","clip","marker","tag"],box:["cube"],briefcase:["work","bag","baggage","folder"],calendar:["date"],camera:["photo"],cast:["chromecast","airplay"],circle:["off","zero","record"],clipboard:["copy"],clock:["time","watch","alarm"],"cloud-drizzle":["weather","shower"],"cloud-lightning":["weather","bolt"],"cloud-rain":["weather"],"cloud-snow":["weather","blizzard"],cloud:["weather"],codepen:["logo"],codesandbox:["logo"],code:["source","programming"],coffee:["drink","cup","mug","tea","cafe","hot","beverage"],columns:["layout"],command:["keyboard","cmd","terminal","prompt"],compass:["navigation","safari","travel","direction"],copy:["clone","duplicate"],"corner-down-left":["arrow","return"],"corner-down-right":["arrow"],"corner-left-down":["arrow"],"corner-left-up":["arrow"],"corner-right-down":["arrow"],"corner-right-up":["arrow"],"corner-up-left":["arrow"],"corner-up-right":["arrow"],cpu:["processor","technology"],"credit-card":["purchase","payment","cc"],crop:["photo","image"],crosshair:["aim","target"],database:["storage","memory"],delete:["remove"],disc:["album","cd","dvd","music"],"dollar-sign":["currency","money","payment"],droplet:["water"],edit:["pencil","change"],"edit-2":["pencil","change"],"edit-3":["pencil","change"],eye:["view","watch"],"eye-off":["view","watch","hide","hidden"],"external-link":["outbound"],facebook:["logo","social"],"fast-forward":["music"],figma:["logo","design","tool"],"file-minus":["delete","remove","erase"],"file-plus":["add","create","new"],"file-text":["data","txt","pdf"],film:["movie","video"],filter:["funnel","hopper"],flag:["report"],"folder-minus":["directory"],"folder-plus":["directory"],folder:["directory"],framer:["logo","design","tool"],frown:["emoji","face","bad","sad","emotion"],gift:["present","box","birthday","party"],"git-branch":["code","version control"],"git-commit":["code","version control"],"git-merge":["code","version control"],"git-pull-request":["code","version control"],github:["logo","version control"],gitlab:["logo","version control"],globe:["world","browser","language","translate"],"hard-drive":["computer","server","memory","data"],hash:["hashtag","number","pound"],headphones:["music","audio","sound"],heart:["like","love","emotion"],"help-circle":["question mark"],hexagon:["shape","node.js","logo"],home:["house","living"],image:["picture"],inbox:["email"],instagram:["logo","camera"],key:["password","login","authentication","secure"],layers:["stack"],layout:["window","webpage"],"life-bouy":["help","life ring","support"],link:["chain","url"],"link-2":["chain","url"],linkedin:["logo","social media"],list:["options"],lock:["security","password","secure"],"log-in":["sign in","arrow","enter"],"log-out":["sign out","arrow","exit"],mail:["email","message"],"map-pin":["location","navigation","travel","marker"],map:["location","navigation","travel"],maximize:["fullscreen"],"maximize-2":["fullscreen","arrows","expand"],meh:["emoji","face","neutral","emotion"],menu:["bars","navigation","hamburger"],"message-circle":["comment","chat"],"message-square":["comment","chat"],"mic-off":["record","sound","mute"],mic:["record","sound","listen"],minimize:["exit fullscreen","close"],"minimize-2":["exit fullscreen","arrows","close"],minus:["subtract"],monitor:["tv","screen","display"],moon:["dark","night"],"more-horizontal":["ellipsis"],"more-vertical":["ellipsis"],"mouse-pointer":["arrow","cursor"],move:["arrows"],music:["note"],navigation:["location","travel"],"navigation-2":["location","travel"],octagon:["stop"],package:["box","container"],paperclip:["attachment"],pause:["music","stop"],"pause-circle":["music","audio","stop"],"pen-tool":["vector","drawing"],percent:["discount"],"phone-call":["ring"],"phone-forwarded":["call"],"phone-incoming":["call"],"phone-missed":["call"],"phone-off":["call","mute"],"phone-outgoing":["call"],phone:["call"],play:["music","start"],"pie-chart":["statistics","diagram"],"play-circle":["music","start"],plus:["add","new"],"plus-circle":["add","new"],"plus-square":["add","new"],pocket:["logo","save"],power:["on","off"],printer:["fax","office","device"],radio:["signal"],"refresh-cw":["synchronise","arrows"],"refresh-ccw":["arrows"],repeat:["loop","arrows"],rewind:["music"],"rotate-ccw":["arrow"],"rotate-cw":["arrow"],rss:["feed","subscribe"],save:["floppy disk"],scissors:["cut"],search:["find","magnifier","magnifying glass"],send:["message","mail","email","paper airplane","paper aeroplane"],settings:["cog","edit","gear","preferences"],"share-2":["network","connections"],shield:["security","secure"],"shield-off":["security","insecure"],"shopping-bag":["ecommerce","cart","purchase","store"],"shopping-cart":["ecommerce","cart","purchase","store"],shuffle:["music"],"skip-back":["music"],"skip-forward":["music"],slack:["logo"],slash:["ban","no"],sliders:["settings","controls"],smartphone:["cellphone","device"],smile:["emoji","face","happy","good","emotion"],speaker:["audio","music"],star:["bookmark","favorite","like"],"stop-circle":["media","music"],sun:["brightness","weather","light"],sunrise:["weather","time","morning","day"],sunset:["weather","time","evening","night"],tablet:["device"],tag:["label"],target:["logo","bullseye"],terminal:["code","command 
line","prompt"],thermometer:["temperature","celsius","fahrenheit","weather"],"thumbs-down":["dislike","bad","emotion"],"thumbs-up":["like","good","emotion"],"toggle-left":["on","off","switch"],"toggle-right":["on","off","switch"],tool:["settings","spanner"],trash:["garbage","delete","remove","bin"],"trash-2":["garbage","delete","remove","bin"],triangle:["delta"],truck:["delivery","van","shipping","transport","lorry"],tv:["television","stream"],twitch:["logo"],twitter:["logo","social"],type:["text"],umbrella:["rain","weather"],unlock:["security"],"user-check":["followed","subscribed"],"user-minus":["delete","remove","unfollow","unsubscribe"],"user-plus":["new","add","create","follow","subscribe"],"user-x":["delete","remove","unfollow","unsubscribe","unavailable"],user:["person","account"],users:["group"],"video-off":["camera","movie","film"],video:["camera","movie","film"],voicemail:["phone"],volume:["music","sound","mute"],"volume-1":["music","sound"],"volume-2":["music","sound"],"volume-x":["music","sound","mute"],watch:["clock","time"],"wifi-off":["disabled"],wifi:["connection","signal","wireless"],wind:["weather","air"],"x-circle":["cancel","close","delete","remove","times","clear"],"x-octagon":["delete","stop","alert","warning","times","clear"],"x-square":["cancel","close","delete","remove","times","clear"],x:["cancel","close","delete","remove","times","clear"],youtube:["logo","video","play"],"zap-off":["flash","camera","lightning"],zap:["flash","camera","lightning"],"zoom-in":["magnifying glass"],"zoom-out":["magnifying glass"]}},function(e){e.exports={activity:'<polyline points="22 12 18 12 15 21 9 3 6 12 2 12"></polyline>',airplay:'<path d="M5 17H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v10a2 2 0 0 1-2 2h-1"></path><polygon points="12 15 17 21 7 21 12 15"></polygon>',"alert-circle":'<circle cx="12" cy="12" r="10"></circle><line x1="12" y1="8" x2="12" y2="12"></line><line x1="12" y1="16" x2="12.01" y2="16"></line>',"alert-octagon":'<polygon points="7.86 2 16.14 2 22 7.86 22 16.14 16.14 22 7.86 22 2 16.14 2 7.86 7.86 2"></polygon><line x1="12" y1="8" x2="12" y2="12"></line><line x1="12" y1="16" x2="12.01" y2="16"></line>',"alert-triangle":'<path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path><line x1="12" y1="9" x2="12" y2="13"></line><line x1="12" y1="17" x2="12.01" y2="17"></line>',"align-center":'<line x1="18" y1="10" x2="6" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="18" y1="18" x2="6" y2="18"></line>',"align-justify":'<line x1="21" y1="10" x2="3" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="21" y1="18" x2="3" y2="18"></line>',"align-left":'<line x1="17" y1="10" x2="3" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="17" y1="18" x2="3" y2="18"></line>',"align-right":'<line x1="21" y1="10" x2="7" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="21" y1="18" x2="7" y2="18"></line>',anchor:'<circle cx="12" cy="5" r="3"></circle><line x1="12" y1="22" x2="12" y2="8"></line><path d="M5 12H2a10 10 0 0 0 20 0h-3"></path>',aperture:'<circle cx="12" cy="12" r="10"></circle><line x1="14.31" y1="8" x2="20.05" y2="17.94"></line><line x1="9.69" y1="8" x2="21.17" y2="8"></line><line x1="7.38" y1="12" x2="13.12" y2="2.06"></line><line x1="9.69" y1="16" x2="3.95" y2="6.06"></line><line 
x1="14.31" y1="16" x2="2.83" y2="16"></line><line x1="16.62" y1="12" x2="10.88" y2="21.94"></line>',archive:'<polyline points="21 8 21 21 3 21 3 8"></polyline><rect x="1" y="3" width="22" height="5"></rect><line x1="10" y1="12" x2="14" y2="12"></line>',"arrow-down-circle":'<circle cx="12" cy="12" r="10"></circle><polyline points="8 12 12 16 16 12"></polyline><line x1="12" y1="8" x2="12" y2="16"></line>',"arrow-down-left":'<line x1="17" y1="7" x2="7" y2="17"></line><polyline points="17 17 7 17 7 7"></polyline>',"arrow-down-right":'<line x1="7" y1="7" x2="17" y2="17"></line><polyline points="17 7 17 17 7 17"></polyline>',"arrow-down":'<line x1="12" y1="5" x2="12" y2="19"></line><polyline points="19 12 12 19 5 12"></polyline>',"arrow-left-circle":'<circle cx="12" cy="12" r="10"></circle><polyline points="12 8 8 12 12 16"></polyline><line x1="16" y1="12" x2="8" y2="12"></line>',"arrow-left":'<line x1="19" y1="12" x2="5" y2="12"></line><polyline points="12 19 5 12 12 5"></polyline>',"arrow-right-circle":'<circle cx="12" cy="12" r="10"></circle><polyline points="12 16 16 12 12 8"></polyline><line x1="8" y1="12" x2="16" y2="12"></line>',"arrow-right":'<line x1="5" y1="12" x2="19" y2="12"></line><polyline points="12 5 19 12 12 19"></polyline>',"arrow-up-circle":'<circle cx="12" cy="12" r="10"></circle><polyline points="16 12 12 8 8 12"></polyline><line x1="12" y1="16" x2="12" y2="8"></line>',"arrow-up-left":'<line x1="17" y1="17" x2="7" y2="7"></line><polyline points="7 17 7 7 17 7"></polyline>',"arrow-up-right":'<line x1="7" y1="17" x2="17" y2="7"></line><polyline points="7 7 17 7 17 17"></polyline>',"arrow-up":'<line x1="12" y1="19" x2="12" y2="5"></line><polyline points="5 12 12 5 19 12"></polyline>',"at-sign":'<circle cx="12" cy="12" r="4"></circle><path d="M16 8v5a3 3 0 0 0 6 0v-1a10 10 0 1 0-3.92 7.94"></path>',award:'<circle cx="12" cy="8" r="7"></circle><polyline points="8.21 13.89 7 23 12 20 17 23 15.79 13.88"></polyline>',"bar-chart-2":'<line x1="18" y1="20" x2="18" y2="10"></line><line x1="12" y1="20" x2="12" y2="4"></line><line x1="6" y1="20" x2="6" y2="14"></line>',"bar-chart":'<line x1="12" y1="20" x2="12" y2="10"></line><line x1="18" y1="20" x2="18" y2="4"></line><line x1="6" y1="20" x2="6" y2="16"></line>',"battery-charging":'<path d="M5 18H3a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h3.19M15 6h2a2 2 0 0 1 2 2v8a2 2 0 0 1-2 2h-3.19"></path><line x1="23" y1="13" x2="23" y2="11"></line><polyline points="11 6 7 12 13 12 9 18"></polyline>',battery:'<rect x="1" y="6" width="18" height="12" rx="2" ry="2"></rect><line x1="23" y1="13" x2="23" y2="11"></line>',"bell-off":'<path d="M13.73 21a2 2 0 0 1-3.46 0"></path><path d="M18.63 13A17.89 17.89 0 0 1 18 8"></path><path d="M6.26 6.26A5.86 5.86 0 0 0 6 8c0 7-3 9-3 9h14"></path><path d="M18 8a6 6 0 0 0-9.33-5"></path><line x1="1" y1="1" x2="23" y2="23"></line>',bell:'<path d="M18 8A6 6 0 0 0 6 8c0 7-3 9-3 9h18s-3-2-3-9"></path><path d="M13.73 21a2 2 0 0 1-3.46 0"></path>',bluetooth:'<polyline points="6.5 6.5 17.5 17.5 12 23 12 1 17.5 6.5 6.5 17.5"></polyline>',bold:'<path d="M6 4h8a4 4 0 0 1 4 4 4 4 0 0 1-4 4H6z"></path><path d="M6 12h9a4 4 0 0 1 4 4 4 4 0 0 1-4 4H6z"></path>',"book-open":'<path d="M2 3h6a4 4 0 0 1 4 4v14a3 3 0 0 0-3-3H2z"></path><path d="M22 3h-6a4 4 0 0 0-4 4v14a3 3 0 0 1 3-3h7z"></path>',book:'<path d="M4 19.5A2.5 2.5 0 0 1 6.5 17H20"></path><path d="M6.5 2H20v20H6.5A2.5 2.5 0 0 1 4 19.5v-15A2.5 2.5 0 0 1 6.5 2z"></path>',bookmark:'<path d="M19 21l-7-5-7 5V5a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2z"></path>',box:'<path d="M21 16V8a2 2 0 0 
0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path><polyline points="3.27 6.96 12 12.01 20.73 6.96"></polyline><line x1="12" y1="22.08" x2="12" y2="12"></line>',briefcase:'<rect x="2" y="7" width="20" height="14" rx="2" ry="2"></rect><path d="M16 21V5a2 2 0 0 0-2-2h-4a2 2 0 0 0-2 2v16"></path>',calendar:'<rect x="3" y="4" width="18" height="18" rx="2" ry="2"></rect><line x1="16" y1="2" x2="16" y2="6"></line><line x1="8" y1="2" x2="8" y2="6"></line><line x1="3" y1="10" x2="21" y2="10"></line>',"camera-off":'<line x1="1" y1="1" x2="23" y2="23"></line><path d="M21 21H3a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h3m3-3h6l2 3h4a2 2 0 0 1 2 2v9.34m-7.72-2.06a4 4 0 1 1-5.56-5.56"></path>',camera:'<path d="M23 19a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h4l2-3h6l2 3h4a2 2 0 0 1 2 2z"></path><circle cx="12" cy="13" r="4"></circle>',cast:'<path d="M2 16.1A5 5 0 0 1 5.9 20M2 12.05A9 9 0 0 1 9.95 20M2 8V6a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v12a2 2 0 0 1-2 2h-6"></path><line x1="2" y1="20" x2="2.01" y2="20"></line>',"check-circle":'<path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"></path><polyline points="22 4 12 14.01 9 11.01"></polyline>',"check-square":'<polyline points="9 11 12 14 22 4"></polyline><path d="M21 12v7a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h11"></path>',check:'<polyline points="20 6 9 17 4 12"></polyline>',"chevron-down":'<polyline points="6 9 12 15 18 9"></polyline>',"chevron-left":'<polyline points="15 18 9 12 15 6"></polyline>',"chevron-right":'<polyline points="9 18 15 12 9 6"></polyline>',"chevron-up":'<polyline points="18 15 12 9 6 15"></polyline>',"chevrons-down":'<polyline points="7 13 12 18 17 13"></polyline><polyline points="7 6 12 11 17 6"></polyline>',"chevrons-left":'<polyline points="11 17 6 12 11 7"></polyline><polyline points="18 17 13 12 18 7"></polyline>',"chevrons-right":'<polyline points="13 17 18 12 13 7"></polyline><polyline points="6 17 11 12 6 7"></polyline>',"chevrons-up":'<polyline points="17 11 12 6 7 11"></polyline><polyline points="17 18 12 13 7 18"></polyline>',chrome:'<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="4"></circle><line x1="21.17" y1="8" x2="12" y2="8"></line><line x1="3.95" y1="6.06" x2="8.54" y2="14"></line><line x1="10.88" y1="21.94" x2="15.46" y2="14"></line>',circle:'<circle cx="12" cy="12" r="10"></circle>',clipboard:'<path d="M16 4h2a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2H6a2 2 0 0 1-2-2V6a2 2 0 0 1 2-2h2"></path><rect x="8" y="2" width="8" height="4" rx="1" ry="1"></rect>',clock:'<circle cx="12" cy="12" r="10"></circle><polyline points="12 6 12 12 16 14"></polyline>',"cloud-drizzle":'<line x1="8" y1="19" x2="8" y2="21"></line><line x1="8" y1="13" x2="8" y2="15"></line><line x1="16" y1="19" x2="16" y2="21"></line><line x1="16" y1="13" x2="16" y2="15"></line><line x1="12" y1="21" x2="12" y2="23"></line><line x1="12" y1="15" x2="12" y2="17"></line><path d="M20 16.58A5 5 0 0 0 18 7h-1.26A8 8 0 1 0 4 15.25"></path>',"cloud-lightning":'<path d="M19 16.9A5 5 0 0 0 18 7h-1.26a8 8 0 1 0-11.62 9"></path><polyline points="13 11 9 17 15 17 11 23"></polyline>',"cloud-off":'<path d="M22.61 16.95A5 5 0 0 0 18 10h-1.26a8 8 0 0 0-7.05-6M5 5a8 8 0 0 0 4 15h9a5 5 0 0 0 1.7-.3"></path><line x1="1" y1="1" x2="23" y2="23"></line>',"cloud-rain":'<line x1="16" y1="13" x2="16" y2="21"></line><line x1="8" y1="13" x2="8" y2="21"></line><line x1="12" y1="15" x2="12" y2="23"></line><path d="M20 16.58A5 5 0 0 0 18 7h-1.26A8 8 0 1 0 4 15.25"></path>',"cloud-snow":'<path d="M20 17.58A5 5 0 0 0 18 8h-1.26A8 
8 0 1 0 4 16.25"></path><line x1="8" y1="16" x2="8.01" y2="16"></line><line x1="8" y1="20" x2="8.01" y2="20"></line><line x1="12" y1="18" x2="12.01" y2="18"></line><line x1="12" y1="22" x2="12.01" y2="22"></line><line x1="16" y1="16" x2="16.01" y2="16"></line><line x1="16" y1="20" x2="16.01" y2="20"></line>',cloud:'<path d="M18 10h-1.26A8 8 0 1 0 9 20h9a5 5 0 0 0 0-10z"></path>',code:'<polyline points="16 18 22 12 16 6"></polyline><polyline points="8 6 2 12 8 18"></polyline>',codepen:'<polygon points="12 2 22 8.5 22 15.5 12 22 2 15.5 2 8.5 12 2"></polygon><line x1="12" y1="22" x2="12" y2="15.5"></line><polyline points="22 8.5 12 15.5 2 8.5"></polyline><polyline points="2 15.5 12 8.5 22 15.5"></polyline><line x1="12" y1="2" x2="12" y2="8.5"></line>',codesandbox:'<path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path><polyline points="7.5 4.21 12 6.81 16.5 4.21"></polyline><polyline points="7.5 19.79 7.5 14.6 3 12"></polyline><polyline points="21 12 16.5 14.6 16.5 19.79"></polyline><polyline points="3.27 6.96 12 12.01 20.73 6.96"></polyline><line x1="12" y1="22.08" x2="12" y2="12"></line>',coffee:'<path d="M18 8h1a4 4 0 0 1 0 8h-1"></path><path d="M2 8h16v9a4 4 0 0 1-4 4H6a4 4 0 0 1-4-4V8z"></path><line x1="6" y1="1" x2="6" y2="4"></line><line x1="10" y1="1" x2="10" y2="4"></line><line x1="14" y1="1" x2="14" y2="4"></line>',columns:'<path d="M12 3h7a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-7m0-18H5a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h7m0-18v18"></path>',command:'<path d="M18 3a3 3 0 0 0-3 3v12a3 3 0 0 0 3 3 3 3 0 0 0 3-3 3 3 0 0 0-3-3H6a3 3 0 0 0-3 3 3 3 0 0 0 3 3 3 3 0 0 0 3-3V6a3 3 0 0 0-3-3 3 3 0 0 0-3 3 3 3 0 0 0 3 3h12a3 3 0 0 0 3-3 3 3 0 0 0-3-3z"></path>',compass:'<circle cx="12" cy="12" r="10"></circle><polygon points="16.24 7.76 14.12 14.12 7.76 16.24 9.88 9.88 16.24 7.76"></polygon>',copy:'<rect x="9" y="9" width="13" height="13" rx="2" ry="2"></rect><path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1"></path>',"corner-down-left":'<polyline points="9 10 4 15 9 20"></polyline><path d="M20 4v7a4 4 0 0 1-4 4H4"></path>',"corner-down-right":'<polyline points="15 10 20 15 15 20"></polyline><path d="M4 4v7a4 4 0 0 0 4 4h12"></path>',"corner-left-down":'<polyline points="14 15 9 20 4 15"></polyline><path d="M20 4h-7a4 4 0 0 0-4 4v12"></path>',"corner-left-up":'<polyline points="14 9 9 4 4 9"></polyline><path d="M20 20h-7a4 4 0 0 1-4-4V4"></path>',"corner-right-down":'<polyline points="10 15 15 20 20 15"></polyline><path d="M4 4h7a4 4 0 0 1 4 4v12"></path>',"corner-right-up":'<polyline points="10 9 15 4 20 9"></polyline><path d="M4 20h7a4 4 0 0 0 4-4V4"></path>',"corner-up-left":'<polyline points="9 14 4 9 9 4"></polyline><path d="M20 20v-7a4 4 0 0 0-4-4H4"></path>',"corner-up-right":'<polyline points="15 14 20 9 15 4"></polyline><path d="M4 20v-7a4 4 0 0 1 4-4h12"></path>',cpu:'<rect x="4" y="4" width="16" height="16" rx="2" ry="2"></rect><rect x="9" y="9" width="6" height="6"></rect><line x1="9" y1="1" x2="9" y2="4"></line><line x1="15" y1="1" x2="15" y2="4"></line><line x1="9" y1="20" x2="9" y2="23"></line><line x1="15" y1="20" x2="15" y2="23"></line><line x1="20" y1="9" x2="23" y2="9"></line><line x1="20" y1="14" x2="23" y2="14"></line><line x1="1" y1="9" x2="4" y2="9"></line><line x1="1" y1="14" x2="4" y2="14"></line>',"credit-card":'<rect x="1" y="4" width="22" height="16" rx="2" ry="2"></rect><line x1="1" y1="10" x2="23" y2="10"></line>',crop:'<path d="M6.13 1L6 16a2 2 0 0 0 2 2h15"></path><path d="M1 
6.13L16 6a2 2 0 0 1 2 2v15"></path>',crosshair:'<circle cx="12" cy="12" r="10"></circle><line x1="22" y1="12" x2="18" y2="12"></line><line x1="6" y1="12" x2="2" y2="12"></line><line x1="12" y1="6" x2="12" y2="2"></line><line x1="12" y1="22" x2="12" y2="18"></line>',database:'<ellipse cx="12" cy="5" rx="9" ry="3"></ellipse><path d="M21 12c0 1.66-4 3-9 3s-9-1.34-9-3"></path><path d="M3 5v14c0 1.66 4 3 9 3s9-1.34 9-3V5"></path>',delete:'<path d="M21 4H8l-7 8 7 8h13a2 2 0 0 0 2-2V6a2 2 0 0 0-2-2z"></path><line x1="18" y1="9" x2="12" y2="15"></line><line x1="12" y1="9" x2="18" y2="15"></line>',disc:'<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="3"></circle>',"divide-circle":'<line x1="8" y1="12" x2="16" y2="12"></line><line x1="12" y1="16" x2="12" y2="16"></line><line x1="12" y1="8" x2="12" y2="8"></line><circle cx="12" cy="12" r="10"></circle>',"divide-square":'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="8" y1="12" x2="16" y2="12"></line><line x1="12" y1="16" x2="12" y2="16"></line><line x1="12" y1="8" x2="12" y2="8"></line>',divide:'<circle cx="12" cy="6" r="2"></circle><line x1="5" y1="12" x2="19" y2="12"></line><circle cx="12" cy="18" r="2"></circle>',"dollar-sign":'<line x1="12" y1="1" x2="12" y2="23"></line><path d="M17 5H9.5a3.5 3.5 0 0 0 0 7h5a3.5 3.5 0 0 1 0 7H6"></path>',"download-cloud":'<polyline points="8 17 12 21 16 17"></polyline><line x1="12" y1="12" x2="12" y2="21"></line><path d="M20.88 18.09A5 5 0 0 0 18 9h-1.26A8 8 0 1 0 3 16.29"></path>',download:'<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"></path><polyline points="7 10 12 15 17 10"></polyline><line x1="12" y1="15" x2="12" y2="3"></line>',dribbble:'<circle cx="12" cy="12" r="10"></circle><path d="M8.56 2.75c4.37 6.03 6.02 9.42 8.03 17.72m2.54-15.38c-3.72 4.35-8.94 5.66-16.88 5.85m19.5 1.9c-3.5-.93-6.63-.82-8.94 0-2.58.92-5.01 2.86-7.44 6.32"></path>',droplet:'<path d="M12 2.69l5.66 5.66a8 8 0 1 1-11.31 0z"></path>',"edit-2":'<path d="M17 3a2.828 2.828 0 1 1 4 4L7.5 20.5 2 22l1.5-5.5L17 3z"></path>',"edit-3":'<path d="M12 20h9"></path><path d="M16.5 3.5a2.121 2.121 0 0 1 3 3L7 19l-4 1 1-4L16.5 3.5z"></path>',edit:'<path d="M11 4H4a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2v-7"></path><path d="M18.5 2.5a2.121 2.121 0 0 1 3 3L12 15l-4 1 1-4 9.5-9.5z"></path>',"external-link":'<path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6"></path><polyline points="15 3 21 3 21 9"></polyline><line x1="10" y1="14" x2="21" y2="3"></line>',"eye-off":'<path d="M17.94 17.94A10.07 10.07 0 0 1 12 20c-7 0-11-8-11-8a18.45 18.45 0 0 1 5.06-5.94M9.9 4.24A9.12 9.12 0 0 1 12 4c7 0 11 8 11 8a18.5 18.5 0 0 1-2.16 3.19m-6.72-1.07a3 3 0 1 1-4.24-4.24"></path><line x1="1" y1="1" x2="23" y2="23"></line>',eye:'<path d="M1 12s4-8 11-8 11 8 11 8-4 8-11 8-11-8-11-8z"></path><circle cx="12" cy="12" r="3"></circle>',facebook:'<path d="M18 2h-3a5 5 0 0 0-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 0 1 1-1h3z"></path>',"fast-forward":'<polygon points="13 19 22 12 13 5 13 19"></polygon><polygon points="2 19 11 12 2 5 2 19"></polygon>',feather:'<path d="M20.24 12.24a6 6 0 0 0-8.49-8.49L5 10.5V19h8.5z"></path><line x1="16" y1="8" x2="2" y2="22"></line><line x1="17.5" y1="15" x2="9" y2="15"></line>',figma:'<path d="M5 5.5A3.5 3.5 0 0 1 8.5 2H12v7H8.5A3.5 3.5 0 0 1 5 5.5z"></path><path d="M12 2h3.5a3.5 3.5 0 1 1 0 7H12V2z"></path><path d="M12 12.5a3.5 3.5 0 1 1 7 0 3.5 3.5 0 1 1-7 0z"></path><path d="M5 19.5A3.5 3.5 0 0 1 8.5 16H12v3.5a3.5 3.5 0 1 1-7 0z"></path><path d="M5 12.5A3.5 3.5 0 0 1 8.5 
9H12v7H8.5A3.5 3.5 0 0 1 5 12.5z"></path>',"file-minus":'<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path><polyline points="14 2 14 8 20 8"></polyline><line x1="9" y1="15" x2="15" y2="15"></line>',"file-plus":'<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path><polyline points="14 2 14 8 20 8"></polyline><line x1="12" y1="18" x2="12" y2="12"></line><line x1="9" y1="15" x2="15" y2="15"></line>',"file-text":'<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path><polyline points="14 2 14 8 20 8"></polyline><line x1="16" y1="13" x2="8" y2="13"></line><line x1="16" y1="17" x2="8" y2="17"></line><polyline points="10 9 9 9 8 9"></polyline>',file:'<path d="M13 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V9z"></path><polyline points="13 2 13 9 20 9"></polyline>',film:'<rect x="2" y="2" width="20" height="20" rx="2.18" ry="2.18"></rect><line x1="7" y1="2" x2="7" y2="22"></line><line x1="17" y1="2" x2="17" y2="22"></line><line x1="2" y1="12" x2="22" y2="12"></line><line x1="2" y1="7" x2="7" y2="7"></line><line x1="2" y1="17" x2="7" y2="17"></line><line x1="17" y1="17" x2="22" y2="17"></line><line x1="17" y1="7" x2="22" y2="7"></line>',filter:'<polygon points="22 3 2 3 10 12.46 10 19 14 21 14 12.46 22 3"></polygon>',flag:'<path d="M4 15s1-1 4-1 5 2 8 2 4-1 4-1V3s-1 1-4 1-5-2-8-2-4 1-4 1z"></path><line x1="4" y1="22" x2="4" y2="15"></line>',"folder-minus":'<path d="M22 19a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h5l2 3h9a2 2 0 0 1 2 2z"></path><line x1="9" y1="14" x2="15" y2="14"></line>',"folder-plus":'<path d="M22 19a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h5l2 3h9a2 2 0 0 1 2 2z"></path><line x1="12" y1="11" x2="12" y2="17"></line><line x1="9" y1="14" x2="15" y2="14"></line>',folder:'<path d="M22 19a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h5l2 3h9a2 2 0 0 1 2 2z"></path>',framer:'<path d="M5 16V9h14V2H5l14 14h-7m-7 0l7 7v-7m-7 0h7"></path>',frown:'<circle cx="12" cy="12" r="10"></circle><path d="M16 16s-1.5-2-4-2-4 2-4 2"></path><line x1="9" y1="9" x2="9.01" y2="9"></line><line x1="15" y1="9" x2="15.01" y2="9"></line>',gift:'<polyline points="20 12 20 22 4 22 4 12"></polyline><rect x="2" y="7" width="20" height="5"></rect><line x1="12" y1="22" x2="12" y2="7"></line><path d="M12 7H7.5a2.5 2.5 0 0 1 0-5C11 2 12 7 12 7z"></path><path d="M12 7h4.5a2.5 2.5 0 0 0 0-5C13 2 12 7 12 7z"></path>',"git-branch":'<line x1="6" y1="3" x2="6" y2="15"></line><circle cx="18" cy="6" r="3"></circle><circle cx="6" cy="18" r="3"></circle><path d="M18 9a9 9 0 0 1-9 9"></path>',"git-commit":'<circle cx="12" cy="12" r="4"></circle><line x1="1.05" y1="12" x2="7" y2="12"></line><line x1="17.01" y1="12" x2="22.96" y2="12"></line>',"git-merge":'<circle cx="18" cy="18" r="3"></circle><circle cx="6" cy="6" r="3"></circle><path d="M6 21V9a9 9 0 0 0 9 9"></path>',"git-pull-request":'<circle cx="18" cy="18" r="3"></circle><circle cx="6" cy="6" r="3"></circle><path d="M13 6h3a2 2 0 0 1 2 2v7"></path><line x1="6" y1="9" x2="6" y2="21"></line>',github:'<path d="M9 19c-5 1.5-5-2.5-7-3m14 6v-3.87a3.37 3.37 0 0 0-.94-2.61c3.14-.35 6.44-1.54 6.44-7A5.44 5.44 0 0 0 20 4.77 5.07 5.07 0 0 0 19.91 1S18.73.65 16 2.48a13.38 13.38 0 0 0-7 0C6.27.65 5.09 1 5.09 1A5.07 5.07 0 0 0 5 4.77a5.44 5.44 0 0 0-1.5 3.78c0 5.42 3.3 6.61 6.44 7A3.37 3.37 0 0 0 9 18.13V22"></path>',gitlab:'<path d="M22.65 14.39L12 22.13 1.35 14.39a.84.84 0 0 1-.3-.94l1.22-3.78 2.44-7.51A.42.42 0 0 1 4.82 2a.43.43 0 0 1 .58 0 .42.42 0 0 1 .11.18l2.44 7.49h8.1l2.44-7.51A.42.42 0 0 1 
18.6 2a.43.43 0 0 1 .58 0 .42.42 0 0 1 .11.18l2.44 7.51L23 13.45a.84.84 0 0 1-.35.94z"></path>',globe:'<circle cx="12" cy="12" r="10"></circle><line x1="2" y1="12" x2="22" y2="12"></line><path d="M12 2a15.3 15.3 0 0 1 4 10 15.3 15.3 0 0 1-4 10 15.3 15.3 0 0 1-4-10 15.3 15.3 0 0 1 4-10z"></path>',grid:'<rect x="3" y="3" width="7" height="7"></rect><rect x="14" y="3" width="7" height="7"></rect><rect x="14" y="14" width="7" height="7"></rect><rect x="3" y="14" width="7" height="7"></rect>',"hard-drive":'<line x1="22" y1="12" x2="2" y2="12"></line><path d="M5.45 5.11L2 12v6a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-6l-3.45-6.89A2 2 0 0 0 16.76 4H7.24a2 2 0 0 0-1.79 1.11z"></path><line x1="6" y1="16" x2="6.01" y2="16"></line><line x1="10" y1="16" x2="10.01" y2="16"></line>',hash:'<line x1="4" y1="9" x2="20" y2="9"></line><line x1="4" y1="15" x2="20" y2="15"></line><line x1="10" y1="3" x2="8" y2="21"></line><line x1="16" y1="3" x2="14" y2="21"></line>',headphones:'<path d="M3 18v-6a9 9 0 0 1 18 0v6"></path><path d="M21 19a2 2 0 0 1-2 2h-1a2 2 0 0 1-2-2v-3a2 2 0 0 1 2-2h3zM3 19a2 2 0 0 0 2 2h1a2 2 0 0 0 2-2v-3a2 2 0 0 0-2-2H3z"></path>',heart:'<path d="M20.84 4.61a5.5 5.5 0 0 0-7.78 0L12 5.67l-1.06-1.06a5.5 5.5 0 0 0-7.78 7.78l1.06 1.06L12 21.23l7.78-7.78 1.06-1.06a5.5 5.5 0 0 0 0-7.78z"></path>',"help-circle":'<circle cx="12" cy="12" r="10"></circle><path d="M9.09 9a3 3 0 0 1 5.83 1c0 2-3 3-3 3"></path><line x1="12" y1="17" x2="12.01" y2="17"></line>',hexagon:'<path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path>',home:'<path d="M3 9l9-7 9 7v11a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2z"></path><polyline points="9 22 9 12 15 12 15 22"></polyline>',image:'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><circle cx="8.5" cy="8.5" r="1.5"></circle><polyline points="21 15 16 10 5 21"></polyline>',inbox:'<polyline points="22 12 16 12 14 15 10 15 8 12 2 12"></polyline><path d="M5.45 5.11L2 12v6a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-6l-3.45-6.89A2 2 0 0 0 16.76 4H7.24a2 2 0 0 0-1.79 1.11z"></path>',info:'<circle cx="12" cy="12" r="10"></circle><line x1="12" y1="16" x2="12" y2="12"></line><line x1="12" y1="8" x2="12.01" y2="8"></line>',instagram:'<rect x="2" y="2" width="20" height="20" rx="5" ry="5"></rect><path d="M16 11.37A4 4 0 1 1 12.63 8 4 4 0 0 1 16 11.37z"></path><line x1="17.5" y1="6.5" x2="17.51" y2="6.5"></line>',italic:'<line x1="19" y1="4" x2="10" y2="4"></line><line x1="14" y1="20" x2="5" y2="20"></line><line x1="15" y1="4" x2="9" y2="20"></line>',key:'<path d="M21 2l-2 2m-7.61 7.61a5.5 5.5 0 1 1-7.778 7.778 5.5 5.5 0 0 1 7.777-7.777zm0 0L15.5 7.5m0 0l3 3L22 7l-3-3m-3.5 3.5L19 4"></path>',layers:'<polygon points="12 2 2 7 12 12 22 7 12 2"></polygon><polyline points="2 17 12 22 22 17"></polyline><polyline points="2 12 12 17 22 12"></polyline>',layout:'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="3" y1="9" x2="21" y2="9"></line><line x1="9" y1="21" x2="9" y2="9"></line>',"life-buoy":'<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="4"></circle><line x1="4.93" y1="4.93" x2="9.17" y2="9.17"></line><line x1="14.83" y1="14.83" x2="19.07" y2="19.07"></line><line x1="14.83" y1="9.17" x2="19.07" y2="4.93"></line><line x1="14.83" y1="9.17" x2="18.36" y2="5.64"></line><line x1="4.93" y1="19.07" x2="9.17" y2="14.83"></line>',"link-2":'<path d="M15 7h3a5 5 0 0 1 5 5 5 5 0 0 1-5 5h-3m-6 0H6a5 5 0 0 1-5-5 5 5 0 0 1 5-5h3"></path><line x1="8" y1="12" x2="16" y2="12"></line>',link:'<path 
d="M10 13a5 5 0 0 0 7.54.54l3-3a5 5 0 0 0-7.07-7.07l-1.72 1.71"></path><path d="M14 11a5 5 0 0 0-7.54-.54l-3 3a5 5 0 0 0 7.07 7.07l1.71-1.71"></path>',linkedin:'<path d="M16 8a6 6 0 0 1 6 6v7h-4v-7a2 2 0 0 0-2-2 2 2 0 0 0-2 2v7h-4v-7a6 6 0 0 1 6-6z"></path><rect x="2" y="9" width="4" height="12"></rect><circle cx="4" cy="4" r="2"></circle>',list:'<line x1="8" y1="6" x2="21" y2="6"></line><line x1="8" y1="12" x2="21" y2="12"></line><line x1="8" y1="18" x2="21" y2="18"></line><line x1="3" y1="6" x2="3.01" y2="6"></line><line x1="3" y1="12" x2="3.01" y2="12"></line><line x1="3" y1="18" x2="3.01" y2="18"></line>',loader:'<line x1="12" y1="2" x2="12" y2="6"></line><line x1="12" y1="18" x2="12" y2="22"></line><line x1="4.93" y1="4.93" x2="7.76" y2="7.76"></line><line x1="16.24" y1="16.24" x2="19.07" y2="19.07"></line><line x1="2" y1="12" x2="6" y2="12"></line><line x1="18" y1="12" x2="22" y2="12"></line><line x1="4.93" y1="19.07" x2="7.76" y2="16.24"></line><line x1="16.24" y1="7.76" x2="19.07" y2="4.93"></line>',lock:'<rect x="3" y="11" width="18" height="11" rx="2" ry="2"></rect><path d="M7 11V7a5 5 0 0 1 10 0v4"></path>',"log-in":'<path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"></path><polyline points="10 17 15 12 10 7"></polyline><line x1="15" y1="12" x2="3" y2="12"></line>',"log-out":'<path d="M9 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h4"></path><polyline points="16 17 21 12 16 7"></polyline><line x1="21" y1="12" x2="9" y2="12"></line>',mail:'<path d="M4 4h16c1.1 0 2 .9 2 2v12c0 1.1-.9 2-2 2H4c-1.1 0-2-.9-2-2V6c0-1.1.9-2 2-2z"></path><polyline points="22,6 12,13 2,6"></polyline>',"map-pin":'<path d="M21 10c0 7-9 13-9 13s-9-6-9-13a9 9 0 0 1 18 0z"></path><circle cx="12" cy="10" r="3"></circle>',map:'<polygon points="1 6 1 22 8 18 16 22 23 18 23 2 16 6 8 2 1 6"></polygon><line x1="8" y1="2" x2="8" y2="18"></line><line x1="16" y1="6" x2="16" y2="22"></line>',"maximize-2":'<polyline points="15 3 21 3 21 9"></polyline><polyline points="9 21 3 21 3 15"></polyline><line x1="21" y1="3" x2="14" y2="10"></line><line x1="3" y1="21" x2="10" y2="14"></line>',maximize:'<path d="M8 3H5a2 2 0 0 0-2 2v3m18 0V5a2 2 0 0 0-2-2h-3m0 18h3a2 2 0 0 0 2-2v-3M3 16v3a2 2 0 0 0 2 2h3"></path>',meh:'<circle cx="12" cy="12" r="10"></circle><line x1="8" y1="15" x2="16" y2="15"></line><line x1="9" y1="9" x2="9.01" y2="9"></line><line x1="15" y1="9" x2="15.01" y2="9"></line>',menu:'<line x1="3" y1="12" x2="21" y2="12"></line><line x1="3" y1="6" x2="21" y2="6"></line><line x1="3" y1="18" x2="21" y2="18"></line>',"message-circle":'<path d="M21 11.5a8.38 8.38 0 0 1-.9 3.8 8.5 8.5 0 0 1-7.6 4.7 8.38 8.38 0 0 1-3.8-.9L3 21l1.9-5.7a8.38 8.38 0 0 1-.9-3.8 8.5 8.5 0 0 1 4.7-7.6 8.38 8.38 0 0 1 3.8-.9h.5a8.48 8.48 0 0 1 8 8v.5z"></path>',"message-square":'<path d="M21 15a2 2 0 0 1-2 2H7l-4 4V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2z"></path>',"mic-off":'<line x1="1" y1="1" x2="23" y2="23"></line><path d="M9 9v3a3 3 0 0 0 5.12 2.12M15 9.34V4a3 3 0 0 0-5.94-.6"></path><path d="M17 16.95A7 7 0 0 1 5 12v-2m14 0v2a7 7 0 0 1-.11 1.23"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line>',mic:'<path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line>',"minimize-2":'<polyline points="4 14 10 14 10 20"></polyline><polyline points="20 10 14 10 14 4"></polyline><line x1="14" y1="10" x2="21" y2="3"></line><line x1="3" y1="21" x2="10" y2="14"></line>',minimize:'<path 
d="M8 3v3a2 2 0 0 1-2 2H3m18 0h-3a2 2 0 0 1-2-2V3m0 18v-3a2 2 0 0 1 2-2h3M3 16h3a2 2 0 0 1 2 2v3"></path>',"minus-circle":'<circle cx="12" cy="12" r="10"></circle><line x1="8" y1="12" x2="16" y2="12"></line>',"minus-square":'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="8" y1="12" x2="16" y2="12"></line>',minus:'<line x1="5" y1="12" x2="19" y2="12"></line>',monitor:'<rect x="2" y="3" width="20" height="14" rx="2" ry="2"></rect><line x1="8" y1="21" x2="16" y2="21"></line><line x1="12" y1="17" x2="12" y2="21"></line>',moon:'<path d="M21 12.79A9 9 0 1 1 11.21 3 7 7 0 0 0 21 12.79z"></path>',"more-horizontal":'<circle cx="12" cy="12" r="1"></circle><circle cx="19" cy="12" r="1"></circle><circle cx="5" cy="12" r="1"></circle>',"more-vertical":'<circle cx="12" cy="12" r="1"></circle><circle cx="12" cy="5" r="1"></circle><circle cx="12" cy="19" r="1"></circle>',"mouse-pointer":'<path d="M3 3l7.07 16.97 2.51-7.39 7.39-2.51L3 3z"></path><path d="M13 13l6 6"></path>',move:'<polyline points="5 9 2 12 5 15"></polyline><polyline points="9 5 12 2 15 5"></polyline><polyline points="15 19 12 22 9 19"></polyline><polyline points="19 9 22 12 19 15"></polyline><line x1="2" y1="12" x2="22" y2="12"></line><line x1="12" y1="2" x2="12" y2="22"></line>',music:'<path d="M9 18V5l12-2v13"></path><circle cx="6" cy="18" r="3"></circle><circle cx="18" cy="16" r="3"></circle>',"navigation-2":'<polygon points="12 2 19 21 12 17 5 21 12 2"></polygon>',navigation:'<polygon points="3 11 22 2 13 21 11 13 3 11"></polygon>',octagon:'<polygon points="7.86 2 16.14 2 22 7.86 22 16.14 16.14 22 7.86 22 2 16.14 2 7.86 7.86 2"></polygon>',package:'<line x1="16.5" y1="9.4" x2="7.5" y2="4.21"></line><path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path><polyline points="3.27 6.96 12 12.01 20.73 6.96"></polyline><line x1="12" y1="22.08" x2="12" y2="12"></line>',paperclip:'<path d="M21.44 11.05l-9.19 9.19a6 6 0 0 1-8.49-8.49l9.19-9.19a4 4 0 0 1 5.66 5.66l-9.2 9.19a2 2 0 0 1-2.83-2.83l8.49-8.48"></path>',"pause-circle":'<circle cx="12" cy="12" r="10"></circle><line x1="10" y1="15" x2="10" y2="9"></line><line x1="14" y1="15" x2="14" y2="9"></line>',pause:'<rect x="6" y="4" width="4" height="16"></rect><rect x="14" y="4" width="4" height="16"></rect>',"pen-tool":'<path d="M12 19l7-7 3 3-7 7-3-3z"></path><path d="M18 13l-1.5-7.5L2 2l3.5 14.5L13 18l5-5z"></path><path d="M2 2l7.586 7.586"></path><circle cx="11" cy="11" r="2"></circle>',percent:'<line x1="19" y1="5" x2="5" y2="19"></line><circle cx="6.5" cy="6.5" r="2.5"></circle><circle cx="17.5" cy="17.5" r="2.5"></circle>',"phone-call":'<path d="M15.05 5A5 5 0 0 1 19 8.95M15.05 1A9 9 0 0 1 23 8.94m-1 7.98v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',"phone-forwarded":'<polyline points="19 1 23 5 19 9"></polyline><line x1="15" y1="5" x2="23" y2="5"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',"phone-incoming":'<polyline points="16 2 16 8 22 8"></polyline><line x1="23" y1="1" x2="16" 
y2="8"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',"phone-missed":'<line x1="23" y1="1" x2="17" y2="7"></line><line x1="17" y1="1" x2="23" y2="7"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',"phone-off":'<path d="M10.68 13.31a16 16 0 0 0 3.41 2.6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7 2 2 0 0 1 1.72 2v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.42 19.42 0 0 1-3.33-2.67m-2.67-3.34a19.79 19.79 0 0 1-3.07-8.63A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91"></path><line x1="23" y1="1" x2="1" y2="23"></line>',"phone-outgoing":'<polyline points="23 7 23 1 17 1"></polyline><line x1="16" y1="8" x2="23" y2="1"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',phone:'<path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',"pie-chart":'<path d="M21.21 15.89A10 10 0 1 1 8 2.83"></path><path d="M22 12A10 10 0 0 0 12 2v10z"></path>',"play-circle":'<circle cx="12" cy="12" r="10"></circle><polygon points="10 8 16 12 10 16 10 8"></polygon>',play:'<polygon points="5 3 19 12 5 21 5 3"></polygon>',"plus-circle":'<circle cx="12" cy="12" r="10"></circle><line x1="12" y1="8" x2="12" y2="16"></line><line x1="8" y1="12" x2="16" y2="12"></line>',"plus-square":'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="12" y1="8" x2="12" y2="16"></line><line x1="8" y1="12" x2="16" y2="12"></line>',plus:'<line x1="12" y1="5" x2="12" y2="19"></line><line x1="5" y1="12" x2="19" y2="12"></line>',pocket:'<path d="M4 3h16a2 2 0 0 1 2 2v6a10 10 0 0 1-10 10A10 10 0 0 1 2 11V5a2 2 0 0 1 2-2z"></path><polyline points="8 10 12 14 16 10"></polyline>',power:'<path d="M18.36 6.64a9 9 0 1 1-12.73 0"></path><line x1="12" y1="2" x2="12" y2="12"></line>',printer:'<polyline points="6 9 6 2 18 2 18 9"></polyline><path d="M6 18H4a2 2 0 0 1-2-2v-5a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v5a2 2 0 0 1-2 2h-2"></path><rect x="6" y="14" width="12" height="8"></rect>',radio:'<circle cx="12" cy="12" r="2"></circle><path d="M16.24 7.76a6 6 0 0 1 0 8.49m-8.48-.01a6 6 0 0 1 0-8.49m11.31-2.82a10 10 0 0 1 0 14.14m-14.14 0a10 10 0 0 1 0-14.14"></path>',"refresh-ccw":'<polyline points="1 4 1 10 7 10"></polyline><polyline points="23 20 23 14 17 14"></polyline><path d="M20.49 9A9 9 0 0 0 5.64 5.64L1 10m22 4l-4.64 4.36A9 9 0 0 1 3.51 15"></path>',"refresh-cw":'<polyline points="23 4 23 10 17 10"></polyline><polyline points="1 20 1 14 7 14"></polyline><path d="M3.51 9a9 9 0 0 1 14.85-3.36L23 10M1 14l4.64 4.36A9 9 0 0 0 20.49 15"></path>',repeat:'<polyline 
points="17 1 21 5 17 9"></polyline><path d="M3 11V9a4 4 0 0 1 4-4h14"></path><polyline points="7 23 3 19 7 15"></polyline><path d="M21 13v2a4 4 0 0 1-4 4H3"></path>',rewind:'<polygon points="11 19 2 12 11 5 11 19"></polygon><polygon points="22 19 13 12 22 5 22 19"></polygon>',"rotate-ccw":'<polyline points="1 4 1 10 7 10"></polyline><path d="M3.51 15a9 9 0 1 0 2.13-9.36L1 10"></path>',"rotate-cw":'<polyline points="23 4 23 10 17 10"></polyline><path d="M20.49 15a9 9 0 1 1-2.12-9.36L23 10"></path>',rss:'<path d="M4 11a9 9 0 0 1 9 9"></path><path d="M4 4a16 16 0 0 1 16 16"></path><circle cx="5" cy="19" r="1"></circle>',save:'<path d="M19 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h11l5 5v11a2 2 0 0 1-2 2z"></path><polyline points="17 21 17 13 7 13 7 21"></polyline><polyline points="7 3 7 8 15 8"></polyline>',scissors:'<circle cx="6" cy="6" r="3"></circle><circle cx="6" cy="18" r="3"></circle><line x1="20" y1="4" x2="8.12" y2="15.88"></line><line x1="14.47" y1="14.48" x2="20" y2="20"></line><line x1="8.12" y1="8.12" x2="12" y2="12"></line>',search:'<circle cx="11" cy="11" r="8"></circle><line x1="21" y1="21" x2="16.65" y2="16.65"></line>',send:'<line x1="22" y1="2" x2="11" y2="13"></line><polygon points="22 2 15 22 11 13 2 9 22 2"></polygon>',server:'<rect x="2" y="2" width="20" height="8" rx="2" ry="2"></rect><rect x="2" y="14" width="20" height="8" rx="2" ry="2"></rect><line x1="6" y1="6" x2="6.01" y2="6"></line><line x1="6" y1="18" x2="6.01" y2="18"></line>',settings:'<circle cx="12" cy="12" r="3"></circle><path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>',"share-2":'<circle cx="18" cy="5" r="3"></circle><circle cx="6" cy="12" r="3"></circle><circle cx="18" cy="19" r="3"></circle><line x1="8.59" y1="13.51" x2="15.42" y2="17.49"></line><line x1="15.41" y1="6.51" x2="8.59" y2="10.49"></line>',share:'<path d="M4 12v8a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2v-8"></path><polyline points="16 6 12 2 8 6"></polyline><line x1="12" y1="2" x2="12" y2="15"></line>',"shield-off":'<path d="M19.69 14a6.9 6.9 0 0 0 .31-2V5l-8-3-3.16 1.18"></path><path d="M4.73 4.73L4 5v7c0 6 8 10 8 10a20.29 20.29 0 0 0 5.62-4.38"></path><line x1="1" y1="1" x2="23" y2="23"></line>',shield:'<path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"></path>',"shopping-bag":'<path d="M6 2L3 6v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2V6l-3-4z"></path><line x1="3" y1="6" x2="21" y2="6"></line><path d="M16 10a4 4 0 0 1-8 0"></path>',"shopping-cart":'<circle cx="9" cy="21" r="1"></circle><circle cx="20" cy="21" r="1"></circle><path d="M1 1h4l2.68 13.39a2 2 0 0 0 2 1.61h9.72a2 2 0 0 0 2-1.61L23 6H6"></path>',shuffle:'<polyline points="16 3 21 3 21 8"></polyline><line x1="4" y1="20" x2="21" y2="3"></line><polyline points="21 16 21 21 16 21"></polyline><line x1="15" y1="15" x2="21" y2="21"></line><line x1="4" y1="4" x2="9" y2="9"></line>',sidebar:'<rect x="3" y="3" 
width="18" height="18" rx="2" ry="2"></rect><line x1="9" y1="3" x2="9" y2="21"></line>',"skip-back":'<polygon points="19 20 9 12 19 4 19 20"></polygon><line x1="5" y1="19" x2="5" y2="5"></line>',"skip-forward":'<polygon points="5 4 15 12 5 20 5 4"></polygon><line x1="19" y1="5" x2="19" y2="19"></line>',slack:'<path d="M14.5 10c-.83 0-1.5-.67-1.5-1.5v-5c0-.83.67-1.5 1.5-1.5s1.5.67 1.5 1.5v5c0 .83-.67 1.5-1.5 1.5z"></path><path d="M20.5 10H19V8.5c0-.83.67-1.5 1.5-1.5s1.5.67 1.5 1.5-.67 1.5-1.5 1.5z"></path><path d="M9.5 14c.83 0 1.5.67 1.5 1.5v5c0 .83-.67 1.5-1.5 1.5S8 21.33 8 20.5v-5c0-.83.67-1.5 1.5-1.5z"></path><path d="M3.5 14H5v1.5c0 .83-.67 1.5-1.5 1.5S2 16.33 2 15.5 2.67 14 3.5 14z"></path><path d="M14 14.5c0-.83.67-1.5 1.5-1.5h5c.83 0 1.5.67 1.5 1.5s-.67 1.5-1.5 1.5h-5c-.83 0-1.5-.67-1.5-1.5z"></path><path d="M15.5 19H14v1.5c0 .83.67 1.5 1.5 1.5s1.5-.67 1.5-1.5-.67-1.5-1.5-1.5z"></path><path d="M10 9.5C10 8.67 9.33 8 8.5 8h-5C2.67 8 2 8.67 2 9.5S2.67 11 3.5 11h5c.83 0 1.5-.67 1.5-1.5z"></path><path d="M8.5 5H10V3.5C10 2.67 9.33 2 8.5 2S7 2.67 7 3.5 7.67 5 8.5 5z"></path>',slash:'<circle cx="12" cy="12" r="10"></circle><line x1="4.93" y1="4.93" x2="19.07" y2="19.07"></line>',sliders:'<line x1="4" y1="21" x2="4" y2="14"></line><line x1="4" y1="10" x2="4" y2="3"></line><line x1="12" y1="21" x2="12" y2="12"></line><line x1="12" y1="8" x2="12" y2="3"></line><line x1="20" y1="21" x2="20" y2="16"></line><line x1="20" y1="12" x2="20" y2="3"></line><line x1="1" y1="14" x2="7" y2="14"></line><line x1="9" y1="8" x2="15" y2="8"></line><line x1="17" y1="16" x2="23" y2="16"></line>',smartphone:'<rect x="5" y="2" width="14" height="20" rx="2" ry="2"></rect><line x1="12" y1="18" x2="12.01" y2="18"></line>',smile:'<circle cx="12" cy="12" r="10"></circle><path d="M8 14s1.5 2 4 2 4-2 4-2"></path><line x1="9" y1="9" x2="9.01" y2="9"></line><line x1="15" y1="9" x2="15.01" y2="9"></line>',speaker:'<rect x="4" y="2" width="16" height="20" rx="2" ry="2"></rect><circle cx="12" cy="14" r="4"></circle><line x1="12" y1="6" x2="12.01" y2="6"></line>',square:'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect>',star:'<polygon points="12 2 15.09 8.26 22 9.27 17 14.14 18.18 21.02 12 17.77 5.82 21.02 7 14.14 2 9.27 8.91 8.26 12 2"></polygon>',"stop-circle":'<circle cx="12" cy="12" r="10"></circle><rect x="9" y="9" width="6" height="6"></rect>',sun:'<circle cx="12" cy="12" r="5"></circle><line x1="12" y1="1" x2="12" y2="3"></line><line x1="12" y1="21" x2="12" y2="23"></line><line x1="4.22" y1="4.22" x2="5.64" y2="5.64"></line><line x1="18.36" y1="18.36" x2="19.78" y2="19.78"></line><line x1="1" y1="12" x2="3" y2="12"></line><line x1="21" y1="12" x2="23" y2="12"></line><line x1="4.22" y1="19.78" x2="5.64" y2="18.36"></line><line x1="18.36" y1="5.64" x2="19.78" y2="4.22"></line>',sunrise:'<path d="M17 18a5 5 0 0 0-10 0"></path><line x1="12" y1="2" x2="12" y2="9"></line><line x1="4.22" y1="10.22" x2="5.64" y2="11.64"></line><line x1="1" y1="18" x2="3" y2="18"></line><line x1="21" y1="18" x2="23" y2="18"></line><line x1="18.36" y1="11.64" x2="19.78" y2="10.22"></line><line x1="23" y1="22" x2="1" y2="22"></line><polyline points="8 6 12 2 16 6"></polyline>',sunset:'<path d="M17 18a5 5 0 0 0-10 0"></path><line x1="12" y1="9" x2="12" y2="2"></line><line x1="4.22" y1="10.22" x2="5.64" y2="11.64"></line><line x1="1" y1="18" x2="3" y2="18"></line><line x1="21" y1="18" x2="23" y2="18"></line><line x1="18.36" y1="11.64" x2="19.78" y2="10.22"></line><line x1="23" y1="22" x2="1" y2="22"></line><polyline points="16 5 12 9 
8 5"></polyline>',tablet:'<rect x="4" y="2" width="16" height="20" rx="2" ry="2"></rect><line x1="12" y1="18" x2="12.01" y2="18"></line>',tag:'<path d="M20.59 13.41l-7.17 7.17a2 2 0 0 1-2.83 0L2 12V2h10l8.59 8.59a2 2 0 0 1 0 2.82z"></path><line x1="7" y1="7" x2="7.01" y2="7"></line>',target:'<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="6"></circle><circle cx="12" cy="12" r="2"></circle>',terminal:'<polyline points="4 17 10 11 4 5"></polyline><line x1="12" y1="19" x2="20" y2="19"></line>',thermometer:'<path d="M14 14.76V3.5a2.5 2.5 0 0 0-5 0v11.26a4.5 4.5 0 1 0 5 0z"></path>',"thumbs-down":'<path d="M10 15v4a3 3 0 0 0 3 3l4-9V2H5.72a2 2 0 0 0-2 1.7l-1.38 9a2 2 0 0 0 2 2.3zm7-13h2.67A2.31 2.31 0 0 1 22 4v7a2.31 2.31 0 0 1-2.33 2H17"></path>',"thumbs-up":'<path d="M14 9V5a3 3 0 0 0-3-3l-4 9v11h11.28a2 2 0 0 0 2-1.7l1.38-9a2 2 0 0 0-2-2.3zM7 22H4a2 2 0 0 1-2-2v-7a2 2 0 0 1 2-2h3"></path>',"toggle-left":'<rect x="1" y="5" width="22" height="14" rx="7" ry="7"></rect><circle cx="8" cy="12" r="3"></circle>',"toggle-right":'<rect x="1" y="5" width="22" height="14" rx="7" ry="7"></rect><circle cx="16" cy="12" r="3"></circle>',tool:'<path d="M14.7 6.3a1 1 0 0 0 0 1.4l1.6 1.6a1 1 0 0 0 1.4 0l3.77-3.77a6 6 0 0 1-7.94 7.94l-6.91 6.91a2.12 2.12 0 0 1-3-3l6.91-6.91a6 6 0 0 1 7.94-7.94l-3.76 3.76z"></path>',"trash-2":'<polyline points="3 6 5 6 21 6"></polyline><path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6m3 0V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"></path><line x1="10" y1="11" x2="10" y2="17"></line><line x1="14" y1="11" x2="14" y2="17"></line>',trash:'<polyline points="3 6 5 6 21 6"></polyline><path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6m3 0V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"></path>',trello:'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><rect x="7" y="7" width="3" height="9"></rect><rect x="14" y="7" width="3" height="5"></rect>',"trending-down":'<polyline points="23 18 13.5 8.5 8.5 13.5 1 6"></polyline><polyline points="17 18 23 18 23 12"></polyline>',"trending-up":'<polyline points="23 6 13.5 15.5 8.5 10.5 1 18"></polyline><polyline points="17 6 23 6 23 12"></polyline>',triangle:'<path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path>',truck:'<rect x="1" y="3" width="15" height="13"></rect><polygon points="16 8 20 8 23 11 23 16 16 16 16 8"></polygon><circle cx="5.5" cy="18.5" r="2.5"></circle><circle cx="18.5" cy="18.5" r="2.5"></circle>',tv:'<rect x="2" y="7" width="20" height="15" rx="2" ry="2"></rect><polyline points="17 2 12 7 7 2"></polyline>',twitch:'<path d="M21 2H3v16h5v4l4-4h5l4-4V2zm-10 9V7m5 4V7"></path>',twitter:'<path d="M23 3a10.9 10.9 0 0 1-3.14 1.53 4.48 4.48 0 0 0-7.86 3v1A10.66 10.66 0 0 1 3 4s-4 9 5 13a11.64 11.64 0 0 1-7 2c9 5 20 0 20-11.5a4.5 4.5 0 0 0-.08-.83A7.72 7.72 0 0 0 23 3z"></path>',type:'<polyline points="4 7 4 4 20 4 20 7"></polyline><line x1="9" y1="20" x2="15" y2="20"></line><line x1="12" y1="4" x2="12" y2="20"></line>',umbrella:'<path d="M23 12a11.05 11.05 0 0 0-22 0zm-5 7a3 3 0 0 1-6 0v-7"></path>',underline:'<path d="M6 3v7a6 6 0 0 0 6 6 6 6 0 0 0 6-6V3"></path><line x1="4" y1="21" x2="20" y2="21"></line>',unlock:'<rect x="3" y="11" width="18" height="11" rx="2" ry="2"></rect><path d="M7 11V7a5 5 0 0 1 9.9-1"></path>',"upload-cloud":'<polyline points="16 16 12 12 8 16"></polyline><line x1="12" y1="12" x2="12" y2="21"></line><path d="M20.39 18.39A5 5 0 0 0 18 9h-1.26A8 8 0 1 0 3 16.3"></path><polyline points="16 16 12 12 8 16"></polyline>',upload:'<path d="M21 15v4a2 2 0 0 
1-2 2H5a2 2 0 0 1-2-2v-4"></path><polyline points="17 8 12 3 7 8"></polyline><line x1="12" y1="3" x2="12" y2="15"></line>',"user-check":'<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><polyline points="17 11 19 13 23 9"></polyline>',"user-minus":'<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><line x1="23" y1="11" x2="17" y2="11"></line>',"user-plus":'<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><line x1="20" y1="8" x2="20" y2="14"></line><line x1="23" y1="11" x2="17" y2="11"></line>',"user-x":'<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><line x1="18" y1="8" x2="23" y2="13"></line><line x1="23" y1="8" x2="18" y2="13"></line>',user:'<path d="M20 21v-2a4 4 0 0 0-4-4H8a4 4 0 0 0-4 4v2"></path><circle cx="12" cy="7" r="4"></circle>',users:'<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="9" cy="7" r="4"></circle><path d="M23 21v-2a4 4 0 0 0-3-3.87"></path><path d="M16 3.13a4 4 0 0 1 0 7.75"></path>',"video-off":'<path d="M16 16v1a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2V7a2 2 0 0 1 2-2h2m5.66 0H14a2 2 0 0 1 2 2v3.34l1 1L23 7v10"></path><line x1="1" y1="1" x2="23" y2="23"></line>',video:'<polygon points="23 7 16 12 23 17 23 7"></polygon><rect x="1" y="5" width="15" height="14" rx="2" ry="2"></rect>',voicemail:'<circle cx="5.5" cy="11.5" r="4.5"></circle><circle cx="18.5" cy="11.5" r="4.5"></circle><line x1="5.5" y1="16" x2="18.5" y2="16"></line>',"volume-1":'<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon><path d="M15.54 8.46a5 5 0 0 1 0 7.07"></path>',"volume-2":'<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon><path d="M19.07 4.93a10 10 0 0 1 0 14.14M15.54 8.46a5 5 0 0 1 0 7.07"></path>',"volume-x":'<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon><line x1="23" y1="9" x2="17" y2="15"></line><line x1="17" y1="9" x2="23" y2="15"></line>',volume:'<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon>',watch:'<circle cx="12" cy="12" r="7"></circle><polyline points="12 9 12 12 13.5 13.5"></polyline><path d="M16.51 17.35l-.35 3.83a2 2 0 0 1-2 1.82H9.83a2 2 0 0 1-2-1.82l-.35-3.83m.01-10.7l.35-3.83A2 2 0 0 1 9.83 1h4.35a2 2 0 0 1 2 1.82l.35 3.83"></path>',"wifi-off":'<line x1="1" y1="1" x2="23" y2="23"></line><path d="M16.72 11.06A10.94 10.94 0 0 1 19 12.55"></path><path d="M5 12.55a10.94 10.94 0 0 1 5.17-2.39"></path><path d="M10.71 5.05A16 16 0 0 1 22.58 9"></path><path d="M1.42 9a15.91 15.91 0 0 1 4.7-2.88"></path><path d="M8.53 16.11a6 6 0 0 1 6.95 0"></path><line x1="12" y1="20" x2="12.01" y2="20"></line>',wifi:'<path d="M5 12.55a11 11 0 0 1 14.08 0"></path><path d="M1.42 9a16 16 0 0 1 21.16 0"></path><path d="M8.53 16.11a6 6 0 0 1 6.95 0"></path><line x1="12" y1="20" x2="12.01" y2="20"></line>',wind:'<path d="M9.59 4.59A2 2 0 1 1 11 8H2m10.59 11.41A2 2 0 1 0 14 16H2m15.73-8.27A2.5 2.5 0 1 1 19.5 12H2"></path>',"x-circle":'<circle cx="12" cy="12" r="10"></circle><line x1="15" y1="9" x2="9" y2="15"></line><line x1="9" y1="9" x2="15" y2="15"></line>',"x-octagon":'<polygon points="7.86 2 16.14 2 22 7.86 22 16.14 16.14 22 7.86 22 2 16.14 2 7.86 7.86 2"></polygon><line x1="15" y1="9" x2="9" y2="15"></line><line x1="9" y1="9" x2="15" y2="15"></line>',"x-square":'<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="9" y1="9" x2="15" y2="15"></line><line x1="15" y1="9" x2="9" y2="15"></line>',x:'<line x1="18" y1="6" 
x2="6" y2="18"></line><line x1="6" y1="6" x2="18" y2="18"></line>',youtube:'<path d="M22.54 6.42a2.78 2.78 0 0 0-1.94-2C18.88 4 12 4 12 4s-6.88 0-8.6.46a2.78 2.78 0 0 0-1.94 2A29 29 0 0 0 1 11.75a29 29 0 0 0 .46 5.33A2.78 2.78 0 0 0 3.4 19c1.72.46 8.6.46 8.6.46s6.88 0 8.6-.46a2.78 2.78 0 0 0 1.94-2 29 29 0 0 0 .46-5.25 29 29 0 0 0-.46-5.33z"></path><polygon points="9.75 15.02 15.5 11.75 9.75 8.48 9.75 15.02"></polygon>',"zap-off":'<polyline points="12.41 6.75 13 2 10.57 4.92"></polyline><polyline points="18.57 12.91 21 10 15.66 10"></polyline><polyline points="8 8 3 14 12 14 11 22 16 16"></polyline><line x1="1" y1="1" x2="23" y2="23"></line>',zap:'<polygon points="13 2 3 14 12 14 11 22 21 10 12 10 13 2"></polygon>',"zoom-in":'<circle cx="11" cy="11" r="8"></circle><line x1="21" y1="21" x2="16.65" y2="16.65"></line><line x1="11" y1="8" x2="11" y2="14"></line><line x1="8" y1="11" x2="14" y2="11"></line>',"zoom-out":'<circle cx="11" cy="11" r="8"></circle><line x1="21" y1="21" x2="16.65" y2="16.65"></line><line x1="8" y1="11" x2="14" y2="11"></line>'}},function(e){e.exports={xmlns:"http://www.w3.org/2000/svg",width:24,height:24,viewBox:"0 0 24 24",fill:"none",stroke:"currentColor","stroke-width":2,"stroke-linecap":"round","stroke-linejoin":"round"}},function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0});var i=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var i in n)Object.prototype.hasOwnProperty.call(n,i)&&(e[i]=n[i])}return e},o=function(){function e(e,t){for(var n=0;n<t.length;n++){var i=t[n];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(e,i.key,i)}}return function(t,n,i){return n&&e(t.prototype,n),i&&e(t,i),t}}(),r=a(n(22)),s=a(n(42));function a(e){return e&&e.__esModule?e:{default:e}}var l=function(){function e(t,n){var o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:[];!function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}(this,e),this.name=t,this.contents=n,this.tags=o,this.attrs=i({},s.default,{class:"feather feather-"+t})}return o(e,[{key:"toSvg",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return"<svg "+function(e){return Object.keys(e).map((function(t){return t+'="'+e[t]+'"'})).join(" ")}(i({},this.attrs,e,{class:(0,r.default)(this.attrs.class,e.class)}))+">"+this.contents+"</svg>"}},{key:"toString",value:function(){return this.contents}}]),e}();t.default=l},function(e,t,n){"use strict";var i=s(n(12)),o=s(n(39)),r=s(n(38));function s(e){return e&&e.__esModule?e:{default:e}}e.exports={icons:i.default,toSvg:o.default,replace:r.default}},function(e,t,n){e.exports=n(0)},function(e,t,n){var i=n(2)("iterator"),o=!1;try{var r=0,s={next:function(){return{done:!!r++}},return:function(){o=!0}};s[i]=function(){return this},Array.from(s,(function(){throw 2}))}catch(e){}e.exports=function(e,t){if(!t&&!o)return!1;var n=!1;try{var r={};r[i]=function(){return{next:function(){return{done:n=!0}}}},e(r)}catch(e){}return n}},function(e,t,n){var i=n(30),o=n(2)("toStringTag"),r="Arguments"==i(function(){return arguments}());e.exports=function(e){var t,n,s;return void 0===e?"Undefined":null===e?"Null":"string"==typeof(n=function(e,t){try{return e[t]}catch(e){}}(t=Object(e),o))?n:r?i(t):"Object"==(s=i(t))&&"function"==typeof t.callee?"Arguments":s}},function(e,t,n){var i=n(47),o=n(9),r=n(2)("iterator");e.exports=function(e){if(null!=e)return e[r]||e["@@iterator"]||o[i(e)]}},function(e,t,n){"use strict";var 
i=n(18),o=n(7),r=n(10);e.exports=function(e,t,n){var s=i(t);s in e?o.f(e,s,r(0,n)):e[s]=n}},function(e,t,n){var i=n(2),o=n(9),r=i("iterator"),s=Array.prototype;e.exports=function(e){return void 0!==e&&(o.Array===e||s[r]===e)}},function(e,t,n){var i=n(3);e.exports=function(e,t,n,o){try{return o?t(i(n)[0],n[1]):t(n)}catch(t){var r=e.return;throw void 0!==r&&i(r.call(e)),t}}},function(e,t){e.exports=function(e){if("function"!=typeof e)throw TypeError(String(e)+" is not a function");return e}},function(e,t,n){var i=n(52);e.exports=function(e,t,n){if(i(e),void 0===t)return e;switch(n){case 0:return function(){return e.call(t)};case 1:return function(n){return e.call(t,n)};case 2:return function(n,i){return e.call(t,n,i)};case 3:return function(n,i,o){return e.call(t,n,i,o)}}return function(){return e.apply(t,arguments)}}},function(e,t,n){"use strict";var i=n(53),o=n(24),r=n(51),s=n(50),a=n(27),l=n(49),c=n(48);e.exports=function(e){var t,n,u,p,h=o(e),d="function"==typeof this?this:Array,f=arguments.length,g=f>1?arguments[1]:void 0,y=void 0!==g,m=0,v=c(h);if(y&&(g=i(g,f>2?arguments[2]:void 0,2)),null==v||d==Array&&s(v))for(n=new d(t=a(h.length));t>m;m++)l(n,m,y?g(h[m],m):h[m]);else for(p=v.call(h),n=new d;!(u=p.next()).done;m++)l(n,m,y?r(p,g,[u.value,m],!0):u.value);return n.length=m,n}},function(e,t,n){var i=n(32),o=n(54);i({target:"Array",stat:!0,forced:!n(46)((function(e){Array.from(e)}))},{from:o})},function(e,t,n){var i=n(6),o=n(3);e.exports=function(e,t){if(o(e),!i(t)&&null!==t)throw TypeError("Can't set "+String(t)+" as a prototype")}},function(e,t,n){var i=n(56);e.exports=Object.setPrototypeOf||("__proto__"in{}?function(){var e,t=!1,n={};try{(e=Object.getOwnPropertyDescriptor(Object.prototype,"__proto__").set).call(n,[]),t=n instanceof Array}catch(e){}return function(n,o){return i(n,o),t?e.call(n,o):n.__proto__=o,n}}():void 0)},function(e,t,n){var i=n(0).document;e.exports=i&&i.documentElement},function(e,t,n){var i=n(28),o=n(13);e.exports=Object.keys||function(e){return i(e,o)}},function(e,t,n){var i=n(8),o=n(7),r=n(3),s=n(59);e.exports=i?Object.defineProperties:function(e,t){r(e);for(var n,i=s(t),a=i.length,l=0;a>l;)o.f(e,n=i[l++],t[n]);return e}},function(e,t,n){var i=n(3),o=n(60),r=n(13),s=n(15),a=n(58),l=n(34),c=n(16)("IE_PROTO"),u=function(){},p=function(){var e,t=l("iframe"),n=r.length;for(t.style.display="none",a.appendChild(t),t.src=String("javascript:"),(e=t.contentWindow.document).open(),e.write("<script>document.F=Object<\/script>"),e.close(),p=e.F;n--;)delete p.prototype[r[n]];return p()};e.exports=Object.create||function(e,t){var n;return null!==e?(u.prototype=i(e),n=new u,u.prototype=null,n[c]=e):n=p(),void 0===t?n:o(n,t)},s[c]=!0},function(e,t,n){var i=n(4);e.exports=!!Object.getOwnPropertySymbols&&!i((function(){return!String(Symbol())}))},function(e,t,n){var i=n(4);e.exports=!i((function(){function e(){}return e.prototype.constructor=null,Object.getPrototypeOf(new e)!==e.prototype}))},function(e,t,n){"use strict";var i=n(26).IteratorPrototype,o=n(61),r=n(10),s=n(23),a=n(9),l=function(){return this};e.exports=function(e,t,n){var c=t+" Iterator";return e.prototype=o(i,{next:r(1,n)}),s(e,c,!1,!0),a[c]=l,e}},function(e,t,n){var i=n(4),o=/#|\.prototype\./,r=function(e,t){var n=a[s(e)];return n==c||n!=l&&("function"==typeof t?i(t):!!t)},s=r.normalize=function(e){return String(e).replace(o,".").toLowerCase()},a=r.data={},l=r.NATIVE="N",c=r.POLYFILL="P";e.exports=r},function(e,t){t.f=Object.getOwnPropertySymbols},function(e,t,n){var 
i=n(21),o=Math.max,r=Math.min;e.exports=function(e,t){var n=i(e);return n<0?o(n+t,0):r(n,t)}},function(e,t,n){var i=n(14),o=n(27),r=n(67);e.exports=function(e){return function(t,n,s){var a,l=i(t),c=o(l.length),u=r(s,c);if(e&&n!=n){for(;c>u;)if((a=l[u++])!=a)return!0}else for(;c>u;u++)if((e||u in l)&&l[u]===n)return e||u||0;return!e&&-1}}},function(e,t,n){var i=n(28),o=n(13).concat("length","prototype");t.f=Object.getOwnPropertyNames||function(e){return i(e,o)}},function(e,t,n){var i=n(0),o=n(69),r=n(66),s=n(3),a=i.Reflect;e.exports=a&&a.ownKeys||function(e){var t=o.f(s(e)),n=r.f;return n?t.concat(n(e)):t}},function(e,t,n){var i=n(1),o=n(70),r=n(31),s=n(7);e.exports=function(e,t){for(var n=o(t),a=s.f,l=r.f,c=0;c<n.length;c++){var u=n[c];i(e,u)||a(e,u,l(t,u))}}},function(e,t,n){var i=n(4),o=n(30),r="".split;e.exports=i((function(){return!Object("z").propertyIsEnumerable(0)}))?function(e){return"String"==o(e)?r.call(e,""):Object(e)}:Object},function(e,t,n){"use strict";var i={}.propertyIsEnumerable,o=Object.getOwnPropertyDescriptor,r=o&&!i.call({1:2},1);t.f=r?function(e){var t=o(this,e);return!!t&&t.enumerable}:i},function(e,t,n){"use strict";var i=n(32),o=n(64),r=n(25),s=n(57),a=n(23),l=n(5),c=n(29),u=n(2),p=n(17),h=n(9),d=n(26),f=d.IteratorPrototype,g=d.BUGGY_SAFARI_ITERATORS,y=u("iterator"),m=function(){return this};e.exports=function(e,t,n,u,d,v,b){o(n,t,u);var x,w,_,k=function(e){if(e===d&&C)return C;if(!g&&e in S)return S[e];switch(e){case"keys":case"values":case"entries":return function(){return new n(this,e)}}return function(){return new n(this)}},E=t+" Iterator",T=!1,S=e.prototype,A=S[y]||S["@@iterator"]||d&&S[d],C=!g&&A||k(d),L="Array"==t&&S.entries||A;if(L&&(x=r(L.call(new e)),f!==Object.prototype&&x.next&&(p||r(x)===f||(s?s(x,f):"function"!=typeof x[y]&&l(x,y,m)),a(x,E,!0,!0),p&&(h[E]=m))),"values"==d&&A&&"values"!==A.name&&(T=!0,C=function(){return A.call(this)}),p&&!b||S[y]===C||l(S,y,C),h[t]=C,d)if(w={values:k("values"),keys:v?C:k("keys"),entries:k("entries")},b)for(_ in w)!g&&!T&&_ in S||c(S,_,w[_]);else i({target:t,proto:!0,forced:g||T},w);return w}},function(e,t){var n;n=function(){return this}();try{n=n||Function("return this")()||(0,eval)("this")}catch(e){"object"==typeof window&&(n=window)}e.exports=n},function(e,t,n){var i=n(0),o=n(36),r=i.WeakMap;e.exports="function"==typeof r&&/native code/.test(o.call(r))},function(e,t,n){var i=n(21),o=n(20);e.exports=function(e,t,n){var r,s,a=String(o(e)),l=i(t),c=a.length;return l<0||l>=c?n?"":void 0:(r=a.charCodeAt(l))<55296||r>56319||l+1===c||(s=a.charCodeAt(l+1))<56320||s>57343?n?a.charAt(l):r:n?a.slice(l,l+2):s-56320+(r-55296<<10)+65536}},function(e,t,n){"use strict";var i=n(77),o=n(37),r=n(74),s=o.set,a=o.getterFor("String Iterator");r(String,"String",(function(e){s(this,{type:"String Iterator",string:String(e),index:0})}),(function(){var e,t=a(this),n=t.string,o=t.index;return o>=n.length?{value:void 0,done:!0}:(e=i(n,o,!0),t.index+=e.length,{value:e,done:!1})}))},function(e,t,n){n(78),n(55);var i=n(45);e.exports=i.Array.from},function(e,t,n){n(79),e.exports=n(44)}])})),function(e){"function"==typeof define&&define.amd?define(["jquery"],e):"object"==typeof module&&module.exports?module.exports=function(t,n){return void 0===n&&(n="undefined"!=typeof window?require("jquery"):require("jquery")(t)),e(n),n}:e(jQuery)}((function(e){"use strict";var t=0;e.fn.TouchSpin=function(n){var 
i={min:0,max:100,initval:"",replacementval:"",firstclickvalueifempty:null,step:1,decimals:0,stepinterval:100,forcestepdivisibility:"round",stepintervaldelay:500,verticalbuttons:!1,verticalup:"+",verticaldown:"-",verticalupclass:"",verticaldownclass:"",prefix:"",postfix:"",prefix_extraclass:"",postfix_extraclass:"",booster:!0,boostat:10,maxboostedstep:!1,mousewheel:!0,buttondown_class:"btn btn-primary",buttonup_class:"btn btn-primary",buttondown_txt:"-",buttonup_txt:"+",callback_before_calculation:function(e){return e},callback_after_calculation:function(e){return e}},o={min:"min",max:"max",initval:"init-val",replacementval:"replacement-val",firstclickvalueifempty:"first-click-value-if-empty",step:"step",decimals:"decimals",stepinterval:"step-interval",verticalbuttons:"vertical-buttons",verticalupclass:"vertical-up-class",verticaldownclass:"vertical-down-class",forcestepdivisibility:"force-step-divisibility",stepintervaldelay:"step-interval-delay",prefix:"prefix",postfix:"postfix",prefix_extraclass:"prefix-extra-class",postfix_extraclass:"postfix-extra-class",booster:"booster",boostat:"boostat",maxboostedstep:"max-boosted-step",mousewheel:"mouse-wheel",buttondown_class:"button-down-class",buttonup_class:"button-up-class",buttondown_txt:"button-down-txt",buttonup_txt:"button-up-txt"};return this.each((function(){var r,s,a,l,c,u,p,h,d,f,g=e(this),y=g.data(),m=0,v=!1;function b(){""===r.prefix&&(s=c.prefix.detach()),""===r.postfix&&(a=c.postfix.detach())}function x(){var e,t,n;""!==(e=r.callback_before_calculation(g.val()))?0<r.decimals&&"."===e||(t=parseFloat(e),isNaN(t)&&(t=""!==r.replacementval?r.replacementval:0),(n=t).toString()!==e&&(n=t),null!==r.min&&t<r.min&&(n=r.min),null!==r.max&&t>r.max&&(n=r.max),n=function(e){switch(r.forcestepdivisibility){case"round":return(Math.round(e/r.step)*r.step).toFixed(r.decimals);case"floor":return(Math.floor(e/r.step)*r.step).toFixed(r.decimals);case"ceil":return(Math.ceil(e/r.step)*r.step).toFixed(r.decimals);default:return e.toFixed(r.decimals)}}(n),Number(e).toString()!==n.toString()&&(g.val(n),g.trigger("change"))):""!==r.replacementval&&(g.val(r.replacementval),g.trigger("change"))}function w(){if(r.booster){var e=Math.pow(2,Math.floor(m/r.boostat))*r.step;return r.maxboostedstep&&e>r.maxboostedstep&&(e=r.maxboostedstep,u=Math.round(u/e)*e),Math.max(r.step,e)}return r.step}function _(){return"number"==typeof r.firstclickvalueifempty?r.firstclickvalueifempty:(r.min+r.max)/2}function k(){x();var e,t=u=parseFloat(r.callback_before_calculation(c.input.val()));isNaN(u)?u=_():(e=w(),u+=e),null!==r.max&&u>r.max&&(u=r.max,g.trigger("touchspin.on.max"),A()),c.input.val(r.callback_after_calculation(Number(u).toFixed(r.decimals))),t!==u&&g.trigger("change")}function E(){x();var e,t=u=parseFloat(r.callback_before_calculation(c.input.val()));isNaN(u)?u=_():(e=w(),u-=e),null!==r.min&&u<r.min&&(u=r.min,g.trigger("touchspin.on.min"),A()),c.input.val(r.callback_after_calculation(Number(u).toFixed(r.decimals))),t!==u&&g.trigger("change")}function T(){A(),m=0,v="down",g.trigger("touchspin.on.startspin"),g.trigger("touchspin.on.startdownspin"),d=setTimeout((function(){p=setInterval((function(){m++,E()}),r.stepinterval)}),r.stepintervaldelay)}function S(){A(),m=0,v="up",g.trigger("touchspin.on.startspin"),g.trigger("touchspin.on.startupspin"),f=setTimeout((function(){h=setInterval((function(){m++,k()}),r.stepinterval)}),r.stepintervaldelay)}function 
A(){switch(clearTimeout(d),clearTimeout(f),clearInterval(p),clearInterval(h),v){case"up":g.trigger("touchspin.on.stopupspin"),g.trigger("touchspin.on.stopspin");break;case"down":g.trigger("touchspin.on.stopdownspin"),g.trigger("touchspin.on.stopspin")}m=0,v=!1}g.data("alreadyinitialized")||(g.data("alreadyinitialized",!0),t+=1,g.data("spinnerid",t),g.is("input")?(""!==(r=e.extend({},i,y,function(){var t={};return e.each(o,(function(e,n){var i="bts-"+n;g.is("[data-"+i+"]")&&(t[e]=g.data(i))})),t}(),n)).initval&&""===g.val()&&g.val(r.initval),x(),function(){var t=g.val(),n=g.parent();""!==t&&(t=r.callback_after_calculation(Number(t).toFixed(r.decimals))),g.data("initvalue",t).val(t),g.addClass("form-control"),n.hasClass("input-group")?function(t){t.addClass("bootstrap-touchspin");var n,i,o=g.prev(),s=g.next(),a='<span class="input-group-addon bootstrap-touchspin-prefix bootstrap-touchspin-injected"><span class="input-group-text">'+r.prefix+"</span></span>",c='<span class="input-group-addon bootstrap-touchspin-postfix bootstrap-touchspin-injected"><span class="input-group-text">'+r.postfix+"</span></span>";o.hasClass("input-group-btn")||o.hasClass("input-group-text")?(n='<button class="'+r.buttondown_class+' bootstrap-touchspin-down bootstrap-touchspin-injected" type="button">'+r.buttondown_txt+"</button>",o.append(n)):(n='<span class="input-group-btn bootstrap-touchspin-injected"><button class="'+r.buttondown_class+' bootstrap-touchspin-down" type="button">'+r.buttondown_txt+"</button></span>",e(n).insertBefore(g)),s.hasClass("input-group-btn")||s.hasClass("input-group-text")?(i='<button class="'+r.buttonup_class+' bootstrap-touchspin-up bootstrap-touchspin-injected" type="button">'+r.buttonup_txt+"</button>",s.text(i)):(i='<span class="input-group-btn bootstrap-touchspin-injected"><button class="'+r.buttonup_class+' bootstrap-touchspin-up" type="button">'+r.buttonup_txt+"</button></span>",e(i).insertAfter(g)),e(a).insertBefore(g),e(c).insertAfter(g),l=t}(n):function(){var t,n="";g.hasClass("input-sm")&&(n="input-group-sm"),g.hasClass("input-lg")&&(n="input-group-lg"),t=r.verticalbuttons?'<div class="input-group '+n+' bootstrap-touchspin bootstrap-touchspin-injected"><span class="input-group-addon bootstrap-touchspin-prefix"><span class="input-group-text">'+r.prefix+'</span></span><span class="input-group-addon bootstrap-touchspin-postfix"><span class="input-group-text">'+r.postfix+'</span></span><span class="input-group-btn-vertical"><button class="'+r.buttondown_class+" bootstrap-touchspin-up "+r.verticalupclass+'" type="button">'+r.verticalup+'</button><button class="'+r.buttonup_class+" bootstrap-touchspin-down "+r.verticaldownclass+'" type="button">'+r.verticaldown+"</button></span></div>":'<div class="input-group bootstrap-touchspin bootstrap-touchspin-injected"><span class="input-group-btn"><button class="'+r.buttondown_class+' bootstrap-touchspin-down" type="button">'+r.buttondown_txt+'</button></span><span class="input-group-addon bootstrap-touchspin-prefix"><span class="input-group-text">'+r.prefix+'</span></span><span class="input-group-addon bootstrap-touchspin-postfix"><span class="input-group-text">'+r.postfix+'</span></span><span class="input-group-btn"><button class="'+r.buttonup_class+' bootstrap-touchspin-up" 
type="button">'+r.buttonup_txt+"</button></span></div>",l=e(t).insertBefore(g),e(".bootstrap-touchspin-prefix",l).after(g),g.hasClass("input-sm")?l.addClass("input-group-sm"):g.hasClass("input-lg")&&l.addClass("input-group-lg")}()}(),c={down:e(".bootstrap-touchspin-down",l),up:e(".bootstrap-touchspin-up",l),input:e("input",l),prefix:e(".bootstrap-touchspin-prefix",l).addClass(r.prefix_extraclass),postfix:e(".bootstrap-touchspin-postfix",l).addClass(r.postfix_extraclass)},b(),g.on("keydown.touchspin",(function(e){var t=e.keyCode||e.which;38===t?("up"!==v&&(k(),S()),e.preventDefault()):40===t&&("down"!==v&&(E(),T()),e.preventDefault())})),g.on("keyup.touchspin",(function(e){var t=e.keyCode||e.which;38!==t&&40!==t||A()})),g.on("blur.touchspin",(function(){x(),g.val(r.callback_after_calculation(g.val()))})),c.down.on("keydown",(function(e){var t=e.keyCode||e.which;32!==t&&13!==t||("down"!==v&&(E(),T()),e.preventDefault())})),c.down.on("keyup.touchspin",(function(e){var t=e.keyCode||e.which;32!==t&&13!==t||A()})),c.up.on("keydown.touchspin",(function(e){var t=e.keyCode||e.which;32!==t&&13!==t||("up"!==v&&(k(),S()),e.preventDefault())})),c.up.on("keyup.touchspin",(function(e){var t=e.keyCode||e.which;32!==t&&13!==t||A()})),c.down.on("mousedown.touchspin",(function(e){c.down.off("touchstart.touchspin"),g.is(":disabled")||(E(),T(),e.preventDefault(),e.stopPropagation())})),c.down.on("touchstart.touchspin",(function(e){c.down.off("mousedown.touchspin"),g.is(":disabled")||(E(),T(),e.preventDefault(),e.stopPropagation())})),c.up.on("mousedown.touchspin",(function(e){c.up.off("touchstart.touchspin"),g.is(":disabled")||(k(),S(),e.preventDefault(),e.stopPropagation())})),c.up.on("touchstart.touchspin",(function(e){c.up.off("mousedown.touchspin"),g.is(":disabled")||(k(),S(),e.preventDefault(),e.stopPropagation())})),c.up.on("mouseup.touchspin mouseout.touchspin touchleave.touchspin touchend.touchspin touchcancel.touchspin",(function(e){v&&(e.stopPropagation(),A())})),c.down.on("mouseup.touchspin mouseout.touchspin touchleave.touchspin touchend.touchspin touchcancel.touchspin",(function(e){v&&(e.stopPropagation(),A())})),c.down.on("mousemove.touchspin touchmove.touchspin",(function(e){v&&(e.stopPropagation(),e.preventDefault())})),c.up.on("mousemove.touchspin touchmove.touchspin",(function(e){v&&(e.stopPropagation(),e.preventDefault())})),g.on("mousewheel.touchspin DOMMouseScroll.touchspin",(function(e){if(r.mousewheel&&g.is(":focus")){var t=e.originalEvent.wheelDelta||-e.originalEvent.deltaY||-e.originalEvent.detail;e.stopPropagation(),e.preventDefault(),(t<0?E:k)()}})),g.on("touchspin.destroy",(function(){!function(){var t=g.parent();A(),g.off(".touchspin"),t.hasClass("bootstrap-touchspin-injected")?(g.siblings().remove(),g.unwrap()):(e(".bootstrap-touchspin-injected",t).remove(),t.removeClass("bootstrap-touchspin")),g.data("alreadyinitialized",!1)}()})),g.on("touchspin.uponce",(function(){A(),k()})),g.on("touchspin.downonce",(function(){A(),E()})),g.on("touchspin.startupspin",(function(){S()})),g.on("touchspin.startdownspin",(function(){T()})),g.on("touchspin.stopspin",(function(){A()})),g.on("touchspin.updatesettings",(function(t,n){!function(t){(function(t){r=e.extend({},r,t),t.postfix&&(0===g.parent().find(".bootstrap-touchspin-postfix").length&&a.insertAfter(g),g.parent().find(".bootstrap-touchspin-postfix .input-group-text").text(t.postfix)),t.prefix&&(0===g.parent().find(".bootstrap-touchspin-prefix").length&&s.insertBefore(g),g.parent().find(".bootstrap-touchspin-prefix 
.input-group-text").text(t.prefix)),b()})(t),x();var n=c.input.val();""!==n&&(n=Number(r.callback_before_calculation(c.input.val())),c.input.val(r.callback_after_calculation(Number(n).toFixed(r.decimals))))}(n)}))):console.log("Must be an input."))}))}}));
_app.js
import "../styles/globals.css"; import "katex/dist/katex.min.css";
function MyApp({ Component, pageProps }) { return <Component {...pageProps} />; } export default MyApp;
import "mathquill/build/mathquill.css"; import "mathquill4quill/mathquill4quill.css"; import "react-quill/dist/quill.snow.css";
wavelet_compress.py
"""Image compression using the discrete wavelet transform."""
import pywt
import matplotlib.pyplot as plt
from matplotlib.image import imread
import numpy as np

plt.rcParams['figure.figsize'] = [8, 8]
plt.rcParams.update({'font.size': 18})

im = imread('data/dog.jpg')
im_gray = np.mean(im, -1)  # convert RGB to grayscale

# Wavelet compression
n = 4
# Use the Daubechies 1 wavelet family.
w = 'db1'
coeffs = pywt.wavedec2(im_gray, wavelet=w, level=n)
coeff_arr, coeff_slices = pywt.coeffs_to_array(coeffs)
Csort = np.sort(np.abs(coeff_arr.reshape(-1)))

for keep in (0.1, 0.05, 0.01, 0.005):
    thresh = Csort[int(np.floor((1 - keep) * len(Csort)))]
    ind = np.abs(coeff_arr) > thresh
    Cfilt = coeff_arr * ind  # zero out coefficients below the threshold

    coeffs_filt = pywt.array_to_coeffs(Cfilt, coeff_slices, output_format='wavedec2')

    # Plot the reconstruction
    Arecon = pywt.waverec2(coeffs_filt, wavelet=w)
    plt.figure()
    plt.imshow(Arecon.astype('uint8'), cmap='gray')
    plt.axis('off')
    plt.title('keep = ' + str(keep))
    plt.show()

# Conclusion: as we can see, image compression works better when we use
# wavelets compared with the FFT.
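# For reference, a minimal sketch of the FFT-based compression that the conclusion
# above compares against (illustrative, not part of the original script; it reuses
# `im_gray` and the same `keep` fractions):
Bt = np.fft.fft2(im_gray)
Btsort = np.sort(np.abs(Bt.reshape(-1)))
for keep in (0.1, 0.05, 0.01, 0.005):
    thresh = Btsort[int(np.floor((1 - keep) * len(Btsort)))]
    ind = np.abs(Bt) > thresh        # keep only the largest-magnitude Fourier coefficients
    Atlow = Bt * ind
    Alow = np.fft.ifft2(Atlow).real  # inverse FFT back to the spatial domain
    plt.figure()
    plt.imshow(Alow.astype('uint8'), cmap='gray')
    plt.axis('off')
    plt.title('FFT keep = ' + str(keep))
    plt.show()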
import_files.py
import os
from os.path import isfile, join

import adal
from settings import settings
from office365.graph_client import GraphClient


def get_token():
    """Acquire a token via the client credentials flow."""
    authority_url = 'https://login.microsoftonline.com/{0}'.format(settings['tenant'])
    auth_ctx = adal.AuthenticationContext(authority_url)
    token = auth_ctx.acquire_token_with_client_credentials(
        "https://graph.microsoft.com",
        settings['client_credentials']['client_id'],
        settings['client_credentials']['client_secret'])
    return token


def upload_files(remote_drive, local_root_path):
    """Uploads files from a local folder into a OneDrive drive.

    :type remote_drive: Drive
    :type local_root_path: str
    """
    for name in os.listdir(local_root_path):
        path = join(local_root_path, name)
        if isfile(path):
            with open(path, 'rb') as local_file:
                content = local_file.read()
                uploaded_drive_item = remote_drive.root.upload(name, content).execute_query()
                print("File '{0}' uploaded into {1}".format(path, uploaded_drive_item.web_url))


# get the target drive
client = GraphClient(get_token)
user_name = settings.get('test_alt_account_name')
target_drive = client.users[user_name].drive

# import local files into OneDrive
upload_files(target_drive, "../data")
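# A possible variant (hypothetical helper, not part of the original script): upload
# only files with whitelisted extensions, reusing the same upload call as above.
# `upload_files_filtered` and `extensions` are illustrative names.
def upload_files_filtered(remote_drive, local_root_path, extensions=('.csv', '.xlsx')):
    """Like upload_files, but skips files whose extension is not in `extensions`."""
    for name in os.listdir(local_root_path):
        path = join(local_root_path, name)
        if isfile(path) and os.path.splitext(name)[1].lower() in extensions:
            with open(path, 'rb') as local_file:
                content = local_file.read()
                uploaded = remote_drive.root.upload(name, content).execute_query()
                print("File '{0}' uploaded into {1}".format(path, uploaded.web_url))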
generated.rs
// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================

use std::error::Error;
use std::fmt;
use std::io;

#[allow(warnings)]
use futures::future;
use futures::Future;
use rusoto_core::reactor::{CredentialsProvider, RequestDispatcher};
use rusoto_core::region;
use rusoto_core::request::DispatchSignedRequest;
use rusoto_core::{ClientInner, RusotoFuture};

use rusoto_core::credential::{CredentialsError, ProvideAwsCredentials};
use rusoto_core::request::HttpDispatchError;

use hyper::StatusCode;
use rusoto_core::signature::SignedRequest;
use serde_json;
use serde_json::from_str;
use serde_json::Value as SerdeJsonValue;

/// <p>Represents the input for <code>AddTagsToStream</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct AddTagsToStreamInput {
    /// <p>The name of the stream.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
    /// <p>The set of key-value pairs to use to create the tags.</p>
    #[serde(rename = "Tags")]
    pub tags: ::std::collections::HashMap<String, String>,
}

/// <p>Represents the input for <code>CreateStream</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateStreamInput {
    /// <p>The number of shards that the stream will use. The throughput of the stream is a function of the number of shards; more shards are required for greater provisioned throughput.</p> <p>DefaultShardLimit;</p>
    #[serde(rename = "ShardCount")]
    pub shard_count: i64,
    /// <p>A name to identify the stream. The stream name is scoped to the AWS account used by the application that creates the stream. It is also scoped by AWS Region. That is, two streams in two different AWS accounts can have the same name. Two streams in the same AWS account but in two different Regions can also have the same name.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents the input for <a>DecreaseStreamRetentionPeriod</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DecreaseStreamRetentionPeriodInput {
    /// <p>The new retention period of the stream, in hours. Must be less than the current retention period.</p>
    #[serde(rename = "RetentionPeriodHours")]
    pub retention_period_hours: i64,
    /// <p>The name of the stream to modify.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents the input for <a>DeleteStream</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteStreamInput {
    /// <p>The name of the stream to delete.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeLimitsInput {}

#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct DescribeLimitsOutput {
    /// <p>The number of open shards.</p>
    #[serde(rename = "OpenShardCount")]
    pub open_shard_count: i64,
    /// <p>The maximum number of shards.</p>
    #[serde(rename = "ShardLimit")]
    pub shard_limit: i64,
}

/// <p>Represents the input for <code>DescribeStream</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeStreamInput {
    /// <p>The shard ID of the shard to start with.</p>
    #[serde(rename = "ExclusiveStartShardId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exclusive_start_shard_id: Option<String>,
    /// <p>The maximum number of shards to return in a single call. The default value is 100. If you specify a value greater than 100, at most 100 shards are returned.</p>
    #[serde(rename = "Limit")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<i64>,
    /// <p>The name of the stream to describe.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents the output for <code>DescribeStream</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct DescribeStreamOutput {
    /// <p>The current status of the stream, the stream Amazon Resource Name (ARN), an array of shard objects that comprise the stream, and whether there are more shards available.</p>
    #[serde(rename = "StreamDescription")]
    pub stream_description: StreamDescription,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeStreamSummaryInput {
    /// <p>The name of the stream to describe.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct DescribeStreamSummaryOutput {
    /// <p>A <a>StreamDescriptionSummary</a> containing information about the stream.</p>
    #[serde(rename = "StreamDescriptionSummary")]
    pub stream_description_summary: StreamDescriptionSummary,
}

/// <p>Represents the input for <a>DisableEnhancedMonitoring</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DisableEnhancedMonitoringInput {
    /// <p>List of shard-level metrics to disable.</p> <p>The following are the valid shard-level metrics. The value "<code>ALL</code>" disables every metric.</p> <ul> <li> <p> <code>IncomingBytes</code> </p> </li> <li> <p> <code>IncomingRecords</code> </p> </li> <li> <p> <code>OutgoingBytes</code> </p> </li> <li> <p> <code>OutgoingRecords</code> </p> </li> <li> <p> <code>WriteProvisionedThroughputExceeded</code> </p> </li> <li> <p> <code>ReadProvisionedThroughputExceeded</code> </p> </li> <li> <p> <code>IteratorAgeMilliseconds</code> </p> </li> <li> <p> <code>ALL</code> </p> </li> </ul> <p>For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/monitoring-with-cloudwatch.html">Monitoring the Amazon Kinesis Data Streams Service with Amazon CloudWatch</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p>
    #[serde(rename = "ShardLevelMetrics")]
    pub shard_level_metrics: Vec<String>,
    /// <p>The name of the Kinesis data stream for which to disable enhanced monitoring.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents the input for <a>EnableEnhancedMonitoring</a>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct EnableEnhancedMonitoringInput {
    /// <p>List of shard-level metrics to enable.</p> <p>The following are the valid shard-level metrics. The value "<code>ALL</code>" enables every metric.</p> <ul> <li> <p> <code>IncomingBytes</code> </p> </li> <li> <p> <code>IncomingRecords</code> </p> </li> <li> <p> <code>OutgoingBytes</code> </p> </li> <li> <p> <code>OutgoingRecords</code> </p> </li> <li> <p> <code>WriteProvisionedThroughputExceeded</code> </p> </li> <li> <p> <code>ReadProvisionedThroughputExceeded</code> </p> </li> <li> <p> <code>IteratorAgeMilliseconds</code> </p> </li> <li> <p> <code>ALL</code> </p> </li> </ul> <p>For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/monitoring-with-cloudwatch.html">Monitoring the Amazon Kinesis Data Streams Service with Amazon CloudWatch</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p>
    #[serde(rename = "ShardLevelMetrics")]
    pub shard_level_metrics: Vec<String>,
    /// <p>The name of the stream for which to enable enhanced monitoring.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents enhanced metrics types.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct EnhancedMetrics {
    /// <p>List of shard-level metrics.</p> <p>The following are the valid shard-level metrics.
The value "<code>ALL</code>" enhances every metric.</p> <ul> <li> <p> <code>IncomingBytes</code> </p> </li> <li> <p> <code>IncomingRecords</code> </p> </li> <li> <p> <code>OutgoingBytes</code> </p> </li> <li> <p> <code>OutgoingRecords</code> </p> </li> <li> <p> <code>WriteProvisionedThroughputExceeded</code> </p> </li> <li> <p> <code>ReadProvisionedThroughputExceeded</code> </p> </li> <li> <p> <code>IteratorAgeMilliseconds</code> </p> </li> <li> <p> <code>ALL</code> </p> </li> </ul> <p>For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/monitoring-with-cloudwatch.html">Monitoring the Amazon Kinesis Data Streams Service with Amazon CloudWatch</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> #[serde(rename = "ShardLevelMetrics")] #[serde(skip_serializing_if = "Option::is_none")] pub shard_level_metrics: Option<Vec<String>>, } /// <p>Represents the output for <a>EnableEnhancedMonitoring</a> and <a>DisableEnhancedMonitoring</a>.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct EnhancedMonitoringOutput { /// <p>Represents the current state of the metrics that are in the enhanced state before the operation.</p> #[serde(rename = "CurrentShardLevelMetrics")] #[serde(skip_serializing_if = "Option::is_none")] pub current_shard_level_metrics: Option<Vec<String>>, /// <p>Represents the list of all the metrics that would be in the enhanced state after the operation.</p> #[serde(rename = "DesiredShardLevelMetrics")] #[serde(skip_serializing_if = "Option::is_none")] pub desired_shard_level_metrics: Option<Vec<String>>, /// <p>The name of the Kinesis data stream.</p> #[serde(rename = "StreamName")] #[serde(skip_serializing_if = "Option::is_none")] pub stream_name: Option<String>, } /// <p>Represents the input for <a>GetRecords</a>.</p> #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct GetRecordsInput { /// <p>The maximum number of records to return. Specify a value of up to 10,000. If you specify a value that is greater than 10,000, <a>GetRecords</a> throws <code>InvalidArgumentException</code>.</p> #[serde(rename = "Limit")] #[serde(skip_serializing_if = "Option::is_none")] pub limit: Option<i64>, /// <p>The position in the shard from which you want to start sequentially reading data records. A shard iterator specifies this position using the sequence number of a data record in the shard.</p> #[serde(rename = "ShardIterator")] pub shard_iterator: String, } /// <p>Represents the output for <a>GetRecords</a>.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct GetRecordsOutput { /// <p>The number of milliseconds the <a>GetRecords</a> response is from the tip of the stream, indicating how far behind current time the consumer is. A value of zero indicates that record processing is caught up, and there are no new records to process at this moment.</p> #[serde(rename = "MillisBehindLatest")] #[serde(skip_serializing_if = "Option::is_none")] pub millis_behind_latest: Option<i64>, /// <p>The next position in the shard from which to start sequentially reading data records. If set to <code>null</code>, the shard has been closed and the requested iterator does not return any more data. 
</p> #[serde(rename = "NextShardIterator")] #[serde(skip_serializing_if = "Option::is_none")] pub next_shard_iterator: Option<String>, /// <p>The data records retrieved from the shard.</p> #[serde(rename = "Records")] pub records: Vec<Record>, } /// <p>Represents the input for <code>GetShardIterator</code>.</p> #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct GetShardIteratorInput { /// <p>The shard ID of the Kinesis Data Streams shard to get the iterator for.</p> #[serde(rename = "ShardId")] pub shard_id: String, /// <p><p>Determines how the shard iterator is used to start reading data records from the shard.</p> <p>The following are the valid Amazon Kinesis shard iterator types:</p> <ul> <li> <p>AT<em>SEQUENCE</em>NUMBER - Start reading from the position denoted by a specific sequence number, provided in the value <code>StartingSequenceNumber</code>.</p> </li> <li> <p>AFTER<em>SEQUENCE</em>NUMBER - Start reading right after the position denoted by a specific sequence number, provided in the value <code>StartingSequenceNumber</code>.</p> </li> <li> <p>AT<em>TIMESTAMP - Start reading from the position denoted by a specific time stamp, provided in the value <code>Timestamp</code>.</p> </li> <li> <p>TRIM</em>HORIZON - Start reading at the last untrimmed record in the shard in the system, which is the oldest data record in the shard.</p> </li> <li> <p>LATEST - Start reading just after the most recent record in the shard, so that you always read the most recent data in the shard.</p> </li> </ul></p> #[serde(rename = "ShardIteratorType")] pub shard_iterator_type: String, /// <p>The sequence number of the data record in the shard from which to start reading. Used with shard iterator type AT_SEQUENCE_NUMBER and AFTER_SEQUENCE_NUMBER.</p> #[serde(rename = "StartingSequenceNumber")] #[serde(skip_serializing_if = "Option::is_none")] pub starting_sequence_number: Option<String>, /// <p>The name of the Amazon Kinesis data stream.</p> #[serde(rename = "StreamName")] pub stream_name: String, /// <p>The time stamp of the data record from which to start reading. Used with shard iterator type AT_TIMESTAMP. A time stamp is the Unix epoch date with precision in milliseconds. For example, <code>2016-04-04T19:58:46.480-00:00</code> or <code>1459799926.480</code>. If a record with this exact time stamp does not exist, the iterator returned is for the next (later) record. If the time stamp is older than the current trim horizon, the iterator returned is for the oldest untrimmed data record (TRIM_HORIZON).</p> #[serde(rename = "Timestamp")] #[serde(skip_serializing_if = "Option::is_none")] pub timestamp: Option<f64>, } /// <p>Represents the output for <code>GetShardIterator</code>.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct GetShardIteratorOutput { /// <p>The position in the shard from which to start reading data records sequentially. 
A shard iterator specifies this position using the sequence number of a data record in a shard.</p> #[serde(rename = "ShardIterator")] #[serde(skip_serializing_if = "Option::is_none")] pub shard_iterator: Option<String>, } /// <p>The range of possible hash key values for the shard, which is a set of ordered contiguous positive integers.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct HashKeyRange { /// <p>The ending hash key of the hash key range.</p> #[serde(rename = "EndingHashKey")] pub ending_hash_key: String, /// <p>The starting hash key of the hash key range.</p> #[serde(rename = "StartingHashKey")] pub starting_hash_key: String, } /// <p>Represents the input for <a>IncreaseStreamRetentionPeriod</a>.</p> #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct IncreaseStreamRetentionPeriodInput { /// <p>The new retention period of the stream, in hours. Must be more than the current retention period.</p> #[serde(rename = "RetentionPeriodHours")] pub retention_period_hours: i64, /// <p>The name of the stream to modify.</p> #[serde(rename = "StreamName")] pub stream_name: String, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct ListShardsInput { /// <p>The ID of the shard to start the list with. </p> <p>If you don't specify this parameter, the default behavior is for <code>ListShards</code> to list the shards starting with the first one in the stream.</p> <p>You cannot specify this parameter if you specify <code>NextToken</code>.</p> #[serde(rename = "ExclusiveStartShardId")] #[serde(skip_serializing_if = "Option::is_none")] pub exclusive_start_shard_id: Option<String>, /// <p>The maximum number of shards to return in a single call to <code>ListShards</code>. The minimum value you can specify for this parameter is 1, and the maximum is 1,000, which is also the default.</p> <p>When the number of shards to be listed is greater than the value of <code>MaxResults</code>, the response contains a <code>NextToken</code> value that you can use in a subsequent call to <code>ListShards</code> to list the next set of shards.</p> #[serde(rename = "MaxResults")] #[serde(skip_serializing_if = "Option::is_none")] pub max_results: Option<i64>, /// <p><p>When the number of shards in the data stream is greater than the default value for the <code>MaxResults</code> parameter, or if you explicitly specify a value for <code>MaxResults</code> that is less than the number of shards in the data stream, the response includes a pagination token named <code>NextToken</code>. You can specify this <code>NextToken</code> value in a subsequent call to <code>ListShards</code> to list the next set of shards.</p> <p>Don&#39;t specify <code>StreamName</code> or <code>StreamCreationTimestamp</code> if you specify <code>NextToken</code> because the latter unambiguously identifies the stream.</p> <p>You can optionally specify a value for the <code>MaxResults</code> parameter when you specify <code>NextToken</code>. If you specify a <code>MaxResults</code> value that is less than the number of shards that the operation returns if you don&#39;t specify <code>MaxResults</code>, the response will contain a new <code>NextToken</code> value. You can use the new <code>NextToken</code> value in a subsequent call to the <code>ListShards</code> operation.</p> <important> <p>Tokens expire after 300 seconds. When you obtain a value for <code>NextToken</code> in the response to a call to <code>ListShards</code>, you have 300 seconds to use that value. 
    /// If you specify an expired token in a call to <code>ListShards</code>, you get <code>ExpiredNextTokenException</code>.</p> </important></p>
    #[serde(rename = "NextToken")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
    /// <p>Specify this input parameter to distinguish data streams that have the same name. For example, if you create a data stream and then delete it, and you later create another data stream with the same name, you can use this input parameter to specify which of the two streams you want to list the shards for.</p> <p>You cannot specify this parameter if you specify the <code>NextToken</code> parameter.</p>
    #[serde(rename = "StreamCreationTimestamp")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_creation_timestamp: Option<f64>,
    /// <p>The name of the data stream whose shards you want to list. </p> <p>You cannot specify this parameter if you specify the <code>NextToken</code> parameter.</p>
    #[serde(rename = "StreamName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_name: Option<String>,
}

#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct ListShardsOutput {
    /// <p><p>When the number of shards in the data stream is greater than the default value for the <code>MaxResults</code> parameter, or if you explicitly specify a value for <code>MaxResults</code> that is less than the number of shards in the data stream, the response includes a pagination token named <code>NextToken</code>. You can specify this <code>NextToken</code> value in a subsequent call to <code>ListShards</code> to list the next set of shards. For more information about the use of this pagination token when calling the <code>ListShards</code> operation, see <a>ListShardsInput$NextToken</a>.</p> <important> <p>Tokens expire after 300 seconds. When you obtain a value for <code>NextToken</code> in the response to a call to <code>ListShards</code>, you have 300 seconds to use that value. If you specify an expired token in a call to <code>ListShards</code>, you get <code>ExpiredNextTokenException</code>.</p> </important></p>
    #[serde(rename = "NextToken")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub next_token: Option<String>,
    /// <p>An array of JSON objects. Each object represents one shard and specifies the IDs of the shard, the shard's parent, and the shard that's adjacent to the shard's parent. Each object also contains the starting and ending hash keys and the starting and ending sequence numbers for the shard.</p>
    #[serde(rename = "Shards")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shards: Option<Vec<Shard>>,
}

/// <p>Represents the input for <code>ListStreams</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListStreamsInput {
    /// <p>The name of the stream to start the list with.</p>
    #[serde(rename = "ExclusiveStartStreamName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exclusive_start_stream_name: Option<String>,
    /// <p>The maximum number of streams to list.</p>
    #[serde(rename = "Limit")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<i64>,
}

/// <p>Represents the output for <code>ListStreams</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct ListStreamsOutput {
    /// <p>If set to <code>true</code>, there are more streams available to list.</p>
    #[serde(rename = "HasMoreStreams")]
    pub has_more_streams: bool,
    /// <p>The names of the streams that are associated with the AWS account making the <code>ListStreams</code> request.</p>
    #[serde(rename = "StreamNames")]
    pub stream_names: Vec<String>,
}

/// <p>Represents the input for <code>ListTagsForStream</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListTagsForStreamInput {
    /// <p>The key to use as the starting point for the list of tags. If this parameter is set, <code>ListTagsForStream</code> gets all tags that occur after <code>ExclusiveStartTagKey</code>. </p>
    #[serde(rename = "ExclusiveStartTagKey")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exclusive_start_tag_key: Option<String>,
    /// <p>The number of tags to return. If this number is less than the total number of tags associated with the stream, <code>HasMoreTags</code> is set to <code>true</code>. To list additional tags, set <code>ExclusiveStartTagKey</code> to the last key in the response.</p>
    #[serde(rename = "Limit")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<i64>,
    /// <p>The name of the stream.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents the output for <code>ListTagsForStream</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct ListTagsForStreamOutput {
    /// <p>If set to <code>true</code>, more tags are available. To request additional tags, set <code>ExclusiveStartTagKey</code> to the key of the last tag returned.</p>
    #[serde(rename = "HasMoreTags")]
    pub has_more_tags: bool,
    /// <p>A list of tags associated with <code>StreamName</code>, starting with the first tag after <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>. </p>
    #[serde(rename = "Tags")]
    pub tags: Vec<Tag>,
}

/// <p>Represents the input for <code>MergeShards</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct MergeShardsInput {
    /// <p>The shard ID of the adjacent shard for the merge.</p>
    #[serde(rename = "AdjacentShardToMerge")]
    pub adjacent_shard_to_merge: String,
    /// <p>The shard ID of the shard to combine with the adjacent shard for the merge.</p>
    #[serde(rename = "ShardToMerge")]
    pub shard_to_merge: String,
    /// <p>The name of the stream for the merge.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents the input for <code>PutRecord</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct PutRecordInput {
    /// <p>The data blob to put into the record, which is base64-encoded when the blob is serialized. When the data blob (the payload before base64-encoding) is added to the partition key size, the total size must not exceed the maximum record size (1 MB).</p>
    #[serde(rename = "Data")]
    #[serde(
        deserialize_with = "::rusoto_core::serialization::SerdeBlob::deserialize_blob",
        serialize_with = "::rusoto_core::serialization::SerdeBlob::serialize_blob",
        default
    )]
    pub data: Vec<u8>,
    /// <p>The hash value used to explicitly determine the shard the data record is assigned to by overriding the partition key hash.</p>
    #[serde(rename = "ExplicitHashKey")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub explicit_hash_key: Option<String>,
    /// <p>Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.</p>
    #[serde(rename = "PartitionKey")]
    pub partition_key: String,
    /// <p>Guarantees strictly increasing sequence numbers, for puts from the same client and to the same partition key. Usage: set the <code>SequenceNumberForOrdering</code> of record <i>n</i> to the sequence number of record <i>n-1</i> (as returned in the result when putting record <i>n-1</i>). If this parameter is not set, records are coarsely ordered based on arrival time.</p>
    #[serde(rename = "SequenceNumberForOrdering")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sequence_number_for_ordering: Option<String>,
    /// <p>The name of the stream to put the data record into.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p>Represents the output for <code>PutRecord</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct PutRecordOutput {
    /// <p><p>The encryption type to use on the record. This parameter can be one of the following values:</p> <ul> <li> <p> <code>NONE</code>: Do not encrypt the records in the stream.</p> </li> <li> <p> <code>KMS</code>: Use server-side encryption on the records in the stream using a customer-managed AWS KMS key.</p> </li> </ul></p>
    #[serde(rename = "EncryptionType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encryption_type: Option<String>,
    /// <p>The sequence number identifier that was assigned to the put data record. The sequence number for the record is unique across all records in the stream.
    /// A sequence number is the identifier associated with every record put into the stream.</p>
    #[serde(rename = "SequenceNumber")]
    pub sequence_number: String,
    /// <p>The shard ID of the shard where the data record was placed.</p>
    #[serde(rename = "ShardId")]
    pub shard_id: String,
}

/// <p>A <code>PutRecords</code> request.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct PutRecordsInput {
    /// <p>The records associated with the request.</p>
    #[serde(rename = "Records")]
    pub records: Vec<PutRecordsRequestEntry>,
    /// <p>The stream name associated with the request.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

/// <p> <code>PutRecords</code> results.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct PutRecordsOutput {
    /// <p><p>The encryption type used on the records. This parameter can be one of the following values:</p> <ul> <li> <p> <code>NONE</code>: Do not encrypt the records.</p> </li> <li> <p> <code>KMS</code>: Use server-side encryption on the records using a customer-managed AWS KMS key.</p> </li> </ul></p>
    #[serde(rename = "EncryptionType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encryption_type: Option<String>,
    /// <p>The number of unsuccessfully processed records in a <code>PutRecords</code> request.</p>
    #[serde(rename = "FailedRecordCount")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub failed_record_count: Option<i64>,
    /// <p>An array of successfully and unsuccessfully processed record results, correlated with the request by natural ordering. A record that is successfully added to a stream includes <code>SequenceNumber</code> and <code>ShardId</code> in the result. A record that fails to be added to a stream includes <code>ErrorCode</code> and <code>ErrorMessage</code> in the result.</p>
    #[serde(rename = "Records")]
    pub records: Vec<PutRecordsResultEntry>,
}

/// <p>Represents the output for <code>PutRecords</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct PutRecordsRequestEntry {
    /// <p>The data blob to put into the record, which is base64-encoded when the blob is serialized. When the data blob (the payload before base64-encoding) is added to the partition key size, the total size must not exceed the maximum record size (1 MB).</p>
    #[serde(rename = "Data")]
    #[serde(
        deserialize_with = "::rusoto_core::serialization::SerdeBlob::deserialize_blob",
        serialize_with = "::rusoto_core::serialization::SerdeBlob::serialize_blob",
        default
    )]
    pub data: Vec<u8>,
    /// <p>The hash value used to determine explicitly the shard that the data record is assigned to by overriding the partition key hash.</p>
    #[serde(rename = "ExplicitHashKey")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub explicit_hash_key: Option<String>,
    /// <p>Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.</p>
    #[serde(rename = "PartitionKey")]
    pub partition_key: String,
}

/// <p>Represents the result of an individual record from a <code>PutRecords</code> request. A record that is successfully added to a stream includes <code>SequenceNumber</code> and <code>ShardId</code> in the result. A record that fails to be added to the stream includes <code>ErrorCode</code> and <code>ErrorMessage</code> in the result.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct PutRecordsResultEntry {
    /// <p>The error code for an individual record result. <code>ErrorCodes</code> can be either <code>ProvisionedThroughputExceededException</code> or <code>InternalFailure</code>.</p>
    #[serde(rename = "ErrorCode")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error_code: Option<String>,
    /// <p>The error message for an individual record result. An <code>ErrorCode</code> value of <code>ProvisionedThroughputExceededException</code> has an error message that includes the account ID, stream name, and shard ID. An <code>ErrorCode</code> value of <code>InternalFailure</code> has the error message <code>"Internal Service Failure"</code>.</p>
    #[serde(rename = "ErrorMessage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error_message: Option<String>,
    /// <p>The sequence number for an individual record result.</p>
    #[serde(rename = "SequenceNumber")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sequence_number: Option<String>,
    /// <p>The shard ID for an individual record result.</p>
    #[serde(rename = "ShardId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub shard_id: Option<String>,
}

/// <p>The unit of data of the Kinesis data stream, which is composed of a sequence number, a partition key, and a data blob.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct Record {
    /// <p>The approximate time that the record was inserted into the stream.</p>
    #[serde(rename = "ApproximateArrivalTimestamp")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub approximate_arrival_timestamp: Option<f64>,
    /// <p>The data blob. The data in the blob is both opaque and immutable to Kinesis Data Streams, which does not inspect, interpret, or change the data in the blob in any way. When the data blob (the payload before base64-encoding) is added to the partition key size, the total size must not exceed the maximum record size (1 MB).</p>
    #[serde(rename = "Data")]
    #[serde(
        deserialize_with = "::rusoto_core::serialization::SerdeBlob::deserialize_blob",
        serialize_with = "::rusoto_core::serialization::SerdeBlob::serialize_blob",
        default
    )]
    pub data: Vec<u8>,
    /// <p><p>The encryption type used on the record. This parameter can be one of the following values:</p> <ul> <li> <p> <code>NONE</code>: Do not encrypt the records in the stream.</p> </li> <li> <p> <code>KMS</code>: Use server-side encryption on the records in the stream using a customer-managed AWS KMS key.</p> </li> </ul></p>
    #[serde(rename = "EncryptionType")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encryption_type: Option<String>,
    /// <p>Identifies which shard in the stream the data record is assigned to.</p>
    #[serde(rename = "PartitionKey")]
    pub partition_key: String,
    /// <p>The unique identifier of the record within its shard.</p>
    #[serde(rename = "SequenceNumber")]
    pub sequence_number: String,
}

/// <p>Represents the input for <code>RemoveTagsFromStream</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct RemoveTagsFromStreamInput {
    /// <p>The name of the stream.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
    /// <p>A list of tag keys. Each corresponding tag is removed from the stream.</p>
    #[serde(rename = "TagKeys")]
    pub tag_keys: Vec<String>,
}

/// <p>The range of possible sequence numbers for the shard.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct SequenceNumberRange {
    /// <p>The ending sequence number for the range. Shards that are in the OPEN state have an ending sequence number of <code>null</code>.</p>
    #[serde(rename = "EndingSequenceNumber")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ending_sequence_number: Option<String>,
    /// <p>The starting sequence number for the range.</p>
    #[serde(rename = "StartingSequenceNumber")]
    pub starting_sequence_number: String,
}

/// <p>A uniquely identified group of data records in a Kinesis data stream.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
pub struct Shard {
    /// <p>The shard ID of the shard adjacent to the shard's parent.</p>
    #[serde(rename = "AdjacentParentShardId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub adjacent_parent_shard_id: Option<String>,
    /// <p>The range of possible hash key values for the shard, which is a set of ordered contiguous positive integers.</p>
    #[serde(rename = "HashKeyRange")]
    pub hash_key_range: HashKeyRange,
    /// <p>The shard ID of the shard's parent.</p>
    #[serde(rename = "ParentShardId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent_shard_id: Option<String>,
    /// <p>The range of possible sequence numbers for the shard.</p>
    #[serde(rename = "SequenceNumberRange")]
    pub sequence_number_range: SequenceNumberRange,
    /// <p>The unique identifier of the shard within the stream.</p>
    #[serde(rename = "ShardId")]
    pub shard_id: String,
}

/// <p>Represents the input for <code>SplitShard</code>.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct SplitShardInput {
    /// <p>A hash key value for the starting hash key of one of the child shards created by the split. The hash key range for a given shard constitutes a set of ordered contiguous positive integers. The value for <code>NewStartingHashKey</code> must be in the range of hash keys being mapped into the shard. The <code>NewStartingHashKey</code> hash key value and all higher hash key values in hash key range are distributed to one of the child shards. All the lower hash key values in the range are distributed to the other child shard.</p>
    #[serde(rename = "NewStartingHashKey")]
    pub new_starting_hash_key: String,
    /// <p>The shard ID of the shard to split.</p>
    #[serde(rename = "ShardToSplit")]
    pub shard_to_split: String,
    /// <p>The name of the stream for the shard split.</p>
    #[serde(rename = "StreamName")]
    pub stream_name: String,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct StartStreamEncryptionInput {
    /// <p>The encryption type to use. The only valid value is <code>KMS</code>.</p>
    #[serde(rename = "EncryptionType")]
    pub encryption_type: String,
    /// <p><p>The GUID for the customer-managed AWS KMS key to use for encryption.
This value can be a globally unique identifier, a fully specified Amazon Resource Name (ARN) to either an alias or a key, or an alias name prefixed by &quot;alias/&quot;.You can also use a master key owned by Kinesis Data Streams by specifying the alias <code>aws/kinesis</code>.</p> <ul> <li> <p>Key ARN example: <code>arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias ARN example: <code>arn:aws:kms:us-east-1:123456789012:alias/MyAliasName</code> </p> </li> <li> <p>Globally unique key ID example: <code>12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias name example: <code>alias/MyAliasName</code> </p> </li> <li> <p>Master key owned by Kinesis Data Streams: <code>alias/aws/kinesis</code> </p> </li> </ul></p> #[serde(rename = "KeyId")] pub key_id: String, /// <p>The name of the stream for which to start encrypting records.</p> #[serde(rename = "StreamName")] pub stream_name: String, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct StopStreamEncryptionInput { /// <p>The encryption type. The only valid value is <code>KMS</code>.</p> #[serde(rename = "EncryptionType")] pub encryption_type: String, /// <p><p>The GUID for the customer-managed AWS KMS key to use for encryption. This value can be a globally unique identifier, a fully specified Amazon Resource Name (ARN) to either an alias or a key, or an alias name prefixed by &quot;alias/&quot;.You can also use a master key owned by Kinesis Data Streams by specifying the alias <code>aws/kinesis</code>.</p> <ul> <li> <p>Key ARN example: <code>arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias ARN example: <code>arn:aws:kms:us-east-1:123456789012:alias/MyAliasName</code> </p> </li> <li> <p>Globally unique key ID example: <code>12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias name example: <code>alias/MyAliasName</code> </p> </li> <li> <p>Master key owned by Kinesis Data Streams: <code>alias/aws/kinesis</code> </p> </li> </ul></p> #[serde(rename = "KeyId")] pub key_id: String, /// <p>The name of the stream on which to stop encrypting records.</p> #[serde(rename = "StreamName")] pub stream_name: String, } /// <p>Represents the output for <a>DescribeStream</a>.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct StreamDescription { /// <p><p>The server-side encryption type used on the stream. This parameter can be one of the following values:</p> <ul> <li> <p> <code>NONE</code>: Do not encrypt the records in the stream.</p> </li> <li> <p> <code>KMS</code>: Use server-side encryption on the records in the stream using a customer-managed AWS KMS key.</p> </li> </ul></p> #[serde(rename = "EncryptionType")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption_type: Option<String>, /// <p>Represents the current enhanced monitoring settings of the stream.</p> #[serde(rename = "EnhancedMonitoring")] pub enhanced_monitoring: Vec<EnhancedMetrics>, /// <p>If set to <code>true</code>, more shards in the stream are available to describe.</p> #[serde(rename = "HasMoreShards")] pub has_more_shards: bool, /// <p><p>The GUID for the customer-managed AWS KMS key to use for encryption. 
/// This value can be a globally unique identifier, a fully specified ARN to either an alias or a key, or an alias name prefixed by &quot;alias/&quot;. You can also use a master key owned by Kinesis Data Streams by specifying the alias <code>aws/kinesis</code>.</p> <ul> <li> <p>Key ARN example: <code>arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias ARN example: <code>arn:aws:kms:us-east-1:123456789012:alias/MyAliasName</code> </p> </li> <li> <p>Globally unique key ID example: <code>12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias name example: <code>alias/MyAliasName</code> </p> </li> <li> <p>Master key owned by Kinesis Data Streams: <code>alias/aws/kinesis</code> </p> </li> </ul></p>
#[serde(rename = "KeyId")] #[serde(skip_serializing_if = "Option::is_none")] pub key_id: Option<String>, /// <p>The current retention period, in hours.</p> #[serde(rename = "RetentionPeriodHours")] pub retention_period_hours: i64, /// <p>The shards that comprise the stream.</p> #[serde(rename = "Shards")] pub shards: Vec<Shard>, /// <p>The Amazon Resource Name (ARN) for the stream being described.</p> #[serde(rename = "StreamARN")] pub stream_arn: String, /// <p>The approximate time that the stream was created.</p> #[serde(rename = "StreamCreationTimestamp")] pub stream_creation_timestamp: f64, /// <p>The name of the stream being described.</p> #[serde(rename = "StreamName")] pub stream_name: String, /// <p><p>The current status of the stream being described. The stream status is one of the following states:</p> <ul> <li> <p> <code>CREATING</code> - The stream is being created. Kinesis Data Streams immediately returns and sets <code>StreamStatus</code> to <code>CREATING</code>.</p> </li> <li> <p> <code>DELETING</code> - The stream is being deleted. The specified stream is in the <code>DELETING</code> state until Kinesis Data Streams completes the deletion.</p> </li> <li> <p> <code>ACTIVE</code> - The stream exists and is ready for read and write operations or deletion. You should perform read and write operations only on an <code>ACTIVE</code> stream.</p> </li> <li> <p> <code>UPDATING</code> - Shards in the stream are being merged or split. Read and write operations continue to work while the stream is in the <code>UPDATING</code> state.</p> </li> </ul></p> #[serde(rename = "StreamStatus")] pub stream_status: String, } /// <p>Represents the output for <a>DescribeStreamSummary</a> </p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct StreamDescriptionSummary { /// <p><p>The encryption type used. This value is one of the following:</p> <ul> <li> <p> <code>KMS</code> </p> </li> <li> <p> <code>NONE</code> </p> </li> </ul></p> #[serde(rename = "EncryptionType")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption_type: Option<String>, /// <p>Represents the current enhanced monitoring settings of the stream.</p> #[serde(rename = "EnhancedMonitoring")] pub enhanced_monitoring: Vec<EnhancedMetrics>, /// <p><p>The GUID for the customer-managed AWS KMS key to use for encryption.
/// This value can be a globally unique identifier, a fully specified ARN to either an alias or a key, or an alias name prefixed by &quot;alias/&quot;. You can also use a master key owned by Kinesis Data Streams by specifying the alias <code>aws/kinesis</code>.</p> <ul> <li> <p>Key ARN example: <code>arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias ARN example: <code>arn:aws:kms:us-east-1:123456789012:alias/MyAliasName</code> </p> </li> <li> <p>Globally unique key ID example: <code>12345678-1234-1234-1234-123456789012</code> </p> </li> <li> <p>Alias name example: <code>alias/MyAliasName</code> </p> </li> <li> <p>Master key owned by Kinesis Data Streams: <code>alias/aws/kinesis</code> </p> </li> </ul></p>
#[serde(rename = "KeyId")] #[serde(skip_serializing_if = "Option::is_none")] pub key_id: Option<String>, /// <p>The number of open shards in the stream.</p> #[serde(rename = "OpenShardCount")] pub open_shard_count: i64, /// <p>The current retention period, in hours.</p> #[serde(rename = "RetentionPeriodHours")] pub retention_period_hours: i64, /// <p>The Amazon Resource Name (ARN) for the stream being described.</p> #[serde(rename = "StreamARN")] pub stream_arn: String, /// <p>The approximate time that the stream was created.</p> #[serde(rename = "StreamCreationTimestamp")] pub stream_creation_timestamp: f64, /// <p>The name of the stream being described.</p> #[serde(rename = "StreamName")] pub stream_name: String, /// <p><p>The current status of the stream being described. The stream status is one of the following states:</p> <ul> <li> <p> <code>CREATING</code> - The stream is being created. Kinesis Data Streams immediately returns and sets <code>StreamStatus</code> to <code>CREATING</code>.</p> </li> <li> <p> <code>DELETING</code> - The stream is being deleted. The specified stream is in the <code>DELETING</code> state until Kinesis Data Streams completes the deletion.</p> </li> <li> <p> <code>ACTIVE</code> - The stream exists and is ready for read and write operations or deletion. You should perform read and write operations only on an <code>ACTIVE</code> stream.</p> </li> <li> <p> <code>UPDATING</code> - Shards in the stream are being merged or split. Read and write operations continue to work while the stream is in the <code>UPDATING</code> state.</p> </li> </ul></p> #[serde(rename = "StreamStatus")] pub stream_status: String, } /// <p>Metadata assigned to the stream, consisting of a key-value pair.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct Tag { /// <p>A unique identifier for the tag. Maximum length: 128 characters. Valid characters: Unicode letters, digits, white space, _ . / = + - % @</p> #[serde(rename = "Key")] pub key: String, /// <p>An optional string, typically used to describe or define the tag. Maximum length: 256 characters. Valid characters: Unicode letters, digits, white space, _ . / = + - % @</p> #[serde(rename = "Value")] #[serde(skip_serializing_if = "Option::is_none")] pub value: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct UpdateShardCountInput { /// <p>The scaling type.
/// Uniform scaling creates shards of equal size.</p>
#[serde(rename = "ScalingType")] pub scaling_type: String, /// <p>The name of the stream.</p> #[serde(rename = "StreamName")] pub stream_name: String, /// <p>The new number of shards.</p> #[serde(rename = "TargetShardCount")] pub target_shard_count: i64, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] pub struct UpdateShardCountOutput { /// <p>The current number of shards.</p> #[serde(rename = "CurrentShardCount")] #[serde(skip_serializing_if = "Option::is_none")] pub current_shard_count: Option<i64>, /// <p>The name of the stream.</p> #[serde(rename = "StreamName")] #[serde(skip_serializing_if = "Option::is_none")] pub stream_name: Option<String>, /// <p>The updated number of shards.</p> #[serde(rename = "TargetShardCount")] #[serde(skip_serializing_if = "Option::is_none")] pub target_shard_count: Option<i64>, } /// Errors returned by AddTagsToStream #[derive(Debug, PartialEq)] pub enum AddTagsToStreamError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided.
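// Illustrative only: doubling a stream's shard count with the input above.
// UNIFORM_SCALING is the only documented scaling type, and the target is
// subject to service limits (for example, a single call cannot scale above
// double the current shard count). The stream name is a placeholder.
//
//     let input = UpdateShardCountInput {
//         scaling_type: "UNIFORM_SCALING".to_string(),
//         stream_name: "example-stream".to_string(),
//         target_shard_count: 4, // e.g. from 2 shards up to 4
//     };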
Unknown(String), } impl AddTagsToStreamError { pub fn from_body(body: &str) -> AddTagsToStreamError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { AddTagsToStreamError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { AddTagsToStreamError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { AddTagsToStreamError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { AddTagsToStreamError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { AddTagsToStreamError::Validation(error_message.to_string()) } _ => AddTagsToStreamError::Unknown(String::from(body)), } } Err(_) => AddTagsToStreamError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for AddTagsToStreamError { fn from(err: serde_json::error::Error) -> AddTagsToStreamError { AddTagsToStreamError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for AddTagsToStreamError { fn from(err: CredentialsError) -> AddTagsToStreamError { AddTagsToStreamError::Credentials(err) } } impl From<HttpDispatchError> for AddTagsToStreamError { fn from(err: HttpDispatchError) -> AddTagsToStreamError { AddTagsToStreamError::HttpDispatch(err) } } impl From<io::Error> for AddTagsToStreamError { fn from(err: io::Error) -> AddTagsToStreamError { AddTagsToStreamError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for AddTagsToStreamError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for AddTagsToStreamError { fn description(&self) -> &str { match *self { AddTagsToStreamError::InvalidArgument(ref cause) => cause, AddTagsToStreamError::LimitExceeded(ref cause) => cause, AddTagsToStreamError::ResourceInUse(ref cause) => cause, AddTagsToStreamError::ResourceNotFound(ref cause) => cause, AddTagsToStreamError::Validation(ref cause) => cause, AddTagsToStreamError::Credentials(ref err) => err.description(), AddTagsToStreamError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), AddTagsToStreamError::Unknown(ref cause) => cause, } } } /// Errors returned by CreateStream #[derive(Debug, PartialEq)] pub enum CreateStreamError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. 
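// Every `from_body` constructor in this module follows the pattern above: the
// service returns a JSON body whose `__type` field carries the exception name
// (optionally namespace-qualified with `#`), and that name is mapped onto an
// enum variant. A minimal sketch of the dispatch on a typical body:
//
//     let body = r#"{"__type":"com.amazonaws.kinesis#ResourceNotFoundException","message":"Stream example-stream not found"}"#;
//     match AddTagsToStreamError::from_body(body) {
//         AddTagsToStreamError::ResourceNotFound(msg) => assert!(msg.contains("not found")),
//         other => panic!("unexpected variant: {:?}", other),
//     }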
Unknown(String), } impl CreateStreamError { pub fn from_body(body: &str) -> CreateStreamError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { CreateStreamError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { CreateStreamError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { CreateStreamError::ResourceInUse(String::from(error_message)) } "ValidationException" => { CreateStreamError::Validation(error_message.to_string()) } _ => CreateStreamError::Unknown(String::from(body)), } } Err(_) => CreateStreamError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for CreateStreamError { fn from(err: serde_json::error::Error) -> CreateStreamError { CreateStreamError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for CreateStreamError { fn from(err: CredentialsError) -> CreateStreamError { CreateStreamError::Credentials(err) } } impl From<HttpDispatchError> for CreateStreamError { fn from(err: HttpDispatchError) -> CreateStreamError { CreateStreamError::HttpDispatch(err) } } impl From<io::Error> for CreateStreamError { fn from(err: io::Error) -> CreateStreamError { CreateStreamError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for CreateStreamError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateStreamError { fn description(&self) -> &str { match *self { CreateStreamError::InvalidArgument(ref cause) => cause, CreateStreamError::LimitExceeded(ref cause) => cause, CreateStreamError::ResourceInUse(ref cause) => cause, CreateStreamError::Validation(ref cause) => cause, CreateStreamError::Credentials(ref err) => err.description(), CreateStreamError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateStreamError::Unknown(ref cause) => cause, } } } /// Errors returned by DecreaseStreamRetentionPeriod #[derive(Debug, PartialEq)] pub enum DecreaseStreamRetentionPeriodError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(String), } impl DecreaseStreamRetentionPeriodError { pub fn from_body(body: &str) -> DecreaseStreamRetentionPeriodError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { DecreaseStreamRetentionPeriodError::InvalidArgument(String::from( error_message, )) } "LimitExceededException" => DecreaseStreamRetentionPeriodError::LimitExceeded( String::from(error_message), ), "ResourceInUseException" => DecreaseStreamRetentionPeriodError::ResourceInUse( String::from(error_message), ), "ResourceNotFoundException" => { DecreaseStreamRetentionPeriodError::ResourceNotFound(String::from( error_message, )) } "ValidationException" => { DecreaseStreamRetentionPeriodError::Validation(error_message.to_string()) } _ => DecreaseStreamRetentionPeriodError::Unknown(String::from(body)), } } Err(_) => DecreaseStreamRetentionPeriodError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for DecreaseStreamRetentionPeriodError { fn from(err: serde_json::error::Error) -> DecreaseStreamRetentionPeriodError { DecreaseStreamRetentionPeriodError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for DecreaseStreamRetentionPeriodError { fn from(err: CredentialsError) -> DecreaseStreamRetentionPeriodError { DecreaseStreamRetentionPeriodError::Credentials(err) } } impl From<HttpDispatchError> for DecreaseStreamRetentionPeriodError { fn from(err: HttpDispatchError) -> DecreaseStreamRetentionPeriodError { DecreaseStreamRetentionPeriodError::HttpDispatch(err) } } impl From<io::Error> for DecreaseStreamRetentionPeriodError { fn from(err: io::Error) -> DecreaseStreamRetentionPeriodError { DecreaseStreamRetentionPeriodError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DecreaseStreamRetentionPeriodError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DecreaseStreamRetentionPeriodError { fn description(&self) -> &str { match *self { DecreaseStreamRetentionPeriodError::InvalidArgument(ref cause) => cause, DecreaseStreamRetentionPeriodError::LimitExceeded(ref cause) => cause, DecreaseStreamRetentionPeriodError::ResourceInUse(ref cause) => cause, DecreaseStreamRetentionPeriodError::ResourceNotFound(ref cause) => cause, DecreaseStreamRetentionPeriodError::Validation(ref cause) => cause, DecreaseStreamRetentionPeriodError::Credentials(ref err) => err.description(), DecreaseStreamRetentionPeriodError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } DecreaseStreamRetentionPeriodError::Unknown(ref cause) => cause, } } } /// Errors returned by DeleteStream #[derive(Debug, PartialEq)] pub enum DeleteStreamError { /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. 
Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl DeleteStreamError { pub fn from_body(body: &str) -> DeleteStreamError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "LimitExceededException" => { DeleteStreamError::LimitExceeded(String::from(error_message)) } "ResourceNotFoundException" => { DeleteStreamError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { DeleteStreamError::Validation(error_message.to_string()) } _ => DeleteStreamError::Unknown(String::from(body)), } } Err(_) => DeleteStreamError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for DeleteStreamError { fn from(err: serde_json::error::Error) -> DeleteStreamError { DeleteStreamError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for DeleteStreamError { fn from(err: CredentialsError) -> DeleteStreamError { DeleteStreamError::Credentials(err) } } impl From<HttpDispatchError> for DeleteStreamError { fn from(err: HttpDispatchError) -> DeleteStreamError { DeleteStreamError::HttpDispatch(err) } } impl From<io::Error> for DeleteStreamError { fn from(err: io::Error) -> DeleteStreamError { DeleteStreamError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DeleteStreamError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DeleteStreamError { fn description(&self) -> &str { match *self { DeleteStreamError::LimitExceeded(ref cause) => cause, DeleteStreamError::ResourceNotFound(ref cause) => cause, DeleteStreamError::Validation(ref cause) => cause, DeleteStreamError::Credentials(ref err) => err.description(), DeleteStreamError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DeleteStreamError::Unknown(ref cause) => cause, } } } /// Errors returned by DescribeLimits #[derive(Debug, PartialEq)] pub enum DescribeLimitsError { /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(String), } impl DescribeLimitsError { pub fn from_body(body: &str) -> DescribeLimitsError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "LimitExceededException" => { DescribeLimitsError::LimitExceeded(String::from(error_message)) } "ValidationException" => { DescribeLimitsError::Validation(error_message.to_string()) } _ => DescribeLimitsError::Unknown(String::from(body)), } } Err(_) => DescribeLimitsError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for DescribeLimitsError { fn from(err: serde_json::error::Error) -> DescribeLimitsError { DescribeLimitsError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for DescribeLimitsError { fn from(err: CredentialsError) -> DescribeLimitsError { DescribeLimitsError::Credentials(err) } } impl From<HttpDispatchError> for DescribeLimitsError { fn from(err: HttpDispatchError) -> DescribeLimitsError { DescribeLimitsError::HttpDispatch(err) } } impl From<io::Error> for DescribeLimitsError { fn from(err: io::Error) -> DescribeLimitsError { DescribeLimitsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DescribeLimitsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeLimitsError { fn description(&self) -> &str { match *self { DescribeLimitsError::LimitExceeded(ref cause) => cause, DescribeLimitsError::Validation(ref cause) => cause, DescribeLimitsError::Credentials(ref err) => err.description(), DescribeLimitsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeLimitsError::Unknown(ref cause) => cause, } } } /// Errors returned by DescribeStream #[derive(Debug, PartialEq)] pub enum DescribeStreamError { /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(String), } impl DescribeStreamError { pub fn from_body(body: &str) -> DescribeStreamError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "LimitExceededException" => { DescribeStreamError::LimitExceeded(String::from(error_message)) } "ResourceNotFoundException" => { DescribeStreamError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { DescribeStreamError::Validation(error_message.to_string()) } _ => DescribeStreamError::Unknown(String::from(body)), } } Err(_) => DescribeStreamError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for DescribeStreamError { fn from(err: serde_json::error::Error) -> DescribeStreamError { DescribeStreamError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for DescribeStreamError { fn from(err: CredentialsError) -> DescribeStreamError { DescribeStreamError::Credentials(err) } } impl From<HttpDispatchError> for DescribeStreamError { fn from(err: HttpDispatchError) -> DescribeStreamError { DescribeStreamError::HttpDispatch(err) } } impl From<io::Error> for DescribeStreamError { fn from(err: io::Error) -> DescribeStreamError { DescribeStreamError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DescribeStreamError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeStreamError { fn description(&self) -> &str { match *self { DescribeStreamError::LimitExceeded(ref cause) => cause, DescribeStreamError::ResourceNotFound(ref cause) => cause, DescribeStreamError::Validation(ref cause) => cause, DescribeStreamError::Credentials(ref err) => err.description(), DescribeStreamError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeStreamError::Unknown(ref cause) => cause, } } } /// Errors returned by DescribeStreamSummary #[derive(Debug, PartialEq)] pub enum DescribeStreamSummaryError { /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(String), } impl DescribeStreamSummaryError { pub fn from_body(body: &str) -> DescribeStreamSummaryError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "LimitExceededException" => { DescribeStreamSummaryError::LimitExceeded(String::from(error_message)) } "ResourceNotFoundException" => { DescribeStreamSummaryError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { DescribeStreamSummaryError::Validation(error_message.to_string()) } _ => DescribeStreamSummaryError::Unknown(String::from(body)), } } Err(_) => DescribeStreamSummaryError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for DescribeStreamSummaryError { fn from(err: serde_json::error::Error) -> DescribeStreamSummaryError { DescribeStreamSummaryError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for DescribeStreamSummaryError { fn from(err: CredentialsError) -> DescribeStreamSummaryError { DescribeStreamSummaryError::Credentials(err) } } impl From<HttpDispatchError> for DescribeStreamSummaryError { fn from(err: HttpDispatchError) -> DescribeStreamSummaryError { DescribeStreamSummaryError::HttpDispatch(err) } } impl From<io::Error> for DescribeStreamSummaryError { fn from(err: io::Error) -> DescribeStreamSummaryError { DescribeStreamSummaryError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DescribeStreamSummaryError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeStreamSummaryError { fn description(&self) -> &str { match *self { DescribeStreamSummaryError::LimitExceeded(ref cause) => cause, DescribeStreamSummaryError::ResourceNotFound(ref cause) => cause, DescribeStreamSummaryError::Validation(ref cause) => cause, DescribeStreamSummaryError::Credentials(ref err) => err.description(), DescribeStreamSummaryError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } DescribeStreamSummaryError::Unknown(ref cause) => cause, } } } /// Errors returned by DisableEnhancedMonitoring #[derive(Debug, PartialEq)] pub enum DisableEnhancedMonitoringError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(String), } impl DisableEnhancedMonitoringError { pub fn from_body(body: &str) -> DisableEnhancedMonitoringError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { DisableEnhancedMonitoringError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { DisableEnhancedMonitoringError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { DisableEnhancedMonitoringError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { DisableEnhancedMonitoringError::ResourceNotFound(String::from( error_message, )) } "ValidationException" => { DisableEnhancedMonitoringError::Validation(error_message.to_string()) } _ => DisableEnhancedMonitoringError::Unknown(String::from(body)), } } Err(_) => DisableEnhancedMonitoringError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for DisableEnhancedMonitoringError { fn from(err: serde_json::error::Error) -> DisableEnhancedMonitoringError { DisableEnhancedMonitoringError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for DisableEnhancedMonitoringError { fn from(err: CredentialsError) -> DisableEnhancedMonitoringError { DisableEnhancedMonitoringError::Credentials(err) } } impl From<HttpDispatchError> for DisableEnhancedMonitoringError { fn from(err: HttpDispatchError) -> DisableEnhancedMonitoringError { DisableEnhancedMonitoringError::HttpDispatch(err) } } impl From<io::Error> for DisableEnhancedMonitoringError { fn from(err: io::Error) -> DisableEnhancedMonitoringError { DisableEnhancedMonitoringError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DisableEnhancedMonitoringError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DisableEnhancedMonitoringError { fn description(&self) -> &str { match *self { DisableEnhancedMonitoringError::InvalidArgument(ref cause) => cause, DisableEnhancedMonitoringError::LimitExceeded(ref cause) => cause, DisableEnhancedMonitoringError::ResourceInUse(ref cause) => cause, DisableEnhancedMonitoringError::ResourceNotFound(ref cause) => cause, DisableEnhancedMonitoringError::Validation(ref cause) => cause, DisableEnhancedMonitoringError::Credentials(ref err) => err.description(), DisableEnhancedMonitoringError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } DisableEnhancedMonitoringError::Unknown(ref cause) => cause, } } } /// Errors returned by EnableEnhancedMonitoring #[derive(Debug, PartialEq)] pub enum EnableEnhancedMonitoringError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. 
/// The stream might not be specified correctly.</p>
ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl EnableEnhancedMonitoringError { pub fn from_body(body: &str) -> EnableEnhancedMonitoringError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { EnableEnhancedMonitoringError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { EnableEnhancedMonitoringError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { EnableEnhancedMonitoringError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { EnableEnhancedMonitoringError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { EnableEnhancedMonitoringError::Validation(error_message.to_string()) } _ => EnableEnhancedMonitoringError::Unknown(String::from(body)), } } Err(_) => EnableEnhancedMonitoringError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for EnableEnhancedMonitoringError { fn from(err: serde_json::error::Error) -> EnableEnhancedMonitoringError { EnableEnhancedMonitoringError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for EnableEnhancedMonitoringError { fn from(err: CredentialsError) -> EnableEnhancedMonitoringError { EnableEnhancedMonitoringError::Credentials(err) } } impl From<HttpDispatchError> for EnableEnhancedMonitoringError { fn from(err: HttpDispatchError) -> EnableEnhancedMonitoringError { EnableEnhancedMonitoringError::HttpDispatch(err) } } impl From<io::Error> for EnableEnhancedMonitoringError { fn from(err: io::Error) -> EnableEnhancedMonitoringError { EnableEnhancedMonitoringError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for EnableEnhancedMonitoringError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for EnableEnhancedMonitoringError { fn description(&self) -> &str { match *self { EnableEnhancedMonitoringError::InvalidArgument(ref cause) => cause, EnableEnhancedMonitoringError::LimitExceeded(ref cause) => cause, EnableEnhancedMonitoringError::ResourceInUse(ref cause) => cause, EnableEnhancedMonitoringError::ResourceNotFound(ref cause) => cause, EnableEnhancedMonitoringError::Validation(ref cause) => cause, EnableEnhancedMonitoringError::Credentials(ref err) => err.description(), EnableEnhancedMonitoringError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } EnableEnhancedMonitoringError::Unknown(ref cause) => cause, } } } /// Errors returned by GetRecords #[derive(Debug, PartialEq)] pub enum GetRecordsError { /// <p>The provided iterator exceeds the maximum age allowed.</p> ExpiredIterator(String), /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used.
/// For more information, see the returned message.</p>
InvalidArgument(String), /// <p>The ciphertext references a key that doesn't exist or that you don't have access to.</p> KMSAccessDenied(String), /// <p>The request was rejected because the specified customer master key (CMK) isn't enabled.</p> KMSDisabled(String), /// <p>The request was rejected because the state of the specified resource isn't valid for this request. For more information, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html">How Key State Affects Use of a Customer Master Key</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSInvalidState(String), /// <p>The request was rejected because the specified entity or resource can't be found.</p> KMSNotFound(String), /// <p>The AWS access key ID needs a subscription for the service.</p> KMSOptInRequired(String), /// <p>The request was denied due to request throttling. For more information about throttling, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/limits.html#requests-per-second">Limits</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSThrottling(String), /// <p>The request rate for the stream is too high, or the requested data is too large for the available throughput. Reduce the frequency or size of your requests. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>, and <a href="http://docs.aws.amazon.com/general/latest/gr/api-retries.html">Error Retries and Exponential Backoff in AWS</a> in the <i>AWS General Reference</i>.</p> ProvisionedThroughputExceeded(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided.
Unknown(String), } impl GetRecordsError { pub fn from_body(body: &str) -> GetRecordsError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "ExpiredIteratorException" => { GetRecordsError::ExpiredIterator(String::from(error_message)) } "InvalidArgumentException" => { GetRecordsError::InvalidArgument(String::from(error_message)) } "KMSAccessDeniedException" => { GetRecordsError::KMSAccessDenied(String::from(error_message)) } "KMSDisabledException" => { GetRecordsError::KMSDisabled(String::from(error_message)) } "KMSInvalidStateException" => { GetRecordsError::KMSInvalidState(String::from(error_message)) } "KMSNotFoundException" => { GetRecordsError::KMSNotFound(String::from(error_message)) } "KMSOptInRequired" => { GetRecordsError::KMSOptInRequired(String::from(error_message)) } "KMSThrottlingException" => { GetRecordsError::KMSThrottling(String::from(error_message)) } "ProvisionedThroughputExceededException" => { GetRecordsError::ProvisionedThroughputExceeded(String::from(error_message)) } "ResourceNotFoundException" => { GetRecordsError::ResourceNotFound(String::from(error_message)) } "ValidationException" => GetRecordsError::Validation(error_message.to_string()), _ => GetRecordsError::Unknown(String::from(body)), } } Err(_) => GetRecordsError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for GetRecordsError { fn from(err: serde_json::error::Error) -> GetRecordsError { GetRecordsError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for GetRecordsError { fn from(err: CredentialsError) -> GetRecordsError { GetRecordsError::Credentials(err) } } impl From<HttpDispatchError> for GetRecordsError { fn from(err: HttpDispatchError) -> GetRecordsError { GetRecordsError::HttpDispatch(err) } } impl From<io::Error> for GetRecordsError { fn from(err: io::Error) -> GetRecordsError { GetRecordsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for GetRecordsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for GetRecordsError { fn description(&self) -> &str { match *self { GetRecordsError::ExpiredIterator(ref cause) => cause, GetRecordsError::InvalidArgument(ref cause) => cause, GetRecordsError::KMSAccessDenied(ref cause) => cause, GetRecordsError::KMSDisabled(ref cause) => cause, GetRecordsError::KMSInvalidState(ref cause) => cause, GetRecordsError::KMSNotFound(ref cause) => cause, GetRecordsError::KMSOptInRequired(ref cause) => cause, GetRecordsError::KMSThrottling(ref cause) => cause, GetRecordsError::ProvisionedThroughputExceeded(ref cause) => cause, GetRecordsError::ResourceNotFound(ref cause) => cause, GetRecordsError::Validation(ref cause) => cause, GetRecordsError::Credentials(ref err) => err.description(), GetRecordsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), GetRecordsError::Unknown(ref cause) => cause, } } } /// Errors returned by GetShardIterator #[derive(Debug, PartialEq)] pub enum GetShardIteratorError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. 
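// `ProvisionedThroughputExceeded` above is the retryable case the linked AWS
// docs call out. A minimal exponential-backoff sketch, assuming some `client`
// with a synchronous `get_records` method returning
// `Result<GetRecordsOutput, GetRecordsError>` (`client` and `request` are
// placeholders, not part of this module):
//
//     use std::{thread, time::Duration};
//
//     let mut delay = Duration::from_millis(100);
//     let output = loop {
//         match client.get_records(request.clone()) {
//             Ok(out) => break out,
//             Err(GetRecordsError::ProvisionedThroughputExceeded(_)) => {
//                 thread::sleep(delay);
//                 delay *= 2; // back off exponentially, as the docs recommend
//             }
//             Err(other) => panic!("unrecoverable: {:?}", other),
//         }
//     };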
/// For more information, see the returned message.</p>
InvalidArgument(String), /// <p>The request rate for the stream is too high, or the requested data is too large for the available throughput. Reduce the frequency or size of your requests. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>, and <a href="http://docs.aws.amazon.com/general/latest/gr/api-retries.html">Error Retries and Exponential Backoff in AWS</a> in the <i>AWS General Reference</i>.</p> ProvisionedThroughputExceeded(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl GetShardIteratorError { pub fn from_body(body: &str) -> GetShardIteratorError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { GetShardIteratorError::InvalidArgument(String::from(error_message)) } "ProvisionedThroughputExceededException" => { GetShardIteratorError::ProvisionedThroughputExceeded(String::from( error_message, )) } "ResourceNotFoundException" => { GetShardIteratorError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { GetShardIteratorError::Validation(error_message.to_string()) } _ => GetShardIteratorError::Unknown(String::from(body)), } } Err(_) => GetShardIteratorError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for GetShardIteratorError { fn from(err: serde_json::error::Error) -> GetShardIteratorError { GetShardIteratorError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for GetShardIteratorError { fn from(err: CredentialsError) -> GetShardIteratorError { GetShardIteratorError::Credentials(err) } } impl From<HttpDispatchError> for GetShardIteratorError { fn from(err: HttpDispatchError) -> GetShardIteratorError { GetShardIteratorError::HttpDispatch(err) } } impl From<io::Error> for GetShardIteratorError { fn from(err: io::Error) -> GetShardIteratorError { GetShardIteratorError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for GetShardIteratorError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for GetShardIteratorError { fn description(&self) -> &str { match *self { GetShardIteratorError::InvalidArgument(ref cause) => cause, GetShardIteratorError::ProvisionedThroughputExceeded(ref cause) => cause, GetShardIteratorError::ResourceNotFound(ref cause) => cause, GetShardIteratorError::Validation(ref cause) => cause, GetShardIteratorError::Credentials(ref err) => err.description(), GetShardIteratorError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), GetShardIteratorError::Unknown(ref cause) => cause, } } } /// Errors returned by IncreaseStreamRetentionPeriod #[derive(Debug,
PartialEq)] pub enum IncreaseStreamRetentionPeriodError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl IncreaseStreamRetentionPeriodError { pub fn from_body(body: &str) -> IncreaseStreamRetentionPeriodError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { IncreaseStreamRetentionPeriodError::InvalidArgument(String::from( error_message, )) } "LimitExceededException" => IncreaseStreamRetentionPeriodError::LimitExceeded( String::from(error_message), ), "ResourceInUseException" => IncreaseStreamRetentionPeriodError::ResourceInUse( String::from(error_message), ), "ResourceNotFoundException" => { IncreaseStreamRetentionPeriodError::ResourceNotFound(String::from( error_message, )) } "ValidationException" => { IncreaseStreamRetentionPeriodError::Validation(error_message.to_string()) } _ => IncreaseStreamRetentionPeriodError::Unknown(String::from(body)), } } Err(_) => IncreaseStreamRetentionPeriodError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for IncreaseStreamRetentionPeriodError { fn from(err: serde_json::error::Error) -> IncreaseStreamRetentionPeriodError { IncreaseStreamRetentionPeriodError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for IncreaseStreamRetentionPeriodError { fn from(err: CredentialsError) -> IncreaseStreamRetentionPeriodError { IncreaseStreamRetentionPeriodError::Credentials(err) } } impl From<HttpDispatchError> for IncreaseStreamRetentionPeriodError { fn from(err: HttpDispatchError) -> IncreaseStreamRetentionPeriodError { IncreaseStreamRetentionPeriodError::HttpDispatch(err) } } impl From<io::Error> for IncreaseStreamRetentionPeriodError { fn from(err: io::Error) -> IncreaseStreamRetentionPeriodError { IncreaseStreamRetentionPeriodError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for IncreaseStreamRetentionPeriodError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for IncreaseStreamRetentionPeriodError { fn description(&self) -> &str { match *self { IncreaseStreamRetentionPeriodError::InvalidArgument(ref cause) => cause, IncreaseStreamRetentionPeriodError::LimitExceeded(ref cause) => cause, IncreaseStreamRetentionPeriodError::ResourceInUse(ref cause) => cause, 
IncreaseStreamRetentionPeriodError::ResourceNotFound(ref cause) => cause, IncreaseStreamRetentionPeriodError::Validation(ref cause) => cause, IncreaseStreamRetentionPeriodError::Credentials(ref err) => err.description(), IncreaseStreamRetentionPeriodError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } IncreaseStreamRetentionPeriodError::Unknown(ref cause) => cause, } } } /// Errors returned by ListShards #[derive(Debug, PartialEq)] pub enum ListShardsError { /// <p>The pagination token passed to the <code>ListShards</code> operation is expired. For more information, see <a>ListShardsInput$NextToken</a>.</p> ExpiredNextToken(String), /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl ListShardsError { pub fn from_body(body: &str) -> ListShardsError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "ExpiredNextTokenException" => { ListShardsError::ExpiredNextToken(String::from(error_message)) } "InvalidArgumentException" => { ListShardsError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { ListShardsError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { ListShardsError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { ListShardsError::ResourceNotFound(String::from(error_message)) } "ValidationException" => ListShardsError::Validation(error_message.to_string()), _ => ListShardsError::Unknown(String::from(body)), } } Err(_) => ListShardsError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for ListShardsError { fn from(err: serde_json::error::Error) -> ListShardsError { ListShardsError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for ListShardsError { fn from(err: CredentialsError) -> ListShardsError { ListShardsError::Credentials(err) } } impl From<HttpDispatchError> for ListShardsError { fn from(err: HttpDispatchError) -> ListShardsError { ListShardsError::HttpDispatch(err) } } impl From<io::Error> for ListShardsError { fn from(err: io::Error) -> ListShardsError { ListShardsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for ListShardsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for ListShardsError { fn description(&self) -> 
&str { match *self { ListShardsError::ExpiredNextToken(ref cause) => cause, ListShardsError::InvalidArgument(ref cause) => cause, ListShardsError::LimitExceeded(ref cause) => cause, ListShardsError::ResourceInUse(ref cause) => cause, ListShardsError::ResourceNotFound(ref cause) => cause, ListShardsError::Validation(ref cause) => cause, ListShardsError::Credentials(ref err) => err.description(), ListShardsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), ListShardsError::Unknown(ref cause) => cause, } } } /// Errors returned by ListStreams #[derive(Debug, PartialEq)] pub enum ListStreamsError { /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl ListStreamsError { pub fn from_body(body: &str) -> ListStreamsError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "LimitExceededException" => { ListStreamsError::LimitExceeded(String::from(error_message)) } "ValidationException" => { ListStreamsError::Validation(error_message.to_string()) } _ => ListStreamsError::Unknown(String::from(body)), } } Err(_) => ListStreamsError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for ListStreamsError { fn from(err: serde_json::error::Error) -> ListStreamsError { ListStreamsError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for ListStreamsError { fn from(err: CredentialsError) -> ListStreamsError { ListStreamsError::Credentials(err) } } impl From<HttpDispatchError> for ListStreamsError { fn from(err: HttpDispatchError) -> ListStreamsError { ListStreamsError::HttpDispatch(err) } } impl From<io::Error> for ListStreamsError { fn from(err: io::Error) -> ListStreamsError { ListStreamsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for ListStreamsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for ListStreamsError { fn description(&self) -> &str { match *self { ListStreamsError::LimitExceeded(ref cause) => cause, ListStreamsError::Validation(ref cause) => cause, ListStreamsError::Credentials(ref err) => err.description(), ListStreamsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), ListStreamsError::Unknown(ref cause) => cause, } } } /// Errors returned by ListTagsForStream #[derive(Debug, PartialEq)] pub enum ListTagsForStreamError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The requested resource could not be found. 
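// `ListShardsError::ExpiredNextToken` (handled above) only arises when
// paginating. A sketch of draining all shards via `next_token`, assuming a
// synchronous `list_shards` client method and this module's
// `ListShardsInput` / `ListShardsOutput` types; the loop shape is the point,
// not the exact client API. The service rejects requests that set both
// `StreamName` and `NextToken`, which is why only the first page names the
// stream.
//
//     let mut shards = Vec::new();
//     let mut token: Option<String> = None;
//     loop {
//         let input = ListShardsInput {
//             stream_name: if token.is_none() { Some("example-stream".to_string()) } else { None },
//             next_token: token.clone(),
//             ..Default::default()
//         };
//         let out = client.list_shards(input)?;
//         shards.extend(out.shards.unwrap_or_default());
//         match out.next_token {
//             Some(t) => token = Some(t),
//             None => break,
//         }
//     }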
/// The stream might not be specified correctly.</p>
ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl ListTagsForStreamError { pub fn from_body(body: &str) -> ListTagsForStreamError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { ListTagsForStreamError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { ListTagsForStreamError::LimitExceeded(String::from(error_message)) } "ResourceNotFoundException" => { ListTagsForStreamError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { ListTagsForStreamError::Validation(error_message.to_string()) } _ => ListTagsForStreamError::Unknown(String::from(body)), } } Err(_) => ListTagsForStreamError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for ListTagsForStreamError { fn from(err: serde_json::error::Error) -> ListTagsForStreamError { ListTagsForStreamError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for ListTagsForStreamError { fn from(err: CredentialsError) -> ListTagsForStreamError { ListTagsForStreamError::Credentials(err) } } impl From<HttpDispatchError> for ListTagsForStreamError { fn from(err: HttpDispatchError) -> ListTagsForStreamError { ListTagsForStreamError::HttpDispatch(err) } } impl From<io::Error> for ListTagsForStreamError { fn from(err: io::Error) -> ListTagsForStreamError { ListTagsForStreamError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for ListTagsForStreamError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for ListTagsForStreamError { fn description(&self) -> &str { match *self { ListTagsForStreamError::InvalidArgument(ref cause) => cause, ListTagsForStreamError::LimitExceeded(ref cause) => cause, ListTagsForStreamError::ResourceNotFound(ref cause) => cause, ListTagsForStreamError::Validation(ref cause) => cause, ListTagsForStreamError::Credentials(ref err) => err.description(), ListTagsForStreamError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } ListTagsForStreamError::Unknown(ref cause) => cause, } } } /// Errors returned by MergeShards #[derive(Debug, PartialEq)] pub enum MergeShardsError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found.
The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl MergeShardsError { pub fn from_body(body: &str) -> MergeShardsError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { MergeShardsError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { MergeShardsError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { MergeShardsError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { MergeShardsError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { MergeShardsError::Validation(error_message.to_string()) } _ => MergeShardsError::Unknown(String::from(body)), } } Err(_) => MergeShardsError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for MergeShardsError { fn from(err: serde_json::error::Error) -> MergeShardsError { MergeShardsError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for MergeShardsError { fn from(err: CredentialsError) -> MergeShardsError { MergeShardsError::Credentials(err) } } impl From<HttpDispatchError> for MergeShardsError { fn from(err: HttpDispatchError) -> MergeShardsError { MergeShardsError::HttpDispatch(err) } } impl From<io::Error> for MergeShardsError { fn from(err: io::Error) -> MergeShardsError { MergeShardsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for MergeShardsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for MergeShardsError { fn description(&self) -> &str { match *self { MergeShardsError::InvalidArgument(ref cause) => cause, MergeShardsError::LimitExceeded(ref cause) => cause, MergeShardsError::ResourceInUse(ref cause) => cause, MergeShardsError::ResourceNotFound(ref cause) => cause, MergeShardsError::Validation(ref cause) => cause, MergeShardsError::Credentials(ref err) => err.description(), MergeShardsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), MergeShardsError::Unknown(ref cause) => cause, } } } /// Errors returned by PutRecord #[derive(Debug, PartialEq)] pub enum PutRecordError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The ciphertext references a key that doesn't exist or that you don't have access to.</p> KMSAccessDenied(String), /// <p>The request was rejected because the specified customer master key (CMK) isn't enabled.</p> KMSDisabled(String), /// <p>The request was rejected because the state of the specified resource isn't valid for this request. 
For more information, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html">How Key State Affects Use of a Customer Master Key</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSInvalidState(String), /// <p>The request was rejected because the specified entity or resource can't be found.</p> KMSNotFound(String), /// <p>The AWS access key ID needs a subscription for the service.</p> KMSOptInRequired(String), /// <p>The request was denied due to request throttling. For more information about throttling, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/limits.html#requests-per-second">Limits</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSThrottling(String), /// <p>The request rate for the stream is too high, or the requested data is too large for the available throughput. Reduce the frequency or size of your requests. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>, and <a href="http://docs.aws.amazon.com/general/latest/gr/api-retries.html">Error Retries and Exponential Backoff in AWS</a> in the <i>AWS General Reference</i>.</p> ProvisionedThroughputExceeded(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl PutRecordError { pub fn from_body(body: &str) -> PutRecordError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { PutRecordError::InvalidArgument(String::from(error_message)) } "KMSAccessDeniedException" => { PutRecordError::KMSAccessDenied(String::from(error_message)) } "KMSDisabledException" => { PutRecordError::KMSDisabled(String::from(error_message)) } "KMSInvalidStateException" => { PutRecordError::KMSInvalidState(String::from(error_message)) } "KMSNotFoundException" => { PutRecordError::KMSNotFound(String::from(error_message)) } "KMSOptInRequired" => { PutRecordError::KMSOptInRequired(String::from(error_message)) } "KMSThrottlingException" => { PutRecordError::KMSThrottling(String::from(error_message)) } "ProvisionedThroughputExceededException" => { PutRecordError::ProvisionedThroughputExceeded(String::from(error_message)) } "ResourceNotFoundException" => { PutRecordError::ResourceNotFound(String::from(error_message)) } "ValidationException" => PutRecordError::Validation(error_message.to_string()), _ => PutRecordError::Unknown(String::from(body)), } } Err(_) => PutRecordError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for PutRecordError { fn from(err: serde_json::error::Error) -> PutRecordError { PutRecordError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for PutRecordError { fn from(err: CredentialsError) -> PutRecordError { 
PutRecordError::Credentials(err) } } impl From<HttpDispatchError> for PutRecordError { fn from(err: HttpDispatchError) -> PutRecordError { PutRecordError::HttpDispatch(err) } } impl From<io::Error> for PutRecordError { fn from(err: io::Error) -> PutRecordError { PutRecordError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for PutRecordError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for PutRecordError { fn description(&self) -> &str { match *self { PutRecordError::InvalidArgument(ref cause) => cause, PutRecordError::KMSAccessDenied(ref cause) => cause, PutRecordError::KMSDisabled(ref cause) => cause, PutRecordError::KMSInvalidState(ref cause) => cause, PutRecordError::KMSNotFound(ref cause) => cause, PutRecordError::KMSOptInRequired(ref cause) => cause, PutRecordError::KMSThrottling(ref cause) => cause, PutRecordError::ProvisionedThroughputExceeded(ref cause) => cause, PutRecordError::ResourceNotFound(ref cause) => cause, PutRecordError::Validation(ref cause) => cause, PutRecordError::Credentials(ref err) => err.description(), PutRecordError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), PutRecordError::Unknown(ref cause) => cause, } } } /// Errors returned by PutRecords #[derive(Debug, PartialEq)] pub enum PutRecordsError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The ciphertext references a key that doesn't exist or that you don't have access to.</p> KMSAccessDenied(String), /// <p>The request was rejected because the specified customer master key (CMK) isn't enabled.</p> KMSDisabled(String), /// <p>The request was rejected because the state of the specified resource isn't valid for this request. For more information, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html">How Key State Affects Use of a Customer Master Key</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSInvalidState(String), /// <p>The request was rejected because the specified entity or resource can't be found.</p> KMSNotFound(String), /// <p>The AWS access key ID needs a subscription for the service.</p> KMSOptInRequired(String), /// <p>The request was denied due to request throttling. For more information about throttling, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/limits.html#requests-per-second">Limits</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSThrottling(String), /// <p>The request rate for the stream is too high, or the requested data is too large for the available throughput. Reduce the frequency or size of your requests. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>, and <a href="http://docs.aws.amazon.com/general/latest/gr/api-retries.html">Error Retries and Exponential Backoff in AWS</a> in the <i>AWS General Reference</i>.</p> ProvisionedThroughputExceeded(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. 
Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl PutRecordsError { pub fn from_body(body: &str) -> PutRecordsError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { PutRecordsError::InvalidArgument(String::from(error_message)) } "KMSAccessDeniedException" => { PutRecordsError::KMSAccessDenied(String::from(error_message)) } "KMSDisabledException" => { PutRecordsError::KMSDisabled(String::from(error_message)) } "KMSInvalidStateException" => { PutRecordsError::KMSInvalidState(String::from(error_message)) } "KMSNotFoundException" => { PutRecordsError::KMSNotFound(String::from(error_message)) } "KMSOptInRequired" => { PutRecordsError::KMSOptInRequired(String::from(error_message)) } "KMSThrottlingException" => { PutRecordsError::KMSThrottling(String::from(error_message)) } "ProvisionedThroughputExceededException" => { PutRecordsError::ProvisionedThroughputExceeded(String::from(error_message)) } "ResourceNotFoundException" => { PutRecordsError::ResourceNotFound(String::from(error_message)) } "ValidationException" => PutRecordsError::Validation(error_message.to_string()), _ => PutRecordsError::Unknown(String::from(body)), } } Err(_) => PutRecordsError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for PutRecordsError { fn from(err: serde_json::error::Error) -> PutRecordsError { PutRecordsError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for PutRecordsError { fn from(err: CredentialsError) -> PutRecordsError { PutRecordsError::Credentials(err) } } impl From<HttpDispatchError> for PutRecordsError { fn from(err: HttpDispatchError) -> PutRecordsError { PutRecordsError::HttpDispatch(err) } } impl From<io::Error> for PutRecordsError { fn from(err: io::Error) -> PutRecordsError { PutRecordsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for PutRecordsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for PutRecordsError { fn description(&self) -> &str { match *self { PutRecordsError::InvalidArgument(ref cause) => cause, PutRecordsError::KMSAccessDenied(ref cause) => cause, PutRecordsError::KMSDisabled(ref cause) => cause, PutRecordsError::KMSInvalidState(ref cause) => cause, PutRecordsError::KMSNotFound(ref cause) => cause, PutRecordsError::KMSOptInRequired(ref cause) => cause, PutRecordsError::KMSThrottling(ref cause) => cause, PutRecordsError::ProvisionedThroughputExceeded(ref cause) => cause, PutRecordsError::ResourceNotFound(ref cause) => cause, PutRecordsError::Validation(ref cause) => cause, PutRecordsError::Credentials(ref err) => err.description(), PutRecordsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), PutRecordsError::Unknown(ref cause) => cause, } } } /// Errors returned by RemoveTagsFromStream #[derive(Debug, PartialEq)] pub enum RemoveTagsFromStreamError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. 
For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl RemoveTagsFromStreamError { pub fn from_body(body: &str) -> RemoveTagsFromStreamError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { RemoveTagsFromStreamError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { RemoveTagsFromStreamError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { RemoveTagsFromStreamError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { RemoveTagsFromStreamError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { RemoveTagsFromStreamError::Validation(error_message.to_string()) } _ => RemoveTagsFromStreamError::Unknown(String::from(body)), } } Err(_) => RemoveTagsFromStreamError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for RemoveTagsFromStreamError { fn from(err: serde_json::error::Error) -> RemoveTagsFromStreamError { RemoveTagsFromStreamError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for RemoveTagsFromStreamError { fn from(err: CredentialsError) -> RemoveTagsFromStreamError { RemoveTagsFromStreamError::Credentials(err) } } impl From<HttpDispatchError> for RemoveTagsFromStreamError { fn from(err: HttpDispatchError) -> RemoveTagsFromStreamError { RemoveTagsFromStreamError::HttpDispatch(err) } } impl From<io::Error> for RemoveTagsFromStreamError { fn from(err: io::Error) -> RemoveTagsFromStreamError { RemoveTagsFromStreamError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for RemoveTagsFromStreamError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for RemoveTagsFromStreamError { fn description(&self) -> &str { match *self { RemoveTagsFromStreamError::InvalidArgument(ref cause) => cause, RemoveTagsFromStreamError::LimitExceeded(ref cause) => cause, RemoveTagsFromStreamError::ResourceInUse(ref cause) => cause, RemoveTagsFromStreamError::ResourceNotFound(ref cause) => cause, RemoveTagsFromStreamError::Validation(ref cause) => cause, RemoveTagsFromStreamError::Credentials(ref err) => err.description(), RemoveTagsFromStreamError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } RemoveTagsFromStreamError::Unknown(ref cause) => cause, } } } /// Errors returned by SplitShard #[derive(Debug, 
PartialEq)] pub enum SplitShardError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl SplitShardError { pub fn from_body(body: &str) -> SplitShardError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { SplitShardError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { SplitShardError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { SplitShardError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { SplitShardError::ResourceNotFound(String::from(error_message)) } "ValidationException" => SplitShardError::Validation(error_message.to_string()), _ => SplitShardError::Unknown(String::from(body)), } } Err(_) => SplitShardError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for SplitShardError { fn from(err: serde_json::error::Error) -> SplitShardError { SplitShardError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for SplitShardError { fn from(err: CredentialsError) -> SplitShardError { SplitShardError::Credentials(err) } } impl From<HttpDispatchError> for SplitShardError { fn from(err: HttpDispatchError) -> SplitShardError { SplitShardError::HttpDispatch(err) } } impl From<io::Error> for SplitShardError { fn from(err: io::Error) -> SplitShardError { SplitShardError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for SplitShardError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for SplitShardError { fn description(&self) -> &str { match *self { SplitShardError::InvalidArgument(ref cause) => cause, SplitShardError::LimitExceeded(ref cause) => cause, SplitShardError::ResourceInUse(ref cause) => cause, SplitShardError::ResourceNotFound(ref cause) => cause, SplitShardError::Validation(ref cause) => cause, SplitShardError::Credentials(ref err) => err.description(), SplitShardError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), SplitShardError::Unknown(ref cause) => cause, } } } /// Errors returned by StartStreamEncryption #[derive(Debug, PartialEq)] pub enum StartStreamEncryptionError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. 
For more information, see the returned message.</p> InvalidArgument(String), /// <p>The ciphertext references a key that doesn't exist or that you don't have access to.</p> KMSAccessDenied(String), /// <p>The request was rejected because the specified customer master key (CMK) isn't enabled.</p> KMSDisabled(String), /// <p>The request was rejected because the state of the specified resource isn't valid for this request. For more information, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html">How Key State Affects Use of a Customer Master Key</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSInvalidState(String), /// <p>The request was rejected because the specified entity or resource can't be found.</p> KMSNotFound(String), /// <p>The AWS access key ID needs a subscription for the service.</p> KMSOptInRequired(String), /// <p>The request was denied due to request throttling. For more information about throttling, see <a href="http://docs.aws.amazon.com/kms/latest/developerguide/limits.html#requests-per-second">Limits</a> in the <i>AWS Key Management Service Developer Guide</i>.</p> KMSThrottling(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(String), } impl StartStreamEncryptionError { pub fn from_body(body: &str) -> StartStreamEncryptionError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { StartStreamEncryptionError::InvalidArgument(String::from(error_message)) } "KMSAccessDeniedException" => { StartStreamEncryptionError::KMSAccessDenied(String::from(error_message)) } "KMSDisabledException" => { StartStreamEncryptionError::KMSDisabled(String::from(error_message)) } "KMSInvalidStateException" => { StartStreamEncryptionError::KMSInvalidState(String::from(error_message)) } "KMSNotFoundException" => { StartStreamEncryptionError::KMSNotFound(String::from(error_message)) } "KMSOptInRequired" => { StartStreamEncryptionError::KMSOptInRequired(String::from(error_message)) } "KMSThrottlingException" => { StartStreamEncryptionError::KMSThrottling(String::from(error_message)) } "LimitExceededException" => { StartStreamEncryptionError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { StartStreamEncryptionError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { StartStreamEncryptionError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { StartStreamEncryptionError::Validation(error_message.to_string()) } _ => StartStreamEncryptionError::Unknown(String::from(body)), } } Err(_) => StartStreamEncryptionError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for StartStreamEncryptionError { fn from(err: serde_json::error::Error) -> StartStreamEncryptionError { StartStreamEncryptionError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for StartStreamEncryptionError { fn from(err: CredentialsError) -> StartStreamEncryptionError { StartStreamEncryptionError::Credentials(err) } } impl From<HttpDispatchError> for StartStreamEncryptionError { fn from(err: HttpDispatchError) -> StartStreamEncryptionError { StartStreamEncryptionError::HttpDispatch(err) } } impl From<io::Error> for StartStreamEncryptionError { fn from(err: io::Error) -> StartStreamEncryptionError { StartStreamEncryptionError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for StartStreamEncryptionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for StartStreamEncryptionError { fn description(&self) -> &str { match *self { StartStreamEncryptionError::InvalidArgument(ref cause) => cause, StartStreamEncryptionError::KMSAccessDenied(ref cause) => cause, StartStreamEncryptionError::KMSDisabled(ref cause) => cause, StartStreamEncryptionError::KMSInvalidState(ref cause) => cause, StartStreamEncryptionError::KMSNotFound(ref cause) => cause, StartStreamEncryptionError::KMSOptInRequired(ref cause) => cause, StartStreamEncryptionError::KMSThrottling(ref cause) => cause, StartStreamEncryptionError::LimitExceeded(ref cause) => cause, StartStreamEncryptionError::ResourceInUse(ref cause) => cause, StartStreamEncryptionError::ResourceNotFound(ref cause) => cause, StartStreamEncryptionError::Validation(ref cause) => cause, StartStreamEncryptionError::Credentials(ref err) => err.description(), StartStreamEncryptionError::HttpDispatch(ref 
dispatch_error) => { dispatch_error.description() } StartStreamEncryptionError::Unknown(ref cause) => cause, } } } /// Errors returned by StopStreamEncryption #[derive(Debug, PartialEq)] pub enum StopStreamEncryptionError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl StopStreamEncryptionError { pub fn from_body(body: &str) -> StopStreamEncryptionError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { StopStreamEncryptionError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { StopStreamEncryptionError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { StopStreamEncryptionError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { StopStreamEncryptionError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { StopStreamEncryptionError::Validation(error_message.to_string()) } _ => StopStreamEncryptionError::Unknown(String::from(body)), } } Err(_) => StopStreamEncryptionError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for StopStreamEncryptionError { fn from(err: serde_json::error::Error) -> StopStreamEncryptionError { StopStreamEncryptionError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for StopStreamEncryptionError { fn from(err: CredentialsError) -> StopStreamEncryptionError { StopStreamEncryptionError::Credentials(err) } } impl From<HttpDispatchError> for StopStreamEncryptionError { fn from(err: HttpDispatchError) -> StopStreamEncryptionError { StopStreamEncryptionError::HttpDispatch(err) } } impl From<io::Error> for StopStreamEncryptionError { fn from(err: io::Error) -> StopStreamEncryptionError { StopStreamEncryptionError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for StopStreamEncryptionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for StopStreamEncryptionError { fn description(&self) -> &str { match *self { StopStreamEncryptionError::InvalidArgument(ref cause) => cause, StopStreamEncryptionError::LimitExceeded(ref cause) => cause, StopStreamEncryptionError::ResourceInUse(ref cause) => cause, StopStreamEncryptionError::ResourceNotFound(ref cause) => cause, 
StopStreamEncryptionError::Validation(ref cause) => cause, StopStreamEncryptionError::Credentials(ref err) => err.description(), StopStreamEncryptionError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } StopStreamEncryptionError::Unknown(ref cause) => cause, } } } /// Errors returned by UpdateShardCount #[derive(Debug, PartialEq)] pub enum UpdateShardCountError { /// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used. For more information, see the returned message.</p> InvalidArgument(String), /// <p>The requested resource exceeds the maximum number allowed, or the number of concurrent stream requests exceeds the maximum number allowed. </p> LimitExceeded(String), /// <p>The resource is not available for this operation. For successful operation, the resource must be in the <code>ACTIVE</code> state.</p> ResourceInUse(String), /// <p>The requested resource could not be found. The stream might not be specified correctly.</p> ResourceNotFound(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(String), } impl UpdateShardCountError { pub fn from_body(body: &str) -> UpdateShardCountError { match from_str::<SerdeJsonValue>(body) { Ok(json) => { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(body); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "InvalidArgumentException" => { UpdateShardCountError::InvalidArgument(String::from(error_message)) } "LimitExceededException" => { UpdateShardCountError::LimitExceeded(String::from(error_message)) } "ResourceInUseException" => { UpdateShardCountError::ResourceInUse(String::from(error_message)) } "ResourceNotFoundException" => { UpdateShardCountError::ResourceNotFound(String::from(error_message)) } "ValidationException" => { UpdateShardCountError::Validation(error_message.to_string()) } _ => UpdateShardCountError::Unknown(String::from(body)), } } Err(_) => UpdateShardCountError::Unknown(String::from(body)), } } } impl From<serde_json::error::Error> for UpdateShardCountError { fn from(err: serde_json::error::Error) -> UpdateShardCountError { UpdateShardCountError::Unknown(err.description().to_string()) } } impl From<CredentialsError> for UpdateShardCountError { fn from(err: CredentialsError) -> UpdateShardCountError { UpdateShardCountError::Credentials(err) } } impl From<HttpDispatchError> for UpdateShardCountError { fn from(err: HttpDispatchError) -> UpdateShardCountError { UpdateShardCountError::HttpDispatch(err) } } impl From<io::Error> for UpdateShardCountError { fn from(err: io::Error) -> UpdateShardCountError { UpdateShardCountError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for UpdateShardCountError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for UpdateShardCountError { fn description(&self) -> &str { match *self { UpdateShardCountError::InvalidArgument(ref cause) => cause, UpdateShardCountError::LimitExceeded(ref cause) => cause, UpdateShardCountError::ResourceInUse(ref cause) => cause, 
            UpdateShardCountError::ResourceNotFound(ref cause) => cause,
            UpdateShardCountError::Validation(ref cause) => cause,
            UpdateShardCountError::Credentials(ref err) => err.description(),
            UpdateShardCountError::HttpDispatch(ref dispatch_error) => {
                dispatch_error.description()
            }
            UpdateShardCountError::Unknown(ref cause) => cause,
        }
    }
}
/// Trait representing the capabilities of the Kinesis API. Kinesis clients implement this trait.
pub trait Kinesis {
    /// <p>Adds or updates tags for the specified Kinesis data stream. Each stream can have up to 10 tags.</p> <p>If tags have already been assigned to the stream, <code>AddTagsToStream</code> overwrites any existing tags that correspond to the specified tag keys.</p> <p> <a>AddTagsToStream</a> has a limit of five transactions per second per account.</p>
    fn add_tags_to_stream(
        &self,
        input: AddTagsToStreamInput,
    ) -> RusotoFuture<(), AddTagsToStreamError>;

    /// <p>Creates a Kinesis data stream. A stream captures and transports data records that are continuously emitted from different data sources or <i>producers</i>. Scale-out within a stream is explicitly supported by means of shards, which are uniquely identified groups of data records in a stream.</p> <p>You specify and control the number of shards that a stream is composed of. Each shard can support reads up to five transactions per second, up to a maximum data read total of 2 MB per second. Each shard can support writes up to 1,000 records per second, up to a maximum data write total of 1 MB per second. If the amount of data input increases or decreases, you can add or remove shards.</p> <p>The stream name identifies the stream. The name is scoped to the AWS account used by the application. It is also scoped by AWS Region. That is, two streams in two different accounts can have the same name, and two streams in the same account, but in two different Regions, can have the same name.</p> <p> <code>CreateStream</code> is an asynchronous operation. Upon receiving a <code>CreateStream</code> request, Kinesis Data Streams immediately returns and sets the stream status to <code>CREATING</code>. After the stream is created, Kinesis Data Streams sets the stream status to <code>ACTIVE</code>. You should perform read and write operations only on an <code>ACTIVE</code> stream. </p> <p>You receive a <code>LimitExceededException</code> when making a <code>CreateStream</code> request when you try to do one of the following:</p> <ul> <li> <p>Have more than five streams in the <code>CREATING</code> state at any point in time.</p> </li> <li> <p>Create more shards than are authorized for your account.</p> </li> </ul> <p>For the default shard limit for an AWS account, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Amazon Kinesis Data Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>. To increase this limit, <a href="http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html">contact AWS Support</a>.</p> <p>You can use <code>DescribeStream</code> to check the stream status, which is returned in <code>StreamStatus</code>.</p> <p> <a>CreateStream</a> has a limit of five transactions per second per account.</p>
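    ///
    /// # Example
    ///
    /// A minimal sketch, not part of the generated AWS documentation. It assumes a
    /// rusoto version where `KinesisClient::new(Region)` is available, and it blocks
    /// on the returned future with `futures::Future::wait` for brevity; the stream
    /// name is a placeholder.
    ///
    /// ```no_run
    /// use futures::Future;
    /// use rusoto_core::Region;
    /// use rusoto_kinesis::{CreateStreamInput, Kinesis, KinesisClient};
    ///
    /// let client = KinesisClient::new(Region::UsEast1);
    /// // Ask for a two-shard stream. CreateStream returns once the request is
    /// // accepted; the stream itself stays in CREATING until provisioning finishes.
    /// client
    ///     .create_stream(CreateStreamInput {
    ///         stream_name: "example-stream".to_string(),
    ///         shard_count: 2,
    ///     })
    ///     .wait()
    ///     .expect("CreateStream failed");
    /// ```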
    fn create_stream(&self, input: CreateStreamInput) -> RusotoFuture<(), CreateStreamError>;

    /// <p>Decreases the Kinesis data stream's retention period, which is the length of time data records are accessible after they are added to the stream. The minimum value of a stream's retention period is 24 hours.</p> <p>This operation may result in lost data. For example, if the stream's retention period is 48 hours and is decreased to 24 hours, any data already in the stream that is older than 24 hours is inaccessible.</p>
    fn decrease_stream_retention_period(
        &self,
        input: DecreaseStreamRetentionPeriodInput,
    ) -> RusotoFuture<(), DecreaseStreamRetentionPeriodError>;

    /// <p>Deletes a Kinesis data stream and all its shards and data. You must shut down any applications that are operating on the stream before you delete the stream. If an application attempts to operate on a deleted stream, it receives the exception <code>ResourceNotFoundException</code>.</p> <p>If the stream is in the <code>ACTIVE</code> state, you can delete it. After a <code>DeleteStream</code> request, the specified stream is in the <code>DELETING</code> state until Kinesis Data Streams completes the deletion.</p> <p> <b>Note:</b> Kinesis Data Streams might continue to accept data read and write operations, such as <a>PutRecord</a>, <a>PutRecords</a>, and <a>GetRecords</a>, on a stream in the <code>DELETING</code> state until the stream deletion is complete.</p> <p>When you delete a stream, any shards in that stream are also deleted, and any tags are dissociated from the stream.</p> <p>You can use the <a>DescribeStream</a> operation to check the state of the stream, which is returned in <code>StreamStatus</code>.</p> <p> <a>DeleteStream</a> has a limit of five transactions per second per account.</p>
    fn delete_stream(&self, input: DeleteStreamInput) -> RusotoFuture<(), DeleteStreamError>;

    /// <p>Describes the shard limits and usage for the account.</p> <p>If you update your account limits, the old limits might be returned for a few minutes.</p> <p>This operation has a limit of one transaction per second per account.</p>
    fn describe_limits(&self) -> RusotoFuture<DescribeLimitsOutput, DescribeLimitsError>;

    /// <p>Describes the specified Kinesis data stream.</p> <p>The information returned includes the stream name, Amazon Resource Name (ARN), creation time, enhanced metric configuration, and shard map. The shard map is an array of shard objects. For each shard object, there are the hash key and sequence number ranges that the shard spans, and the IDs of any earlier shards that played a role in creating the shard. Every record ingested in the stream is identified by a sequence number, which is assigned when the record is put into the stream.</p> <p>You can limit the number of shards returned by each call. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-retrieve-shards.html">Retrieving Shards from a Stream</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>There are no guarantees about the chronological order of shards returned. To process shards in chronological order, use the ID of the parent shard to track the lineage to the oldest shard.</p> <p>This operation has a limit of 10 transactions per second per account.</p>
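    ///
    /// # Example
    ///
    /// A minimal sketch, not part of the generated AWS documentation: it polls
    /// <code>StreamStatus</code> until the stream is <code>ACTIVE</code>. It assumes
    /// the input struct derives `Default` and blocks with `futures::Future::wait`.
    ///
    /// ```no_run
    /// use futures::Future;
    /// use rusoto_core::Region;
    /// use rusoto_kinesis::{DescribeStreamInput, Kinesis, KinesisClient};
    ///
    /// let client = KinesisClient::new(Region::UsEast1);
    /// loop {
    ///     let out = client
    ///         .describe_stream(DescribeStreamInput {
    ///             stream_name: "example-stream".to_string(),
    ///             ..Default::default()
    ///         })
    ///         .wait()
    ///         .expect("DescribeStream failed");
    ///     // The stream is usable only once its status reaches ACTIVE.
    ///     if out.stream_description.stream_status == "ACTIVE" {
    ///         break;
    ///     }
    ///     std::thread::sleep(std::time::Duration::from_secs(1));
    /// }
    /// ```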
    fn describe_stream(
        &self,
        input: DescribeStreamInput,
    ) -> RusotoFuture<DescribeStreamOutput, DescribeStreamError>;

    /// <p>Provides a summarized description of the specified Kinesis data stream without the shard list.</p> <p>The information returned includes the stream name, Amazon Resource Name (ARN), status, record retention period, approximate creation time, monitoring, encryption details, and open shard count. </p>
    fn describe_stream_summary(
        &self,
        input: DescribeStreamSummaryInput,
    ) -> RusotoFuture<DescribeStreamSummaryOutput, DescribeStreamSummaryError>;

    /// <p>Disables enhanced monitoring.</p>
    fn disable_enhanced_monitoring(
        &self,
        input: DisableEnhancedMonitoringInput,
    ) -> RusotoFuture<EnhancedMonitoringOutput, DisableEnhancedMonitoringError>;

    /// <p>Enables enhanced Kinesis data stream monitoring for shard-level metrics.</p>
    fn enable_enhanced_monitoring(
        &self,
        input: EnableEnhancedMonitoringInput,
    ) -> RusotoFuture<EnhancedMonitoringOutput, EnableEnhancedMonitoringError>;

    /// <p>Gets data records from a Kinesis data stream's shard.</p> <p>Specify a shard iterator using the <code>ShardIterator</code> parameter. The shard iterator specifies the position in the shard from which you want to start reading data records sequentially. If there are no records available in the portion of the shard that the iterator points to, <a>GetRecords</a> returns an empty list. It might take multiple calls to get to a portion of the shard that contains records.</p> <p>You can scale by provisioning multiple shards per stream while considering service limits (for more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Amazon Kinesis Data Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>). Your application should have one thread per shard, each reading continuously from its stream. To read from a stream continually, call <a>GetRecords</a> in a loop. Use <a>GetShardIterator</a> to get the shard iterator to specify in the first <a>GetRecords</a> call. <a>GetRecords</a> returns a new shard iterator in <code>NextShardIterator</code>. Specify the shard iterator returned in <code>NextShardIterator</code> in subsequent calls to <a>GetRecords</a>. If the shard has been closed, the shard iterator can't return more data and <a>GetRecords</a> returns <code>null</code> in <code>NextShardIterator</code>. You can terminate the loop when the shard is closed, or when the shard iterator reaches the record with the sequence number or other attribute that marks it as the last record to process.</p> <p>Each data record can be up to 1 MB in size, and each shard can read up to 2 MB per second. You can ensure that your calls don't exceed the maximum supported size or throughput by using the <code>Limit</code> parameter to specify the maximum number of records that <a>GetRecords</a> can return. Consider your average record size when determining this limit.</p> <p>The size of the data returned by <a>GetRecords</a> varies depending on the utilization of the shard. The maximum size of data that <a>GetRecords</a> can return is 10 MB. If a call returns this amount of data, subsequent calls made within the next five seconds throw <code>ProvisionedThroughputExceededException</code>. If there is insufficient provisioned throughput on the stream, subsequent calls made within the next one second throw <code>ProvisionedThroughputExceededException</code>. <a>GetRecords</a> won't return any data when it throws an exception. For this reason, we recommend that you wait one second between calls to <a>GetRecords</a>; however, it's possible that the application will get exceptions for longer than 1 second.</p> <p>To detect whether the application is falling behind in processing, you can use the <code>MillisBehindLatest</code> response attribute. You can also monitor the stream using CloudWatch metrics and other mechanisms (see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/monitoring.html">Monitoring</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>).</p> <p>Each Amazon Kinesis record includes a value, <code>ApproximateArrivalTimestamp</code>, that is set when a stream successfully receives and stores a record. This is commonly referred to as a server-side time stamp, whereas a client-side time stamp is set when a data producer creates or sends the record to a stream (a data producer is any data source putting data records into a stream, for example with <a>PutRecords</a>). The time stamp has millisecond precision. There are no guarantees about the time stamp accuracy, or that the time stamp is always increasing. For example, records in a shard or across a stream might have time stamps that are out of order.</p>
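    ///
    /// # Example
    ///
    /// A minimal read loop, not part of the generated AWS documentation: it follows
    /// <code>NextShardIterator</code> as described above, starting from
    /// <code>TRIM_HORIZON</code>. It assumes the input structs derive `Default`, a
    /// placeholder shard ID, and blocks with `futures::Future::wait` for brevity.
    ///
    /// ```no_run
    /// use futures::Future;
    /// use rusoto_core::Region;
    /// use rusoto_kinesis::{GetRecordsInput, GetShardIteratorInput, Kinesis, KinesisClient};
    ///
    /// let client = KinesisClient::new(Region::UsEast1);
    /// // Get the first iterator for the shard, positioned at the oldest record.
    /// let mut iterator = client
    ///     .get_shard_iterator(GetShardIteratorInput {
    ///         stream_name: "example-stream".to_string(),
    ///         shard_id: "shardId-000000000000".to_string(),
    ///         shard_iterator_type: "TRIM_HORIZON".to_string(),
    ///         ..Default::default()
    ///     })
    ///     .wait()
    ///     .expect("GetShardIterator failed")
    ///     .shard_iterator;
    /// // Follow NextShardIterator; it becomes None once the shard is closed.
    /// while let Some(it) = iterator {
    ///     let out = client
    ///         .get_records(GetRecordsInput {
    ///             shard_iterator: it,
    ///             ..Default::default()
    ///         })
    ///         .wait()
    ///         .expect("GetRecords failed");
    ///     println!("got {} records", out.records.len());
    ///     iterator = out.next_shard_iterator;
    ///     // Per the guidance above, wait about one second between calls.
    ///     std::thread::sleep(std::time::Duration::from_secs(1));
    /// }
    /// ```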
    fn get_records(
        &self,
        input: GetRecordsInput,
    ) -> RusotoFuture<GetRecordsOutput, GetRecordsError>;

    /// <p>Gets an Amazon Kinesis shard iterator. A shard iterator expires five minutes after it is returned to the requester.</p> <p>A shard iterator specifies the shard position from which to start reading data records sequentially. The position is specified using the sequence number of a data record in a shard. A sequence number is the identifier associated with every record ingested in the stream, and is assigned when a record is put into the stream. Each stream has one or more shards.</p> <p>You must specify the shard iterator type. For example, you can set the <code>ShardIteratorType</code> parameter to read exactly from the position denoted by a specific sequence number by using the <code>AT_SEQUENCE_NUMBER</code> shard iterator type. Alternatively, the parameter can read right after the sequence number by using the <code>AFTER_SEQUENCE_NUMBER</code> shard iterator type, using sequence numbers returned by earlier calls to <a>PutRecord</a>, <a>PutRecords</a>, <a>GetRecords</a>, or <a>DescribeStream</a>. In the request, you can specify the shard iterator type <code>AT_TIMESTAMP</code> to read records from an arbitrary point in time, <code>TRIM_HORIZON</code> to cause <code>ShardIterator</code> to point to the last untrimmed record in the shard in the system (the oldest data record in the shard), or <code>LATEST</code> so that you always read the most recent data in the shard. </p> <p>When you read repeatedly from a stream, use a <a>GetShardIterator</a> request to get the first shard iterator for use in your first <a>GetRecords</a> request; for subsequent reads, use the shard iterator returned by the <a>GetRecords</a> request in <code>NextShardIterator</code>. A new shard iterator is returned by every <a>GetRecords</a> request in <code>NextShardIterator</code>, which you use in the <code>ShardIterator</code> parameter of the next <a>GetRecords</a> request. </p> <p>If a <a>GetShardIterator</a> request is made too often, you receive a <code>ProvisionedThroughputExceededException</code>. For more information about throughput limits, see <a>GetRecords</a>, and <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>If the shard is closed, <a>GetShardIterator</a> returns a valid iterator for the last sequence number of the shard. A shard can be closed as a result of using <a>SplitShard</a> or <a>MergeShards</a>.</p> <p> <a>GetShardIterator</a> has a limit of five transactions per second per account per open shard.</p>
    fn get_shard_iterator(
        &self,
        input: GetShardIteratorInput,
    ) -> RusotoFuture<GetShardIteratorOutput, GetShardIteratorError>;

    /// <p>Increases the Kinesis data stream's retention period, which is the length of time data records are accessible after they are added to the stream. The maximum value of a stream's retention period is 168 hours (7 days).</p> <p>If you choose a longer stream retention period, this operation increases the time period during which records that have not yet expired are accessible. However, it does not make previous, expired data (older than the stream's previous retention period) accessible after the operation has been called. For example, if a stream's retention period is set to 24 hours and is increased to 168 hours, any data that is older than 24 hours remains inaccessible to consumer applications.</p>
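    ///
    /// # Example
    ///
    /// A minimal sketch, not part of the generated AWS documentation: it raises the
    /// retention period to 72 hours, assuming the input struct carries
    /// `retention_period_hours: i64` and `stream_name: String` fields, and blocks
    /// with `futures::Future::wait`.
    ///
    /// ```no_run
    /// use futures::Future;
    /// use rusoto_core::Region;
    /// use rusoto_kinesis::{IncreaseStreamRetentionPeriodInput, Kinesis, KinesisClient};
    ///
    /// let client = KinesisClient::new(Region::UsEast1);
    /// client
    ///     .increase_stream_retention_period(IncreaseStreamRetentionPeriodInput {
    ///         retention_period_hours: 72,
    ///         stream_name: "example-stream".to_string(),
    ///     })
    ///     .wait()
    ///     .expect("IncreaseStreamRetentionPeriod failed");
    /// ```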
    fn increase_stream_retention_period(
        &self,
        input: IncreaseStreamRetentionPeriodInput,
    ) -> RusotoFuture<(), IncreaseStreamRetentionPeriodError>;

    /// <p><p>Lists the shards in a stream and provides information about each shard.</p> <important> <p>This API is a new operation that is used by the Amazon Kinesis Client Library (KCL). If you have a fine-grained IAM policy that only allows specific operations, you must update your policy to allow calls to this API. For more information, see <a href="https://docs.aws.amazon.com/streams/latest/dev/controlling-access.html">Controlling Access to Amazon Kinesis Data Streams Resources Using IAM</a>.</p> </important></p>
    fn list_shards(
        &self,
        input: ListShardsInput,
    ) -> RusotoFuture<ListShardsOutput, ListShardsError>;

    /// <p>Lists your Kinesis data streams.</p> <p>The number of streams may be too large to return from a single call to <code>ListStreams</code>. You can limit the number of returned streams using the <code>Limit</code> parameter. If you do not specify a value for the <code>Limit</code> parameter, Kinesis Data Streams uses the default limit, which is currently 10.</p> <p>You can detect if there are more streams available to list by using the <code>HasMoreStreams</code> flag from the returned output. If there are more streams available, you can request more streams by using the name of the last stream returned by the <code>ListStreams</code> request in the <code>ExclusiveStartStreamName</code> parameter in a subsequent request to <code>ListStreams</code>. The group of stream names returned by the subsequent request is then added to the list. You can continue this process until all the stream names have been collected in the list. </p> <p> <a>ListStreams</a> has a limit of five transactions per second per account.</p>
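    ///
    /// # Example
    ///
    /// A minimal pagination sketch, not part of the generated AWS documentation: it
    /// follows <code>HasMoreStreams</code> and <code>ExclusiveStartStreamName</code>
    /// as described above, assumes the input struct derives `Default`, and blocks
    /// with `futures::Future::wait`.
    ///
    /// ```no_run
    /// use futures::Future;
    /// use rusoto_core::Region;
    /// use rusoto_kinesis::{Kinesis, KinesisClient, ListStreamsInput};
    ///
    /// let client = KinesisClient::new(Region::UsEast1);
    /// let mut names: Vec<String> = Vec::new();
    /// let mut start: Option<String> = None;
    /// loop {
    ///     let out = client
    ///         .list_streams(ListStreamsInput {
    ///             exclusive_start_stream_name: start.take(),
    ///             ..Default::default()
    ///         })
    ///         .wait()
    ///         .expect("ListStreams failed");
    ///     names.extend(out.stream_names);
    ///     if !out.has_more_streams {
    ///         break;
    ///     }
    ///     // Resume the next page after the last stream name collected so far.
    ///     start = names.last().cloned();
    /// }
    /// ```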
    fn list_streams(
        &self,
        input: ListStreamsInput,
    ) -> RusotoFuture<ListStreamsOutput, ListStreamsError>;

    /// <p>Lists the tags for the specified Kinesis data stream. This operation has a limit of five transactions per second per account.</p>
    fn list_tags_for_stream(
        &self,
        input: ListTagsForStreamInput,
    ) -> RusotoFuture<ListTagsForStreamOutput, ListTagsForStreamError>;

    /// <p>Merges two adjacent shards in a Kinesis data stream and combines them into a single shard to reduce the stream's capacity to ingest and transport data. Two shards are considered adjacent if the union of the hash key ranges for the two shards forms a contiguous set with no gaps. For example, if you have two shards, one with a hash key range of 276...381 and the other with a hash key range of 382...454, then you could merge these two shards into a single shard that would have a hash key range of 276...454. After the merge, the single child shard receives data for all hash key values covered by the two parent shards.</p> <p> <code>MergeShards</code> is called when there is a need to reduce the overall capacity of a stream because of excess capacity that is not being used. You must specify the shard to be merged and the adjacent shard for a stream. For more information about merging shards, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-resharding-merge.html">Merge Two Shards</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>If the stream is in the <code>ACTIVE</code> state, you can call <code>MergeShards</code>. If a stream is in the <code>CREATING</code>, <code>UPDATING</code>, or <code>DELETING</code> state, <code>MergeShards</code> returns a <code>ResourceInUseException</code>. If the specified stream does not exist, <code>MergeShards</code> returns a <code>ResourceNotFoundException</code>. </p> <p>You can use <a>DescribeStream</a> to check the state of the stream, which is returned in <code>StreamStatus</code>.</p> <p> <code>MergeShards</code> is an asynchronous operation. Upon receiving a <code>MergeShards</code> request, Amazon Kinesis Data Streams immediately returns a response and sets the <code>StreamStatus</code> to <code>UPDATING</code>. After the operation is completed, Kinesis Data Streams sets the <code>StreamStatus</code> to <code>ACTIVE</code>. Read and write operations continue to work while the stream is in the <code>UPDATING</code> state. </p> <p>You use <a>DescribeStream</a> to determine the shard IDs that are specified in the <code>MergeShards</code> request. </p> <p>If you try to operate on too many streams in parallel using <a>CreateStream</a>, <a>DeleteStream</a>, <code>MergeShards</code>, or <a>SplitShard</a>, you receive a <code>LimitExceededException</code>. </p> <p> <code>MergeShards</code> has a limit of five transactions per second per account.</p>
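    ///
    /// # Example
    ///
    /// A minimal sketch, not part of the generated AWS documentation, using
    /// placeholder shard IDs that would normally come from <a>DescribeStream</a>;
    /// it blocks with `futures::Future::wait` for brevity.
    ///
    /// ```no_run
    /// use futures::Future;
    /// use rusoto_core::Region;
    /// use rusoto_kinesis::{Kinesis, KinesisClient, MergeShardsInput};
    ///
    /// let client = KinesisClient::new(Region::UsEast1);
    /// // Merge two adjacent shards; the stream status moves to UPDATING until done.
    /// client
    ///     .merge_shards(MergeShardsInput {
    ///         stream_name: "example-stream".to_string(),
    ///         shard_to_merge: "shardId-000000000000".to_string(),
    ///         adjacent_shard_to_merge: "shardId-000000000001".to_string(),
    ///     })
    ///     .wait()
    ///     .expect("MergeShards failed");
    /// ```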
    fn merge_shards(&self, input: MergeShardsInput) -> RusotoFuture<(), MergeShardsError>;

    /// <p>Writes a single data record into an Amazon Kinesis data stream. Call <code>PutRecord</code> to send data into the stream for real-time ingestion and subsequent processing, one record at a time. Each shard can support writes up to 1,000 records per second, up to a maximum data write total of 1 MB per second.</p> <p>You must specify the name of the stream that captures, stores, and transports the data; a partition key; and the data blob itself.</p> <p>The data blob can be any type of data; for example, a segment from a log file, geographic/location data, website clickstream data, and so on.</p> <p>The partition key is used by Kinesis Data Streams to distribute data across shards. Kinesis Data Streams segregates the data records that belong to a stream into multiple shards, using the partition key associated with each data record to determine the shard to which a given data record belongs.</p> <p>Partition keys are Unicode strings, with a maximum length limit of 256 characters for each key. An MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards using the hash key ranges of the shards. You can override hashing the partition key to determine the shard by explicitly specifying a hash value using the <code>ExplicitHashKey</code> parameter. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-sdk.html#kinesis-using-sdk-java-add-data-to-stream">Adding Data to a Stream</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p> <code>PutRecord</code> returns the shard ID of where the data record was placed and the sequence number that was assigned to the data record.</p> <p>Sequence numbers increase over time and are specific to a shard within a stream, not across all shards within a stream. To guarantee strictly increasing ordering, write serially to a shard and use the <code>SequenceNumberForOrdering</code> parameter. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-sdk.html#kinesis-using-sdk-java-add-data-to-stream">Adding Data to a Stream</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>If a <code>PutRecord</code> request cannot be processed because of insufficient provisioned throughput on the shard involved in the request, <code>PutRecord</code> throws <code>ProvisionedThroughputExceededException</code>. </p> <p>By default, data records are accessible for 24 hours from the time that they are added to a stream. You can use <a>IncreaseStreamRetentionPeriod</a> or <a>DecreaseStreamRetentionPeriod</a> to modify this retention period.</p>
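    ///
    /// # Example
    ///
    /// A minimal sketch, not part of the generated AWS documentation: it assumes
    /// the blob field is a plain `Vec<u8>` in this rusoto version (newer releases
    /// use `bytes::Bytes`), that the input struct derives `Default`, and it blocks
    /// with `futures::Future::wait`.
    ///
    /// ```no_run
    /// use futures::Future;
    /// use rusoto_core::Region;
    /// use rusoto_kinesis::{Kinesis, KinesisClient, PutRecordInput};
    ///
    /// let client = KinesisClient::new(Region::UsEast1);
    /// let out = client
    ///     .put_record(PutRecordInput {
    ///         stream_name: "example-stream".to_string(),
    ///         partition_key: "user-42".to_string(),
    ///         data: b"hello kinesis".to_vec(),
    ///         ..Default::default()
    ///     })
    ///     .wait()
    ///     .expect("PutRecord failed");
    /// // PutRecord reports where the record landed and its sequence number.
    /// println!("shard {} seq {}", out.shard_id, out.sequence_number);
    /// ```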
For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-sdk.html#kinesis-using-sdk-java-putrecords">Adding Multiple Records with PutRecords</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>The <code>PutRecords</code> response includes an array of response <code>Records</code>. Each record in the response array directly correlates with a record in the request array using natural ordering, from the top to the bottom of the request and response. The response <code>Records</code> array always includes the same number of records as the request array.</p> <p>The response <code>Records</code> array includes both successfully and unsuccessfully processed records. Kinesis Data Streams attempts to process all records in each <code>PutRecords</code> request. A single record failure does not stop the processing of subsequent records.</p> <p>A successfully processed record includes <code>ShardId</code> and <code>SequenceNumber</code> values. The <code>ShardId</code> parameter identifies the shard in the stream where the record is stored. The <code>SequenceNumber</code> parameter is an identifier assigned to the put record, unique to all records in the stream.</p> <p>An unsuccessfully processed record includes <code>ErrorCode</code> and <code>ErrorMessage</code> values. <code>ErrorCode</code> reflects the type of error and can be one of the following values: <code>ProvisionedThroughputExceededException</code> or <code>InternalFailure</code>. <code>ErrorMessage</code> provides more detailed information about the <code>ProvisionedThroughputExceededException</code> exception including the account ID, stream name, and shard ID of the record that was throttled. For more information about partially successful responses, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-add-data-to-stream.html#kinesis-using-sdk-java-putrecords">Adding Multiple Records with PutRecords</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>By default, data records are accessible for 24 hours from the time that they are added to a stream. You can use <a>IncreaseStreamRetentionPeriod</a> or <a>DecreaseStreamRetentionPeriod</a> to modify this retention period.</p> fn put_records( &self, input: PutRecordsInput, ) -> RusotoFuture<PutRecordsOutput, PutRecordsError>; /// <p>Removes tags from the specified Kinesis data stream. Removed tags are deleted and cannot be recovered after this operation successfully completes.</p> <p>If you specify a tag that does not exist, it is ignored.</p> <p> <a>RemoveTagsFromStream</a> has a limit of five transactions per second per account.</p> fn remove_tags_from_stream( &self, input: RemoveTagsFromStreamInput, ) -> RusotoFuture<(), RemoveTagsFromStreamError>; /// <p>Splits a shard into two new shards in the Kinesis data stream, to increase the stream's capacity to ingest and transport data. <code>SplitShard</code> is called when there is a need to increase the overall capacity of a stream because of an expected increase in the volume of data records being ingested. </p> <p>You can also use <code>SplitShard</code> when a shard appears to be approaching its maximum utilization; for example, the producers sending data into the specific shard are suddenly sending more than previously anticipated. You can also call <code>SplitShard</code> to increase stream capacity, so that more Kinesis Data Streams applications can simultaneously read data from the stream for real-time processing. 
</p> <p>You must specify the shard to be split and the new hash key, which is the position in the shard where the shard gets split in two. In many cases, the new hash key might be the average of the beginning and ending hash key, but it can be any hash key value in the range being mapped into the shard. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-resharding-split.html">Split a Shard</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>You can use <a>DescribeStream</a> to determine the shard ID and hash key values for the <code>ShardToSplit</code> and <code>NewStartingHashKey</code> parameters that are specified in the <code>SplitShard</code> request.</p> <p> <code>SplitShard</code> is an asynchronous operation. Upon receiving a <code>SplitShard</code> request, Kinesis Data Streams immediately returns a response and sets the stream status to <code>UPDATING</code>. After the operation is completed, Kinesis Data Streams sets the stream status to <code>ACTIVE</code>. Read and write operations continue to work while the stream is in the <code>UPDATING</code> state. </p> <p>You can use <code>DescribeStream</code> to check the status of the stream, which is returned in <code>StreamStatus</code>. If the stream is in the <code>ACTIVE</code> state, you can call <code>SplitShard</code>. If a stream is in the <code>CREATING</code>, <code>UPDATING</code>, or <code>DELETING</code> state, <code>SplitShard</code> returns a <code>ResourceInUseException</code>.</p> <p>If the specified stream does not exist, <code>SplitShard</code> returns a <code>ResourceNotFoundException</code>. If you try to create more shards than are authorized for your account, you receive a <code>LimitExceededException</code>. </p> <p>For the default shard limit for an AWS account, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>. To increase this limit, <a href="http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html">contact AWS Support</a>.</p> <p>If you try to operate on too many streams simultaneously using <a>CreateStream</a>, <a>DeleteStream</a>, <a>MergeShards</a>, and/or <a>SplitShard</a>, you receive a <code>LimitExceededException</code>. </p> <p> <code>SplitShard</code> has a limit of five transactions per second per account.</p> fn split_shard(&self, input: SplitShardInput) -> RusotoFuture<(), SplitShardError>; /// <p>Enables or updates server-side encryption using an AWS KMS key for a specified stream. </p> <p>Starting encryption is an asynchronous operation. Upon receiving the request, Kinesis Data Streams returns immediately and sets the status of the stream to <code>UPDATING</code>. After the update is complete, Kinesis Data Streams sets the status of the stream back to <code>ACTIVE</code>. Updating or applying encryption normally takes a few seconds to complete, but it can take minutes. You can continue to read and write data to your stream while its status is <code>UPDATING</code>. Once the status of the stream is <code>ACTIVE</code>, encryption begins for records written to the stream. </p> <p>API Limits: You can successfully apply a new AWS KMS key for server-side encryption 25 times in a rolling 24-hour period.</p> <p>Note: It can take up to five seconds after the stream is in an <code>ACTIVE</code> status before all records written to the stream are encrypted.
After you enable encryption, you can verify that encryption is applied by inspecting the API response from <code>PutRecord</code> or <code>PutRecords</code>.</p> fn start_stream_encryption( &self, input: StartStreamEncryptionInput, ) -> RusotoFuture<(), StartStreamEncryptionError>; /// <p>Disables server-side encryption for a specified stream. </p> <p>Stopping encryption is an asynchronous operation. Upon receiving the request, Kinesis Data Streams returns immediately and sets the status of the stream to <code>UPDATING</code>. After the update is complete, Kinesis Data Streams sets the status of the stream back to <code>ACTIVE</code>. Stopping encryption normally takes a few seconds to complete, but it can take minutes. You can continue to read and write data to your stream while its status is <code>UPDATING</code>. Once the status of the stream is <code>ACTIVE</code>, records written to the stream are no longer encrypted by Kinesis Data Streams. </p> <p>API Limits: You can successfully disable server-side encryption 25 times in a rolling 24-hour period. </p> <p>Note: It can take up to five seconds after the stream is in an <code>ACTIVE</code> status before all records written to the stream are no longer subject to encryption. After you disable encryption, you can verify that encryption is not applied by inspecting the API response from <code>PutRecord</code> or <code>PutRecords</code>.</p> fn stop_stream_encryption( &self, input: StopStreamEncryptionInput, ) -> RusotoFuture<(), StopStreamEncryptionError>; /// <p>Updates the shard count of the specified stream to the specified number of shards.</p> <p>Updating the shard count is an asynchronous operation. Upon receiving the request, Kinesis Data Streams returns immediately and sets the status of the stream to <code>UPDATING</code>. After the update is complete, Kinesis Data Streams sets the status of the stream back to <code>ACTIVE</code>. Depending on the size of the stream, the scaling action could take a few minutes to complete. You can continue to read and write data to your stream while its status is <code>UPDATING</code>.</p> <p>To update the shard count, Kinesis Data Streams performs splits or merges on individual shards. This can cause short-lived shards to be created, in addition to the final shards. We recommend that you double or halve the shard count, as this results in the fewest number of splits or merges.</p> <p>This operation has the following limits. You cannot do the following:</p> <ul> <li> <p>Scale more than twice per rolling 24-hour period per stream</p> </li> <li> <p>Scale up to more than double your current shard count for a stream</p> </li> <li> <p>Scale down below half your current shard count for a stream</p> </li> <li> <p>Scale up to more than 500 shards in a stream</p> </li> <li> <p>Scale a stream with more than 500 shards down unless the result is less than 500 shards</p> </li> <li> <p>Scale up to more than the shard limit for your account</p> </li> </ul> <p>For the default limits for an AWS account, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.
To request an increase in the call rate limit, the shard limit for this API, or your overall shard limit, use the <a href="https://console.aws.amazon.com/support/v1#/case/create?issueType=service-limit-increase&amp;limitType=service-code-kinesis">limits form</a>.</p> fn update_shard_count( &self, input: UpdateShardCountInput, ) -> RusotoFuture<UpdateShardCountOutput, UpdateShardCountError>; } /// A client for the Kinesis API. pub struct KinesisClient<P = CredentialsProvider, D = RequestDispatcher> where P: ProvideAwsCredentials, D: DispatchSignedRequest, { inner: ClientInner<P, D>, region: region::Region, } impl KinesisClient { /// Creates a simple client backed by an implicit event loop. /// /// The client will use the default credentials provider and TLS client. /// /// See the `rusoto_core::reactor` module for more details. pub fn simple(region: region::Region) -> KinesisClient { KinesisClient::new( RequestDispatcher::default(), CredentialsProvider::default(), region, ) } } impl<P, D> KinesisClient<P, D> where P: ProvideAwsCredentials, D: DispatchSignedRequest, { pub fn new(request_dispatcher: D, credentials_provider: P, region: region::Region) -> Self { KinesisClient { inner: ClientInner::new(credentials_provider, request_dispatcher), region, } } } impl<P, D> Kinesis for KinesisClient<P, D> where P: ProvideAwsCredentials + 'static, D: DispatchSignedRequest + 'static, { /// <p>Adds or updates tags for the specified Kinesis data stream. Each stream can have up to 10 tags.</p> <p>If tags have already been assigned to the stream, <code>AddTagsToStream</code> overwrites any existing tags that correspond to the specified tag keys.</p> <p> <a>AddTagsToStream</a> has a limit of five transactions per second per account.</p> fn add_tags_to_stream( &self, input: AddTagsToStreamInput, ) -> RusotoFuture<(), AddTagsToStreamError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.AddTagsToStream"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(AddTagsToStreamError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Creates a Kinesis data stream. A stream captures and transports data records that are continuously emitted from different data sources or <i>producers</i>. Scale-out within a stream is explicitly supported by means of shards, which are uniquely identified groups of data records in a stream.</p> <p>You specify and control the number of shards that a stream is composed of. Each shard can support reads up to five transactions per second, up to a maximum data read total of 2 MB per second. Each shard can support writes up to 1,000 records per second, up to a maximum data write total of 1 MB per second. If the amount of data input increases or decreases, you can add or remove shards.</p> <p>The stream name identifies the stream. The name is scoped to the AWS account used by the application. It is also scoped by AWS Region.
That is, two streams in two different accounts can have the same name, and two streams in the same account, but in two different Regions, can have the same name.</p> <p> <code>CreateStream</code> is an asynchronous operation. Upon receiving a <code>CreateStream</code> request, Kinesis Data Streams immediately returns and sets the stream status to <code>CREATING</code>. After the stream is created, Kinesis Data Streams sets the stream status to <code>ACTIVE</code>. You should perform read and write operations only on an <code>ACTIVE</code> stream. </p> <p>You receive a <code>LimitExceededException</code> when making a <code>CreateStream</code> request when you try to do one of the following:</p> <ul> <li> <p>Have more than five streams in the <code>CREATING</code> state at any point in time.</p> </li> <li> <p>Create more shards than are authorized for your account.</p> </li> </ul> <p>For the default shard limit for an AWS account, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Amazon Kinesis Data Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>. To increase this limit, <a href="http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html">contact AWS Support</a>.</p> <p>You can use <code>DescribeStream</code> to check the stream status, which is returned in <code>StreamStatus</code>.</p> <p> <a>CreateStream</a> has a limit of five transactions per second per account.</p> fn create_stream(&self, input: CreateStreamInput) -> RusotoFuture<(), CreateStreamError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.CreateStream"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(CreateStreamError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) }
}); RusotoFuture::new(future) } /// <p>Decreases the Kinesis data stream's retention period, which is the length of time data records are accessible after they are added to the stream. The minimum value of a stream's retention period is 24 hours.</p> <p>This operation may result in lost data. For example, if the stream's retention period is 48 hours and is decreased to 24 hours, any data already in the stream that is older than 24 hours is inaccessible.</p> fn decrease_stream_retention_period( &self, input: DecreaseStreamRetentionPeriodInput, ) -> RusotoFuture<(), DecreaseStreamRetentionPeriodError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header( "x-amz-target", "Kinesis_20131202.DecreaseStreamRetentionPeriod", ); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(DecreaseStreamRetentionPeriodError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Deletes a Kinesis data stream and all its shards and data. You must shut down any applications that are operating on the stream before you delete the stream. If an application attempts to operate on a deleted stream, it receives the exception <code>ResourceNotFoundException</code>.</p> <p>If the stream is in the <code>ACTIVE</code> state, you can delete it. After a <code>DeleteStream</code> request, the specified stream is in the <code>DELETING</code> state until Kinesis Data Streams completes the deletion.</p> <p> <b>Note:</b> Kinesis Data Streams might continue to accept data read and write operations, such as <a>PutRecord</a>, <a>PutRecords</a>, and <a>GetRecords</a>, on a stream in the <code>DELETING</code> state until the stream deletion is complete.</p> <p>When you delete a stream, any shards in that stream are also deleted, and any tags are dissociated from the stream.</p> <p>You can use the <a>DescribeStream</a> operation to check the state of the stream, which is returned in <code>StreamStatus</code>.</p> <p> <a>DeleteStream</a> has a limit of five transactions per second per account.</p> fn delete_stream(&self, input: DeleteStreamInput) -> RusotoFuture<(), DeleteStreamError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.DeleteStream"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(DeleteStreamError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Describes the shard limits and usage for the account.</p> <p>If you update your account limits, the old limits might be returned for a few minutes.</p> <p>This operation has a limit of one transaction per second per account.</p> fn describe_limits(&self) -> RusotoFuture<DescribeLimitsOutput, DescribeLimitsError> 
{ let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.DescribeLimits"); request.set_payload(Some(b"{}".to_vec())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<DescribeLimitsOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(DescribeLimitsError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Describes the specified Kinesis data stream.</p> <p>The information returned includes the stream name, Amazon Resource Name (ARN), creation time, enhanced metric configuration, and shard map. The shard map is an array of shard objects. For each shard object, there are the hash key and sequence number ranges that the shard spans, and the IDs of any earlier shards that played a role in creating the shard. Every record ingested in the stream is identified by a sequence number, which is assigned when the record is put into the stream.</p> <p>You can limit the number of shards returned by each call. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-retrieve-shards.html">Retrieving Shards from a Stream</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>There are no guarantees about the chronological order of the shards returned. To process shards in chronological order, use the ID of the parent shard to track the lineage to the oldest shard.</p> <p>This operation has a limit of 10 transactions per second per account.</p> fn describe_stream( &self, input: DescribeStreamInput, ) -> RusotoFuture<DescribeStreamOutput, DescribeStreamError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.DescribeStream"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<DescribeStreamOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(DescribeStreamError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Provides a summarized description of the specified Kinesis data stream without the shard list.</p> <p>The information returned includes the stream name, Amazon Resource Name (ARN), status, record retention period, approximate creation time, monitoring, encryption details, and open shard count.
</p> fn describe_stream_summary( &self, input: DescribeStreamSummaryInput, ) -> RusotoFuture<DescribeStreamSummaryOutput, DescribeStreamSummaryError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.DescribeStreamSummary"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<DescribeStreamSummaryOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(DescribeStreamSummaryError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Disables enhanced monitoring.</p> fn disable_enhanced_monitoring( &self, input: DisableEnhancedMonitoringInput, ) -> RusotoFuture<EnhancedMonitoringOutput, DisableEnhancedMonitoringError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.DisableEnhancedMonitoring"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<EnhancedMonitoringOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(DisableEnhancedMonitoringError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Enables enhanced Kinesis data stream monitoring for shard-level metrics.</p> fn enable_enhanced_monitoring( &self, input: EnableEnhancedMonitoringInput, ) -> RusotoFuture<EnhancedMonitoringOutput, EnableEnhancedMonitoringError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.EnableEnhancedMonitoring"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<EnhancedMonitoringOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(EnableEnhancedMonitoringError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Gets data records from a Kinesis data stream's shard.</p> <p>Specify a shard iterator using the <code>ShardIterator</code> parameter. 
The shard iterator specifies the position in the shard from which you want to start reading data records sequentially. If there are no records available in the portion of the shard that the iterator points to, <a>GetRecords</a> returns an empty list. It might take multiple calls to get to a portion of the shard that contains records.</p> <p>You can scale by provisioning multiple shards per stream while considering service limits (for more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Amazon Kinesis Data Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>). Your application should have one thread per shard, each reading continuously from its stream. To read from a stream continually, call <a>GetRecords</a> in a loop. Use <a>GetShardIterator</a> to get the shard iterator to specify in the first <a>GetRecords</a> call. <a>GetRecords</a> returns a new shard iterator in <code>NextShardIterator</code>. Specify the shard iterator returned in <code>NextShardIterator</code> in subsequent calls to <a>GetRecords</a>. If the shard has been closed, the shard iterator can't return more data and <a>GetRecords</a> returns <code>null</code> in <code>NextShardIterator</code>. You can terminate the loop when the shard is closed, or when the shard iterator reaches the record with the sequence number or other attribute that marks it as the last record to process.</p> <p>Each data record can be up to 1 MB in size, and each shard can read up to 2 MB per second. You can ensure that your calls don't exceed the maximum supported size or throughput by using the <code>Limit</code> parameter to specify the maximum number of records that <a>GetRecords</a> can return. Consider your average record size when determining this limit.</p> <p>The size of the data returned by <a>GetRecords</a> varies depending on the utilization of the shard. The maximum size of data that <a>GetRecords</a> can return is 10 MB. If a call returns this amount of data, subsequent calls made within the next five seconds throw <code>ProvisionedThroughputExceededException</code>. If there is insufficient provisioned throughput on the stream, subsequent calls made within the next one second throw <code>ProvisionedThroughputExceededException</code>. <a>GetRecords</a> won't return any data when it throws an exception. For this reason, we recommend that you wait one second between calls to <a>GetRecords</a>; however, it's possible that the application will get exceptions for longer than 1 second.</p> <p>To detect whether the application is falling behind in processing, you can use the <code>MillisBehindLatest</code> response attribute. You can also monitor the stream using CloudWatch metrics and other mechanisms (see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/monitoring.html">Monitoring</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>).</p> <p>Each Amazon Kinesis record includes a value, <code>ApproximateArrivalTimestamp</code>, that is set when a stream successfully receives and stores a record. This is commonly referred to as a server-side time stamp, whereas a client-side time stamp is set when a data producer creates or sends the record to a stream (a data producer is any data source putting data records into a stream, for example with <a>PutRecords</a>). The time stamp has millisecond precision. There are no guarantees about the time stamp accuracy, or that the time stamp is always increasing. 
For example, records in a shard or across a stream might have time stamps that are out of order.</p> fn get_records( &self, input: GetRecordsInput, ) -> RusotoFuture<GetRecordsOutput, GetRecordsError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.GetRecords"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<GetRecordsOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(GetRecordsError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Gets an Amazon Kinesis shard iterator. A shard iterator expires five minutes after it is returned to the requester.</p> <p>A shard iterator specifies the shard position from which to start reading data records sequentially. The position is specified using the sequence number of a data record in a shard. A sequence number is the identifier associated with every record ingested in the stream, and is assigned when a record is put into the stream. Each stream has one or more shards.</p> <p>You must specify the shard iterator type. For example, you can set the <code>ShardIteratorType</code> parameter to read exactly from the position denoted by a specific sequence number by using the <code>AT_SEQUENCE_NUMBER</code> shard iterator type. Alternatively, the parameter can read right after the sequence number by using the <code>AFTER_SEQUENCE_NUMBER</code> shard iterator type, using sequence numbers returned by earlier calls to <a>PutRecord</a>, <a>PutRecords</a>, <a>GetRecords</a>, or <a>DescribeStream</a>. In the request, you can specify the shard iterator type <code>AT_TIMESTAMP</code> to read records from an arbitrary point in time, <code>TRIM_HORIZON</code> to cause <code>ShardIterator</code> to point to the last untrimmed record in the shard in the system (the oldest data record in the shard), or <code>LATEST</code> so that you always read the most recent data in the shard. </p> <p>When you read repeatedly from a stream, use a <a>GetShardIterator</a> request to get the first shard iterator for use in your first <a>GetRecords</a> request and for subsequent reads use the shard iterator returned by the <a>GetRecords</a> request in <code>NextShardIterator</code>. A new shard iterator is returned by every <a>GetRecords</a> request in <code>NextShardIterator</code>, which you use in the <code>ShardIterator</code> parameter of the next <a>GetRecords</a> request. </p> <p>If a <a>GetShardIterator</a> request is made too often, you receive a <code>ProvisionedThroughputExceededException</code>. For more information about throughput limits, see <a>GetRecords</a>, and <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>If the shard is closed, <a>GetShardIterator</a> returns a valid iterator for the last sequence number of the shard. 
A shard can be closed as a result of using <a>SplitShard</a> or <a>MergeShards</a>.</p> <p> <a>GetShardIterator</a> has a limit of five transactions per second per account per open shard.</p> fn get_shard_iterator( &self, input: GetShardIteratorInput, ) -> RusotoFuture<GetShardIteratorOutput, GetShardIteratorError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.GetShardIterator"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<GetShardIteratorOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(GetShardIteratorError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Increases the Kinesis data stream's retention period, which is the length of time data records are accessible after they are added to the stream. The maximum value of a stream's retention period is 168 hours (7 days).</p> <p>If you choose a longer stream retention period, this operation increases the time period during which records that have not yet expired are accessible. However, it does not make previous, expired data (older than the stream's previous retention period) accessible after the operation has been called. For example, if a stream's retention period is set to 24 hours and is increased to 168 hours, any data that is older than 24 hours remains inaccessible to consumer applications.</p> fn increase_stream_retention_period( &self, input: IncreaseStreamRetentionPeriodInput, ) -> RusotoFuture<(), IncreaseStreamRetentionPeriodError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header( "x-amz-target", "Kinesis_20131202.IncreaseStreamRetentionPeriod", ); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(IncreaseStreamRetentionPeriodError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p><p>Lists the shards in a stream and provides information about each shard.</p> <important> <p>This API is a new operation that is used by the Amazon Kinesis Client Library (KCL). If you have a fine-grained IAM policy that only allows specific operations, you must update your policy to allow calls to this API. 
For more information, see <a href="https://docs.aws.amazon.com/streams/latest/dev/controlling-access.html">Controlling Access to Amazon Kinesis Data Streams Resources Using IAM</a>.</p> </important></p> fn list_shards( &self, input: ListShardsInput, ) -> RusotoFuture<ListShardsOutput, ListShardsError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.ListShards"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<ListShardsOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(ListShardsError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Lists your Kinesis data streams.</p> <p>The number of streams may be too large to return from a single call to <code>ListStreams</code>. You can limit the number of returned streams using the <code>Limit</code> parameter. If you do not specify a value for the <code>Limit</code> parameter, Kinesis Data Streams uses the default limit, which is currently 10.</p> <p>You can detect if there are more streams available to list by using the <code>HasMoreStreams</code> flag from the returned output. If there are more streams available, you can request more streams by using the name of the last stream returned by the <code>ListStreams</code> request in the <code>ExclusiveStartStreamName</code> parameter in a subsequent request to <code>ListStreams</code>. The group of stream names returned by the subsequent request is then added to the list. You can continue this process until all the stream names have been collected in the list. </p> <p> <a>ListStreams</a> has a limit of five transactions per second per account.</p> fn list_streams( &self, input: ListStreamsInput, ) -> RusotoFuture<ListStreamsOutput, ListStreamsError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.ListStreams"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<ListStreamsOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(ListStreamsError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Lists the tags for the specified Kinesis data stream. 
This operation has a limit of five transactions per second per account.</p> fn list_tags_for_stream( &self, input: ListTagsForStreamInput, ) -> RusotoFuture<ListTagsForStreamOutput, ListTagsForStreamError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.ListTagsForStream"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<ListTagsForStreamOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(ListTagsForStreamError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Merges two adjacent shards in a Kinesis data stream and combines them into a single shard to reduce the stream's capacity to ingest and transport data. Two shards are considered adjacent if the union of the hash key ranges for the two shards form a contiguous set with no gaps. For example, if you have two shards, one with a hash key range of 276...381 and the other with a hash key range of 382...454, then you could merge these two shards into a single shard that would have a hash key range of 276...454. After the merge, the single child shard receives data for all hash key values covered by the two parent shards.</p> <p> <code>MergeShards</code> is called when there is a need to reduce the overall capacity of a stream because of excess capacity that is not being used. You must specify the shard to be merged and the adjacent shard for a stream. For more information about merging shards, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-resharding-merge.html">Merge Two Shards</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>If the stream is in the <code>ACTIVE</code> state, you can call <code>MergeShards</code>. If a stream is in the <code>CREATING</code>, <code>UPDATING</code>, or <code>DELETING</code> state, <code>MergeShards</code> returns a <code>ResourceInUseException</code>. If the specified stream does not exist, <code>MergeShards</code> returns a <code>ResourceNotFoundException</code>. </p> <p>You can use <a>DescribeStream</a> to check the state of the stream, which is returned in <code>StreamStatus</code>.</p> <p> <code>MergeShards</code> is an asynchronous operation. Upon receiving a <code>MergeShards</code> request, Amazon Kinesis Data Streams immediately returns a response and sets the <code>StreamStatus</code> to <code>UPDATING</code>. After the operation is completed, Kinesis Data Streams sets the <code>StreamStatus</code> to <code>ACTIVE</code>. Read and write operations continue to work while the stream is in the <code>UPDATING</code> state. </p> <p>You use <a>DescribeStream</a> to determine the shard IDs that are specified in the <code>MergeShards</code> request. </p> <p>If you try to operate on too many streams in parallel using <a>CreateStream</a>, <a>DeleteStream</a>, <code>MergeShards</code>, or <a>SplitShard</a>, you receive a <code>LimitExceededException</code>. 
</p> <p> <code>MergeShards</code> has a limit of five transactions per second per account.</p> fn merge_shards(&self, input: MergeShardsInput) -> RusotoFuture<(), MergeShardsError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.MergeShards"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(MergeShardsError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Writes a single data record into an Amazon Kinesis data stream. Call <code>PutRecord</code> to send data into the stream for real-time ingestion and subsequent processing, one record at a time. Each shard can support writes up to 1,000 records per second, up to a maximum data write total of 1 MB per second.</p> <p>You must specify the name of the stream that captures, stores, and transports the data; a partition key; and the data blob itself.</p> <p>The data blob can be any type of data; for example, a segment from a log file, geographic/location data, website clickstream data, and so on.</p> <p>The partition key is used by Kinesis Data Streams to distribute data across shards. Kinesis Data Streams segregates the data records that belong to a stream into multiple shards, using the partition key associated with each data record to determine the shard to which a given data record belongs.</p> <p>Partition keys are Unicode strings, with a maximum length limit of 256 characters for each key. An MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards using the hash key ranges of the shards. You can override hashing the partition key to determine the shard by explicitly specifying a hash value using the <code>ExplicitHashKey</code> parameter. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-sdk.html#kinesis-using-sdk-java-add-data-to-stream">Adding Data to a Stream</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p> <code>PutRecord</code> returns the shard ID of where the data record was placed and the sequence number that was assigned to the data record.</p> <p>Sequence numbers increase over time and are specific to a shard within a stream, not across all shards within a stream. To guarantee strictly increasing ordering, write serially to a shard and use the <code>SequenceNumberForOrdering</code> parameter. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-sdk.html#kinesis-using-sdk-java-add-data-to-stream">Adding Data to a Stream</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>If a <code>PutRecord</code> request cannot be processed because of insufficient provisioned throughput on the shard involved in the request, <code>PutRecord</code> throws <code>ProvisionedThroughputExceededException</code>. </p> <p>By default, data records are accessible for 24 hours from the time that they are added to a stream. 
You can use <a>IncreaseStreamRetentionPeriod</a> or <a>DecreaseStreamRetentionPeriod</a> to modify this retention period.</p> fn put_record(&self, input: PutRecordInput) -> RusotoFuture<PutRecordOutput, PutRecordError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.PutRecord"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<PutRecordOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(PutRecordError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Writes multiple data records into a Kinesis data stream in a single call (also referred to as a <code>PutRecords</code> request). Use this operation to send data into the stream for data ingestion and processing. </p> <p>Each <code>PutRecords</code> request can support up to 500 records. Each record in the request can be as large as 1 MB, up to a limit of 5 MB for the entire request, including partition keys. Each shard can support writes up to 1,000 records per second, up to a maximum data write total of 1 MB per second.</p> <p>You must specify the name of the stream that captures, stores, and transports the data; and an array of request <code>Records</code>, with each record in the array requiring a partition key and data blob. The record size limit applies to the total size of the partition key and data blob.</p> <p>The data blob can be any type of data; for example, a segment from a log file, geographic/location data, website clickstream data, and so on.</p> <p>The partition key is used by Kinesis Data Streams as input to a hash function that maps the partition key and associated data to a specific shard. An MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-sdk.html#kinesis-using-sdk-java-add-data-to-stream">Adding Data to a Stream</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>Each record in the <code>Records</code> array may include an optional parameter, <code>ExplicitHashKey</code>, which overrides the partition key to shard mapping. This parameter allows a data producer to determine explicitly the shard where the record is stored. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-sdk.html#kinesis-using-sdk-java-putrecords">Adding Multiple Records with PutRecords</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>The <code>PutRecords</code> response includes an array of response <code>Records</code>. Each record in the response array directly correlates with a record in the request array using natural ordering, from the top to the bottom of the request and response. 
The response <code>Records</code> array always includes the same number of records as the request array.</p> <p>The response <code>Records</code> array includes both successfully and unsuccessfully processed records. Kinesis Data Streams attempts to process all records in each <code>PutRecords</code> request. A single record failure does not stop the processing of subsequent records.</p> <p>A successfully processed record includes <code>ShardId</code> and <code>SequenceNumber</code> values. The <code>ShardId</code> parameter identifies the shard in the stream where the record is stored. The <code>SequenceNumber</code> parameter is an identifier assigned to the put record, unique to all records in the stream.</p> <p>An unsuccessfully processed record includes <code>ErrorCode</code> and <code>ErrorMessage</code> values. <code>ErrorCode</code> reflects the type of error and can be one of the following values: <code>ProvisionedThroughputExceededException</code> or <code>InternalFailure</code>. <code>ErrorMessage</code> provides more detailed information about the <code>ProvisionedThroughputExceededException</code> exception including the account ID, stream name, and shard ID of the record that was throttled. For more information about partially successful responses, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-add-data-to-stream.html#kinesis-using-sdk-java-putrecords">Adding Multiple Records with PutRecords</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>By default, data records are accessible for 24 hours from the time that they are added to a stream. You can use <a>IncreaseStreamRetentionPeriod</a> or <a>DecreaseStreamRetentionPeriod</a> to modify this retention period.</p> fn put_records( &self, input: PutRecordsInput, ) -> RusotoFuture<PutRecordsOutput, PutRecordsError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.PutRecords"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<PutRecordsOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(PutRecordsError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Removes tags from the specified Kinesis data stream. 
Removed tags are deleted and cannot be recovered after this operation successfully completes.</p> <p>If you specify a tag that does not exist, it is ignored.</p> <p> <a>RemoveTagsFromStream</a> has a limit of five transactions per second per account.</p> fn remove_tags_from_stream( &self, input: RemoveTagsFromStreamInput, ) -> RusotoFuture<(), RemoveTagsFromStreamError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.RemoveTagsFromStream"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(RemoveTagsFromStreamError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Splits a shard into two new shards in the Kinesis data stream, to increase the stream's capacity to ingest and transport data. <code>SplitShard</code> is called when there is a need to increase the overall capacity of a stream because of an expected increase in the volume of data records being ingested. </p> <p>You can also use <code>SplitShard</code> when a shard appears to be approaching its maximum utilization; for example, the producers sending data into the specific shard are suddenly sending more than previously anticipated. You can also call <code>SplitShard</code> to increase stream capacity, so that more Kinesis Data Streams applications can simultaneously read data from the stream for real-time processing. </p> <p>You must specify the shard to be split and the new hash key, which is the position in the shard where the shard gets split in two. In many cases, the new hash key might be the average of the beginning and ending hash key, but it can be any hash key value in the range being mapped into the shard. For more information, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-resharding-split.html">Split a Shard</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>.</p> <p>You can use <a>DescribeStream</a> to determine the shard ID and hash key values for the <code>ShardToSplit</code> and <code>NewStartingHashKey</code> parameters that are specified in the <code>SplitShard</code> request.</p> <p> <code>SplitShard</code> is an asynchronous operation. Upon receiving a <code>SplitShard</code> request, Kinesis Data Streams immediately returns a response and sets the stream status to <code>UPDATING</code>. After the operation is completed, Kinesis Data Streams sets the stream status to <code>ACTIVE</code>. Read and write operations continue to work while the stream is in the <code>UPDATING</code> state. </p> <p>You can use <code>DescribeStream</code> to check the status of the stream, which is returned in <code>StreamStatus</code>. If the stream is in the <code>ACTIVE</code> state, you can call <code>SplitShard</code>. If a stream is in the <code>CREATING</code>, <code>UPDATING</code>, or <code>DELETING</code> state, <code>SplitShard</code> returns a <code>ResourceInUseException</code>.</p> <p>If the specified stream does not exist, <code>SplitShard</code> returns a <code>ResourceNotFoundException</code>.
If you try to create more shards than are authorized for your account, you receive a <code>LimitExceededException</code>. </p> <p>For the default shard limit for an AWS account, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>. To increase this limit, <a href="http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html">contact AWS Support</a>.</p> <p>If you try to operate on too many streams simultaneously using <a>CreateStream</a>, <a>DeleteStream</a>, <a>MergeShards</a>, and/or <a>SplitShard</a>, you receive a <code>LimitExceededException</code>. </p> <p> <code>SplitShard</code> has a limit of five transactions per second per account.</p> fn split_shard(&self, input: SplitShardInput) -> RusotoFuture<(), SplitShardError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.SplitShard"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(SplitShardError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Enables or updates server-side encryption using an AWS KMS key for a specified stream. </p> <p>Starting encryption is an asynchronous operation. Upon receiving the request, Kinesis Data Streams returns immediately and sets the status of the stream to <code>UPDATING</code>. After the update is complete, Kinesis Data Streams sets the status of the stream back to <code>ACTIVE</code>. Updating or applying encryption normally takes a few seconds to complete, but it can take minutes. You can continue to read and write data to your stream while its status is <code>UPDATING</code>. Once the status of the stream is <code>ACTIVE</code>, encryption begins for records written to the stream. </p> <p>API Limits: You can successfully apply a new AWS KMS key for server-side encryption 25 times in a rolling 24-hour period.</p> <p>Note: It can take up to five seconds after the stream is in an <code>ACTIVE</code> status before all records written to the stream are encrypted. 
After you enable encryption, you can verify that encryption is applied by inspecting the API response from <code>PutRecord</code> or <code>PutRecords</code>.</p> fn start_stream_encryption( &self, input: StartStreamEncryptionInput, ) -> RusotoFuture<(), StartStreamEncryptionError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.StartStreamEncryption"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(StartStreamEncryptionError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Disables server-side encryption for a specified stream. </p> <p>Stopping encryption is an asynchronous operation. Upon receiving the request, Kinesis Data Streams returns immediately and sets the status of the stream to <code>UPDATING</code>. After the update is complete, Kinesis Data Streams sets the status of the stream back to <code>ACTIVE</code>. Stopping encryption normally takes a few seconds to complete, but it can take minutes. You can continue to read and write data to your stream while its status is <code>UPDATING</code>. Once the status of the stream is <code>ACTIVE</code>, records written to the stream are no longer encrypted by Kinesis Data Streams. </p> <p>API Limits: You can successfully disable server-side encryption 25 times in a rolling 24-hour period. </p> <p>Note: It can take up to five seconds after the stream is in an <code>ACTIVE</code> status before all records written to the stream are no longer subject to encryption. After you disabled encryption, you can verify that encryption is not applied by inspecting the API response from <code>PutRecord</code> or <code>PutRecords</code>.</p> fn stop_stream_encryption( &self, input: StopStreamEncryptionInput, ) -> RusotoFuture<(), StopStreamEncryptionError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.StopStreamEncryption"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(future::ok(::std::mem::drop(response))) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(StopStreamEncryptionError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } /// <p>Updates the shard count of the specified stream to the specified number of shards.</p> <p>Updating the shard count is an asynchronous operation. Upon receiving the request, Kinesis Data Streams returns immediately and sets the status of the stream to <code>UPDATING</code>. After the update is complete, Kinesis Data Streams sets the status of the stream back to <code>ACTIVE</code>. Depending on the size of the stream, the scaling action could take a few minutes to complete. 
You can continue to read and write data to your stream while its status is <code>UPDATING</code>.</p> <p>To update the shard count, Kinesis Data Streams performs splits or merges on individual shards. This can cause short-lived shards to be created, in addition to the final shards. We recommend that you double or halve the shard count, as this results in the fewest number of splits or merges.</p> <p>This operation has the following limits. You cannot do the following:</p> <ul> <li> <p>Scale more than twice per rolling 24-hour period per stream</p> </li> <li> <p>Scale up to more than double your current shard count for a stream</p> </li> <li> <p>Scale down below half your current shard count for a stream</p> </li> <li> <p>Scale up to more than 500 shards in a stream</p> </li> <li> <p>Scale a stream with more than 500 shards down unless the result is less than 500 shards</p> </li> <li> <p>Scale up to more than the shard limit for your account</p> </li> </ul> <p>For the default limits for an AWS account, see <a href="http://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the <i>Amazon Kinesis Data Streams Developer Guide</i>. To request an increase in the call rate limit, the shard limit for this API, or your overall shard limit, use the <a href="https://console.aws.amazon.com/support/v1#/case/create?issueType=service-limit-increase&amp;limitType=service-code-kinesis">limits form</a>.</p> fn update_shard_count( &self, input: UpdateShardCountInput, ) -> RusotoFuture<UpdateShardCountOutput, UpdateShardCountError> { let mut request = SignedRequest::new("POST", "kinesis", &self.region, "/"); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "Kinesis_20131202.UpdateShardCount"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); let future = self.inner.sign_and_dispatch(request, |response| { if response.status == StatusCode::Ok { future::Either::A(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<UpdateShardCountOutput>( String::from_utf8_lossy(body.as_ref()).as_ref(), ).unwrap() })) } else { future::Either::B(response.buffer().from_err().and_then(|response| { Err(UpdateShardCountError::from_body( String::from_utf8_lossy(response.body.as_ref()).as_ref(), )) })) } }); RusotoFuture::new(future) } } #[cfg(test)] mod protocol_tests {}
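The SplitShard documentation above is easiest to follow with a concrete call. The sketch below is a minimal, unverified example against this generated client; it assumes a rusoto version that provides `KinesisClient::new(Region)` and `RusotoFuture::sync()`, and the stream name, shard ID, and hash key are hypothetical.

extern crate rusoto_core;
extern crate rusoto_kinesis;

use rusoto_core::Region;
use rusoto_kinesis::{Kinesis, KinesisClient, SplitShardInput};

fn main() {
    let client = KinesisClient::new(Region::UsEast1);
    let input = SplitShardInput {
        stream_name: "example-stream".to_owned(),          // hypothetical stream
        shard_to_split: "shardId-000000000000".to_owned(), // hypothetical shard
        // Per the docs above, often the midpoint of the shard's hash key range.
        new_starting_hash_key: "170141183460469231731687303715884105728".to_owned(),
    };
    match client.split_shard(input).sync() {
        Ok(_) => println!("SplitShard accepted; stream status is now UPDATING"),
        Err(e) => eprintln!("SplitShard failed: {:?}", e),
    }
}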
i.go
package main import "log" /* Time complexity: best O(n), average O(n^2), worst O(n^2). Searching the already-sorted interval from tail to head for the insertion position achieves the best-case time complexity. Space complexity: O(1). Stable sort. */ func main() { log.Println(InsertSort([]int{2, 4, 6, 5, 3, 1, 100})) } func InsertSort(arr []int) []int { arrL := len(arr) if arrL <= 1 { return arr } for i
arrL; i++ { v := arr[i] j := i - 1 for ; j >= 0; j-- { if arr[j] > v { arr[j+1] = arr[j] } else { break } } arr[j+1] = v } return arr }
:= 1; i <
tag.go
/* The MIT License (MIT) Copyright (c) 2015 Eric Anderton */ package polymer import ( "reflect" "strings" ) /* polymer:"<alias>" polymer:"-" polymer:",alias:<alias>" polymer:",ignore" polymer:",onchange:<fn>" */ type tagInfo struct { Name string Alias string Handler string } var ( // Registry for all types to be ignored when processing Polymer tags. // Any type in this array will not participate in Polymer field handling, // will be regarded as though `polymer:"-"` was used on the field. IgnoredTagTypes = []reflect.Type { reflect.TypeOf(BasicComponent {}), reflect.TypeOf(PolymerBase {}), reflect.TypeOf(UpdateableAdapter {}), reflect.TypeOf(make(UpdateChan)), } ) func isIgnoredType(field reflect.StructField) bool { for _, typ := range IgnoredTagTypes { if typ == field.Type { return true } } return false } // provide some way to turn a field 'off' func newTagInfo(field reflect.StructField) (*tagInfo, bool) { if isIgnoredType(field)
tag := field.Tag.Get("polymer") if tag == "-" { return nil, false // explicitly not mapped } // tag defaults ti := &tagInfo { Name: field.Name, Alias: field.Name, Handler: field.Name + "Changed", } // split tag parts out parts := strings.Split(strings.TrimSpace(tag), ",") if len(parts) > 0 { // first property is the alias name iff it doesn't look like a pair startIndex := 0 alias := parts[0] if alias != "" && strings.Index(alias, ":") == -1 { ti.Alias = alias startIndex = 1 } // iterate over remaining parts for ii := startIndex; ii < len(parts); ii++ { var name, val string // break things down into <name> or <name>:<val> pairs args := strings.Split(parts[ii], ":") switch len(args) { case 0: continue // nothing to do, keep going case 1: name = strings.TrimSpace(args[0]) default: name = strings.TrimSpace(args[0]) val = strings.TrimSpace(args[1]) } // handle supported attributes switch name { case "ignore": return nil, false // explicitly not mapped case "alias": ti.Alias = val case "onchange": ti.Handler = val } } } // return completed tag info return ti, true }
{ return nil, false }
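To make the tag grammar concrete, here is a small, self-contained sketch; the ColorPicker struct and its fields are invented for illustration, and only the raw tag strings are printed (the real parsing goes through newTagInfo above).

package main

import (
	"fmt"
	"reflect"
)

// ColorPicker is a hypothetical component showing the three tag shapes
// newTagInfo accepts: a leading alias, key:value pairs, and "-" to opt out.
type ColorPicker struct {
	Color   string `polymer:"color,onchange:ColorChanged"` // alias "color", handler "ColorChanged"
	Width   int    `polymer:",alias:width"`                // pair-style alias only
	Scratch int    `polymer:"-"`                           // explicitly not mapped
}

func main() {
	t := reflect.TypeOf(ColorPicker{})
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		fmt.Printf("%s => %q\n", f.Name, f.Tag.Get("polymer"))
	}
}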
socketMVC.ts
export enum privateChatPath { HANDLE_CONNECTION = 'handleConnection', HANDLE_DISCONNECT = 'handleDisconnect', CLIENT_CONNECTED = 'client/private-chat/connected', CLIENT_ERROR = 'client/private-chat/error', CLIENT_SEND = 'client/private-chat/send', CLIENT_RECEIVE_CHAT_LOG = 'client/private-chat/receive-chatlog', CLIENT_LOGOUT_EVENT_EXECUTED = 'client/private-chat/logoutEventExecuted', SERVER_SEND = 'server/private-chat/send', SERVER_CALL_CHAT_LOG = 'server/private-chat/call-chatLog', SERVER_LOGOUT_EVENT = 'server/private-chat/logoutEvent', } export enum statusPath { HANDLE_CONNECTION = 'handleConnection', HANDLE_DISCONNECT = 'handleDisconnect', SERVER_CHANGE_STATUS = 'server/status/changeStatus', SERVER_LOGOUT_EVENT = 'server/status/logoutEvent', CLIENT_LOGOUT_EVENT_EXECUTED = 'client/status/logoutEventExecuted', CLIENT_GET_STATUS_LIST = 'client/status/getStatusList', CLIENT_CHANGE_STATUS = 'client/status/changeStatus', CLIENT_ERROR = 'client/status/error', CLIENT_LOGIN_ALERT = 'client/status/loginAlert', CLIENT_LOGOUT_ALERT = 'client/status/logoutAlert', } export enum groupChatPath { HANDLE_CONNECTION = 'handleConnection', HANDLE_DISCONNECT = 'handleDisconnect', CLIENT_CONNECTED = 'client/gc/connected', CLIENT_JOIN_ROOM_NOTICE = 'client/gc/join-room-notice', CLIENT_JOINED_GCR = 'client/gc/joined_gcr', CLIENT_READ_CHAT_LOGS = 'client/gc/read-chat-logs', CLIENT_SEND_MESSAGE = 'client/gc/send-message', CLIENT_ERROR_ALERT = 'client/gc/errorAlert', CLIENT_LOGOUT_EVENT_EXECUTED = 'client/gc/logoutEventExecuted', SERVER_CREATE_GROUP_CHAT_ROOM = 'server/gc/create-group-chat-room', SERVER_JOIN_GROUP_CHAT_ROOM = 'server/gc/join-group-chat-room', SERVER_SEND_MESSAGE = 'server/gc/send-message', SERVER_READ_CHAT_LOGS = 'server/gc/read-chat-logs', SERVER_LOGOUT_EVENT = 'server/gc/logoutEvent', TEST_ADD_NEW_CLIENT = 'test/gc/add-new-client', } export interface IMessagePayload { roomId: string; sender: number; file?: number; message: string; date: number; } export interface IGMessagePayload {
GC_PK: number; sender: number; message: string; file?: number; date: number; } export interface IErrorPacket<T> { path: string; err: Error; originalData: T; tokenError: boolean; }
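As a rough illustration of how these path constants and payload shapes are meant to be used together, here is an unverified client-side sketch; it assumes a socket.io v4 client, and the server URL and payload values are made up.

import { io } from "socket.io-client";

const socket = io("http://localhost:3000"); // hypothetical server URL

const payload: IMessagePayload = {
  roomId: "room-1",
  sender: 42,
  message: "hello",
  date: Date.now(),
};

// Emit on the server-facing path, listen on the client-facing one.
socket.emit(privateChatPath.SERVER_SEND, payload);
socket.on(privateChatPath.CLIENT_SEND, (msg: IMessagePayload) => {
  console.log("received", msg);
});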
messages_test.go
// Copyright (c) Mainflux // SPDX-License-Identifier: Apache-2.0 package postgres_test import ( "fmt" "testing" "time" "github.com/mainflux/mainflux" "github.com/mainflux/mainflux/writers/postgres" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/gofrs/uuid" ) var (
valueFields = 6 ) func TestMessageSave(t *testing.T) { messageRepo := postgres.New(db) chid, err := uuid.NewV4() require.Nil(t, err, fmt.Sprintf("got unexpected error: %s", err)) msg.Channel = chid.String() pubid, err := uuid.NewV4() require.Nil(t, err, fmt.Sprintf("got unexpected error: %s", err)) msg.Publisher = pubid.String() now := time.Now().Unix() for i := 0; i < msgsNum; i++ { // Mix possible values as well as value sum. count := i % valueFields switch count { case 0: msg.Value = &mainflux.Message_FloatValue{FloatValue: 5} case 1: msg.Value = &mainflux.Message_BoolValue{BoolValue: false} case 2: msg.Value = &mainflux.Message_StringValue{StringValue: "value"} case 3: msg.Value = &mainflux.Message_DataValue{DataValue: "base64data"} case 4: msg.ValueSum = nil case 5: msg.ValueSum = &mainflux.SumValue{Value: 45} } msg.Time = float64(now + int64(i)) err := messageRepo.Save(msg) assert.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) } }
msg = mainflux.Message{} msgsNum = 42
local.go
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package etcd import ( "fmt" "path/filepath" "github.com/golang/glog" "k8s.io/api/core/v1" kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm" kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants" "k8s.io/kubernetes/cmd/kubeadm/app/images" kubeadmutil "k8s.io/kubernetes/cmd/kubeadm/app/util" staticpodutil "k8s.io/kubernetes/cmd/kubeadm/app/util/staticpod" ) const ( etcdVolumeName = "etcd-data" certsVolumeName = "etcd-certs" ) // CreateLocalEtcdStaticPodManifestFile will write local etcd static pod manifest file. func
(manifestDir string, cfg *kubeadmapi.MasterConfiguration) error { glog.V(1).Infoln("creating local etcd static pod manifest file") // gets etcd StaticPodSpec, actualized for the current MasterConfiguration spec := GetEtcdPodSpec(cfg) // writes etcd StaticPod to disk if err := staticpodutil.WriteStaticPodToDisk(kubeadmconstants.Etcd, manifestDir, spec); err != nil { return err } fmt.Printf("[etcd] Wrote Static Pod manifest for a local etcd instance to %q\n", kubeadmconstants.GetStaticPodFilepath(kubeadmconstants.Etcd, manifestDir)) return nil } // GetEtcdPodSpec returns the etcd static Pod actualized to the context of the current MasterConfiguration // NB. GetEtcdPodSpec methods holds the information about how kubeadm creates etcd static pod manifests. func GetEtcdPodSpec(cfg *kubeadmapi.MasterConfiguration) v1.Pod { pathType := v1.HostPathDirectoryOrCreate etcdMounts := map[string]v1.Volume{ etcdVolumeName: staticpodutil.NewVolume(etcdVolumeName, cfg.Etcd.DataDir, &pathType), certsVolumeName: staticpodutil.NewVolume(certsVolumeName, cfg.CertificatesDir+"/etcd", &pathType), } return staticpodutil.ComponentPod(v1.Container{ Name: kubeadmconstants.Etcd, Command: getEtcdCommand(cfg), Image: images.GetCoreImage(kubeadmconstants.Etcd, cfg.ImageRepository, cfg.KubernetesVersion, cfg.Etcd.Image), ImagePullPolicy: cfg.ImagePullPolicy, // Mount the etcd datadir path read-write so etcd can store data in a more persistent manner VolumeMounts: []v1.VolumeMount{ staticpodutil.NewVolumeMount(etcdVolumeName, cfg.Etcd.DataDir, false), staticpodutil.NewVolumeMount(certsVolumeName, cfg.CertificatesDir+"/etcd", false), }, LivenessProbe: staticpodutil.EtcdProbe( cfg, kubeadmconstants.Etcd, 2379, cfg.CertificatesDir, kubeadmconstants.EtcdCACertName, kubeadmconstants.EtcdHealthcheckClientCertName, kubeadmconstants.EtcdHealthcheckClientKeyName, ), }, etcdMounts) } // getEtcdCommand builds the right etcd command from the given config object func getEtcdCommand(cfg *kubeadmapi.MasterConfiguration) []string { defaultArguments := map[string]string{ "listen-client-urls": "https://127.0.0.1:2379", "advertise-client-urls": "https://127.0.0.1:2379", "data-dir": cfg.Etcd.DataDir, "cert-file": filepath.Join(cfg.CertificatesDir, kubeadmconstants.EtcdServerCertName), "key-file": filepath.Join(cfg.CertificatesDir, kubeadmconstants.EtcdServerKeyName), "trusted-ca-file": filepath.Join(cfg.CertificatesDir, kubeadmconstants.EtcdCACertName), "client-cert-auth": "true", "peer-cert-file": filepath.Join(cfg.CertificatesDir, kubeadmconstants.EtcdPeerCertName), "peer-key-file": filepath.Join(cfg.CertificatesDir, kubeadmconstants.EtcdPeerKeyName), "peer-trusted-ca-file": filepath.Join(cfg.CertificatesDir, kubeadmconstants.EtcdCACertName), "peer-client-cert-auth": "true", "snapshot-count": "10000", } command := []string{"etcd"} command = append(command, kubeadmutil.BuildArgumentListFromMap(defaultArguments, cfg.Etcd.ExtraArgs)...) return command }
CreateLocalEtcdStaticPodManifestFile
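A quick sketch of the override behavior of getEtcdCommand: defaults come from the map above, and cfg.Etcd.ExtraArgs wins on conflict. The configuration values below are hypothetical, and the exact flag formatting depends on kubeadm's BuildArgumentListFromMap.

cfg := &kubeadmapi.MasterConfiguration{
	CertificatesDir: "/etc/kubernetes/pki",
	Etcd: kubeadmapi.Etcd{
		DataDir:   "/var/lib/etcd",
		ExtraArgs: map[string]string{"snapshot-count": "5000"}, // overrides the default "10000"
	},
}
cmd := getEtcdCommand(cfg)
// cmd resembles ["etcd", "--advertise-client-urls=https://127.0.0.1:2379", ...,
//                "--snapshot-count=5000"]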
multiple.go
// Copyright (c) 2016, Ben Morgan. All rights reserved. // Use of this source code is governed by an MIT license // that can be found in the LICENSE file. package errs import ( "fmt" "strings" ) // Collector collects multiple errors and returns a MultipleError // if any of the errors are non-nil. type Collector struct { Message string Errors []error } func NewCollector(msg string) *Collector { return &Collector{ Message: msg, Errors: make([]error, 0), } } // Add adds err to the list of errors, without checking // whether it is nil or not. func (c *Collector) Add(err error) { c.Errors = append(c.Errors, err) } // Collect adds err if it is non-nil. func (c *Collector) Collect(err error) { if err != nil { c.Add(err) } } // Error returns a MultipleError if it contains any errors, // otherwise it returns nil. func (c *Collector) Error() error { if len(c.Errors) > 0
return nil } type MultipleError struct { Message string Errors []error } func (e *MultipleError) Error() string { xs := make([]string, len(e.Errors)) for i, e := range e.Errors { xs[i] = e.Error() } return fmt.Sprintf("%s: %s", e.Message, strings.Join(xs, "; ")) }
{ return &MultipleError{c.Message, c.Errors} }
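A short usage sketch of the collector pattern; closeAll and the use of io.Closer are invented for illustration and assume this same package (with "io" imported).

// closeAll gathers every Close error and reports them as one value.
func closeAll(files []io.Closer) error {
	c := NewCollector("failed to close files")
	for _, f := range files {
		c.Collect(f.Close()) // nil errors are silently skipped
	}
	return c.Error() // nil if nothing failed, otherwise a *MultipleError
}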
ixnet.py
# MIT LICENSE # # Copyright 1997 - 2020 by IXIA Keysight # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from ixnetwork_restpy.base import Base from ixnetwork_restpy.files import Files from typing import List, Any, Union class Ixnet(Base): """Tracks remote clients connected using the ixNet API Service over websockets. The Ixnet class encapsulates a required ixnet resource which will be retrieved from the server every time the property is accessed. """ __slots__ = () _SDM_NAME = 'ixnet' _SDM_ATT_MAP = { 'ConnectedClients': 'connectedClients', 'IsActive': 'isActive', } _SDM_ENUM_MAP = { } def __init__(self, parent, list_op=False): super(Ixnet, self).__init__(parent, list_op) @property def
(self): # type: () -> List[str] """ Returns ------- - list(str): Returns the remote address and remote port for each of the currently connected ixNet clients. """ return self._get_attribute(self._SDM_ATT_MAP['ConnectedClients']) @property def IsActive(self): # type: () -> bool """ Returns ------- - bool: Returns true if any remote clients are connected, false if no remote clients are connected. """ return self._get_attribute(self._SDM_ATT_MAP['IsActive'])
ConnectedClients
lookalikespec.py
# Copyright 2014 Facebook, Inc. # You are hereby granted a non-exclusive, worldwide, royalty-free license to # use, copy, modify, and distribute this software in source code or binary # form for use in connection with the web services and APIs provided by # Facebook. # As with any software that integrates with the Facebook platform, your use # of this software is subject to the Facebook Developer Principles and # Policies [http://developers.facebook.com/policy/]. This copyright notice # shall be included in all copies or substantial portions of the software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. from facebook_business.adobjects.abstractobject import AbstractObject """ This class is auto-generated. For any issues or feature requests related to this class, please let us know on github and we'll fix in our codegen framework. We'll not be able to accept pull request for this class. """ class LookalikeSpec( AbstractObject, ): def __init__(self, api=None): super(LookalikeSpec, self).__init__() self._isLookalikeSpec = True self._api = api class Field(AbstractObject.Field): country = 'country' is_financial_service = 'is_financial_service' origin = 'origin' origin_event_name = 'origin_event_name' origin_event_source_name = 'origin_event_source_name' origin_event_source_type = 'origin_event_source_type' product_set_name = 'product_set_name' ratio = 'ratio' starting_ratio = 'starting_ratio' target_countries = 'target_countries' target_country_names = 'target_country_names' type = 'type' _field_types = { 'country': 'string', 'is_financial_service': 'bool', 'origin': 'list<Object>', 'origin_event_name': 'string', 'origin_event_source_name': 'string', 'origin_event_source_type': 'string', 'product_set_name': 'string', 'ratio': 'float', 'starting_ratio': 'float', 'target_countries': 'list<string>', 'target_country_names': 'list', 'type': 'string', } @classmethod def
(cls): field_enum_info = {} return field_enum_info
_get_field_enum_info
brute.py
""" Implementation of the Brute from "Revisiting the Arcade Learning Environment: Evaluation Protocols and Open Problems for General Agents" by Machado et al. https://arxiv.org/abs/1709.06009 This is an agent that uses the determinism of the environment in order to do pretty well at a number of retro games. It does not save emulator state but does rely on the same sequence of actions producing the same result when played back. """ import random import argparse import numpy as np import retro import gym EXPLORATION_PARAM = 0.005 class Frameskip(gym.Wrapper): def __init__(self, env, skip=4): super().__init__(env) self._skip = skip def reset(self): return self.env.reset() def step(self, act): total_rew = 0.0 done = None for i in range(self._skip): obs, rew, done, info = self.env.step(act) total_rew += rew if done: break return obs, total_rew, done, info class TimeLimit(gym.Wrapper): def __init__(self, env, max_episode_steps=None): super().__init__(env) self._max_episode_steps = max_episode_steps self._elapsed_steps = 0 def step(self, ac): observation, reward, done, info = self.env.step(ac) self._elapsed_steps += 1 if self._elapsed_steps >= self._max_episode_steps: done = True info['TimeLimit.truncated'] = True return observation, reward, done, info def reset(self, **kwargs): self._elapsed_steps = 0 return self.env.reset(**kwargs) class Node: def __init__(self, value=-np.inf, children=None): self.value = value self.visits = 0 self.children = {} if children is None else children def __repr__(self): return "<Node value=%f visits=%d len(children)=%d>" % ( self.value, self.visits, len(self.children), ) def select_actions(root, action_space, max_episode_steps): """ Select actions from the tree Normally we select the greedy action that has the highest reward associated with that subtree. We have a small chance to select a random action based on the exploration param and visit count of the current node at each step. We select actions for the longest possible episode, but normally these will not all be used. They will instead be truncated to the length of the actual episode and then used to update the tree. """ node = root acts = [] steps = 0 while steps < max_episode_steps: if node is None: # we've fallen off the explored area of the tree, just select random actions act = action_space.sample() else: epsilon = EXPLORATION_PARAM / np.log(node.visits + 2) if random.random() < epsilon: # random action act = action_space.sample() else: # greedy action act_value = {} for act in range(action_space.n): if node is not None and act in node.children: act_value[act] = node.children[act].value else: act_value[act] = -np.inf best_value = max(act_value.values()) best_acts = [ act for act, value in act_value.items() if value == best_value ] act = random.choice(best_acts) if act in node.children: node = node.children[act] else: node = None acts.append(act) steps += 1 return acts def
(env, acts): """ Perform a rollout using a preset collection of actions """ total_rew = 0 env.reset() steps = 0 for act in acts: _obs, rew, done, _info = env.step(act) steps += 1 total_rew += rew if done: break return steps, total_rew def update_tree(root, executed_acts, total_rew): """ Given the tree, a list of actions that were executed before the game ended, and a reward, update the tree so that the path formed by the executed actions are all updated to the new reward. """ root.value = max(total_rew, root.value) root.visits += 1 new_nodes = 0 node = root for step, act in enumerate(executed_acts): if act not in node.children: node.children[act] = Node() new_nodes += 1 node = node.children[act] node.value = max(total_rew, node.value) node.visits += 1 return new_nodes class Brute: """ Implementation of the Brute Creates and manages the tree storing game actions and rewards """ def __init__(self, env, max_episode_steps): self.node_count = 1 self._root = Node() self._env = env self._max_episode_steps = max_episode_steps def run(self): acts = select_actions(self._root, self._env.action_space, self._max_episode_steps) steps, total_rew = rollout(self._env, acts) executed_acts = acts[:steps] self.node_count += update_tree(self._root, executed_acts, total_rew) return executed_acts, total_rew def brute_retro( game, max_episode_steps=4500, timestep_limit=1e8, state=retro.State.DEFAULT, scenario=None, ): env = retro.make(game, state, use_restricted_actions=retro.Actions.DISCRETE, scenario=scenario) env = Frameskip(env) env = TimeLimit(env, max_episode_steps=max_episode_steps) brute = Brute(env, max_episode_steps=max_episode_steps) timesteps = 0 best_rew = float('-inf') while True: acts, rew = brute.run() timesteps += len(acts) if rew > best_rew: print("new best reward {} => {}".format(best_rew, rew)) best_rew = rew env.unwrapped.record_movie("best.bk2") env.reset() for act in acts: env.step(act) env.unwrapped.stop_record() if timesteps > timestep_limit: print("timestep limit exceeded") break def main(): parser = argparse.ArgumentParser() parser.add_argument('--game', default='Airstriker-Genesis') parser.add_argument('--state', default=retro.State.DEFAULT) parser.add_argument('--scenario', default=None) args = parser.parse_args() brute_retro(game=args.game, state=args.state, scenario=args.scenario) if __name__ == "__main__": main()
rollout
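The exploration schedule in select_actions is worth seeing numerically: the chance of a random action decays slowly with the node's visit count. A minimal sketch:

import numpy as np

EXPLORATION_PARAM = 0.005
for visits in (0, 10, 100, 10_000):
    epsilon = EXPLORATION_PARAM / np.log(visits + 2)
    print(visits, round(epsilon, 5))
# epsilon shrinks from ~0.0072 at 0 visits toward ~0.0005 at 10k visits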
test.rs
use std::{collections::HashMap, time::Duration}; use uom::si::{ acceleration::foot_per_second_squared, f64::*, length::foot, thermodynamic_temperature::degree_celsius, velocity::knot, }; use crate::electrical::consumption::SuppliedPower; use super::{ from_bool, to_bool, Aircraft, Simulation, SimulationElement, SimulationElementVisitor, SimulationToSimulatorVisitor, SimulatorReaderWriter, SimulatorWriter, UpdateContext, }; /// The simulation test bed handles the testing of [`Aircraft`] and [`SimulationElement`] /// by running a full simulation tick on them. /// /// [`Aircraft`]: ../trait.Aircraft.html /// [`SimulationElement`]: ../trait.SimulationElement.html pub struct SimulationTestBed { reader_writer: TestReaderWriter, get_supplied_power_fn: Box<dyn Fn() -> SuppliedPower>, delta: Duration, } impl SimulationTestBed { pub fn new() -> Self { Self::new_with_delta(Duration::from_secs(1)) } pub fn new_with_delta(delta: Duration) -> Self { let mut test_bed = Self { reader_writer: TestReaderWriter::new(), get_supplied_power_fn: Box::new(SuppliedPower::new), delta, }; test_bed.set_indicated_airspeed(Velocity::new::<knot>(250.)); test_bed.set_indicated_altitude(Length::new::<foot>(5000.)); test_bed.set_ambient_temperature(ThermodynamicTemperature::new::<degree_celsius>(0.)); test_bed.set_on_ground(false); test_bed } /// Creates an instance seeded with the state found in the given element. /// /// By default the unseeded simulation will return 0.0 or false for any requested /// variables. If this is a problem for your test, then use this function. pub fn seeded_with(element: &mut impl SimulationElement) -> Self { let mut test_bed = Self::new(); let mut writer = SimulatorWriter::new(&mut test_bed.reader_writer); let mut visitor = SimulationToSimulatorVisitor::new(&mut writer); element.accept(&mut visitor); test_bed } /// Runs a single [`Simulation`] tick on the provided [`Aircraft`]. /// /// [`Aircraft`]: ../trait.Aircraft.html /// [`Simulation`]: ../struct.Simulation.html pub fn run_aircraft(&mut self, aircraft: &mut impl Aircraft) { let mut simulation = Simulation::new(aircraft, &mut self.reader_writer); simulation.tick(self.delta); } /// Runs a single [`Simulation`] tick on the provided [`SimulationElement`], executing /// the given update before electrical power is distributed. /// /// Prefer using [`run`] over this if electrical power distribution does not /// matter for the test you're executing. /// /// [`Simulation`]: ../struct.Simulation.html /// [`SimulationElement`]: ../trait.SimulationElement.html /// [`run`]: #method.run pub fn run_before_power_distribution<T: SimulationElement, U: Fn(&mut T, &UpdateContext)>( &mut self, element: &mut T, update_fn: U, ) { self.run_within_test_aircraft(element, update_fn, true); }
/// Runs a single [`Simulation`] tick on the provided [`SimulationElement`]. /// /// Prefer using [`run_without_update`] over this if electrical power distribution does not /// matter for the test you're executing. /// /// [`Simulation`]: ../struct.Simulation.html /// [`SimulationElement`]: ../trait.SimulationElement.html /// [`run_without_update`]: #method.run_without_update pub fn run_before_power_distribution_without_update<T: SimulationElement>( &mut self, element: &mut T, ) { self.run_before_power_distribution(element, |_, _| {}); } /// Runs a single [`Simulation`] tick on the provided [`SimulationElement`], executing /// the given update after electrical power is distributed. /// /// [`Simulation`]: ../struct.Simulation.html /// [`SimulationElement`]: ../trait.SimulationElement.html pub fn run<T: SimulationElement, U: Fn(&mut T, &UpdateContext)>( &mut self, element: &mut T, update_fn: U, ) { self.run_within_test_aircraft(element, update_fn, false); } /// Runs a single [`Simulation`] tick on the provided [`SimulationElement`]. /// /// [`Simulation`]: ../struct.Simulation.html /// [`SimulationElement`]: ../trait.SimulationElement.html pub fn run_without_update(&mut self, element: &mut impl SimulationElement) { self.run(element, |_, _| {}); } fn run_within_test_aircraft<T: SimulationElement, U: Fn(&mut T, &UpdateContext)>( &mut self, element: &mut T, update_fn: U, before_power_distribution: bool, ) { let mut aircraft = TestAircraft::new( element, update_fn, (self.get_supplied_power_fn)(), before_power_distribution, ); self.run_aircraft(&mut aircraft); } pub fn set_delta(&mut self, delta: Duration) { self.delta = delta; } pub fn set_indicated_airspeed(&mut self, indicated_airspeed: Velocity) { self.reader_writer.write_f64( UpdateContext::INDICATED_AIRSPEED_KEY, indicated_airspeed.get::<knot>(), ); } pub fn indicated_airspeed(&mut self) -> Velocity { Velocity::new::<knot>( self.reader_writer .read_f64(UpdateContext::INDICATED_AIRSPEED_KEY), ) } pub fn set_indicated_altitude(&mut self, indicated_altitude: Length) { self.reader_writer.write_f64( UpdateContext::INDICATED_ALTITUDE_KEY, indicated_altitude.get::<foot>(), ); } pub fn set_ambient_temperature(&mut self, ambient_temperature: ThermodynamicTemperature) { self.reader_writer.write_f64( UpdateContext::AMBIENT_TEMPERATURE_KEY, ambient_temperature.get::<degree_celsius>(), ); } pub fn set_on_ground(&mut self, on_ground: bool) { self.reader_writer .write_bool(UpdateContext::IS_ON_GROUND_KEY, on_ground); } pub fn set_long_acceleration(&mut self, accel: Acceleration) { self.reader_writer.write_f64( UpdateContext::ACCEL_BODY_Z_KEY, accel.get::<foot_per_second_squared>(), ); } pub fn supplied_power_fn( mut self, supplied_power_fn: impl Fn() -> SuppliedPower + 'static, ) -> Self { self.get_supplied_power_fn = Box::new(supplied_power_fn); self } pub fn write_bool(&mut self, name: &str, value: bool) { self.reader_writer.write_bool(name, value); } pub fn write_f64(&mut self, name: &str, value: f64) { self.reader_writer.write_f64(name, value); } pub fn read_bool(&mut self, name: &str) -> bool { self.reader_writer.read_bool(name) } pub fn read_f64(&mut self, name: &str) -> f64 { self.reader_writer.read_f64(name) } pub fn contains_key(&self, name: &str) -> bool { self.reader_writer.contains_key(name) } } impl Default for SimulationTestBed { fn default() -> Self { Self::new() } } struct TestAircraft<'a, T: SimulationElement, U: Fn(&mut T, &UpdateContext)> { element: &'a mut T, update_fn: U, supplied_power: Option<SuppliedPower>, 
update_before_power_distribution: bool, } impl<'a, T: SimulationElement, U: Fn(&mut T, &UpdateContext)> TestAircraft<'a, T, U> { fn new( element: &'a mut T, update_fn: U, supplied_power: SuppliedPower, update_before_power_distribution: bool, ) -> Self { Self { element, update_fn, supplied_power: Some(supplied_power), update_before_power_distribution, } } } impl<'a, T: SimulationElement, U: Fn(&mut T, &UpdateContext)> Aircraft for TestAircraft<'a, T, U> { fn update_before_power_distribution(&mut self, context: &UpdateContext) { if self.update_before_power_distribution { (self.update_fn)(&mut self.element, context); } } fn update_after_power_distribution(&mut self, context: &UpdateContext) { if !self.update_before_power_distribution { (self.update_fn)(&mut self.element, context); } } fn get_supplied_power(&mut self) -> SuppliedPower { self.supplied_power.take().unwrap() } } impl<'a, T: SimulationElement, U: Fn(&mut T, &UpdateContext)> SimulationElement for TestAircraft<'a, T, U> { fn accept<W: SimulationElementVisitor>(&mut self, visitor: &mut W) { self.element.accept(visitor); visitor.visit(self); } } struct TestReaderWriter { variables: HashMap<String, f64>, } impl TestReaderWriter { fn new() -> Self { Self { variables: HashMap::new(), } } fn contains_key(&self, name: &str) -> bool { self.variables.contains_key(name) } fn write_bool(&mut self, name: &str, value: bool) { self.write(name, from_bool(value)); } fn write_f64(&mut self, name: &str, value: f64) { self.write(name, value); } fn read_bool(&mut self, name: &str) -> bool { to_bool(self.read(name)) } fn read_f64(&mut self, name: &str) -> f64 { self.read(name) } } impl SimulatorReaderWriter for TestReaderWriter { fn read(&mut self, name: &str) -> f64 { *self.variables.get(name).unwrap_or(&0.) } fn write(&mut self, name: &str, value: f64) { self.variables.insert(name.to_owned(), value); } } impl Default for TestReaderWriter { fn default() -> Self { Self::new() } } #[cfg(test)] mod tests { use super::*; use crate::{ electrical::consumption::{PowerConsumption, PowerConsumptionReport, SuppliedPower}, simulation::{SimulatorReader, SimulatorWriter}, }; #[derive(Clone, Copy, Debug, PartialEq)] enum CallOrder { Before, After, } #[derive(Default)] struct ElementUnderTest { update_called: bool, read_called: bool, receive_power_called: bool, consume_power_called: bool, consume_power_in_converters_called: bool, process_power_consumption_report_called: bool, update_called_before_or_after_receive_power: Option<CallOrder>, } impl ElementUnderTest { fn update(&mut self, _: &UpdateContext) { self.update_called = true; self.update_called_before_or_after_receive_power = if self.receive_power_called { Some(CallOrder::After) } else { Some(CallOrder::Before) }; } fn all_functions_called(&self) -> bool { self.update_called && self.read_called && self.receive_power_called && self.consume_power_called && self.consume_power_in_converters_called && self.process_power_consumption_report_called } fn update_called_before_or_after_receive_power(&self) -> Option<CallOrder> { self.update_called_before_or_after_receive_power } } impl SimulationElement for ElementUnderTest { fn read(&mut self, _: &mut SimulatorReader) { self.read_called = true; } fn write(&self, _: &mut SimulatorWriter) { // Can't check this as the fn doesn't require mutable self. 
} fn receive_power(&mut self, _: &SuppliedPower) { self.receive_power_called = true; } fn consume_power(&mut self, _: &mut PowerConsumption) { self.consume_power_called = true; } fn consume_power_in_converters(&mut self, _consumption: &mut PowerConsumption) { self.consume_power_in_converters_called = true; } fn process_power_consumption_report<T: PowerConsumptionReport>(&mut self, _: &T) { self.process_power_consumption_report_called = true; } } #[test] fn test_aircraft_can_run_in_simulation() { let mut element = ElementUnderTest::default(); let mut test_bed = SimulationTestBed::new(); test_bed.run_before_power_distribution(&mut element, |el, context| { el.update(context); }); assert!(element.all_functions_called()); } #[test] fn defaults_to_receiving_power_before_update() { let mut element = ElementUnderTest::default(); let mut test_bed = SimulationTestBed::new(); test_bed.run(&mut element, |el, context| { el.update(context); }); assert_eq!( element.update_called_before_or_after_receive_power(), Some(CallOrder::After) ); } #[test] fn when_update_before_receive_power_requested_executes_update_before_receive_power() { let mut element = ElementUnderTest::default(); let mut test_bed = SimulationTestBed::new(); test_bed.run_before_power_distribution(&mut element, |el, context| { el.update(context); }); assert_eq!( element.update_called_before_or_after_receive_power(), Some(CallOrder::Before) ); } }
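A sketch of how a test typically drives the bed; PitotHeater, its update method, and the PITOT_HEAT_ON variable are all invented for illustration.

#[test]
fn heater_reacts_to_airspeed() {
    let mut heater = PitotHeater::new(); // hypothetical SimulationElement
    let mut test_bed = SimulationTestBed::new();
    test_bed.set_indicated_airspeed(Velocity::new::<knot>(120.));

    // Run one tick, updating the element after power distribution.
    test_bed.run(&mut heater, |el, context| el.update(context));

    assert!(test_bed.read_bool("PITOT_HEAT_ON")); // hypothetical simulator variable
}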
offer.service.ts
import { Injectable } from '@angular/core'; import { HttpClient, HttpHeaders } from '@angular/common/http'; import { Subject } from 'rxjs'; import { map } from 'rxjs/operators'; import { Router } from '@angular/router'; import {environment} from '../../environments/environment'; import swal from 'sweetalert2'; const BackUrl = environment.BackUrl; // const BackUrl = 'http://localhost:5000/api' @Injectable({ providedIn: 'root' }) export class offerService { constructor(private http: HttpClient, private router: Router) {} getCandidates(id: string) { return this.http.get<{candidateNames: [string], username: [string] , count: number, id:[string]}>(BackUrl+'/getOneCandi?jobAd='+id) } addCandidate(data: any) { this.http.post(BackUrl+'/postBodyC',data).subscribe(() =>{ this.router.navigate(['/']);
addAcceptence(data: any) { return this.http.post(BackUrl+'/postAcc' , data); } getAcceptence(id: string) { return this.http.get<{AcceptedNames: [string], username: [string] , count: number, id:[string]}>(BackUrl+'/getOneAccepted?jobAd='+id) } startJob(data: any) { this.http.post(BackUrl+'/start/job', data).subscribe(result =>{ this.showSwal('secc'); this.router.navigate(['/my-offers']); }) } endJob(data: any) { this.http.post(BackUrl+'/end/job', data,{ responseType: 'text'}).subscribe(result =>{ this.showSwal('secc'); this.router.navigate(['/acceptance-list']); }) } showSwal(type){ if (type == 'secc') { swal({ title: "تمت العملية بنجاح!", buttonsStyling: false, confirmButtonClass: 'btn btn-success', confirmButtonText:'نعم', type:'success', }).catch(swal.noop) } }
}) }
colors.py
import numpy as np import skimage
import matplotlib.pyplot as plt from matplotlib.pyplot import imshow def unmix_purple_img(purp_img, loud=False): """ Accepts a purple image object as a parameter and returns the image with the colors unmixed for easier segmentation """ hematoxylin_matrix = np.ones((3,3)) * (0.644, 0.717, 0.267) # cell profiler matrix for purple images stain_img = purp_img[:, :, [0, 1, 2]] # need only first 3 channels to separate stains separated_img = separate_stains(stain_img, hematoxylin_matrix) # apply stain matrix to image if loud: fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8,8)) ax[0].set_title("Original") ax[0].imshow(purp_img) ax[1].set_title("Hematoxylin") ax[1].imshow(separated_img[:, :, 0]) return separated_img[:, :, 0] def unmix_pink_imgs(pink_img, loud=False): """ Same as unmix_purple_img but takes a pink image as a parameter """ stain_img = pink_img[:, :, [0, 1, 2]] separated_img = separate_stains(stain_img, rbd_from_rgb) if loud: fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8,8)) ax[0].set_title("Original") ax[0].imshow(pink_img) ax[1].set_title("RBD") ax[1].imshow(separated_img[:, :, 1]) return separated_img[:, :, 1]
from skimage import io, transform, exposure, data, color from skimage.color import *
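A one-line usage sketch; the file path is hypothetical and io comes from the skimage import above.

img = io.imread("purple_slide.png")               # hypothetical slide image
hema_channel = unmix_purple_img(img, loud=True)   # hematoxylin intensity map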
util.go
// Copyright 2018 The Kubeflow Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package testutil import ( "strings" "testing" "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime" "k8s.io/client-go/tools/cache" common "github.com/kubeflow/common/pkg/apis/common/v1" pyv1 "github.com/kubeflow/pytorch-operator/pkg/apis/pytorch/v1" ) const ( LabelGroupName = "group-name" JobNameLabel = "job-name" ControllerNameLabel = "controller-name" // Deprecated label. Has to be removed later DeprecatedLabelPyTorchJobName = "pytorch-job-name" ) var ( // KeyFunc is the short name to DeletionHandlingMetaNamespaceKeyFunc. // IndexerInformer uses a delta queue, therefore for deletes we have to use this // key function but it should be just fine for non delete events. KeyFunc = cache.DeletionHandlingMetaNamespaceKeyFunc GroupName = pyv1.GroupName ControllerName = "pytorch-operator" ) func
(jobName string) map[string]string { return map[string]string{ LabelGroupName: GroupName, JobNameLabel: strings.Replace(jobName, "/", "-", -1), DeprecatedLabelPyTorchJobName: strings.Replace(jobName, "/", "-", -1), ControllerNameLabel: ControllerName, } } func GenOwnerReference(job *pyv1.PyTorchJob) *metav1.OwnerReference { boolPtr := func(b bool) *bool { return &b } controllerRef := &metav1.OwnerReference{ APIVersion: pyv1.SchemeGroupVersion.String(), Kind: pyv1.Kind, Name: job.Name, UID: job.UID, BlockOwnerDeletion: boolPtr(true), Controller: boolPtr(true), } return controllerRef } // ConvertPyTorchJobToUnstructured uses function ToUnstructured to convert PyTorchJob to Unstructured. func ConvertPyTorchJobToUnstructured(job *pyv1.PyTorchJob) (*unstructured.Unstructured, error) { object, err := runtime.DefaultUnstructuredConverter.ToUnstructured(job) if err != nil { return nil, err } return &unstructured.Unstructured{ Object: object, }, nil } func GetKey(job *pyv1.PyTorchJob, t *testing.T) string { key, err := KeyFunc(job) if err != nil { t.Errorf("Unexpected error getting key for job %v: %v", job.Name, err) return "" } return key } func CheckCondition(job *pyv1.PyTorchJob, condition common.JobConditionType, reason string) bool { for _, v := range job.Status.Conditions { if v.Type == condition && v.Status == v1.ConditionTrue && v.Reason == reason { return true } } return false }
GenLabels
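For reference, the label map GenLabels produces for a namespaced job name looks roughly like this (assuming pyv1.GroupName resolves to "kubeflow.org"):

labels := GenLabels("default/mnist")
// labels is roughly map[string]string{
//   "group-name":       "kubeflow.org",
//   "job-name":         "default-mnist",
//   "pytorch-job-name": "default-mnist",
//   "controller-name":  "pytorch-operator",
// }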
GridMenu.minfd53.js
/*! * Remark Material (http://getbootstrapadmin.com/remark) * Copyright 2017 amazingsurge * Licensed under the Themeforest Standard Licenses */ !function(global,factory){if("function"==typeof define&&define.amd)define("/Section/GridMenu",["exports","jquery","Component"],factory);else if("undefined"!=typeof exports)factory(exports,require("jquery"),require("Component"));else{var mod={exports:{}};factory(mod.exports,global.jQuery,global.Component),global.SectionGridMenu=mod.exports}}(this,function(exports,_jquery,_Component2){"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var _jquery2=babelHelpers.interopRequireDefault(_jquery),_Component3=babelHelpers.interopRequireDefault(_Component2),$BODY=(0,_jquery2.default)("body"),$HTML=(0,_jquery2.default)("html"),Scrollable=function(){function Scrollable($el){babelHelpers.classCallCheck(this,Scrollable),this.$el=$el,this.api=null,this.init()}return babelHelpers.createClass(Scrollable,[{key:"init",value:function(){this.api=this.$el.asScrollable({namespace:"scrollable",skin:"scrollable-inverse",direction:"vertical",contentSelector:">",containerSelector:">"}).data("asScrollable")}},{key:"update",value:function(){this.api&&this.api.update()}},{key:"enable",value:function(){this.api||this.init(),this.api&&this.api.enable()}},{key:"disable",value:function(){this.api&&this.api.disable()}}]),Scrollable}(),_class=function(_Component){function
(){var _ref;babelHelpers.classCallCheck(this,_class);for(var _len=arguments.length,args=Array(_len),_key=0;_key<_len;_key++)args[_key]=arguments[_key];var _this=babelHelpers.possibleConstructorReturn(this,(_ref=_class.__proto__||Object.getPrototypeOf(_class)).call.apply(_ref,[this].concat(args)));return _this.isOpened=!1,_this.scrollable=new Scrollable(_this.$el),_this}return babelHelpers.inherits(_class,_Component),babelHelpers.createClass(_class,[{key:"open",value:function(){this.animate(function(){this.$el.addClass("active"),(0,_jquery2.default)('[data-toggle="gridmenu"]').addClass("active").attr("aria-expanded",!0),$BODY.addClass("site-gridmenu-active"),$HTML.addClass("disable-scrolling")},function(){this.scrollable.enable()}),this.isOpened=!0}},{key:"close",value:function(){this.animate(function(){this.$el.removeClass("active"),(0,_jquery2.default)('[data-toggle="gridmenu"]').removeClass("active").attr("aria-expanded",!1),$BODY.removeClass("site-gridmenu-active"),$HTML.removeClass("disable-scrolling")},function(){this.scrollable.disable()}),this.isOpened=!1}},{key:"toggle",value:function(opened){opened?this.open():this.close()}},{key:"animate",value:function(doing,callback){var _this2=this;doing.call(this),this.$el.trigger("changing.site.gridmenu"),setTimeout(function(){callback.call(_this2),_this2.$el.trigger("changed.site.gridmenu")},500)}}]),_class}(_Component3.default);exports.default=_class});
_class
DeleteTableCommand.ts
import { GlueClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../GlueClient.ts"; import { DeleteTableRequest, DeleteTableResponse } from "../models/models_0.ts"; import { deserializeAws_json1_1DeleteTableCommand, serializeAws_json1_1DeleteTableCommand, } from "../protocols/Aws_json1_1.ts"; import { getSerdePlugin } from "../../middleware-serde/mod.ts"; import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "../../protocol-http/mod.ts"; import { Command as $Command } from "../../smithy-client/mod.ts"; import { FinalizeHandlerArguments, Handler, HandlerExecutionContext, MiddlewareStack, HttpHandlerOptions as __HttpHandlerOptions, MetadataBearer as __MetadataBearer, SerdeContext as __SerdeContext, } from "../../types/mod.ts"; export interface DeleteTableCommandInput extends DeleteTableRequest {} export interface DeleteTableCommandOutput extends DeleteTableResponse, __MetadataBearer {} /** * <p>Removes a table definition from the Data Catalog.</p> * <note> * <p>After completing this operation, you no longer have access to the table versions and * partitions that belong to the deleted table. Glue deletes these "orphaned" resources * asynchronously in a timely manner, at the discretion of the service.</p> * <p>To ensure the immediate deletion of all related resources, before calling * <code>DeleteTable</code>, use <code>DeleteTableVersion</code> or * <code>BatchDeleteTableVersion</code>, and <code>DeletePartition</code> or * <code>BatchDeletePartition</code>, to delete any resources that belong to the * table.</p> * </note> * @example * Use a bare-bones client and the command you need to make an API call. * ```javascript * import { GlueClient, DeleteTableCommand } from "../../client-glue/mod.ts"; * // const { GlueClient, DeleteTableCommand } = require("@aws-sdk/client-glue"); // CommonJS import * const client = new GlueClient(config); * const command = new DeleteTableCommand(input); * const response = await client.send(command); * ``` * * @see {@link DeleteTableCommandInput} for command's `input` shape. * @see {@link DeleteTableCommandOutput} for command's `response` shape. * @see {@link GlueClientResolvedConfig | config} for command's `input` shape. * */ export class
extends $Command< DeleteTableCommandInput, DeleteTableCommandOutput, GlueClientResolvedConfig > { // Start section: command_properties // End section: command_properties constructor(readonly input: DeleteTableCommandInput) { // Start section: command_constructor super(); // End section: command_constructor } /** * @internal */ resolveMiddleware( clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>, configuration: GlueClientResolvedConfig, options?: __HttpHandlerOptions ): Handler<DeleteTableCommandInput, DeleteTableCommandOutput> { this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); const stack = clientStack.concat(this.middlewareStack); const { logger } = configuration; const clientName = "GlueClient"; const commandName = "DeleteTableCommand"; const handlerExecutionContext: HandlerExecutionContext = { logger, clientName, commandName, inputFilterSensitiveLog: DeleteTableRequest.filterSensitiveLog, outputFilterSensitiveLog: DeleteTableResponse.filterSensitiveLog, }; const { requestHandler } = configuration; return stack.resolve( (request: FinalizeHandlerArguments<any>) => requestHandler.handle(request.request as __HttpRequest, options || {}), handlerExecutionContext ); } private serialize(input: DeleteTableCommandInput, context: __SerdeContext): Promise<__HttpRequest> { return serializeAws_json1_1DeleteTableCommand(input, context); } private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<DeleteTableCommandOutput> { return deserializeAws_json1_1DeleteTableCommand(output, context); } // Start section: command_body_extra // End section: command_body_extra }
DeleteTableCommand
interface.go
/******************************************************************************* * * Copyright 2018 SAP SE * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You should have received a copy of the License along with this * program. If not, you may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *******************************************************************************/ package os //Interface describes the set of OS-level operations that can be executed by //the autopilot. The default implementation for production is struct Linux in //this package. // //There is an important distinction between "drive" and "device" in the //autopilot's jargon. A "drive" is the physical thing, a "device" is a device //file. For encrypted drives, there are two devices for each drive: the //original SCSI device file (e.g. /dev/sda) and the device file representing //the contents of the LUKS container (e.g. /dev/mapper/ABCDEFGH). type Interface interface { //CollectDrives is run in a separate goroutine and reports drives as they are //added or removed. (When first started, all existing drives shall be //reported as "added".) It shall not return. The `trigger` channel is used by //the caller to trigger each work cycle of CollectDrives. CollectDrives(devicePathGlobs []string, trigger <-chan struct{}, added chan<- []Drive, removed chan<- []string) //CollectDriveErrors is run in a separate goroutine and reports drive errors //that are observed in the kernel log. It shall not return. CollectDriveErrors(errors chan<- []DriveError) //ClassifyDevice examines the contents of the given device to detect existing //LUKS containers or filesystems. ClassifyDevice(devicePath string) DeviceType //FormatDevice creates an XFS filesystem on this device. Existing containers //or filesystems will be overwritten. FormatDevice(devicePath string) (ok bool) //MountDevice mounts this device at the given location. MountDevice(devicePath, mountPath string, scope MountScope) (ok bool) //UnmountDevice unmounts the device that is mounted at the given location. UnmountDevice(mountPath string, scope MountScope) (ok bool) //RefreshMountPoints examines the system to find any mounts that have changed //since we last looked. RefreshMountPoints() //GetMountPointsIn returns all active mount points below the given path. GetMountPointsIn(mountPathPrefix string, scope MountScope) []MountPoint //GetMountPointsOf returns all active mount points for this device. GetMountPointsOf(devicePath string, scope MountScope) []MountPoint //CreateLUKSContainer creates a LUKS container on the given device, using the //given encryption key. Existing data on the device will be overwritten. CreateLUKSContainer(devicePath, key string) (ok bool) //OpenLUKSContainer opens the LUKS container on the given device. The given //keys are tried in order until one works. OpenLUKSContainer(devicePath, mappingName string, keys []string) (mappedDevicePath string, ok bool) //CloseLUKSContainer closes the LUKS container with the given mapping name. 
CloseLUKSContainer(mappingName string) (ok bool) //RefreshLUKSMappings examines the system to find any LUKS mappings that have //changed since we last looked. RefreshLUKSMappings() //GetLUKSMappingOf returns the device path of the active LUKS mapping for //this device, or "" if no such mapping exists. GetLUKSMappingOf(devicePath string) (mappedDevicePath string) //ReadSwiftID returns the swift-id in this directory, or an empty string if //the file does not exist. ReadSwiftID(mountPath string) (string, error) //WriteSwiftID writes the given swift-id into this directory. WriteSwiftID(mountPath, swiftID string) error //Chown changes the ownership of the given path. Both owner and group may //contain a name or an ID (as decimal integer literal) or be empty (to leave //that field unchanged). Chown(path, owner, group string) } //Drive contains information about a drive as detected by the OS. type Drive struct { DevicePath string FoundAtPath string //only used in log messages SerialNumber string } //DriveError represents a drive error that was found e.g. in a kernel log. type DriveError struct { DevicePath string Message string } //DeviceType describes the contents of a device, to the granularity required by //this program. type DeviceType int const ( //DeviceTypeUnknown describes a device that is readable, but contains neither //a LUKS container nor a filesystem. DeviceTypeUnknown DeviceType = iota //DeviceTypeUnreadable is returned by ClassifyDevice() when the device is
DeviceTypeUnreadable //DeviceTypeLUKS describes a device that contains a LUKS container. DeviceTypeLUKS //DeviceTypeFilesystem describes a device that contains an admissible //filesystem. DeviceTypeFilesystem ) //MountPoint describes an active mount point that is present on the system. type MountPoint struct { DevicePath string MountPath string Options map[string]bool } //MountScope describes whether a mount happens in the autopilot's mount //namespace or in the host mount namespace. type MountScope string const ( //HostScope is the MountScope for mounts in the host mount namespace. HostScope MountScope = "host" //LocalScope is the MountScope for mounts in the local mount namespace of the autopilot. LocalScope = "local" ) //ForeachMountScope calls the action once for each MountScope, aborting as soon //as one call returns false. func ForeachMountScope(action func(MountScope) (ok bool)) (ok bool) { if !action(HostScope) { return false } return action(LocalScope) } //ForeachMountScopeOrError is like ForeachMountScope, but propagates errors instead of bools. func ForeachMountScopeOrError(action func(MountScope) error) error { err := action(HostScope) if err != nil { return err } return action(LocalScope) }
//unreadable.
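//A minimal usage sketch (not part of this package, names assumed): given an
//Interface implementation `osi` (e.g. struct Linux) and a hypothetical mount
//path, unmounting in both mount namespaces could look like:
//
//    ok := ForeachMountScope(func(scope MountScope) bool {
//        return osi.UnmountDevice("/run/autopilot/mounts/ABCDEFGH", scope)
//    })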
InstallPWA.stories.js
import React, { Fragment } from "react";

import Title from "accessories/Title";
import { InstallPWA } from "widgets";

export default {
  title: "Design System/TemplatePWA/InstallPWA",
};

export const INSTALLPWA = args => {
  return (
    <Fragment>
      <Title variant="h2">Install PWA</Title>
      <InstallPWA {...args} />
    </Fragment>
  );
};

INSTALLPWA.args = {
  text: "Agréganos a tu pantalla de inicio para visitarnos más fácil y rápido",
btnadd: "Agregar", }; INSTALLPWA.argTypes = { texto: { control: "text" }, btncancel: { control: "text" }, btnadd: { control: "text" }, };
btncancel: "No por ahora",
util.rs
use winapi::um::winnt::{ FILE_ATTRIBUTE_NORMAL, FILE_APPEND_DATA, GENERIC_READ, GENERIC_WRITE }; use winapi::um::fileapi::{ OPEN_ALWAYS, OPEN_EXISTING, CREATE_ALWAYS, WriteFile, ReadFile, CreateFileA, }; use winapi::um::handleapi::CloseHandle; #[cfg(feature = "logger")] #[macro_export] macro_rules! log { ($text:expr) => { crate::util::log0($text); }; ($text:expr, $val:expr) => { crate::util::log1($text,$val); }; ($text:expr, $val1:expr, $val2:expr) => { crate::util::log2($text,$val1,$val2); }; ($text:expr, $val1:expr, $val2:expr, $val3:expr) => { crate::util::log3($text,$val1,$val2,$val3); }; } #[cfg(not(feature = "logger"))] #[macro_export] macro_rules! log { ($text:expr) => { }; ($text:expr, $val:expr) => {}; ($text:expr, $val1:expr, $val2:expr) => {}; ($text:expr, $val1:expr, $val2:expr, $val3:expr) => {}; } #[cfg(feature = "logger")] pub unsafe fn log0( message : &str ) { let name = "dbg_out.txt\0"; let mut out = 0; let hFile = CreateFileA( name.as_ptr() as *const i8, FILE_APPEND_DATA, 0, 0 as *mut winapi::um::minwinbase::SECURITY_ATTRIBUTES, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, 0 as *mut winapi::ctypes::c_void ); WriteFile( hFile, message.as_ptr() as *const winapi::ctypes::c_void, message.len() as u32, &mut out, 0 as *mut winapi::um::minwinbase::OVERLAPPED ); CloseHandle( hFile ); } #[cfg(feature = "logger")] pub fn get_c_string_length( buffer: &[u8]) -> usize { let mut buffer_text_len = 0; while buffer_text_len < buffer.len() { if buffer[buffer_text_len] == 0 { break; } buffer_text_len += 1 } return buffer_text_len; } #[cfg(feature = "logger")] pub fn f32_to_text( dest: &mut[u8], value: f32, comma: bool ) -> usize { let int_part = value as u32; let frac_part = ((value - int_part as f32)*10000f32 ) as u32; unsafe{ winapi::um::winuser::wsprintfA( dest.as_mut_ptr() as * mut i8, "%d.%.4d\0".as_ptr() as * const i8, int_part, frac_part); } if comma { let length = get_c_string_length(dest); dest[ length ] = ',' as u8; dest[ length+1 ] = ' ' as u8; return length+2; } return get_c_string_length( &dest ); }
let mut length : usize = 0;
    length += f32_to_text( &mut buffer, value, false );
    buffer[ length ] = '\n' as u8;
    let buffer_text_len = get_c_string_length(&buffer);
    log0( core::str::from_utf8_unchecked(&buffer[ 0 .. buffer_text_len ]));
}

#[cfg(feature = "logger")]
pub unsafe fn log2( _message : &str, value1: f32, value2: f32 ) {
    let mut buffer : [ u8; 256 ] = [ 0;256 ];
    let mut length : usize = 0;
    length += f32_to_text( &mut buffer, value1, true );
    length += f32_to_text( &mut buffer[length..], value2, false );
    buffer[ length ] = '\n' as u8;
    let buffer_text_len = get_c_string_length(&buffer);
    log0( core::str::from_utf8_unchecked(&buffer[ 0 .. buffer_text_len ]));
}

#[cfg(feature = "logger")]
pub unsafe fn log3( _message : &str, value1: f32, value2: f32, value3: f32 ) {
    let mut buffer : [ u8; 256 ] = [ 0;256 ];
    let mut length : usize = 0;
    length += f32_to_text( &mut buffer, value1, true );
    length += f32_to_text( &mut buffer[length..], value2, true );
    length += f32_to_text( &mut buffer[length..], value3, false );
    buffer[ length ] = '\n' as u8;
    let buffer_text_len = get_c_string_length(&buffer);
    log0( core::str::from_utf8_unchecked(&buffer[ 0 .. buffer_text_len ]));
}

#[cfg(feature = "logger")]
pub unsafe fn read_file( file_name : &str, dst : &mut [u8] ) {
    // NOTE: file_name must be NUL-terminated, since it is passed straight to CreateFileA.
    let mut out = 0;
    log!( "Creating file for reading\n");
    let hFile = CreateFileA( file_name.as_ptr() as *const i8,
        GENERIC_READ,
        0,
        0 as *mut winapi::um::minwinbase::SECURITY_ATTRIBUTES,
        OPEN_EXISTING,
        FILE_ATTRIBUTE_NORMAL,
        0 as *mut winapi::ctypes::c_void );
    log!( "Reading...\n");
    ReadFile( hFile,
        dst.as_mut_ptr() as *mut winapi::ctypes::c_void,
        dst.len() as u32,
        &mut out,
        0 as *mut winapi::um::minwinbase::OVERLAPPED );
    log!( "Close handle...\n");
    CloseHandle( hFile );
}
#[cfg(feature = "logger")] pub unsafe fn log1( message : &str, value: f32 ) { let mut buffer : [ u8; 256 ] = [ 0;256 ];
DescribeDataAssetsRequest.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # # http://www.apache.org/licenses/LICENSE-2.0 # # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from aliyunsdkcore.request import RpcRequest from aliyunsdksddp.endpoint import endpoint_data class DescribeDataAssetsRequest(RpcRequest): def __init__(self): RpcRequest.__init__(self, 'Sddp', '2019-01-03', 'DescribeDataAssets') self.set_method('POST') if hasattr(self, "endpoint_map"): setattr(self, "endpoint_map", endpoint_data.getEndpointMap()) if hasattr(self, "endpoint_regional"): setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional()) def get_RiskLevels(self): return self.get_query_params().get('RiskLevels') def set_RiskLevels(self,RiskLevels): self.add_query_param('RiskLevels',RiskLevels)
return self.get_query_params().get('RangeId') def set_RangeId(self,RangeId): self.add_query_param('RangeId',RangeId) def get_PageSize(self): return self.get_query_params().get('PageSize') def set_PageSize(self,PageSize): self.add_query_param('PageSize',PageSize) def get_Lang(self): return self.get_query_params().get('Lang') def set_Lang(self,Lang): self.add_query_param('Lang',Lang) def get_CurrentPage(self): return self.get_query_params().get('CurrentPage') def set_CurrentPage(self,CurrentPage): self.add_query_param('CurrentPage',CurrentPage) def get_Name(self): return self.get_query_params().get('Name') def set_Name(self,Name): self.add_query_param('Name',Name) def get_RuleId(self): return self.get_query_params().get('RuleId') def set_RuleId(self,RuleId): self.add_query_param('RuleId',RuleId)
def get_RangeId(self):
processor_test.go
// Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"). You may not // use this file except in compliance with the License. A copy of the // License is located at // // http://aws.amazon.com/apache2.0/ // // or in the "license" file accompanying this file. This file is distributed // on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, // either express or implied. See the License for the specific language governing // permissions and limitations under the License. // Package processor defines the document processing unit interface package processor import ( "fmt" "testing" "github.com/aws/amazon-ssm-agent/agent/appconfig" "github.com/aws/amazon-ssm-agent/agent/context" "github.com/aws/amazon-ssm-agent/agent/contracts" "github.com/aws/amazon-ssm-agent/agent/framework/processor/executer" executermocks "github.com/aws/amazon-ssm-agent/agent/framework/processor/executer/mock" "github.com/aws/amazon-ssm-agent/agent/task" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) // TestEngineProcessor_Submit tests the basic flow of start command thread operation // this function submits to the job pool func TestEngineProcessor_Submit(t *testing.T) { sendCommandPoolMock := new(task.MockedPool) ctx := context.NewMockDefault() executerMock := executermocks.NewMockExecuter() creator := func(ctx context.T) executer.Executer { return executerMock } sendCommandPoolMock.On("Submit", ctx.Log(), "messageID", mock.Anything).Return(nil) sendCommandPoolMock.On("BufferTokensIssued").Return(0) docMock := new(DocumentMgrMock) processor := EngineProcessor{ executerCreator: creator, sendCommandPool: sendCommandPoolMock, context: ctx, documentMgr: docMock, startWorker: NewWorkerProcessorSpec(ctx, 1, contracts.StartSession, 0), } docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentType = contracts.StartSession docMock.On("PersistDocumentState", mock.Anything, appconfig.DefaultLocationOfPending, docState) errorCode := processor.Submit(docState) assert.Equal(t, errorCode, ErrorCode("")) sendCommandPoolMock.AssertExpectations(t) } func TestEngineProcessor_Cancel(t *testing.T) { cancelCommandPoolMock := new(task.MockedPool) ctx := context.NewMockDefault() docMock := new(DocumentMgrMock) processor := EngineProcessor{ context: ctx, documentMgr: docMock, cancelCommandPool: cancelCommandPoolMock, cancelWorker: NewWorkerProcessorSpec(ctx, 1, contracts.TerminateSession, 0), startWorker: NewWorkerProcessorSpec(ctx, 1, contracts.StartSession, 0), } cancelCommandPoolMock.On("Submit", ctx.Log(), "cancelMessageID", mock.Anything).Return(nil) cancelCommandPoolMock.On("BufferTokensIssued").Return(0) docState := contracts.DocumentState{} expectedVal := "cancelMessageID" docState.DocumentInformation.MessageID = expectedVal docState.DocumentType = contracts.TerminateSession docMock.On("PersistDocumentState", mock.Anything, appconfig.DefaultLocationOfPending, docState) errorCode := processor.Cancel(docState) assert.Equal(t, errorCode, ErrorCode("")) docMock.AssertExpectations(t) } func TestEngineProcessor_Stop(t *testing.T) { sendCommandPoolMock := new(task.MockedPool) cancelCommandPoolMock := new(task.MockedPool) ctx := context.NewMockDefault() resChan := make(chan contracts.DocumentResult) processor := EngineProcessor{ sendCommandPool: sendCommandPoolMock, cancelCommandPool: cancelCommandPoolMock, context: ctx, resChan: resChan, } 
sendCommandPoolMock.On("ShutdownAndWait", mock.AnythingOfType("time.Duration")).Return(true) cancelCommandPoolMock.On("ShutdownAndWait", mock.AnythingOfType("time.Duration")).Return(true) processor.Stop() sendCommandPoolMock.AssertExpectations(t) cancelCommandPoolMock.AssertExpectations(t) // multiple stop sendCommandPoolMock = new(task.MockedPool) cancelCommandPoolMock = new(task.MockedPool) processor.Stop() sendCommandPoolMock.AssertNotCalled(t, "ShutdownAndWait", mock.AnythingOfType("time.Duration")) cancelCommandPoolMock.AssertNotCalled(t, "ShutdownAndWait", mock.AnythingOfType("time.Duration")) } //TODO add shutdown and reboot test once we encapsulate docmanager func TestProcessCommand(t *testing.T) { ctx := context.NewMockDefault() docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentInformation.InstanceID = "instanceID" docState.DocumentInformation.DocumentID = "documentID" executerMock := executermocks.NewMockExecuter() resChan := make(chan contracts.DocumentResult) statusChan := make(chan contracts.DocumentResult) cancelFlag := task.NewChanneledCancelFlag() executerMock.On("Run", cancelFlag, mock.AnythingOfType("*executer.DocumentFileStore")).Return(statusChan) // call method under test //orchestrationRootDir is set to empty such that it can meet the test expectation. creator := func(ctx context.T) executer.Executer { return executerMock } go func() { //send 3 updates for i := 0; i < 3; i++ { last := "" if i < 2 { last = fmt.Sprintf("plugin%d", i) } res := contracts.DocumentResult{ LastPlugin: last, Status: contracts.ResultStatusSuccess, } statusChan <- res res2 := <-resChan assert.Equal(t, res, res2) } close(statusChan) }() docMock := new(DocumentMgrMock) docMock.On("MoveDocumentState", "documentID", appconfig.DefaultLocationOfPending, appconfig.DefaultLocationOfCurrent) docMock.On("RemoveDocumentState", "documentID", appconfig.DefaultLocationOfCurrent) processCommand(ctx, creator, cancelFlag, resChan, &docState, docMock) executerMock.AssertExpectations(t) docMock.AssertExpectations(t) close(resChan) //assert channel is not closed, each instance of Processor keeps a distinct copy of channel assert.NotNil(t, resChan) } func TestCheckDocSubmissionAllowed(t *testing.T) { sendCommandPoolMock := new(task.MockedPool) ctx := context.NewMockDefault() resChan := make(chan contracts.DocumentResult) processor := EngineProcessor{ sendCommandPool: sendCommandPoolMock, context: ctx, resChan: resChan, startWorker: NewWorkerProcessorSpec(ctx, 1, contracts.StartSession, 1), poolToProcessorErrorCodeMap: make(map[task.PoolErrorCode]ErrorCode), } sendCommandPoolMock.On("AcquireBufferToken", "messageID").Return(task.JobQueueFull) sendCommandPoolMock.On("ReleaseBufferToken", "messageID").Return(task.PoolErrorCode("")) sendCommandPoolMock.On("BufferTokensIssued").Return(1) bufferLimit := 1 docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentInformation.InstanceID = "instanceID" docState.DocumentInformation.DocumentID = "documentID" docState.DocumentType = contracts.StartSession errorCode := processor.checkDocSubmissionAllowed(&docState, sendCommandPoolMock, bufferLimit) assert.Equal(t, ConversionFailed, errorCode, "conversion failed") processor.loadProcessorPoolErrorCodes() sendCommandPoolMock = new(task.MockedPool) sendCommandPoolMock.On("BufferTokensIssued").Return(0) sendCommandPoolMock.On("AcquireBufferToken", mock.Anything).Return(task.JobQueueFull) processor.sendCommandPool = 
sendCommandPoolMock errorCode = processor.checkDocSubmissionAllowed(&docState, sendCommandPoolMock, bufferLimit) assert.Equal(t, CommandBufferFull, errorCode, "command buffer full") } func TestDocSubmission_Panic(t *testing.T) { sendCommandPoolMock := new(task.MockedPool) ctx := context.NewMockDefault() executerMock := executermocks.NewMockExecuter() creator := func(ctx context.T) executer.Executer { return executerMock } sendCommandPoolMock.On("Submit", ctx.Log(), "messageID", mock.Anything).Return(nil) sendCommandPoolMock.On("BufferTokensIssued").Return(0) sendCommandPoolMock.On("AcquireBufferToken", mock.Anything).Return(task.PoolErrorCode("")) sendCommandPoolMock.On("ReleaseBufferToken", mock.Anything).Return(task.PoolErrorCode("")) processor := EngineProcessor{ executerCreator: creator, sendCommandPool: sendCommandPoolMock, context: ctx, documentMgr: nil, // assigning nil panics Submit() startWorker: NewWorkerProcessorSpec(ctx, 1, contracts.StartSession, 1), } docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentType = contracts.StartSession
assert.Equal(t, errorCode, SubmissionPanic) } func TestDocSubmission_CheckDocSubmissionAllowedError(t *testing.T) { ctx := context.NewMockDefault() executerMock := executermocks.NewMockExecuter() creator := func(ctx context.T) executer.Executer { return executerMock } sendCommandPoolMock := new(task.MockedPool) processor := EngineProcessor{ executerCreator: creator, sendCommandPool: sendCommandPoolMock, context: ctx, documentMgr: nil, // assigning nil panics Submit() startWorker: NewWorkerProcessorSpec(ctx, 1, contracts.StartSession, 1), poolToProcessorErrorCodeMap: make(map[task.PoolErrorCode]ErrorCode), } processor.loadProcessorPoolErrorCodes() docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentInformation.InstanceID = "instanceID" docState.DocumentInformation.DocumentID = "documentID" docState.DocumentType = contracts.StartSession docMock := new(DocumentMgrMock) docMock.On("PersistDocumentState", mock.Anything, appconfig.DefaultLocationOfPending, docState) sendCommandPoolMock.On("AcquireBufferToken", mock.Anything).Return(task.DuplicateCommand) sendCommandPoolMock.On("Submit", ctx.Log(), "messageID", mock.Anything).Return(nil) sendCommandPoolMock.On("BufferTokensIssued").Return(0) sendCommandPoolMock.On("ReleaseBufferToken", mock.Anything).Return(task.PoolErrorCode("")) errorCode := processor.Submit(docState) assert.Equal(t, errorCode, DuplicateCommand) sendCommandPoolMock = new(task.MockedPool) sendCommandPoolMock.On("BufferTokensIssued").Return(0) sendCommandPoolMock.On("AcquireBufferToken", mock.Anything).Return(task.InvalidJobId) processor.sendCommandPool = sendCommandPoolMock errorCode = processor.Submit(docState) assert.Equal(t, errorCode, InvalidDocumentId) sendCommandPoolMock = new(task.MockedPool) sendCommandPoolMock.On("BufferTokensIssued").Return(0) sendCommandPoolMock.On("AcquireBufferToken", mock.Anything).Return(task.JobQueueFull) processor.sendCommandPool = sendCommandPoolMock errorCode = processor.Submit(docState) assert.Equal(t, errorCode, CommandBufferFull) } func TestDocCancellation_Panic(t *testing.T) { cancelCommandPoolMock := new(task.MockedPool) ctx := context.NewMockDefault() executerMock := executermocks.NewMockExecuter() creator := func(ctx context.T) executer.Executer { return executerMock } cancelCommandPoolMock.On("Submit", ctx.Log(), "messageID", mock.Anything).Return(nil) cancelCommandPoolMock.On("BufferTokensIssued").Return(0) cancelCommandPoolMock.On("AcquireBufferToken", mock.Anything).Return(task.PoolErrorCode("")) cancelCommandPoolMock.On("ReleaseBufferToken", mock.Anything).Return(task.PoolErrorCode("")) processor := EngineProcessor{ executerCreator: creator, cancelCommandPool: cancelCommandPoolMock, context: ctx, documentMgr: nil, // assigning nil panics Submit() startWorker: NewWorkerProcessorSpec(ctx, 1, contracts.StartSession, 1), cancelWorker: NewWorkerProcessorSpec(ctx, 1, contracts.TerminateSession, 1), } docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentType = contracts.TerminateSession errorCode := processor.Cancel(docState) assert.Equal(t, errorCode, SubmissionPanic) } //TODO add shutdown and reboot test once we encapsulate docmanager func TestProcessCommand_Shutdown(t *testing.T) { ctx := context.NewMockDefault() docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentInformation.InstanceID = "instanceID" docState.DocumentInformation.DocumentID = "documentID" executerMock := 
executermocks.NewMockExecuter() resChan := make(chan contracts.DocumentResult) statusChan := make(chan contracts.DocumentResult) cancelFlag := task.NewChanneledCancelFlag() executerMock.On("Run", cancelFlag, mock.AnythingOfType("*executer.DocumentFileStore")).Return(statusChan) // call method under test //orchestrationRootDir is set to empty such that it can meet the test expectation. creator := func(ctx context.T) executer.Executer { return executerMock } go func() { //executer shutdown close(statusChan) }() docMock := new(DocumentMgrMock) docMock.On("MoveDocumentState", "documentID", appconfig.DefaultLocationOfPending, appconfig.DefaultLocationOfCurrent) processCommand(ctx, creator, cancelFlag, resChan, &docState, docMock) executerMock.AssertExpectations(t) docMock.AssertExpectations(t) close(resChan) //assert channel is not closed, each instance of Processor keeps a distinct copy of channel assert.NotNil(t, resChan) //TODO assert document file is not moved } func TestProcessCancelCommand_Success(t *testing.T) { ctx := context.NewMockDefault() sendCommandPoolMock := new(task.MockedPool) docState := contracts.DocumentState{} docState.CancelInformation.CancelMessageID = "messageID" sendCommandPoolMock.On("Cancel", "messageID").Return(true) docMock := new(DocumentMgrMock) docMock.On("MoveDocumentState", "", appconfig.DefaultLocationOfPending, appconfig.DefaultLocationOfCurrent) docMock.On("RemoveDocumentState", "", appconfig.DefaultLocationOfCurrent, mock.Anything) processCancelCommand(ctx, sendCommandPoolMock, &docState, docMock) sendCommandPoolMock.AssertExpectations(t) docMock.AssertExpectations(t) assert.Equal(t, docState.DocumentInformation.DocumentStatus, contracts.ResultStatusSuccess) } type DocumentMgrMock struct { mock.Mock } func (m *DocumentMgrMock) MoveDocumentState(fileName, srcLocationFolder, dstLocationFolder string) { m.Called(fileName, srcLocationFolder, dstLocationFolder) return } func (m *DocumentMgrMock) PersistDocumentState(fileName, locationFolder string, state contracts.DocumentState) { m.Called(fileName, locationFolder, state) return } func (m *DocumentMgrMock) GetDocumentState(fileName, locationFolder string) contracts.DocumentState { args := m.Called(fileName, locationFolder) return args.Get(0).(contracts.DocumentState) } func (m *DocumentMgrMock) RemoveDocumentState(documentID, location string) { m.Called(documentID, location) return }
errorCode := processor.Submit(docState)
viz.py
# FIXME: graphviz is not a required dependency.
from graphviz import Digraph


def graph2dot(x, **kwargs):
    """Render the computation path of `x` as a graphviz Digraph.

    Input nodes are drawn green, parameter nodes gold, and all other nodes use
    the default style; edges follow each node's inputs.
    """
dot = Digraph(body=["rankdir=LR;"], **kwargs) path = x.get_computation_path() for i in path: if i.is_input: dot.node(str(i.id), i.name, color="green") elif i.is_parameter: dot.node(str(i.id), i.name, color="gold") else: dot.node(str(i.id), i.name) for ii in i.inputs: dot.edge(str(ii.id), str(i.id)) return dot
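# Hypothetical usage sketch (the graph node `y` and file name are assumed, not
# part of this module):
#
#   dot = graph2dot(y, name="net")
#   dot.render("net.gv", view=False)  # standard graphviz.Digraph API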
deleteFloatingIp.js
/* eslint-disable no-unused-vars */ import FloatingIp from '../../models/openstack/FloatingIp'
const floatingIp = FloatingIp.findById(floatingIpId) if (!floatingIp) { console.log('Floating IP NOT found') return res.status(404).send({ err: 'Floating IP not found' }) } floatingIp.destroy() console.log('Floating IP destroyed') res.status(200).send({}) } export default deleteFloatingIp
const deleteFloatingIp = (req, res) => { // TODO: account for tenancy const { floatingIpId } = req.params console.log('Attempting to delete floatingIpId: ', floatingIpId)
mod.rs
pub mod inspect;

use anyhow::Result;
use clap::{App, ArgMatches};
use crate::commands::{CommandRunner, CommandConfig};

// To add a beta subcommand, add your new command to the `beta_subcommands`
// and `runner_for_beta_subcommand` functions.

// Creates a Vec of CLI configurations for all of the available built-in commands
pub fn beta_subcommands() -> Vec<CommandConfig> {
    vec![
        inspect::app(),
    ]
}

pub fn runner_for_beta_subcommand(command_name: &str) -> Option<CommandRunner> {
"inspect" => inspect::run, _ => return None }; Some(runner) } // The functions below are used by the top-level `ion` command when `beta` is invoked. pub fn run(_command_name: &str, matches: &ArgMatches<'static>) -> Result<()> { // ^-- At this level of dispatch, this command will always be the text `beta`. // We want to evaluate the name of the subcommand that was invoked --v let (command_name, command_args) = matches.subcommand(); if let Some(runner) = runner_for_beta_subcommand(command_name) { // If a runner is registered for the given command name, command_args is guaranteed to // be defined; we can safely unwrap it. runner(command_name, command_args.unwrap())?; } else { let message = format!( "The requested beta command ('{}') is not supported and clap did not generate an error message.", command_name ); unreachable!(message); } Ok(()) } pub fn app() -> CommandConfig { App::new("beta") .about( "The 'beta' command is a namespace for commands whose interfaces are not yet stable.", ) .subcommands(beta_subcommands()) }
let runner = match command_name {
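// A hedged sketch of the registration steps described in the comment above;
// the `dump` module and its functions are hypothetical, not part of this crate:
//
//     pub mod dump; // would expose dump::app() -> CommandConfig and dump::run
//
//     // in beta_subcommands():           vec![inspect::app(), dump::app()]
//     // in runner_for_beta_subcommand(): "dump" => dump::run,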
conftest.py
# pylint: disable=redefined-outer-name from unittest import mock import pytest from fastapi.testclient import TestClient from app.common import cache @pytest.fixture(autouse=True, scope="function") def clear_cache(): # pylint: disable=protected-access cache._redis_cli.flushall() # noqa @pytest.fixture def chat_id(faker): return faker.pystr() @pytest.fixture def username(faker): return f"@{faker.name()}" @pytest.fixture def
(chat_id): return mock.MagicMock(message=mock.MagicMock(chat_id=chat_id, reply_text=mock.MagicMock())) @pytest.fixture def tg_context(username): return mock.MagicMock(args=[username]) @pytest.fixture def tg_bot(): # bot.get_chat(chat_id).send_message() return mock.MagicMock( get_chat=mock.MagicMock(return_value=mock.MagicMock(send_message=mock.MagicMock(return_value=None))) ) @pytest.fixture def requests_post(): return mock.MagicMock() @pytest.fixture def slack_web_client(): # pylint: disable=import-outside-toplevel from app.slack.run_bot import app return TestClient(app)
tg_update
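# Hypothetical usage sketch (the handler under test is assumed, not defined in
# this file): the fixtures above stand in for python-telegram-bot objects.
#
#   def test_handler_replies(tg_update, tg_context):
#       some_handler(tg_update, tg_context)
#       tg_update.message.reply_text.assert_called_once()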
dbw_node.py
#!/usr/bin/env python

import rospy
from std_msgs.msg import Bool
from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd, SteeringReport
from geometry_msgs.msg import TwistStamped
import math

from twist_controller import Controller

'''
You can build this node only after you have built (or partially built) the `waypoint_updater` node.

You will subscribe to the `/twist_cmd` message, which provides the proposed linear and angular
velocities. You can subscribe to any other message that you find important, or refer to the document
for the list of messages subscribed to by the reference implementation of this node.

One thing to keep in mind while building this node and the `twist_controller` class is the status of
`dbw_enabled`. While in the simulator it's enabled all the time, in the real car that will not be
the case. This may cause your PID controller to accumulate error, because the car could temporarily
be driven by a human instead of your controller.

We have provided two launch files with this node. Vehicle-specific values (like vehicle_mass and
wheel_base) should not be altered in these files.

We have also provided some reference implementations for the PID controller and other utility
classes. You are free to use them or build your own.

Once you have the proposed throttle, brake, and steer values, publish them on the various publishers
that we have created in the `__init__` function.
'''

class DBWNode(object):
    def __init__(self):
        rospy.init_node('dbw_node')

        # Vehicle Properties
        vehicle_mass = rospy.get_param('~vehicle_mass', 1736.35)
        fuel_capacity = rospy.get_param('~fuel_capacity', 13.5)
        brake_deadband = rospy.get_param('~brake_deadband', .1)
        decel_limit = rospy.get_param('~decel_limit', -5)
        accel_limit = rospy.get_param('~accel_limit', 1.)
        wheel_radius = rospy.get_param('~wheel_radius', 0.2413)
        wheel_base = rospy.get_param('~wheel_base', 2.8498)
        steer_ratio = rospy.get_param('~steer_ratio', 14.8)
        max_lat_accel = rospy.get_param('~max_lat_accel', 3.)
        max_steer_angle = rospy.get_param('~max_steer_angle', 8.)
# ROS Publishers
        self.steer_pub = rospy.Publisher('/vehicle/steering_cmd', SteeringCmd, queue_size=1)
        self.throttle_pub = rospy.Publisher('/vehicle/throttle_cmd', ThrottleCmd, queue_size=1)
        self.brake_pub = rospy.Publisher('/vehicle/brake_cmd', BrakeCmd, queue_size=1)

        # ROS Subscribers
        rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb)
        rospy.Subscriber('/twist_cmd', TwistStamped, self.twist_cb)
        rospy.Subscriber('/current_velocity', TwistStamped, self.velocity_cb)

        # DBW Controller
        self.controller = Controller(vehicle_mass=vehicle_mass,
                                     fuel_capacity=fuel_capacity,
                                     brake_deadband=brake_deadband,
                                     decel_limit=decel_limit,
                                     accel_limit=accel_limit,
                                     wheel_radius=wheel_radius,
                                     wheel_base=wheel_base,
                                     steer_ratio=steer_ratio,
                                     max_lat_accel=max_lat_accel,
                                     max_steer_angle=max_steer_angle)

        # Class Attributes
        self.current_vel = None
        self.curr_ang_vel = None
        self.dbw_enabled = None
        self.linear_vel = None
        self.angular_vel = None
        self.throttle = 0
        self.steering = 0
        self.brake = 0

        self.loop()

    def loop(self):
        rate = rospy.Rate(50)  # 50Hz
        while not rospy.is_shutdown():
            if None not in (self.current_vel, self.linear_vel, self.angular_vel):
                self.throttle, self.brake, self.steering = self.controller.control(self.current_vel,
                                                                                   self.dbw_enabled,
                                                                                   self.linear_vel,
                                                                                   self.angular_vel)
            if self.dbw_enabled:
                self.publish(self.throttle, self.brake, self.steering)
            rate.sleep()

    def publish(self, throttle, brake, steer):
        tcmd = ThrottleCmd()
        tcmd.enable = True
        tcmd.pedal_cmd_type = ThrottleCmd.CMD_PERCENT
        tcmd.pedal_cmd = throttle
        self.throttle_pub.publish(tcmd)

        scmd = SteeringCmd()
        scmd.enable = True
        scmd.steering_wheel_angle_cmd = steer
        self.steer_pub.publish(scmd)

        bcmd = BrakeCmd()
        bcmd.enable = True
        bcmd.pedal_cmd_type = BrakeCmd.CMD_TORQUE
        bcmd.pedal_cmd = brake
        self.brake_pub.publish(bcmd)

    def twist_cb(self, msg):
        self.linear_vel = msg.twist.linear.x
        self.angular_vel = msg.twist.angular.z

    def velocity_cb(self, msg):
        self.current_vel = msg.twist.linear.x

    def
(self, msg):
        self.dbw_enabled = msg.data  # std_msgs/Bool payload; the message object itself would always be truthy


if __name__ == '__main__':
    DBWNode()
dbw_enabled_cb
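# Hedged sketch of the Controller contract assumed by the node above
# (Controller lives in twist_controller and is not shown here): when
# dbw_enabled is False, the PID state should be reset so that error does not
# accumulate while a human is driving.
#
#   def control(self, current_vel, dbw_enabled, linear_vel, angular_vel):
#       if not dbw_enabled:
#           self.throttle_controller.reset()  # hypothetical PID member
#           return 0.0, 0.0, 0.0
#       ...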
auth_captcha.rs
use tarkov::auth::LoginError; use tarkov::hwid::generate_hwid; use tarkov::{Error, Tarkov}; #[tokio::main] async fn main() -> Result<(), Error> { std::env::set_var("RUST_LOG", "tarkov=info"); env_logger::init(); let email = "[email protected]"; let password = "password"; let hwid = generate_hwid(); let t = match Tarkov::login(email, password, &hwid).await { Ok(t) => Ok(t), Err(Error::LoginError(e)) => match e { // Captcha required! LoginError::CaptchaRequired =>
_ => Err(e)?, }, Err(e) => Err(e), }?; println!("{}", t.session); Ok(()) }
{ // Solve captcha here and try again let captcha = "03AOLTBLQ952pO-qQYPeLr53N5nK9Co14iXyCp..."; Tarkov::login_with_captcha(email, password, captcha, &hwid).await }
pipelines.py
# Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import json import socket import scrapy
import hashlib from scrapy.exceptions import DropItem from scrapy.pipelines.images import ImagesPipeline from scrapy.utils.project import get_project_settings s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) count = 0 class ImagesPipeline(ImagesPipeline): def get_media_requests(self, item, info): for image_url in item['image_urls']: yield scrapy.Request(image_url.strip()) def item_completed(self, results, item, info): image_paths = [x['path'] for ok, x in results if ok] if not image_paths: raise DropItem("Item contains no images") return item class JsonWriterPipeline(object): def open_spider(self, spider): self.file = open('chuansong_items.json', 'w') def close_spider(self, spider): self.file.close() def process_item(self, item, spider): global count count += 1 if spider.settings.get('COUNT_DATA') and count % 100 == 0: s.sendto(u"8c5d9918967dd5901fcbadab29308672, %s" % count, ('www.anycrawl.info', 3500)) line = json.dumps(dict(item)) + "\n" self.file.write(line) return item class CsvWriterPipeline(object): def open_spider(self, spider): self.file = open('chuansong_items.csv', 'w') def close_spider(self, spider): self.file.close() def process_item(self, item, spider): line = "\t".join(dict(item).values()) self.file.write(line.encode('utf-8')) return item class MongoPipeline(object): def open_spider(self, spider): import pymongo host = spider.settings.get('MONGODB_HOST') port = spider.settings.get('MONGODB_PORT') db_name = spider.settings.get('MONGODB_DBNAME') client = pymongo.MongoClient(host=host, port=port) db = client[db_name] self.collection = db[spider.settings.get('MONGODB_DOCNAME')] def close_spider(self, spider): pass def process_item(self, item, spider): self.collection.insert(dict(item)) return item class ElasticSearchPipeline(object): def __init__(self): from pyes import ES self.settings = get_project_settings() if self.settings['ELASTICSEARCH_PORT']: uri = "%s:%d" % (self.settings['ELASTICSEARCH_SERVER'], self.settings['ELASTICSEARCH_PORT']) else: uri = "%s" % (self.settings['ELASTICSEARCH_SERVER']) self.es = ES([uri]) def process_item(self, item, spider): if self.__get_uniq_key() is None: self.es.index(dict(item), self.settings['ELASTICSEARCH_INDEX'], self.settings['ELASTICSEARCH_TYPE'], id=item['id'], op_type='create',) else: self.es.index(dict(item), self.settings['ELASTICSEARCH_INDEX'], self.settings['ELASTICSEARCH_TYPE'], self._get_item_key(item)) return item def _get_item_key(self, item): uniq = self.__get_uniq_key() if isinstance(uniq, list): values = [item[key] for key in uniq] value = ''.join(values) else: value = uniq return hashlib.sha1(value).hexdigest() def __get_uniq_key(self): if not self.settings['ELASTICSEARCH_UNIQ_KEY'] or self.settings['ELASTICSEARCH_UNIQ_KEY'] == "": return None return self.settings['ELASTICSEARCH_UNIQ_KEY']
Submission S3.py
'''
Exercise 6: Scrape more information about TripAdvisor reviews

url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"

Please write code that prints out the review content, numeric rating, title, date, and reviewer's
username of ALL the 10 reviews on the FIRST page of a particular restaurant
'''
#this is Patrick's comment
import csv
import requests
from bs4 import BeautifulSoup

def scrapecontent(url):
    scrape_response = requests.get(url)
    print(scrape_response.status_code)
    if scrape_response.status_code == 200:
        soup = BeautifulSoup(scrape_response.text, 'html.parser')
        return soup
    else:
        print('Error accessing url: ', scrape_response.status_code)
        return None

def main():
    scrape_url = 'https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html'
    ret_soup = scrapecontent(scrape_url)
    # print(ret_soup.find_all("div", class_="prw_rup prw_reviews_review_resp"))
    if ret_soup:
        count = 1
        for rev_data in ret_soup.find_all("div", class_="prw_rup prw_reviews_review_resp"):
            print(rev_data)
            print('review number: ', count)
            title = rev_data.find('span', class_='noQuotes')
            print('title: ', title.text)
review = rev_data.find('p', class_='partial_entry')
            print('review content: ', review.text)
            rating = rev_data.find('span', class_='ui_bubble_rating')
            print('numeric rating: ', int(int(rating['class'][1][7:])/10))
            date = rev_data.find('span', class_='ratingDate')
            print('date: ', date['title'])
            username = rev_data.find('div', class_='info_text pointer_cursor')
            print("reviewer's username: ", username.text)
            count += 1
            print('\n')

main()

'''
Exercise 7: Predict the sentiment (positive, negative, neutral) of review text

url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"

for ALL the 10 reviews on the FIRST page of a particular restaurant:
Using the corpus of word sentiment in the word_sentiment.csv file, calculate the sentiment of the
review texts. If the sentiment score is positive, the sentiment is positive; if the sentiment score
is negative, the sentiment is negative; if the sentiment score is zero, the sentiment is neutral.
'''

SENTIMENT_CSV = "/content/word_sentiment.csv"
NEGATIVE_WORDS = ["not", "don't", "doesn't"]

def word_sentiment(word):
    with open(SENTIMENT_CSV, 'rt', encoding='utf-8') as senti_data:
        sentiment = csv.reader(senti_data)
        for data_row in sentiment:
            if data_row[0] == word.lower():
                sentiment_val = data_row[1]
                return sentiment_val
    return 0

def sentiment(sentence):
    sentiment = 0
    words_list = sentence.split()
    for i, word in enumerate(words_list):
        # Flip the sign when the preceding word is a negation like "not".
        if i > 0 and words_list[i - 1] in NEGATIVE_WORDS:
            sentiment = sentiment + -1 * int(word_sentiment(word))
        else:
            sentiment = sentiment + int(word_sentiment(word))
    return sentiment

scrape_url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
response = requests.get(scrape_url)
print(response.status_code)

def review_sentiment():
    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')
        count = 1
        print("These are the sentiments of each review: ")
        for review in soup.find_all('p', class_='partial_entry'):
            pure_review = review.text.lower()
            review_sentiment = sentiment(pure_review)
            if review_sentiment > 0:
                print("The sentiment of review ", count, "is positive")
            elif review_sentiment == 0:
                print("The sentiment of review ", count, "is neutral")
            else:
                print("The sentiment of review ", count, "is negative")
            count += 1

review_sentiment()

'''
Exercise 8: Predict the sentiment (positive, negative, neutral) of review text and compare it with
the ground truth (the actual review rating)

url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"

for ALL the 10 reviews on the FIRST page of a particular restaurant:
Using the corpus of word sentiment in the word_sentiment.csv file, calculate the sentiment of the
review texts as the predicted sentiment:
If the sentiment score is positive, the sentiment is positive; if the sentiment score is negative,
the sentiment is negative; if the sentiment score is zero, the sentiment is neutral.
Scrape the review rating of the reviews, and get the ground truth:
if the rating is greater than 3, the sentiment is positive; if the rating is less than 3, the
sentiment is negative; if the rating is equal to 3, the sentiment is neutral.
Question: Compute the prediction accuracy (hit rate) for the 10 reviews, i.e., how many times are
the predictions correct?
'''

SENTIMENT_CSV = "/content/word_sentiment.csv"
NEGATIVE_WORDS = ["not", "don't", "doesn't"]

def word_sentiment(word):
    """This function uses the word_sentiment.csv file to find the sentiment of the word entered"""
    with open(SENTIMENT_CSV, 'rt', encoding='utf-8') as senti_data:
        sentiment = csv.reader(senti_data)
        for data_row in sentiment:
            if data_row[0] == word.lower():
                sentiment_val = data_row[1]
                return sentiment_val
    return 0

def sentiment(sentence):
    sentiment = 0
    words_list = sentence.split()
    for i, word in enumerate(words_list):
        # Flip the sign when the preceding word is a negation like "not".
        if i > 0 and words_list[i - 1] in NEGATIVE_WORDS:
            sentiment = sentiment + -1 * int(word_sentiment(word))
        else:
            sentiment = sentiment + int(word_sentiment(word))
    return sentiment

scrape_url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
response = requests.get(scrape_url)
print(response.status_code)

def accuracy():
    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')
        review_list = []
        for review in soup.find_all('p', class_='partial_entry'):
            pure_review = review.text.lower()
            review_sentiment = sentiment(pure_review)
            if review_sentiment > 0:
                review_list.append('positive')
            elif review_sentiment == 0:
                review_list.append('neutral')
            else:
                review_list.append('negative')

    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')
        rating_list = []
        for review in soup.find_all('div', class_='ui_column is-9'):
            for rating in review.find_all('span', class_='ui_bubble_rating'):
                actual_rating = int(int(rating['class'][1][7:])/10)
                if actual_rating > 3:
                    rating_list.append('positive')
                elif actual_rating == 3:
                    rating_list.append('neutral')
                else:
                    rating_list.append('negative')

    matches = len([i for i, j in zip(rating_list, review_list) if i == j])
    accuracy = matches / len(rating_list)
    print('The prediction accuracy is', accuracy)

accuracy()
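# Worked example of the negation rule in sentiment() above (the word scores
# are hypothetical and depend on word_sentiment.csv): if word_sentiment("good")
# is +2 and both "not" and "food" score 0, then sentiment("good food") == 2,
# while sentiment("not good") == -2 because the preceding word "not" is in
# NEGATIVE_WORDS and flips the sign.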
main.rs
use clap::{App, Arg}; use std::{fs, process}; #[derive(Debug)] enum Row { Front, Back } #[derive(Debug)] enum Column { Left, Right } #[derive(Debug)] struct Partition { rows: Vec<Row>, columns: Vec<Column>, } fn find_seat_row(p: &[Row]) -> usize { assert!(p.len() == 7); let res = p.iter().fold((0, 127), |(min, max), r| { let mid = min + ((max - min) / 2); match r { Row::Front => (min, mid), Row::Back => (mid+1, max), } }); assert!(res.0 == res.1); res.0 } fn find_seat_column(p: &[Column]) -> usize { assert!(p.len() == 3); let res = p.iter().fold((0, 7), |(min, max), r| { let mid = min + ((max - min) / 2); match r { Column::Left => (min, mid), Column::Right => (mid+1, max), } }); assert!(res.0 == res.1); res.0 } fn find_seat(p: &Partition) -> (usize, usize) { (find_seat_row(&p.rows), find_seat_column(&p.columns)) } fn seat_id((row, column): (usize, usize)) -> usize { row * 8 + column } fn parse_partition(s: &str) -> Option<Partition> { if s.len() != 10 { return None } let r: Option<Vec<_>> = s[..7].chars().map(|c| { match c { 'F' => Some(Row::Front), 'B' => Some(Row::Back), _ => None } }).collect(); let rows = r?; let c: Option<Vec<_>> = s[7..].chars().map(|c| { match c { 'L' => Some(Column::Left), 'R' => Some(Column::Right), _ => None } }).collect(); let columns = c?; Some(Partition{rows, columns}) } fn part1(partitions: &[Partition]) -> usize { partitions.iter() .map(find_seat) .map(seat_id) .max() .unwrap() } fn part2(partitions: &[Partition]) -> usize { let mut seat_ids: Vec<_> = partitions.iter() .map(find_seat) .map(seat_id) .collect(); seat_ids.sort(); for i in 1..seat_ids.len() { if seat_ids[i] - seat_ids[i-1] == 2 { return seat_ids[i-1] + 1; } } 0 } fn
() {
    let matches = App::new("AOC2020 Day5")
        .arg(Arg::with_name("input")
            .long("input")
            .required(true)
            .takes_value(true))
        .arg(Arg::with_name("part")
            .long("part")
            .required(true)
            .takes_value(true))
        .get_matches();

    let input_file = matches.value_of("input").unwrap();
    let path = fs::canonicalize(input_file).expect("file does not exist");
    let contents = fs::read_to_string(path).expect("reading input");

    let partitions: Vec<_> = contents.split('\n')
        .filter(|l| !l.is_empty())
        .enumerate()
        .map(|(i, l)| parse_partition(l)
            .ok_or(format!("invalid partition at input row {}", i)))
        .collect::<Result<Vec<_>, String>>()
        .unwrap_or_else(|e| {
            eprintln!("Error parsing input:\n{}", e);
            process::exit(1);
        });

    let part = matches.value_of("part").unwrap();
    if part == "1" {
        println!("{}", part1(&partitions));
    } else {
        println!("{}", part2(&partitions));
    }
}
main
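// Worked example for part2 above: with sorted seat IDs [11, 12, 14], the gap
// of 2 between 12 and 14 identifies the one missing seat, so part2 returns
// 12 + 1 == 13.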
serviceManager.ts
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. import { Container, injectable, interfaces } from 'inversify'; import { Abstract, ClassType, IServiceManager, Newable } from './types'; type identifier<T> = string | symbol | Newable<T> | Abstract<T>; @injectable() export class ServiceManager implements IServiceManager { constructor(private container: Container) {} public add<T>( serviceIdentifier: identifier<T>, // tslint:disable-next-line:no-any constructor: new (...args: any[]) => T, name?: string | number | symbol | undefined, bindings?: symbol[] ): void { if (name) { this.container.bind<T>(serviceIdentifier).to(constructor).whenTargetNamed(name); } else { this.container.bind<T>(serviceIdentifier).to(constructor); } if (bindings) { bindings.forEach((binding) => { this.addBinding(serviceIdentifier, binding); }); } } public addFactory<T>( factoryIdentifier: interfaces.ServiceIdentifier<interfaces.Factory<T>>, factoryMethod: interfaces.FactoryCreator<T> ): void { this.container.bind<interfaces.Factory<T>>(factoryIdentifier).toFactory<T>(factoryMethod); } public addBinding<T1, T2>(from: identifier<T1>, to: identifier<T2>): void { this.container.bind(to).toService(from); } public addSingleton<T>( serviceIdentifier: identifier<T>, // tslint:disable-next-line:no-any constructor: new (...args: any[]) => T, name?: string | number | symbol | undefined, bindings?: symbol[] ): void { if (name) { this.container.bind<T>(serviceIdentifier).to(constructor).inSingletonScope().whenTargetNamed(name); } else { this.container.bind<T>(serviceIdentifier).to(constructor).inSingletonScope(); } if (bindings) { bindings.forEach((binding) => { this.addBinding(serviceIdentifier, binding); }); } } public addSingletonInstance<T>( serviceIdentifier: identifier<T>, instance: T, name?: string | number | symbol | undefined ): void { if (name) { this.container.bind<T>(serviceIdentifier).toConstantValue(instance).whenTargetNamed(name); } else { this.container.bind<T>(serviceIdentifier).toConstantValue(instance); } } public get<T>(serviceIdentifier: identifier<T>, name?: string | number | symbol | undefined): T { return name ? this.container.getNamed<T>(serviceIdentifier, name) : this.container.get<T>(serviceIdentifier); } public getAll<T>(serviceIdentifier: identifier<T>, name?: string | number | symbol | undefined): T[] { return name ? this.container.getAllNamed<T>(serviceIdentifier, name) : this.container.getAll<T>(serviceIdentifier); } public rebind<T>( serviceIdentifier: interfaces.ServiceIdentifier<T>, constructor: ClassType<T>, name?: string | number | symbol ): void { if (name) { this.container.rebind<T>(serviceIdentifier).to(constructor).whenTargetNamed(name); } else { this.container.rebind<T>(serviceIdentifier).to(constructor); } } public rebindInstance<T>( serviceIdentifier: interfaces.ServiceIdentifier<T>, instance: T, name?: string | number | symbol
if (name) { this.container.rebind<T>(serviceIdentifier).toConstantValue(instance).whenTargetNamed(name); } else { this.container.rebind<T>(serviceIdentifier).toConstantValue(instance); } } public dispose() { this.container.unbindAll(); this.container.unload(); } }
): void {
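// Hypothetical usage sketch (the symbol and classes are assumed, not part of
// this file): registering a singleton and resolving it through the manager.
//
//   const services = new ServiceManager(new Container());
//   services.addSingleton<ILogger>(LoggerSymbol, ConsoleLogger);
//   const logger = services.get<ILogger>(LoggerSymbol);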
ornstein_uhlenbeck.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from .annealed_guassian import AnnealedGaussianProcess
__author__ = "Christian Heider Nielsen" # Based on http://math.stackexchange.com/questions/1287634/implementing-ornstein-uhlenbeck-in-matlab import numpy __all__ = ["OrnsteinUhlenbeckProcess"] class OrnsteinUhlenbeckProcess(AnnealedGaussianProcess): def __init__( self, *, theta: float = 0.15, mean: float = 0.0, sigma: float = 1.0, dt: float = 1e-2, x_0=None, sigma_min: float = None, n_steps_annealing: int = 1000, **kwargs ): super().__init__( mean=mean, sigma=sigma, sigma_min=sigma_min, n_steps_annealing=n_steps_annealing, **kwargs ) self.theta = theta self.mean = mean self.dt = dt self.x_0 = x_0 self.reset() def sample(self, size): x = ( self.x_prev + self.theta * (self.mean - self.x_prev) * self.dt + self.current_sigma * numpy.sqrt(self.dt) * numpy.random.normal(size=size) ) self.x_prev = x self.n_steps += 1 return x def reset(self): super().reset() self.x_prev = self.x_0 if self.x_0 is not None else numpy.zeros_like(self.x_0) if __name__ == "__main__": random_process = OrnsteinUhlenbeckProcess(theta=0.5) for i in range(1000): print(random_process.sample((2, 1)))
localization.js
const fs = require("fs"); let getLocalString = function(key, local="de") { let locales = JSON.parse(fs.readFileSync(`${__dirname}/local/${local}.json`).toString()); if(locales[key]) { let locale = locales[key]; if(Array.isArray(locale)) { locale = locale.join('\n'); } return locale; } else { return key; }
if(format) { Object.keys(format).forEach(key => { result = result.replace(`{k:${key}}`, format[key]); }); } return result; }; module.exports = { formatLocalString }
}; let formatLocalString = function(key, format, local="de") { let result = getLocalString(key, local);
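// Hypothetical usage sketch (the locale file contents are assumed, not part of
// this module): given de.json containing { "greeting": "Hallo {k:name}!" },
//
//   formatLocalString("greeting", { name: "Anna" })  // -> "Hallo Anna!"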
helpers.py
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import re import warnings from datetime import datetime from functools import reduce from itertools import filterfalse, tee from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, TypeVar from urllib import parse from flask import url_for from jinja2 import Template from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.utils.module_loading import import_string KEY_REGEX = re.compile(r'^[\w.-]+$') def validate_key(k, max_length=250): """Validates value used as a key.""" if not isinstance(k, str): raise TypeError("The key has to be a string") elif len(k) > max_length: raise AirflowException(f"The key has to be less than {max_length} characters") elif not KEY_REGEX.match(k): raise AirflowException( "The key ({k}) has to be made of alphanumeric characters, dashes, " "dots and underscores exclusively".format(k=k) ) else: return True def alchemy_to_dict(obj: Any) -> Optional[Dict]: """Transforms a SQLAlchemy model instance into a dictionary""" if not obj: return None output = {} for col in obj.__table__.columns: value = getattr(obj, col.name) if isinstance(value, datetime): value = value.isoformat() output[col.name] = value return output def ask_yesno(question): """Helper to get yes / no answer from user.""" yes = {'yes', 'y'} no = {'no', 'n'} done = False print(question) while not done: choice = input().lower() if choice in yes: return True elif choice in no: return False else: print("Please respond by yes or no.") def is_container(obj): """Test if an object is a container (iterable) but not a string""" return hasattr(obj, '__iter__') and not isinstance(obj, str) def as_tuple(obj): """ If obj is a container, returns obj as a tuple. Otherwise, returns a tuple containing obj. 
""" if is_container(obj): return tuple(obj) else: return tuple([obj]) T = TypeVar('T') S = TypeVar('S') def chunks(items: List[T], chunk_size: int) -> Generator[List[T], None, None]: """Yield successive chunks of a given size from a list of items""" if chunk_size <= 0: raise ValueError('Chunk size must be a positive integer') for i in range(0, len(items), chunk_size): yield items[i : i + chunk_size] def reduce_in_chunks(fn: Callable[[S, List[T]], S], iterable: List[T], initializer: S, chunk_size: int = 0): """ Reduce the given list of items by splitting it into chunks of the given size and passing each chunk through the reducer """ if len(iterable) == 0: return initializer if chunk_size == 0: chunk_size = len(iterable) return reduce(fn, chunks(iterable, chunk_size), initializer) def as_flattened_list(iterable: Iterable[Iterable[T]]) -> List[T]: """ Return an iterable with one level flattened >>> as_flattened_list((('blue', 'red'), ('green', 'yellow', 'pink'))) ['blue', 'red', 'green', 'yellow', 'pink'] """ return [e for i in iterable for e in i] def parse_template_string(template_string): """Parses Jinja template string.""" if "{{" in template_string: # jinja mode return None, Template(template_string) else: return template_string, None def render_log_filename(ti, try_number, filename_template):
def convert_camel_to_snake(camel_str):
    """Converts CamelCase to snake_case."""
    return re.sub('(?!^)([A-Z]+)', r'_\1', camel_str).lower()


def merge_dicts(dict1, dict2):
    """
    Merge two dicts recursively, returning new dict (input dict is not mutated).

    Lists are not concatenated. Items in dict2 overwrite those also found in dict1.
    """
    merged = dict1.copy()
    for k, v in dict2.items():
        if k in merged and isinstance(v, dict):
            merged[k] = merge_dicts(merged.get(k, {}), v)
        else:
            merged[k] = v
    return merged


def partition(pred: Callable, iterable: Iterable):
    """Use a predicate to partition entries into false entries and true entries"""
    iter_1, iter_2 = tee(iterable)
    return filterfalse(pred, iter_1), filter(pred, iter_2)


def chain(*args, **kwargs):
    """This function is deprecated. Please use `airflow.models.baseoperator.chain`."""
    warnings.warn(
        "This function is deprecated. Please use `airflow.models.baseoperator.chain`.",
        DeprecationWarning,
        stacklevel=2,
    )
    return import_string('airflow.models.baseoperator.chain')(*args, **kwargs)


def cross_downstream(*args, **kwargs):
    """This function is deprecated. Please use `airflow.models.baseoperator.cross_downstream`."""
    warnings.warn(
        "This function is deprecated. Please use `airflow.models.baseoperator.cross_downstream`.",
        DeprecationWarning,
        stacklevel=2,
    )
    return import_string('airflow.models.baseoperator.cross_downstream')(*args, **kwargs)


def build_airflow_url_with_query(query: Dict[str, Any]) -> str:
    """
    Build airflow url using base_url and default_view and provided query

    For example:
    'http://0.0.0.0:8000/base/graph?dag_id=my-task&root=&execution_date=2020-10-27T10%3A59%3A25.615587'
    """
    view = conf.get('webserver', 'dag_default_view').lower()
    url = url_for(f"Airflow.{view}")
    return f"{url}?{parse.urlencode(query)}"
""" Given task instance, try_number, filename_template, return the rendered log filename :param ti: task instance :param try_number: try_number of the task :param filename_template: filename template, which can be jinja template or python string template """ filename_template, filename_jinja_template = parse_template_string(filename_template) if filename_jinja_template: jinja_context = ti.get_template_context() jinja_context['try_number'] = try_number return filename_jinja_template.render(**jinja_context) return filename_template.format( dag_id=ti.dag_id, task_id=ti.task_id, execution_date=ti.execution_date.isoformat(), try_number=try_number, )
company_settings_altinn_search_parameters.go
// Code generated by go-swagger; DO NOT EDIT. package altinn // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "context" "net/http" "time" "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" "github.com/go-openapi/strfmt" ) // NewCompanySettingsAltinnSearchParams creates a new CompanySettingsAltinnSearchParams object // with the default values initialized. func NewCompanySettingsAltinnSearchParams() *CompanySettingsAltinnSearchParams { var () return &CompanySettingsAltinnSearchParams{ timeout: cr.DefaultTimeout, } } // NewCompanySettingsAltinnSearchParamsWithTimeout creates a new CompanySettingsAltinnSearchParams object // with the default values initialized, and the ability to set a timeout on a request func NewCompanySettingsAltinnSearchParamsWithTimeout(timeout time.Duration) *CompanySettingsAltinnSearchParams { var () return &CompanySettingsAltinnSearchParams{ timeout: timeout, } } // NewCompanySettingsAltinnSearchParamsWithContext creates a new CompanySettingsAltinnSearchParams object // with the default values initialized, and the ability to set a context for a request func NewCompanySettingsAltinnSearchParamsWithContext(ctx context.Context) *CompanySettingsAltinnSearchParams { var () return &CompanySettingsAltinnSearchParams{ Context: ctx, } } // NewCompanySettingsAltinnSearchParamsWithHTTPClient creates a new CompanySettingsAltinnSearchParams object // with the default values initialized, and the ability to set a custom HTTPClient for a request func NewCompanySettingsAltinnSearchParamsWithHTTPClient(client *http.Client) *CompanySettingsAltinnSearchParams { var () return &CompanySettingsAltinnSearchParams{ HTTPClient: client, } } /*CompanySettingsAltinnSearchParams contains all the parameters to send to the API endpoint for the company settings altinn search operation typically these are written to a http.Request */ type CompanySettingsAltinnSearchParams struct { /*Fields Fields filter pattern */ Fields *string timeout time.Duration Context context.Context HTTPClient *http.Client } // WithTimeout adds the timeout to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) WithTimeout(timeout time.Duration) *CompanySettingsAltinnSearchParams { o.SetTimeout(timeout) return o } // SetTimeout adds the timeout to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) SetTimeout(timeout time.Duration) { o.timeout = timeout } // WithContext adds the context to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) WithContext(ctx context.Context) *CompanySettingsAltinnSearchParams { o.SetContext(ctx) return o } // SetContext adds the context to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) SetContext(ctx context.Context) { o.Context = ctx }
o.SetHTTPClient(client) return o } // SetHTTPClient adds the HTTPClient to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } // WithFields adds the fields to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) WithFields(fields *string) *CompanySettingsAltinnSearchParams { o.SetFields(fields) return o } // SetFields adds the fields to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) SetFields(fields *string) { o.Fields = fields } // WriteToRequest writes these params to a swagger request func (o *CompanySettingsAltinnSearchParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { if err := r.SetTimeout(o.timeout); err != nil { return err } var res []error if o.Fields != nil { // query param fields var qrFields string if o.Fields != nil { qrFields = *o.Fields } qFields := qrFields if qFields != "" { if err := r.SetQueryParam("fields", qFields); err != nil { return err } } } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil }
// WithHTTPClient adds the HTTPClient to the company settings altinn search params func (o *CompanySettingsAltinnSearchParams) WithHTTPClient(client *http.Client) *CompanySettingsAltinnSearchParams {
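// Usage sketch (illustrative, not part of the generated file above): the
// params object is a fluent builder, so a caller chains the With* setters
// before handing the result to the generated client. The helper name and the
// "id,name" fields pattern below are hypothetical.
package altinn

import (
	"context"
	"time"
)

func exampleCompanySettingsAltinnSearchParams() *CompanySettingsAltinnSearchParams {
	fields := "id,name" // hypothetical fields filter pattern
	return NewCompanySettingsAltinnSearchParams().
		WithTimeout(30 * time.Second).
		WithContext(context.Background()).
		WithFields(&fields)
}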
lib.rs
use crate::model::application_map::ApplicationMap; use crate::model::identity_map::IdentityMap; use candid::Principal; use std::cell::RefCell; use std::collections::HashSet; use types::VerificationCode; use utils::env::{EmptyEnvironment, Environment}; use utils::event_stream::EventStream; mod lifecycle; mod model; mod queries; mod updates; #[allow(clippy::all)] mod internet_identity; pub const CONFIRMATION_CODE_EXPIRY_MILLIS: u64 = 60 * 60 * 1000; // 1 hour thread_local! { pub static RUNTIME_STATE: RefCell<RuntimeState> = RefCell::default(); } pub struct RuntimeState { pub env: Box<dyn Environment>, pub data: Data, } impl RuntimeState { pub fn new(env: Box<dyn Environment>, data: Data) -> RuntimeState { RuntimeState { env, data } } pub fn is_caller_verification_code_sender(&self) -> bool { self.data .verification_code_sender_principals .contains(&self.env.caller()) } }
env: Box::new(EmptyEnvironment {}), data: Data::default(), } } } #[derive(Default)] pub struct Data { pub verification_code_sender_principals: HashSet<Principal>, pub identities: IdentityMap, pub applications: ApplicationMap, pub verifications_to_send: EventStream<VerificationCode>, } impl Data { pub fn new(verification_code_sender_principals: Vec<Principal>) -> Data { Data { verification_code_sender_principals: verification_code_sender_principals .into_iter() .collect(), identities: IdentityMap::default(), applications: ApplicationMap::default(), verifications_to_send: EventStream::default(), } } } #[cfg(test)] mod tests { use utils::env::test::TestEnv; #[test] fn test_rand_u128() { let env = TestEnv::default(); let value = env.rand_u128(); println!("u128: {}", value); assert!(value > 0); } }
impl Default for RuntimeState { fn default() -> Self { RuntimeState {
persist_test.go
package adal import ( "encoding/json" "io/ioutil" "os" "path" "reflect" "runtime" "strings" "testing" ) const MockTokenJSON string = `{ "access_token": "accessToken", "refresh_token": "refreshToken", "expires_in": "1000", "expires_on": "2000", "not_before": "3000", "resource": "resource", "token_type": "type" }` var TestToken = Token{ AccessToken: "accessToken", RefreshToken: "refreshToken", ExpiresIn: "1000", ExpiresOn: "2000", NotBefore: "3000", Resource: "resource", Type: "type", } func writeTestTokenFile(t *testing.T, suffix string, contents string) *os.File { f, err := ioutil.TempFile(os.TempDir(), suffix) if err != nil { t.Fatalf("azure: unexpected error when creating temp file: %v", err) } defer f.Close() _, err = f.Write([]byte(contents)) if err != nil { t.Fatalf("azure: unexpected error when writing temp test file: %v", err) } return f } func TestLoadToken(t *testing.T) { f := writeTestTokenFile(t, "testloadtoken", MockTokenJSON) defer os.Remove(f.Name()) expectedToken := TestToken actualToken, err := LoadToken(f.Name()) if err != nil { t.Fatalf("azure: unexpected error loading token from file: %v", err) } if *actualToken != expectedToken { t.Fatalf("azure: failed to decode properly expected(%v) actual(%v)", expectedToken, *actualToken) } // test that LoadToken closes the file properly err = SaveToken(f.Name(), 0600, *actualToken) if err != nil { t.Fatalf("azure: could not save token after LoadToken: %v", err) } } func
(t *testing.T) {
	_, err := LoadToken("/tmp/this_file_should_never_exist_really")
	expectedSubstring := "failed to open file"
	if err == nil || !strings.Contains(err.Error(), expectedSubstring) {
		// use %v so a nil error cannot panic the failure message
		t.Fatalf("azure: failed to get correct error expected(%s) actual(%v)", expectedSubstring, err)
	}
}

func TestLoadTokenFailsBadJson(t *testing.T) {
	gibberishJSON := strings.Replace(MockTokenJSON, "expires_on", ";:\"gibberish", -1)
	f := writeTestTokenFile(t, "testloadtokenfailsbadjson", gibberishJSON)
	defer os.Remove(f.Name())

	_, err := LoadToken(f.Name())
	expectedSubstring := "failed to decode contents of file"
	if err == nil || !strings.Contains(err.Error(), expectedSubstring) {
		t.Fatalf("azure: failed to get correct error expected(%s) actual(%v)", expectedSubstring, err)
	}
}

func token() *Token {
	var token Token
	json.Unmarshal([]byte(MockTokenJSON), &token)
	return &token
}

func TestSaveToken(t *testing.T) {
	f, err := ioutil.TempFile("", "testloadtoken")
	if err != nil {
		t.Fatalf("azure: unexpected error when creating temp file: %v", err)
	}
	defer os.Remove(f.Name())
	f.Close()

	mode := os.ModePerm & 0642
	err = SaveToken(f.Name(), mode, *token())
	if err != nil {
		t.Fatalf("azure: unexpected error saving token to file: %v", err)
	}
	fi, err := os.Stat(f.Name()) // open a new stat as held ones are not fresh
	if err != nil {
		t.Fatalf("azure: stat failed: %v", err)
	}
	if runtime.GOOS != "windows" { // permissions don't work on Windows
		if perm := fi.Mode().Perm(); perm != mode {
			t.Fatalf("azure: wrong file perm. got:%s; expected:%s file:%s", perm, mode, f.Name())
		}
	}

	var actualToken Token
	var expectedToken Token

	// Unmarshal must be given pointers; passing the structs by value would
	// leave both tokens zero-valued and make the comparison pass vacuously.
	json.Unmarshal([]byte(MockTokenJSON), &expectedToken)

	contents, err := ioutil.ReadFile(f.Name())
	if err != nil {
		t.Fatalf("azure: failed to read token file %s: %v", f.Name(), err)
	}
	json.Unmarshal(contents, &actualToken)

	if !reflect.DeepEqual(actualToken, expectedToken) {
		t.Fatal("azure: token was not serialized correctly")
	}
}

func TestSaveTokenFailsNoPermission(t *testing.T) {
	pathWhereWeShouldntHavePermission := "/usr/thiswontwork/atall"
	if runtime.GOOS == "windows" {
		pathWhereWeShouldntHavePermission = path.Join(os.Getenv("windir"), "system32\\mytokendir\\mytoken")
	}
	err := SaveToken(pathWhereWeShouldntHavePermission, 0644, *token())
	expectedSubstring := "failed to create directory"
	if err == nil || !strings.Contains(err.Error(), expectedSubstring) {
		t.Fatalf("azure: failed to get correct error expected(%s) actual(%v)", expectedSubstring, err)
	}
}

func TestSaveTokenFailsCantCreate(t *testing.T) {
	tokenPath := "/thiswontwork"
	if runtime.GOOS == "windows" {
		tokenPath = path.Join(os.Getenv("windir"), "system32")
	}
	err := SaveToken(tokenPath, 0644, *token())
	expectedSubstring := "failed to create the temp file to write the token"
	if err == nil || !strings.Contains(err.Error(), expectedSubstring) {
		t.Fatalf("azure: failed to get correct error expected(%s) actual(%v)", expectedSubstring, err)
	}
}
TestLoadTokenFailsBadPath
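// Round-trip sketch (illustrative): how SaveToken and LoadToken compose in
// normal use, outside of the tests above. The path is a placeholder, 0600 is
// the conventional private mode for cached credentials, and
// exampleTokenRoundTrip is a hypothetical helper, not part of the package API.
package adal

func exampleTokenRoundTrip() (*Token, error) {
	if err := SaveToken("/tmp/example-token.json", 0600, TestToken); err != nil {
		return nil, err
	}
	return LoadToken("/tmp/example-token.json")
}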
zz_generated_client.go
package client import ( "github.com/rancher/norman/clientbase" ) type Client struct { clientbase.APIBaseClient NodePool NodePoolOperations Node NodeOperations NodeDriver NodeDriverOperations NodeTemplate NodeTemplateOperations Project ProjectOperations GlobalRole GlobalRoleOperations GlobalRoleBinding GlobalRoleBindingOperations RoleTemplate RoleTemplateOperations PodSecurityPolicyTemplate PodSecurityPolicyTemplateOperations PodSecurityPolicyTemplateProjectBinding PodSecurityPolicyTemplateProjectBindingOperations ClusterRoleTemplateBinding ClusterRoleTemplateBindingOperations ProjectRoleTemplateBinding ProjectRoleTemplateBindingOperations Cluster ClusterOperations ClusterEvent ClusterEventOperations ClusterRegistrationToken ClusterRegistrationTokenOperations Catalog CatalogOperations Template TemplateOperations TemplateVersion TemplateVersionOperations TemplateContent TemplateContentOperations Group GroupOperations GroupMember GroupMemberOperations Principal PrincipalOperations User UserOperations AuthConfig AuthConfigOperations LdapConfig LdapConfigOperations Token TokenOperations DynamicSchema DynamicSchemaOperations Preference PreferenceOperations ProjectNetworkPolicy ProjectNetworkPolicyOperations ClusterLogging ClusterLoggingOperations ProjectLogging ProjectLoggingOperations ListenConfig ListenConfigOperations Setting SettingOperations Notifier NotifierOperations ClusterAlert ClusterAlertOperations ProjectAlert ProjectAlertOperations ComposeConfig ComposeConfigOperations ResourceQuotaTemplate ResourceQuotaTemplateOperations } func NewClient(opts *clientbase.ClientOpts) (*Client, error) { baseClient, err := clientbase.NewAPIClient(opts) if err != nil { return nil, err } client := &Client{ APIBaseClient: baseClient, } client.NodePool = newNodePoolClient(client) client.Node = newNodeClient(client) client.NodeDriver = newNodeDriverClient(client) client.NodeTemplate = newNodeTemplateClient(client) client.Project = newProjectClient(client) client.GlobalRole = newGlobalRoleClient(client) client.GlobalRoleBinding = newGlobalRoleBindingClient(client) client.RoleTemplate = newRoleTemplateClient(client) client.PodSecurityPolicyTemplate = newPodSecurityPolicyTemplateClient(client) client.PodSecurityPolicyTemplateProjectBinding = newPodSecurityPolicyTemplateProjectBindingClient(client) client.ClusterRoleTemplateBinding = newClusterRoleTemplateBindingClient(client) client.ProjectRoleTemplateBinding = newProjectRoleTemplateBindingClient(client) client.Cluster = newClusterClient(client) client.ClusterEvent = newClusterEventClient(client) client.ClusterRegistrationToken = newClusterRegistrationTokenClient(client) client.Catalog = newCatalogClient(client) client.Template = newTemplateClient(client) client.TemplateVersion = newTemplateVersionClient(client) client.TemplateContent = newTemplateContentClient(client) client.Group = newGroupClient(client) client.GroupMember = newGroupMemberClient(client) client.Principal = newPrincipalClient(client)
client.DynamicSchema = newDynamicSchemaClient(client) client.Preference = newPreferenceClient(client) client.ProjectNetworkPolicy = newProjectNetworkPolicyClient(client) client.ClusterLogging = newClusterLoggingClient(client) client.ProjectLogging = newProjectLoggingClient(client) client.ListenConfig = newListenConfigClient(client) client.Setting = newSettingClient(client) client.Notifier = newNotifierClient(client) client.ClusterAlert = newClusterAlertClient(client) client.ProjectAlert = newProjectAlertClient(client) client.ComposeConfig = newComposeConfigClient(client) client.ResourceQuotaTemplate = newResourceQuotaTemplateClient(client) return client, nil }
client.User = newUserClient(client) client.AuthConfig = newAuthConfigClient(client) client.LdapConfig = newLdapConfigClient(client) client.Token = newTokenClient(client)
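// Usage sketch (illustrative): the aggregated client is constructed once and
// every resource type is reached through its field. The endpoint URL and keys
// are placeholders, and exampleNewClient is a hypothetical helper.
package client

import "github.com/rancher/norman/clientbase"

func exampleNewClient() (*Client, error) {
	return NewClient(&clientbase.ClientOpts{
		URL:       "https://rancher.example.com/v3", // placeholder endpoint
		AccessKey: "token-xxxxx",                    // placeholder credentials
		SecretKey: "secret",
	})
}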
store.go
package evidence import ( "fmt" dbm "github.com/tendermint/tm-db" "github.com/tendermint/tendermint/types" ) /* Requirements: - Valid new evidence must be persisted immediately and never forgotten - Uncommitted evidence must be continuously broadcast - Uncommitted evidence has a partial order, the evidence's priority Impl: - First commit atomically in outqueue, pending, lookup. - Once broadcast, remove from outqueue. No need to sync - Once committed, atomically remove from pending and update lookup. Schema for indexing evidence (note you need both height and hash to find a piece of evidence): "evidence-lookup"/<evidence-height>/<evidence-hash> -> Info "evidence-outqueue"/<priority>/<evidence-height>/<evidence-hash> -> Info "evidence-pending"/<evidence-height>/<evidence-hash> -> Info */ type Info struct { Committed bool Priority int64 Evidence types.Evidence } const ( baseKeyLookup = "evidence-lookup" // all evidence baseKeyOutqueue = "evidence-outqueue" // not-yet broadcast baseKeyPending = "evidence-pending" // broadcast but not committed ) func keyLookup(evidence types.Evidence) []byte { return keyLookupFromHeightAndHash(evidence.Height(), evidence.Hash()) } // big endian padded hex func bE(h int64) string { return fmt.Sprintf("%0.16X", h) } func keyLookupFromHeightAndHash(height int64, hash []byte) []byte { if hash == nil { return _key("%s/%s", baseKeyLookup, bE(height)) } return _key("%s/%s/%X", baseKeyLookup, bE(height), hash) } func keyOutqueue(evidence types.Evidence, priority int64) []byte { return _key("%s/%s/%s/%X", baseKeyOutqueue, bE(priority), bE(evidence.Height()), evidence.Hash()) } func keyPending(evidence types.Evidence) []byte { return _key("%s/%s/%X", baseKeyPending, bE(evidence.Height()), evidence.Hash()) } func keyPendingFromHeightAndHash(height int64, hash []byte) []byte { if hash == nil { return _key("%s/%s", baseKeyPending, bE(height)) } return _key("%s/%s/%X", baseKeyPending, bE(height), hash) } func
(format string, o ...interface{}) []byte { return []byte(fmt.Sprintf(format, o...)) } // Store is a store of all the evidence we've seen, including // evidence that has been committed, evidence that has been verified but not broadcast, // and evidence that has been broadcast but not yet committed. type Store struct { db dbm.DB } func NewStore(db dbm.DB) *Store { return &Store{ db: db, } } // PriorityEvidence returns the evidence from the outqueue, sorted by highest priority. func (store *Store) PriorityEvidence() (evidence []types.Evidence) { // reverse the order so highest priority is first l := store.listEvidence(baseKeyOutqueue, -1) for i, j := 0, len(l)-1; i < j; i, j = i+1, j-1 { l[i], l[j] = l[j], l[i] } return l } // PendingEvidence returns up to maxNum known, uncommitted evidence. // If maxNum is -1, all evidence is returned. func (store *Store) PendingEvidence(maxNum int64) (evidence []types.Evidence) { return store.listEvidence(baseKeyPending, maxNum) } // listEvidence lists up to maxNum pieces of evidence for the given prefix key. // It is wrapped by PriorityEvidence and PendingEvidence for convenience. // If maxNum is -1, there's no cap on the size of returned evidence. func (store *Store) listEvidence(prefixKey string, maxNum int64) (evidence []types.Evidence) { var count int64 iter, _ := dbm.IteratePrefix(store.db, []byte(prefixKey)) defer iter.Close() for ; iter.Valid(); iter.Next() { val := iter.Value() if count == maxNum { return evidence } count++ var ei Info err := cdc.UnmarshalBinaryBare(val, &ei) if err != nil { panic(err) } evidence = append(evidence, ei.Evidence) } return evidence } // GetInfo fetches the Info with the given height and hash. // If not found, ei.Evidence is nil. func (store *Store) GetInfo(height int64, hash []byte) Info { key := keyLookupFromHeightAndHash(height, hash) val, _ := store.db.Get(key) if len(val) == 0 { return Info{} } var ei Info err := cdc.UnmarshalBinaryBare(val, &ei) if err != nil { panic(err) } return ei } func (store *Store) DeleteOutqueueEvidence() { iter, _ := dbm.IteratePrefix(store.db, []byte(baseKeyOutqueue)) defer iter.Close() for ; iter.Valid(); iter.Next() { store.db.Delete(iter.Key()) } } func (store *Store) DeletePendingEvidence(height int64) { iter, _ := dbm.IteratePrefix(store.db, keyPendingFromHeightAndHash(height, nil)) defer iter.Close() for ; iter.Valid(); iter.Next() { store.db.Delete(iter.Key()) } } func (store *Store) DeleteLookupEvidence(height int64) { iter, _ := dbm.IteratePrefix(store.db, keyLookupFromHeightAndHash(height, nil)) defer iter.Close() for ; iter.Valid(); iter.Next() { store.db.Delete(iter.Key()) } } func (store *Store) DeleteEvidenceFromHeight(height int64, latestHeight int64) { store.DeleteOutqueueEvidence() for ; height <= latestHeight; height++ { store.DeletePendingEvidence(height) store.DeleteLookupEvidence(height) } } // Has checks if the evidence is already stored func (store *Store) Has(evidence types.Evidence) bool { key := keyLookup(evidence) ok, _ := store.db.Has(key) return ok } // AddNewEvidence adds the given evidence to the database. // It returns false if the evidence is already stored. 
func (store *Store) AddNewEvidence(evidence types.Evidence, priority int64) (bool, error) {
	// check if we already have seen it
	if store.Has(evidence) {
		return false, nil
	}

	ei := Info{
		Committed: false,
		Priority:  priority,
		Evidence:  evidence,
	}
	eiBytes := cdc.MustMarshalBinaryBare(ei)

	// add it to the store
	var err error
	key := keyOutqueue(evidence, priority)
	if err = store.db.Set(key, eiBytes); err != nil {
		return false, err
	}

	key = keyPending(evidence)
	if err = store.db.Set(key, eiBytes); err != nil {
		return false, err
	}

	key = keyLookup(evidence)
	if err = store.db.SetSync(key, eiBytes); err != nil {
		return false, err
	}

	return true, nil
}

// MarkEvidenceAsBroadcasted removes evidence from Outqueue.
func (store *Store) MarkEvidenceAsBroadcasted(evidence types.Evidence) {
	ei := store.getInfo(evidence)
	if ei.Evidence == nil {
		// nothing to do; we did not store the evidence yet (AddNewEvidence).
		return
	}
	// remove from the outqueue
	key := keyOutqueue(evidence, ei.Priority)
	store.db.Delete(key)
}

// MarkEvidenceAsCommitted removes evidence from pending and outqueue and sets the state to committed.
func (store *Store) MarkEvidenceAsCommitted(evidence types.Evidence) {
	// if it's committed, it's been broadcast
	store.MarkEvidenceAsBroadcasted(evidence)

	pendingKey := keyPending(evidence)
	store.db.Delete(pendingKey)

	// committed Info doesn't need priority
	ei := Info{
		Committed: true,
		Evidence:  evidence,
		Priority:  0,
	}

	lookupKey := keyLookup(evidence)
	store.db.SetSync(lookupKey, cdc.MustMarshalBinaryBare(ei))
}

//---------------------------------------------------
// utils

// getInfo is convenience for calling GetInfo if we have the full evidence.
func (store *Store) getInfo(evidence types.Evidence) Info {
	return store.GetInfo(evidence.Height(), evidence.Hash())
}
_key
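// Key-layout sketch (illustrative): because bE zero-pads heights to 16 hex
// digits, lexicographic key order under prefix iteration matches numeric
// height order. For evidence at height 10 with a placeholder two-byte hash:
//
//	keyPendingFromHeightAndHash(10, []byte{0xBE, 0xEF})
//	// -> "evidence-pending/000000000000000A/BEEF"
//	keyLookupFromHeightAndHash(10, nil)
//	// -> "evidence-lookup/000000000000000A" (height-wide prefix used by the Delete*Evidence helpers)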
mod.rs
use crate::lib::*; /// Builds the chunk pipeline. macro_rules! build_chunk_pipeline { ($handle: ident, $id: expr, $name: ident, $file: expr) => { /// The constant render pipeline for a chunk. pub(crate) const $handle: HandleUntyped = HandleUntyped::weak_from_u64(PipelineDescriptor::TYPE_UUID, $id); /// Builds the chunk render pipeline. fn $name(shaders: &mut Assets<Shader>) -> PipelineDescriptor { PipelineDescriptor { color_target_states: vec![ColorTargetState { format: TextureFormat::default(), blend: Some(BlendState { color: BlendComponent { src_factor: BlendFactor::SrcAlpha, dst_factor: BlendFactor::OneMinusSrcAlpha, operation: BlendOperation::Add, }, alpha: BlendComponent { src_factor: BlendFactor::One, dst_factor: BlendFactor::One, operation: BlendOperation::Add, }, }), write_mask: ColorWrite::ALL, }], depth_stencil: Some(DepthStencilState { format: TextureFormat::Depth32Float, depth_write_enabled: true, depth_compare: CompareFunction::LessEqual, stencil: StencilState { front: StencilFaceState::IGNORE, back: StencilFaceState::IGNORE, read_mask: 0, write_mask: 0, }, bias: DepthBiasState { constant: 0, slope_scale: 0.0, clamp: 0.0, }, }), ..PipelineDescriptor::new(ShaderStages { vertex: shaders.add(Shader::from_glsl(ShaderStage::Vertex, { if cfg!(target_arch = "wasm32") { include_str!(concat!("web/", $file)) } else { include_str!($file) } })), fragment: Some(shaders.add(Shader::from_glsl(ShaderStage::Fragment, { if cfg!(target_arch = "wasm32") { include_str!("web/tilemap.frag") } else { include_str!("tilemap.frag") } }))), }) } } }; } build_chunk_pipeline!( CHUNK_SQUARE_PIPELINE, 2110840099625352487, build_chunk_square_pipeline, "tilemap-square.vert" ); build_chunk_pipeline!( CHUNK_HEX_X_PIPELINE, 7038597873061171051, build_chunk_hex_x, "tilemap-hex-x.vert" ); build_chunk_pipeline!( CHUNK_HEX_Y_PIPELINE, 4304966217182648108, build_chunk_hex_y, "tilemap-hex-y.vert" ); build_chunk_pipeline!( CHUNK_HEXCOLS_EVEN_PIPELINE, 7604280309043018950, build_chunk_hexcols_even, "tilemap-hexcols-even.vert" ); build_chunk_pipeline!( CHUNK_HEXCOLS_ODD_PIPELINE, 3111565682159860869, build_chunk_hexcols_odd, "tilemap-hexcols-odd.vert" ); build_chunk_pipeline!( CHUNK_HEXROWS_EVEN_PIPELINE, 1670470246078408352, build_chunk_hexrows_even, "tilemap-hexrows-even.vert" ); build_chunk_pipeline!( CHUNK_HEXROWS_ODD_PIPELINE, 8160067835497533408, build_chunk_hexrows_odd, "tilemap-hexrows-odd.vert" ); /// Topology of the tilemap grid (square or hex) #[derive(Component, Debug, Clone, Copy, PartialEq, Eq, Hash, Reflect, Serialize, Deserialize)] #[reflect_value(PartialEq, Serialize, Deserialize)] pub enum
{ /// Square grid Square, /// Hex grid with rows offset (hexes with pointy top). HexY, /// Hex grid with columns offset (hexes with flat top). HexX, /// Hex grid with offset on even rows (hexes with pointy top). HexEvenRows, /// Hex grid with offset on odd rows (hexes with pointy top). HexOddRows, /// Hex grid with offset on even columns (hexes with flat top). HexEvenCols, /// Hex grid with offset on odd columns (hexes with flat top). HexOddCols, } impl GridTopology { /// Takes a grid topology and returns a handle. pub(crate) fn into_pipeline_handle(self) -> HandleUntyped { use GridTopology::*; match self { Square => CHUNK_SQUARE_PIPELINE, HexY => CHUNK_HEX_Y_PIPELINE, HexX => CHUNK_HEX_X_PIPELINE, HexEvenRows => CHUNK_HEXROWS_EVEN_PIPELINE, HexOddRows => CHUNK_HEXROWS_ODD_PIPELINE, HexEvenCols => CHUNK_HEXCOLS_EVEN_PIPELINE, HexOddCols => CHUNK_HEXCOLS_ODD_PIPELINE, } } } /// Adds the tilemap graph to the pipeline and shaders. pub(crate) fn add_tilemap_graph( pipelines: &mut Assets<PipelineDescriptor>, shaders: &mut Assets<Shader>, ) { // Might need graph.add_system_node here...? pipelines.set_untracked(CHUNK_SQUARE_PIPELINE, build_chunk_square_pipeline(shaders)); pipelines.set_untracked(CHUNK_HEX_X_PIPELINE, build_chunk_hex_x(shaders)); pipelines.set_untracked(CHUNK_HEX_Y_PIPELINE, build_chunk_hex_y(shaders)); pipelines.set_untracked( CHUNK_HEXCOLS_EVEN_PIPELINE, build_chunk_hexcols_even(shaders), ); pipelines.set_untracked(CHUNK_HEXCOLS_ODD_PIPELINE, build_chunk_hexcols_odd(shaders)); pipelines.set_untracked( CHUNK_HEXROWS_EVEN_PIPELINE, build_chunk_hexrows_even(shaders), ); pipelines.set_untracked(CHUNK_HEXROWS_ODD_PIPELINE, build_chunk_hexrows_odd(shaders)); }
GridTopology
views.py
import re
import urllib
import time

from django.shortcuts import render, redirect, get_object_or_404
from django import http
from django.db.utils import DatabaseError
from django.db import transaction
from django.db.models import Count
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.views.decorators.http import require_POST

from jsonview.decorators import json_view

from airmozilla.main.models import Event, Tag, Channel, get_profile_safely
from airmozilla.main.views import is_contributor
from airmozilla.base.utils import paginator
from airmozilla.main.utils import get_event_channels
from . import forms
from . import utils
from .models import LoggedSearch, SavedSearch
from .split_search import split_search


@transaction.atomic
def home(request):
    context = {
        'q': None,
        'events_found': None,
        'search_error': None,
        'tags': None,
        'possible_tags': None,
        'channels': None,
        'possible_channels': None,
        'found_channels': [],
        'found_channels_count': 0,
    }
    if request.GET.get('q'):
        form = forms.SearchForm(request.GET)
    else:
        form = forms.SearchForm()

    if request.GET.get('q') and form.is_valid():
        context['q'] = form.cleaned_data['q']
        privacy_filter = {}
        privacy_exclude = {}
        qs = Event.objects.scheduled_or_processing()
        if request.user.is_active:
            if is_contributor(request.user):
                privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        else:
            # privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
            qs = qs.approved()

        extra = {}
        rest, params = split_search(context['q'], ('tag', 'channel'))
        if params.get('tag'):
            tags = Tag.objects.filter(name__iexact=params['tag'])
            if tags:
                context['q'] = rest
                context['tags'] = extra['tags'] = tags
        else:
            # is the search term possibly a tag?
            all_tag_names = Tag.objects.all().values_list('name', flat=True)
            tags_regex = re.compile(
                r'\b(%s)\b' %
                ('|'.join(re.escape(x) for x in all_tag_names),),
                re.I
            )
            # next we need to turn all of these into a Tag QuerySet
            # because we can't do `filter(name__in=tags_regex.findall(...))`
            # because that is case-sensitive.
            tag_ids = []
            for match in tags_regex.findall(rest):
                tag_ids.extend(
                    Tag.objects.filter(name__iexact=match)
                    .values_list('id', flat=True)
                )
            possible_tags = Tag.objects.filter(
                id__in=tag_ids
            )
            for tag in possible_tags:
                regex = re.compile(re.escape(tag.name), re.I)
                tag._query_string = regex.sub(
                    '',
                    context['q'],
                )
                tag._query_string += ' tag: %s' % tag.name
                # reduce all excess whitespace into one space
                tag._query_string = re.sub(
                    r'\s\s+',
                    ' ',
                    tag._query_string
                )
                tag._query_string = tag._query_string.strip()
            context['possible_tags'] = possible_tags

        if params.get('channel'):
            channels = Channel.objects.filter(name__iexact=params['channel'])
            if channels:
                context['q'] = rest
                context['channels'] = extra['channels'] = channels
        else:
            # is the search term possibly a channel?
            all_channel_names = (
                Channel.objects.all().values_list('name', flat=True)
            )
            channels_regex = re.compile(
                r'\b(%s)\b' %
                ('|'.join(re.escape(x) for x in all_channel_names),),
                re.I
            )
            channel_ids = []
            for match in channels_regex.findall(rest):
                channel_ids.extend(
                    Channel.objects
                    .filter(name__iexact=match).values_list('id', flat=True)
                )
            possible_channels = Channel.objects.filter(
                id__in=channel_ids
            )
            for channel in possible_channels:
                regex = re.compile(re.escape(channel.name), re.I)
                channel._query_string = regex.sub(
                    '',
                    context['q'],
                )
                channel._query_string += ' channel: %s' % channel.name
                # reduce all excess whitespace into one space
                channel._query_string = re.sub(
                    r'\s\s+',
                    ' ',
                    channel._query_string
                )
                channel._query_string = channel._query_string.strip()
            context['possible_channels'] = possible_channels

        events = _search(
            qs,
            context['q'],
            privacy_filter=privacy_filter,
            privacy_exclude=privacy_exclude,
            sort=request.GET.get('sort'),
            **extra
        )
        if not events.count() and utils.possible_to_or_query(context['q']):
            events = _search(
                qs,
                context['q'],
                privacy_filter=privacy_filter,
                privacy_exclude=privacy_exclude,
                sort=request.GET.get('sort'),
                fuzzy=True
            )

        found_channels = _find_channels(context['q'])
        context['found_channels'] = found_channels  # it's a list
        context['found_channels_count'] = len(found_channels)
    elif request.GET.get('ss'):
        savedsearch = get_object_or_404(
            SavedSearch,
            id=request.GET.get('ss')
        )
        context['savedsearch'] = savedsearch
        events = savedsearch.get_events()
        # But if you're just browsing we want to make sure you don't
        # see anything you're not supposed to see.
        if request.user.is_active:
            if is_contributor(request.user):
                events = events.exclude(privacy=Event.PRIVACY_COMPANY)
        else:
            events = events.filter(privacy=Event.PRIVACY_PUBLIC)
        # It's not obvious how to sort these. They all match the saved
        # search.
# Let's keep it simple and sort by start time for now events = events.order_by('-start_time') else: events = None if events is not None: try: page = int(request.GET.get('page', 1)) if page < 1: raise ValueError except ValueError: return http.HttpResponseBadRequest('Invalid page') # we use the paginator() function to get the Paginator # instance so we can avoid calling `events.count()` for the # header of the page where it says "XX events found" try: with transaction.atomic(): pager, events_paged = paginator(events, page, 10) _database_error_happened = False except DatabaseError: _database_error_happened = True # don't feed the trolls, just return nothing found pager, events_paged = paginator(Event.objects.none(), 1, 10) next_page_url = prev_page_url = None def url_maker(page): querystring = {'page': page} if context.get('savedsearch'): querystring['ss'] = context['savedsearch'].id else: querystring['q'] = context['q'].encode('utf-8') querystring = urllib.urlencode(querystring) return '%s?%s' % (reverse('search:home'), querystring) if events_paged.has_next(): next_page_url = url_maker(events_paged.next_page_number()) if events_paged.has_previous(): prev_page_url = url_maker(events_paged.previous_page_number()) context['events_paged'] = events_paged context['next_page_url'] = next_page_url context['prev_page_url'] = prev_page_url context['events_found'] = pager.count context['channels'] = get_event_channels(events_paged) log_searches = settings.LOG_SEARCHES and '_nolog' not in request.GET if ( log_searches and not _database_error_happened and request.GET.get('q', '').strip() ): logged_search = LoggedSearch.objects.create( term=request.GET['q'][:200], results=events.count(), page=page, user=request.user.is_authenticated() and request.user or None ) request.session['logged_search'] = ( logged_search.pk, time.time() ) elif request.GET.get('q'): context['search_error'] = form.errors['q'] else: context['events'] = [] context['form'] = form return render(request, 'search/home.html', context) def _find_channels(q): search_escaped = utils.make_or_query(q) sql = """ to_tsvector('english', name) @@ plainto_tsquery('english', %s) OR slug ILIKE %s """ channels_qs = Channel.objects.all().extra( where=[sql], params=[ search_escaped, search_escaped, ], select={ 'name_highlit': ( "ts_headline('english', name, " "plainto_tsquery('english', %s))" ), 'rank_name': ( "ts_rank_cd(to_tsvector('english', name), " "plainto_tsquery('english', %s))" ), }, select_params=[ search_escaped, search_escaped, ] ) # make a dict of parental counts subchannel_counts = {} qs = ( Channel.objects .filter(parent__isnull=False) .values('parent_id') .order_by() # necessary because the model has a default ordering .annotate(Count('parent')) ) for each in qs: subchannel_counts[each['parent_id']] = each['parent__count'] # make a dict of events counts by channel event_counts = {} qs = ( Event.channels.through.objects.filter(channel__in=channels_qs) .values('channel_id') .annotate(Count('channel')) ) for each in qs: event_counts[each['channel_id']] = each['channel__count'] channels = [] for channel in channels_qs[:5]: channel._event_count = event_counts.get(channel.id, 0) channel._subchannel_count = subchannel_counts.get(channel.id, 0) channels.append(channel) return channels def _search(qs, q, **options): # we only want to find upcoming or archived events # some optional filtering if 'tags' in options: qs = qs.filter(tags__in=options['tags']) if 'channels' in options: qs = qs.filter(channels__in=options['channels']) if 
options.get('privacy_filter'): qs = qs.filter(**options['privacy_filter']) elif options.get('privacy_exclude'): qs = qs.exclude(**options['privacy_exclude']) if q and options.get('fuzzy'): sql = """ ( to_tsvector('english', title) @@ to_tsquery('english', %s) OR to_tsvector('english', description || ' ' || short_description) @@ to_tsquery('english', %s) OR to_tsvector('english', transcript) @@ to_tsquery('english', %s) ) """ search_escaped = utils.make_or_query(q) elif q: sql = """ ( to_tsvector('english', title) @@ plainto_tsquery('english', %s) OR to_tsvector('english', description || ' ' || short_description) @@ plainto_tsquery('english', %s) OR to_tsvector('english', transcript) @@ plainto_tsquery('english', %s) ) """ search_escaped = q if q: qs = qs.extra( where=[sql], params=[search_escaped, search_escaped, search_escaped], select={ 'title_highlit': ( "ts_headline('english', title, " "plainto_tsquery('english', %s))" ), 'desc_highlit': ( "ts_headline('english', short_description, " "plainto_tsquery('english', %s))" ), 'transcript_highlit': ( "ts_headline('english', transcript, " "plainto_tsquery('english', %s))" ), 'rank_title': ( "ts_rank_cd(to_tsvector('english', title), " "plainto_tsquery('english', %s))" ), 'rank_desc': ( "ts_rank_cd(to_tsvector('english', description " "|| ' ' || short_description), " "plainto_tsquery('english', %s))" ), 'rank_transcript': ( "ts_rank_cd(to_tsvector('english', transcript), " "plainto_tsquery('english', %s))" ), }, select_params=[ search_escaped, search_escaped, search_escaped, search_escaped, search_escaped, search_escaped ], ) qs = qs.order_by('-rank_title', '-start_time', '-rank_desc') else: qs = qs.order_by('-start_time') return qs @require_POST @login_required @transaction.atomic() def
(request): q = request.POST.get('q', '').strip() if not q: return http.HttpResponseBadRequest('no q') form = forms.SearchForm(request.POST) if not form.is_valid(): return http.HttpResponseBadRequest(form.errors) title = form.cleaned_data['q'] rest, params = split_search(title, ('tag', 'channel')) tags = None channels = None if params.get('tag'): tags = Tag.objects.filter(name__iexact=params['tag']) if tags: title = rest if params.get('channel'): channels = Channel.objects.filter( name__iexact=params['channel'] ) if channels: title = rest filters = {} if q: filters['title'] = { 'include': title } if tags: filters['tags'] = { 'include': [tag.id for tag in tags], } if channels: filters['channels'] = { 'include': [channel.id for channel in channels], } for other in SavedSearch.objects.filter(user=request.user): if other.filters == filters: return redirect('search:savedsearch', id=other.id) savedsearch = SavedSearch.objects.create( user=request.user, filters=filters, ) messages.success( request, 'Search saved' ) return redirect('search:savedsearch', id=savedsearch.id) @login_required @transaction.atomic() def savedsearch(request, id=None): savedsearch = get_object_or_404(SavedSearch, id=id) if request.method == 'POST': forked = False if savedsearch.user != request.user: # fork the saved search forked = True savedsearch = SavedSearch.objects.create( user=request.user, name=savedsearch.name, filters=savedsearch.filters, ) form = forms.SavedSearchForm(request.POST) if form.is_valid(): data = form.export_filters() savedsearch.name = form.cleaned_data['name'] savedsearch.filters = data savedsearch.save() if forked: messages.success( request, 'Saved Search forked and saved' ) else: messages.success( request, 'Saved Search saved' ) return redirect('search:savedsearch', id=savedsearch.id) elif request.GET.get('sample'): events = savedsearch.get_events() return http.JsonResponse({'events': events.count()}) else: data = forms.SavedSearchForm.convert_filters( savedsearch.filters, pks=True ) data['name'] = savedsearch.name form = forms.SavedSearchForm(data) context = { 'savedsearch': savedsearch, 'form': form, 'use_findable': True, } return render(request, 'search/savesearch.html', context) @login_required @transaction.atomic() def new_savedsearch(request): if request.method == 'POST': form = forms.SavedSearchForm(request.POST) if form.is_valid(): data = form.export_filters() SavedSearch.objects.create( user=request.user, filters=data, name=form.cleaned_data['name'], ) messages.success( request, 'Saved Search saved' ) return redirect('search:savedsearches') else: form = forms.SavedSearchForm() context = { 'form': form, 'use_findable': False, } return render(request, 'search/savesearch.html', context) @login_required def savedsearches(request): context = {} return render(request, 'search/savedsearches.html', context) @login_required @json_view def savedsearches_data(request): context = {} qs = SavedSearch.objects.filter( user=request.user ).order_by('-created') searches = [] for savedsearch in qs: item = { 'id': savedsearch.id, 'name': savedsearch.name, 'summary': savedsearch.summary, 'modified': savedsearch.modified.isoformat(), } searches.append(item) # We need a general Feed URL that is tailored to this user from airmozilla.main.context_processors import base feed = base(request)['get_feed_data']() if request.user.is_active: profile = get_profile_safely(request.user) if profile and profile.contributor: calendar_privacy = 'contributors' else: calendar_privacy = 'company' else: calendar_privacy = 'public' 
context['savedsearches'] = searches context['urls'] = { 'search:savedsearch': reverse('search:savedsearch', args=(0,)), 'search:home': reverse('search:home'), 'feed': feed['url'], 'ical': reverse('main:calendar_ical', args=(calendar_privacy,)), } return context @login_required @json_view def delete_savedsearch(request, id): savedsearch = get_object_or_404(SavedSearch, id=id) if savedsearch.user != request.user: return http.HttpResponseForbidden('Not yours to delete') savedsearch.delete() return {'ok': True}
savesearch
mod.rs
//! Raw Pipeline State Objects //! //! This module contains items used to create and manage Pipelines. use crate::{device, pass, Backend}; use std::{fmt, io, slice}; mod compute; mod descriptor; mod graphics; mod input_assembler; mod output_merger; mod specialization; pub use self::{ compute::*, descriptor::*, graphics::*, input_assembler::*, output_merger::*, specialization::*, }; /// Error types happening upon PSO creation on the device side. #[derive(Clone, Debug, PartialEq)] pub enum CreationError { /// Unknown other error. Other, /// Invalid subpass (not part of renderpass). InvalidSubpass(pass::SubpassId), /// Shader compilation error. Shader(device::ShaderError), /// Out of either host or device memory. OutOfMemory(device::OutOfMemory), } impl From<device::OutOfMemory> for CreationError { fn from(err: device::OutOfMemory) -> Self { CreationError::OutOfMemory(err) } } impl std::fmt::Display for CreationError { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { CreationError::OutOfMemory(err) => write!(fmt, "Failed to create pipeline: {}", err), CreationError::Other => write!(fmt, "Failed to create pipeline: Unsupported usage: Implementation specific error occurred"), CreationError::InvalidSubpass(subpass) => write!(fmt, "Failed to create pipeline: Invalid subpass: {}", subpass), CreationError::Shader(err) => write!(fmt, "Failed to create pipeline: {}", err), } } } impl std::error::Error for CreationError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { CreationError::OutOfMemory(err) => Some(err), CreationError::Shader(err) => Some(err), CreationError::InvalidSubpass(_) => None, CreationError::Other => None, }
bitflags!(
    /// Stages of the logical pipeline.
    ///
    /// The pipeline is structured by the ordering of the flags.
    /// Some stages are queue type dependent.
    #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
    pub struct PipelineStage: u32 {
        /// Beginning of the command queue.
        const TOP_OF_PIPE = 0x1;
        /// Indirect data consumption.
        const DRAW_INDIRECT = 0x2;
        /// Vertex data consumption.
        const VERTEX_INPUT = 0x4;
        /// Vertex shader execution.
        const VERTEX_SHADER = 0x8;
        /// Hull shader execution.
        const HULL_SHADER = 0x10;
        /// Domain shader execution.
        const DOMAIN_SHADER = 0x20;
        /// Geometry shader execution.
        const GEOMETRY_SHADER = 0x40;
        /// Fragment shader execution.
        const FRAGMENT_SHADER = 0x80;
        /// Stage of early depth and stencil test.
        const EARLY_FRAGMENT_TESTS = 0x100;
        /// Stage of late depth and stencil test.
        const LATE_FRAGMENT_TESTS = 0x200;
        /// Stage of final color value calculation.
        const COLOR_ATTACHMENT_OUTPUT = 0x400;
        /// Compute shader execution.
        const COMPUTE_SHADER = 0x800;
        /// Copy/Transfer command execution.
        const TRANSFER = 0x1000;
        /// End of the command queue.
        const BOTTOM_OF_PIPE = 0x2000;
        /// Read/Write access from host.
        /// (Not a real pipeline stage)
        const HOST = 0x4000;
    }
);

bitflags!(
    /// Combination of different shader pipeline stages.
    #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
    pub struct ShaderStageFlags: u32 {
        /// Vertex shader stage.
        const VERTEX = 0x1;
        /// Hull (tessellation) shader stage.
        const HULL = 0x2;
        /// Domain (tessellation) shader stage.
        const DOMAIN = 0x4;
        /// Geometry shader stage.
        const GEOMETRY = 0x8;
        /// Fragment shader stage.
        const FRAGMENT = 0x10;
        /// Compute shader stage.
        const COMPUTE = 0x20;
        /// All graphics pipeline shader stages.
        const GRAPHICS = Self::VERTEX.bits | Self::HULL.bits |
            Self::DOMAIN.bits | Self::GEOMETRY.bits | Self::FRAGMENT.bits;
        /// All shader stages (matches Vulkan).
        const ALL = 0x7FFFFFFF;
    }
);

// Note: this type is only needed for backends, not used anywhere within gfx_hal.
/// Which program stage this shader represents.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(u8)]
pub enum Stage {
    Vertex,
    Hull,
    Domain,
    Geometry,
    Fragment,
    Compute,
}

impl From<Stage> for ShaderStageFlags {
    fn from(stage: Stage) -> Self {
        match stage {
            Stage::Vertex => ShaderStageFlags::VERTEX,
            Stage::Hull => ShaderStageFlags::HULL,
            Stage::Domain => ShaderStageFlags::DOMAIN,
            Stage::Geometry => ShaderStageFlags::GEOMETRY,
            Stage::Fragment => ShaderStageFlags::FRAGMENT,
            Stage::Compute => ShaderStageFlags::COMPUTE,
        }
    }
}

impl fmt::Display for Stage {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(match self {
            Stage::Vertex => "vertex",
            Stage::Hull => "hull",
            Stage::Domain => "domain",
            Stage::Geometry => "geometry",
            Stage::Fragment => "fragment",
            Stage::Compute => "compute",
        })
    }
}

/// Shader entry point.
#[derive(Debug)]
pub struct EntryPoint<'a, B: Backend> {
    /// Entry point name.
    pub entry: &'a str,
    /// Shader module reference.
    pub module: &'a B::ShaderModule,
    /// Specialization.
    pub specialization: Specialization<'a>,
}

impl<'a, B: Backend> Clone for EntryPoint<'a, B> {
    fn clone(&self) -> Self {
        EntryPoint {
            entry: self.entry,
            module: self.module,
            specialization: self.specialization.clone(),
        }
    }
}

bitflags!(
    /// Pipeline creation flags.
    #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
    pub struct PipelineCreationFlags: u32 {
        /// Disable pipeline optimizations.
        ///
        /// May speed up pipeline creation.
        const DISABLE_OPTIMIZATION = 0x1;
        /// Allow derivatives (children) of the pipeline.
        ///
        /// Must be set on a pipeline that other pipelines will reference as their base.
        const ALLOW_DERIVATIVES = 0x2;
    }
);

/// A reference to a parent pipeline. The assumption is that
/// a parent and derivative/child pipeline have most settings
/// in common, and one may be switched for another more quickly
/// than entirely unrelated pipelines would be.
#[derive(Debug)]
pub enum BasePipeline<'a, P: 'a> {
    /// Referencing an existing pipeline as parent.
    Pipeline(&'a P),
    /// A pipeline in the same create pipelines call.
    ///
    /// The index of the parent must be lower than the index of the child.
    Index(usize),
    /// No parent pipeline exists.
    None,
}

/// Pipeline state which may be static or dynamic.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum State<T> {
    /// Static state that cannot be altered.
    Static(T),
    /// Dynamic state set through a command buffer.
    Dynamic,
}

impl<T> State<T> {
    /// Returns the static value or a default.
    pub fn static_or(self, default: T) -> T {
        match self {
            State::Static(v) => v,
            State::Dynamic => default,
        }
    }

    /// Whether the state is static.
    pub fn is_static(self) -> bool {
        match self {
            State::Static(_) => true,
            State::Dynamic => false,
        }
    }

    /// Whether the state is dynamic.
    pub fn is_dynamic(self) -> bool {
        !self.is_static()
    }
}

/// Safely read SPIR-V.
///
/// Converts to native endianness and returns correctly aligned storage without unnecessary
/// copying. Returns an `InvalidData` error if the input is trivially not SPIR-V.
///
/// This function can also be used to convert an already in-memory `&[u8]` to a valid `Vec<u32>`,
/// but prefer working with `&[u32]` from the start whenever possible.
///
/// # Examples
/// ```no_run
/// let mut file = std::fs::File::open("/path/to/shader.spv").unwrap();
/// let words = gfx_hal::pso::read_spirv(&mut file).unwrap();
/// ```
/// ```
/// const SPIRV: &[u8] = &[
///     0x03, 0x02, 0x23, 0x07, // ...
/// ];
/// let words = gfx_hal::pso::read_spirv(std::io::Cursor::new(&SPIRV[..])).unwrap();
/// ```
pub fn read_spirv<R: io::Read + io::Seek>(mut x: R) -> io::Result<Vec<u32>> {
    let size = x.seek(io::SeekFrom::End(0))?;
    if size % 4 != 0 {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "input length not divisible by 4",
        ));
    }
    if size > usize::max_value() as u64 {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "input too long"));
    }
    let words = (size / 4) as usize;
    let mut result = Vec::<u32>::with_capacity(words);
    x.seek(io::SeekFrom::Start(0))?;
    unsafe {
        // Writing all bytes through a pointer with less strict alignment when our type has no
        // invalid bitpatterns is safe.
        x.read_exact(slice::from_raw_parts_mut(
            result.as_mut_ptr() as *mut u8,
            words * 4,
        ))?;
        result.set_len(words);
    }
    const MAGIC_NUMBER: u32 = 0x07230203;
    if result.len() > 0 && result[0] == MAGIC_NUMBER.swap_bytes() {
        for word in &mut result {
            *word = word.swap_bytes();
        }
    }
    if result.len() == 0 || result[0] != MAGIC_NUMBER {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "input missing SPIR-V magic number",
        ));
    }
    Ok(result)
}
} }