KeyValCfg.py

# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class KeyValCfg(object):

    def __init__(self, key, val, atCfg, id=None, matchOp=None):
        """
        :param id: (Optional) sequence id; must not be empty when updating
        :param matchOp: (Optional) match operator, 0-5: exact match 0, prefix match 1, contains 2, regex 3, greater than 4, suffix 5
        :param key: cookie key
        :param val: value to match
        :param atCfg: action configuration
        """
        self.id = id
        self.matchOp = matchOp
        self.key = key
        self.val = val
        self.atCfg = atCfg
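
# Usage sketch with illustrative values (the expected shape of atCfg is
# defined elsewhere in the SDK, so a plain dict is assumed here): match
# requests whose cookie "session" exactly equals "expired" (matchOp=0).
#
#   cfg = KeyValCfg(key="session", val="expired", atCfg={"action": "block"}, matchOp=0)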
rules_test.go

// Copyright 2016 The prometheus-operator Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package prometheus
import (
"strings"
"testing"
monitoringv1 "github.com/coreos/prometheus-operator/pkg/client/monitoring/v1"
"k8s.io/api/core/v1"
)
func TestMakeRulesConfigMaps(t *testing.T) {
t.Run("ShouldReturnAtLeastOneConfigMap", shouldReturnAtLeastOneConfigMap)
t.Run("ShouldErrorOnTooLargeRuleFile", shouldErrorOnTooLargeRuleFile)
t.Run("ShouldSplitUpLargeSmallIntoTwo", shouldSplitUpLargeSmallIntoTwo)
}
// makeRulesConfigMaps should return at least one ConfigMap even if it is empty
// when there are no rules. Otherwise adding a rule to a Prometheus without rules
// would change the statefulset definition and thereby force Prometheus to
// restart.
func shouldReturnAtLeastOneConfigMap(t *testing.T) {
p := &monitoringv1.Prometheus{}
ruleFiles := map[string]string{}
configMaps, err := makeRulesConfigMaps(p, ruleFiles)
if err != nil {
t.Fatalf("expected no error but got: %v", err.Error())
}
if len(configMaps) != 1 {
t.Fatalf("expected one ConfigMaps but got %v", len(configMaps))
}
}
func shouldErrorOnTooLargeRuleFile(t *testing.T) {
	expectedError := "rule file 'my-rule-file' is too large for a single Kubernetes ConfigMap"
	p := &monitoringv1.Prometheus{}
	ruleFiles := map[string]string{}
	ruleFiles["my-rule-file"] = strings.Repeat("a", v1.MaxSecretSize+1)
	_, err := makeRulesConfigMaps(p, ruleFiles)
	if err == nil || err.Error() != expectedError {
		t.Fatalf("expected makeRulesConfigMaps to return error '%v' but got '%v'", expectedError, err)
	}
}
func shouldSplitUpLargeSmallIntoTwo(t *testing.T) {
p := &monitoringv1.Prometheus{}
ruleFiles := map[string]string{}
ruleFiles["my-rule-file-1"] = strings.Repeat("a", maxConfigMapDataSize)
ruleFiles["my-rule-file-2"] = "a"
configMaps, err := makeRulesConfigMaps(p, ruleFiles)
if err != nil {
t.Fatalf("expected no error but got: %v", err)
}
if len(configMaps) != 2 {
t.Fatalf("expected rule files to be split up into two ConfigMaps, but got '%v' instead", len(configMaps))
}
	if configMaps[0].Data["my-rule-file-1"] != ruleFiles["my-rule-file-1"] ||
		configMaps[1].Data["my-rule-file-2"] != ruleFiles["my-rule-file-2"] {
t.Fatal("expected ConfigMap data to match rule file content")
}
}
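
// A minimal sketch, not the actual makeRulesConfigMaps implementation: the
// tests above imply greedy bucketing of rule files into ConfigMap-sized
// groups, starting a new bucket whenever adding a file would overflow
// maxSize. (A real implementation would iterate names in sorted order so the
// split is deterministic; Go map iteration order is random.)
func bucketRuleFiles(ruleFiles map[string]string, maxSize int) []map[string]string {
	buckets := []map[string]string{{}}
	size := 0
	for name, content := range ruleFiles {
		if size+len(content) > maxSize && len(buckets[len(buckets)-1]) > 0 {
			// This file would overflow the current bucket; start a new one.
			buckets = append(buckets, map[string]string{})
			size = 0
		}
		buckets[len(buckets)-1][name] = content
		size += len(content)
	}
	return buckets
}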
animation_transition_factory.ts

/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimationOptions, ɵStyleData} from '@angular/animations';
import {AnimationDriver} from '../render/animation_driver';
import {getOrSetAsInMap} from '../render/shared';
import {copyObj, interpolateParams, iteratorToArray} from '../util';
import {StyleAst, TransitionAst} from './animation_ast';
import {buildAnimationTimelines} from './animation_timeline_builder';
import {TransitionMatcherFn} from './animation_transition_expr';
import {AnimationTransitionInstruction, createTransitionInstruction} from './animation_transition_instruction';
import {ElementInstructionMap} from './element_instruction_map';
import {AnimationStyleNormalizer} from './style_normalization/animation_style_normalizer';
const EMPTY_OBJECT = {};
export class AnimationTransitionFactory {
constructor(
private _triggerName: string, public ast: TransitionAst,
private _stateStyles: {[stateName: string]: AnimationStateStyles}) {}
match(currentState: any, nextState: any, element: any, params: {[key: string]: any}): boolean {
return oneOrMoreTransitionsMatch(this.ast.matchers, currentState, nextState, element, params);
}
buildStyles(stateName: string, params: {[key: string]: any}, errors: Error[]) {
const backupStateStyler = this._stateStyles['*'];
const stateStyler = this._stateStyles[stateName];
const backupStyles = backupStateStyler ? backupStateStyler.buildStyles(params, errors) : {};
return stateStyler ? stateStyler.buildStyles(params, errors) : backupStyles;
}
build(
driver: AnimationDriver, element: any, currentState: any, nextState: any,
enterClassName: string, leaveClassName: string, currentOptions?: AnimationOptions,
nextOptions?: AnimationOptions, subInstructions?: ElementInstructionMap,
skipAstBuild?: boolean): AnimationTransitionInstruction {
const errors: Error[] = [];
const transitionAnimationParams = this.ast.options && this.ast.options.params || EMPTY_OBJECT;
const currentAnimationParams = currentOptions && currentOptions.params || EMPTY_OBJECT;
const currentStateStyles = this.buildStyles(currentState, currentAnimationParams, errors);
const nextAnimationParams = nextOptions && nextOptions.params || EMPTY_OBJECT;
const nextStateStyles = this.buildStyles(nextState, nextAnimationParams, errors);
const queriedElements = new Set<any>();
const preStyleMap = new Map<any, {[prop: string]: boolean}>();
const postStyleMap = new Map<any, {[prop: string]: boolean}>();
const isRemoval = nextState === 'void';
const animationOptions = {params: {...transitionAnimationParams, ...nextAnimationParams}};
const timelines = skipAstBuild ?
[] :
buildAnimationTimelines(
driver, element, this.ast.animation, enterClassName, leaveClassName, currentStateStyles,
nextStateStyles, animationOptions, subInstructions, errors);
let totalTime = 0;
timelines.forEach(tl => {
totalTime = Math.max(tl.duration + tl.delay, totalTime);
});
if (errors.length) {
return createTransitionInstruction(
element, this._triggerName, currentState, nextState, isRemoval, currentStateStyles,
nextStateStyles, [], [], preStyleMap, postStyleMap, totalTime, errors);
}
timelines.forEach(tl => {
const elm = tl.element;
const preProps = getOrSetAsInMap(preStyleMap, elm, {});
tl.preStyleProps.forEach(prop => preProps[prop] = true);
const postProps = getOrSetAsInMap(postStyleMap, elm, {});
tl.postStyleProps.forEach(prop => postProps[prop] = true);
if (elm !== element) {
queriedElements.add(elm);
}
});
const queriedElementsList = iteratorToArray(queriedElements.values());
return createTransitionInstruction(
element, this._triggerName, currentState, nextState, isRemoval, currentStateStyles,
nextStateStyles, timelines, queriedElementsList, preStyleMap, postStyleMap, totalTime);
}
}
function oneOrMoreTransitionsMatch(
matchFns: TransitionMatcherFn[], currentState: any, nextState: any, element: any,
params: {[key: string]: any}): boolean {
return matchFns.some(fn => fn(currentState, nextState, element, params));
}
export class AnimationStateStyles {
constructor(
private styles: StyleAst, private defaultParams: {[key: string]: any},
private normalizer: AnimationStyleNormalizer) {}
buildStyles(params: {[key: string]: any}, errors: Error[]): ɵStyleData {
const finalStyles: ɵStyleData = {};
const combinedParams = copyObj(this.defaultParams);
Object.keys(params).forEach(key => {
const value = params[key];
if (value != null) {
combinedParams[key] = value;
}
});
this.styles.styles.forEach(value => {
if (typeof value !== 'string') {
const styleObj = value as any;
Object.keys(styleObj).forEach(prop => {
let val = styleObj[prop];
if (val.length > 1) {
val = interpolateParams(val, combinedParams, errors);
}
const normalizedProp = this.normalizer.normalizePropertyName(prop, errors);
val = this.normalizer.normalizeStyleValue(prop, normalizedProp, val, errors);
finalStyles[normalizedProp] = val;
});
}
});
return finalStyles;
}
}
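
// Usage sketch (illustrative, not taken from the Angular sources): given
// defaultParams {color: 'red'} and a style entry {background: '{{ color }}'},
// buildStyles({color: 'blue'}, errors) overlays the caller's params on the
// defaults and interpolates, yielding normalized styles like
// {background: 'blue'}.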
demand.rs

use crate::check::FnCtxt;
use rustc::infer::InferOk;
use rustc::traits::{ObligationCause, ObligationCauseCode};
use syntax::util::parser::PREC_POSTFIX;
use syntax_pos::Span;
use rustc::hir;
use rustc::hir::def::Def;
use rustc::hir::Node;
use rustc::hir::{Item, ItemKind, print};
use rustc::ty::{self, Ty, AssociatedItem};
use rustc::ty::adjustment::AllowTwoPhase;
use errors::{Applicability, DiagnosticBuilder, SourceMapper};
use super::method::probe;
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// Requires that the two types unify, and prints an error message if
// they don't.
pub fn demand_suptype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) {
self.demand_suptype_diag(sp, expected, actual).map(|mut e| e.emit());
}
pub fn demand_suptype_diag(&self,
sp: Span,
expected: Ty<'tcx>,
actual: Ty<'tcx>) -> Option<DiagnosticBuilder<'tcx>> {
let cause = &self.misc(sp);
match self.at(cause, self.param_env).sup(expected, actual) {
Ok(InferOk { obligations, value: () }) => {
self.register_predicates(obligations);
None
},
Err(e) => {
Some(self.report_mismatched_types(&cause, expected, actual, e))
}
}
}
pub fn demand_eqtype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) {
if let Some(mut err) = self.demand_eqtype_diag(sp, expected, actual) {
err.emit();
}
}
pub fn demand_eqtype_diag(&self,
sp: Span,
expected: Ty<'tcx>,
actual: Ty<'tcx>) -> Option<DiagnosticBuilder<'tcx>> {
self.demand_eqtype_with_origin(&self.misc(sp), expected, actual)
}
pub fn demand_eqtype_with_origin(&self,
cause: &ObligationCause<'tcx>,
expected: Ty<'tcx>,
actual: Ty<'tcx>) -> Option<DiagnosticBuilder<'tcx>> {
match self.at(cause, self.param_env).eq(expected, actual) {
Ok(InferOk { obligations, value: () }) => {
self.register_predicates(obligations);
None
}
Err(e) => {
Some(self.report_mismatched_types(cause, expected, actual, e))
}
}
}
    pub fn demand_eqtype_pat(
&self,
cause_span: Span,
expected: Ty<'tcx>,
actual: Ty<'tcx>,
match_expr_span: Option<Span>,
) {
let cause = if let Some(span) = match_expr_span {
self.cause(
cause_span,
ObligationCauseCode::MatchExpressionArmPattern { span, ty: expected },
)
} else {
self.misc(cause_span)
};
self.demand_eqtype_with_origin(&cause, expected, actual).map(|mut err| err.emit());
}
pub fn demand_coerce(&self,
expr: &hir::Expr,
checked_ty: Ty<'tcx>,
expected: Ty<'tcx>,
allow_two_phase: AllowTwoPhase)
-> Ty<'tcx> {
let (ty, err) = self.demand_coerce_diag(expr, checked_ty, expected, allow_two_phase);
if let Some(mut err) = err {
err.emit();
}
ty
}
// Checks that the type of `expr` can be coerced to `expected`.
//
// N.B., this code relies on `self.diverges` to be accurate. In
// particular, assignments to `!` will be permitted if the
// diverges flag is currently "always".
pub fn demand_coerce_diag(&self,
expr: &hir::Expr,
checked_ty: Ty<'tcx>,
expected: Ty<'tcx>,
allow_two_phase: AllowTwoPhase)
-> (Ty<'tcx>, Option<DiagnosticBuilder<'tcx>>) {
let expected = self.resolve_type_vars_with_obligations(expected);
let e = match self.try_coerce(expr, checked_ty, expected, allow_two_phase) {
Ok(ty) => return (ty, None),
Err(e) => e
};
let cause = self.misc(expr.span);
let expr_ty = self.resolve_type_vars_with_obligations(checked_ty);
let mut err = self.report_mismatched_types(&cause, expected, expr_ty, e);
// If the expected type is an enum (Issue #55250) with any variants whose
// sole field is of the found type, suggest such variants. (Issue #42764)
if let ty::Adt(expected_adt, substs) = expected.sty {
if expected_adt.is_enum() {
let mut compatible_variants = expected_adt.variants
.iter()
.filter(|variant| variant.fields.len() == 1)
.filter_map(|variant| {
let sole_field = &variant.fields[0];
let sole_field_ty = sole_field.ty(self.tcx, substs);
if self.can_coerce(expr_ty, sole_field_ty) {
let variant_path = self.tcx.item_path_str(variant.did);
// FIXME #56861: DRYer prelude filtering
Some(variant_path.trim_start_matches("std::prelude::v1::").to_string())
} else {
None
}
}).peekable();
if compatible_variants.peek().is_some() {
let expr_text = print::to_string(print::NO_ANN, |s| s.print_expr(expr));
let suggestions = compatible_variants
.map(|v| format!("{}({})", v, expr_text));
err.span_suggestions(
expr.span,
"try using a variant of the expected type",
suggestions,
Applicability::MaybeIncorrect,
);
}
}
}
self.suggest_ref_or_into(&mut err, expr, expected, expr_ty);
(expected, Some(err))
}
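    // Illustrative example of the variant suggestion above (exact rendering
    // assumed): for `let x: Result<u32, ()> = 1u32;` the expected type is an
    // enum whose `Ok` variant has a single field coercible from `u32`, so the
    // diagnostic proposes wrapping the expression in that variant, e.g.
    // `Ok(1u32)`.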
pub fn get_conversion_methods(&self, span: Span, expected: Ty<'tcx>, checked_ty: Ty<'tcx>)
-> Vec<AssociatedItem> {
let mut methods = self.probe_for_return_type(span,
probe::Mode::MethodCall,
expected,
checked_ty,
hir::DUMMY_HIR_ID);
methods.retain(|m| {
self.has_no_input_arg(m) &&
self.tcx.get_attrs(m.def_id).iter()
// This special internal attribute is used to whitelist
// "identity-like" conversion methods to be suggested here.
//
// FIXME (#46459 and #46460): ideally
                    // `std::convert::Into::into` and `std::borrow::ToOwned` would
// also be `#[rustc_conversion_suggestion]`, if not for
// method-probing false-positives and -negatives (respectively).
//
// FIXME? Other potential candidate methods: `as_ref` and
// `as_mut`?
.find(|a| a.check_name("rustc_conversion_suggestion")).is_some()
});
methods
}
// This function checks if the method isn't static and takes other arguments than `self`.
fn has_no_input_arg(&self, method: &AssociatedItem) -> bool {
match method.def() {
Def::Method(def_id) => {
self.tcx.fn_sig(def_id).inputs().skip_binder().len() == 1
}
_ => false,
}
}
/// Identify some cases where `as_ref()` would be appropriate and suggest it.
///
/// Given the following code:
/// ```
/// struct Foo;
/// fn takes_ref(_: &Foo) {}
/// let ref opt = Some(Foo);
///
/// opt.map(|arg| takes_ref(arg));
/// ```
/// Suggest using `opt.as_ref().map(|arg| takes_ref(arg));` instead.
///
/// It only checks for `Option` and `Result` and won't work with
/// ```
/// opt.map(|arg| { takes_ref(arg) });
/// ```
fn can_use_as_ref(
&self,
expr: &hir::Expr,
) -> Option<(Span, &'static str, String)> {
if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = expr.node {
if let hir::def::Def::Local(id) = path.def {
let parent = self.tcx.hir().get_parent_node(id);
if let Some(Node::Expr(hir::Expr {
hir_id,
node: hir::ExprKind::Closure(_, decl, ..),
..
})) = self.tcx.hir().find(parent) {
let parent = self.tcx.hir().get_parent_node_by_hir_id(*hir_id);
if let (Some(Node::Expr(hir::Expr {
node: hir::ExprKind::MethodCall(path, span, expr),
..
})), 1) = (self.tcx.hir().find_by_hir_id(parent), decl.inputs.len()) {
let self_ty = self.tables.borrow().node_type(expr[0].hir_id);
let self_ty = format!("{:?}", self_ty);
let name = path.ident.as_str();
let is_as_ref_able = (
self_ty.starts_with("&std::option::Option") ||
self_ty.starts_with("&std::result::Result") ||
self_ty.starts_with("std::option::Option") ||
self_ty.starts_with("std::result::Result")
) && (name == "map" || name == "and_then");
match (is_as_ref_able, self.sess().source_map().span_to_snippet(*span)) {
(true, Ok(src)) => {
return Some((*span, "consider using `as_ref` instead",
format!("as_ref().{}", src)));
},
_ => ()
}
}
}
}
}
None
}
/// This function is used to determine potential "simple" improvements or users' errors and
/// provide them useful help. For example:
///
/// ```
/// fn some_fn(s: &str) {}
///
/// let x = "hey!".to_owned();
/// some_fn(x); // error
/// ```
///
/// No need to find every potential function which could make a coercion to transform a
/// `String` into a `&str` since a `&` would do the trick!
///
/// In addition of this check, it also checks between references mutability state. If the
/// expected is mutable but the provided isn't, maybe we could just say "Hey, try with
/// `&mut`!".
pub fn check_ref(&self,
expr: &hir::Expr,
checked_ty: Ty<'tcx>,
expected: Ty<'tcx>)
-> Option<(Span, &'static str, String)> {
let cm = self.sess().source_map();
// Use the callsite's span if this is a macro call. #41858
let sp = cm.call_span_if_macro(expr.span);
if !cm.span_to_filename(sp).is_real() {
return None;
}
match (&expected.sty, &checked_ty.sty) {
(&ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (&exp.sty, &check.sty) {
(&ty::Str, &ty::Array(arr, _)) |
(&ty::Str, &ty::Slice(arr)) if arr == self.tcx.types.u8 => {
if let hir::ExprKind::Lit(_) = expr.node {
if let Ok(src) = cm.span_to_snippet(sp) {
if src.starts_with("b\"") {
return Some((sp,
"consider removing the leading `b`",
src[1..].to_string()));
}
}
}
},
(&ty::Array(arr, _), &ty::Str) |
(&ty::Slice(arr), &ty::Str) if arr == self.tcx.types.u8 => {
if let hir::ExprKind::Lit(_) = expr.node {
if let Ok(src) = cm.span_to_snippet(sp) {
if src.starts_with("\"") {
return Some((sp,
"consider adding a leading `b`",
format!("b{}", src)));
}
}
}
}
_ => {}
},
(&ty::Ref(_, _, mutability), _) => {
// Check if it can work when put into a ref. For example:
//
// ```
// fn bar(x: &mut i32) {}
//
// let x = 0u32;
// bar(&x); // error, expected &mut
// ```
let ref_ty = match mutability {
hir::Mutability::MutMutable => self.tcx.mk_mut_ref(
self.tcx.mk_region(ty::ReStatic),
checked_ty),
hir::Mutability::MutImmutable => self.tcx.mk_imm_ref(
self.tcx.mk_region(ty::ReStatic),
checked_ty),
};
if self.can_coerce(ref_ty, expected) {
if let Ok(src) = cm.span_to_snippet(sp) {
let needs_parens = match expr.node {
// parenthesize if needed (Issue #46756)
hir::ExprKind::Cast(_, _) |
hir::ExprKind::Binary(_, _, _) => true,
// parenthesize borrows of range literals (Issue #54505)
_ if self.is_range_literal(expr) => true,
_ => false,
};
let sugg_expr = if needs_parens {
format!("({})", src)
} else {
src
};
if let Some(sugg) = self.can_use_as_ref(expr) {
return Some(sugg);
}
return Some(match mutability {
hir::Mutability::MutMutable => {
(sp, "consider mutably borrowing here", format!("&mut {}",
sugg_expr))
}
hir::Mutability::MutImmutable => {
(sp, "consider borrowing here", format!("&{}", sugg_expr))
}
});
}
}
}
(_, &ty::Ref(_, checked, _)) => {
// We have `&T`, check if what was expected was `T`. If so,
// we may want to suggest adding a `*`, or removing
// a `&`.
//
// (But, also check the `expn_info()` to see if this is
// a macro; if so, it's hard to extract the text and make a good
// suggestion, so don't bother.)
if self.infcx.can_sub(self.param_env, checked, &expected).is_ok() &&
sp.ctxt().outer().expn_info().is_none() {
match expr.node {
// Maybe remove `&`?
hir::ExprKind::AddrOf(_, ref expr) => {
if !cm.span_to_filename(expr.span).is_real() {
return None;
}
if let Ok(code) = cm.span_to_snippet(expr.span) {
return Some((sp, "consider removing the borrow", code));
}
}
// Maybe add `*`? Only if `T: Copy`.
_ => {
if self.infcx.type_is_copy_modulo_regions(self.param_env,
checked,
sp) {
// do not suggest if the span comes from a macro (#52783)
if let (Ok(code),
true) = (cm.span_to_snippet(sp), sp == expr.span) {
return Some((
sp,
"consider dereferencing the borrow",
format!("*{}", code),
));
}
}
}
}
}
}
_ => {}
}
None
}
/// This function checks if the specified expression is a built-in range literal.
/// (See: `LoweringContext::lower_expr()` in `src/librustc/hir/lowering.rs`).
fn is_range_literal(&self, expr: &hir::Expr) -> bool {
use hir::{Path, QPath, ExprKind, TyKind};
// We support `::std::ops::Range` and `::core::ops::Range` prefixes
let is_range_path = |path: &Path| {
let mut segs = path.segments.iter()
.map(|seg| seg.ident.as_str());
if let (Some(root), Some(std_core), Some(ops), Some(range), None) =
(segs.next(), segs.next(), segs.next(), segs.next(), segs.next())
{
// "{{root}}" is the equivalent of `::` prefix in Path
root == "{{root}}" && (std_core == "std" || std_core == "core")
&& ops == "ops" && range.starts_with("Range")
} else {
false
}
};
let span_is_range_literal = |span: &Span| {
// Check whether a span corresponding to a range expression
// is a range literal, rather than an explicit struct or `new()` call.
let source_map = self.tcx.sess.source_map();
let end_point = source_map.end_point(*span);
if let Ok(end_string) = source_map.span_to_snippet(end_point) {
!(end_string.ends_with("}") || end_string.ends_with(")"))
} else {
false
}
};
match expr.node {
// All built-in range literals but `..=` and `..` desugar to Structs
ExprKind::Struct(ref qpath, _, _) => {
if let QPath::Resolved(None, ref path) = **qpath {
return is_range_path(&path) && span_is_range_literal(&expr.span);
}
}
// `..` desugars to its struct path
ExprKind::Path(QPath::Resolved(None, ref path)) => {
return is_range_path(&path) && span_is_range_literal(&expr.span);
}
// `..=` desugars into `::std::ops::RangeInclusive::new(...)`
ExprKind::Call(ref func, _) => {
if let ExprKind::Path(QPath::TypeRelative(ref ty, ref segment)) = func.node {
if let TyKind::Path(QPath::Resolved(None, ref path)) = ty.node {
let call_to_new = segment.ident.as_str() == "new";
return is_range_path(&path) && span_is_range_literal(&expr.span)
&& call_to_new;
}
}
}
_ => {}
}
false
}
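    // Illustrative examples of the distinction drawn above: `0..10` and
    // `0..=10` are built-in range literals (their spans end in the digit, so
    // span_is_range_literal accepts them), while explicitly written
    // `std::ops::Range { start: 0, end: 10 }` or
    // `std::ops::RangeInclusive::new(0, 10)` end in `}` / `)` and are rejected.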
pub fn check_for_cast(
&self,
err: &mut DiagnosticBuilder<'tcx>,
expr: &hir::Expr,
checked_ty: Ty<'tcx>,
expected_ty: Ty<'tcx>,
) -> bool {
let parent_id = self.tcx.hir().get_parent_node_by_hir_id(expr.hir_id);
if let Some(parent) = self.tcx.hir().find_by_hir_id(parent_id) {
// Shouldn't suggest `.into()` on `const`s.
if let Node::Item(Item { node: ItemKind::Const(_, _), .. }) = parent {
// FIXME(estebank): modify once we decide to suggest `as` casts
return false;
}
};
let will_truncate = "will truncate the source value";
let depending_on_isize = "will truncate or zero-extend depending on the bit width of \
`isize`";
let depending_on_usize = "will truncate or zero-extend depending on the bit width of \
`usize`";
let will_sign_extend = "will sign-extend the source value";
let will_zero_extend = "will zero-extend the source value";
// If casting this expression to a given numeric type would be appropriate in case of a type
// mismatch.
//
// We want to minimize the amount of casting operations that are suggested, as it can be a
// lossy operation with potentially bad side effects, so we only suggest when encountering
// an expression that indicates that the original type couldn't be directly changed.
//
// For now, don't suggest casting with `as`.
let can_cast = false;
let mut prefix = String::new();
if let Some(hir::Node::Expr(hir::Expr {
node: hir::ExprKind::Struct(_, fields, _),
..
})) = self.tcx.hir().find_by_hir_id(self.tcx.hir().get_parent_node_by_hir_id(expr.hir_id)) {
// `expr` is a literal field for a struct, only suggest if appropriate
for field in fields {
if field.expr.hir_id == expr.hir_id && field.is_shorthand {
// This is a field literal
prefix = format!("{}: ", field.ident);
break;
}
}
if &prefix == "" {
// Likely a field was meant, but this field wasn't found. Do not suggest anything.
return false;
}
}
let needs_paren = expr.precedence().order() < (PREC_POSTFIX as i8);
if let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) {
let msg = format!("you can cast an `{}` to `{}`", checked_ty, expected_ty);
let cast_suggestion = format!(
"{}{}{}{} as {}",
prefix,
if needs_paren { "(" } else { "" },
src,
if needs_paren { ")" } else { "" },
expected_ty,
);
let into_suggestion = format!(
"{}{}{}{}.into()",
prefix,
if needs_paren { "(" } else { "" },
src,
if needs_paren { ")" } else { "" },
);
let literal_is_ty_suffixed = |expr: &hir::Expr| {
if let hir::ExprKind::Lit(lit) = &expr.node {
lit.node.is_suffixed()
} else {
false
}
};
let into_sugg = into_suggestion.clone();
let suggest_to_change_suffix_or_into = |err: &mut DiagnosticBuilder<'_>,
note: Option<&str>| {
let suggest_msg = if literal_is_ty_suffixed(expr) {
format!(
"change the type of the numeric literal from `{}` to `{}`",
checked_ty,
expected_ty,
)
} else {
match note {
Some(note) => format!("{}, which {}", msg, note),
_ => format!("{} in a lossless way", msg),
}
};
let suffix_suggestion = format!(
"{}{}{}{}",
if needs_paren { "(" } else { "" },
src.trim_end_matches(&checked_ty.to_string()),
expected_ty,
if needs_paren { ")" } else { "" },
);
err.span_suggestion(
expr.span,
&suggest_msg,
if literal_is_ty_suffixed(expr) {
suffix_suggestion
} else {
into_sugg
},
Applicability::MachineApplicable,
);
};
match (&expected_ty.sty, &checked_ty.sty) {
(&ty::Int(ref exp), &ty::Int(ref found)) => {
match (found.bit_width(), exp.bit_width()) {
(Some(found), Some(exp)) if found > exp => {
if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_truncate),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
}
(None, _) | (_, None) => {
if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, depending_on_isize),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
}
_ => {
suggest_to_change_suffix_or_into(
err,
Some(will_sign_extend),
);
}
}
true
}
(&ty::Uint(ref exp), &ty::Uint(ref found)) => {
match (found.bit_width(), exp.bit_width()) {
(Some(found), Some(exp)) if found > exp => {
if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_truncate),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
}
(None, _) | (_, None) => {
if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, depending_on_usize),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
}
_ => {
suggest_to_change_suffix_or_into(
err,
Some(will_zero_extend),
);
}
}
true
}
(&ty::Int(ref exp), &ty::Uint(ref found)) => {
if can_cast {
match (found.bit_width(), exp.bit_width()) {
(Some(found), Some(exp)) if found > exp - 1 => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_truncate),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
(None, None) => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_truncate),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
(None, _) => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, depending_on_isize),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
(_, None) => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, depending_on_usize),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
_ => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_zero_extend),
cast_suggestion,
Applicability::MachineApplicable
);
}
}
}
true
}
(&ty::Uint(ref exp), &ty::Int(ref found)) => {
if can_cast {
match (found.bit_width(), exp.bit_width()) {
(Some(found), Some(exp)) if found - 1 > exp => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_truncate),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
(None, None) => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_sign_extend),
cast_suggestion,
Applicability::MachineApplicable // lossy conversion
);
}
(None, _) => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, depending_on_usize),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
(_, None) => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, depending_on_isize),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
_ => {
err.span_suggestion(
expr.span,
&format!("{}, which {}", msg, will_sign_extend),
cast_suggestion,
Applicability::MachineApplicable
);
}
}
}
true
}
(&ty::Float(ref exp), &ty::Float(ref found)) => {
if found.bit_width() < exp.bit_width() {
suggest_to_change_suffix_or_into(
err,
None,
);
} else if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, producing the closest possible value", msg),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
true
}
(&ty::Uint(_), &ty::Float(_)) | (&ty::Int(_), &ty::Float(_)) => {
if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, rounding the float towards zero", msg),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
err.warn("casting here will cause undefined behavior if the rounded value \
cannot be represented by the target integer type, including \
`Inf` and `NaN` (this is a bug and will be fixed)");
}
true
}
(&ty::Float(ref exp), &ty::Uint(ref found)) => {
// if `found` is `None` (meaning found is `usize`), don't suggest `.into()`
if exp.bit_width() > found.bit_width().unwrap_or(256) {
err.span_suggestion(
expr.span,
&format!("{}, producing the floating point representation of the \
integer",
msg),
into_suggestion,
Applicability::MachineApplicable
);
} else if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, producing the floating point representation of the \
integer, rounded if necessary",
msg),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
true
}
(&ty::Float(ref exp), &ty::Int(ref found)) => {
// if `found` is `None` (meaning found is `isize`), don't suggest `.into()`
if exp.bit_width() > found.bit_width().unwrap_or(256) {
err.span_suggestion(
expr.span,
&format!("{}, producing the floating point representation of the \
integer",
msg),
into_suggestion,
Applicability::MachineApplicable
);
} else if can_cast {
err.span_suggestion(
expr.span,
&format!("{}, producing the floating point representation of the \
integer, rounded if necessary",
msg),
cast_suggestion,
Applicability::MaybeIncorrect // lossy conversion
);
}
true
}
_ => false,
}
} else {
false
}
}
}
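
// Illustrative outcome of check_for_cast above (wording approximate): for
// `let x: u64 = 1u32;` the widening is lossless, so the emitted help is either
// to change the literal's suffix (`1u64`) or to append `.into()`
// (`1u32.into()`); because `can_cast` is currently hard-wired to false, the
// lossy `as` suggestions are never emitted.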
eviction_test.go

/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e_node
import (
"fmt"
"path/filepath"
"strconv"
"strings"
"time"
"k8s.io/api/core/v1"
schedulerapi "k8s.io/api/scheduling/v1beta1"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/fields"
nodeutil "k8s.io/kubernetes/pkg/api/v1/node"
"k8s.io/kubernetes/pkg/kubelet/apis/kubeletconfig"
stats "k8s.io/kubernetes/pkg/kubelet/apis/stats/v1alpha1"
"k8s.io/kubernetes/pkg/kubelet/eviction"
kubeletmetrics "k8s.io/kubernetes/pkg/kubelet/metrics"
kubetypes "k8s.io/kubernetes/pkg/kubelet/types"
"k8s.io/kubernetes/test/e2e/framework"
imageutils "k8s.io/kubernetes/test/utils/image"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Eviction Policy is described here:
// https://github.com/kubernetes/community/blob/master/contributors/design-proposals/kubelet-eviction.md
const (
postTestConditionMonitoringPeriod = 1 * time.Minute
evictionPollInterval = 2 * time.Second
pressureDissapearTimeout = 1 * time.Minute
longPodDeletionTimeout = 10 * time.Minute
// pressure conditions often surface after evictions because the kubelet only updates
// node conditions periodically.
// we wait this period after evictions to make sure that we wait out this delay
pressureDelay = 20 * time.Second
testContextFmt = "when we run containers that should cause %s"
noPressure = v1.NodeConditionType("NoPressure")
lotsOfDisk = 10240 // 10 Gb in Mb
lotsOfFiles = 1000000000 // 1 billion
resourceInodes = v1.ResourceName("inodes")
noStarvedResource = v1.ResourceName("none")
)
// InodeEviction tests that the node responds to node disk pressure by evicting only responsible pods.
// Node disk pressure is induced by consuming all inodes on the node.
var _ = framework.KubeDescribe("InodeEviction [Slow] [Serial] [Disruptive][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("inode-eviction-test")
expectedNodeCondition := v1.NodeDiskPressure
expectedStarvedResource := resourceInodes
pressureTimeout := 15 * time.Minute
inodesConsumed := uint64(200000)
Context(fmt.Sprintf(testContextFmt, expectedNodeCondition), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
// Set the eviction threshold to inodesFree - inodesConsumed, so that using inodesConsumed causes an eviction.
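			// Worked example with illustrative numbers: if the node reports
			// 1,000,000 free inodes and inodesConsumed is 200,000, EvictionHard
			// becomes {"nodefs.inodesFree": "800000"}, so the inode-hog pods
			// below only need to create ~200,000 files to cross the threshold.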
summary := eventuallyGetSummary()
inodesFree := *summary.Node.Fs.InodesFree
if inodesFree <= inodesConsumed {
framework.Skipf("Too few inodes free on the host for the InodeEviction test to run")
}
initialConfig.EvictionHard = map[string]string{"nodefs.inodesFree": fmt.Sprintf("%d", inodesFree-inodesConsumed)}
initialConfig.EvictionMinimumReclaim = map[string]string{}
})
runEvictionTest(f, pressureTimeout, expectedNodeCondition, expectedStarvedResource, logInodeMetrics, []podEvictSpec{
{
evictionPriority: 1,
pod: inodeConsumingPod("container-inode-hog", lotsOfFiles, nil),
},
{
evictionPriority: 1,
pod: inodeConsumingPod("volume-inode-hog", lotsOfFiles, &v1.VolumeSource{EmptyDir: &v1.EmptyDirVolumeSource{}}),
},
{
evictionPriority: 0,
pod: innocentPod(),
},
})
})
})
// ImageGCNoEviction tests that the node does not evict pods when inodes are consumed by images
// Disk pressure is induced by pulling large images
var _ = framework.KubeDescribe("ImageGCNoEviction [Slow] [Serial] [Disruptive][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("image-gc-eviction-test")
pressureTimeout := 10 * time.Minute
expectedNodeCondition := v1.NodeDiskPressure
expectedStarvedResource := resourceInodes
inodesConsumed := uint64(100000)
Context(fmt.Sprintf(testContextFmt, expectedNodeCondition), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
// Set the eviction threshold to inodesFree - inodesConsumed, so that using inodesConsumed causes an eviction.
summary := eventuallyGetSummary()
inodesFree := *summary.Node.Fs.InodesFree
if inodesFree <= inodesConsumed {
framework.Skipf("Too few inodes free on the host for the InodeEviction test to run")
}
initialConfig.EvictionHard = map[string]string{"nodefs.inodesFree": fmt.Sprintf("%d", inodesFree-inodesConsumed)}
initialConfig.EvictionMinimumReclaim = map[string]string{}
})
// Consume enough inodes to induce disk pressure,
// but expect that image garbage collection can reduce it enough to avoid an eviction
runEvictionTest(f, pressureTimeout, expectedNodeCondition, expectedStarvedResource, logDiskMetrics, []podEvictSpec{
{
evictionPriority: 0,
pod: inodeConsumingPod("container-inode", 110000, nil),
},
})
})
})
// MemoryAllocatableEviction tests that the node responds to node memory pressure by evicting only responsible pods.
// Node memory pressure is only encountered because we reserve the majority of the node's capacity via kube-reserved.
var _ = framework.KubeDescribe("MemoryAllocatableEviction [Slow] [Serial] [Disruptive][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("memory-allocatable-eviction-test")
expectedNodeCondition := v1.NodeMemoryPressure
expectedStarvedResource := v1.ResourceMemory
pressureTimeout := 10 * time.Minute
Context(fmt.Sprintf(testContextFmt, expectedNodeCondition), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
// Set large system and kube reserved values to trigger allocatable thresholds far before hard eviction thresholds.
kubeReserved := getNodeCPUAndMemoryCapacity(f)[v1.ResourceMemory]
// The default hard eviction threshold is 250Mb, so Allocatable = Capacity - Reserved - 250Mb
// We want Allocatable = 50Mb, so set Reserved = Capacity - Allocatable - 250Mb = Capacity - 300Mb
kubeReserved.Sub(resource.MustParse("300Mi"))
initialConfig.KubeReserved = map[string]string{
string(v1.ResourceMemory): kubeReserved.String(),
}
initialConfig.EnforceNodeAllocatable = []string{kubetypes.NodeAllocatableEnforcementKey}
initialConfig.CgroupsPerQOS = true
})
runEvictionTest(f, pressureTimeout, expectedNodeCondition, expectedStarvedResource, logMemoryMetrics, []podEvictSpec{
{
evictionPriority: 1,
pod: getMemhogPod("memory-hog-pod", "memory-hog", v1.ResourceRequirements{}),
},
{
evictionPriority: 0,
pod: innocentPod(),
},
})
})
})
// LocalStorageEviction tests that the node responds to node disk pressure by evicting only responsible pods
// Disk pressure is induced by running pods which consume disk space.
var _ = framework.KubeDescribe("LocalStorageEviction [Slow] [Serial] [Disruptive][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("localstorage-eviction-test")
pressureTimeout := 10 * time.Minute
expectedNodeCondition := v1.NodeDiskPressure
expectedStarvedResource := v1.ResourceEphemeralStorage
Context(fmt.Sprintf(testContextFmt, expectedNodeCondition), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
diskConsumed := resource.MustParse("100Mi")
summary := eventuallyGetSummary()
availableBytes := *(summary.Node.Fs.AvailableBytes)
initialConfig.EvictionHard = map[string]string{"nodefs.available": fmt.Sprintf("%d", availableBytes-uint64(diskConsumed.Value()))}
initialConfig.EvictionMinimumReclaim = map[string]string{}
})
runEvictionTest(f, pressureTimeout, expectedNodeCondition, expectedStarvedResource, logDiskMetrics, []podEvictSpec{
{
evictionPriority: 1,
pod: diskConsumingPod("container-disk-hog", lotsOfDisk, nil, v1.ResourceRequirements{}),
},
{
evictionPriority: 0,
pod: innocentPod(),
},
})
})
})
// LocalStorageEviction tests that the node responds to node disk pressure by evicting only responsible pods
// Disk pressure is induced by running pods which consume disk space, which exceed the soft eviction threshold.
// Note: This test's purpose is to test Soft Evictions. Local storage was chosen since it is the least costly to run.
var _ = framework.KubeDescribe("LocalStorageSoftEviction [Slow] [Serial] [Disruptive][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("localstorage-eviction-test")
pressureTimeout := 10 * time.Minute
expectedNodeCondition := v1.NodeDiskPressure
expectedStarvedResource := v1.ResourceEphemeralStorage
Context(fmt.Sprintf(testContextFmt, expectedNodeCondition), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
diskConsumed := resource.MustParse("100Mi")
summary := eventuallyGetSummary()
availableBytes := *(summary.Node.Fs.AvailableBytes)
if availableBytes <= uint64(diskConsumed.Value()) {
framework.Skipf("Too little disk free on the host for the LocalStorageSoftEviction test to run")
}
initialConfig.EvictionSoft = map[string]string{"nodefs.available": fmt.Sprintf("%d", availableBytes-uint64(diskConsumed.Value()))}
initialConfig.EvictionSoftGracePeriod = map[string]string{"nodefs.available": "1m"}
// Defer to the pod default grace period
initialConfig.EvictionMaxPodGracePeriod = 30
initialConfig.EvictionMinimumReclaim = map[string]string{}
// Ensure that pods are not evicted because of the eviction-hard threshold
// setting a threshold to 0% disables; non-empty map overrides default value (necessary due to omitempty)
initialConfig.EvictionHard = map[string]string{"memory.available": "0%"}
})
runEvictionTest(f, pressureTimeout, expectedNodeCondition, expectedStarvedResource, logDiskMetrics, []podEvictSpec{
{
evictionPriority: 1,
pod: diskConsumingPod("container-disk-hog", lotsOfDisk, nil, v1.ResourceRequirements{}),
},
{
evictionPriority: 0,
pod: innocentPod(),
},
})
})
})
// LocalStorageCapacityIsolationEviction tests that container and volume local storage limits are enforced through evictions
var _ = framework.KubeDescribe("LocalStorageCapacityIsolationEviction [Slow] [Serial] [Disruptive] [Feature:LocalStorageCapacityIsolation][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("localstorage-eviction-test")
evictionTestTimeout := 10 * time.Minute
Context(fmt.Sprintf(testContextFmt, "evictions due to pod local storage violations"), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
// setting a threshold to 0% disables; non-empty map overrides default value (necessary due to omitempty)
initialConfig.EvictionHard = map[string]string{"memory.available": "0%"}
})
sizeLimit := resource.MustParse("100Mi")
useOverLimit := 101 /* Mb */
useUnderLimit := 99 /* Mb */
containerLimit := v1.ResourceList{v1.ResourceEphemeralStorage: sizeLimit}
runEvictionTest(f, evictionTestTimeout, noPressure, noStarvedResource, logDiskMetrics, []podEvictSpec{
{
evictionPriority: 1, // This pod should be evicted because emptyDir (default storage type) usage violation
pod: diskConsumingPod("emptydir-disk-sizelimit", useOverLimit, &v1.VolumeSource{
EmptyDir: &v1.EmptyDirVolumeSource{SizeLimit: &sizeLimit},
}, v1.ResourceRequirements{}),
},
{
evictionPriority: 1, // This pod should be evicted because of memory emptyDir usage violation
pod: diskConsumingPod("emptydir-memory-sizelimit", useOverLimit, &v1.VolumeSource{
EmptyDir: &v1.EmptyDirVolumeSource{Medium: "Memory", SizeLimit: &sizeLimit},
}, v1.ResourceRequirements{}),
},
{
evictionPriority: 1, // This pod should cross the container limit by writing to its writable layer.
pod: diskConsumingPod("container-disk-limit", useOverLimit, nil, v1.ResourceRequirements{Limits: containerLimit}),
},
{
evictionPriority: 1, // This pod should hit the container limit by writing to an emptydir
pod: diskConsumingPod("container-emptydir-disk-limit", useOverLimit, &v1.VolumeSource{EmptyDir: &v1.EmptyDirVolumeSource{}},
v1.ResourceRequirements{Limits: containerLimit}),
},
{
evictionPriority: 0, // This pod should not be evicted because it uses less than its limit
pod: diskConsumingPod("emptydir-disk-below-sizelimit", useUnderLimit, &v1.VolumeSource{
EmptyDir: &v1.EmptyDirVolumeSource{SizeLimit: &sizeLimit},
}, v1.ResourceRequirements{}),
},
{
evictionPriority: 0, // This pod should not be evicted because it uses less than its limit
pod: diskConsumingPod("container-disk-below-sizelimit", useUnderLimit, nil, v1.ResourceRequirements{Limits: containerLimit}),
},
})
})
})
// PriorityMemoryEvictionOrdering tests that the node responds to node memory pressure by evicting pods.
// This test tests that the guaranteed pod is never evicted, and that the lower-priority pod is evicted before
// the higher priority pod.
var _ = framework.KubeDescribe("PriorityMemoryEvictionOrdering [Slow] [Serial] [Disruptive][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("priority-memory-eviction-ordering-test")
expectedNodeCondition := v1.NodeMemoryPressure
expectedStarvedResource := v1.ResourceMemory
pressureTimeout := 10 * time.Minute
highPriorityClassName := f.BaseName + "-high-priority"
highPriority := int32(999999999)
BeforeEach(func() {
_, err := f.ClientSet.SchedulingV1beta1().PriorityClasses().Create(&schedulerapi.PriorityClass{ObjectMeta: metav1.ObjectMeta{Name: highPriorityClassName}, Value: highPriority})
Expect(err == nil || errors.IsAlreadyExists(err)).To(BeTrue())
})
AfterEach(func() {
err := f.ClientSet.SchedulingV1beta1().PriorityClasses().Delete(highPriorityClassName, &metav1.DeleteOptions{})
Expect(err).NotTo(HaveOccurred())
})
Context(fmt.Sprintf(testContextFmt, expectedNodeCondition), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
memoryConsumed := resource.MustParse("600Mi")
summary := eventuallyGetSummary()
availableBytes := *(summary.Node.Memory.AvailableBytes)
if availableBytes <= uint64(memoryConsumed.Value()) {
framework.Skipf("Too little memory free on the host for the PriorityMemoryEvictionOrdering test to run")
}
initialConfig.EvictionHard = map[string]string{"memory.available": fmt.Sprintf("%d", availableBytes-uint64(memoryConsumed.Value()))}
initialConfig.EvictionMinimumReclaim = map[string]string{}
})
specs := []podEvictSpec{
{
evictionPriority: 2,
pod: getMemhogPod("memory-hog-pod", "memory-hog", v1.ResourceRequirements{}),
},
{
evictionPriority: 1,
pod: getMemhogPod("high-priority-memory-hog-pod", "high-priority-memory-hog", v1.ResourceRequirements{}),
},
{
evictionPriority: 0,
pod: getMemhogPod("guaranteed-pod", "guaranteed-pod", v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceMemory: resource.MustParse("300Mi"),
},
Limits: v1.ResourceList{
v1.ResourceMemory: resource.MustParse("300Mi"),
},
}),
},
}
specs[1].pod.Spec.PriorityClassName = highPriorityClassName
runEvictionTest(f, pressureTimeout, expectedNodeCondition, expectedStarvedResource, logMemoryMetrics, specs)
})
})
// PriorityLocalStorageEvictionOrdering tests that the node responds to node disk pressure by evicting pods.
// This test tests that the guaranteed pod is never evicted, and that the lower-priority pod is evicted before
// the higher priority pod.
var _ = framework.KubeDescribe("PriorityLocalStorageEvictionOrdering [Slow] [Serial] [Disruptive][NodeFeature:Eviction]", func() {
f := framework.NewDefaultFramework("priority-disk-eviction-ordering-test")
expectedNodeCondition := v1.NodeDiskPressure
expectedStarvedResource := v1.ResourceEphemeralStorage
pressureTimeout := 10 * time.Minute
highPriorityClassName := f.BaseName + "-high-priority"
highPriority := int32(999999999)
BeforeEach(func() {
_, err := f.ClientSet.SchedulingV1beta1().PriorityClasses().Create(&schedulerapi.PriorityClass{ObjectMeta: metav1.ObjectMeta{Name: highPriorityClassName}, Value: highPriority})
Expect(err == nil || errors.IsAlreadyExists(err)).To(BeTrue())
})
AfterEach(func() {
err := f.ClientSet.SchedulingV1beta1().PriorityClasses().Delete(highPriorityClassName, &metav1.DeleteOptions{})
Expect(err).NotTo(HaveOccurred())
})
Context(fmt.Sprintf(testContextFmt, expectedNodeCondition), func() {
tempSetCurrentKubeletConfig(f, func(initialConfig *kubeletconfig.KubeletConfiguration) {
diskConsumed := resource.MustParse("350Mi")
summary := eventuallyGetSummary()
availableBytes := *(summary.Node.Fs.AvailableBytes)
if availableBytes <= uint64(diskConsumed.Value()) {
framework.Skipf("Too little disk free on the host for the PriorityLocalStorageEvictionOrdering test to run")
}
initialConfig.EvictionHard = map[string]string{"nodefs.available": fmt.Sprintf("%d", availableBytes-uint64(diskConsumed.Value()))}
initialConfig.EvictionMinimumReclaim = map[string]string{}
})
specs := []podEvictSpec{
{
evictionPriority: 2,
pod: diskConsumingPod("best-effort-disk", lotsOfDisk, nil, v1.ResourceRequirements{}),
},
{
evictionPriority: 1,
pod: diskConsumingPod("high-priority-disk", lotsOfDisk, nil, v1.ResourceRequirements{}),
},
{
evictionPriority: 0,
// Only require 99% accuracy (297/300 Mb) because on some OS distributions, the file itself (excluding contents), consumes disk space.
pod: diskConsumingPod("guaranteed-disk", 297 /* Mb */, nil, v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceEphemeralStorage: resource.MustParse("300Mi"),
},
Limits: v1.ResourceList{
v1.ResourceEphemeralStorage: resource.MustParse("300Mi"),
},
}),
},
}
specs[1].pod.Spec.PriorityClassName = highPriorityClassName
runEvictionTest(f, pressureTimeout, expectedNodeCondition, expectedStarvedResource, logDiskMetrics, specs)
})
})
// Struct used by runEvictionTest that specifies the pod, and when that pod should be evicted, relative to other pods
type podEvictSpec struct {
// P0 should never be evicted, P1 shouldn't evict before P2, etc.
// If two are ranked at P1, either is permitted to fail before the other.
// The test ends when all pods other than p0 have been evicted
evictionPriority int
pod *v1.Pod
}
// runEvictionTest sets up a testing environment given the provided pods, and checks a few things:
// It ensures that the desired expectedNodeCondition is actually triggered.
// It ensures that evictionPriority 0 pods are not evicted
// It ensures that lower evictionPriority pods are always evicted before higher evictionPriority pods (2 evicted before 1, etc.)
// It ensures that all pods with non-zero evictionPriority are eventually evicted.
// runEvictionTest then cleans up the testing environment by deleting provided pods, and ensures that expectedNodeCondition no longer exists
func runEvictionTest(f *framework.Framework, pressureTimeout time.Duration, expectedNodeCondition v1.NodeConditionType, expectedStarvedResource v1.ResourceName, logFunc func(), testSpecs []podEvictSpec) {
// Place the remainder of the test within a context so that the kubelet config is set before and after the test.
Context("", func() {
BeforeEach(func() {
// reduce memory usage in the allocatable cgroup to ensure we do not have MemoryPressure
reduceAllocatableMemoryUsage()
// Nodes do not immediately report local storage capacity
// Sleep so that pods requesting local storage do not fail to schedule
time.Sleep(30 * time.Second)
By("seting up pods to be used by tests")
for _, spec := range testSpecs {
By(fmt.Sprintf("creating pod with container: %s", spec.pod.Name))
f.PodClient().CreateSync(spec.pod)
}
})
It("should eventually evict all of the correct pods", func() {
By(fmt.Sprintf("Waiting for node to have NodeCondition: %s", expectedNodeCondition))
Eventually(func() error {
logFunc()
if expectedNodeCondition == noPressure || hasNodeCondition(f, expectedNodeCondition) {
return nil
}
return fmt.Errorf("NodeCondition: %s not encountered", expectedNodeCondition)
}, pressureTimeout, evictionPollInterval).Should(BeNil())
By("Waiting for evictions to occur")
Eventually(func() error {
if expectedNodeCondition != noPressure {
if hasNodeCondition(f, expectedNodeCondition) {
framework.Logf("Node has %s", expectedNodeCondition)
} else {
framework.Logf("Node does NOT have %s", expectedNodeCondition)
}
}
logKubeletLatencyMetrics(kubeletmetrics.EvictionStatsAgeKey)
logFunc()
return verifyEvictionOrdering(f, testSpecs)
}, pressureTimeout, evictionPollInterval).Should(BeNil())
// We observe pressure from the API server. The eviction manager observes pressure from the kubelet internal stats.
// This means the eviction manager will observe pressure before we will, creating a delay between when the eviction manager
// evicts a pod, and when we observe the pressure by querying the API server. Add a delay here to account for this delay
By("making sure pressure from test has surfaced before continuing")
time.Sleep(pressureDelay)
By(fmt.Sprintf("Waiting for NodeCondition: %s to no longer exist on the node", expectedNodeCondition))
Eventually(func() error {
logFunc()
logKubeletLatencyMetrics(kubeletmetrics.EvictionStatsAgeKey)
if expectedNodeCondition != noPressure && hasNodeCondition(f, expectedNodeCondition) {
return fmt.Errorf("Conditions havent returned to normal, node still has %s", expectedNodeCondition)
}
return nil
}, pressureDissapearTimeout, evictionPollInterval).Should(BeNil())
By("checking for stable, pressure-free condition without unexpected pod failures")
Consistently(func() error {
if expectedNodeCondition != noPressure && hasNodeCondition(f, expectedNodeCondition) {
return fmt.Errorf("%s dissappeared and then reappeared", expectedNodeCondition)
}
logFunc()
logKubeletLatencyMetrics(kubeletmetrics.EvictionStatsAgeKey)
return verifyEvictionOrdering(f, testSpecs)
}, postTestConditionMonitoringPeriod, evictionPollInterval).Should(BeNil())
By("checking for correctly formatted eviction events")
verifyEvictionEvents(f, testSpecs, expectedStarvedResource)
})
AfterEach(func() {
By("deleting pods")
for _, spec := range testSpecs {
By(fmt.Sprintf("deleting pod: %s", spec.pod.Name))
f.PodClient().DeleteSync(spec.pod.Name, &metav1.DeleteOptions{}, 10*time.Minute)
}
reduceAllocatableMemoryUsage()
if expectedNodeCondition == v1.NodeDiskPressure && framework.TestContext.PrepullImages {
// The disk eviction test may cause the prepulled images to be evicted,
			// so prepull those images again to ensure this test does not affect following tests.
PrePullAllImages()
}
By("making sure we can start a new pod after the test")
podName := "test-admit-pod"
f.PodClient().CreateSync(&v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: podName,
},
Spec: v1.PodSpec{
RestartPolicy: v1.RestartPolicyNever,
Containers: []v1.Container{
{
Image: imageutils.GetPauseImageName(),
Name: podName,
},
},
},
})
if CurrentGinkgoTestDescription().Failed {
if framework.TestContext.DumpLogsOnFailure {
logPodEvents(f)
logNodeEvents(f)
}
}
})
})
}
// verifyEvictionOrdering returns an error if any non-zero-priority pod has not yet been evicted, nil otherwise.
// This function panics (via Expect) if eviction ordering is violated, or if a priority-zero pod fails.
func verifyEvictionOrdering(f *framework.Framework, testSpecs []podEvictSpec) error {
// Gather current information
updatedPodList, err := f.ClientSet.CoreV1().Pods(f.Namespace.Name).List(metav1.ListOptions{})
if err != nil {
return err
}
updatedPods := updatedPodList.Items
for _, p := range updatedPods {
framework.Logf("fetching pod %s; phase= %v", p.Name, p.Status.Phase)
}
By("checking eviction ordering and ensuring important pods dont fail")
done := true
for _, priorityPodSpec := range testSpecs {
var priorityPod v1.Pod
for _, p := range updatedPods {
if p.Name == priorityPodSpec.pod.Name {
priorityPod = p
}
}
Expect(priorityPod).NotTo(BeNil())
Expect(priorityPod.Status.Phase).NotTo(Equal(v1.PodSucceeded),
fmt.Sprintf("pod: %s succeeded unexpectedly", priorityPod.Name))
// Check eviction ordering.
// Note: it is alright for a priority 1 and priority 2 pod (for example) to fail in the same round,
// but never alright for a priority 1 pod to fail while the priority 2 pod is still running
for _, lowPriorityPodSpec := range testSpecs {
var lowPriorityPod v1.Pod
for _, p := range updatedPods {
if p.Name == lowPriorityPodSpec.pod.Name {
lowPriorityPod = p
}
}
Expect(lowPriorityPod).NotTo(BeNil())
if priorityPodSpec.evictionPriority < lowPriorityPodSpec.evictionPriority && lowPriorityPod.Status.Phase == v1.PodRunning {
Expect(priorityPod.Status.Phase).NotTo(Equal(v1.PodFailed),
fmt.Sprintf("priority %d pod: %s failed before priority %d pod: %s",
priorityPodSpec.evictionPriority, priorityPodSpec.pod.Name, lowPriorityPodSpec.evictionPriority, lowPriorityPodSpec.pod.Name))
}
}
if priorityPod.Status.Phase == v1.PodFailed {
			Expect(priorityPod.Status.Reason).To(Equal(eviction.Reason), "pod %s failed; expected Status.Reason to be %s, but got %s",
				priorityPod.Name, eviction.Reason, priorityPod.Status.Reason)
}
// EvictionPriority 0 pods should not fail
if priorityPodSpec.evictionPriority == 0 {
Expect(priorityPod.Status.Phase).NotTo(Equal(v1.PodFailed),
fmt.Sprintf("priority 0 pod: %s failed", priorityPod.Name))
}
// If a pod that is not evictionPriority 0 has not been evicted, we are not done
if priorityPodSpec.evictionPriority != 0 && priorityPod.Status.Phase != v1.PodFailed {
done = false
}
}
if done {
return nil
}
return fmt.Errorf("pods that should be evicted are still running")
}
func verifyEvictionEvents(f *framework.Framework, testSpecs []podEvictSpec, expectedStarvedResource v1.ResourceName) {
for _, spec := range testSpecs {
pod := spec.pod
if spec.evictionPriority != 0 {
selector := fields.Set{
"involvedObject.kind": "Pod",
"involvedObject.name": pod.Name,
"involvedObject.namespace": f.Namespace.Name,
"reason": eviction.Reason,
}.AsSelector().String()
podEvictEvents, err := f.ClientSet.CoreV1().Events(f.Namespace.Name).List(metav1.ListOptions{FieldSelector: selector})
Expect(err).To(BeNil(), "Unexpected error getting events during eviction test: %v", err)
Expect(len(podEvictEvents.Items)).To(Equal(1), "Expected to find 1 eviction event for pod %s, got %d", pod.Name, len(podEvictEvents.Items))
event := podEvictEvents.Items[0]
if expectedStarvedResource != noStarvedResource {
// Check the eviction.StarvedResourceKey
starved, found := event.Annotations[eviction.StarvedResourceKey]
Expect(found).To(BeTrue(), "Expected to find an annotation on the eviction event for pod %s containing the starved resource %s, but it was not found",
pod.Name, expectedStarvedResource)
starvedResource := v1.ResourceName(starved)
				Expect(starvedResource).To(Equal(expectedStarvedResource), "Expected the starved_resource annotation on pod %s to contain %s, but got %s instead",
pod.Name, expectedStarvedResource, starvedResource)
// We only check these keys for memory, because ephemeral storage evictions may be due to volume usage, in which case these values are not present
if expectedStarvedResource == v1.ResourceMemory {
// Check the eviction.OffendingContainersKey
offendersString, found := event.Annotations[eviction.OffendingContainersKey]
Expect(found).To(BeTrue(), "Expected to find an annotation on the eviction event for pod %s containing the offending containers, but it was not found",
pod.Name)
offendingContainers := strings.Split(offendersString, ",")
Expect(len(offendingContainers)).To(Equal(1), "Expected to find exactly one offending container in the %s annotation",
eviction.OffendingContainersKey)
Expect(offendingContainers[0]).To(Equal(pod.Spec.Containers[0].Name), "Expected to find the offending container: %s's usage in the %s annotation, but found %s instead",
pod.Spec.Containers[0].Name, eviction.OffendingContainersKey, offendingContainers[0])
// Check the eviction.OffendingContainersUsageKey
offendingUsageString, found := event.Annotations[eviction.OffendingContainersUsageKey]
Expect(found).To(BeTrue(), "Expected to find an annotation on the eviction event for pod %s containing the offending containers' usage, but it was not found",
pod.Name)
offendingContainersUsage := strings.Split(offendingUsageString, ",")
Expect(len(offendingContainersUsage)).To(Equal(1), "Expected to find the offending container's usage in the %s annotation, but found %+v",
eviction.OffendingContainersUsageKey, offendingContainersUsage)
usageQuantity, err := resource.ParseQuantity(offendingContainersUsage[0])
Expect(err).To(BeNil(), "Expected to be able to parse pod %s's %s annotation as a quantity, but got err: %v", pod.Name, eviction.OffendingContainersUsageKey, err)
request := pod.Spec.Containers[0].Resources.Requests[starvedResource]
Expect(usageQuantity.Cmp(request)).To(Equal(1), "Expected usage of offending container: %s in pod %s to exceed its request %s",
usageQuantity.String(), pod.Name, request.String())
}
}
}
}
}
// Returns TRUE if the node has the node condition, FALSE otherwise
func hasNodeCondition(f *framework.Framework, expectedNodeCondition v1.NodeConditionType) bool {
localNodeStatus := getLocalNode(f).Status
_, actualNodeCondition := nodeutil.GetNodeCondition(&localNodeStatus, expectedNodeCondition)
Expect(actualNodeCondition).NotTo(BeNil())
return actualNodeCondition.Status == v1.ConditionTrue
}
func logInodeMetrics() {
summary, err := getNodeSummary()
if err != nil {
framework.Logf("Error getting summary: %v", err)
return
}
if summary.Node.Runtime != nil && summary.Node.Runtime.ImageFs != nil && summary.Node.Runtime.ImageFs.Inodes != nil && summary.Node.Runtime.ImageFs.InodesFree != nil {
framework.Logf("imageFsInfo.Inodes: %d, imageFsInfo.InodesFree: %d", *summary.Node.Runtime.ImageFs.Inodes, *summary.Node.Runtime.ImageFs.InodesFree)
}
if summary.Node.Fs != nil && summary.Node.Fs.Inodes != nil && summary.Node.Fs.InodesFree != nil {
framework.Logf("rootFsInfo.Inodes: %d, rootFsInfo.InodesFree: %d", *summary.Node.Fs.Inodes, *summary.Node.Fs.InodesFree)
}
for _, pod := range summary.Pods {
framework.Logf("Pod: %s", pod.PodRef.Name)
for _, container := range pod.Containers {
if container.Rootfs != nil && container.Rootfs.InodesUsed != nil {
framework.Logf("--- summary Container: %s inodeUsage: %d", container.Name, *container.Rootfs.InodesUsed)
}
}
for _, volume := range pod.VolumeStats {
if volume.FsStats.InodesUsed != nil {
framework.Logf("--- summary Volume: %s inodeUsage: %d", volume.Name, *volume.FsStats.InodesUsed)
}
}
}
}
func logDiskMetrics() {
summary, err := getNodeSummary()
if err != nil {
framework.Logf("Error getting summary: %v", err)
return
}
if summary.Node.Runtime != nil && summary.Node.Runtime.ImageFs != nil && summary.Node.Runtime.ImageFs.CapacityBytes != nil && summary.Node.Runtime.ImageFs.AvailableBytes != nil {
framework.Logf("imageFsInfo.CapacityBytes: %d, imageFsInfo.AvailableBytes: %d", *summary.Node.Runtime.ImageFs.CapacityBytes, *summary.Node.Runtime.ImageFs.AvailableBytes)
}
if summary.Node.Fs != nil && summary.Node.Fs.CapacityBytes != nil && summary.Node.Fs.AvailableBytes != nil {
framework.Logf("rootFsInfo.CapacityBytes: %d, rootFsInfo.AvailableBytes: %d", *summary.Node.Fs.CapacityBytes, *summary.Node.Fs.AvailableBytes)
}
for _, pod := range summary.Pods {
framework.Logf("Pod: %s", pod.PodRef.Name)
for _, container := range pod.Containers {
if container.Rootfs != nil && container.Rootfs.UsedBytes != nil {
framework.Logf("--- summary Container: %s UsedBytes: %d", container.Name, *container.Rootfs.UsedBytes)
}
}
for _, volume := range pod.VolumeStats {
if volume.FsStats.UsedBytes != nil {
framework.Logf("--- summary Volume: %s UsedBytes: %d", volume.Name, *volume.FsStats.UsedBytes)
}
}
}
}
func | () {
summary, err := getNodeSummary()
if err != nil {
framework.Logf("Error getting summary: %v", err)
return
}
if summary.Node.Memory != nil && summary.Node.Memory.WorkingSetBytes != nil && summary.Node.Memory.AvailableBytes != nil {
framework.Logf("Node.Memory.WorkingSetBytes: %d, Node.Memory.AvailableBytes: %d", *summary.Node.Memory.WorkingSetBytes, *summary.Node.Memory.AvailableBytes)
}
for _, sysContainer := range summary.Node.SystemContainers {
if sysContainer.Name == stats.SystemContainerPods && sysContainer.Memory != nil && sysContainer.Memory.WorkingSetBytes != nil && sysContainer.Memory.AvailableBytes != nil {
framework.Logf("Allocatable.Memory.WorkingSetBytes: %d, Allocatable.Memory.AvailableBytes: %d", *sysContainer.Memory.WorkingSetBytes, *sysContainer.Memory.AvailableBytes)
}
}
for _, pod := range summary.Pods {
framework.Logf("Pod: %s", pod.PodRef.Name)
for _, container := range pod.Containers {
if container.Memory != nil && container.Memory.WorkingSetBytes != nil {
framework.Logf("--- summary Container: %s WorkingSetBytes: %d", container.Name, *container.Memory.WorkingSetBytes)
}
}
}
}
func eventuallyGetSummary() (s *stats.Summary) {
Eventually(func() error {
summary, err := getNodeSummary()
if err != nil {
return err
}
if summary == nil || summary.Node.Fs == nil || summary.Node.Fs.InodesFree == nil || summary.Node.Fs.AvailableBytes == nil {
return fmt.Errorf("some part of data is nil")
}
s = summary
return nil
}, time.Minute, evictionPollInterval).Should(BeNil())
return
}
// returns a pod that does not use any resources
func innocentPod() *v1.Pod {
return &v1.Pod{
ObjectMeta: metav1.ObjectMeta{Name: "innocent-pod"},
Spec: v1.PodSpec{
RestartPolicy: v1.RestartPolicyNever,
Containers: []v1.Container{
{
Image: busyboxImage,
Name: "innocent-container",
Command: []string{
"sh",
"-c",
"while true; do sleep 5; done",
},
},
},
},
}
}
const (
volumeMountPath = "/test-mnt"
volumeName = "test-volume"
)
func inodeConsumingPod(name string, numFiles int, volumeSource *v1.VolumeSource) *v1.Pod {
// Each iteration creates an empty file
return podWithCommand(volumeSource, v1.ResourceRequirements{}, numFiles, name, "touch %s${i}.txt; sleep 0.001")
}
func diskConsumingPod(name string, diskConsumedMB int, volumeSource *v1.VolumeSource, resources v1.ResourceRequirements) *v1.Pod {
// Each iteration writes 1 Mb, so do diskConsumedMB iterations.
return podWithCommand(volumeSource, resources, diskConsumedMB, name, "dd if=/dev/urandom of=%s${i} bs=1048576 count=1 2>/dev/null")
}
// podWithCommand returns a pod with the provided volumeSource and resourceRequirements.
// If a volumeSource is provided, then the volumeMountPath to the volume is inserted into the provided command.
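// For example, inodeConsumingPod("inode", 3, nil) yields a container running:
//   i=0; while [ $i -lt 3 ]; do touch file${i}.txt; sleep 0.001; i=$(($i+1)); done; while true; do sleep 5; done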
func podWithCommand(volumeSource *v1.VolumeSource, resources v1.ResourceRequirements, iterations int, name, command string) *v1.Pod {
path := ""
volumeMounts := []v1.VolumeMount{}
volumes := []v1.Volume{}
if volumeSource != nil {
path = volumeMountPath
volumeMounts = []v1.VolumeMount{{MountPath: volumeMountPath, Name: volumeName}}
volumes = []v1.Volume{{Name: volumeName, VolumeSource: *volumeSource}}
}
return &v1.Pod{
ObjectMeta: metav1.ObjectMeta{Name: fmt.Sprintf("%s-pod", name)},
Spec: v1.PodSpec{
RestartPolicy: v1.RestartPolicyNever,
Containers: []v1.Container{
{
Image: busyboxImage,
Name: fmt.Sprintf("%s-container", name),
Command: []string{
"sh",
"-c",
fmt.Sprintf("i=0; while [ $i -lt %d ]; do %s; i=$(($i+1)); done; while true; do sleep 5; done", iterations, fmt.Sprintf(command, filepath.Join(path, "file"))),
},
Resources: resources,
VolumeMounts: volumeMounts,
},
},
Volumes: volumes,
},
}
}
func getMemhogPod(podName string, ctnName string, res v1.ResourceRequirements) *v1.Pod {
env := []v1.EnvVar{
{
Name: "MEMORY_LIMIT",
ValueFrom: &v1.EnvVarSource{
ResourceFieldRef: &v1.ResourceFieldSelector{
Resource: "limits.memory",
},
},
},
}
// If there is a limit specified, pass 80% of it for -mem-total, otherwise use the downward API
// to pass limits.memory, which will be the total memory available.
// This helps prevent a guaranteed pod from triggering an OOM kill due to its low memory limit,
// which will cause the test to fail inappropriately.
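// For example, a 100Mi limit (104857600 bytes) yields memLimit == "83886080".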
var memLimit string
if limit, ok := res.Limits[v1.ResourceMemory]; ok {
memLimit = strconv.Itoa(int(
float64(limit.Value()) * 0.8))
} else {
memLimit = "$(MEMORY_LIMIT)"
}
return &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: podName,
},
Spec: v1.PodSpec{
RestartPolicy: v1.RestartPolicyNever,
Containers: []v1.Container{
{
Name: ctnName,
Image: "k8s.gcr.io/stress:v1",
ImagePullPolicy: "Always",
Env: env,
// 60 min timeout * 60s / tick per 10s = 360 ticks before timeout => ~11.11Mi/tick
// to fill ~4Gi of memory, so initial ballpark 12Mi/tick.
// We might see flakes due to timeout if the total memory on the nodes increases.
Args: []string{"-mem-alloc-size", "12Mi", "-mem-alloc-sleep", "10s", "-mem-total", memLimit},
Resources: res,
},
},
},
}
}
| logMemoryMetrics |
parse.go | package parser
import (
"bytes"
"errors"
"strings"
"unicode/utf8"
)
var (
UnterminatedSingleQuoteError = errors.New("Unterminated single-quoted string")
UnterminatedDoubleQuoteError = errors.New("Unterminated double-quoted string")
UnterminatedEscapeError = errors.New("Unterminated backslash-escape")
)
var (
splitChars = " \n\t"
singleChar = '\''
doubleChar = '"'
escapeChar = '\\'
doubleEscapeChars = "$`\"\n\\"
)
// Split splits a string according to /bin/sh's word-splitting rules. It
// supports backslash-escapes, single-quotes, and double-quotes. Notably it does
// not support the $'' style of quoting. It also doesn't attempt to perform any
// other sort of expansion, including brace expansion, shell expansion, or
// pathname expansion.
//
// If the given input has an unterminated quoted string or ends in a
// backslash-escape, one of UnterminatedSingleQuoteError,
// UnterminatedDoubleQuoteError, or UnterminatedEscapeError is returned.
func Parse(input string) (words []string, err error) {
var buf bytes.Buffer
words = make([]string, 0)
for len(input) > 0 {
// skip any splitChars at the start
c, l := utf8.DecodeRuneInString(input)
if strings.ContainsRune(splitChars, c) {
input = input[l:]
continue
}
var word string
word, input, err = splitWord(input, &buf)
if err != nil {
return
}
words = append(words, word)
}
return
}
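// Illustrative usage sketch (hypothetical input, not from the package's docs):
//
//	words, _ := Parse(`one 'two three' "four five" six\ seven`)
//	// words == []string{"one", "two three", "four five", "six seven"}

// splitWord consumes the next word from input, using buf as scratch space.
// It returns the word, the unconsumed remainder of input, and an error if the
// word ends in an unterminated quote or escape.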
func splitWord(input string, buf *bytes.Buffer) (word string, remainder string, err error) {
buf.Reset()
raw:
{
cur := input
for len(cur) > 0 {
c, l := utf8.DecodeRuneInString(cur)
cur = cur[l:]
if c == singleChar {
buf.WriteString(input[0 : len(input)-len(cur)-l])
input = cur
goto single
} else if c == doubleChar {
buf.WriteString(input[0 : len(input)-len(cur)-l])
input = cur
goto double
} else if c == escapeChar {
buf.WriteString(input[0 : len(input)-len(cur)-l])
input = cur
goto escape
} else if strings.ContainsRune(splitChars, c) {
buf.WriteString(input[0 : len(input)-len(cur)-l])
return buf.String(), cur, nil
}
}
if len(input) > 0 {
buf.WriteString(input)
input = ""
}
goto done
}
escape:
{
if len(input) == 0 {
return "", "", UnterminatedEscapeError
}
c, l := utf8.DecodeRuneInString(input)
if c == '\n' {
// a backslash-escaped newline is elided from the output entirely
} else {
buf.WriteString(input[:l])
}
input = input[l:]
}
goto raw
single:
{
i := strings.IndexRune(input, singleChar)
if i == -1 {
return "", "", UnterminatedSingleQuoteError
}
buf.WriteString(input[0:i])
input = input[i+1:]
goto raw
}
double:
{
cur := input
for len(cur) > 0 {
c, l := utf8.DecodeRuneInString(cur)
cur = cur[l:]
if c == doubleChar {
buf.WriteString(input[0 : len(input)-len(cur)-l])
input = cur
goto raw
} else if c == escapeChar {
// bash only supports certain escapes in double-quoted strings
c2, l2 := utf8.DecodeRuneInString(cur)
cur = cur[l2:]
if strings.ContainsRune(doubleEscapeChars, c2) {
buf.WriteString(input[0 : len(input)-len(cur)-l-l2])
if c2 == '\n' {
// newline is special, skip the backslash entirely | }
input = cur
}
}
}
return "", "", UnterminatedDoubleQuoteError
}
done:
return buf.String(), input, nil
} | } else {
buf.WriteRune(c2) |
tests.rs | use crate::RelativePath;
#[test]
fn relative_path_ok() {
assert_eq!(Some("a"), RelativePath::new("a").unwrap().to_str());
assert_eq!(Some("a"), RelativePath::new("./a").unwrap().to_str());
assert_eq!(Some("a"), RelativePath::new("b/../a").unwrap().to_str());
assert_eq!(
Some("a/c"),
RelativePath::new("b/../a/././c").unwrap().to_str()
);
}
#[test]
fn relative_path_err() {
assert!(RelativePath::new("../a").is_err());
assert!(RelativePath::new("/a").is_err());
}
#[test]
fn relative_path_normalize() | {
assert_eq!(Some("a"), RelativePath::new("a/").unwrap().to_str());
} |
|
fp6_3over2.rs | use crate::{
io::{Read, Result as IoResult, Write},
UniformRand,
};
use core::{
cmp::Ordering,
fmt,
marker::PhantomData,
ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign},
};
use num_traits::{One, Zero};
use rand::{
distributions::{Distribution, Standard},
Rng,
};
use crate::{
bytes::{FromBytes, ToBytes},
fields::{Field, Fp2, Fp2Parameters},
};
pub trait Fp6Parameters: 'static + Send + Sync + Copy {
type Fp2Params: Fp2Parameters;
const NONRESIDUE: Fp2<Self::Fp2Params>;
/// Coefficients for the Frobenius automorphism.
const FROBENIUS_COEFF_FP6_C1: [Fp2<Self::Fp2Params>; 6];
const FROBENIUS_COEFF_FP6_C2: [Fp2<Self::Fp2Params>; 6];
#[inline(always)]
fn mul_fp2_by_nonresidue(fe: &Fp2<Self::Fp2Params>) -> Fp2<Self::Fp2Params> {
Self::NONRESIDUE * fe
}
}
/// An element of Fp6, represented by c0 + c1 * v + c2 * v^2.
#[derive(Derivative)]
#[derivative(
Default(bound = "P: Fp6Parameters"),
Hash(bound = "P: Fp6Parameters"),
Clone(bound = "P: Fp6Parameters"),
Copy(bound = "P: Fp6Parameters"),
Debug(bound = "P: Fp6Parameters"),
PartialEq(bound = "P: Fp6Parameters"),
Eq(bound = "P: Fp6Parameters")
)]
#[derive(Serialize, Deserialize)]
pub struct Fp6<P: Fp6Parameters> {
pub c0: Fp2<P::Fp2Params>,
pub c1: Fp2<P::Fp2Params>,
pub c2: Fp2<P::Fp2Params>,
#[derivative(Debug = "ignore")]
#[doc(hidden)]
pub params: PhantomData<P>,
}
impl<P: Fp6Parameters> Fp6<P> {
pub fn new(c0: Fp2<P::Fp2Params>, c1: Fp2<P::Fp2Params>, c2: Fp2<P::Fp2Params>) -> Self {
Self {
c0,
c1,
c2,
params: PhantomData,
}
}
pub fn mul_by_fp(&mut self, element: &<P::Fp2Params as Fp2Parameters>::Fp) {
self.c0.mul_assign_by_fp(&element);
self.c1.mul_assign_by_fp(&element);
self.c2.mul_assign_by_fp(&element);
}
pub fn mul_by_fp2(&mut self, element: &Fp2<P::Fp2Params>) {
self.c0.mul_assign(element);
self.c1.mul_assign(element);
self.c2.mul_assign(element);
}
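/// Multiplies `self` in place by the sparse element `c1 * v`, i.e. an Fp6
/// element whose constant and v^2 coefficients are zero; cheaper than a full
/// `mul_assign` against a dense element.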
pub fn mul_by_1(&mut self, c1: &Fp2<P::Fp2Params>) {
let mut b_b = self.c1;
b_b.mul_assign(c1);
let mut t1 = *c1;
{
let mut tmp = self.c1;
tmp.add_assign(&self.c2);
t1.mul_assign(&tmp);
t1.sub_assign(&b_b);
t1 = P::mul_fp2_by_nonresidue(&t1);
}
let mut t2 = *c1;
{
let mut tmp = self.c0;
tmp.add_assign(&self.c1);
t2.mul_assign(&tmp);
t2.sub_assign(&b_b);
}
self.c0 = t1;
self.c1 = t2;
self.c2 = b_b;
}
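/// Multiplies `self` in place by the sparse element `c0 + c1 * v` (zero v^2
/// coefficient), the shape that typically arises when evaluating line
/// functions in pairing computations.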
pub fn mul_by_01(&mut self, c0: &Fp2<P::Fp2Params>, c1: &Fp2<P::Fp2Params>) {
let mut a_a = self.c0;
let mut b_b = self.c1;
a_a.mul_assign(c0);
b_b.mul_assign(c1);
let mut t1 = *c1;
{
let mut tmp = self.c1;
tmp.add_assign(&self.c2);
t1.mul_assign(&tmp);
t1.sub_assign(&b_b);
t1 = P::mul_fp2_by_nonresidue(&t1);
t1.add_assign(&a_a);
}
let mut t3 = *c0;
{
let mut tmp = self.c0;
tmp.add_assign(&self.c2);
t3.mul_assign(&tmp);
t3.sub_assign(&a_a);
t3.add_assign(&b_b);
}
let mut t2 = *c0;
t2.add_assign(c1);
{
let mut tmp = self.c0;
tmp.add_assign(&self.c1);
t2.mul_assign(&tmp);
t2.sub_assign(&a_a);
t2.sub_assign(&b_b);
}
self.c0 = t1;
self.c1 = t2;
self.c2 = t3;
}
}
impl<P: Fp6Parameters> Zero for Fp6<P> {
fn zero() -> Self {
Self::new(Fp2::zero(), Fp2::zero(), Fp2::zero())
}
fn is_zero(&self) -> bool {
self.c0.is_zero() && self.c1.is_zero() && self.c2.is_zero()
}
}
impl<P: Fp6Parameters> One for Fp6<P> {
fn one() -> Self {
Self::new(Fp2::one(), Fp2::zero(), Fp2::zero())
}
fn is_one(&self) -> bool {
self.c0.is_one() && self.c1.is_zero() && self.c2.is_zero()
}
}
impl<P: Fp6Parameters> Field for Fp6<P> {
#[inline]
fn characteristic<'a>() -> &'a [u64] {
Fp2::<P::Fp2Params>::characteristic()
}
fn double(&self) -> Self {
let mut result = self.clone();
result.double_in_place();
result
}
fn double_in_place(&mut self) -> &mut Self {
self.c0.double_in_place();
self.c1.double_in_place();
self.c2.double_in_place();
self
}
#[inline]
fn from_random_bytes_with_flags(bytes: &[u8]) -> Option<(Self, u8)> {
let split_at = bytes.len() / 3;
if let Some(c0) = Fp2::<P::Fp2Params>::from_random_bytes(&bytes[..split_at]) {
if let Some(c1) = Fp2::<P::Fp2Params>::from_random_bytes(&bytes[split_at..2 * split_at])
{
if let Some((c2, flags)) =
Fp2::<P::Fp2Params>::from_random_bytes_with_flags(&bytes[2 * split_at..])
{
return Some((Fp6::new(c0, c1, c2), flags));
}
}
}
None
}
#[inline]
fn from_random_bytes(bytes: &[u8]) -> Option<Self> {
Self::from_random_bytes_with_flags(bytes).map(|f| f.0)
}
fn square(&self) -> Self {
let mut result = self.clone();
result.square_in_place();
result
}
fn square_in_place(&mut self) -> &mut Self {
let s0 = self.c0.square();
let s1 = (self.c0 * &self.c1).double();
let s2 = (self.c0 - &self.c1 + &self.c2).square();
let s3 = (self.c1 * &self.c2).double();
let s4 = self.c2.square();
self.c0 = s0 + &P::mul_fp2_by_nonresidue(&s3);
self.c1 = s1 + &P::mul_fp2_by_nonresidue(&s4);
self.c2 = s1 + &s2 + &s3 - &s0 - &s4;
self
}
fn inverse(&self) -> Option<Self> {
if self.is_zero() {
None
} else {
let mut c0 = self.c2;
c0 = P::mul_fp2_by_nonresidue(&c0);
c0.mul_assign(&self.c1);
c0 = c0.neg();
{
let mut c0s = self.c0;
c0s.square_in_place();
c0.add_assign(&c0s);
}
let mut c1 = self.c2;
c1.square_in_place();
c1 = P::mul_fp2_by_nonresidue(&c1);
{
let mut c01 = self.c0;
c01.mul_assign(&self.c1);
c1.sub_assign(&c01);
}
let mut c2 = self.c1;
c2.square_in_place();
{
let mut c02 = self.c0;
c02.mul_assign(&self.c2);
c2.sub_assign(&c02);
}
let mut tmp1 = self.c2;
tmp1.mul_assign(&c1);
let mut tmp2 = self.c1;
tmp2.mul_assign(&c2);
tmp1.add_assign(&tmp2);
tmp1 = P::mul_fp2_by_nonresidue(&tmp1);
tmp2 = self.c0;
tmp2.mul_assign(&c0);
tmp1.add_assign(&tmp2);
tmp1.inverse().map(|t| Self::new(t * &c0, t * &c1, t * &c2))
}
}
fn inverse_in_place(&mut self) -> Option<&mut Self> {
if let Some(inverse) = self.inverse() {
*self = inverse;
Some(self)
} else {
None
}
}
fn frobenius_map(&mut self, power: usize) {
self.c0.frobenius_map(power);
self.c1.frobenius_map(power);
self.c2.frobenius_map(power);
self.c1.mul_assign(&P::FROBENIUS_COEFF_FP6_C1[power % 6]);
self.c2.mul_assign(&P::FROBENIUS_COEFF_FP6_C2[power % 6]);
}
}
impl<P: Fp6Parameters> fmt::Display for Fp6<P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Fq6_3over2({} + {} * v, {} * v^2)",
self.c0, self.c1, self.c2
)
}
}
impl<P: Fp6Parameters> Distribution<Fp6<P>> for Standard {
#[inline]
fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Fp6<P> {
Fp6::new(
UniformRand::rand(rng),
UniformRand::rand(rng),
UniformRand::rand(rng),
)
}
}
impl<P: Fp6Parameters> Neg for Fp6<P> {
type Output = Self;
#[inline]
#[must_use]
fn neg(self) -> Self {
let mut copy = Self::zero();
copy.c0 = self.c0.neg();
copy.c1 = self.c1.neg();
copy.c2 = self.c2.neg();
copy
}
}
impl<'a, P: Fp6Parameters> Add<&'a Self> for Fp6<P> {
type Output = Self;
#[inline]
fn add(self, other: &Self) -> Self {
let mut result = self;
result.add_assign(other);
result
}
}
impl<'a, P: Fp6Parameters> Sub<&'a Self> for Fp6<P> {
type Output = Self;
#[inline]
fn sub(self, other: &Self) -> Self {
let mut result = self;
result.sub_assign(other);
result
}
}
impl<'a, P: Fp6Parameters> Mul<&'a Self> for Fp6<P> {
type Output = Self;
#[inline]
fn mul(self, other: &Self) -> Self {
let mut result = self;
result.mul_assign(other);
result
}
}
impl<'a, P: Fp6Parameters> Div<&'a Self> for Fp6<P> {
type Output = Self;
#[inline]
fn div(self, other: &Self) -> Self {
let mut result = self;
result.mul_assign(&other.inverse().unwrap());
result
}
}
impl_additive_ops_from_ref!(Fp6, Fp6Parameters);
impl_multiplicative_ops_from_ref!(Fp6, Fp6Parameters);
impl<'a, P: Fp6Parameters> AddAssign<&'a Self> for Fp6<P> {
#[inline]
fn add_assign(&mut self, other: &Self) {
self.c0 += &other.c0;
self.c1 += &other.c1;
self.c2 += &other.c2;
}
}
impl<'a, P: Fp6Parameters> SubAssign<&'a Self> for Fp6<P> {
#[inline]
fn sub_assign(&mut self, other: &Self) {
self.c0 -= &other.c0;
self.c1 -= &other.c1;
self.c2 -= &other.c2;
}
}
impl<'a, P: Fp6Parameters> MulAssign<&'a Self> for Fp6<P> {
#[inline]
fn mul_assign(&mut self, other: &Self) {
let v0 = self.c0 * &other.c0;
let v1 = self.c1 * &other.c1;
let v2 = self.c2 * &other.c2;
let c0 =
P::mul_fp2_by_nonresidue(&((self.c1 + &self.c2) * &(other.c1 + &other.c2) - &v1 - &v2))
+ &v0;
let c1 = (self.c0 + &self.c1) * &(other.c0 + &other.c1) - &v0 - &v1
+ &P::mul_fp2_by_nonresidue(&v2);
let c2 = (self.c0 + &self.c2) * &(other.c0 + &other.c2) - &v0 - &v2 + &v1;
self.c0 = c0;
self.c1 = c1;
self.c2 = c2;
}
}
impl<'a, P: Fp6Parameters> DivAssign<&'a Self> for Fp6<P> {
#[inline]
fn div_assign(&mut self, other: &Self) |
}
impl<'a, P: Fp6Parameters> From<&'a [bool]> for Fp6<P> {
fn from(_bits: &[bool]) -> Self {
unimplemented!()
}
}
/// `Fp6` elements are ordered lexicographically.
impl<P: Fp6Parameters> Ord for Fp6<P> {
#[inline(always)]
fn cmp(&self, other: &Self) -> Ordering {
let c2_cmp = self.c2.cmp(&other.c2);
let c1_cmp = self.c1.cmp(&other.c1);
let c0_cmp = self.c0.cmp(&other.c0);
if c2_cmp == Ordering::Equal {
if c1_cmp == Ordering::Equal {
c0_cmp
} else {
c1_cmp
}
} else {
c2_cmp
}
}
}
impl<P: Fp6Parameters> PartialOrd for Fp6<P> {
#[inline(always)]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<P: Fp6Parameters> From<u128> for Fp6<P> {
fn from(other: u128) -> Self {
Self::new(other.into(), Fp2::zero(), Fp2::zero())
}
}
impl<P: Fp6Parameters> From<u64> for Fp6<P> {
fn from(other: u64) -> Self {
Self::new(other.into(), Fp2::zero(), Fp2::zero())
}
}
impl<P: Fp6Parameters> From<u32> for Fp6<P> {
fn from(other: u32) -> Self {
Self::new(other.into(), Fp2::zero(), Fp2::zero())
}
}
impl<P: Fp6Parameters> From<u16> for Fp6<P> {
fn from(other: u16) -> Self {
Self::new(other.into(), Fp2::zero(), Fp2::zero())
}
}
impl<P: Fp6Parameters> From<u8> for Fp6<P> {
fn from(other: u8) -> Self {
Self::new(other.into(), Fp2::zero(), Fp2::zero())
}
}
impl<P: Fp6Parameters> ToBytes for Fp6<P> {
#[inline]
fn write<W: Write>(&self, mut writer: W) -> IoResult<()> {
self.c0.write(&mut writer)?;
self.c1.write(&mut writer)?;
self.c2.write(&mut writer)
}
}
impl<P: Fp6Parameters> FromBytes for Fp6<P> {
#[inline]
fn read<R: Read>(mut reader: R) -> IoResult<Self> {
let c0 = Fp2::read(&mut reader)?;
let c1 = Fp2::read(&mut reader)?;
let c2 = Fp2::read(&mut reader)?;
Ok(Fp6::new(c0, c1, c2))
}
}
| {
self.mul_assign(&other.inverse().unwrap());
} |
tagdescription.go | package apimanagement
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// TagDescriptionClient is the apiManagement Client
type TagDescriptionClient struct {
BaseClient
}
// NewTagDescriptionClient creates an instance of the TagDescriptionClient client.
func NewTagDescriptionClient(subscriptionID string) TagDescriptionClient {
return NewTagDescriptionClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewTagDescriptionClientWithBaseURI creates an instance of the TagDescriptionClient client.
func NewTagDescriptionClientWithBaseURI(baseURI string, subscriptionID string) TagDescriptionClient {
return TagDescriptionClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate create/Update tag description in scope of the Api.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API revision identifier. Must be unique in the current API Management service instance. Non-current
// revision has ;rev=n as a suffix where n is the revision number.
// tagID - tag identifier. Must be unique in the current API Management service instance.
// parameters - create parameters.
// ifMatch - eTag of the Entity. Not required when creating an entity, but required when updating an entity.
func (client TagDescriptionClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string, parameters TagDescriptionCreateParameters, ifMatch string) (result TagDescriptionContract, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TagDescriptionClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "apiid", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}},
{TargetValue: tagID,
Constraints: []validation.Constraint{{Target: "tagID", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "tagID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "tagID", Name: validation.Pattern, Rule: `(^[\w]+$)|(^[\w][\w\-]+[\w]$)`, Chain: nil}}},
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.TagDescriptionBaseProperties", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.TagDescriptionBaseProperties.ExternalDocsURL", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.TagDescriptionBaseProperties.ExternalDocsURL", Name: validation.MaxLength, Rule: 2000, Chain: nil}}},
}}}}}); err != nil {
return result, validation.NewError("apimanagement.TagDescriptionClient", "CreateOrUpdate", err.Error())
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, serviceName, apiid, tagID, parameters, ifMatch)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
resp, err := client.CreateOrUpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "CreateOrUpdate", resp, "Failure sending request")
return
}
result, err = client.CreateOrUpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "CreateOrUpdate", resp, "Failure responding to request")
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client TagDescriptionClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string, parameters TagDescriptionCreateParameters, ifMatch string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"tagId": autorest.Encode("path", tagID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/tagDescriptions/{tagId}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
if len(ifMatch) > 0 {
preparer = autorest.DecoratePreparer(preparer,
autorest.WithHeader("If-Match", autorest.String(ifMatch)))
}
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client TagDescriptionClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client TagDescriptionClient) CreateOrUpdateResponder(resp *http.Response) (result TagDescriptionContract, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete delete tag description for the Api.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API revision identifier. Must be unique in the current API Management service instance. Non-current
// revision has ;rev=n as a suffix where n is the revision number.
// tagID - tag identifier. Must be unique in the current API Management service instance.
// ifMatch - eTag of the Entity. ETag should match the current entity state from the header response of the GET
// request or it should be * for unconditional update.
func (client TagDescriptionClient) Delete(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string, ifMatch string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TagDescriptionClient.Delete")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil}, | {TargetValue: tagID,
Constraints: []validation.Constraint{{Target: "tagID", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "tagID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "tagID", Name: validation.Pattern, Rule: `(^[\w]+$)|(^[\w][\w\-]+[\w]$)`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.TagDescriptionClient", "Delete", err.Error())
}
req, err := client.DeletePreparer(ctx, resourceGroupName, serviceName, apiid, tagID, ifMatch)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "Delete", nil, "Failure preparing request")
return
}
resp, err := client.DeleteSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "Delete", resp, "Failure sending request")
return
}
result, err = client.DeleteResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "Delete", resp, "Failure responding to request")
}
return
}
// DeletePreparer prepares the Delete request.
func (client TagDescriptionClient) DeletePreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string, ifMatch string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"tagId": autorest.Encode("path", tagID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/tagDescriptions/{tagId}", pathParameters),
autorest.WithQueryParameters(queryParameters),
autorest.WithHeader("If-Match", autorest.String(ifMatch)))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client TagDescriptionClient) DeleteSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client TagDescriptionClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get get tag associated with the API.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API revision identifier. Must be unique in the current API Management service instance. Non-current
// revision has ;rev=n as a suffix where n is the revision number.
// tagID - tag identifier. Must be unique in the current API Management service instance.
func (client TagDescriptionClient) Get(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string) (result TagDescriptionContract, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TagDescriptionClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "apiid", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}},
{TargetValue: tagID,
Constraints: []validation.Constraint{{Target: "tagID", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "tagID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "tagID", Name: validation.Pattern, Rule: `(^[\w]+$)|(^[\w][\w\-]+[\w]$)`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.TagDescriptionClient", "Get", err.Error())
}
req, err := client.GetPreparer(ctx, resourceGroupName, serviceName, apiid, tagID)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client TagDescriptionClient) GetPreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"tagId": autorest.Encode("path", tagID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/tagDescriptions/{tagId}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client TagDescriptionClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client TagDescriptionClient) GetResponder(resp *http.Response) (result TagDescriptionContract, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// GetEntityState gets the entity state version of the tag specified by its identifier.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API revision identifier. Must be unique in the current API Management service instance. Non-current
// revision has ;rev=n as a suffix where n is the revision number.
// tagID - tag identifier. Must be unique in the current API Management service instance.
func (client TagDescriptionClient) GetEntityState(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TagDescriptionClient.GetEntityState")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "apiid", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}},
{TargetValue: tagID,
Constraints: []validation.Constraint{{Target: "tagID", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "tagID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "tagID", Name: validation.Pattern, Rule: `(^[\w]+$)|(^[\w][\w\-]+[\w]$)`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.TagDescriptionClient", "GetEntityState", err.Error())
}
req, err := client.GetEntityStatePreparer(ctx, resourceGroupName, serviceName, apiid, tagID)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "GetEntityState", nil, "Failure preparing request")
return
}
resp, err := client.GetEntityStateSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "GetEntityState", resp, "Failure sending request")
return
}
result, err = client.GetEntityStateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "GetEntityState", resp, "Failure responding to request")
}
return
}
// GetEntityStatePreparer prepares the GetEntityState request.
func (client TagDescriptionClient) GetEntityStatePreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, tagID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"tagId": autorest.Encode("path", tagID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsHead(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/tagDescriptions/{tagId}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetEntityStateSender sends the GetEntityState request. The method will close the
// http.Response Body if it receives an error.
func (client TagDescriptionClient) GetEntityStateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetEntityStateResponder handles the response to the GetEntityState request. The method always
// closes the http.Response Body.
func (client TagDescriptionClient) GetEntityStateResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}
// ListByAPI lists all Tags descriptions in scope of API. Model similar to swagger - tagDescription is defined on API
// level but tag may be assigned to the Operations
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API revision identifier. Must be unique in the current API Management service instance. Non-current
// revision has ;rev=n as a suffix where n is the revision number.
// filter - | Field | Supported operators | Supported functions |
// |-------------|------------------------|---------------------------------------------|
// | id | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
// | name | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
// top - number of records to return.
// skip - number of records to skip.
func (client TagDescriptionClient) ListByAPI(ctx context.Context, resourceGroupName string, serviceName string, apiid string, filter string, top *int32, skip *int32) (result TagDescriptionCollectionPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TagDescriptionClient.ListByAPI")
defer func() {
sc := -1
if result.tdc.Response.Response != nil {
sc = result.tdc.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "apiid", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}},
{TargetValue: top,
Constraints: []validation.Constraint{{Target: "top", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "top", Name: validation.InclusiveMinimum, Rule: 1, Chain: nil}}}}},
{TargetValue: skip,
Constraints: []validation.Constraint{{Target: "skip", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "skip", Name: validation.InclusiveMinimum, Rule: 0, Chain: nil}}}}}}); err != nil {
return result, validation.NewError("apimanagement.TagDescriptionClient", "ListByAPI", err.Error())
}
result.fn = client.listByAPINextResults
req, err := client.ListByAPIPreparer(ctx, resourceGroupName, serviceName, apiid, filter, top, skip)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "ListByAPI", nil, "Failure preparing request")
return
}
resp, err := client.ListByAPISender(req)
if err != nil {
result.tdc.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "ListByAPI", resp, "Failure sending request")
return
}
result.tdc, err = client.ListByAPIResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "ListByAPI", resp, "Failure responding to request")
}
return
}
// ListByAPIPreparer prepares the ListByAPI request.
func (client TagDescriptionClient) ListByAPIPreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, filter string, top *int32, skip *int32) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(filter) > 0 {
queryParameters["$filter"] = autorest.Encode("query", filter)
}
if top != nil {
queryParameters["$top"] = autorest.Encode("query", *top)
}
if skip != nil {
queryParameters["$skip"] = autorest.Encode("query", *skip)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/tagDescriptions", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByAPISender sends the ListByAPI request. The method will close the
// http.Response Body if it receives an error.
func (client TagDescriptionClient) ListByAPISender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListByAPIResponder handles the response to the ListByAPI request. The method always
// closes the http.Response Body.
func (client TagDescriptionClient) ListByAPIResponder(resp *http.Response) (result TagDescriptionCollection, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listByAPINextResults retrieves the next set of results, if any.
func (client TagDescriptionClient) listByAPINextResults(ctx context.Context, lastResults TagDescriptionCollection) (result TagDescriptionCollection, err error) {
req, err := lastResults.tagDescriptionCollectionPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "listByAPINextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListByAPISender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "listByAPINextResults", resp, "Failure sending next results request")
}
result, err = client.ListByAPIResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.TagDescriptionClient", "listByAPINextResults", resp, "Failure responding to next results request")
}
return
}
// ListByAPIComplete enumerates all values, automatically crossing page boundaries as required.
func (client TagDescriptionClient) ListByAPIComplete(ctx context.Context, resourceGroupName string, serviceName string, apiid string, filter string, top *int32, skip *int32) (result TagDescriptionCollectionIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/TagDescriptionClient.ListByAPI")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListByAPI(ctx, resourceGroupName, serviceName, apiid, filter, top, skip)
return
} | {Target: "apiid", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}}, |
StatusDropdown.js | import React from "react";
import { Dropdown } from "semantic-ui-react";
const statusOptions = [
{
key: "Working",
text: "Working",
value: 1
}
];
class StatusDropdown extends React.Component {
render() {
return (
<Dropdown
selection
compact
options={statusOptions} | color: "#20CB96",
boxShadow: "0px 3px 6px #00000039",
border: "1px solid #959494",
width: "135px",
padding: "13px 16px"
}}
/>
);
}
}
export default StatusDropdown; | value={1}
style={{
font: "18px Medium Lato", |
MainAnnotator.py | """
Main script: annotates a directory of recipe XML files.
"""
import glob
import re
import os
from oper_utils import xml_to_recipe_annotated
from Ner_classifieur_annote import load_crf_model, predict_text, transform_to_xml_annote
from NER_ingredient_detector import get_content_from_xmlfile
from ComplexCalculator import ComplexCalculator
modelpath = "../ml_models/model-20210515.pkl"
ner_clf = load_crf_model(modelpath)
def an | ilename, ner_clf):
"""
Annotate the file with the CRF model; returns the recipe as an annotated string.
"""
ingredients, text_recette = get_content_from_xmlfile(filename)
liste = predict_text(text_recette,ner_clf)
text_after = transform_to_xml_annote(liste)
return text_after
def transform_doc_to_xml(doc):
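    """
    Convert a spaCy Doc whose tokens carry IOB entity annotations into a
    string in which each entity span is wrapped in an XML element named after
    the entity type, with an id built from the token's kb and entity ids.
    """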
text_after = []
for token in doc:
if token.ent_iob_ == "O":
text_after.append(token.text)
elif token.ent_iob_ == "B" and token.i == doc[-1].i:
text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text + f"</{token.ent_type_}>")
elif token.ent_iob_ == "B" and doc[token.i+1].ent_iob_ == "I":
text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text)
elif token.ent_iob_ == "B" and doc[token.i+1].ent_iob_ != "I":
text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text + f"</{token.ent_type_}>")
elif token.ent_iob_ == "I" and token.i == doc[-1].i:
text_after.append(token.text + f"</{token.ent_type_}>")
elif token.ent_iob_ == "I" and doc[token.i+1].ent_iob_ == "I":
text_after.append(token.text)
elif token.ent_iob_ == "I" and doc[token.i+1].ent_iob_ != "I":
text_after.append(token.text + f"</{token.ent_type_}>")
text_after = " ".join(text_after)
text_after = re.sub("' ", "'", text_after)
text_after = re.sub(r" (,|\.)", "\\1", text_after)
return text_after
def parcours_corpus_annote(corpus_path, output_dir, liste=False):
if not liste:
fics = glob.glob(f"{corpus_path}\*.xml")
# fics = glob.glob(f"{corpus_path}{os.sep}*.xml")
else:
fics = corpus_path
for fic in fics:
try:
fic_name = fic.split(f'{os.sep}')[-1]
recette_annote_crf = annote_with_crf(fic, ner_clf)
recette_doc_spacy, dico_ingreds, dico_opers = xml_to_recipe_annotated(fic)
recette_annote_rules = transform_doc_to_xml(recette_doc_spacy)
calculator = ComplexCalculator(dico_ingreds, dico_opers)
complex_temps = calculator.get_O_temps()
complex_espace = calculator.O_espace_f()
ingreds = dico_ingreds_to_xml(dico_ingreds)
opers = dico_opers_to_xml(dico_opers)
## add to xmlfile
with open(fic,encoding="utf8") as f:
xml_text = f.read()
recette_xml_rules = '\n <annotation methode="symbolique">\n '+ recette_annote_rules + '\n </annotation>'
recette_xml_crf = '\n <annotation methode="crf">\n '+ recette_annote_crf + '\n </annotation>'
complexite_t = '\n <complexite>\n <temps>' + complex_temps + '</temps>\n </complexite>'
complexite_e = '\n <complexite>\n <espace>' + complex_espace + '</espace>\n </complexite>'
xml_text = re.sub("(</preparation>)", "\\1" + recette_xml_rules + recette_xml_crf + complexite_t + complexite_e + ingreds + opers, xml_text)
with open(output_dir + os.sep + fic_name, "w", encoding="utf8") as f:
f.write(xml_text)
except Exception:
print(f"Rencontrer problème pour: {fic}")
def dico_ingreds_to_xml(dico_ingreds):
liste = []
for ingred in dico_ingreds.values():
formate = f'ingredient:{ingred["ingredient"]}\t id:{ingred["id"]}\t quantité:{ingred["quantite"]}\t unité:{ingred["unit"]}\t denombrable:{ingred["denombrable"]}\t recipient:{ingred["recipient"]}\n'
liste.append(formate)
liste = "".join(liste)
liste = "\n<ingredients_trouve>\n<![CDATA[\n" + liste + "]]>\n</ingredients_trouve>"
return liste
def dico_opers_to_xml(dico_opers):
liste = []
for oper_id,oper in dico_opers.items():
formate = f'operation:{oper["action"]}\t id:{oper_id}\t ingrédients_ralatifs:{oper["ingreds"]}\t nombre_opération_atomique:{oper["nb_oper"]}\t temps:{oper["temps"]}\t recipient:{oper["recipient"]}\n'
liste.append(formate)
liste = "".join(liste)
liste = "\n<operation_trouve>\n<![CDATA[\n" + liste + "]]>\n</operation_trouve>"
return liste
if __name__ == "__main__":
corpus_path = "../corpus_recettes/corpus_for_final"
output = "../corpus_recettes/out_put"
parcours_corpus_annote(corpus_path, output) | note_with_crf(f |
index.ts | export const RPC_URL_RINKEBY:string = 'https://rinkeby.infura.io/v3/947932ecbc0e4cc38e95d92a1f6efca0' | export const RPS_CONTRACT_ADDRESS_RINKEBY:string = '0x2b327cea6c152f83a04C4cd33f3c06d58cd8f66a' |
|
test_marshall_enum.py | from enum import Enum
from unittest import TestCase
|
class VehicleTypes(Enum):
CAR = 'car'
TRUCK = 'truck'
BIKE = 'bike'
class TestMarshallEnum(TestCase):
def test_marshall(self):
dumped = dump(VehicleTypes.CAR)
assert VehicleTypes.CAR.value == dumped
loaded = load(VehicleTypes, dumped)
assert VehicleTypes.CAR == loaded
def test_unknown_value_not_permitted(self):
with self.assertRaises(MarshallError):
load(VehicleTypes, 'spaceship')
def test_unknown_value_permitted(self):
# Allow unknown values to be placed in the enum
marshaller = EnumMarshallerFactory(allow_unknown=True).create(get_default_context(), VehicleTypes)
loaded = marshaller.load('spaceship')
assert loaded.value == 'spaceship'
assert loaded.__class__ == VehicleTypes
dumped = marshaller.dump(loaded)
assert dumped == 'spaceship' | from marshy import dump, load, get_default_context
from marshy.errors import MarshallError
from marshy.factory.enum_marshaller_factory import EnumMarshallerFactory |
writedocs.go | package bootstrap
import (
"fmt"
"path/filepath"
"reflect"
"github.com/google/blueprint"
"github.com/google/blueprint/bootstrap/bpdoc"
"github.com/google/blueprint/pathtools"
)
// ModuleTypeDocs returns a list of bpdoc.ModuleType objects that contain information relevant
// to generating documentation for module types supported by the primary builder.
func ModuleTypeDocs(ctx *blueprint.Context, factories map[string]reflect.Value) ([]*bpdoc.Package, error) | {
// Find the module that's marked as the "primary builder", which means it's
// creating the binary that we'll use to generate the non-bootstrap
// build.ninja file.
var primaryBuilders []*goBinary
ctx.VisitAllModulesIf(isBootstrapBinaryModule,
func(module blueprint.Module) {
binaryModule := module.(*goBinary)
if binaryModule.properties.PrimaryBuilder {
primaryBuilders = append(primaryBuilders, binaryModule)
}
})
var primaryBuilder *goBinary
switch len(primaryBuilders) {
case 0:
return nil, fmt.Errorf("no primary builder module present")
case 1:
primaryBuilder = primaryBuilders[0]
default:
return nil, fmt.Errorf("multiple primary builder modules present")
}
pkgFiles := make(map[string][]string)
ctx.VisitDepsDepthFirst(primaryBuilder, func(module blueprint.Module) {
switch m := module.(type) {
case (*goPackage):
pkgFiles[m.properties.PkgPath] = pathtools.PrefixPaths(m.properties.Srcs,
filepath.Join(ctx.SrcDir(), ctx.ModuleDir(m)))
default:
panic(fmt.Errorf("unknown dependency type %T", module))
}
})
mergedFactories := make(map[string]reflect.Value)
for moduleType, factory := range factories {
mergedFactories[moduleType] = factory
}
for moduleType, factory := range ctx.ModuleTypeFactories() {
if _, exists := mergedFactories[moduleType]; !exists {
mergedFactories[moduleType] = reflect.ValueOf(factory)
}
}
return bpdoc.AllPackages(pkgFiles, mergedFactories, ctx.ModuleTypePropertyStructs())
} |
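// Illustrative use (a sketch, not part of this file): given a configured
// blueprint.Context and any extra module-type factories, the returned
// bpdoc.Package list drives documentation rendering. Names below are hypothetical.
//
//	pkgs, err := bootstrap.ModuleTypeDocs(ctx, map[string]reflect.Value{})
//	if err != nil {
//		// handle error
//	}
//	for _, pkg := range pkgs {
//		// render pkg's module types and their property docs
//	}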
|
syscall_linux_ppc64x.go | // Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux
// +build ppc64 ppc64le
package syscall
const (
_SYS_dup = SYS_DUP2
_SYS_setgroups = SYS_SETGROUPS
)
//sys Dup2(oldfd int, newfd int) (err error)
//sys Fchown(fd int, uid int, gid int) (err error)
//sys Fstat(fd int, stat *Stat_t) (err error)
//sys Fstatfs(fd int, buf *Statfs_t) (err error)
//sys Ftruncate(fd int, length int64) (err error)
//sysnb Getegid() (egid int)
//sysnb Geteuid() (euid int)
//sysnb Getgid() (gid int)
//sysnb Getrlimit(resource int, rlim *Rlimit) (err error) = SYS_UGETRLIMIT
//sysnb Getuid() (uid int)
//sysnb InotifyInit() (fd int, err error)
//sys Ioperm(from int, num int, on int) (err error)
//sys Iopl(level int) (err error)
//sys Lchown(path string, uid int, gid int) (err error)
//sys Listen(s int, n int) (err error)
//sys Lstat(path string, stat *Stat_t) (err error)
//sys Pread(fd int, p []byte, offset int64) (n int, err error) = SYS_PREAD64
//sys Pwrite(fd int, p []byte, offset int64) (n int, err error) = SYS_PWRITE64
//sys Seek(fd int, offset int64, whence int) (off int64, err error) = SYS_LSEEK
//sys Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) = SYS__NEWSELECT
//sys sendfile(outfd int, infd int, offset *int64, count int) (written int, err error)
//sys Setfsgid(gid int) (err error)
//sys Setfsuid(uid int) (err error)
//sysnb Setregid(rgid int, egid int) (err error)
//sysnb Setresgid(rgid int, egid int, sgid int) (err error)
//sysnb Setresuid(ruid int, euid int, suid int) (err error)
//sysnb Setrlimit(resource int, rlim *Rlimit) (err error)
//sysnb Setreuid(ruid int, euid int) (err error)
//sys Shutdown(fd int, how int) (err error)
//sys Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int64, err error)
//sys Stat(path string, stat *Stat_t) (err error)
//sys Statfs(path string, buf *Statfs_t) (err error)
//sys SyncFileRange(fd int, off int64, n int64, flags int) (err error) = SYS_SYNC_FILE_RANGE2
//sys Truncate(path string, length int64) (err error)
//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error)
//sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error)
//sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error)
//sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error)
//sysnb getgroups(n int, list *_Gid_t) (nn int, err error)
//sysnb setgroups(n int, list *_Gid_t) (err error)
//sys getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error)
//sys setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error)
//sysnb socket(domain int, typ int, proto int) (fd int, err error)
//sysnb socketpair(domain int, typ int, proto int, fd *[2]int32) (err error)
//sysnb getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error)
//sysnb getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error)
//sys recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error)
//sys sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error)
//sys recvmsg(s int, msg *Msghdr, flags int) (n int, err error)
//sys sendmsg(s int, msg *Msghdr, flags int) (n int, err error)
//sys mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error)
//sysnb Gettimeofday(tv *Timeval) (err error)
//sysnb Time(t *Time_t) (tt Time_t, err error)
func setTimespec(sec, nsec int64) Timespec {
return Timespec{Sec: sec, Nsec: nsec}
}
func setTimeval(sec, usec int64) Timeval {
return Timeval{Sec: sec, Usec: usec}
}
func Pipe(p []int) (err error) {
if len(p) != 2 |
var pp [2]_C_int
err = pipe2(&pp, 0)
p[0] = int(pp[0])
p[1] = int(pp[1])
return
}
//sysnb pipe2(p *[2]_C_int, flags int) (err error)
func Pipe2(p []int, flags int) (err error) {
if len(p) != 2 {
return EINVAL
}
var pp [2]_C_int
err = pipe2(&pp, flags)
p[0] = int(pp[0])
p[1] = int(pp[1])
return
}
func (r *PtraceRegs) PC() uint64 { return r.Nip }
func (r *PtraceRegs) SetPC(pc uint64) { r.Nip = pc }
func (iov *Iovec) SetLen(length int) {
iov.Len = uint64(length)
}
func (msghdr *Msghdr) SetControllen(length int) {
msghdr.Controllen = uint64(length)
}
func (cmsg *Cmsghdr) SetLen(length int) {
cmsg.Len = uint64(length)
}
func rawVforkSyscall(trap, a1 uintptr) (r1 uintptr, err Errno) {
panic("not implemented")
}
| {
return EINVAL
} |
compile_graph.py | from graphserver.graphdb import GraphDatabase
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
from graphserver.ext.osm.osmdb import OSMDB
from graphserver.ext.osm.profiledb import ProfileDB
from graphserver import compiler
from graphserver.core import Graph
import sys
from sys import argv
def process_street_graph(osmdb_filename, graphdb_filename, profiledb_filename, slogs={}):
print( "Opening OSM-DB '%s'"%osmdb_filename )
osmdb = OSMDB( osmdb_filename )
if profiledb_filename:
print( "Opening ProfileDB '%s'"%profiledb_filename )
profiledb = ProfileDB( profiledb_filename )
else:
print( "No ProfileDB supplied" )
profiledb = None
g = Graph()
compiler.load_streets_to_graph( g, osmdb, profiledb, slogs, reporter=sys.stdout )
graphdb = GraphDatabase( graphdb_filename, overwrite=True )
graphdb.populate( g, reporter=sys.stdout )
def | (graphdb_filename, gtfsdb_filenames, osmdb_filename=None, profiledb_filename=None, agency_id=None, link_stations=False, slogs={}):
g = Graph()
if profiledb_filename:
print( "Opening ProfileDB '%s'"%profiledb_filename )
profiledb = ProfileDB( profiledb_filename )
else:
print( "No ProfileDB supplied" )
profiledb = None
if osmdb_filename:
# Load osmdb ===============================
print( "Opening OSM-DB '%s'"%osmdb_filename )
osmdb = OSMDB( osmdb_filename )
compiler.load_streets_to_graph( g, osmdb, profiledb, slogs, reporter=sys.stdout )
# Load gtfsdb ==============================
for i, gtfsdb_filename in enumerate(gtfsdb_filenames):
gtfsdb = GTFSDatabase( gtfsdb_filename )
service_ids = [x.encode("ascii") for x in gtfsdb.service_ids()]
compiler.load_gtfsdb_to_boardalight_graph(g, str(i), gtfsdb, agency_id=agency_id, service_ids=service_ids)
if osmdb_filename:
compiler.load_transit_street_links_to_graph( g, osmdb, gtfsdb, reporter=sys.stdout )
if link_stations:
compiler.link_nearby_stops( g, gtfsdb )
# Export to graphdb ========================
graphdb = GraphDatabase( graphdb_filename, overwrite=True )
graphdb.populate( g, reporter=sys.stdout )
def main():
from optparse import OptionParser
usage = """usage: python compile_graph.py [options] <graphdb_filename> """
parser = OptionParser(usage=usage)
parser.add_option("-o", "--osmdb", dest="osmdb_filename", default=None,
help="conflate with the compiled OSMDB", metavar="FILE")
parser.add_option("-l", "--link",
action="store_true", dest="link", default=False,
help="create walking links between adjacent/nearby stations if not compiling with an OSMDB")
parser.add_option("-g", "--gtfsdb",
action="append", dest="gtfsdb_files", default=[],
help="compile with the specified GTFS file(s)")
parser.add_option("-p", "--profiledb",
dest="profiledb_filename", default=None,
help="compile road network with elevation information from profiledb")
parser.add_option("-s", "--slog",
action="append", dest="slog_strings", default=[],
help="specify slog for highway type, in highway_type:slog form. For example, 'motorway:10.5'")
(options, args) = parser.parse_args()
slogs = {}
for slog_string in options.slog_strings:
highway_type,slog_penalty = slog_string.split(":")
slogs[highway_type] = float(slog_penalty)
print(slogs)
if len(args) != 1 or (not options.osmdb_filename and not len(options.gtfsdb_files)):
parser.print_help()
exit(-1)
graphdb_filename = args[0]
# just street graph compilation
if options.osmdb_filename and not len(options.gtfsdb_files):
process_street_graph(options.osmdb_filename, graphdb_filename, options.profiledb_filename, slogs)
exit(0)
process_transit_graph(graphdb_filename, options.gtfsdb_files,
osmdb_filename=options.osmdb_filename,
profiledb_filename=options.profiledb_filename,
link_stations=options.link and not options.osmdb_filename, slogs=slogs)
exit(0)
if __name__=='__main__': main()
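# Example invocation (hypothetical file names), combining street and transit data:
#   python compile_graph.py -o region.osmdb -p region.profiledb -g feed.gtfsdb -s motorway:10.5 graph.db
# With -o alone (no -g), only the street graph is compiled via process_street_graph above.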
| process_transit_graph |
deployment.rs | // Generated from definition io.k8s.api.apps.v1.Deployment
/// Deployment enables declarative updates for Pods and ReplicaSets.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct | {
/// Standard object metadata.
pub metadata: crate::apimachinery::pkg::apis::meta::v1::ObjectMeta,
/// Specification of the desired behavior of the Deployment.
pub spec: Option<crate::api::apps::v1::DeploymentSpec>,
/// Most recently observed status of the Deployment.
pub status: Option<crate::api::apps::v1::DeploymentStatus>,
}
// Begin apps/v1/Deployment
// Generated from operation createAppsV1NamespacedDeployment
impl Deployment {
/// create a Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::CreateResponse`]`<Self>>` constructor, or [`crate::CreateResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn create_namespaced_deployment(
namespace: &str,
body: &crate::api::apps::v1::Deployment,
optional: crate::CreateOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::CreateResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::post(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation deleteAppsV1CollectionNamespacedDeployment
impl Deployment {
/// delete collection of Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>>` constructor, or [`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `delete_optional`
///
/// Delete options. Use `Default::default()` to not pass any.
///
/// * `list_optional`
///
/// List options. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_collection_namespaced_deployment(
namespace: &str,
delete_optional: crate::DeleteOptional<'_>,
list_optional: crate::ListOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<crate::List<Self>>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
list_optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::delete(__url);
let __body = crate::serde_json::to_vec(&delete_optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation deleteAppsV1NamespacedDeployment
impl Deployment {
/// delete a Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<Self>>` constructor, or [`crate::DeleteResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_namespaced_deployment(
name: &str,
namespace: &str,
optional: crate::DeleteOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments/{name}",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let __request = crate::http::Request::delete(__url);
let __body = crate::serde_json::to_vec(&optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listAppsV1DeploymentForAllNamespaces
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>>` constructor, or [`crate::ListResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_deployment_for_all_namespaces(
optional: crate::ListOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> {
let __url = "/apis/apps/v1/deployments?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listAppsV1NamespacedDeployment
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>>` constructor, or [`crate::ListResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_namespaced_deployment(
namespace: &str,
optional: crate::ListOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation patchAppsV1NamespacedDeployment
impl Deployment {
/// partially update the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::PatchResponse`]`<Self>>` constructor, or [`crate::PatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_namespaced_deployment(
name: &str,
namespace: &str,
body: &crate::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::PatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::PatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::patch(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static(match body {
crate::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation patchAppsV1NamespacedDeploymentStatus
impl Deployment {
/// partially update status of the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::PatchResponse`]`<Self>>` constructor, or [`crate::PatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_namespaced_deployment_status(
name: &str,
namespace: &str,
body: &crate::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::PatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::PatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::patch(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static(match body {
crate::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation readAppsV1NamespacedDeployment
impl Deployment {
/// read the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedDeploymentResponse`]`>` constructor, or [`ReadNamespacedDeploymentResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_namespaced_deployment(
name: &str,
namespace: &str,
optional: ReadNamespacedDeploymentOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<ReadNamespacedDeploymentResponse>), crate::RequestError> {
let ReadNamespacedDeploymentOptional {
pretty,
} = optional;
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`Deployment::read_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedDeploymentOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedDeploymentResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::read_namespaced_deployment`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedDeploymentResponse {
Ok(crate::api::apps::v1::Deployment),
Other(Result<Option<crate::serde_json::Value>, crate::serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedDeploymentResponse {
fn try_from_parts(status_code: crate::http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
crate::http::StatusCode::OK => {
let result = match crate::serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadNamespacedDeploymentResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match crate::serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadNamespacedDeploymentResponse::Other(result), read))
},
}
}
}
// Generated from operation readAppsV1NamespacedDeploymentStatus
impl Deployment {
/// read status of the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedDeploymentStatusResponse`]`>` constructor, or [`ReadNamespacedDeploymentStatusResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_namespaced_deployment_status(
name: &str,
namespace: &str,
optional: ReadNamespacedDeploymentStatusOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<ReadNamespacedDeploymentStatusResponse>), crate::RequestError> {
let ReadNamespacedDeploymentStatusOptional {
pretty,
} = optional;
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`Deployment::read_namespaced_deployment_status`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedDeploymentStatusOptional<'a> {
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedDeploymentStatusResponse as Response>::try_from_parts` to parse the HTTP response body of [`Deployment::read_namespaced_deployment_status`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedDeploymentStatusResponse {
Ok(crate::api::apps::v1::Deployment),
Other(Result<Option<crate::serde_json::Value>, crate::serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedDeploymentStatusResponse {
fn try_from_parts(status_code: crate::http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
crate::http::StatusCode::OK => {
let result = match crate::serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadNamespacedDeploymentStatusResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match crate::serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadNamespacedDeploymentStatusResponse::Other(result), read))
},
}
}
}
// Generated from operation replaceAppsV1NamespacedDeployment
impl Deployment {
/// replace the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ReplaceResponse`]`<Self>>` constructor, or [`crate::ReplaceResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_namespaced_deployment(
name: &str,
namespace: &str,
body: &crate::api::apps::v1::Deployment,
optional: crate::ReplaceOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ReplaceResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::put(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation replaceAppsV1NamespacedDeploymentStatus
impl Deployment {
/// replace status of the specified Deployment
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ReplaceResponse`]`<Self>>` constructor, or [`crate::ReplaceResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the Deployment
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_namespaced_deployment_status(
name: &str,
namespace: &str,
body: &crate::api::apps::v1::Deployment,
optional: crate::ReplaceOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::ReplaceResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments/{name}/status?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::put(__url);
let __body = crate::serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(crate::http::header::CONTENT_TYPE, crate::http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation watchAppsV1DeploymentForAllNamespaces
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_deployment_for_all_namespaces(
optional: crate::WatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
let __url = "/apis/apps/v1/deployments?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation watchAppsV1NamespacedDeployment
impl Deployment {
/// list or watch objects of kind Deployment
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_namespaced_deployment(
namespace: &str,
optional: crate::WatchOptional<'_>,
) -> Result<(crate::http::Request<Vec<u8>>, fn(crate::http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/apps/v1/namespaces/{namespace}/deployments?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = crate::http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// End apps/v1/Deployment
impl crate::Resource for Deployment {
const API_VERSION: &'static str = "apps/v1";
const GROUP: &'static str = "apps";
const KIND: &'static str = "Deployment";
const VERSION: &'static str = "v1";
const URL_PATH_SEGMENT: &'static str = "deployments";
type Scope = crate::NamespaceResourceScope;
}
impl crate::ListableResource for Deployment {
const LIST_KIND: &'static str = "DeploymentList";
}
impl crate::Metadata for Deployment {
type Ty = crate::apimachinery::pkg::apis::meta::v1::ObjectMeta;
fn metadata(&self) -> &<Self as crate::Metadata>::Ty {
&self.metadata
}
fn metadata_mut(&mut self) -> &mut<Self as crate::Metadata>::Ty {
&mut self.metadata
}
}
impl<'de> crate::serde::Deserialize<'de> for Deployment {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_api_version,
Key_kind,
Key_metadata,
Key_spec,
Key_status,
Other,
}
impl<'de> crate::serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
Ok(match v {
"apiVersion" => Field::Key_api_version,
"kind" => Field::Key_kind,
"metadata" => Field::Key_metadata,
"spec" => Field::Key_spec,
"status" => Field::Key_status,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Deployment;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(<Self::Value as crate::Resource>::KIND)
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
let mut value_metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta> = None;
let mut value_spec: Option<crate::api::apps::v1::DeploymentSpec> = None;
let mut value_status: Option<crate::api::apps::v1::DeploymentStatus> = None;
while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_api_version => {
let value_api_version: String = crate::serde::de::MapAccess::next_value(&mut map)?;
if value_api_version != <Self::Value as crate::Resource>::API_VERSION {
return Err(crate::serde::de::Error::invalid_value(crate::serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::API_VERSION));
}
},
Field::Key_kind => {
let value_kind: String = crate::serde::de::MapAccess::next_value(&mut map)?;
if value_kind != <Self::Value as crate::Resource>::KIND {
return Err(crate::serde::de::Error::invalid_value(crate::serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::KIND));
}
},
Field::Key_metadata => value_metadata = Some(crate::serde::de::MapAccess::next_value(&mut map)?),
Field::Key_spec => value_spec = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_status => value_status = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(Deployment {
metadata: value_metadata.ok_or_else(|| crate::serde::de::Error::missing_field("metadata"))?,
spec: value_spec,
status: value_status,
})
}
}
deserializer.deserialize_struct(
<Self as crate::Resource>::KIND,
&[
"apiVersion",
"kind",
"metadata",
"spec",
"status",
],
Visitor,
)
}
}
impl crate::serde::Serialize for Deployment {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
let mut state = serializer.serialize_struct(
<Self as crate::Resource>::KIND,
3 +
self.spec.as_ref().map_or(0, |_| 1) +
self.status.as_ref().map_or(0, |_| 1),
)?;
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::API_VERSION)?;
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::KIND)?;
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", &self.metadata)?;
if let Some(value) = &self.spec {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "spec", value)?;
}
if let Some(value) = &self.status {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "status", value)?;
}
crate::serde::ser::SerializeStruct::end(state)
}
}
#[cfg(feature = "schemars")]
impl crate::schemars::JsonSchema for Deployment {
fn schema_name() -> String {
"io.k8s.api.apps.v1.Deployment".to_owned()
}
fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema {
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Deployment enables declarative updates for Pods and ReplicaSets.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))),
object: Some(Box::new(crate::schemars::schema::ObjectValidation {
properties: std::array::IntoIter::new([
(
"apiVersion".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
..Default::default()
}),
),
(
"kind".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
..Default::default()
}),
),
(
"metadata".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Standard object metadata.".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
(
"spec".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::api::apps::v1::DeploymentSpec>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Specification of the desired behavior of the Deployment.".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
(
"status".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::api::apps::v1::DeploymentStatus>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Most recently observed status of the Deployment.".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
]).collect(),
required: std::array::IntoIter::new([
"metadata",
]).map(std::borrow::ToOwned::to_owned).collect(),
..Default::default()
})),
..Default::default()
})
}
}
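// Usage sketch (illustrative): this crate only builds the HTTP request; executing
// it with some HTTP client is assumed. The deployment/namespace names are made up.
//
//     let (request, response_body) =
//         Deployment::read_namespaced_deployment("my-deploy", "default", Default::default())?;
//     // Send `request` with any HTTP client, then feed the raw response bytes to
//     // `response_body(status_code)` until it parses into a ReadNamespacedDeploymentResponse.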
| Deployment |
askalcoin_mk.ts | <TS language="mk" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Десен клик за уредување на адреса или етикета</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Креирај нова адреса</translation>
</message>
<message>
<source>&New</source>
<translation>&Нова</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Копирај ја избраната адреса на системскиот клипборд</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Копирај</translation>
</message>
<message>
<source>C&lose</source>
<translation>З&атвори</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Избриши ја избраната адреса од листата</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Експортирај ги податоците од активното јазиче во датотека</translation>
</message>
<message>
<source>&Export</source>
<translation>&Експорт</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Избриши</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Enter passphrase</source>
<translation>Внеси тајна фраза</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Нова тајна фраза</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Повторете ја новата тајна фраза</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>AskalcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Потпиши &порака...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Синхронизација со мрежата...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Преглед</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Трансакции</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Преглед на историјата на трансакции</translation>
</message>
<message>
<source>E&xit</source>
<translation>И&злез</translation>
</message>
<message>
<source>Quit application</source>
<translation>Напушти ја апликацијата</translation>
</message>
<message>
<source>About &Qt</source>
<translation>За &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Прикажи информации за Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Опции...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Криптирање на Паричник...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Бекап на Паричник...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Измени Тајна Фраза...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Отвори &URI...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Повторно индексирање на блокови од дискот...</translation>
</message>
<message>
<source>Send coins to a Askalcoin address</source>
<translation>Испрати монети на Askalcoin адреса</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Потврди порака...</translation>
</message>
<message>
<source>&Send</source>
<translation>&Испрати</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Прими</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Прикажи / Сокриј</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Криптирај ги приватните клучеви кои припаѓаат на твојот паричник</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Подесувања</translation>
</message>
<message>
<source>&Help</source>
<translation>&Помош</translation>
</message>
<message numerus="yes">
<source>Processed %n block(s) of transaction history.</source>
<translation><numerusform>Обработен %n блок од историјата на трансакции.</numerusform><numerusform>Обработени %n блокови од историјата на трансакции.</numerusform></translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 позади</translation> | <message>
<source>Error</source>
<translation>Грешка</translation>
</message>
<message>
<source>Warning</source>
<translation>Предупредување</translation>
</message>
<message>
<source>Up to date</source>
<translation>Во тек</translation>
</message>
<message>
<source>&Window</source>
<translation>&Прозорец</translation>
</message>
<message>
<source>Date: %1
</source>
<translation>Дата: %1
</translation>
</message>
<message>
<source>Amount: %1
</source>
<translation>Сума: %1
</translation>
</message>
<message>
<source>Type: %1
</source>
<translation>Тип: %1
</translation>
</message>
<message>
<source>Label: %1
</source>
<translation>Етикета: %1
</translation>
</message>
<message>
<source>Address: %1
</source>
<translation>Адреса: %1
</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Bytes:</source>
<translation>Бајти:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Сума:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Провизија:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Прашина:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>После Провизија:</translation>
</message>
<message>
<source>Change:</source>
<translation>Кусур:</translation>
</message>
<message>
<source>Amount</source>
<translation>Сума</translation>
</message>
<message>
<source>Date</source>
<translation>Дата</translation>
</message>
</context>
<context>
<name>CreateWalletActivity</name>
</context>
<context>
<name>CreateWalletDialog</name>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Измени Адреса</translation>
</message>
<message>
<source>&Label</source>
<translation>&Етикета</translation>
</message>
<message>
<source>&Address</source>
<translation>&Адреса</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>name</source>
<translation>име</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>верзија</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Askalcoin</source>
<translation>Askalcoin</translation>
</message>
<message>
<source>Error</source>
<translation>Грешка</translation>
</message>
</context>
<context>
<name>ModalOverlay</name>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
</context>
<context>
<name>OpenWalletActivity</name>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Опции</translation>
</message>
<message>
<source>&Network</source>
<translation>&Мрежа</translation>
</message>
<message>
<source>W&allet</source>
<translation>П&аричник</translation>
</message>
<message>
<source>&Window</source>
<translation>&Прозорец</translation>
</message>
<message>
<source>&OK</source>
<translation>&ОК</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Откажи</translation>
</message>
<message>
<source>none</source>
<translation>нема</translation>
</message>
<message>
<source>Error</source>
<translation>Грешка</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Total:</source>
<translation>Вкупно:</translation>
</message>
</context>
<context>
<name>PSBTOperationsDialog</name>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>Sent</source>
<translation>Испратени</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Сума</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 д</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 ч</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 м</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 с</translation>
</message>
<message>
<source>%1 ms</source>
<translation>%1 мс</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 и %2</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 Б</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 КБ</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 МБ</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 ГБ</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Сними Слика...</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Network</source>
<translation>Мрежа</translation>
</message>
<message>
<source>Name</source>
<translation>Име</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Број на конекции</translation>
</message>
<message>
<source>Block chain</source>
<translation>Block chain</translation>
</message>
<message>
<source>Sent</source>
<translation>Испратени</translation>
</message>
<message>
<source>Version</source>
<translation>Верзија</translation>
</message>
<message>
<source>&Console</source>
<translation>&Конзола</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Сума:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Етикета:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Порака:</translation>
</message>
<message>
<source>Show</source>
<translation>Прикажи</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Amount:</source>
<translation>Сума:</translation>
</message>
<message>
<source>Message:</source>
<translation>Порака:</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Копирај &URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Копирај &Адреса</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Сними Слика...</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Дата</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Bytes:</source>
<translation>Бајти:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Сума:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Провизија:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>После Провизија:</translation>
</message>
<message>
<source>Change:</source>
<translation>Кусур:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Прашина:</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>Сума:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Етикета:</translation>
</message>
<message>
<source>Message:</source>
<translation>Порака:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Date</source>
<translation>Дата</translation>
</message>
<message>
<source>Amount</source>
<translation>Сума</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Дата</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>Date</source>
<translation>Дата</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletController</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Експорт</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Експортирај ги податоците од активното јазиче во датотека</translation>
</message>
<message>
<source>Error</source>
<translation>Грешка</translation>
</message>
</context>
<context>
<name>askalcoin-core</name>
</context>
</TS> | </message> |
boxShape.go | package chipmunk
import (
"github.com/vova616/chipmunk/transform"
"github.com/vova616/chipmunk/vect"
)
// Convenience wrapper around PolygonShape.
type BoxShape struct {
Shape *Shape
// The polygon that represents this box. Do not touch!
Polygon *PolygonShape
verts [4]vect.Vect
// The width of the box. Call UpdatePoly() if changed.
Width vect.Float
// The height of the box. Call UpdatePoly() if changed.
Height vect.Float
// The center of the box. Call UpdatePoly() if changed.
Position vect.Vect
}
// Creates a new BoxShape with given position, width and height.
func NewBox(pos vect.Vect, w, h vect.Float) *Shape {
shape := newShape()
box := &BoxShape{
Polygon: &PolygonShape{Shape: shape},
Width: w,
Height: h,
Position: pos,
Shape: shape,
}
hw := w / 2.0
hh := h / 2.0
if hw < 0 {
hw = -hw
}
if hh < 0 {
hh = -hh
}
box.verts = [4]vect.Vect{
{-hw, -hh},
{-hw, hh},
{hw, hh},
{hw, -hh},
}
poly := box.Polygon
poly.SetVerts(box.verts[:], box.Position)
shape.ShapeClass = box
return shape
}
func (box *BoxShape) Moment(mass float32) vect.Float {
return (vect.Float(mass) * (box.Width*box.Width + box.Height*box.Height) / 12.0)
}
// Recalculates the internal Polygon with the Width, Height and Position.
func (box *BoxShape) UpdatePoly() {
hw := box.Width / 2.0
hh := box.Height / 2.0
if hw < 0 {
hw = -hw
}
if hh < 0 {
hh = -hh
}
box.verts = [4]vect.Vect{
{-hw, -hh},
{-hw, hh},
{hw, hh},
{hw, -hh},
}
poly := box.Polygon
poly.SetVerts(box.verts[:], box.Position)
}
// Returns ShapeType_Box. Needed to implement the ShapeClass interface.
func (box *BoxShape) ShapeType() ShapeType {
return ShapeType_Box
}
// Returns a copy of this box shape. Needed to implement the ShapeClass interface.
func (box *BoxShape) Clone(s *Shape) ShapeClass {
clone := *box
clone.Polygon = &PolygonShape{Shape: s}
clone.Shape = s
clone.UpdatePoly()
return &clone
}
// Recalculates the transformed vertices, axes and the bounding box. | }
// Returns true if the given point is located inside the box.
func (box *BoxShape) TestPoint(point vect.Vect) bool {
return box.Polygon.TestPoint(point)
} | func (box *BoxShape) update(xf transform.Transform) AABB {
return box.Polygon.update(xf) |
accept_login_request_responses.go | // Code generated by go-swagger; DO NOT EDIT.
package admin
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
strfmt "github.com/go-openapi/strfmt"
models "github.com/ory/hydra/sdk/go/hydra/models"
)
// AcceptLoginRequestReader is a Reader for the AcceptLoginRequest structure.
type AcceptLoginRequestReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *AcceptLoginRequestReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewAcceptLoginRequestOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
case 401:
result := NewAcceptLoginRequestUnauthorized()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 404:
result := NewAcceptLoginRequestNotFound()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 500:
result := NewAcceptLoginRequestInternalServerError()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
default:
return nil, runtime.NewAPIError("unknown error", response, response.Code())
}
}
// NewAcceptLoginRequestOK creates a AcceptLoginRequestOK with default headers values
func NewAcceptLoginRequestOK() *AcceptLoginRequestOK {
return &AcceptLoginRequestOK{}
}
/*AcceptLoginRequestOK handles this case with default header values.
completedRequest
*/
type AcceptLoginRequestOK struct {
Payload *models.RequestHandlerResponse
}
func (o *AcceptLoginRequestOK) Error() string {
return fmt.Sprintf("[PUT /oauth2/auth/requests/login/accept][%d] acceptLoginRequestOK %+v", 200, o.Payload)
}
func (o *AcceptLoginRequestOK) GetPayload() *models.RequestHandlerResponse {
return o.Payload
}
func (o *AcceptLoginRequestOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.RequestHandlerResponse)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewAcceptLoginRequestUnauthorized creates a AcceptLoginRequestUnauthorized with default headers values
func NewAcceptLoginRequestUnauthorized() *AcceptLoginRequestUnauthorized {
return &AcceptLoginRequestUnauthorized{}
}
/*AcceptLoginRequestUnauthorized handles this case with default header values.
genericError
*/
type AcceptLoginRequestUnauthorized struct {
Payload *models.GenericError
}
func (o *AcceptLoginRequestUnauthorized) Error() string {
return fmt.Sprintf("[PUT /oauth2/auth/requests/login/accept][%d] acceptLoginRequestUnauthorized %+v", 401, o.Payload)
}
func (o *AcceptLoginRequestUnauthorized) GetPayload() *models.GenericError {
return o.Payload
}
func (o *AcceptLoginRequestUnauthorized) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.GenericError)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewAcceptLoginRequestNotFound creates a AcceptLoginRequestNotFound with default headers values
func | () *AcceptLoginRequestNotFound {
return &AcceptLoginRequestNotFound{}
}
/*AcceptLoginRequestNotFound handles this case with default header values.
genericError
*/
type AcceptLoginRequestNotFound struct {
Payload *models.GenericError
}
func (o *AcceptLoginRequestNotFound) Error() string {
return fmt.Sprintf("[PUT /oauth2/auth/requests/login/accept][%d] acceptLoginRequestNotFound %+v", 404, o.Payload)
}
func (o *AcceptLoginRequestNotFound) GetPayload() *models.GenericError {
return o.Payload
}
func (o *AcceptLoginRequestNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.GenericError)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewAcceptLoginRequestInternalServerError creates a AcceptLoginRequestInternalServerError with default headers values
func NewAcceptLoginRequestInternalServerError() *AcceptLoginRequestInternalServerError {
return &AcceptLoginRequestInternalServerError{}
}
/*AcceptLoginRequestInternalServerError handles this case with default header values.
genericError
*/
type AcceptLoginRequestInternalServerError struct {
Payload *models.GenericError
}
func (o *AcceptLoginRequestInternalServerError) Error() string {
return fmt.Sprintf("[PUT /oauth2/auth/requests/login/accept][%d] acceptLoginRequestInternalServerError %+v", 500, o.Payload)
}
func (o *AcceptLoginRequestInternalServerError) GetPayload() *models.GenericError {
return o.Payload
}
func (o *AcceptLoginRequestInternalServerError) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.GenericError)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
| NewAcceptLoginRequestNotFound |
settings.py | import json
from machine_settings import _MachineConfig
import os.path as os_path
ENCODING = 'utf8'
def get_config():
config = Config()
return config
class _ConfigBase:
def __init__(self, parent):
self._parent = parent
machine_config = _MachineConfig()
self._initialize(machine_config)
def _initialize(self, machine_config):
pass
def __str__(self):
dict = {}
self._toJson(dict, self)
return json.dumps(dict, indent=4)
@classmethod
def _toJson(cls, parent, obj):
for attribute_name in dir(obj):
if not attribute_name.startswith('_'):
attribute = getattr(obj, attribute_name)
if isinstance(attribute, _ConfigBase):
child = {}
parent[attribute_name] = child
cls._toJson(child, attribute)
else:
parent[attribute_name] = attribute
class _CheckpointConfig(_ConfigBase):
def _initialize(self, _):
self.enabled = True
self.weights_only = True
self.dir = 'checkpoints'
self.filename = 'best_model.hdf5'
class _CrossValidationConfig(_ConfigBase):
def _initialize(self, machine_config):
self.train_set_ids_path = os_path.join(machine_config.data_dir, 'preprocessed/cross-validation/group_1_train_set_1809-2018.txt')
self.dev_set_ids_path = os_path.join(machine_config.data_dir, 'preprocessed/cross-validation/group_1_target_dev_set_2018-2018.txt')
self.test_set_ids_path = os_path.join(machine_config.data_dir, 'preprocessed/cross-validation/group_1_reporting_test_set_2018-2018.txt')
self.encoding = ENCODING
self.train_limit = machine_config.train_limit
self.dev_limit = machine_config.dev_limit
self.test_limit = machine_config.test_limit
class _CsvLoggerConfig(_ConfigBase):
def _initialize(self, _):
self.dir = 'logs'
self.filename = 'logs.csv'
self.best_epoch_filename = 'best_epoch_logs.txt'
self.encoding = ENCODING
class _DatabaseConfig(_ConfigBase):
def _initialize(self, machine_config):
self.config = { 'user': '****',
'database': '****',
'password': '****',
'host': machine_config.database_host,
'charset': 'utf8mb4',
'collation': 'utf8mb4_unicode_ci',
'use_unicode': True }
class _EarlyStoppingConfig(_ConfigBase):
def _initialize(self, _):
self.min_delta = 0.001
self.patience = 2
class _ModelConfig(_ConfigBase):
def _initialize(self, _):
self.checkpoint = _CheckpointConfig(self)
self.word_embedding_size = 300
self.word_embedding_dropout_rate = 0.25
self.conv_act = 'relu'
self.num_conv_filter_sizes = 3
self.min_conv_filter_size = 2
self.conv_filter_size_step = 3
self.total_conv_filters = 350
self.num_pool_regions = 5
self.num_journals = 30347
self.journal_embedding_size = 50
self.num_hidden_layers = 1
self.hidden_layer_size = 3365
self.hidden_layer_act = 'relu'
self.inputs_dropout_rate = 0.0
self.dropout_rate = 0.5
self.output_layer_act = 'sigmoid'
self.output_layer_size = self._pp_config.num_labels
self.init_threshold = 0.5
self.init_learning_rate = 0.001
@property
def hidden_layer_sizes(self):
return [self.hidden_layer_size]*self.num_hidden_layers
@property
def conv_filter_sizes(self):
sizes = [self.min_conv_filter_size + self.conv_filter_size_step*idx for idx in range(self.num_conv_filter_sizes)]
return sizes
@property
def conv_num_filters(self):
num_filters = round(self.total_conv_filters / len(self.conv_filter_sizes))
return num_filters
@property
def _pp_config(self):
return self._parent.inputs.preprocessing
@property
def vocab_size(self):
return self._pp_config.vocab_size
@property
def title_max_words(self):
return self._pp_config.title_max_words
@property
def abstract_max_words(self):
return self._pp_config.abstract_max_words
@property
def num_year_completed_time_periods(self):
return self._pp_config.num_year_completed_time_periods
@property
def num_pub_year_time_periods(self):
return self._pp_config.num_pub_year_time_periods
class _PreprocessingConfig(_ConfigBase):
def _initialize(self, machine_config):
self.word_index_lookup_path = os_path.join(machine_config.data_dir, 'preprocessed/vocab/cross_val_group_1_word_index_lookup.pkl') # indices start from 2
self.unknown_index = 1
self.padding_index = 0
self.title_max_words = 64
self.abstract_max_words = 448
self.num_labels = 1
self.vocab_size = 400000
        self.min_year_completed = 1965
self.max_year_completed = 2018
self.num_year_completed_time_periods = 1 + self.max_year_completed - self.min_year_completed
self.min_pub_year = 1809
self.max_pub_year = 2018
self.num_pub_year_time_periods = 1 + self.max_pub_year - self.min_pub_year
class _ProcessingConfig(_ConfigBase):
def _initialize(self, machine_config):
self.run_on_cpu = machine_config.run_on_cpu
self.use_multiprocessing = machine_config.use_multiprocessing
self.workers = machine_config.workers
self.max_queue_size = machine_config.max_queue_size
class _ReduceLearningRateConfig(_ConfigBase):
def _initialize(self, _):
self.factor = 0.33
self.patience = 1
self.min_delta = 0.001
class _RestoreConfig(_ConfigBase):
def _initialize(self, machine_config):
super()._initialize(machine_config)
self.sub_dir = '****'
self.model_json_filename = 'model.json'
self.encoding = ENCODING
self.model_checkpoint_dir = 'checkpoints'
self.model_checkpoint_filename = 'best_model.hdf5'
self.weights_only_checkpoint = True
self.threshold = 0.5
self.learning_rate = 0.001
class _ResumeConfig(_RestoreConfig):
def _initialize(self, machine_config):
super()._initialize(machine_config)
self.enabled = False
self.resume_checkpoint_filename = 'best_model_resume.hdf5'
self.resume_logger_filename = 'logs_resume.csv'
class _SaveConfig(_ConfigBase):
def _initialize(self, _):
self.settings_filename = 'settings.json'
self.model_json_filename = 'model.json'
self.encoding = ENCODING
self.model_img_filename = 'model.png'
class _TensorboardConfig(_ConfigBase):
def _initialize(self, _):
self.enabled = False
self.dir = 'logs'
self.write_graph = True
class _EvaluateConfig(_RestoreConfig, _ProcessingConfig):
def | (self, machine_config):
super()._initialize(machine_config)
self.results_filename = 'eval-result.txt'
self.encoding = ENCODING
self.batch_size = 128
self.limit = 1000000000
class _PredictConfig(_RestoreConfig, _ProcessingConfig):
def _initialize(self, machine_config):
super()._initialize(machine_config)
self.pmids_filepath = '../datasets/pipeline_validation_set.json'
self.results_dir = 'predictions_val'
self.results_filename = 'predictions.csv'
self.dereferenced_filename = 'dereferenced_predictions.csv'
self.metrics_filename_template = 'metrics{}.csv'
self.journal_groups_filepath = os_path.join(machine_config.data_dir, 'preprocessed/selective-indexing/selectively_indexed_journal_groups.csv')
self.encoding = ENCODING
self.delimiter = ','
self.batch_size = 128
self.limit = 1000000000
class _InputsConfig(_ConfigBase):
def _initialize(self, _):
self.preprocessing = _PreprocessingConfig(self)
class _OptimizeFscoreThresholdConfig(_ProcessingConfig):
def _initialize(self, machine_config):
super()._initialize(machine_config)
self.enabled = True
self.batch_size = 128
self.limit = 1000000000
self.metric_name = 'fscore'
self.alpha = 0.005
self.k = 3
class _TrainingConfig(_ProcessingConfig):
def _initialize(self, machine_config):
super()._initialize(machine_config)
self.batch_size = 128
self.initial_epoch = 0
self.max_epochs = 500
self.train_limit = 1000000000
self.dev_limit = 1000000000
self.monitor_metric = 'val_fscore'
self.monitor_mode = 'max'
self.save_config = _SaveConfig(self)
self.optimize_fscore_threshold = _OptimizeFscoreThresholdConfig(self)
self.reduce_learning_rate = _ReduceLearningRateConfig(self)
self.early_stopping = _EarlyStoppingConfig(self)
self.tensorboard = _TensorboardConfig(self)
self.csv_logger = _CsvLoggerConfig(self)
self.resume = _ResumeConfig(self)
class Config(_ConfigBase):
def __init__(self):
super().__init__(self)
def _initialize(self, machine_config):
self.root_dir = machine_config.runs_dir
self.data_dir = machine_config.data_dir
self.inputs = _InputsConfig(self)
self.model = _ModelConfig(self)
self.cross_val = _CrossValidationConfig(self)
self.train = _TrainingConfig(self)
self.eval = _EvaluateConfig(self)
self.pred = _PredictConfig(self)
self.database = _DatabaseConfig(self) | _initialize |
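# --- Hedged usage sketch (added; not part of the original settings.py) ---
# Assumes this module is importable as `settings` and that the machine_settings
# module providing _MachineConfig is available on the path.
if __name__ == "__main__":
    config = get_config()
    # __str__ walks every public attribute recursively and dumps the tree as
    # indented JSON, so the effective configuration can be inspected directly.
    print(config)
    # Derived properties stay in sync with the scalars they are computed from.
    assert len(config.model.conv_filter_sizes) == config.model.num_conv_filter_sizes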
standalone-modal-encrypt.js | import React from 'react'
import 'babel-polyfill';
import 'whatwg-fetch';
import ModalEncrypt from '../modal-encrypt/index.jsx';
import ReactDOM from 'react-dom';
function renderModalEncrypt(options, container, callback){
let modal = null; | }
module.exports = renderModalEncrypt;
window.renderModalEncrypt = renderModalEncrypt; | ReactDOM.render(<ModalEncrypt options={options} ref={ _modal => { modal = _modal; }} />, container, callback);
return modal; |
script.js | console.log('Olá Mundo!');
document.getElementById('btn-submit').addEventListener('click', e => {
console.log('O botão foi clicado!');
})
|
console.log('O mouse está sobre o formulário.');
});
document.querySelector('#form-login').addEventListener('mouseleave', e=> {
console.log('O mouse está fora do formulário.');
});
document.querySelector('#form-login').addEventListener('submit', e=> {
e.preventDefault();
let email = document.querySelector('#email').value;
let password = document.querySelector('#password').value;
let json = {
email,
password
};
if (!json.email) {
console.error("O campo e-mail deve ser preenchido!");
} else if (!json.password) {
console.error("O campo password deve ser preenchido!");
} else {
console.info("Dados validados com sucesso!");
}
}); | document.getElementById('form-login').addEventListener('mouseenter', e => { |
main.py | # import nltk
# import gensim
# import pandas
import string
from nltk.corpus import stopwords
from nltk import word_tokenize
from nltk import WordNetLemmatizer
from nltk import pos_tag
from nltk.stem import PorterStemmer
from gensim.models.doc2vec import TaggedDocument
from gensim.corpora import Dictionary
from gensim.models.phrases import Phrases, Phraser
"""
TODO
stemming - DONE
lemmatizing - DONE
pos filter
tfidf splitter
w2v theme relevance
w2v weightings
frequency filtering (found more than twice)
RESEARCH
kwargs - ad hoc arguments for theme relevance
"""
class saniti:
def __init__(self, text = [], pipeline = [], **kwargs):
#setup
self.processes = {"token": self.token,
"depunct": self.depunct,
"unempty": self.unempty,
"out_tag_doc": self.out_tag_doc,
"out_corp_dict": self.out_corp_dic,
"lemma": self.lemma,
"destop": self.destop,
"posfilter": self.posfilter,
"phrase": self.phrase_gen,
"stem": self.stem}
self.pipeline = pipeline
self.original_text = text
if text != []:
self.text = self.process(text, self.pipeline, **kwargs)
def process(self, text, pipeline, **kwargs):
self.text = text
for line in pipeline: |
def destop(self, text, **kwargs):
text = [[word for word in doc if word not in stopwords.words("english")] for doc in text]
return text
def token(self, text, **kwargs):
if "tokenizer" in kwargs:
tokenizer = kwargs["tokenizer"]
else:
tokenizer = word_tokenize
text = [tokenizer(x) for x in text]
return text
def depunct(self, text, **kwargs):
if "puct" in kwargs:
punct = kwargs["punct"]
else:
punct = string.punctuation
punct = str.maketrans("", "", punct)
text = [[s.translate(punct) for s in doc] for doc in text]
return text
def unempty(self, text, **kwargs):
text = [[s for s in doc if s != ""] for doc in text]
return text
def lemma(self, text, **kwargs):
if "lemmatizer" in kwargs:
lemmatizer = kwargs["lemmatizer"]
else:
lemmatizer = WordNetLemmatizer()
text = [[lemmatizer.lemmatize(w) for w in doc] for doc in text]
return text
def phrase_gen(self, text, **kwargs):
if "common_terms" in kwargs:
common_terms = kwargs["common_terms"]
else:
common_terms = stopwords.words("english")
# print(list(common_terms))
phrases = Phrases(text, common_terms=common_terms)
phraser = Phraser(phrases)
text = [phraser[x] for x in text]
return text
def stem(self, text, **kwargs):
if "stemmer" in kwargs:
stemmer = kwargs["stemmer"]
else:
stemmer = PorterStemmer()
text = [[stemmer.stem(word) for word in doc] for doc in text]
return text
def posfilter(self, text, **kwargs):
if "pos_tagger" not in kwargs:
pos_tagger = pos_tag
else:
pos_tagger = kwargs["pos_tagger"]
if "pos_only" not in kwargs:
pos_only = ["NN", "VB"]
else:
pos_only = kwargs["pos_only"]
print(text)
text = [[word[0] for word in pos_tagger(doc) if word[1] in pos_only] if doc != [] else doc for doc in text]
return text
def out_corp_dic(self, text, **kwargs):
dictionary = Dictionary(text)
corpus = [dictionary.doc2bow(doc) for doc in text]
return {"dictionary": dictionary, "corpus": corpus}
def out_tag_doc(self, text, **kwargs):
if "tags" in kwargs:
tags = kwargs["tags"]
else:
tags = []
if tags == []:
if self.original_text != []:
tags = self.original_text
            else:
tags = [" ".join(doc) for doc in text]
list2 = []
for xt, xid in zip(text, tags):
try:
td = TaggedDocument(xt, [xid])
list2.append(td)
            except Exception:
                print(f"disambig {xid}")
        return list2
if __name__ == "__main__":
original_text = ["I like to moves it, move its", "I likeing to move it!", "the of"]
text = saniti(original_text, ["token", "destop", "depunct", "unempty", "phrase"])
print(text.text)
sani1 = saniti()
text = sani1.process(original_text, ["token", "destop", "depunct", "unempty", "lemma", "out_tag_doc"])
print(text) | text = self.processes[line](text, **kwargs)
return text |
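# Hedged follow-up sketch (added; not part of the original file): the
# "out_corp_dict" step returns a gensim Dictionary plus a bag-of-words corpus,
# which can feed a topic model directly. The LdaModel call below is an
# assumption about downstream use, not something the original code does.
if __name__ == "__main__":
    from gensim.models import LdaModel
    out = saniti(original_text, ["token", "destop", "depunct", "unempty", "out_corp_dict"]).text
    lda = LdaModel(corpus=out["corpus"], id2word=out["dictionary"], num_topics=2, passes=2)
    print(lda.print_topics())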
rustdoc_lib.rs | #[cfg(feature = "with_proc_macro")]
use rustdoc_proc_macro::make_answer;
#[cfg(feature = "with_proc_macro")]
make_answer!();
/// The answer to the ultimate question
/// ```
/// fn answer() -> u32 { 42 }
/// assert_eq!(answer(), 42);
/// ```
#[cfg(not(feature = "with_proc_macro"))]
pub fn answer() -> u32 { | #[cfg(test)]
mod test {
use super::*;
#[test]
fn test_answer() {
assert_eq!(answer(), 42);
}
} | 42
}
|
custom.py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=too-many-lines
# pylint: disable=line-too-long
from __future__ import print_function
from collections import OrderedDict
import codecs
import json
import os
import platform
import re
import ssl
import sys
import uuid
import base64
from six.moves.urllib.request import urlopen # pylint: disable=import-error
from six.moves.urllib.parse import urlparse # pylint: disable=import-error
from msrestazure.tools import is_valid_resource_id, parse_resource_id
from azure.mgmt.resource.resources.models import GenericResource, DeploymentMode
from azure.cli.core.parser import IncorrectUsageError
from azure.cli.core.util import get_file_json, read_file_content, shell_safe_json_parse, sdk_no_wait
from azure.cli.core.commands import LongRunningOperation
from azure.cli.core.commands.client_factory import get_mgmt_service_client
from azure.cli.core.profiles import ResourceType, get_sdk, get_api_version, AZURE_API_PROFILES
from azure.cli.command_modules.resource._client_factory import (
_resource_client_factory, _resource_policy_client_factory, _resource_lock_client_factory,
_resource_links_client_factory, _resource_deploymentscripts_client_factory, _authorization_management_client, _resource_managedapps_client_factory, _resource_templatespecs_client_factory)
from azure.cli.command_modules.resource._validators import _parse_lock_id
from knack.log import get_logger
from knack.prompting import prompt, prompt_pass, prompt_t_f, prompt_choice_list, prompt_int, NoTTYException
from knack.util import CLIError
from msrest.serialization import Serializer
from msrest.pipeline import SansIOHTTPPolicy
from ._validators import MSI_LOCAL_ID
from ._formatters import format_what_if_operation_result
logger = get_logger(__name__)
def _build_resource_id(**kwargs):
from msrestazure.tools import resource_id as resource_id_from_dict
try:
return resource_id_from_dict(**kwargs)
except KeyError:
return None
def _process_parameters(template_param_defs, parameter_lists): # pylint: disable=too-many-statements
def _try_parse_json_object(value):
try:
parsed = _remove_comments_from_json(value, False)
return parsed.get('parameters', parsed)
except Exception: # pylint: disable=broad-except
return None
def _try_load_file_object(file_path):
try:
is_file = os.path.isfile(file_path)
except ValueError:
return None
if is_file is True:
try:
content = read_file_content(file_path)
if not content:
return None
parsed = _remove_comments_from_json(content, False, file_path)
return parsed.get('parameters', parsed)
except Exception as ex:
raise CLIError("Failed to parse {} with exception:\n {}".format(file_path, ex))
return None
def _try_load_uri(uri):
if "://" in uri:
try:
value = _urlretrieve(uri).decode('utf-8')
parsed = _remove_comments_from_json(value, False)
return parsed.get('parameters', parsed)
except Exception: # pylint: disable=broad-except
pass
return None
def _try_parse_key_value_object(template_param_defs, parameters, value):
# support situation where empty JSON "{}" is provided
if value == '{}' and not parameters:
return True
try:
key, value = value.split('=', 1)
except ValueError:
return False
param = template_param_defs.get(key, None)
if param is None:
raise CLIError("unrecognized template parameter '{}'. Allowed parameters: {}"
.format(key, ', '.join(sorted(template_param_defs.keys()))))
param_type = param.get('type', None)
if param_type:
param_type = param_type.lower()
if param_type in ['object', 'array', 'secureobject']:
parameters[key] = {'value': shell_safe_json_parse(value)}
elif param_type in ['string', 'securestring']:
parameters[key] = {'value': value}
elif param_type == 'bool':
parameters[key] = {'value': value.lower() == 'true'}
elif param_type == 'int':
parameters[key] = {'value': int(value)}
else:
logger.warning("Unrecognized type '%s' for parameter '%s'. Interpretting as string.", param_type, key)
parameters[key] = {'value': value}
return True
parameters = {}
for params in parameter_lists or []:
for item in params:
param_obj = _try_load_file_object(item)
if param_obj is None:
param_obj = _try_parse_json_object(item)
if param_obj is None:
param_obj = _try_load_uri(item)
if param_obj is not None:
parameters.update(param_obj)
elif not _try_parse_key_value_object(template_param_defs, parameters, item):
raise CLIError('Unable to parse parameter: {}'.format(item))
return parameters
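# Hedged illustration (added; not part of the Azure CLI source): how the mixed
# parameter forms above resolve. The helper name and all literal values below
# are hypothetical.
def _example_process_parameters_usage():
    template_param_defs = {
        'siteName': {'type': 'string'},
        'instanceCount': {'type': 'int'},
        'tags': {'type': 'object'},
    }
    parameter_lists = [[
        'siteName=mysite',        # KEY=VALUE, kept as a plain string
        'instanceCount=3',        # coerced to int via the template's type
        'tags={"env": "dev"}',    # object values are JSON-parsed
    ]]
    return _process_parameters(template_param_defs, parameter_lists)
    # -> {'siteName': {'value': 'mysite'}, 'instanceCount': {'value': 3},
    #     'tags': {'value': {'env': 'dev'}}}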
# pylint: disable=redefined-outer-name
def _find_missing_parameters(parameters, template):
if template is None:
return {}
template_parameters = template.get('parameters', None)
if template_parameters is None:
return {}
missing = OrderedDict()
for parameter_name in template_parameters:
parameter = template_parameters[parameter_name]
if 'defaultValue' in parameter:
continue
if parameters is not None and parameters.get(parameter_name, None) is not None:
continue
missing[parameter_name] = parameter
return missing
def _prompt_for_parameters(missing_parameters, fail_on_no_tty=True): # pylint: disable=too-many-statements
prompt_list = missing_parameters.keys() if isinstance(missing_parameters, OrderedDict) \
else sorted(missing_parameters)
result = OrderedDict()
no_tty = False
for param_name in prompt_list:
param = missing_parameters[param_name]
param_type = param.get('type', 'string').lower()
description = 'Missing description'
metadata = param.get('metadata', None)
if metadata is not None:
description = metadata.get('description', description)
allowed_values = param.get('allowedValues', None)
prompt_str = "Please provide {} value for '{}' (? for help): ".format(param_type, param_name)
while True:
if allowed_values is not None:
try:
ix = prompt_choice_list(prompt_str, allowed_values, help_string=description)
result[param_name] = allowed_values[ix]
except NoTTYException:
result[param_name] = None
no_tty = True
break
elif param_type == 'securestring':
try:
value = prompt_pass(prompt_str, help_string=description)
except NoTTYException:
value = None
no_tty = True
result[param_name] = value
break
elif param_type == 'int':
try:
int_value = prompt_int(prompt_str, help_string=description)
result[param_name] = int_value
except NoTTYException:
result[param_name] = 0
no_tty = True
break
elif param_type == 'bool':
try:
value = prompt_t_f(prompt_str, help_string=description)
result[param_name] = value
except NoTTYException:
result[param_name] = False
no_tty = True
break
elif param_type in ['object', 'array']:
try:
value = prompt(prompt_str, help_string=description)
except NoTTYException:
value = ''
no_tty = True
if value == '':
value = {} if param_type == 'object' else []
else:
try:
value = shell_safe_json_parse(value)
except Exception as ex: # pylint: disable=broad-except
logger.error(ex)
continue
result[param_name] = value
break
else:
try:
result[param_name] = prompt(prompt_str, help_string=description)
except NoTTYException:
result[param_name] = None
no_tty = True
break
if no_tty and fail_on_no_tty:
raise NoTTYException
return result
# pylint: disable=redefined-outer-name
def _get_missing_parameters(parameters, template, prompt_fn, no_prompt=False):
missing = _find_missing_parameters(parameters, template)
if missing:
if no_prompt is True:
logger.warning("Missing input parameters: %s ", ', '.join(sorted(missing.keys())))
else:
try:
prompt_parameters = prompt_fn(missing)
for param_name in prompt_parameters:
parameters[param_name] = {
"value": prompt_parameters[param_name]
}
except NoTTYException:
raise CLIError("Missing input parameters: {}".format(', '.join(sorted(missing.keys()))))
return parameters
def _ssl_context():
if sys.version_info < (3, 4):
return ssl.SSLContext(ssl.PROTOCOL_TLSv1)
return ssl.create_default_context()
def _urlretrieve(url):
req = urlopen(url, context=_ssl_context())
return req.read()
# pylint: disable=redefined-outer-name
def _remove_comments_from_json(template, preserve_order=True, file_path=None):
from jsmin import jsmin
    # When a comment follows the last element of a JSON object, jsmin has a bug that wraps lines.
    # That would break the multi-line string handling below, so strip such comments up front here.
template = re.sub(r'(^[\t ]*//[\s\S]*?\n)|(^[\t ]*/\*{1,2}[\s\S]*?\*/)', '', template, flags=re.M)
minified = jsmin(template)
    # Get rid of multi-line strings. Note: we are not sending this on the wire; we only extract parameters to prompt for values.
result = re.sub(r'"[^"]*?\n[^"]*?(?<!\\)"', '"#Azure Cli#"', minified, re.DOTALL)
try:
return shell_safe_json_parse(result, preserve_order)
except CLIError:
        # Removing comments and minifying shifts the reported error location,
        # so wrap the raw parse error in a clearer message.
if file_path:
raise CLIError("Failed to parse '{}', please check whether it is a valid JSON format".format(file_path))
raise CLIError("Failed to parse the JSON data, please check whether it is a valid JSON format")
# pylint: disable=too-many-locals, too-many-statements, too-few-public-methods
def _deploy_arm_template_core_unmodified(cmd, resource_group_name, template_file=None,
template_uri=None, deployment_name=None, parameters=None,
mode=None, rollback_on_error=None, validate_only=False, no_wait=False,
aux_subscriptions=None, aux_tenants=None, no_prompt=False):
DeploymentProperties, TemplateLink, OnErrorDeployment = cmd.get_models('DeploymentProperties', 'TemplateLink',
'OnErrorDeployment')
template_link = None
template_obj = None
on_error_deployment = None
template_content = None
if template_uri:
template_link = TemplateLink(uri=template_uri)
template_obj = _remove_comments_from_json(_urlretrieve(template_uri).decode('utf-8'), file_path=template_uri)
else:
template_content = read_file_content(template_file)
template_obj = _remove_comments_from_json(template_content, file_path=template_file)
if rollback_on_error == '':
on_error_deployment = OnErrorDeployment(type='LastSuccessful')
elif rollback_on_error:
on_error_deployment = OnErrorDeployment(type='SpecificDeployment', deployment_name=rollback_on_error)
template_param_defs = template_obj.get('parameters', {})
template_obj['resources'] = template_obj.get('resources', [])
parameters = _process_parameters(template_param_defs, parameters) or {}
parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters, no_prompt)
parameters = json.loads(json.dumps(parameters))
properties = DeploymentProperties(template=template_content, template_link=template_link,
parameters=parameters, mode=mode, on_error_deployment=on_error_deployment)
smc = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants)
deployment_client = smc.deployments # This solves the multi-api for you
if not template_uri:
# pylint: disable=protected-access
deployment_client._serialize = JSONSerializer(
deployment_client._serialize.dependencies
)
# Plug this as default HTTP pipeline
from msrest.pipeline import Pipeline
from msrest.pipeline.requests import (
RequestsCredentialsPolicy,
RequestsPatchSession,
PipelineRequestsHTTPSender
)
from msrest.universal_http.requests import RequestsHTTPSender
smc.config.pipeline = Pipeline(
policies=[
JsonCTemplatePolicy(),
smc.config.user_agent_policy,
RequestsPatchSession(),
smc.config.http_logger_policy,
RequestsCredentialsPolicy(smc.config.credentials)
],
sender=PipelineRequestsHTTPSender(RequestsHTTPSender(smc.config))
)
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
Deployment = cmd.get_models('Deployment')
deployment = Deployment(properties=properties)
validation_poller = deployment_client.validate(resource_group_name, deployment_name, deployment)
validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
else:
validation_result = deployment_client.validate(resource_group_name, deployment_name, properties)
if validation_result and validation_result.error:
err_message = _build_preflight_error_message(validation_result.error)
raise CLIError(err_message)
if validate_only:
return validation_result
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
return sdk_no_wait(no_wait, deployment_client.create_or_update, resource_group_name, deployment_name, deployment)
return sdk_no_wait(no_wait, deployment_client.create_or_update, resource_group_name, deployment_name, properties)
class JsonCTemplate:
def __init__(self, template_as_bytes):
self.template_as_bytes = template_as_bytes
class JSONSerializer(Serializer):
def body(self, data, data_type, **kwargs):
if data_type in ('Deployment', 'ScopedDeployment', 'DeploymentWhatIf', 'ScopedDeploymentWhatIf'):
# Be sure to pass a DeploymentProperties
template = data.properties.template
if template:
data_as_dict = data.serialize()
data_as_dict["properties"]["template"] = JsonCTemplate(template)
return data_as_dict
return super(JSONSerializer, self).body(data, data_type, **kwargs)
class JsonCTemplatePolicy(SansIOHTTPPolicy):
def on_request(self, request, **kwargs):
http_request = request.http_request
logger.info(http_request.data)
if (getattr(http_request, 'data', {}) or {}).get('properties', {}).get('template'):
template = http_request.data["properties"]["template"]
if not isinstance(template, JsonCTemplate):
raise ValueError()
del http_request.data["properties"]["template"]
            # templateLink and template cannot exist at the same time in deployment_dry_run mode
if "templateLink" in http_request.data["properties"].keys():
del http_request.data["properties"]["templateLink"]
partial_request = json.dumps(http_request.data)
http_request.data = partial_request[:-2] + ", template:" + template.template_as_bytes + r"}}"
http_request.data = http_request.data.encode('utf-8')
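# Hedged illustration (added; not part of the Azure CLI source): the string
# surgery above splices the user's template text into the serialized body
# byte-for-byte, so JSONC comments and formatting survive re-serialization.
# Values below are hypothetical; template_as_bytes is a str in this code path
# despite its name.
def _example_template_splice():
    partial_request = '{"properties": {"mode": "Incremental"}}'
    raw_template = '{"resources": [] /* kept verbatim */}'
    # Drop the closing "}}", append the raw template, then re-close the braces.
    return partial_request[:-2] + ", template:" + raw_template + r"}}"
    # -> '{"properties": {"mode": "Incremental", template:{"resources": [] /* kept verbatim */}}}'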
# pylint: disable=unused-argument
def deploy_arm_template_at_subscription_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
no_wait=False, handle_extended_json_format=None, no_prompt=False,
confirm_with_what_if=None, what_if_result_format=None,
what_if_exclude_change_types=None, template_spec=None):
if confirm_with_what_if:
what_if_deploy_arm_template_at_subscription_scope(cmd,
template_file=template_file, template_uri=template_uri,
parameters=parameters, deployment_name=deployment_name,
deployment_location=deployment_location,
result_format=what_if_result_format,
exclude_change_types=what_if_exclude_change_types,
no_prompt=no_prompt, template_spec=template_spec)
from knack.prompting import prompt_y_n
if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
return None
return _deploy_arm_template_at_subscription_scope(cmd=cmd,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, deployment_location=deployment_location,
validate_only=False, no_wait=no_wait,
no_prompt=no_prompt, template_spec=template_spec)
# pylint: disable=unused-argument
def validate_arm_template_at_subscription_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
no_wait=False, handle_extended_json_format=None,
no_prompt=False, template_spec=None):
return _deploy_arm_template_at_subscription_scope(cmd=cmd,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, deployment_location=deployment_location,
validate_only=True, no_wait=no_wait,
no_prompt=no_prompt, template_spec=template_spec,)
def _deploy_arm_template_at_subscription_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None, validate_only=False,
no_wait=False, no_prompt=False, template_spec=None):
deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
template_uri=template_uri, parameters=parameters,
mode='Incremental',
no_prompt=no_prompt,
template_spec=template_spec)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
Deployment = cmd.get_models('Deployment')
deployment = Deployment(properties=deployment_properties, location=deployment_location)
validation_poller = mgmt_client.validate_at_subscription_scope(deployment_name, deployment)
validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
else:
validation_result = mgmt_client.validate_at_subscription_scope(deployment_name, deployment_properties, deployment_location)
if validation_result and validation_result.error:
err_message = _build_preflight_error_message(validation_result.error)
raise CLIError(err_message)
if validate_only:
return validation_result
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_subscription_scope, deployment_name, deployment)
return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_subscription_scope, deployment_name,
deployment_properties, deployment_location)
# pylint: disable=unused-argument
def deploy_arm_template_at_resource_group(cmd,
resource_group_name=None,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, mode=None, rollback_on_error=None,
no_wait=False, handle_extended_json_format=None,
aux_subscriptions=None, aux_tenants=None, no_prompt=False,
confirm_with_what_if=None, what_if_result_format=None,
what_if_exclude_change_types=None, template_spec=None):
if confirm_with_what_if:
what_if_deploy_arm_template_at_resource_group(cmd,
resource_group_name=resource_group_name,
template_file=template_file, template_uri=template_uri,
parameters=parameters, deployment_name=deployment_name, mode=mode,
aux_tenants=aux_tenants, result_format=what_if_result_format,
exclude_change_types=what_if_exclude_change_types,
no_prompt=no_prompt, template_spec=template_spec)
from knack.prompting import prompt_y_n
if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
return None
return _deploy_arm_template_at_resource_group(cmd=cmd,
resource_group_name=resource_group_name,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, mode=mode, rollback_on_error=rollback_on_error,
validate_only=False, no_wait=no_wait,
aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants,
no_prompt=no_prompt, template_spec=template_spec)
# pylint: disable=unused-argument
def validate_arm_template_at_resource_group(cmd,
resource_group_name=None,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, mode=None, rollback_on_error=None,
no_wait=False, handle_extended_json_format=None, no_prompt=False, template_spec=None):
return _deploy_arm_template_at_resource_group(cmd,
resource_group_name=resource_group_name,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, mode=mode, rollback_on_error=rollback_on_error,
validate_only=True, no_wait=no_wait,
no_prompt=no_prompt, template_spec=template_spec)
def _deploy_arm_template_at_resource_group(cmd,
resource_group_name=None,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, mode=None, rollback_on_error=None,
validate_only=False, no_wait=False,
aux_subscriptions=None, aux_tenants=None, no_prompt=False, template_spec=None):
deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
template_uri=template_uri,
parameters=parameters, mode=mode,
rollback_on_error=rollback_on_error,
no_prompt=no_prompt, template_spec=template_spec)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, aux_subscriptions=aux_subscriptions,
aux_tenants=aux_tenants, plug_pipeline=(template_uri is None and template_spec is None))
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
Deployment = cmd.get_models('Deployment')
deployment = Deployment(properties=deployment_properties)
validation_poller = mgmt_client.validate(resource_group_name, deployment_name, deployment)
validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
else:
validation_result = mgmt_client.validate(resource_group_name, deployment_name, deployment_properties)
if validation_result and validation_result.error:
err_message = _build_preflight_error_message(validation_result.error)
raise CLIError(err_message)
if validate_only:
|
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
return sdk_no_wait(no_wait, mgmt_client.create_or_update, resource_group_name, deployment_name, deployment)
return sdk_no_wait(no_wait, mgmt_client.create_or_update, resource_group_name, deployment_name, deployment_properties)
# pylint: disable=unused-argument
def deploy_arm_template_at_management_group(cmd,
management_group_id=None,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
no_wait=False, handle_extended_json_format=None, no_prompt=False,
confirm_with_what_if=None, what_if_result_format=None,
what_if_exclude_change_types=None, template_spec=None):
if confirm_with_what_if:
what_if_deploy_arm_template_at_management_group(cmd,
management_group_id=management_group_id,
template_file=template_file, template_uri=template_uri,
parameters=parameters, deployment_name=deployment_name,
deployment_location=deployment_location,
result_format=what_if_result_format,
exclude_change_types=what_if_exclude_change_types,
no_prompt=no_prompt, template_spec=template_spec)
from knack.prompting import prompt_y_n
if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
return None
return _deploy_arm_template_at_management_group(cmd=cmd,
management_group_id=management_group_id,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, deployment_location=deployment_location,
validate_only=False, no_wait=no_wait,
no_prompt=no_prompt, template_spec=template_spec)
# pylint: disable=unused-argument
def validate_arm_template_at_management_group(cmd,
management_group_id=None,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
no_wait=False, handle_extended_json_format=None,
no_prompt=False, template_spec=None):
return _deploy_arm_template_at_management_group(cmd=cmd,
management_group_id=management_group_id,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, deployment_location=deployment_location,
validate_only=True, no_wait=no_wait,
no_prompt=no_prompt, template_spec=template_spec)
def _deploy_arm_template_at_management_group(cmd,
management_group_id=None,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None, validate_only=False,
no_wait=False, no_prompt=False, template_spec=None):
deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
template_uri=template_uri,
parameters=parameters, mode='Incremental',
no_prompt=no_prompt, template_spec=template_spec)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
ScopedDeployment = cmd.get_models('ScopedDeployment')
deployment = ScopedDeployment(properties=deployment_properties, location=deployment_location)
validation_poller = mgmt_client.validate_at_management_group_scope(management_group_id, deployment_name, deployment)
validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
else:
validation_result = mgmt_client.validate_at_management_group_scope(management_group_id, deployment_name,
deployment_properties, deployment_location)
if validation_result and validation_result.error:
err_message = _build_preflight_error_message(validation_result.error)
raise CLIError(err_message)
if validate_only:
return validation_result
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_management_group_scope,
management_group_id, deployment_name, deployment)
return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_management_group_scope,
management_group_id, deployment_name, deployment_properties, deployment_location)
# pylint: disable=unused-argument
def deploy_arm_template_at_tenant_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
no_wait=False, handle_extended_json_format=None, no_prompt=False,
confirm_with_what_if=None, what_if_result_format=None,
what_if_exclude_change_types=None, template_spec=None):
if confirm_with_what_if:
what_if_deploy_arm_template_at_tenant_scope(cmd,
template_file=template_file, template_uri=template_uri,
parameters=parameters, deployment_name=deployment_name,
deployment_location=deployment_location,
result_format=what_if_result_format,
exclude_change_types=what_if_exclude_change_types,
no_prompt=no_prompt, template_spec=template_spec)
from knack.prompting import prompt_y_n
if not prompt_y_n("\nAre you sure you want to execute the deployment?"):
return None
return _deploy_arm_template_at_tenant_scope(cmd=cmd,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, deployment_location=deployment_location,
validate_only=False, no_wait=no_wait,
no_prompt=no_prompt, template_spec=template_spec)
# pylint: disable=unused-argument
def validate_arm_template_at_tenant_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
no_wait=False, handle_extended_json_format=None, no_prompt=False, template_spec=None):
return _deploy_arm_template_at_tenant_scope(cmd=cmd,
template_file=template_file, template_uri=template_uri, parameters=parameters,
deployment_name=deployment_name, deployment_location=deployment_location,
validate_only=True, no_wait=no_wait,
no_prompt=no_prompt, template_spec=template_spec)
def _deploy_arm_template_at_tenant_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None, validate_only=False,
no_wait=False, no_prompt=False, template_spec=None):
deployment_properties = _prepare_deployment_properties_unmodified(cmd, template_file=template_file,
template_uri=template_uri,
parameters=parameters, mode='Incremental',
no_prompt=no_prompt, template_spec=template_spec,)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
ScopedDeployment = cmd.get_models('ScopedDeployment')
deployment = ScopedDeployment(properties=deployment_properties, location=deployment_location)
validation_poller = mgmt_client.validate_at_tenant_scope(deployment_name=deployment_name, parameters=deployment)
validation_result = LongRunningOperation(cmd.cli_ctx)(validation_poller)
else:
validation_result = mgmt_client.validate_at_tenant_scope(deployment_name=deployment_name,
properties=deployment_properties,
location=deployment_location)
if validation_result and validation_result.error:
err_message = _build_preflight_error_message(validation_result.error)
raise CLIError(err_message)
if validate_only:
return validation_result
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_tenant_scope, deployment_name, deployment)
return sdk_no_wait(no_wait, mgmt_client.create_or_update_at_tenant_scope, deployment_name,
deployment_properties, deployment_location)
def what_if_deploy_arm_template_at_resource_group(cmd, resource_group_name,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, mode=DeploymentMode.incremental,
aux_tenants=None, result_format=None,
no_pretty_print=None, no_prompt=False,
exclude_change_types=None, template_spec=None):
what_if_properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri,
parameters, mode, result_format, no_prompt, template_spec)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, aux_tenants=aux_tenants,
plug_pipeline=(template_uri is None and template_spec is None))
what_if_poller = mgmt_client.what_if(resource_group_name, deployment_name, what_if_properties)
return _what_if_deploy_arm_template_core(cmd.cli_ctx, what_if_poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_subscription_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
result_format=None, no_pretty_print=None, no_prompt=False,
exclude_change_types=None, template_spec=None):
what_if_properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
DeploymentMode.incremental, result_format, no_prompt, template_spec)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
what_if_poller = mgmt_client.what_if_at_subscription_scope(deployment_name, what_if_properties, deployment_location)
return _what_if_deploy_arm_template_core(cmd.cli_ctx, what_if_poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_management_group(cmd, management_group_id=None,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
result_format=None, no_pretty_print=None, no_prompt=False,
exclude_change_types=None, template_spec=None):
what_if_properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
DeploymentMode.incremental, result_format, no_prompt, template_spec=template_spec)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
what_if_poller = mgmt_client.what_if_at_management_group_scope(management_group_id, deployment_name,
deployment_location, what_if_properties)
return _what_if_deploy_arm_template_core(cmd.cli_ctx, what_if_poller, no_pretty_print, exclude_change_types)
def what_if_deploy_arm_template_at_tenant_scope(cmd,
template_file=None, template_uri=None, parameters=None,
deployment_name=None, deployment_location=None,
result_format=None, no_pretty_print=None, no_prompt=False,
exclude_change_types=None, template_spec=None):
what_if_properties = _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
DeploymentMode.incremental, result_format, no_prompt, template_spec)
mgmt_client = _get_deployment_management_client(cmd.cli_ctx, plug_pipeline=(template_uri is None and template_spec is None))
what_if_poller = mgmt_client.what_if_at_tenant_scope(deployment_name, deployment_location, what_if_properties)
return _what_if_deploy_arm_template_core(cmd.cli_ctx, what_if_poller, no_pretty_print, exclude_change_types)
def _what_if_deploy_arm_template_core(cli_ctx, what_if_poller, no_pretty_print, exclude_change_types):
what_if_result = LongRunningOperation(cli_ctx)(what_if_poller)
if what_if_result.error:
# The status code is 200 even when there's an error, because
# it is technically a successful What-If operation. The error
# is on the ARM template but not the operation.
err_message = _build_preflight_error_message(what_if_result.error)
raise CLIError(err_message)
if exclude_change_types:
exclude_change_types = set(map(lambda x: x.lower(), exclude_change_types))
what_if_result.changes = list(
filter(lambda x: x.change_type.lower() not in exclude_change_types, what_if_result.changes)
)
if no_pretty_print:
return what_if_result
try:
if cli_ctx.enable_color:
            # Disabling colorama since it will silently strip out the Xterm 256 color codes the What-If formatter
            # is using. Unfortunately, the colors that colorama supports are very limited, which doesn't meet our needs.
from colorama import deinit
deinit()
# Enable virtual terminal mode for Windows console so it processes color codes.
if platform.system() == "Windows":
from ._win_vt import enable_vt_mode
enable_vt_mode()
print(format_what_if_operation_result(what_if_result, cli_ctx.enable_color))
finally:
if cli_ctx.enable_color:
from colorama import init
init()
return None
def _build_preflight_error_message(preflight_error):
err_messages = [f'{preflight_error.code} - {preflight_error.message}']
for detail in preflight_error.details or []:
err_messages.append(_build_preflight_error_message(detail))
return '\n'.join(err_messages)
def _prepare_deployment_properties_unmodified(cmd, template_file=None, template_uri=None, parameters=None,
mode=None, rollback_on_error=None, no_prompt=False, template_spec=None):
cli_ctx = cmd.cli_ctx
DeploymentProperties, TemplateLink, OnErrorDeployment = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
'DeploymentProperties', 'TemplateLink',
'OnErrorDeployment', mod='models')
template_link = None
template_obj = None
on_error_deployment = None
template_content = None
if template_uri:
template_link = TemplateLink(uri=template_uri)
template_obj = _remove_comments_from_json(_urlretrieve(template_uri).decode('utf-8'), file_path=template_uri)
elif template_spec:
template_link = TemplateLink(id=template_spec, mode="Incremental")
template_obj = show_resource(cmd=cmd, resource_ids=[template_spec]).properties['template']
else:
template_content = read_file_content(template_file)
template_obj = _remove_comments_from_json(template_content, file_path=template_file)
if rollback_on_error == '':
on_error_deployment = OnErrorDeployment(type='LastSuccessful')
elif rollback_on_error:
on_error_deployment = OnErrorDeployment(type='SpecificDeployment', deployment_name=rollback_on_error)
template_param_defs = template_obj.get('parameters', {})
template_obj['resources'] = template_obj.get('resources', [])
parameters = _process_parameters(template_param_defs, parameters) or {}
parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters, no_prompt)
parameters = json.loads(json.dumps(parameters))
properties = DeploymentProperties(template=template_content, template_link=template_link,
parameters=parameters, mode=mode, on_error_deployment=on_error_deployment)
return properties
def _prepare_deployment_what_if_properties(cmd, template_file, template_uri, parameters,
mode, result_format, no_prompt, template_spec):
DeploymentWhatIfProperties, DeploymentWhatIfSettings = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
'DeploymentWhatIfProperties', 'DeploymentWhatIfSettings',
mod='models')
deployment_properties = _prepare_deployment_properties_unmodified(cmd=cmd, template_file=template_file, template_uri=template_uri,
parameters=parameters, mode=mode, no_prompt=no_prompt, template_spec=template_spec)
deployment_what_if_properties = DeploymentWhatIfProperties(template=deployment_properties.template, template_link=deployment_properties.template_link,
parameters=deployment_properties.parameters, mode=deployment_properties.mode,
what_if_settings=DeploymentWhatIfSettings(result_format=result_format))
return deployment_what_if_properties
def _get_deployment_management_client(cli_ctx, aux_subscriptions=None, aux_tenants=None, plug_pipeline=True):
smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, aux_subscriptions=aux_subscriptions,
aux_tenants=aux_tenants)
deployment_client = smc.deployments # This solves the multi-api for you
if plug_pipeline:
# pylint: disable=protected-access
deployment_client._serialize = JSONSerializer(
deployment_client._serialize.dependencies
)
# Plug this as default HTTP pipeline
from msrest.pipeline import Pipeline
from msrest.pipeline.requests import (
RequestsCredentialsPolicy,
RequestsPatchSession,
PipelineRequestsHTTPSender
)
from msrest.universal_http.requests import RequestsHTTPSender
smc.config.pipeline = Pipeline(
policies=[
JsonCTemplatePolicy(),
smc.config.user_agent_policy,
RequestsPatchSession(),
smc.config.http_logger_policy,
RequestsCredentialsPolicy(smc.config.credentials)
],
sender=PipelineRequestsHTTPSender(RequestsHTTPSender(smc.config))
)
return deployment_client
def _list_resources_odata_filter_builder(resource_group_name=None, resource_provider_namespace=None,
resource_type=None, name=None, tag=None, location=None):
"""Build up OData filter string from parameters """
if tag is not None:
if resource_group_name:
            raise IncorrectUsageError('you cannot use \'--tag\' with \'--resource-group\' '
                                      '(If the default value for resource group is set, please use \'az configure --defaults group=""\' command to clear it first)')
if resource_provider_namespace:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--namespace\'')
if resource_type:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--resource-type\'')
if name:
raise IncorrectUsageError('you cannot use \'--tag\' with \'--name\'')
if location:
            raise IncorrectUsageError('you cannot use \'--tag\' with \'--location\' '
                                      '(If the default value for location is set, please use \'az configure --defaults location=""\' command to clear it first)')
filters = []
if resource_group_name:
filters.append("resourceGroup eq '{}'".format(resource_group_name))
if name:
filters.append("name eq '{}'".format(name))
if location:
filters.append("location eq '{}'".format(location))
if resource_type:
if resource_provider_namespace:
f = "'{}/{}'".format(resource_provider_namespace, resource_type)
else:
if not re.match('[^/]+/[^/]+', resource_type):
raise CLIError(
'Malformed resource-type: '
'--resource-type=<namespace>/<resource-type> expected.')
            # assume resource_type is already in '<namespace>/<type>' form; at worst the server returns an error
f = "'{}'".format(resource_type)
filters.append("resourceType eq " + f)
else:
if resource_provider_namespace:
raise CLIError('--namespace also requires --resource-type')
if tag:
tag_name = list(tag.keys())[0] if isinstance(tag, dict) else tag
tag_value = tag[tag_name] if isinstance(tag, dict) else ''
if tag_name:
if tag_name[-1] == '*':
filters.append("startswith(tagname, '%s')" % tag_name[0:-1])
else:
filters.append("tagname eq '%s'" % tag_name)
if tag_value != '':
filters.append("tagvalue eq '%s'" % tag_value)
return ' and '.join(filters)
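# Example (illustrative): _list_resources_odata_filter_builder(resource_group_name='rg', name='vm1')
# returns "resourceGroup eq 'rg' and name eq 'vm1'".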
def _get_auth_provider_latest_api_version(cli_ctx):
rcf = _resource_client_factory(cli_ctx)
api_version = _ResourceUtils.resolve_api_version(rcf, 'Microsoft.Authorization', None, 'providerOperations')
return api_version
def _update_provider(cli_ctx, namespace, registering, wait):
import time
target_state = 'Registered' if registering else 'Unregistered'
rcf = _resource_client_factory(cli_ctx)
if registering:
r = rcf.providers.register(namespace)
else:
r = rcf.providers.unregister(namespace)
if r.registration_state == target_state:
return
if wait:
while True:
time.sleep(10)
rp_info = rcf.providers.get(namespace)
if rp_info.registration_state == target_state:
break
else:
action = 'Registering' if registering else 'Unregistering'
        msg_template = '%s is still ongoing. You can monitor using \'az provider show -n %s\''
logger.warning(msg_template, action, namespace)
def _build_policy_scope(subscription_id, resource_group_name, scope):
subscription_scope = '/subscriptions/' + subscription_id
if scope:
if resource_group_name:
err = "Resource group '{}' is redundant because 'scope' is supplied"
raise CLIError(err.format(resource_group_name))
elif resource_group_name:
scope = subscription_scope + '/resourceGroups/' + resource_group_name
else:
scope = subscription_scope
return scope
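# Example (illustrative, with a made-up subscription id): _build_policy_scope('0000-...', 'myRG', None)
# returns '/subscriptions/0000-.../resourceGroups/myRG'.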
def _resolve_policy_id(cmd, policy, policy_set_definition, client):
policy_id = policy or policy_set_definition
if not is_valid_resource_id(policy_id):
if policy:
policy_def = _get_custom_or_builtin_policy(cmd, client, policy)
policy_id = policy_def.id
else:
policy_set_def = _get_custom_or_builtin_policy(cmd, client, policy_set_definition, None, None, True)
policy_id = policy_set_def.id
return policy_id
def _parse_management_group_reference(name):
if _is_management_group_scope(name):
parts = name.split('/')
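        # A fully qualified management-group-scoped id looks like
        # /providers/Microsoft.Management/managementGroups/<mg>/providers/Microsoft.Authorization/policyDefinitions/<name>,
        # so parts[4] is the management group and parts[8] is the definition name.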
if len(parts) >= 9:
return parts[4], parts[8]
return None, name
def _parse_management_group_id(scope):
if _is_management_group_scope(scope):
parts = scope.split('/')
if len(parts) >= 5:
return parts[4]
return None
def _get_custom_or_builtin_policy(cmd, client, name, subscription=None, management_group=None, for_policy_set=False):
from msrest.exceptions import HttpOperationError
from msrestazure.azure_exceptions import CloudError
policy_operations = client.policy_set_definitions if for_policy_set else client.policy_definitions
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
client.config.subscription_id = subscription_id
try:
if cmd.supported_api_version(min_api='2018-03-01'):
if not management_group:
management_group, name = _parse_management_group_reference(name)
if management_group:
return policy_operations.get_at_management_group(name, management_group)
return policy_operations.get(name)
except (CloudError, HttpOperationError) as ex:
status_code = ex.status_code if isinstance(ex, CloudError) else ex.response.status_code
if status_code == 404:
try:
return policy_operations.get_built_in(name)
except CloudError as ex2:
                # When the `--policy` parameter is neither a valid policy definition name nor a
                # well-formed policy definition id, the service raises a misleading
                # 'AuthorizationFailed' error, so rewrite the exception to point at the real cause.
if ex2.status_code == 403 and ex2.error and ex2.error.error == 'AuthorizationFailed':
raise IncorrectUsageError('\'--policy\' should be a valid name or id of the policy definition')
raise ex2
raise
def _load_file_string_or_uri(file_or_string_or_uri, name, required=True):
if file_or_string_or_uri is None:
if required:
raise CLIError('--{} is required'.format(name))
return None
url = urlparse(file_or_string_or_uri)
if url.scheme == 'http' or url.scheme == 'https' or url.scheme == 'file':
response = urlopen(file_or_string_or_uri)
reader = codecs.getreader('utf-8')
result = json.load(reader(response))
response.close()
return result
if os.path.exists(file_or_string_or_uri):
return get_file_json(file_or_string_or_uri)
return shell_safe_json_parse(file_or_string_or_uri)
def _call_subscription_get(cmd, lock_client, *args):
if cmd.supported_api_version(max_api='2015-01-01'):
return lock_client.management_locks.get(*args)
return lock_client.management_locks.get_at_subscription_level(*args)
def _extract_lock_params(resource_group_name, resource_provider_namespace,
resource_type, resource_name):
if resource_group_name is None:
return (None, None, None, None)
if resource_name is None:
return (resource_group_name, None, None, None)
parts = resource_type.split('/', 2)
if resource_provider_namespace is None and len(parts) == 2:
resource_provider_namespace = parts[0]
resource_type = parts[1]
return (resource_group_name, resource_name, resource_provider_namespace, resource_type)
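# Example (illustrative): _extract_lock_params('rg', None, 'Microsoft.Web/sites', 'mysite')
# returns ('rg', 'mysite', 'Microsoft.Web', 'sites').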
def _update_lock_parameters(parameters, level, notes):
if level is not None:
parameters.level = level
if notes is not None:
parameters.notes = notes
def _validate_resource_inputs(resource_group_name, resource_provider_namespace,
resource_type, resource_name):
if resource_group_name is None:
raise CLIError('--resource-group/-g is required.')
if resource_type is None:
raise CLIError('--resource-type is required')
if resource_name is None:
raise CLIError('--name/-n is required')
if resource_provider_namespace is None:
raise CLIError('--namespace is required')
# region Custom Commands
def list_resource_groups(cmd, tag=None): # pylint: disable=no-self-use
""" List resource groups, optionally filtered by a tag.
:param str tag:tag to filter by in 'key[=value]' format
"""
rcf = _resource_client_factory(cmd.cli_ctx)
filters = []
if tag:
key = list(tag.keys())[0]
filters.append("tagname eq '{}'".format(key))
filters.append("tagvalue eq '{}'".format(tag[key]))
filter_text = ' and '.join(filters) if filters else None
groups = rcf.resource_groups.list(filter=filter_text)
return list(groups)
def create_resource_group(cmd, rg_name, location, tags=None, managed_by=None):
""" Create a new resource group.
    :param str rg_name:the desired resource group name
:param str location:the resource group location
:param str tags:tags in 'a=b c' format
"""
rcf = _resource_client_factory(cmd.cli_ctx)
ResourceGroup = cmd.get_models('ResourceGroup')
parameters = ResourceGroup(
location=location,
tags=tags
)
if cmd.supported_api_version(min_api='2016-09-01'):
parameters.managed_by = managed_by
return rcf.resource_groups.create_or_update(rg_name, parameters)
def update_resource_group(instance, tags=None):
if tags is not None:
instance.tags = tags
return instance
def export_group_as_template(
cmd, resource_group_name, include_comments=False, include_parameter_default_value=False, resource_ids=None, skip_resource_name_params=False, skip_all_params=False):
"""Captures a resource group as a template.
:param str resource_group_name: the name of the resource group.
:param resource_ids: space-separated resource ids to filter the export by. To export all resources, do not specify this argument or supply "*".
:param bool include_comments: export template with comments.
:param bool include_parameter_default_value: export template parameter with default value.
:param bool skip_resource_name_params: export template and skip resource name parameterization.
:param bool skip_all_params: export template parameter and skip all parameterization.
"""
rcf = _resource_client_factory(cmd.cli_ctx)
export_options = []
if include_comments:
export_options.append('IncludeComments')
if include_parameter_default_value:
export_options.append('IncludeParameterDefaultValue')
if skip_resource_name_params:
export_options.append('SkipResourceNameParameterization')
if skip_all_params:
export_options.append('SkipAllParameterization')
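    # An omitted --resource-ids (or a literal "*") exports every resource in the group.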
resources = []
if resource_ids is None or resource_ids[0] == "*":
resources = ["*"]
else:
for i in resource_ids:
if is_valid_resource_id(i):
resources.append(i)
else:
raise CLIError('az resource: error: argument --resource-ids: invalid ResourceId value: \'%s\'' % i)
options = ','.join(export_options) if export_options else None
# Exporting a resource group as a template is async since API version 2019-08-01.
if cmd.supported_api_version(min_api='2019-08-01'):
result_poller = rcf.resource_groups.export_template(resource_group_name, resources, options=options)
result = LongRunningOperation(cmd.cli_ctx)(result_poller)
else:
result = rcf.resource_groups.export_template(resource_group_name, resources, options=options)
# pylint: disable=no-member
# On error, server still returns 200, with details in the error attribute
if result.error:
error = result.error
try:
logger.warning(error.message)
except AttributeError:
logger.warning(str(error))
for detail in getattr(error, 'details', None) or []:
logger.error(detail.message)
return result.template
def create_application(cmd, resource_group_name,
application_name, managedby_resource_group_id,
kind, managedapp_definition_id=None, location=None,
plan_name=None, plan_publisher=None, plan_product=None,
plan_version=None, tags=None, parameters=None):
""" Create a new managed application.
:param str resource_group_name:the desired resource group name
:param str application_name:the managed application name
    :param str kind:the managed application kind; can be 'marketplace' or 'servicecatalog'
:param str plan_name:the managed application package plan name
:param str plan_publisher:the managed application package plan publisher
:param str plan_product:the managed application package plan product
:param str plan_version:the managed application package plan version
:param str tags:tags in 'a=b c' format
"""
from azure.mgmt.resource.managedapplications.models import Application, Plan
racf = _resource_managedapps_client_factory(cmd.cli_ctx)
rcf = _resource_client_factory(cmd.cli_ctx)
if not location:
location = rcf.resource_groups.get(resource_group_name).location
application = Application(
location=location,
managed_resource_group_id=managedby_resource_group_id,
kind=kind,
tags=tags
)
if kind.lower() == 'servicecatalog':
if managedapp_definition_id:
application.application_definition_id = managedapp_definition_id
else:
raise CLIError('--managedapp-definition-id is required if kind is ServiceCatalog')
elif kind.lower() == 'marketplace':
if (plan_name is None and plan_product is None and
plan_publisher is None and plan_version is None):
            raise CLIError('--plan-name, --plan-product, --plan-publisher and '
                           '--plan-version are all required if kind is MarketPlace')
application.plan = Plan(name=plan_name, publisher=plan_publisher, product=plan_product, version=plan_version)
applicationParameters = None
if parameters:
if os.path.exists(parameters):
applicationParameters = get_file_json(parameters)
else:
applicationParameters = shell_safe_json_parse(parameters)
application.parameters = applicationParameters
return racf.applications.create_or_update(resource_group_name, application_name, application)
def show_application(cmd, resource_group_name=None, application_name=None):
""" Gets a managed application.
:param str resource_group_name:the resource group name
:param str application_name:the managed application name
"""
racf = _resource_managedapps_client_factory(cmd.cli_ctx)
return racf.applications.get(resource_group_name, application_name)
def show_applicationdefinition(cmd, resource_group_name=None, application_definition_name=None):
""" Gets a managed application definition.
:param str resource_group_name:the resource group name
:param str application_definition_name:the managed application definition name
"""
racf = _resource_managedapps_client_factory(cmd.cli_ctx)
return racf.application_definitions.get(resource_group_name, application_definition_name)
def create_applicationdefinition(cmd, resource_group_name,
application_definition_name,
lock_level, authorizations,
description, display_name,
package_file_uri=None, create_ui_definition=None,
main_template=None, location=None, tags=None):
""" Create a new managed application definition.
:param str resource_group_name:the desired resource group name
:param str application_definition_name:the managed application definition name
:param str description:the managed application definition description
:param str display_name:the managed application definition display name
:param str package_file_uri:the managed application definition package file uri
:param str create_ui_definition:the managed application definition create ui definition
:param str main_template:the managed application definition main template
:param str tags:tags in 'a=b c' format
"""
from azure.mgmt.resource.managedapplications.models import ApplicationDefinition, ApplicationProviderAuthorization
if not package_file_uri and not create_ui_definition and not main_template:
raise CLIError('usage error: --package-file-uri <url> | --create-ui-definition --main-template')
if package_file_uri:
if create_ui_definition or main_template:
raise CLIError('usage error: must not specify --create-ui-definition --main-template')
if not package_file_uri:
if not create_ui_definition or not main_template:
raise CLIError('usage error: must specify --create-ui-definition --main-template')
racf = _resource_managedapps_client_factory(cmd.cli_ctx)
rcf = _resource_client_factory(cmd.cli_ctx)
if not location:
location = rcf.resource_groups.get(resource_group_name).location
authorizations = authorizations or []
applicationAuthList = []
for name_value in authorizations:
        # split at the first ':'; neither principalId nor roleDefinitionId should contain a ':'
principalId, roleDefinitionId = name_value.split(':', 1)
applicationAuth = ApplicationProviderAuthorization(
principal_id=principalId,
role_definition_id=roleDefinitionId)
applicationAuthList.append(applicationAuth)
applicationDef = ApplicationDefinition(lock_level=lock_level,
authorizations=applicationAuthList,
package_file_uri=package_file_uri)
applicationDef.display_name = display_name
applicationDef.description = description
applicationDef.location = location
applicationDef.package_file_uri = package_file_uri
applicationDef.create_ui_definition = create_ui_definition
applicationDef.main_template = main_template
applicationDef.tags = tags
return racf.application_definitions.create_or_update(resource_group_name,
application_definition_name, applicationDef)
def list_applications(cmd, resource_group_name=None):
racf = _resource_managedapps_client_factory(cmd.cli_ctx)
if resource_group_name:
applications = racf.applications.list_by_resource_group(resource_group_name)
else:
applications = racf.applications.list_by_subscription()
return list(applications)
def list_deployments_at_subscription_scope(cmd, filter_string=None):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.list_at_subscription_scope(filter=filter_string)
def list_deployments_at_resource_group(cmd, resource_group_name, filter_string=None):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.list_by_resource_group(resource_group_name, filter=filter_string)
def list_deployments_at_management_group(cmd, management_group_id, filter_string=None):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.list_at_management_group_scope(management_group_id, filter=filter_string)
def list_deployments_at_tenant_scope(cmd, filter_string=None):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.list_at_tenant_scope(filter=filter_string)
def get_deployment_at_subscription_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.get_at_subscription_scope(deployment_name)
def get_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.get(resource_group_name, deployment_name)
def get_deployment_at_management_group(cmd, management_group_id, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.get_at_management_group_scope(management_group_id, deployment_name)
def get_deployment_at_tenant_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.get_at_tenant_scope(deployment_name)
def delete_deployment_at_subscription_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.delete_at_subscription_scope(deployment_name)
def delete_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.delete(resource_group_name, deployment_name)
def delete_deployment_at_management_group(cmd, management_group_id, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.delete_at_management_group_scope(management_group_id, deployment_name)
def delete_deployment_at_tenant_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.delete_at_tenant_scope(deployment_name)
def cancel_deployment_at_subscription_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.cancel_at_subscription_scope(deployment_name)
def cancel_deployment_at_resource_group(cmd, resource_group_name, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.cancel(resource_group_name, deployment_name)
def cancel_deployment_at_management_group(cmd, management_group_id, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.cancel_at_management_group_scope(management_group_id, deployment_name)
def cancel_deployment_at_tenant_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployments.cancel_at_tenant_scope(deployment_name)
# pylint: disable=unused-argument
def deploy_arm_template(cmd, resource_group_name,
template_file=None, template_uri=None, deployment_name=None,
parameters=None, mode=None, rollback_on_error=None, no_wait=False,
handle_extended_json_format=None, aux_subscriptions=None, aux_tenants=None,
no_prompt=False):
return _deploy_arm_template_core_unmodified(cmd, resource_group_name=resource_group_name,
template_file=template_file, template_uri=template_uri,
deployment_name=deployment_name, parameters=parameters, mode=mode,
rollback_on_error=rollback_on_error, no_wait=no_wait,
aux_subscriptions=aux_subscriptions, aux_tenants=aux_tenants,
no_prompt=no_prompt)
# pylint: disable=unused-argument
def validate_arm_template(cmd, resource_group_name, template_file=None, template_uri=None,
parameters=None, mode=None, rollback_on_error=None, handle_extended_json_format=None,
no_prompt=False):
return _deploy_arm_template_core_unmodified(cmd, resource_group_name, template_file, template_uri,
'deployment_dry_run', parameters, mode, rollback_on_error,
validate_only=True, no_prompt=no_prompt)
def export_template_at_subscription_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
result = rcf.deployments.export_template_at_subscription_scope(deployment_name)
print(json.dumps(result.template, indent=2)) # pylint: disable=no-member
def export_template_at_resource_group(cmd, resource_group_name, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
result = rcf.deployments.export_template(resource_group_name, deployment_name)
print(json.dumps(result.template, indent=2)) # pylint: disable=no-member
def export_template_at_management_group(cmd, management_group_id, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
result = rcf.deployments.export_template_at_management_group_scope(management_group_id, deployment_name)
print(json.dumps(result.template, indent=2)) # pylint: disable=no-member
def export_template_at_tenant_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
result = rcf.deployments.export_template_at_tenant_scope(deployment_name)
print(json.dumps(result.template, indent=2)) # pylint: disable=no-member
def export_deployment_as_template(cmd, resource_group_name, deployment_name):
smc = _resource_client_factory(cmd.cli_ctx)
result = smc.deployments.export_template(resource_group_name, deployment_name)
print(json.dumps(result.template, indent=2)) # pylint: disable=no-member
def create_resource(cmd, properties,
resource_group_name=None, resource_provider_namespace=None,
parent_resource_path=None, resource_type=None, resource_name=None,
resource_id=None, api_version=None, location=None, is_full_object=False,
latest_include_preview=False):
res = _ResourceUtils(cmd.cli_ctx, resource_group_name, resource_provider_namespace,
parent_resource_path, resource_type, resource_name,
resource_id, api_version, latest_include_preview=latest_include_preview)
return res.create_resource(properties, location, is_full_object)
def _get_parsed_resource_ids(resource_ids):
"""
    Return a generator of parsed resource ids. Raise CLIError when an invalid resource id is encountered.
"""
if not resource_ids:
return None
for rid in resource_ids:
if not is_valid_resource_id(rid):
raise CLIError('az resource: error: argument --ids: invalid ResourceId value: \'%s\'' % rid)
return ({'resource_id': rid} for rid in resource_ids)
def _get_rsrc_util_from_parsed_id(cli_ctx, parsed_id, api_version, latest_include_preview=False):
return _ResourceUtils(cli_ctx,
parsed_id.get('resource_group', None),
parsed_id.get('resource_namespace', None),
parsed_id.get('resource_parent', None),
parsed_id.get('resource_type', None),
parsed_id.get('resource_name', None),
parsed_id.get('resource_id', None),
api_version,
latest_include_preview=latest_include_preview)
def _create_parsed_id(cli_ctx, resource_group_name=None, resource_provider_namespace=None, parent_resource_path=None,
resource_type=None, resource_name=None):
from azure.cli.core.commands.client_factory import get_subscription_id
subscription = get_subscription_id(cli_ctx)
return {
'resource_group': resource_group_name,
'resource_namespace': resource_provider_namespace,
'resource_parent': parent_resource_path,
'resource_type': resource_type,
'resource_name': resource_name,
'subscription': subscription
}
def _single_or_collection(obj, default=None):
if not obj:
return default
if isinstance(obj, list) and len(obj) == 1:
return obj[0]
return obj
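# Example (illustrative): a single-element list is unwrapped, so commands invoked with one
# --ids value return an object rather than a one-item list; _single_or_collection([x]) == x.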
# pylint: disable=unused-argument
def show_resource(cmd, resource_ids=None, resource_group_name=None,
resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
resource_name=None, api_version=None, include_response_body=False, latest_include_preview=False):
parsed_ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name)]
return _single_or_collection(
[_get_rsrc_util_from_parsed_id(cmd.cli_ctx, id_dict, api_version, latest_include_preview).get_resource(
include_response_body) for id_dict in parsed_ids])
# pylint: disable=unused-argument
def delete_resource(cmd, resource_ids=None, resource_group_name=None,
resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
resource_name=None, api_version=None, latest_include_preview=False):
"""
Deletes the given resource(s).
This function allows deletion of ids with dependencies on one another.
This is done with multiple passes through the given ids.
"""
parsed_ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name)]
to_be_deleted = [(_get_rsrc_util_from_parsed_id(cmd.cli_ctx, id_dict, api_version, latest_include_preview), id_dict)
for id_dict in parsed_ids]
results = []
from msrestazure.azure_exceptions import CloudError
while to_be_deleted:
logger.debug("Start new loop to delete resources.")
operations = []
failed_to_delete = []
for rsrc_utils, id_dict in to_be_deleted:
try:
operations.append(rsrc_utils.delete())
resource = _build_resource_id(**id_dict) or resource_name
logger.debug("deleting %s", resource)
except CloudError as e:
# request to delete failed, add parsed id dict back to queue
id_dict['exception'] = str(e)
failed_to_delete.append((rsrc_utils, id_dict))
to_be_deleted = failed_to_delete
# stop deleting if none deletable
if not operations:
break
        # wait for every delete operation to finish before starting the next pass
for operation in operations:
results.append(operation.result())
if to_be_deleted:
error_msg_builder = ['Some resources failed to be deleted (run with `--verbose` for more information):']
for _, id_dict in to_be_deleted:
logger.info(id_dict['exception'])
resource_id = _build_resource_id(**id_dict) or id_dict['resource_id']
error_msg_builder.append(resource_id)
raise CLIError(os.linesep.join(error_msg_builder))
return _single_or_collection(results)
# pylint: disable=unused-argument
def update_resource(cmd, parameters, resource_ids=None,
resource_group_name=None, resource_provider_namespace=None,
parent_resource_path=None, resource_type=None, resource_name=None, api_version=None,
latest_include_preview=False):
parsed_ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name)]
return _single_or_collection(
[_get_rsrc_util_from_parsed_id(cmd.cli_ctx, id_dict, api_version, latest_include_preview).update(parameters)
for id_dict in parsed_ids])
# pylint: disable=unused-argument
def tag_resource(cmd, tags, resource_ids=None, resource_group_name=None, resource_provider_namespace=None,
parent_resource_path=None, resource_type=None, resource_name=None, api_version=None,
is_incremental=None, latest_include_preview=False):
""" Updates the tags on an existing resource. To clear tags, specify the --tag option
without anything else. """
parsed_ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name)]
return _single_or_collection(
[_get_rsrc_util_from_parsed_id(cmd.cli_ctx, id_dict, api_version, latest_include_preview).tag(
tags, is_incremental) for id_dict in parsed_ids])
# pylint: disable=unused-argument
def invoke_resource_action(cmd, action, request_body=None, resource_ids=None,
resource_group_name=None, resource_provider_namespace=None,
parent_resource_path=None, resource_type=None, resource_name=None,
api_version=None, latest_include_preview=False):
""" Invokes the provided action on an existing resource."""
parsed_ids = _get_parsed_resource_ids(resource_ids) or [_create_parsed_id(cmd.cli_ctx,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name)]
return _single_or_collection(
[_get_rsrc_util_from_parsed_id(cmd.cli_ctx, id_dict, api_version, latest_include_preview).invoke_action(
action, request_body) for id_dict in parsed_ids])
def get_deployment_operations(client, resource_group_name, deployment_name, operation_ids):
"""get a deployment's operation."""
result = []
for op_id in operation_ids:
dep = client.get(resource_group_name, deployment_name, op_id)
result.append(dep)
return result
def get_deployment_operations_at_subscription_scope(client, deployment_name, operation_ids):
result = []
for op_id in operation_ids:
deployment = client.get_at_subscription_scope(deployment_name, op_id)
result.append(deployment)
return result
def get_deployment_operations_at_resource_group(client, resource_group_name, deployment_name, operation_ids):
result = []
for op_id in operation_ids:
dep = client.get(resource_group_name, deployment_name, op_id)
result.append(dep)
return result
def get_deployment_operations_at_management_group(client, management_group_id, deployment_name, operation_ids):
result = []
for op_id in operation_ids:
dep = client.get_at_management_group_scope(management_group_id, deployment_name, op_id)
result.append(dep)
return result
def get_deployment_operations_at_tenant_scope(client, deployment_name, operation_ids):
result = []
for op_id in operation_ids:
dep = client.get_at_tenant_scope(deployment_name, op_id)
result.append(dep)
return result
def list_deployment_scripts(cmd, resource_group_name=None):
rcf = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
if resource_group_name is not None:
return rcf.deployment_scripts.list_by_resource_group(resource_group_name)
return rcf.deployment_scripts.list_by_subscription()
def get_deployment_script(cmd, resource_group_name, name):
rcf = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
return rcf.deployment_scripts.get(resource_group_name, name)
def get_deployment_script_logs(cmd, resource_group_name, name):
rcf = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
return rcf.deployment_scripts.get_logs(resource_group_name, name)
def delete_deployment_script(cmd, resource_group_name, name):
rcf = _resource_deploymentscripts_client_factory(cmd.cli_ctx)
rcf.deployment_scripts.delete(resource_group_name, name)
def get_template_spec(cmd, resource_group_name=None, name=None, version=None, template_spec=None):
if template_spec:
id_parts = parse_resource_id(template_spec)
resource_group_name = id_parts.get('resource_group')
name = id_parts.get('name')
version = id_parts.get('resource_name')
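        # For a version-less template spec id, parse_resource_id reports the spec name as
        # 'resource_name', so an equal value means no version segment was supplied.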
if version == name:
version = None
rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
if version:
return rcf.template_spec_versions.get(resource_group_name, name, version)
return rcf.template_specs.get(resource_group_name, name)
def create_template_spec(cmd, resource_group_name, name, template_file=None, location=None, display_name=None,
description=None, version=None, version_description=None):
artifacts = None
input_template = None
if location is None:
rcf = _resource_client_factory(cmd.cli_ctx)
location = rcf.resource_groups.get(resource_group_name).location
rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
if version:
if template_file:
from azure.cli.command_modules.resource._packing_engine import (pack)
packed_template = pack(cmd, template_file)
input_template = getattr(packed_template, 'RootTemplate')
artifacts = getattr(packed_template, 'Artifacts')
try: # Check if parent template spec already exists.
rcf.template_specs.get(resource_group_name=resource_group_name, template_spec_name=name)
except Exception: # pylint: disable=broad-except
TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
template_spec_parent = TemplateSpec(location=location, description=description, display_name=display_name, tags=None)
rcf.template_specs.create_or_update(resource_group_name, name, template_spec_parent)
TemplateSpecVersion = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpecVersion', mod='models')
template_spec_child = TemplateSpecVersion(location=location, artifacts=artifacts, description=version_description, template=input_template, tags=None)
return rcf.template_spec_versions.create_or_update(resource_group_name, name, version, template_spec_child)
TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
template_spec_parent = TemplateSpec(location=location, description=description, display_name=display_name, tags=None)
return rcf.template_specs.create_or_update(resource_group_name, name, template_spec_parent)
def update_template_spec(cmd, resource_group_name=None, name=None, template_spec=None, template_file=None, display_name=None,
description=None, version=None, version_description=None):
rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
if template_spec:
id_parts = parse_resource_id(template_spec)
resource_group_name = id_parts.get('resource_group')
name = id_parts.get('name')
version = id_parts.get('resource_name')
if version == name:
version = None
existing_template = None
artifacts = None
if template_file:
from azure.cli.command_modules.resource._packing_engine import (pack)
packed_template = pack(cmd, template_file)
input_template = getattr(packed_template, 'RootTemplate')
artifacts = getattr(packed_template, 'Artifacts')
if version:
existing_template = rcf.template_spec_versions.get(resource_group_name=resource_group_name, template_spec_name=name, template_spec_version=version)
location = getattr(existing_template, 'location')
version_tags = getattr(existing_template, 'tags')
if version_description is None:
version_description = getattr(existing_template, 'description')
if template_file is None:
input_template = getattr(existing_template, 'template')
TemplateSpecVersion = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpecVersion', mod='models')
updated_template_spec = TemplateSpecVersion(location=location, artifacts=artifacts, description=version_description, template=input_template, tags=version_tags)
return rcf.template_spec_versions.create_or_update(resource_group_name, name, version, updated_template_spec)
existing_template = rcf.template_specs.get(resource_group_name=resource_group_name, template_spec_name=name)
location = getattr(existing_template, 'location')
tags = getattr(existing_template, 'tags')
if display_name is None:
display_name = getattr(existing_template, 'display_name')
if description is None:
description = getattr(existing_template, 'description')
TemplateSpec = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS, 'TemplateSpec', mod='models')
root_template = TemplateSpec(location=location, description=description, display_name=display_name, tags=tags)
return rcf.template_specs.create_or_update(resource_group_name, name, root_template)
def export_template_spec(cmd, output_folder, resource_group_name=None, name=None, version=None, template_spec=None):
rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
if template_spec:
id_parts = parse_resource_id(template_spec)
resource_group_name = id_parts.get('resource_group')
name = id_parts.get('name')
version = id_parts.get('resource_name')
if version == name:
version = None
exported_template = rcf.template_spec_versions.get(resource_group_name, name, version) if version else rcf.template_specs.get(resource_group_name, name)
from azure.cli.command_modules.resource._packing_engine import (unpack)
return unpack(cmd, exported_template, output_folder, (str(name) + '.JSON'))
def delete_template_spec(cmd, resource_group_name=None, name=None, version=None, template_spec=None):
rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
if template_spec:
id_parts = parse_resource_id(template_spec)
resource_group_name = id_parts.get('resource_group')
name = id_parts.get('name')
version = id_parts.get('resource_name')
if version == name:
version = None
if version:
        return rcf.template_spec_versions.delete(resource_group_name=resource_group_name, template_spec_name=name, template_spec_version=version)
return rcf.template_specs.delete(resource_group_name=resource_group_name, template_spec_name=name)
def list_template_specs(cmd, resource_group_name=None, name=None):
rcf = _resource_templatespecs_client_factory(cmd.cli_ctx)
if resource_group_name is not None:
if name is not None:
return rcf.template_spec_versions.list(resource_group_name=resource_group_name, template_spec_name=name)
return rcf.template_specs.list_by_resource_group(resource_group_name)
return rcf.template_specs.list_by_subscription()
def list_deployment_operations_at_subscription_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.list_at_subscription_scope(deployment_name)
def list_deployment_operations_at_resource_group(cmd, resource_group_name, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.list(resource_group_name, deployment_name)
def list_deployment_operations_at_management_group(cmd, management_group_id, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.list_at_management_group_scope(management_group_id, deployment_name)
def list_deployment_operations_at_tenant_scope(cmd, deployment_name):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.list_at_tenant_scope(deployment_name)
def get_deployment_operation_at_subscription_scope(cmd, deployment_name, op_id):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.get_at_subscription_scope(deployment_name, op_id)
def get_deployment_operation_at_resource_group(cmd, resource_group_name, deployment_name, op_id):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.get(resource_group_name, deployment_name, op_id)
def get_deployment_operation_at_management_group(cmd, management_group_id, deployment_name, op_id):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.get_at_management_group_scope(management_group_id, deployment_name, op_id)
def get_deployment_operation_at_tenant_scope(cmd, deployment_name, op_id):
rcf = _resource_client_factory(cmd.cli_ctx)
return rcf.deployment_operations.get_at_tenant_scope(deployment_name, op_id)
def list_resources(cmd, resource_group_name=None,
resource_provider_namespace=None, resource_type=None, name=None, tag=None,
location=None):
rcf = _resource_client_factory(cmd.cli_ctx)
if resource_group_name is not None:
rcf.resource_groups.get(resource_group_name)
odata_filter = _list_resources_odata_filter_builder(resource_group_name,
resource_provider_namespace,
resource_type, name, tag, location)
expand = "createdTime,changedTime,provisioningState"
resources = rcf.resources.list(filter=odata_filter, expand=expand)
return list(resources)
def register_provider(cmd, resource_provider_namespace, wait=False):
_update_provider(cmd.cli_ctx, resource_provider_namespace, registering=True, wait=wait)
def unregister_provider(cmd, resource_provider_namespace, wait=False):
_update_provider(cmd.cli_ctx, resource_provider_namespace, registering=False, wait=wait)
def list_provider_operations(cmd):
auth_client = _authorization_management_client(cmd.cli_ctx)
return auth_client.provider_operations_metadata.list()
def show_provider_operations(cmd, resource_provider_namespace):
version = getattr(get_api_version(cmd.cli_ctx, ResourceType.MGMT_AUTHORIZATION), 'provider_operations_metadata')
auth_client = _authorization_management_client(cmd.cli_ctx)
if version == '2015-07-01':
return auth_client.provider_operations_metadata.get(resource_provider_namespace, version)
return auth_client.provider_operations_metadata.get(resource_provider_namespace)
def move_resource(cmd, ids, destination_group, destination_subscription_id=None):
"""Moves resources from one resource group to another(can be under different subscription)
:param ids: the space-separated resource ids to be moved
:param destination_group: the destination resource group name
:param destination_subscription_id: the destination subscription identifier
"""
# verify all resource ids are valid and under the same group
resources = []
for i in ids:
if is_valid_resource_id(i):
resources.append(parse_resource_id(i))
else:
raise CLIError('Invalid id "{}", as it has no group or subscription field'.format(i))
if len({r['subscription'] for r in resources}) > 1:
raise CLIError('All resources should be under the same subscription')
if len({r['resource_group'] for r in resources}) > 1:
raise CLIError('All resources should be under the same group')
rcf = _resource_client_factory(cmd.cli_ctx)
target = _build_resource_id(subscription=(destination_subscription_id or rcf.config.subscription_id),
resource_group=destination_group)
return rcf.resources.move_resources(resources[0]['resource_group'], ids, target)
def list_features(client, resource_provider_namespace=None):
if resource_provider_namespace:
return client.list(resource_provider_namespace=resource_provider_namespace)
return client.list_all()
def register_feature(client, resource_provider_namespace, feature_name):
logger.warning("Once the feature '%s' is registered, invoking 'az provider register -n %s' is required "
"to get the change propagated", feature_name, resource_provider_namespace)
return client.register(resource_provider_namespace, feature_name)
def unregister_feature(client, resource_provider_namespace, feature_name):
logger.warning("Once the feature '%s' is unregistered, invoking 'az provider register -n %s' is required "
"to get the change propagated", feature_name, resource_provider_namespace)
return client.unregister(resource_provider_namespace, feature_name)
# pylint: disable=inconsistent-return-statements,too-many-locals
def create_policy_assignment(cmd, policy=None, policy_set_definition=None,
name=None, display_name=None, params=None,
resource_group_name=None, scope=None, sku=None,
not_scopes=None, location=None, assign_identity=None,
identity_scope=None, identity_role='Contributor', enforcement_mode='Default'):
"""Creates a policy assignment
:param not_scopes: Space-separated scopes where the policy assignment does not apply.
"""
if bool(policy) == bool(policy_set_definition):
raise CLIError('usage error: --policy NAME_OR_ID | '
'--policy-set-definition NAME_OR_ID')
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
scope = _build_policy_scope(policy_client.config.subscription_id,
resource_group_name, scope)
policy_id = _resolve_policy_id(cmd, policy, policy_set_definition, policy_client)
params = _load_file_string_or_uri(params, 'params', False)
PolicyAssignment = cmd.get_models('PolicyAssignment')
assignment = PolicyAssignment(display_name=display_name, policy_definition_id=policy_id, scope=scope, enforcement_mode=enforcement_mode)
assignment.parameters = params if params else None
if cmd.supported_api_version(min_api='2017-06-01-preview'):
if not_scopes:
kwargs_list = []
for id_arg in not_scopes.split(' '):
if parse_resource_id(id_arg):
kwargs_list.append(id_arg)
else:
logger.error('az policy assignment create error: argument --not-scopes: \
invalid notscopes value: \'%s\'', id_arg)
return
assignment.not_scopes = kwargs_list
PolicySku = cmd.get_models('PolicySku')
policySku = PolicySku(name='A0', tier='Free')
if sku:
policySku = policySku if sku.lower() == 'free' else PolicySku(name='A1', tier='Standard')
assignment.sku = policySku
if cmd.supported_api_version(min_api='2018-05-01'):
if location:
assignment.location = location
identity = None
if assign_identity is not None:
identity = _build_identities_info(cmd, assign_identity)
assignment.identity = identity
if name is None:
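        # Generate a random, URL-safe 22-character name: base64 of a fresh UUID with the '==' padding stripped.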
name = (base64.urlsafe_b64encode(uuid.uuid4().bytes).decode())[:-2]
createdAssignment = policy_client.policy_assignments.create(scope, name, assignment)
# Create the identity's role assignment if requested
if assign_identity is not None and identity_scope:
from azure.cli.core.commands.arm import assign_identity as _assign_identity_helper
_assign_identity_helper(cmd.cli_ctx, lambda: createdAssignment, lambda resource: createdAssignment, identity_role, identity_scope)
return createdAssignment
def _build_identities_info(cmd, identities):
identities = identities or []
ResourceIdentityType = cmd.get_models('ResourceIdentityType')
identity_type = ResourceIdentityType.none
if not identities or MSI_LOCAL_ID in identities:
identity_type = ResourceIdentityType.system_assigned
ResourceIdentity = cmd.get_models('Identity')
return ResourceIdentity(type=identity_type)
def delete_policy_assignment(cmd, name, resource_group_name=None, scope=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
scope = _build_policy_scope(policy_client.config.subscription_id,
resource_group_name, scope)
policy_client.policy_assignments.delete(scope, name)
def show_policy_assignment(cmd, name, resource_group_name=None, scope=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
scope = _build_policy_scope(policy_client.config.subscription_id,
resource_group_name, scope)
return policy_client.policy_assignments.get(scope, name)
def list_policy_assignment(cmd, disable_scope_strict_match=None, resource_group_name=None, scope=None):
from azure.cli.core.commands.client_factory import get_subscription_id
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
_scope = _build_policy_scope(get_subscription_id(cmd.cli_ctx),
resource_group_name, scope)
id_parts = parse_resource_id(_scope)
subscription = id_parts.get('subscription')
resource_group = id_parts.get('resource_group')
resource_type = id_parts.get('child_type_1') or id_parts.get('type')
resource_name = id_parts.get('child_name_1') or id_parts.get('name')
management_group = _parse_management_group_id(scope)
if management_group:
result = policy_client.policy_assignments.list_for_management_group(management_group_id=management_group, filter='atScope()')
elif all([resource_type, resource_group, subscription]):
namespace = id_parts.get('namespace')
parent_resource_path = '' if not id_parts.get('child_name_1') else (id_parts['type'] + '/' + id_parts['name'])
result = policy_client.policy_assignments.list_for_resource(
resource_group, namespace,
parent_resource_path, resource_type, resource_name)
elif resource_group:
result = policy_client.policy_assignments.list_for_resource_group(resource_group)
elif subscription:
result = policy_client.policy_assignments.list()
elif scope:
raise CLIError('usage error `--scope`: must be a fully qualified ARM ID.')
else:
raise CLIError('usage error: --scope ARM_ID | --resource-group NAME')
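    # Unless disabled, keep only assignments whose scope matches exactly, dropping ones inherited from parent scopes.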
if not disable_scope_strict_match:
result = [i for i in result if _scope.lower().strip('/') == i.scope.lower().strip('/')]
return result
def set_identity(cmd, name, scope=None, resource_group_name=None, identity_role='Contributor', identity_scope=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
scope = _build_policy_scope(policy_client.config.subscription_id, resource_group_name, scope)
def getter():
return policy_client.policy_assignments.get(scope, name)
def setter(policyAssignment):
policyAssignment.identity = _build_identities_info(cmd, [MSI_LOCAL_ID])
return policy_client.policy_assignments.create(scope, name, policyAssignment)
from azure.cli.core.commands.arm import assign_identity as _assign_identity_helper
updatedAssignment = _assign_identity_helper(cmd.cli_ctx, getter, setter, identity_role, identity_scope)
return updatedAssignment.identity
def show_identity(cmd, name, scope=None, resource_group_name=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
scope = _build_policy_scope(policy_client.config.subscription_id, resource_group_name, scope)
return policy_client.policy_assignments.get(scope, name).identity
def remove_identity(cmd, name, scope=None, resource_group_name=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
scope = _build_policy_scope(policy_client.config.subscription_id, resource_group_name, scope)
policyAssignment = policy_client.policy_assignments.get(scope, name)
ResourceIdentityType = cmd.get_models('ResourceIdentityType')
ResourceIdentity = cmd.get_models('Identity')
policyAssignment.identity = ResourceIdentity(type=ResourceIdentityType.none)
policyAssignment = policy_client.policy_assignments.create(scope, name, policyAssignment)
return policyAssignment.identity
def enforce_mutually_exclusive(subscription, management_group):
if subscription and management_group:
raise IncorrectUsageError('cannot provide both --subscription and --management-group')
def create_policy_definition(cmd, name, rules=None, params=None, display_name=None, description=None, mode=None,
metadata=None, subscription=None, management_group=None):
rules = _load_file_string_or_uri(rules, 'rules')
params = _load_file_string_or_uri(params, 'params', False)
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
PolicyDefinition = cmd.get_models('PolicyDefinition')
parameters = PolicyDefinition(policy_rule=rules, parameters=params, description=description,
display_name=display_name)
if cmd.supported_api_version(min_api='2016-12-01'):
parameters.mode = mode
if cmd.supported_api_version(min_api='2017-06-01-preview'):
parameters.metadata = metadata
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_definitions.create_or_update_at_management_group(name, parameters, management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_definitions.create_or_update(name, parameters)
def create_policy_setdefinition(cmd, name, definitions, params=None, display_name=None, description=None,
subscription=None, management_group=None, definition_groups=None, metadata=None):
definitions = _load_file_string_or_uri(definitions, 'definitions')
params = _load_file_string_or_uri(params, 'params', False)
definition_groups = _load_file_string_or_uri(definition_groups, 'definition_groups', False)
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
PolicySetDefinition = cmd.get_models('PolicySetDefinition')
parameters = PolicySetDefinition(policy_definitions=definitions, parameters=params, description=description,
display_name=display_name, policy_definition_groups=definition_groups)
if cmd.supported_api_version(min_api='2017-06-01-preview'):
parameters.metadata = metadata
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_set_definitions.create_or_update_at_management_group(name, parameters, management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_set_definitions.create_or_update(name, parameters)
def get_policy_definition(cmd, policy_definition_name, subscription=None, management_group=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
return _get_custom_or_builtin_policy(cmd, policy_client, policy_definition_name, subscription, management_group)
def get_policy_setdefinition(cmd, policy_set_definition_name, subscription=None, management_group=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
return _get_custom_or_builtin_policy(cmd, policy_client, policy_set_definition_name, subscription, management_group, True)
def list_policy_definition(cmd, subscription=None, management_group=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_definitions.list_by_management_group(management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_definitions.list()
def list_policy_setdefinition(cmd, subscription=None, management_group=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_set_definitions.list_by_management_group(management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_set_definitions.list()
def delete_policy_definition(cmd, policy_definition_name, subscription=None, management_group=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_definitions.delete_at_management_group(policy_definition_name, management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_definitions.delete(policy_definition_name)
def delete_policy_setdefinition(cmd, policy_set_definition_name, subscription=None, management_group=None):
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_set_definitions.delete_at_management_group(policy_set_definition_name, management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_set_definitions.delete(policy_set_definition_name)
def update_policy_definition(cmd, policy_definition_name, rules=None, params=None,
display_name=None, description=None, metadata=None, mode=None,
subscription=None, management_group=None):
rules = _load_file_string_or_uri(rules, 'rules', False)
params = _load_file_string_or_uri(params, 'params', False)
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
definition = _get_custom_or_builtin_policy(cmd, policy_client, policy_definition_name, subscription, management_group)
# pylint: disable=line-too-long,no-member
PolicyDefinition = cmd.get_models('PolicyDefinition')
parameters = PolicyDefinition(
policy_rule=rules if rules is not None else definition.policy_rule,
parameters=params if params is not None else definition.parameters,
display_name=display_name if display_name is not None else definition.display_name,
description=description if description is not None else definition.description,
metadata=metadata if metadata is not None else definition.metadata)
if cmd.supported_api_version(min_api='2016-12-01'):
parameters.mode = mode
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_definitions.create_or_update_at_management_group(policy_definition_name, parameters, management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_definitions.create_or_update(policy_definition_name, parameters)
def update_policy_setdefinition(cmd, policy_set_definition_name, definitions=None, params=None,
display_name=None, description=None,
subscription=None, management_group=None, definition_groups=None, metadata=None):
definitions = _load_file_string_or_uri(definitions, 'definitions', False)
params = _load_file_string_or_uri(params, 'params', False)
definition_groups = _load_file_string_or_uri(definition_groups, 'definition_groups', False)
policy_client = _resource_policy_client_factory(cmd.cli_ctx)
definition = _get_custom_or_builtin_policy(cmd, policy_client, policy_set_definition_name, subscription, management_group, True)
# pylint: disable=line-too-long,no-member
PolicySetDefinition = cmd.get_models('PolicySetDefinition')
parameters = PolicySetDefinition(
policy_definitions=definitions if definitions is not None else definition.policy_definitions,
description=description if description is not None else definition.description,
display_name=display_name if display_name is not None else definition.display_name,
parameters=params if params is not None else definition.parameters,
policy_definition_groups=definition_groups if definition_groups is not None else definition.policy_definition_groups,
metadata=metadata if metadata is not None else definition.metadata)
if cmd.supported_api_version(min_api='2018-03-01'):
enforce_mutually_exclusive(subscription, management_group)
if management_group:
return policy_client.policy_set_definitions.create_or_update_at_management_group(policy_set_definition_name, parameters, management_group)
if subscription:
subscription_id = _get_subscription_id_from_subscription(cmd.cli_ctx, subscription)
policy_client.config.subscription_id = subscription_id
return policy_client.policy_set_definitions.create_or_update(policy_set_definition_name, parameters)
def _register_rp(cli_ctx, subscription_id=None):
rp = "Microsoft.Management"
import time
rcf = get_mgmt_service_client(
cli_ctx,
ResourceType.MGMT_RESOURCE_RESOURCES,
subscription_id)
rcf.providers.register(rp)
while True:
time.sleep(10)
rp_info = rcf.providers.get(rp)
if rp_info.registration_state == 'Registered':
break
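# Note: _register_rp blocks, polling every 10 seconds until the Microsoft.Management
# provider reports 'Registered'; the management group commands below call it up front
# so the RP is guaranteed to be available before any request is made.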
def _get_subscription_id_from_subscription(cli_ctx, subscription): # pylint: disable=inconsistent-return-statements
from azure.cli.core._profile import Profile
profile = Profile(cli_ctx=cli_ctx)
subscriptions_list = profile.load_cached_subscriptions()
for sub in subscriptions_list:
if subscription in (sub['id'], sub['name']):
return sub['id']
raise CLIError("Subscription not found in the current context.")
def _get_parent_id_from_parent(parent):
if parent is None or _is_management_group_scope(parent):
return parent
return "/providers/Microsoft.Management/managementGroups/" + parent
def _is_management_group_scope(scope):
return scope is not None and scope.lower().startswith("/providers/microsoft.management/managementgroups")
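# Illustrative sketch of the two helpers above (hypothetical group name):
# _get_parent_id_from_parent('contoso-mg')
#   -> '/providers/Microsoft.Management/managementGroups/contoso-mg'
# _get_parent_id_from_parent('/providers/Microsoft.Management/managementGroups/contoso-mg')
#   -> returned unchanged, since it is already a management group scope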
def cli_managementgroups_group_list(cmd, client):
_register_rp(cmd.cli_ctx)
return client.list()
def cli_managementgroups_group_show(
cmd,
client,
group_name,
expand=False,
recurse=False):
_register_rp(cmd.cli_ctx)
if expand:
return client.get(group_name, "children", recurse)
return client.get(group_name)
def cli_managementgroups_group_create(
cmd,
client,
group_name,
display_name=None,
parent=None):
_register_rp(cmd.cli_ctx)
parent_id = _get_parent_id_from_parent(parent)
from azure.mgmt.managementgroups.models import (
CreateManagementGroupRequest, CreateManagementGroupDetails, CreateParentGroupInfo)
create_parent_grp_info = CreateParentGroupInfo(id=parent_id)
create_mgmt_grp_details = CreateManagementGroupDetails(parent=create_parent_grp_info)
create_mgmt_grp_request = CreateManagementGroupRequest(
name=group_name,
display_name=display_name,
details=create_mgmt_grp_details)
return client.create_or_update(group_name, create_mgmt_grp_request)
def cli_managementgroups_group_update_custom_func(
instance,
display_name=None,
parent_id=None):
parent_id = _get_parent_id_from_parent(parent_id)
instance.display_name = display_name
instance.parent_id = parent_id
return instance
def cli_managementgroups_group_update_get():
from azure.mgmt.managementgroups.models import PatchManagementGroupRequest
update_parameters = PatchManagementGroupRequest(display_name=None, parent_id=None)
return update_parameters
def cli_managementgroups_group_update_set(
cmd, client, group_name, parameters=None):
return client.update(group_name, parameters)
def cli_managementgroups_group_delete(cmd, client, group_name):
_register_rp(cmd.cli_ctx)
return client.delete(group_name)
def cli_managementgroups_subscription_add(
cmd, client, group_name, subscription):
subscription_id = _get_subscription_id_from_subscription(
cmd.cli_ctx, subscription)
return client.create(group_name, subscription_id)
def cli_managementgroups_subscription_remove(
cmd, client, group_name, subscription):
subscription_id = _get_subscription_id_from_subscription(
cmd.cli_ctx, subscription)
return client.delete(group_name, subscription_id)
# region Locks
def _validate_lock_params_match_lock(
lock_client, name, resource_group, resource_provider_namespace, parent_resource_path,
resource_type, resource_name):
"""
Locks are scoped to subscription, resource group or resource.
However, the az list command returns all locks for the current scopes
and all lower scopes (e.g. resource group level also includes resource locks).
This can lead to a confusing user experience where the user specifies a lock
name and assumes that it will work, even if they haven't given the right
scope. This function attempts to validate the parameters and help the
    user find the right scope, by first finding the lock, and then inferring
    what its parameters should be.
"""
locks = lock_client.management_locks.list_at_subscription_level()
found_count = 0 # locks at different levels can have the same name
lock_resource_id = None
for lock in locks:
if lock.name == name:
found_count = found_count + 1
lock_resource_id = lock.id
if found_count == 1:
# If we only found one lock, let's validate that the parameters are correct,
# if we found more than one, we'll assume the user knows what they're doing
# TODO: Add validation for that case too?
resource = parse_resource_id(lock_resource_id)
_resource_group = resource.get('resource_group', None)
_resource_namespace = resource.get('namespace', None)
if _resource_group is None:
return
if resource_group != _resource_group:
raise CLIError(
'Unexpected --resource-group for lock {}, expected {}'.format(
name, _resource_group))
if _resource_namespace is None or _resource_namespace == 'Microsoft.Authorization':
return
if resource_provider_namespace != _resource_namespace:
raise CLIError(
'Unexpected --namespace for lock {}, expected {}'.format(name, _resource_namespace))
if resource.get('child_type_2', None) is None:
_resource_type = resource.get('type', None)
_resource_name = resource.get('name', None)
else:
if resource.get('child_type_3', None) is None:
_resource_type = resource.get('child_type_1', None)
_resource_name = resource.get('child_name_1', None)
parent = (resource['type'] + '/' + resource['name'])
else:
_resource_type = resource.get('child_type_2', None)
_resource_name = resource.get('child_name_2', None)
parent = (resource['type'] + '/' + resource['name'] + '/' +
resource['child_type_1'] + '/' + resource['child_name_1'])
if parent != parent_resource_path:
raise CLIError(
'Unexpected --parent for lock {}, expected {}'.format(
name, parent))
if resource_type != _resource_type:
raise CLIError('Unexpected --resource-type for lock {}, expected {}'.format(
name, _resource_type))
if resource_name != _resource_name:
raise CLIError('Unexpected --resource-name for lock {}, expected {}'.format(
name, _resource_name))
def list_locks(cmd, resource_group=None,
resource_provider_namespace=None, parent_resource_path=None, resource_type=None,
resource_name=None, filter_string=None):
"""
:param resource_provider_namespace: Name of a resource provider.
:type resource_provider_namespace: str
    :param parent_resource_path: Path to a parent resource.
:type parent_resource_path: str
:param resource_type: The type for the resource with the lock.
:type resource_type: str
:param resource_name: Name of a resource that has a lock.
:type resource_name: str
:param filter_string: A query filter to use to restrict the results.
:type filter_string: str
"""
lock_client = _resource_lock_client_factory(cmd.cli_ctx)
lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
resource_type, resource_name)
resource_group = lock_resource[0]
resource_name = lock_resource[1]
resource_provider_namespace = lock_resource[2]
resource_type = lock_resource[3]
if resource_group is None:
return lock_client.management_locks.list_at_subscription_level(filter=filter_string)
if resource_name is None:
return lock_client.management_locks.list_at_resource_group_level(
resource_group, filter=filter_string)
return lock_client.management_locks.list_at_resource_level(
resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
resource_name, filter=filter_string)
# pylint: disable=inconsistent-return-statements
def get_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None,
parent_resource_path=None, resource_type=None, resource_name=None, ids=None):
"""
:param name: The name of the lock.
:type name: str
"""
if ids:
kwargs_list = []
for id_arg in ids:
try:
kwargs_list.append(_parse_lock_id(id_arg))
except AttributeError:
logger.error('az lock show: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
return
results = [get_lock(cmd, **kwargs) for kwargs in kwargs_list]
return results[0] if len(results) == 1 else results
lock_client = _resource_lock_client_factory(cmd.cli_ctx)
lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
resource_type, resource_name)
resource_group = lock_resource[0]
resource_name = lock_resource[1]
resource_provider_namespace = lock_resource[2]
resource_type = lock_resource[3]
_validate_lock_params_match_lock(lock_client, lock_name, resource_group,
resource_provider_namespace, parent_resource_path,
resource_type, resource_name)
if resource_group is None:
return _call_subscription_get(cmd, lock_client, lock_name)
if resource_name is None:
return lock_client.management_locks.get_at_resource_group_level(resource_group, lock_name)
if cmd.supported_api_version(max_api='2015-01-01'):
        lock_list = list_locks(cmd, resource_group, resource_provider_namespace, parent_resource_path,
                               resource_type, resource_name)
return next((lock for lock in lock_list if lock.name == lock_name), None)
return lock_client.management_locks.get_at_resource_level(
resource_group, resource_provider_namespace,
parent_resource_path or '', resource_type, resource_name, lock_name)
# pylint: disable=inconsistent-return-statements
def delete_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None,
parent_resource_path=None, resource_type=None, resource_name=None, ids=None):
"""
:param name: The name of the lock.
:type name: str
:param resource_provider_namespace: Name of a resource provider.
:type resource_provider_namespace: str
    :param parent_resource_path: Path to a parent resource.
:type parent_resource_path: str
:param resource_type: The type for the resource with the lock.
:type resource_type: str
:param resource_name: Name of a resource that has a lock.
:type resource_name: str
"""
if ids:
kwargs_list = []
for id_arg in ids:
try:
kwargs_list.append(_parse_lock_id(id_arg))
except AttributeError:
logger.error('az lock delete: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
return
results = [delete_lock(cmd, **kwargs) for kwargs in kwargs_list]
return results[0] if len(results) == 1 else results
lock_client = _resource_lock_client_factory(cmd.cli_ctx)
lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
resource_type, resource_name)
resource_group = lock_resource[0]
resource_name = lock_resource[1]
resource_provider_namespace = lock_resource[2]
resource_type = lock_resource[3]
_validate_lock_params_match_lock(lock_client, lock_name, resource_group,
resource_provider_namespace, parent_resource_path,
resource_type, resource_name)
if resource_group is None:
return lock_client.management_locks.delete_at_subscription_level(lock_name)
if resource_name is None:
return lock_client.management_locks.delete_at_resource_group_level(
resource_group, lock_name)
return lock_client.management_locks.delete_at_resource_level(
resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
resource_name, lock_name)
def create_lock(cmd, lock_name, level,
resource_group=None, resource_provider_namespace=None, notes=None,
parent_resource_path=None, resource_type=None, resource_name=None):
"""
:param name: The name of the lock.
:type name: str
:param resource_provider_namespace: Name of a resource provider.
:type resource_provider_namespace: str
    :param parent_resource_path: Path to a parent resource.
:type parent_resource_path: str
:param resource_type: The type for the resource with the lock.
:type resource_type: str
:param resource_name: Name of a resource that has a lock.
:type resource_name: str
:param notes: Notes about this lock.
:type notes: str
"""
ManagementLockObject = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LOCKS, 'ManagementLockObject', mod='models')
parameters = ManagementLockObject(level=level, notes=notes, name=lock_name)
lock_client = _resource_lock_client_factory(cmd.cli_ctx)
lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
resource_type, resource_name)
resource_group = lock_resource[0]
resource_name = lock_resource[1]
resource_provider_namespace = lock_resource[2]
resource_type = lock_resource[3]
if resource_group is None:
return lock_client.management_locks.create_or_update_at_subscription_level(lock_name, parameters)
if resource_name is None:
return lock_client.management_locks.create_or_update_at_resource_group_level(
resource_group, lock_name, parameters)
return lock_client.management_locks.create_or_update_at_resource_level(
resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
resource_name, lock_name, parameters)
# pylint: disable=inconsistent-return-statements
def update_lock(cmd, lock_name=None, resource_group=None, resource_provider_namespace=None, notes=None,
parent_resource_path=None, resource_type=None, resource_name=None, level=None, ids=None):
"""
Allows updates to the lock-type(level) and the notes of the lock
"""
if ids:
kwargs_list = []
for id_arg in ids:
try:
kwargs_list.append(_parse_lock_id(id_arg))
except AttributeError:
logger.error('az lock update: error: argument --ids: invalid ResourceId value: \'%s\'', id_arg)
return
results = [update_lock(cmd, level=level, notes=notes, **kwargs) for kwargs in kwargs_list]
return results[0] if len(results) == 1 else results
lock_client = _resource_lock_client_factory(cmd.cli_ctx)
lock_resource = _extract_lock_params(resource_group, resource_provider_namespace,
resource_type, resource_name)
resource_group = lock_resource[0]
resource_name = lock_resource[1]
resource_provider_namespace = lock_resource[2]
resource_type = lock_resource[3]
_validate_lock_params_match_lock(lock_client, lock_name, resource_group, resource_provider_namespace,
parent_resource_path, resource_type, resource_name)
if resource_group is None:
params = _call_subscription_get(cmd, lock_client, lock_name)
_update_lock_parameters(params, level, notes)
return lock_client.management_locks.create_or_update_at_subscription_level(lock_name, params)
if resource_name is None:
params = lock_client.management_locks.get_at_resource_group_level(resource_group, lock_name)
_update_lock_parameters(params, level, notes)
return lock_client.management_locks.create_or_update_at_resource_group_level(
resource_group, lock_name, params)
if cmd.supported_api_version(max_api='2015-01-01'):
        lock_list = list_locks(cmd, resource_group, resource_provider_namespace, parent_resource_path,
                               resource_type, resource_name)
return next((lock for lock in lock_list if lock.name == lock_name), None)
params = lock_client.management_locks.get_at_resource_level(
resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
resource_name, lock_name)
_update_lock_parameters(params, level, notes)
return lock_client.management_locks.create_or_update_at_resource_level(
resource_group, resource_provider_namespace, parent_resource_path or '', resource_type,
resource_name, lock_name, params)
# endregion
# region ResourceLinks
def create_resource_link(cmd, link_id, target_id, notes=None):
links_client = _resource_links_client_factory(cmd.cli_ctx).resource_links
ResourceLinkProperties = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LINKS,
'ResourceLinkProperties', mod='models')
properties = ResourceLinkProperties(target_id=target_id, notes=notes)
links_client.create_or_update(link_id, properties)
def update_resource_link(cmd, link_id, target_id=None, notes=None):
links_client = _resource_links_client_factory(cmd.cli_ctx).resource_links
params = links_client.get(link_id)
ResourceLinkProperties = get_sdk(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_LINKS,
'ResourceLinkProperties', mod='models')
properties = ResourceLinkProperties(
target_id=target_id if target_id is not None else params.properties.target_id,
# pylint: disable=no-member
notes=notes if notes is not None else params.properties.notes) # pylint: disable=no-member
links_client.create_or_update(link_id, properties)
def list_resource_links(cmd, scope=None, filter_string=None):
links_client = _resource_links_client_factory(cmd.cli_ctx).resource_links
if scope is not None:
return links_client.list_at_source_scope(scope, filter=filter_string)
return links_client.list_at_subscription(filter=filter_string)
# endregion
# region tags
def get_tag_at_scope(cmd, resource_id=None):
rcf = _resource_client_factory(cmd.cli_ctx)
if resource_id is not None:
return rcf.tags.get_at_scope(scope=resource_id)
return rcf.tags.list()
def create_or_update_tag_at_scope(cmd, resource_id=None, tags=None, tag_name=None):
rcf = _resource_client_factory(cmd.cli_ctx)
if resource_id is not None:
if not tags:
raise IncorrectUsageError("Tags could not be empty.")
Tags = cmd.get_models('Tags')
tag_obj = Tags(tags=tags)
return rcf.tags.create_or_update_at_scope(scope=resource_id, properties=tag_obj)
return rcf.tags.create_or_update(tag_name=tag_name)
def delete_tag_at_scope(cmd, resource_id=None, tag_name=None):
rcf = _resource_client_factory(cmd.cli_ctx)
if resource_id is not None:
return rcf.tags.delete_at_scope(scope=resource_id)
return rcf.tags.delete(tag_name=tag_name)
def update_tag_at_scope(cmd, resource_id, tags, operation):
rcf = _resource_client_factory(cmd.cli_ctx)
if not tags:
raise IncorrectUsageError("Tags could not be empty.")
Tags = cmd.get_models('Tags')
tag_obj = Tags(tags=tags)
return rcf.tags.update_at_scope(scope=resource_id, properties=tag_obj, operation=operation)
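# Illustrative usage sketch (hypothetical scope and values): merging a tag into an
# existing resource group ends up calling
#   update_tag_at_scope(cmd, '/subscriptions/<sub>/resourceGroups/rg', {'env': 'dev'}, 'Merge')
# where 'Merge', 'Replace' and 'Delete' are the operations the Tags API accepts.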
# endregion
class _ResourceUtils: # pylint: disable=too-many-instance-attributes
def __init__(self, cli_ctx,
resource_group_name=None, resource_provider_namespace=None,
parent_resource_path=None, resource_type=None, resource_name=None,
resource_id=None, api_version=None, rcf=None, latest_include_preview=False):
        # if the resource_type is in the format 'namespace/type', split it.
        # (we don't have to do this, but commands like 'vm show' return such values)
if resource_type and not resource_provider_namespace and not parent_resource_path:
parts = resource_type.split('/')
if len(parts) > 1:
resource_provider_namespace = parts[0]
resource_type = parts[1]
self.rcf = rcf or _resource_client_factory(cli_ctx)
if api_version is None:
if resource_id:
api_version = _ResourceUtils._resolve_api_version_by_id(self.rcf, resource_id,
latest_include_preview=latest_include_preview)
else:
_validate_resource_inputs(resource_group_name, resource_provider_namespace,
resource_type, resource_name)
api_version = _ResourceUtils.resolve_api_version(self.rcf,
resource_provider_namespace,
parent_resource_path,
resource_type,
latest_include_preview=latest_include_preview)
self.resource_group_name = resource_group_name
self.resource_provider_namespace = resource_provider_namespace
self.parent_resource_path = parent_resource_path if parent_resource_path else ''
self.resource_type = resource_type
self.resource_name = resource_name
self.resource_id = resource_id
self.api_version = api_version
def create_resource(self, properties, location, is_full_object):
try:
res = json.loads(properties)
except json.decoder.JSONDecodeError as ex:
raise CLIError('Error parsing JSON.\n{}\n{}'.format(properties, ex))
if not is_full_object:
if not location:
if self.resource_id:
rg_name = parse_resource_id(self.resource_id)['resource_group']
else:
rg_name = self.resource_group_name
location = self.rcf.resource_groups.get(rg_name).location
res = GenericResource(location=location, properties=res)
elif res.get('location', None) is None:
raise IncorrectUsageError("location of the resource is required")
if self.resource_id:
resource = self.rcf.resources.create_or_update_by_id(self.resource_id,
self.api_version,
res)
else:
resource = self.rcf.resources.create_or_update(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
res)
return resource
def get_resource(self, include_response_body=False):
if self.resource_id:
resource = self.rcf.resources.get_by_id(self.resource_id, self.api_version, raw=include_response_body)
else:
resource = self.rcf.resources.get(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
raw=include_response_body)
if include_response_body:
temp = resource.output
setattr(temp, 'response_body', json.loads(resource.response.content.decode()))
resource = temp
return resource
def delete(self):
if self.resource_id:
return self.rcf.resources.delete_by_id(self.resource_id, self.api_version)
return self.rcf.resources.delete(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version)
def update(self, parameters):
if self.resource_id:
return self.rcf.resources.create_or_update_by_id(self.resource_id,
self.api_version,
parameters)
return self.rcf.resources.create_or_update(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
parameters)
def tag(self, tags, is_incremental=False):
resource = self.get_resource()
if is_incremental is True:
if not tags:
raise CLIError("When modifying tag incrementally, the parameters of tag must have specific values.")
if resource.tags:
resource.tags.update(tags)
tags = resource.tags
        # Add any service type that must be updated with a PATCH request here.
        # For example: the properties of RecoveryServices/vaults are required, so a PUT
        # request that passes back empty properties fails; the PATCH type should be used instead.
need_patch_service = ['Microsoft.RecoveryServices/vaults', 'Microsoft.Resources/resourceGroups',
'Microsoft.ContainerRegistry/registries/webhooks',
'Microsoft.ContainerInstance/containerGroups']
if resource is not None and resource.type in need_patch_service:
parameters = GenericResource(tags=tags)
if self.resource_id:
return self.rcf.resources.update_by_id(self.resource_id, self.api_version, parameters)
return self.rcf.resources.update(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
parameters)
# pylint: disable=no-member
parameters = GenericResource(
location=resource.location,
tags=tags,
plan=resource.plan,
properties=resource.properties,
kind=resource.kind,
managed_by=resource.managed_by,
sku=resource.sku,
identity=resource.identity)
if self.resource_id:
return self.rcf.resources.create_or_update_by_id(self.resource_id, self.api_version,
parameters)
return self.rcf.resources.create_or_update(self.resource_group_name,
self.resource_provider_namespace,
self.parent_resource_path,
self.resource_type,
self.resource_name,
self.api_version,
parameters)
def invoke_action(self, action, request_body):
"""
        Formats the URL if none was provided and sends the POST request with the URL and request body.
"""
from msrestazure.azure_operation import AzureOperationPoller
query_parameters = {}
serialize = self.rcf.resources._serialize # pylint: disable=protected-access
client = self.rcf.resources._client # pylint: disable=protected-access
url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/' \
'{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/{action}'
if self.resource_id:
url = client.format_url(
'{resource_id}/{action}',
resource_id=self.resource_id,
action=serialize.url("action", action, 'str'))
else:
url = client.format_url(
url,
resourceGroupName=serialize.url(
"resource_group_name", self.resource_group_name, 'str',
max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
resourceProviderNamespace=serialize.url(
"resource_provider_namespace", self.resource_provider_namespace, 'str'),
parentResourcePath=serialize.url(
"parent_resource_path", self.parent_resource_path, 'str', skip_quote=True),
resourceType=serialize.url("resource_type", self.resource_type, 'str', skip_quote=True),
resourceName=serialize.url("resource_name", self.resource_name, 'str'),
subscriptionId=serialize.url(
"self.config.subscription_id", self.rcf.resources.config.subscription_id, 'str'),
action=serialize.url("action", action, 'str'))
# Construct parameters
query_parameters['api-version'] = serialize.query("api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.rcf.resources.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid4())
if self.rcf.resources.config.accept_language is not None:
header_parameters['accept-language'] = serialize.header(
"self.config.accept_language", self.rcf.resources.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = client.post(url, query_parameters)
return client.send(
request, header_parameters, json.loads(request_body) if request_body else None)
def get_long_running_status(status_link, headers=None):
request = client.get(status_link)
if headers:
request.headers.update(headers)
return client.send(request, header_parameters)
def get_long_running_output(response):
from msrestazure.azure_exceptions import CloudError
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response.text
return AzureOperationPoller(long_running_send, get_long_running_output, get_long_running_status,
self.rcf.resources.config.long_running_operation_timeout)
@staticmethod
def resolve_api_version(rcf, resource_provider_namespace, parent_resource_path, resource_type,
latest_include_preview=False):
provider = rcf.providers.get(resource_provider_namespace)
        # If available, we will use the parent resource's api-version
resource_type_str = (parent_resource_path.split('/')[0] if parent_resource_path else resource_type)
rt = [t for t in provider.resource_types
if t.resource_type.lower() == resource_type_str.lower()]
if not rt:
raise IncorrectUsageError('Resource type {} not found.'.format(resource_type_str))
if len(rt) == 1 and rt[0].api_versions:
            # If latest_include_preview is true, the latest api-version is taken
            # regardless of whether it is a preview version or not
if latest_include_preview:
return rt[0].api_versions[0]
            # Take the latest stable version first.
            # If there is no stable version, the latest preview version will be taken.
npv = [v for v in rt[0].api_versions if 'preview' not in v.lower()]
return npv[0] if npv else rt[0].api_versions[0]
raise IncorrectUsageError(
'API version is required and could not be resolved for resource {}'
.format(resource_type))
@staticmethod
def _resolve_api_version_by_id(rcf, resource_id, latest_include_preview=False):
parts = parse_resource_id(resource_id)
if len(parts) == 2 and parts['subscription'] is not None and parts['resource_group'] is not None:
return AZURE_API_PROFILES['latest'][ResourceType.MGMT_RESOURCE_RESOURCES]
if 'namespace' not in parts:
raise CLIError('The type of value entered by --ids parameter is not supported.')
namespace = parts.get('child_namespace_1', parts['namespace'])
if parts.get('child_type_2'):
parent = (parts['type'] + '/' + parts['name'] + '/' +
parts['child_type_1'] + '/' + parts['child_name_1'])
resource_type = parts['child_type_2']
elif parts.get('child_type_1'):
# if the child resource has a provider namespace it is independent of the
# parent, so set the parent to empty
if parts.get('child_namespace_1') is not None:
parent = ''
else:
parent = parts['type'] + '/' + parts['name']
resource_type = parts['child_type_1']
else:
parent = None
resource_type = parts['type']
return _ResourceUtils.resolve_api_version(rcf, namespace, parent, resource_type,
latest_include_preview=latest_include_preview)
| return validation_result |
service_test.go | package discovery
import (
"sync"
"testing"
log "github.com/pion/ion-log"
"github.com/stretchr/testify/assert"
)
const (
etcdAddr = "http://127.0.0.1:2379"
)
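// Note: these tests assume an etcd instance is reachable at etcdAddr, e.g. one
// started locally with its default client listener on 127.0.0.1:2379.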
func init() |
func TestWatch(t *testing.T) {
var wg sync.WaitGroup
s, err := NewService("sfu", "dc1", []string{etcdAddr})
assert.NoError(t, err)
s.Watch("sfu", func(state State, id string, node *Node) {
if state == NodeUp {
assert.Equal(t, s.node, *node)
wg.Done()
} else if state == NodeDown {
assert.Equal(t, s.node.ID(), id)
wg.Done()
}
})
wg.Add(1)
s.KeepAlive()
wg.Wait()
wg.Add(1)
s.Close()
wg.Wait()
}
func TestGetNodes(t *testing.T) {
var wg sync.WaitGroup
islb, err := NewService("islb", "dc1", []string{etcdAddr})
assert.NoError(t, err)
biz, err := NewService("biz", "dc1", []string{etcdAddr})
assert.NoError(t, err)
islb.Watch("", func(state State, id string, node *Node) {
if state == NodeUp {
wg.Done()
} else if state == NodeDown {
wg.Done()
}
})
wg.Add(2)
biz.KeepAlive()
islb.KeepAlive()
wg.Wait()
nodes := make(map[string]Node)
err = islb.GetNodes("", nodes)
assert.NoError(t, err)
assert.Equal(t, 2, len(nodes))
assert.Equal(t, biz.node, nodes[biz.node.ID()])
assert.Equal(t, islb.node, nodes[islb.node.ID()])
wg.Add(2)
biz.Close()
islb.Close()
wg.Wait()
}
| {
log.Init("info", []string{"asm_amd64.s", "proc.go"}, []string{})
} |
script.js | const flashdata = $('.flash-data').data('flashdata');
if(flashdata){
Swal.fire({
| type: 'success'
});
}
$('.tombol-hapus').on('click', function(e){
    // disable the browser's default action
e.preventDefault();
const href = $(this).attr('href');
Swal.fire({
title: 'Apakah anda yakin',
text: "Data akan dihapus",
type: 'warning',
showCancelButton: true,
confirmButtonColor: '#3085d6',
cancelButtonColor: '#d33',
        confirmButtonText: 'Hapus Data!'
}).then((result) => {
if(result.value) {
document.location.href = href;
}
})
});
// $('#btn_personal').on('click', function(e){
// e.preventDefault();
// const flashdata = $('.flash-data').data('flashdata');
// Swal.fire({
// title: 'Are you sure?',
// text: "This Is Valid Data",
// type: 'warning',
// showCancelButton: true,
// confirmButtonColor: '#3085d6',
// cancelButtonColor: '#d33',
// confirmButtonText: 'Yes, Valid'
// }).then((result) => {
// if (result.value) {
// Swal.fire({
// title: 'Data',
// text: flashdata,
// type:'success'
// });
// }
// }); | title: 'Data',
text: flashdata,
|
mod.rs | // Copyright 2018-2020 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A small storage vector that allows to store a limited amount of elements.
//!
//! Prefer using [`SmallVec`] over [`crate::Vec`] if you know up front
//! the maximum amount of unique elements that have to be stored in the vector
//! at the same time, given the number is fairly low: e.g. not exceeding several
//! hundreds of elements.
mod impls;
mod iter;
mod storage;
#[cfg(test)]
mod tests;
pub use self::iter::{
Iter,
IterMut,
};
use crate::{
lazy::{
Lazy,
LazyArray,
LazyArrayLength,
},
traits::PackedLayout,
};
/// The used index type.
type Index = u32;
/// A contiguous growable array type.
///
/// # Note
///
/// - The `storage::SmallVec` has a very similar API compared to a `storage::Vec`.
/// The major difference between both data structures is that the `SmallVec`
/// can only contain up to a fixed amount of elements given by `N` whereas the
/// `Vec` can contain up to 2^32 elements which is the maximum for 32-bit Wasm
/// targets.
/// - The performance characteristics may be different from Rust's
/// `Vec` due to the internal differences.
/// - Allows to store up to N elements.
#[derive(Debug)]
pub struct SmallVec<T, N>
where
T: PackedLayout,
N: LazyArrayLength<T>,
{
/// The current length of the small vector.
len: Lazy<u32>,
/// The entries of the small vector.
elems: LazyArray<T, N>,
}
impl<T, N> Default for SmallVec<T, N>
where
T: PackedLayout,
N: LazyArrayLength<T>,
{
fn default() -> Self {
Self::new()
}
}
impl<T, N> SmallVec<T, N>
where
T: PackedLayout,
N: LazyArrayLength<T>,
{
/// Clears the underlying storage cells of the storage vector.
///
/// # Note
///
    /// This completely invalidates the storage vector's invariants about
/// the contents of its associated storage region.
///
/// This API is used for the `Drop` implementation of [`Vec`] as well as
/// for the [`SpreadLayout::clear_spread`] trait implementation.
fn clear_cells(&self) {
if self.elems.key().is_none() {
// We won't clear any storage if we are in lazy state since there
// probably has not been any state written to storage, yet.
return
}
for index in 0..self.len() {
self.elems.clear_packed_at(index);
}
}
}
impl<T, N> SmallVec<T, N>
where
T: PackedLayout,
N: LazyArrayLength<T>,
{
/// Creates a new empty vector.
pub fn new() -> Self {
Self {
len: Lazy::new(0),
elems: Default::default(),
}
}
/// Returns the capacity of the small vector.
#[inline]
pub fn capacity(&self) -> u32 {
self.elems.capacity()
}
/// Returns the number of elements in the vector, also referred to as its 'length'.
#[inline]
pub fn len(&self) -> u32 {
*self.len
}
/// Returns `true` if the vector contains no elements.
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
}
impl<T, N> SmallVec<T, N>
where
T: PackedLayout,
N: LazyArrayLength<T>,
{
/// Returns an iterator yielding shared references to all elements.
///
/// # Note
///
/// - Avoid unbounded iteration over big storage vectors.
/// - Prefer using methods like `Iterator::take` in order to limit the number
/// of yielded elements.
pub fn iter(&self) -> Iter<T, N> {
Iter::new(self)
}
/// Returns an iterator yielding exclusive references to all elements.
///
/// # Note
///
/// - Avoid unbounded iteration over big storage vectors.
/// - Prefer using methods like `Iterator::take` in order to limit the number
/// of yielded elements.
pub fn iter_mut(&mut self) -> IterMut<T, N> {
IterMut::new(self)
}
    /// Returns the index if it is within bounds or `None` otherwise.
fn within_bounds(&self, index: Index) -> Option<Index> {
if index < self.len() {
return Some(index)
}
None
}
/// Returns a shared reference to the first element if any.
pub fn first(&self) -> Option<&T> {
if self.is_empty() {
return None
}
self.get(0)
}
/// Returns a shared reference to the last element if any.
pub fn last(&self) -> Option<&T> {
if self.is_empty() {
return None
}
let last_index = self.len() - 1;
self.get(last_index)
}
/// Returns a shared reference to the indexed element.
///
/// Returns `None` if `index` is out of bounds.
pub fn get(&self, index: u32) -> Option<&T> {
self.within_bounds(index)
.and_then(|index| self.elems.get(index))
}
}
impl<T, N> SmallVec<T, N>
where
T: PackedLayout,
N: LazyArrayLength<T>,
{
/// Appends an element to the back of the vector.
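    ///
    /// # Example
    ///
    /// A minimal sketch; the element type and the `typenum`-style capacity
    /// marker `U4` are illustrative assumptions, not part of this module:
    ///
    /// ```ignore
    /// let mut vec: SmallVec<u8, U4> = SmallVec::new();
    /// vec.push(1);
    /// assert_eq!(vec.len(), 1);
    /// ```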
pub fn push(&mut self, value: T) {
assert!(
self.len() < self.capacity(),
"cannot push more elements into the vector"
);
let last_index = self.len();
*self.len += 1;
self.elems.put(last_index, Some(value));
}
}
impl<T, N> SmallVec<T, N>
where
T: PackedLayout,
N: LazyArrayLength<T>,
{
/// Pops the last element from the vector and returns it.
    ///
/// Returns `None` if the vector is empty.
pub fn pop(&mut self) -> Option<T> {
if self.is_empty() {
return None
}
let last_index = self.len() - 1;
*self.len = last_index;
self.elems.put_get(last_index, None)
}
/// Pops the last element from the vector and immediately drops it.
///
/// Returns `Some(())` if an element has been removed and `None` otherwise.
///
/// # Note
///
/// This operation is a bit more efficient than [`SmallVec::pop`]
/// since it avoids reading from contract storage in some use cases.
pub fn pop_drop(&mut self) -> Option<()> {
if self.is_empty() {
return None
}
let last_index = self.len() - 1;
*self.len = last_index;
self.elems.put(last_index, None);
Some(())
}
/// Returns an exclusive reference to the first element if any.
pub fn first_mut(&mut self) -> Option<&mut T> {
if self.is_empty() {
return None
}
self.get_mut(0)
}
/// Returns an exclusive reference to the last element if any.
pub fn last_mut(&mut self) -> Option<&mut T> {
if self.is_empty() {
return None
}
let last_index = self.len() - 1;
self.get_mut(last_index)
}
/// Returns an exclusive reference to the indexed element.
///
/// Returns `None` if `index` is out of bounds.
pub fn get_mut(&mut self, index: u32) -> Option<&mut T> {
self.within_bounds(index)
.and_then(move |index| self.elems.get_mut(index))
}
/// Swaps the elements at the given indices.
///
/// # Panics
///
/// If one or both indices are out of bounds.
pub fn swap(&mut self, a: u32, b: u32) {
assert!(
a < self.len() && b < self.len(),
"indices are out of bounds"
);
self.elems.swap(a, b)
}
/// Removes the indexed element from the vector and returns it.
///
/// The last element of the vector is put into the indexed slot.
/// Returns `None` and does not mutate the vector if the index is out of bounds.
///
/// # Note
///
/// This operation does not preserve ordering but is constant time.
pub fn swap_remove(&mut self, n: u32) -> Option<T> {
if self.is_empty() |
self.elems.swap(n, self.len() - 1);
self.pop()
}
/// Removes the indexed element from the vector.
///
/// The last element of the vector is put into the indexed slot.
/// Returns `Some(())` if an element has been removed and `None` otherwise.
///
/// # Note
///
/// This operation should be preferred over [`Vec::swap_remove`] if there is
/// no need to return the removed element since it avoids a contract storage
/// read for some use cases.
pub fn swap_remove_drop(&mut self, n: u32) -> Option<()> {
if self.is_empty() {
return None
}
self.elems.put(n, None);
let last_index = self.len() - 1;
let last = self.elems.put_get(last_index, None);
self.elems.put(n, last);
*self.len = last_index;
Some(())
}
}
| {
return None
} |
unmount.go | package images
import (
"fmt"
"github.com/containers/podman/v3/cmd/podman/common"
"github.com/containers/podman/v3/cmd/podman/registry"
"github.com/containers/podman/v3/cmd/podman/utils"
"github.com/containers/podman/v3/pkg/domain/entities"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
var (
description = `Image storage increments a mount counter each time an image is mounted.
When an image is unmounted, the mount counter is decremented. The image's root filesystem is physically unmounted only when the mount counter reaches zero indicating no other processes are using the mount.
An unmount can be forced with the --force flag.
`
unmountCommand = &cobra.Command{
Annotations: map[string]string{registry.EngineMode: registry.ABIMode},
Use: "unmount [options] IMAGE [IMAGE...]",
Aliases: []string{"umount"},
Short: "Unmount an image's root filesystem",
Long: description, | Example: `podman unmount imgID
podman unmount imgID1 imgID2 imgID3
podman unmount --all`,
}
)
var (
unmountOpts entities.ImageUnmountOptions
)
func unmountFlags(flags *pflag.FlagSet) {
flags.BoolVarP(&unmountOpts.All, "all", "a", false, "Unmount all of the currently mounted images")
flags.BoolVarP(&unmountOpts.Force, "force", "f", false, "Force the complete unmount of the specified mounted images")
}
func init() {
registry.Commands = append(registry.Commands, registry.CliCommand{
Parent: imageCmd,
Command: unmountCommand,
})
unmountFlags(unmountCommand.Flags())
}
func unmount(cmd *cobra.Command, args []string) error {
var errs utils.OutputErrors
if len(args) < 1 && !unmountOpts.All {
return errors.New("image name or ID must be specified")
}
if len(args) > 0 && unmountOpts.All {
return errors.New("when using the --all switch, you may not pass any image names or IDs")
}
reports, err := registry.ImageEngine().Unmount(registry.GetContext(), args, unmountOpts)
if err != nil {
return err
}
for _, r := range reports {
if r.Err == nil {
fmt.Println(r.Id)
} else {
errs = append(errs, r.Err)
}
}
return errs.PrintErrors()
} | RunE: unmount,
ValidArgsFunction: common.AutocompleteImages, |
zz_generated.swagger_doc_generated.go | package v1
// This file contains a collection of methods that can be used from go-restful to
// generate Swagger API documentation for its models. Please read this PR for more
// information on the implementation: https://github.com/emicklei/go-restful/pull/215
//
// TODOs are ignored from the parser (e.g. TODO(andronat):... || TODO:...) if and only if
// they are on one line! For multiple line or blocks that you want to ignore use ---.
// Any context after a --- is ignored.
//
// Those methods can be generated by using hack/update-swagger-docs.sh
// AUTO-GENERATED FUNCTIONS START HERE
var map_ClusterNetwork = map[string]string{
"": "ClusterNetwork describes the cluster network. There is normally only one object of this type, named \"default\", which is created by the SDN network plugin based on the master configuration when the cluster is brought up for the first time.",
"metadata": "Standard object's metadata.",
"network": "Network is a CIDR string specifying the global overlay network's L3 space",
"hostsubnetlength": "HostSubnetLength is the number of bits of network to allocate to each node. eg, 8 would mean that each node would have a /24 slice of the overlay network for its pods",
"serviceNetwork": "ServiceNetwork is the CIDR range that Service IP addresses are allocated from",
"pluginName": "PluginName is the name of the network plugin being used",
"clusterNetworks": "ClusterNetworks is a list of ClusterNetwork objects that defines the global overlay network's L3 space by specifying a set of CIDR and netmasks that the SDN can allocate addresses from.",
"vxlanPort": "VXLANPort sets the VXLAN destination port used by the cluster. It is set by the master configuration file on startup and cannot be edited manually. Valid values for VXLANPort are integers 1-65535 inclusive and if unset defaults to 4789. Changing VXLANPort allows users to resolve issues between openshift SDN and other software trying to use the same VXLAN destination port.",
"mtu": "MTU is the MTU for the overlay network. This should be 50 less than the MTU of the network connecting the nodes. It is normally autodetected by the cluster network operator.",
} |
func (ClusterNetwork) SwaggerDoc() map[string]string {
return map_ClusterNetwork
}
var map_ClusterNetworkEntry = map[string]string{
"": "ClusterNetworkEntry defines an individual cluster network. The CIDRs cannot overlap with other cluster network CIDRs, CIDRs reserved for external ips, CIDRs reserved for service networks, and CIDRs reserved for ingress ips.",
"CIDR": "CIDR defines the total range of a cluster networks address space.",
"hostSubnetLength": "HostSubnetLength is the number of bits of the accompanying CIDR address to allocate to each node. eg, 8 would mean that each node would have a /24 slice of the overlay network for its pods.",
}
func (ClusterNetworkEntry) SwaggerDoc() map[string]string {
return map_ClusterNetworkEntry
}
var map_ClusterNetworkList = map[string]string{
"": "ClusterNetworkList is a collection of ClusterNetworks",
"metadata": "Standard object's metadata.",
"items": "Items is the list of cluster networks",
}
func (ClusterNetworkList) SwaggerDoc() map[string]string {
return map_ClusterNetworkList
}
var map_EgressNetworkPolicy = map[string]string{
"": "EgressNetworkPolicy describes the current egress network policy for a Namespace. When using the 'redhat/openshift-ovs-multitenant' network plugin, traffic from a pod to an IP address outside the cluster will be checked against each EgressNetworkPolicyRule in the pod's namespace's EgressNetworkPolicy, in order. If no rule matches (or no EgressNetworkPolicy is present) then the traffic will be allowed by default.",
"metadata": "metadata for EgressNetworkPolicy",
"spec": "spec is the specification of the current egress network policy",
}
func (EgressNetworkPolicy) SwaggerDoc() map[string]string {
return map_EgressNetworkPolicy
}
var map_EgressNetworkPolicyList = map[string]string{
"": "EgressNetworkPolicyList is a collection of EgressNetworkPolicy",
"metadata": "metadata for EgressNetworkPolicyList",
"items": "items is the list of policies",
}
func (EgressNetworkPolicyList) SwaggerDoc() map[string]string {
return map_EgressNetworkPolicyList
}
var map_EgressNetworkPolicyPeer = map[string]string{
"": "EgressNetworkPolicyPeer specifies a target to apply egress network policy to",
"cidrSelector": "cidrSelector is the CIDR range to allow/deny traffic to. If this is set, dnsName must be unset",
"dnsName": "dnsName is the domain name to allow/deny traffic to. If this is set, cidrSelector must be unset",
}
func (EgressNetworkPolicyPeer) SwaggerDoc() map[string]string {
return map_EgressNetworkPolicyPeer
}
var map_EgressNetworkPolicyRule = map[string]string{
"": "EgressNetworkPolicyRule contains a single egress network policy rule",
"type": "type marks this as an \"Allow\" or \"Deny\" rule",
"to": "to is the target that traffic is allowed/denied to",
}
func (EgressNetworkPolicyRule) SwaggerDoc() map[string]string {
return map_EgressNetworkPolicyRule
}
var map_EgressNetworkPolicySpec = map[string]string{
"": "EgressNetworkPolicySpec provides a list of policies on outgoing network traffic",
"egress": "egress contains the list of egress policy rules",
}
func (EgressNetworkPolicySpec) SwaggerDoc() map[string]string {
return map_EgressNetworkPolicySpec
}
var map_HostSubnet = map[string]string{
"": "HostSubnet describes the container subnet network on a node. The HostSubnet object must have the same name as the Node object it corresponds to.",
"metadata": "Standard object's metadata.",
"host": "Host is the name of the node. (This is the same as the object's name, but both fields must be set.)",
"hostIP": "HostIP is the IP address to be used as a VTEP by other nodes in the overlay network",
"subnet": "Subnet is the CIDR range of the overlay network assigned to the node for its pods",
"egressIPs": "EgressIPs is the list of automatic egress IP addresses currently hosted by this node. If EgressCIDRs is empty, this can be set by hand; if EgressCIDRs is set then the master will overwrite the value here with its own allocation of egress IPs.",
"egressCIDRs": "EgressCIDRs is the list of CIDR ranges available for automatically assigning egress IPs to this node from. If this field is set then EgressIPs should be treated as read-only.",
}
func (HostSubnet) SwaggerDoc() map[string]string {
return map_HostSubnet
}
var map_HostSubnetList = map[string]string{
"": "HostSubnetList is a collection of HostSubnets",
"metadata": "Standard object's metadata.",
"items": "Items is the list of host subnets",
}
func (HostSubnetList) SwaggerDoc() map[string]string {
return map_HostSubnetList
}
var map_NetNamespace = map[string]string{
"": "NetNamespace describes a single isolated network. When using the redhat/openshift-ovs-multitenant plugin, every Namespace will have a corresponding NetNamespace object with the same name. (When using redhat/openshift-ovs-subnet, NetNamespaces are not used.)",
"metadata": "Standard object's metadata.",
"netname": "NetName is the name of the network namespace. (This is the same as the object's name, but both fields must be set.)",
"netid": "NetID is the network identifier of the network namespace assigned to each overlay network packet. This can be manipulated with the \"oc adm pod-network\" commands.",
"egressIPs": "EgressIPs is a list of reserved IPs that will be used as the source for external traffic coming from pods in this namespace. (If empty, external traffic will be masqueraded to Node IPs.)",
}
func (NetNamespace) SwaggerDoc() map[string]string {
return map_NetNamespace
}
var map_NetNamespaceList = map[string]string{
"": "NetNamespaceList is a collection of NetNamespaces",
"metadata": "Standard object's metadata.",
"items": "Items is the list of net namespaces",
}
func (NetNamespaceList) SwaggerDoc() map[string]string {
return map_NetNamespaceList
}
// AUTO-GENERATED FUNCTIONS END HERE | |
TechnologyList.tsx | import React, {ReactElement} from 'react';
import dotnetLogo from '../../assets/logos/technologies/dotnet-logo.png';
// eslint-disable-next-line max-len
import aspNetCoreLogo from '../../assets/logos/technologies/aspdotnetcore-logo.png';
import efCoreLogo from '../../assets/logos/technologies/efcore-logo.png';
import dockerLogo from '../../assets/logos/technologies/docker-logo.png';
import vueLogo from '../../assets/logos/technologies/vue-logo.png';
import reactLogo from '../../assets/logos/technologies/react-logo.png';
import './TechnologyList.sass';
export default function | (): ReactElement {
return (
<div
// eslint-disable-next-line max-len
className="technologyList grid grid-flow-row gap-11 grid-cols-3 mx-auto p-10 auto-cols-auto justify-items-center"
>
<img
src={dotnetLogo}
alt="dotnet"
className="technology h-40 mx-10"
/>
<img
src={aspNetCoreLogo}
alt="aspnetcore"
className="technology h-40 mx-10"
/>
<img
src={efCoreLogo}
alt="efcore"
className="technology h-40 mx-10"
/>
<img
src={dockerLogo}
alt="aspnetcore"
className="technology h-40 mx-10"
/>
<img src={vueLogo} alt="vue" className="technology h-40 mx-10" />
<img
src={reactLogo}
alt="react"
className="technology h-40 mx-10"
/>
</div>
);
}
| TechnologyList |
ava_connect_clouds.py | import socket
from cmd_functions import is_ip_valid, is_port_valid
'''
A library that allows AVA to connect to various cloud services
'''
def send_to_cloud(socket, data):
"""
Send data over the specified socket to the associated cloud
socket = any socket object
data = a string or int to be sent over the specified socket
"""
try:
        # str() handles both str and int payloads; encode once when sending
        socket.send((str(data) + "\n").encode())
return True | def connect_ava_cloud(ip, port=25680):
"""
Connect to AVA Cloud and return prepared socket to the caller
ip = AVA Cloud's IP Address
port = AVA Cloud's Port (Optional, default is 25680)
"""
if is_ip_valid(ip) and is_port_valid(port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, int(port)))
data = s.recv(1024)
print("Received: ", repr(data))
return s
else:
return False | except Exception as exp:
print("Exception: " + str(exp))
return False
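# Minimal usage sketch (hypothetical address; assumes an AVA Cloud endpoint is listening):
# s = connect_ava_cloud("192.168.1.10")
# if s:
#     send_to_cloud(s, "hello")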
|
phone-grid.module.ts | import { NgModule } from '@angular/core';
import { SharedModule } from '@app/shared/shared.module';
import { WorkplacesSharedModule } from '@app/routes/workplaces/shared/shared.module';
import { PhoneGridComponent } from './phone-grid.component';
@NgModule({
imports: [
SharedModule,
WorkplacesSharedModule,
],
declarations: [
PhoneGridComponent,
],
exports: [ | })
export class PhoneGridModule {} | PhoneGridComponent,
] |
PercentageTest.py | '''
Tests the WindowDiff evaluation metric.
.. moduleauthor:: Chris Fournier <[email protected]>
'''
#===============================================================================
# Copyright (c) 2011-2012, Chris Fournier
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author nor the names of its contributors may
# be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
import unittest
from decimal import Decimal
from .Percentage import percentage, pairwise_percentage, \
find_boundary_position_freqs
from ..data.Samples import KAZANTSEVA2012_G5, KAZANTSEVA2012_G2, \
COMPLETE_AGREEMENT, LARGE_DISAGREEMENT
from .. import convert_positions_to_masses
class TestPercentage(unittest.TestCase):
'''
Test segmentation percentage.
'''
# pylint: disable=R0904
def test_identical(self):
'''
Test whether identical segmentations produce 1.0.
'''
# pylint: disable=C0324
segs_a = convert_positions_to_masses(
[1,1,1,1,1,2,2,2,3,3,3,3,3])
segs_b = convert_positions_to_masses(
[1,1,1,1,1,2,2,2,3,3,3,3,3])
self.assertEqual(percentage(segs_a, segs_b),1.0)
def test_no_boundaries(self):
'''
Test whether no segments versus some segments produce 0.0.
'''
# pylint: disable=C0324
segs_a = convert_positions_to_masses(
[1,1,1,1,1,1,1,1,1,1,1,1,1])
segs_b = convert_positions_to_masses(
[1,1,1,1,2,2,2,2,3,3,3,3,3])
self.assertEqual(percentage(segs_a, segs_b),0)
self.assertEqual(percentage(segs_b, segs_a),0)
def test_all_boundaries(self):
'''
Test whether all segments versus some segments produces 2/12, or 0.167.
'''
# pylint: disable=C0324
segs_a = convert_positions_to_masses(
[1,2,3,4,5,6,7,8,9,10,11,12,13])
segs_b = convert_positions_to_masses(
[1,1,1,1,2,2,2,2,3,3,3,3,3])
self.assertEqual(percentage(segs_a, segs_b),
Decimal('0.1666666666666666666666666667'))
self.assertEqual(percentage(segs_b, segs_a),
Decimal('0.1666666666666666666666666667'))
def test_all_and_no_boundaries(self):
|
def test_translated_boundary(self):
'''
        Test whether a mis-alignment involving 2/3 of the total segments produces
        0.33.
'''
# pylint: disable=C0324
segs_a = convert_positions_to_masses(
[1,1,1,1,1,2,2,2,3,3,3,3,3])
segs_b = convert_positions_to_masses(
[1,1,1,1,2,2,2,2,3,3,3,3,3])
self.assertEqual(percentage(segs_a, segs_b),
Decimal('0.3333333333333333333333333333'))
self.assertEqual(percentage(segs_b, segs_a),
Decimal('0.3333333333333333333333333333'))
def test_extra_boundary(self):
'''
        Test whether 1/3 of the segments being non-existent produces 0.66.
'''
# pylint: disable=C0324
segs_a = convert_positions_to_masses(
[1,1,1,1,1,2,2,2,3,3,3,3,3])
segs_b = convert_positions_to_masses(
[1,1,1,1,1,2,3,3,4,4,4,4,4])
self.assertEqual(percentage(segs_a, segs_b),
Decimal('0.6666666666666666666666666667'))
self.assertEqual(percentage(segs_b, segs_a),
Decimal('0.6666666666666666666666666667'))
def test_full_miss_and_misaligned(self):
'''
Test whether a full miss and a translated boundary out of 4 produces
0.25.
'''
# pylint: disable=C0324
segs_a = convert_positions_to_masses(
[1,1,1,1,2,2,2,2,3,3,3,3,3])
segs_b = convert_positions_to_masses(
[1,1,1,1,1,2,3,3,4,4,4,4,4])
self.assertEqual(percentage(segs_a, segs_b), Decimal('0.25'))
self.assertEqual(percentage(segs_b, segs_a), Decimal('0.25'))
class TestPairwisePercentage(unittest.TestCase):
# pylint: disable=R0904
'''
Test permuted pairwise percentage.
'''
def test_kazantseva2012_g5(self):
'''
Calculate permuted pairwise percentage on Group 5 from the dataset
collected in Kazantseva (2012).
'''
self.assertEqual(pairwise_percentage(KAZANTSEVA2012_G5),
(Decimal('0.1621263635243898401793138635'),
Decimal('0.1788409781886208812486660585'),
Decimal('0.03198409547946276978304443503'),
Decimal('0.03650576180519474391025947712')))
def test_kazantseva2012_g2(self):
'''
Calculate mean permuted pairwise percentage on Group 2 from the dataset
collected in Kazantseva (2012).
'''
self.assertEqual(pairwise_percentage(KAZANTSEVA2012_G2),
(Decimal('0.3398087832646656176067940768'),
Decimal('0.1948481072924021072633034332'),
Decimal('0.03796578491543144325163024138'),
Decimal('0.02515478248611697670879150623')))
def test_large_disagreement(self):
'''
Calculate mean permuted pairwise percentage on a theoretical dataset
containing large disagreement.
'''
self.assertEqual(pairwise_percentage(LARGE_DISAGREEMENT),
(0.0,
0.0,
0.0,
0.0))
def test_complete_agreement(self):
'''
Calculate mean permuted pairwise percentage on a theoretical dataset
containing complete agreement.
'''
self.assertEqual(pairwise_percentage(COMPLETE_AGREEMENT),
(1.0,
0.0,
0.0,
0.0))
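# Note (an assumption inferred from the expected tuples above): the four
# values returned by pairwise_percentage appear to be (mean, standard
# deviation, variance, standard error) -- in each dataset the third element
# equals the square of the second (e.g. 0.17884...**2 ~= 0.03198...).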
class TestPercentageUtils(unittest.TestCase):
# pylint: disable=R0904
'''
Test utility functions used to calculate percentage.
'''
def test_find_seg_positions(self):
'''
Test segmentation position frequency counting.
'''
# pylint: disable=C0324
seg_positions = find_boundary_position_freqs([[1,2,3,3,2,1],
[1,2,2,4,2,1]])
self.assertEqual(seg_positions, { 1: 2,
3: 2,
5: 1,
6: 1,
9: 2,
11: 2})
| '''
Test whether all segments versus no segments produces 0.0.
'''
# pylint: disable=C0324
segs_a = convert_positions_to_masses(
[1,2,3,4,5,6,7,8,9,10,11,12,13])
segs_b = convert_positions_to_masses(
[1,1,1,1,1,1,1,1,1,1,1,1,1])
self.assertEqual(percentage(segs_a, segs_b),0)
self.assertEqual(percentage(segs_b, segs_a),0) |
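# A runnable sketch of find_boundary_position_freqs as the test above implies
# it behaves -- an assumption, not the library's code: boundary positions are
# cumulative sums of the masses (excluding the final, text-ending mass), and
# the result counts how many coders placed a boundary at each position.
def _find_boundary_position_freqs_sketch(mass_lists):
    freqs = {}
    for masses in mass_lists:
        position = 0
        for mass in masses[:-1]:
            position += mass
            freqs[position] = freqs.get(position, 0) + 1
    return freqs
# e.g. _find_boundary_position_freqs_sketch([[1,2,3,3,2,1], [1,2,2,4,2,1]])
# == {1: 2, 3: 2, 5: 1, 6: 1, 9: 2, 11: 2}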
interface-declaration-metadata.ts | import { MultipleBodyTree } from '../../../tree/body/multiple/multiple-body-tree';
import { DeclarationTree } from '../../../tree/declaration/declaration-tree';
import { DeclarationStatementTree } from '../../../tree/statement/declaration/declaration-statement-tree';
import { SourceReference } from '../../../util/source-reference';
import { DeclarationScope } from '../../declaration-scope';
import { ExpressionMetadata } from '../../expression/expression-metadata';
import { getExpressionMetadata } from '../../expression/expression-metadata-helper';
import { DeclarationMetadata } from '../declaration-metadata';
import { getDeclarationsMetadata } from '../declaration-metadata-helper';
export class InterfaceDeclarationMetadata implements DeclarationMetadata {
sourceReference: SourceReference;
name: string;
constructor(private node: DeclarationTree, private scope: DeclarationScope) {
this.sourceReference = node.sourceReference;
this.name = node.id.name.text;
}
generics(): ExpressionMetadata[] {
throw new Error('Not implemented');
// return this.node.id.generics.map((x) => getExpressionMetadata(x, this.scope));
}
attributes(): DeclarationMetadata[] {
const ancestorsAttributes = this.ancestors().flatMap((x) => x.attributes());
if (this.node.body instanceof MultipleBodyTree) {
const currentAttributes = getDeclarationsMetadata(
this.node.body.statements
.filter((x) => x instanceof DeclarationStatementTree)
.map((x) => x as DeclarationStatementTree)
.map((x) => x.declaration),
this.scope,
);
return [...currentAttributes, ...ancestorsAttributes];
}
return ancestorsAttributes;
}
ancestors(): ExpressionMetadata[] {
throw new Error('Not implemented');
// return this.node.ancestors.map((x) => getExpressionMetadata(x, this.scope));
}
}
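// Illustrative reading of the sample below (not part of the library): it
// shows the ancestor chain that attributes() walks. The class implements B,
// and B extends A, so once ancestors() is implemented, metadata for B would
// merge its own declaration f2 with the inherited f from A.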
interface A {
  f();
}
interface B extends A {
  f2(n: number): any;
}
class | implements B{
f2(n: number) {
throw new Error('Method not implemented.');
}
f() {
throw new Error('Method not implemented.');
}
} | C |
json_deser.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn parse_generic_error(
response: &http::Response<bytes::Bytes>,
) -> Result<smithy_types::Error, smithy_json::deserialize::Error> {
crate::json_errors::parse_generic_error(response)
}
pub fn deser_structure_cloud_trail_arn_invalid_exceptionjson_err(
input: &[u8],
mut builder: crate::error::cloud_trail_arn_invalid_exception::Builder,
) -> Result<crate::error::cloud_trail_arn_invalid_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_tag_parameter_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_tag_parameter_exception::Builder,
) -> Result<crate::error::invalid_tag_parameter_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_trail_name_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_trail_name_exception::Builder,
) -> Result<crate::error::invalid_trail_name_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_not_organization_master_account_exceptionjson_err(
input: &[u8],
mut builder: crate::error::not_organization_master_account_exception::Builder,
) -> Result<
crate::error::not_organization_master_account_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_operation_not_permitted_exceptionjson_err(
input: &[u8],
mut builder: crate::error::operation_not_permitted_exception::Builder,
) -> Result<crate::error::operation_not_permitted_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_resource_not_found_exceptionjson_err(
input: &[u8],
mut builder: crate::error::resource_not_found_exception::Builder,
) -> Result<crate::error::resource_not_found_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_resource_type_not_supported_exceptionjson_err(
input: &[u8],
mut builder: crate::error::resource_type_not_supported_exception::Builder,
) -> Result<
crate::error::resource_type_not_supported_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_tags_limit_exceeded_exceptionjson_err(
input: &[u8],
mut builder: crate::error::tags_limit_exceeded_exception::Builder,
) -> Result<crate::error::tags_limit_exceeded_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_unsupported_operation_exceptionjson_err(
input: &[u8],
mut builder: crate::error::unsupported_operation_exception::Builder,
) -> Result<crate::error::unsupported_operation_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_cloud_trail_access_not_enabled_exceptionjson_err(
input: &[u8],
mut builder: crate::error::cloud_trail_access_not_enabled_exception::Builder,
) -> Result<
crate::error::cloud_trail_access_not_enabled_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_cloud_trail_invalid_client_token_id_exceptionjson_err(
input: &[u8],
mut builder: crate::error::cloud_trail_invalid_client_token_id_exception::Builder,
) -> Result<
crate::error::cloud_trail_invalid_client_token_id_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_cloud_watch_logs_delivery_unavailable_exceptionjson_err(
input: &[u8],
mut builder: crate::error::cloud_watch_logs_delivery_unavailable_exception::Builder,
) -> Result<
crate::error::cloud_watch_logs_delivery_unavailable_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_insufficient_dependency_service_access_permission_exceptionjson_err(
input: &[u8],
mut builder: crate::error::insufficient_dependency_service_access_permission_exception::Builder,
) -> Result<
crate::error::insufficient_dependency_service_access_permission_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_insufficient_encryption_policy_exceptionjson_err(
input: &[u8],
mut builder: crate::error::insufficient_encryption_policy_exception::Builder,
) -> Result<
crate::error::insufficient_encryption_policy_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_insufficient_s3_bucket_policy_exceptionjson_err(
input: &[u8],
mut builder: crate::error::insufficient_s3_bucket_policy_exception::Builder,
) -> Result<
crate::error::insufficient_s3_bucket_policy_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_insufficient_sns_topic_policy_exceptionjson_err(
input: &[u8],
mut builder: crate::error::insufficient_sns_topic_policy_exception::Builder,
) -> Result<
crate::error::insufficient_sns_topic_policy_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_cloud_watch_logs_log_group_arn_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_cloud_watch_logs_log_group_arn_exception::Builder,
) -> Result<
crate::error::invalid_cloud_watch_logs_log_group_arn_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_cloud_watch_logs_role_arn_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_cloud_watch_logs_role_arn_exception::Builder,
) -> Result<
crate::error::invalid_cloud_watch_logs_role_arn_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_kms_key_id_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_kms_key_id_exception::Builder,
) -> Result<crate::error::invalid_kms_key_id_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_parameter_combination_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_parameter_combination_exception::Builder,
) -> Result<
crate::error::invalid_parameter_combination_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_s3_bucket_name_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_s3_bucket_name_exception::Builder,
) -> Result<crate::error::invalid_s3_bucket_name_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_s3_prefix_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_s3_prefix_exception::Builder,
) -> Result<crate::error::invalid_s3_prefix_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_sns_topic_name_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_sns_topic_name_exception::Builder,
) -> Result<crate::error::invalid_sns_topic_name_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_kms_exceptionjson_err(
input: &[u8],
mut builder: crate::error::kms_exception::Builder,
) -> Result<crate::error::kms_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_kms_key_disabled_exceptionjson_err(
input: &[u8],
mut builder: crate::error::kms_key_disabled_exception::Builder,
) -> Result<crate::error::kms_key_disabled_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_kms_key_not_found_exceptionjson_err(
input: &[u8],
mut builder: crate::error::kms_key_not_found_exception::Builder,
) -> Result<crate::error::kms_key_not_found_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_maximum_number_of_trails_exceeded_exceptionjson_err(
input: &[u8],
mut builder: crate::error::maximum_number_of_trails_exceeded_exception::Builder,
) -> Result<
crate::error::maximum_number_of_trails_exceeded_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_organization_not_in_all_features_mode_exceptionjson_err(
input: &[u8],
mut builder: crate::error::organization_not_in_all_features_mode_exception::Builder,
) -> Result<
crate::error::organization_not_in_all_features_mode_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_organizations_not_in_use_exceptionjson_err(
input: &[u8],
mut builder: crate::error::organizations_not_in_use_exception::Builder,
) -> Result<
crate::error::organizations_not_in_use_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_s3_bucket_does_not_exist_exceptionjson_err(
input: &[u8],
mut builder: crate::error::s3_bucket_does_not_exist_exception::Builder,
) -> Result<
crate::error::s3_bucket_does_not_exist_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_trail_already_exists_exceptionjson_err(
input: &[u8],
mut builder: crate::error::trail_already_exists_exception::Builder,
) -> Result<crate::error::trail_already_exists_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_trail_not_provided_exceptionjson_err(
input: &[u8],
mut builder: crate::error::trail_not_provided_exception::Builder,
) -> Result<crate::error::trail_not_provided_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
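// The operation deserializers below follow the same key loop, but map several
// typed fields: strings via expect_string_or_null, booleans via
// expect_bool_or_null, and timestamps via expect_timestamp_or_null with the
// epoch-seconds format.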
pub fn deser_operation_create_trail(
input: &[u8],
mut builder: crate::output::create_trail_output::Builder,
) -> Result<crate::output::create_trail_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Name" => {
builder = builder.set_name(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"S3BucketName" => {
builder = builder.set_s3_bucket_name(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"S3KeyPrefix" => {
builder = builder.set_s3_key_prefix(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"SnsTopicName" => {
builder = builder.set_sns_topic_name(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"SnsTopicARN" => {
builder = builder.set_sns_topic_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"IncludeGlobalServiceEvents" => {
builder = builder.set_include_global_service_events(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
"IsMultiRegionTrail" => {
builder = builder.set_is_multi_region_trail(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LogFileValidationEnabled" => {
builder = builder.set_log_file_validation_enabled(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
"CloudWatchLogsLogGroupArn" => {
builder = builder.set_cloud_watch_logs_log_group_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"CloudWatchLogsRoleArn" => {
builder = builder.set_cloud_watch_logs_role_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"KmsKeyId" => {
builder = builder.set_kms_key_id(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"IsOrganizationTrail" => {
builder = builder.set_is_organization_trail(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_conflict_exceptionjson_err(
input: &[u8],
mut builder: crate::error::conflict_exception::Builder,
) -> Result<crate::error::conflict_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_home_region_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_home_region_exception::Builder,
) -> Result<crate::error::invalid_home_region_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_trail_not_found_exceptionjson_err(
input: &[u8],
mut builder: crate::error::trail_not_found_exception::Builder,
) -> Result<crate::error::trail_not_found_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_describe_trails(
input: &[u8],
mut builder: crate::output::describe_trails_output::Builder,
) -> Result<crate::output::describe_trails_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"trailList" => {
builder = builder
.set_trail_list(crate::json_deser::deser_list_trail_list(tokens)?);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_get_event_selectors(
input: &[u8],
mut builder: crate::output::get_event_selectors_output::Builder,
) -> Result<crate::output::get_event_selectors_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"EventSelectors" => {
builder = builder.set_event_selectors(
crate::json_deser::deser_list_event_selectors(tokens)?,
);
}
"AdvancedEventSelectors" => {
builder = builder.set_advanced_event_selectors(
crate::json_deser::deser_list_advanced_event_selectors(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_insight_not_enabled_exceptionjson_err(
input: &[u8],
mut builder: crate::error::insight_not_enabled_exception::Builder,
) -> Result<crate::error::insight_not_enabled_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_get_insight_selectors(
input: &[u8],
mut builder: crate::output::get_insight_selectors_output::Builder,
) -> Result<crate::output::get_insight_selectors_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"InsightSelectors" => {
builder = builder.set_insight_selectors(
crate::json_deser::deser_list_insight_selectors(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_get_trail(
input: &[u8],
mut builder: crate::output::get_trail_output::Builder,
) -> Result<crate::output::get_trail_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Trail" => {
builder =
builder.set_trail(crate::json_deser::deser_structure_trail(tokens)?);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_get_trail_status(
input: &[u8],
mut builder: crate::output::get_trail_status_output::Builder,
) -> Result<crate::output::get_trail_status_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"IsLogging" => {
builder = builder.set_is_logging(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
"LatestDeliveryError" => {
builder = builder.set_latest_delivery_error(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LatestNotificationError" => {
builder = builder.set_latest_notification_error(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LatestDeliveryTime" => {
builder = builder.set_latest_delivery_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"LatestNotificationTime" => {
builder = builder.set_latest_notification_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"StartLoggingTime" => {
builder = builder.set_start_logging_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"StopLoggingTime" => {
builder = builder.set_stop_logging_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"LatestCloudWatchLogsDeliveryError" => {
builder = builder.set_latest_cloud_watch_logs_delivery_error(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LatestCloudWatchLogsDeliveryTime" => {
builder = builder.set_latest_cloud_watch_logs_delivery_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"LatestDigestDeliveryTime" => {
builder = builder.set_latest_digest_delivery_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"LatestDigestDeliveryError" => {
builder = builder.set_latest_digest_delivery_error(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LatestDeliveryAttemptTime" => {
builder = builder.set_latest_delivery_attempt_time(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LatestNotificationAttemptTime" => {
builder = builder.set_latest_notification_attempt_time(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LatestNotificationAttemptSucceeded" => {
builder = builder.set_latest_notification_attempt_succeeded(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LatestDeliveryAttemptSucceeded" => {
builder = builder.set_latest_delivery_attempt_succeeded(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"TimeLoggingStarted" => {
builder = builder.set_time_logging_started(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"TimeLoggingStopped" => {
builder = builder.set_time_logging_stopped(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
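// Error-shape deserializers mirror the operation deserializers but target the
// generated error builders; the modeled CloudTrail errors carry a single
// "Message" member.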
pub fn deser_structure_invalid_time_range_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_time_range_exception::Builder,
) -> Result<crate::error::invalid_time_range_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_token_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_token_exception::Builder,
) -> Result<crate::error::invalid_token_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_list_public_keys(
input: &[u8],
mut builder: crate::output::list_public_keys_output::Builder,
) -> Result<crate::output::list_public_keys_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"PublicKeyList" => {
builder = builder.set_public_key_list(
crate::json_deser::deser_list_public_key_list(tokens)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_list_tags(
input: &[u8],
mut builder: crate::output::list_tags_output::Builder,
) -> Result<crate::output::list_tags_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"ResourceTagList" => {
builder = builder.set_resource_tag_list(
crate::json_deser::deser_list_resource_tag_list(tokens)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_list_trails(
input: &[u8],
mut builder: crate::output::list_trails_output::Builder,
) -> Result<crate::output::list_trails_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Trails" => {
builder = builder.set_trails(crate::json_deser::deser_list_trails(tokens)?);
}
"NextToken" => {
builder = builder.set_next_token(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_event_category_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_event_category_exception::Builder,
) -> Result<crate::error::invalid_event_category_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_lookup_attributes_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_lookup_attributes_exception::Builder,
) -> Result<
crate::error::invalid_lookup_attributes_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_max_results_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_max_results_exception::Builder,
) -> Result<crate::error::invalid_max_results_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_next_token_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_next_token_exception::Builder,
) -> Result<crate::error::invalid_next_token_exception::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_lookup_events(
input: &[u8],
mut builder: crate::output::lookup_events_output::Builder,
) -> Result<crate::output::lookup_events_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Events" => {
builder =
builder.set_events(crate::json_deser::deser_list_events_list(tokens)?);
}
"NextToken" => {
builder = builder.set_next_token(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_event_selectors_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_event_selectors_exception::Builder,
) -> Result<crate::error::invalid_event_selectors_exception::Builder, smithy_json::deserialize::Error>
{
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_put_event_selectors(
input: &[u8],
mut builder: crate::output::put_event_selectors_output::Builder,
) -> Result<crate::output::put_event_selectors_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"EventSelectors" => {
builder = builder.set_event_selectors(
crate::json_deser::deser_list_event_selectors(tokens)?,
);
}
"AdvancedEventSelectors" => {
builder = builder.set_advanced_event_selectors(
crate::json_deser::deser_list_advanced_event_selectors(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_invalid_insight_selectors_exceptionjson_err(
input: &[u8],
mut builder: crate::error::invalid_insight_selectors_exception::Builder,
) -> Result<
crate::error::invalid_insight_selectors_exception::Builder,
smithy_json::deserialize::Error,
> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_put_insight_selectors(
input: &[u8],
mut builder: crate::output::put_insight_selectors_output::Builder,
) -> Result<crate::output::put_insight_selectors_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"InsightSelectors" => {
builder = builder.set_insight_selectors(
crate::json_deser::deser_list_insight_selectors(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_update_trail(
input: &[u8],
mut builder: crate::output::update_trail_output::Builder,
) -> Result<crate::output::update_trail_output::Builder, smithy_json::deserialize::Error> {
let mut tokens_owned =
smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
.peekable();
let tokens = &mut tokens_owned;
smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Name" => {
builder = builder.set_name(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"S3BucketName" => {
builder = builder.set_s3_bucket_name(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"S3KeyPrefix" => {
builder = builder.set_s3_key_prefix(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"SnsTopicName" => {
builder = builder.set_sns_topic_name(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"SnsTopicARN" => {
builder = builder.set_sns_topic_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"IncludeGlobalServiceEvents" => {
builder = builder.set_include_global_service_events(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
"IsMultiRegionTrail" => {
builder = builder.set_is_multi_region_trail(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LogFileValidationEnabled" => {
builder = builder.set_log_file_validation_enabled(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
"CloudWatchLogsLogGroupArn" => {
builder = builder.set_cloud_watch_logs_log_group_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"CloudWatchLogsRoleArn" => {
builder = builder.set_cloud_watch_logs_role_arn(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"KmsKeyId" => {
builder = builder.set_kms_key_id(
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"IsOrganizationTrail" => {
builder = builder.set_is_organization_trail(
smithy_json::deserialize::token::expect_bool_or_null(tokens.next())?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
if tokens.next().is_some() {
return Err(smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
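// Some operations return an empty response body; substituting "{}" lets the
// object-parsing loops above handle those responses uniformly.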
pub fn or_empty_doc(data: &[u8]) -> &[u8] {
if data.is_empty() {
b"{}"
} else {
data
}
}
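// List deserializers map JSON null to None and otherwise collect elements
// until the matching end-array token; null elements inside an array are
// dropped rather than stored.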
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_trail_list<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Trail>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_trail(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_event_selectors<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::EventSelector>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_event_selector(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn | <'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::AdvancedEventSelector>>,
smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_advanced_event_selector(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_insight_selectors<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::InsightSelector>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_insight_selector(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
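// Structure deserializers populate the generated model builders field by
// field; members absent from the payload simply leave their Option as None.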
pub fn deser_structure_trail<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Trail>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Trail::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Name" => {
builder = builder.set_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"S3BucketName" => {
builder = builder.set_s3_bucket_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"S3KeyPrefix" => {
builder = builder.set_s3_key_prefix(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"SnsTopicName" => {
builder = builder.set_sns_topic_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"SnsTopicARN" => {
builder = builder.set_sns_topic_arn(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"IncludeGlobalServiceEvents" => {
builder = builder.set_include_global_service_events(
smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"IsMultiRegionTrail" => {
builder = builder.set_is_multi_region_trail(
smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"HomeRegion" => {
builder = builder.set_home_region(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LogFileValidationEnabled" => {
builder = builder.set_log_file_validation_enabled(
smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"CloudWatchLogsLogGroupArn" => {
builder = builder.set_cloud_watch_logs_log_group_arn(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"CloudWatchLogsRoleArn" => {
builder = builder.set_cloud_watch_logs_role_arn(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"KmsKeyId" => {
builder = builder.set_kms_key_id(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"HasCustomEventSelectors" => {
builder = builder.set_has_custom_event_selectors(
smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"HasInsightSelectors" => {
builder = builder.set_has_insight_selectors(
smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"IsOrganizationTrail" => {
builder = builder.set_is_organization_trail(
smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_public_key_list<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::PublicKey>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_public_key(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_resource_tag_list<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::ResourceTag>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_resource_tag(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_trails<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::TrailInfo>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_trail_info(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_events_list<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Event>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_event(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
pub fn deser_structure_event_selector<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EventSelector>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::EventSelector::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
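                            // ReadWriteType is modeled as an enum: the string is
                            // converted via From<&str>, which in smithy-generated
                            // enums typically maps unrecognized values to an
                            // Unknown variant instead of failing.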
"ReadWriteType" => {
builder = builder.set_read_write_type(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::ReadWriteType::from(u.as_ref()))
})
.transpose()?,
);
}
"IncludeManagementEvents" => {
builder = builder.set_include_management_events(
smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"DataResources" => {
builder = builder.set_data_resources(
crate::json_deser::deser_list_data_resources(tokens)?,
);
}
"ExcludeManagementEventSources" => {
builder = builder.set_exclude_management_event_sources(
crate::json_deser::deser_list_exclude_management_event_sources(
tokens,
)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_advanced_event_selector<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::AdvancedEventSelector>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::AdvancedEventSelector::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Name" => {
builder = builder.set_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"FieldSelectors" => {
builder = builder.set_field_selectors(
crate::json_deser::deser_list_advanced_field_selectors(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_insight_selector<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InsightSelector>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::InsightSelector::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"InsightType" => {
builder = builder.set_insight_type(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::InsightType::from(u.as_ref()))
})
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
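// PublicKey's "Value" member is a blob; expect_blob_or_null is expected to
// base64-decode the JSON string into bytes, per the AWS JSON protocol's
// blob encoding.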
pub fn deser_structure_public_key<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PublicKey>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::PublicKey::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Value" => {
builder = builder.set_value(
smithy_json::deserialize::token::expect_blob_or_null(
tokens.next(),
)?,
);
}
"ValidityStartTime" => {
builder = builder.set_validity_start_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"ValidityEndTime" => {
builder = builder.set_validity_end_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"Fingerprint" => {
builder = builder.set_fingerprint(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_resource_tag<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ResourceTag>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ResourceTag::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"ResourceId" => {
builder = builder.set_resource_id(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"TagsList" => {
builder = builder.set_tags_list(
crate::json_deser::deser_list_tags_list(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_trail_info<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::TrailInfo>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::TrailInfo::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"TrailARN" => {
builder = builder.set_trail_arn(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Name" => {
builder = builder.set_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"HomeRegion" => {
builder = builder.set_home_region(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
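// "CloudTrailEvent" arrives as a string containing the full raw event JSON;
// it is kept as an opaque string here rather than parsed further.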
pub fn deser_structure_event<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Event>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Event::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"EventId" => {
builder = builder.set_event_id(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"EventName" => {
builder = builder.set_event_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"ReadOnly" => {
builder = builder.set_read_only(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"AccessKeyId" => {
builder = builder.set_access_key_id(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"EventTime" => {
builder = builder.set_event_time(
smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"EventSource" => {
builder = builder.set_event_source(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Username" => {
builder = builder.set_username(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Resources" => {
builder = builder.set_resources(
crate::json_deser::deser_list_resource_list(tokens)?,
);
}
"CloudTrailEvent" => {
builder = builder.set_cloud_trail_event(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_data_resources<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::DataResource>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_data_resource(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_exclude_management_event_sources<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_advanced_field_selectors<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::AdvancedFieldSelector>>,
smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_advanced_field_selector(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_tags_list<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Tag>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_tag(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_resource_list<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Resource>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_resource(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
pub fn deser_structure_data_resource<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DataResource>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::DataResource::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Type" => {
builder = builder.set_type(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Values" => {
builder = builder.set_values(
crate::json_deser::deser_list_data_resource_values(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
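// All six operator members (Equals, StartsWith, EndsWith and their negations)
// share the same string-list shape, so each one reuses deser_list_operator.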
pub fn deser_structure_advanced_field_selector<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::AdvancedFieldSelector>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::AdvancedFieldSelector::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Field" => {
builder = builder.set_field(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Equals" => {
builder = builder
.set_equals(crate::json_deser::deser_list_operator(tokens)?);
}
"StartsWith" => {
builder = builder.set_starts_with(
crate::json_deser::deser_list_operator(tokens)?,
);
}
"EndsWith" => {
builder = builder
.set_ends_with(crate::json_deser::deser_list_operator(tokens)?);
}
"NotEquals" => {
builder = builder.set_not_equals(
crate::json_deser::deser_list_operator(tokens)?,
);
}
"NotStartsWith" => {
builder = builder.set_not_starts_with(
crate::json_deser::deser_list_operator(tokens)?,
);
}
"NotEndsWith" => {
builder = builder.set_not_ends_with(
crate::json_deser::deser_list_operator(tokens)?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_tag<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Tag>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Tag::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Key" => {
builder = builder.set_key(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Value" => {
builder = builder.set_value(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_resource<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Resource>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Resource::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"ResourceType" => {
builder = builder.set_resource_type(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"ResourceName" => {
builder = builder.set_resource_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_data_resource_values<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_operator<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
| deser_list_advanced_event_selectors |
index.ts | /*
* Copyright 2015 Palantir Technologies, Inc. All rights reserved.
* Licensed under the terms of the LICENSE file distributed with this project.
*/
import "@blueprintjs/test-commons/bootstrap";
import "./alert/alertTests";
import "./breadcrumbs/breadcrumbTests";
import "./buttons/buttonTests";
import "./callout/calloutTests";
import "./card/cardTests";
import "./collapse/collapseTests";
import "./collapsible-list/collapsibleListTests";
import "./common/propsTests";
import "./common/utils/compareUtilsTests";
import "./common/utilsTests";
import "./context-menu/contextMenuTests";
import "./controls/controlsTests";
import "./controls/inputGroupTests";
import "./controls/numericInputTests";
import "./controls/radioGroupTests";
import "./dialog/dialogTests";
import "./editable-text/editableTextTests";
import "./forms/fileInputTests";
import "./forms/formGroupTests";
import "./hotkeys/hotkeysTests";
import "./icon/iconTests";
import "./menu/menuItemTests";
import "./menu/menuTests"; | import "./non-ideal-state/nonIdealStateTests";
import "./overflow-list/overflowListTests";
import "./overlay/overlayTests";
import "./popover/popoverTests";
import "./portal/portalTests";
import "./progress/progressBarTests";
import "./slider/rangeSliderTests";
import "./slider/sliderTests";
import "./spinner/spinnerTests";
import "./tabs/tabsTests";
import "./tag-input/tagInputTests";
import "./tag/tagTests";
import "./text/textTests";
import "./toast/toasterTests";
import "./toast/toastTests";
import "./tooltip/tooltipTests";
import "./tree/treeTests"; | |
s3cleanup.py | import sys
import os
import boto3
import datetime
import argparse
def | (args):
# Env vars are always strings, so parse AWS_S3_USE_SSL explicitly ("false" would otherwise be truthy)
use_ssl = os.environ['AWS_S3_USE_SSL'].lower() in ('1', 'true', 'yes')
s3 = boto3.resource('s3', aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'], aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'], region_name=os.environ['AWS_S3_REGION_NAME'], endpoint_url=os.environ['AWS_S3_ENDPOINT_URL'], use_ssl=use_ssl)
bucket = s3.Bucket(args.bucket)
today = datetime.date.today()
last_month = today - datetime.timedelta(days=30)
prefix = last_month.strftime('%Y/%m')
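# Backup keys are assumed to follow a 'YYYY/MM/...' layout, so this prefix
# selects last month's objects; the newest one (last in the listing) is kept.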
backups = [o for o in bucket.objects.filter(Prefix=prefix)]
to_delete = backups[0:-1]
deleted = 0
for o in to_delete:
    print('Delete object %s' % o)
    o.delete()
    deleted += 1
if deleted:
print('%d backups deleted' % (deleted))
return 0
def main():
parser = argparse.ArgumentParser(description='Delete old database backups')
parser.add_argument('-b', dest='bucket', action='store', help='bucket name')
parser.add_argument('--force', action='store_true', default=False, help='Force check even if not in production environment')
args = parser.parse_args()
env = os.environ['IMAGE_TAG']
if env != 'prod' and not args.force:
return 0
else:
return cleanup_s3db(args)
if __name__ == '__main__':
sys.exit(main())
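# Usage sketch (assumes the AWS_* env vars above are set; bucket name illustrative):
#   IMAGE_TAG=prod python s3cleanup.py -b my-backup-bucket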
| cleanup_s3db |
assign.ts | import {IStatement} from "./_statement";
import {seq, alt, opt, tok, per, optPrio, altPrio} from "../combi";
import {InstanceArrow, StaticArrow} from "../../1_lexer/tokens";
import {FSTarget, Target, Source, Dynamic, Field, TypeName} from "../expressions";
import {IStatementRunnable} from "../statement_runnable";
export class Assign implements IStatement {
public getMatcher(): IStatementRunnable {
const component = seq("COMPONENT",
Source,
"OF STRUCTURE",
Source);
const tableField = seq("TABLE FIELD", alt(Source, Dynamic));
const arrow = alt(tok(InstanceArrow), tok(StaticArrow));
const source = alt(seq(Source, opt(seq(arrow, Dynamic))),
component,
tableField,
seq(Dynamic, opt(seq(arrow, alt(Field, Dynamic)))));
const type = seq("TYPE", alt(Dynamic, TypeName));
const like = seq("LIKE", alt(Dynamic, Source));
const handle = seq("TYPE HANDLE", Source); | const casting = seq("CASTING", opt(alt(like, handle, per(type, decimals))));
const obsoleteType = seq("TYPE", Source, optPrio(decimals));
const ret = seq("ASSIGN",
opt(seq(Target, "INCREMENT")),
source,
"TO",
FSTarget,
opt(altPrio(casting, obsoleteType)),
opt(range));
return ret;
}
} | const range = seq("RANGE", Source);
const decimals = seq("DECIMALS", Source);
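// A sketch of statements this matcher accepts (identifiers are illustrative):
//   ASSIGN COMPONENT 1 OF STRUCTURE ls_data TO <fs>.
//   ASSIGN ('LV_NAME') TO <fs> CASTING TYPE string.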
|
id_positive.rs | use crate::{common::*, with_header, Provider};
#[test]
fn int_id_without_default_should_have_strategy_none() {
let dml = indoc! {r#"
model Model {
id Int @id
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_scalar_field("id").assert_is_id(user_model);
}
#[test]
fn int_id_with_default_autoincrement_should_have_strategy_auto() {
let dml = indoc! {r#"
model Model {
id Int @id @default(autoincrement())
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_scalar_field("id").assert_is_id(user_model);
}
#[test]
fn should_allow_string_ids_with_cuid() {
let dml = indoc! {r#"
model Model {
id String @id @default(cuid())
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model
.assert_has_scalar_field("id")
.assert_is_id(user_model)
.assert_base_type(&ScalarType::String)
.assert_default_value(DefaultValue::new_expression(ValueGenerator::new_cuid()));
}
#[test]
fn should_allow_string_ids_with_uuid() {
let dml = indoc! {r#"
model Model {
id String @id @default(uuid())
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model
.assert_has_scalar_field("id")
.assert_is_id(user_model)
.assert_base_type(&ScalarType::String)
.assert_default_value(DefaultValue::new_expression(ValueGenerator::new_uuid()));
}
#[test]
fn should_allow_string_ids_without_default() {
let dml = indoc! {r#"
model Model {
id String @id
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model
.assert_has_scalar_field("id")
.assert_is_id(user_model)
.assert_base_type(&ScalarType::String);
}
#[test]
fn should_allow_string_ids_with_static_default() {
let dml = indoc! {r#"
model Model {
id String @id @default("")
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model
.assert_has_scalar_field("id")
.assert_is_id(user_model)
.assert_default_value(DefaultValue::new_single(PrismaValue::String(String::from(""))))
.assert_base_type(&ScalarType::String);
}
#[test]
fn should_allow_int_ids_with_static_default() {
let dml = indoc! {r#"
model Model {
id Int @id @default(0)
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model
.assert_has_scalar_field("id")
.assert_is_id(user_model)
.assert_default_value(DefaultValue::new_single(PrismaValue::Int(0)))
.assert_base_type(&ScalarType::Int);
}
#[test]
fn multi_field_ids_must_work() {
let dml = indoc! {r#"
model Model {
a String
b Int
@@id([a,b])
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_pk(PrimaryKeyDefinition {
name: None,
db_name: None,
fields: vec![PrimaryKeyField::new("a"), PrimaryKeyField::new("b")],
defined_on_field: false,
clustered: None,
});
}
#[test]
fn should_allow_unique_and_id_on_same_field() {
let dml = indoc! {r#"
model Model {
id Int @id @unique
}
"#};
let datamodel = parse(dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_pk(PrimaryKeyDefinition {
name: None,
db_name: None,
fields: vec![PrimaryKeyField::new("id")],
defined_on_field: true,
clustered: None,
});
user_model.assert_has_index(IndexDefinition {
name: None,
db_name: Some("Model_id_key".to_string()),
fields: vec![IndexField::new_in_model("id")],
tpe: IndexType::Unique,
defined_on_field: true,
algorithm: None,
clustered: None,
});
}
#[test]
fn unnamed_and_unmapped_multi_field_ids_must_work() {
let dml = with_header(
indoc! {r#"
model Model {
a String
b Int
@@id([a,b])
}
"#},
Provider::Postgres,
&[],
);
let datamodel = parse(&dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_id_fields(&["a", "b"]);
user_model.assert_has_named_pk("Model_pkey");
}
#[test]
fn unmapped_singular_id_must_work() {
let dml = with_header(
indoc! {r#"
model Model {
a String @id
}
"#},
Provider::Postgres,
&[],
);
let datamodel = parse(&dml);
let model = datamodel.assert_has_model("Model");
model.assert_has_id_fields(&["a"]);
model.assert_has_named_pk("Model_pkey");
}
#[test]
fn named_multi_field_ids_must_work() {
let dml = with_header(
indoc! {r#"
model Model {
a String
b Int
@@id([a,b], name: "compoundId")
}
"#},
Provider::Postgres,
&[],
);
let datamodel = parse(&dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_id_fields(&["a", "b"]);
user_model.assert_has_named_pk("Model_pkey");
}
#[test]
fn | () {
let dml = with_header(
indoc! {r#"
model Model {
a String
b Int
@@id([a,b], map:"dbname")
}
"#},
Provider::Postgres,
&[],
);
let datamodel = parse(&dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_id_fields(&["a", "b"]);
user_model.assert_has_named_pk("dbname");
}
#[test]
fn mapped_singular_id_must_work() {
let dml = with_header(
indoc! {r#"
model Model {
a String @id(map: "test")
}
model Model2 {
a String @id(map: "test2")
}
"#},
Provider::Postgres,
&[],
);
let datamodel = parse(&dml);
let model = datamodel.assert_has_model("Model");
model.assert_has_id_fields(&["a"]);
model.assert_has_named_pk("test");
let model2 = datamodel.assert_has_model("Model2");
model2.assert_has_id_fields(&["a"]);
model2.assert_has_named_pk("test2");
}
#[test]
fn named_and_mapped_multi_field_ids_must_work() {
let dml = with_header(
indoc! {r#"
model Model {
a String
b Int
@@id([a,b], name: "compoundId", map:"dbname")
}
"#},
Provider::Postgres,
&[],
);
let datamodel = parse(&dml);
let user_model = datamodel.assert_has_model("Model");
user_model.assert_has_id_fields(&["a", "b"]);
user_model.assert_has_named_pk("dbname");
}
#[test]
fn id_accepts_length_arg_on_mysql() {
let dml = with_header(
r#"
model User {
firstName String
middleName String
lastName String
@@id([firstName, middleName(length: 1), lastName])
}
model Blog {
title String @id(length:5)
}
"#,
Provider::Mysql,
&["extendedIndexes"],
);
let schema = parse(&dml);
let user_model = schema.assert_has_model("User");
let blog_model = schema.assert_has_model("Blog");
user_model.assert_has_pk(PrimaryKeyDefinition {
name: None,
db_name: None,
fields: vec![
PrimaryKeyField {
name: "firstName".to_string(),
sort_order: None,
length: None,
},
PrimaryKeyField {
name: "middleName".to_string(),
sort_order: None,
length: Some(1),
},
PrimaryKeyField {
name: "lastName".to_string(),
sort_order: None,
length: None,
},
],
defined_on_field: false,
clustered: None,
});
blog_model.assert_has_pk(PrimaryKeyDefinition {
name: None,
db_name: None,
fields: vec![PrimaryKeyField {
name: "title".to_string(),
sort_order: None,
length: Some(5),
}],
defined_on_field: true,
clustered: None,
});
}
#[test]
fn id_accepts_sort_arg_on_sqlserver() {
let dml = with_header(
r#"
model User {
firstName String
middleName String
lastName String
@@id([firstName, middleName(sort: Desc), lastName])
}
model Blog {
title String @id(sort: Desc)
}
"#,
Provider::SqlServer,
&["extendedIndexes"],
);
let schema = parse(&dml);
let user_model = schema.assert_has_model("User");
let blog_model = schema.assert_has_model("Blog");
user_model.assert_has_pk(PrimaryKeyDefinition {
name: None,
db_name: Some("User_pkey".to_string()),
fields: vec![
PrimaryKeyField {
name: "firstName".to_string(),
sort_order: None,
length: None,
},
PrimaryKeyField {
name: "middleName".to_string(),
sort_order: Some(SortOrder::Desc),
length: None,
},
PrimaryKeyField {
name: "lastName".to_string(),
sort_order: None,
length: None,
},
],
defined_on_field: false,
clustered: None,
});
blog_model.assert_has_pk(PrimaryKeyDefinition {
name: None,
db_name: Some("Blog_pkey".to_string()),
fields: vec![PrimaryKeyField {
name: "title".to_string(),
sort_order: Some(SortOrder::Desc),
length: None,
}],
defined_on_field: true,
clustered: None,
});
}
#[test]
fn mysql_allows_id_length_prefix() {
let dml = indoc! {r#"
model A {
id String @id(length: 30) @test.VarChar(255)
}
"#};
let schema = with_header(dml, Provider::Mysql, &["extendedIndexes"]);
assert!(datamodel::parse_schema(&schema).is_ok());
}
#[test]
fn mysql_allows_compound_id_length_prefix() {
let dml = indoc! {r#"
model A {
a String @test.VarChar(255)
b String @test.VarChar(255)
@@id([a(length: 10), b(length: 20)])
}
"#};
let schema = with_header(dml, Provider::Mysql, &["extendedIndexes"]);
assert!(datamodel::parse_schema(&schema).is_ok());
}
#[test]
fn mssql_allows_id_sort_argument() {
let dml = indoc! {r#"
model A {
id Int @id(sort: Desc)
}
"#};
let schema = with_header(dml, Provider::SqlServer, &["extendedIndexes"]);
assert!(datamodel::parse_schema(&schema).is_ok());
}
#[test]
fn mssql_allows_compound_id_sort_argument() {
let dml = indoc! {r#"
model A {
a String @test.VarChar(255)
b String @test.VarChar(255)
@@id([a(sort: Asc), b(sort: Desc)])
}
"#};
let schema = with_header(dml, Provider::SqlServer, &["extendedIndexes"]);
assert!(datamodel::parse_schema(&schema).is_ok());
}
#[test]
fn mongodb_compound_unique_can_have_id_as_part_of_it() {
let dml = indoc! {r#"
model User {
id String @id @map("_id") @test.ObjectId
di Int
@@unique([id, di])
}
"#};
let schema = with_header(dml, Provider::Mongo, &[]);
assert!(datamodel::parse_schema(&schema).is_ok());
}
| mapped_multi_field_ids_must_work |
0001_initial.py | # Generated by Django 2.1.15 on 2020-02-16 11:10
# flake8: noqa
from django.db import migrations, models
class Migration(migrations.Migration):
| initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
] |
|
info.go | package serializer
// BindInfo user serializer
type BindInfo struct {
ErrCode int `json:"errcode"`
IsBind int `json:"is_bind"`
CorpCode string `json:"corp_code"`
}
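// Serialized JSON shape (field values are illustrative):
//   {"errcode":0,"is_bind":1,"corp_code":"acme"}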
// BuildBindInfo serializes the bind info | ErrCode:errCode,
IsBind:isBind,
CorpCode:corpCode,
}
}
func BuildBindInfoResponse(errCode int, isBind int, corpCode string) Response {
return Response{
Data: BuildBindInfo(errCode, isBind, corpCode),
}
} | func BuildBindInfo(errCode int, isBind int, corpCode string) BindInfo {
return BindInfo{ |
cadastro-aluno.controller.ts | import {
Body,
Controller,
Inject,
InternalServerErrorException,
NotFoundException,
Post,
UnprocessableEntityException,
} from '@nestjs/common';
import { ApiInternalServerErrorResponse, ApiTags } from '@nestjs/swagger';
import { CadastrarAluno } from '../../../_business/alunos/casos-de-uso/cadastrar-aluno.feat';
import { Auth } from '../../../_adapters/auth/decorators/auth.decorator';
import { TipoUsuario } from '../../../_business/usuarios/casos-de-uso/cadastrar-novo-usuario.feat';
import { CreateAlunoDto } from '../../../_adapters/alunos/dto/create-aluno.dto';
import { Usuario } from '../../../_business/usuarios/entidades/usuario.entity';
import { User } from '../../../_adapters/auth/decorators/user.decorator';
import {
UsuarioInvalidoError,
UsuarioNaoEncontradoError,
} from '../../../_business/usuarios/erros/usuarios.errors';
@ApiTags('Aluno')
@Controller('cadastro-aluno')
export class CadastroAlunoController {
constructor(
@Inject(CadastrarAluno)
private readonly cadastrarAluno: CadastrarAluno,
) {}
@ApiInternalServerErrorResponse({
description: 'Erro genérico',
})
@Post()
@Auth(TipoUsuario.ALUNO)
async cadastrar(
@Body() dto: CreateAlunoDto,
@User() user: Usuario,
): Promise<void> {
try {
await this.cadastrarAluno.execute({
...dto,
usuario: user, | switch (true) {
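// Translate known domain errors into HTTP exceptions; anything
// unexpected falls through to a generic 500 below.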
case e instanceof UsuarioNaoEncontradoError:
throw new NotFoundException(e);
case e instanceof UsuarioInvalidoError:
throw new UnprocessableEntityException(e);
default:
throw new InternalServerErrorException({
code: 500,
message: 'Erro genérico',
});
}
}
}
} | });
} catch (e) { |
open_pr.py | import click
from doing.utils import get_config
from doing.utils import get_repo_name
from typing import Union
def | (pullrequest_id: Union[str, int]) -> None:
"""
Open a specific PULLREQUEST_ID. '!' prefix is allowed.
"""
pullrequest_id = str(pullrequest_id).lstrip("!").strip()
project = get_config("project")
organization = get_config("organization")
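    # The launched URL has the shape <organization>/<project>/_git/<repo>/pullrequest/<id>,
    # assuming `organization` is configured as the full https://dev.azure.com/<org> URL.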
click.launch(f"{organization}/{project}/_git/{get_repo_name()}/pullrequest/{pullrequest_id}")
| cmd_open_pr |
input.js | import ToolPen from './tools/tool-pen.js'
import ToolEraser from './tools/tool-eraser.js'
import ToolLine from './tools/tool-line.js'
import ToolRect from './tools/tool-rect.js'
import ToolText from './tools/tool-text.js'
import { paper } from './paper-canvas'
// import { drawingCanvas } from './main.js'
import { dist } from './helper.js'
import ToolImage from './tools/tool-image.js'
export let tools = {
"tool-type-pen": new ToolPen(),
"tool-type-eraser": new ToolEraser(),
"tool-type-marker": new ToolPen(true),
"tool-type-line": new ToolLine(),
"tool-type-rect": new ToolRect(),
"tool-type-ellipse": null,
"tool-type-text": new ToolText(),
"tool-type-image": new ToolImage(),
"tool-type-line-width": null
}
export let activeTool = tools["tool-type-pen"];
let ZOOM_SPEED = 0.004;
let PINCH_THRESHOLD = 50;
let touchesCache = [];
let touchesCacheBegin = [];
let viewMatrixTouchStart = new paper.Matrix();
let handleTouchType = ""
export function setActiveTool(id) {
activeTool = tools[id];
window.appData.rightPanel.setToolPanel(activeTool.getSettingsPanel())
}
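// Usage sketch (ids mirror the keys of `tools` above):
//   setActiveTool("tool-type-eraser");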
export default function init_input(element) {
let el = element;
// POINTER
el.onpointerdown = function (e) {
// e.preventDefault();
console.log("onpointerdown");
let project_pt = appData.drawingCanvas.getTransformedPointer(e.offsetX, e.offsetY);
if (e.pointerType == "touch") {
if (touchesCache.length == 0) {
activeTool.tooldown(project_pt.x, project_pt.y, e.pressure);
} else {
activeTool.toolcancel();
viewMatrixTouchStart = new paper.Matrix(paper.view.matrix)
// touchZoomCache = appData.drawingCanvas.getZoom();
}
touchesCacheBegin.push(e);
touchesCache.push(e);
} else {
// let project_pt = appData.drawingCanvas.getTransformedPointer(e.offsetX, e.offsetY);
activeTool.tooldown(project_pt.x, project_pt.y, e.pressure);
}
};
function mouseOrPen(e) { return (e.pointerType == "mouse" || e.pointerType == "pen") }
el.onpointermove = function (e) {
// e.preventDefault()
// console.log("onpointermove");
if ((e.buttons == 1 && mouseOrPen(e)) || (e.pointerType == 'touch' && touchesCache.length < 2)) {
if (!activeTool.tool_canceled) {
let project_pt = appData.drawingCanvas.getTransformedPointer(e.offsetX, e.offsetY);
activeTool.toolmove(project_pt.x, project_pt.y, e.pressure);
}
} else if (e.buttons == 4 && mouseOrPen(e)) {
let offset = new Point(e.movementX, e.movementY)
appData.drawingCanvas.offset(offset.divide(appData.drawingCanvas.getZoom()));
}
else if (touchesCache.length == 2 && e.pointerType == "touch") {
let index = touchesCache.findIndex((el) => { return e.pointerId === el.pointerId });
touchesCache[index] = e;
handlePanZoom();
}
activeTool.toolpreviewmove(appData.drawingCanvas.getTransformedPointer(e.offsetX, e.offsetY))
};
el.onpointerup = function (e) {
console.log("onpointerup");
let project_pt = appData.drawingCanvas.getTransformedPointer(e.offsetX, e.offsetY);
if (e.pointerType == "touch") {
touchesCache = touchesCache.filter((cache_event) => (cache_event.pointerId !== e.pointerId));
touchesCacheBegin = touchesCacheBegin.filter((cache_event) => (cache_event.pointerId !== e.pointerId));
handleTouchType = "";
if (!activeTool.tool_canceled) {
activeTool.toolup(project_pt.x, project_pt.y, e.pressure);
}
} else {
activeTool.toolup(project_pt.x, project_pt.y, e.pressure);
}
};
// WHEEL
el.onwheel = function (e) {
e.preventDefault();
if (e.ctrlKey) {
// ctrl is used as the indicator for pinch gestures... (Not a fan...)
appData.drawingCanvas.zoom(1 + e.wheelDeltaY * ZOOM_SPEED, new Point(e.offsetX, e.offsetY))
// zoom(, 1 + e.wheelDeltaY);
} else {
let scroll_speed = 0.5;
let offset = new Point(e.wheelDeltaX * scroll_speed, e.wheelDeltaY * scroll_speed);
appData.drawingCanvas.offset(offset.divide(appData.drawingCanvas.getZoom()));
}
};
el.addEventListener("touchstart", (e) => {
e.preventDefault();
}, { passive: false });
el.addEventListener("gesturestart", (e) => {
e.preventDefault();
}, { passive: false });
// el.ontouchstart = (e) => {
// e.preventDefault();
// };
// el.ontouchmove = (e) => {
// // e.preventDefault();
// };
}
function handlePanZoom() {
// Get all relevant points in the coordinate system at the start of the 2 finger interaction
let [start1, start2, current1, current2] = getTransformedPoints(viewMatrixTouchStart)
// calculate the center points of touch start and current (all in the coordinate system at the time of touch start)
let currentCenter = current1.add(current2).multiply(0.5);
let startCenter = start1.add(start2).multiply(0.5);
// calculate distances for threshold test and zoom factor
let distStart = dist(start1, start2);
let distCurrent = dist(current1, current2);
// activate both (pan&zoom) if pinch threshold is exceeded (pan is always handled)
let pinchDistDelta = Math.abs(distStart - distCurrent) * viewMatrixTouchStart.scaling.x;
if (handleTouchType === "" && pinchDistDelta > PINCH_THRESHOLD) {
handleTouchType = "both"
}
// calculate offset vector (in touch start coord system)
let offset = currentCenter.subtract(startCenter)
let newM = new paper.Matrix(viewMatrixTouchStart)
newM.translate(offset)
// pinch zoom
if (handleTouchType === "both") {
// get the current center point for the scale operation in the translated newM system
let centerInIdentitySpace = viewMatrixTouchStart.transform(currentCenter)
let centerInNewM = newM.inverseTransform(centerInIdentitySpace)
// calculate total zoom factor based on the distance realation between start and current
let zoom = distCurrent / distStart;
// apply the zoom with the correct center
newM.scale(zoom, centerInNewM)
}
// apply the new matrix
appData.drawingCanvas.setMatrix(newM)
}
function getTransformedPoints(matrix) {
let drawC = appData.drawingCanvas
let cx = drawC.canvas.getBoundingClientRect().x;
let cy = drawC.canvas.getBoundingClientRect().y;
let start1 = matrix.inverseTransform(touchesCacheBegin[0].clientX - cx, touchesCacheBegin[0].clientY - cy);
let start2 = matrix.inverseTransform(touchesCacheBegin[1].clientX - cx, touchesCacheBegin[1].clientY - cy);
let current1 = matrix.inverseTransform(touchesCache[0].clientX - cx, touchesCache[0].clientY - cy);
let current2 = matrix.inverseTransform(touchesCache[1].clientX - cx, touchesCache[1].clientY - cy);
return [start1, start2, current1, current2]
}
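// All four points share the touch-start coordinate frame (the same matrix is
// inverted for each), so centers and distances computed from them are directly
// comparable between the begin and current touch states.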
// The following functions are DEPRECATED
// function handlePanZoomSingle() {
// let drawC = appData.drawingCanvas
// let cx = drawC.canvas.getBoundingClientRect().x;
// let cy = drawC.canvas.getBoundingClientRect().y;
// let canvasZoom = drawC.getZoom(); | // let start2 = drawC.getTransformedPointer(touchesCacheBegin[1].clientX - cx, touchesCacheBegin[1].clientY - cy);
// let current1 = drawC.getTransformedPointer(touchesCache[0].clientX - cx, touchesCache[0].clientY - cy);
// let current2 = drawC.getTransformedPointer(touchesCache[1].clientX - cx, touchesCache[1].clientY - cy);
// let PINCH_THRESHOLD = 100 //drawC.canvas.clientWidth / 40;
// let PAN_THRESHOLD = 30
// let distStart = dist(start1, start2);
// let distCurrent = dist(current1, current2);
// let currentCenter = current1.add(current2).multiply(0.5); //new Point((current1.x + current2.x) / 2, (current1.y + current2.y) / 2)
// let startCenter = start1.add(start2).multiply(0.5); //[(start1.x + start2.x) / 2, (start1.y + start2.y) / 2]
// let panDistDelta = dist(currentCenter, startCenter) * canvasZoom;
// let pinchDistDelta = Math.abs(distStart - distCurrent) * canvasZoom;
// // console.log("pinch Dist: ", panDistDelta)
// // console.log("pan Dist: ", pinchDistDelta)
// if (pinchDistDelta < PINCH_THRESHOLD && panDistDelta < PAN_THRESHOLD) {
// return
// }
// if (handleTouchType == "") {
// if (pinchDistDelta > PINCH_THRESHOLD && panDistDelta > PAN_THRESHOLD) {
// handleTouchType = pinchDistDelta > panDistDelta ? "pinch" : "pan"
// }
// else if (pinchDistDelta > PINCH_THRESHOLD) {
// handleTouchType = "pinch"
// }
// else if (panDistDelta > PAN_THRESHOLD) {
// handleTouchType = "pan"
// }
// }
// if (handleTouchType == "pinch") {
// // Zoom
// let currentZoomFactor = distCurrent / distStart;
// // console.log("zoomFactor: ", currentZoomFactor);
// //TODO some log or exp to make absolute zoom... Maybe not. feels just fine as it is...
// drawC.setZoom(touchZoomCache * currentZoomFactor, startCenter);
// touchZoomCache = distCurrent / distStart;
// }
// if (handleTouchType == "pan") {
// // Pan
// let offset = startCenter.subtract(currentCenter) //new DOMPoint(startCenter[0] - currentCenter[0], startCenter[1] - currentCenter[1]);
// // console.log("offset: ", offset);
// let offsetDiff = new Point(touchPanCache.x - offset.x, touchPanCache.y - offset.y);
// touchPanCache = offset;
// // console.log("offsetDiff: ", offsetDiff, drawC.getZoom());
// // multiply with zoom
// drawC.offset(offsetDiff);
// }
// }
// function handlePanZoomSwitch() {
// let drawC = appData.drawingCanvas
// let cx = drawC.canvas.getBoundingClientRect().x;
// let cy = drawC.canvas.getBoundingClientRect().y;
// let canvasZoom = drawC.getZoom();
// let start1 = drawC.getTransformedPointer(touchesCacheBegin[0].clientX - cx, touchesCacheBegin[0].clientY - cy);
// let start2 = drawC.getTransformedPointer(touchesCacheBegin[1].clientX - cx, touchesCacheBegin[1].clientY - cy);
// let current1 = drawC.getTransformedPointer(touchesCache[0].clientX - cx, touchesCache[0].clientY - cy);
// let current2 = drawC.getTransformedPointer(touchesCache[1].clientX - cx, touchesCache[1].clientY - cy);
// let PINCH_THRESHOLD = 10 //drawC.canvas.clientWidth / 40;
// let PAN_THRESHOLD = 10
// let distStart = dist(start1, start2);
// let distCurrent = dist(current1, current2);
// let currentCenter = current1.add(current2).multiply(0.5); //new Point((current1.x + current2.x) / 2, (current1.y + current2.y) / 2)
// let startCenter = start1.add(start2).multiply(0.5); //[(start1.x + start2.x) / 2, (start1.y + start2.y) / 2]
// let panDistDelta = dist(currentCenter, startCenter) * canvasZoom;
// let pinchDistDelta = Math.abs(distStart - distCurrent) * canvasZoom;
// // console.log("pinch Dist: ", panDistDelta)
// // console.log("pan Dist: ", pinchDistDelta)
// if (pinchDistDelta < PINCH_THRESHOLD && panDistDelta < PAN_THRESHOLD) {
// return
// }
// handleTouchType = pinchDistDelta > panDistDelta ? "pinch" : "pan"
// if (handleTouchType == "pinch") {
// // Zoom
// let currentZoomFactor = distCurrent / distStart;
// // console.log("zoomFactor: ", currentZoomFactor);
// //TODO some log or exp to make absolute zoom... Maybe not. feels just fine as it is...
// drawC.setZoom(touchZoomCache * currentZoomFactor, startCenter);
// touchZoomCache = distCurrent / distStart;
// }
// if (handleTouchType == "pan") {
// // Pan
// let offset = startCenter.subtract(currentCenter) //new DOMPoint(startCenter[0] - currentCenter[0], startCenter[1] - currentCenter[1]);
// // console.log("offset: ", offset);
// let offsetDiff = new Point(touchPanCache.x - offset.x, touchPanCache.y - offset.y);
// touchPanCache = offset;
// // console.log("offsetDiff: ", offsetDiff, drawC.getZoom());
// // multiply with zoom
// drawC.offset(offsetDiff);
// }
// } | // let start1 = drawC.getTransformedPointer(touchesCacheBegin[0].clientX - cx, touchesCacheBegin[0].clientY - cy); |
stock_item_group.py | from sql.entity import Entity
class StockItemGroup(Entity):
def __init__(self, data): | self.is_active = self._get_bool('IsActive')
self.last_modified = self._get_int('LastModified') | super().__init__(data)
self.code = self._get_str('Code') # Primary Key
self.description = self._get_str('Description') |
structural_impls.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains implementations of the `Lift` and `TypeFoldable`
//! traits for various types in the Rust compiler. Most are written by
//! hand, though we've recently added some macros (e.g.,
//! `BraceStructLiftImpl!`) to help with the tedium.
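//!
//! A hand-written `Lift` impl typically just lifts each field in turn. A
//! sketch, using a hypothetical `Foo<'a>` with one liftable field (mirroring
//! the impls below):
//!
//! ```ignore
//! impl<'a, 'tcx> Lift<'tcx> for Foo<'a> {
//!     type Lifted = Foo<'tcx>;
//!     fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
//!         tcx.lift(&self.field).map(|field| Foo { field })
//!     }
//! }
//! ```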
use middle::const_val::{self, ConstVal, ConstEvalErr};
use ty::{self, Lift, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc_data_structures::sync::Lrc;
use mir::interpret;
use std::rc::Rc;
///////////////////////////////////////////////////////////////////////////
// Atomic structs
//
// For things that don't carry any arena-allocated data (and are
// copy...), just add them to this list.
CloneTypeFoldableAndLiftImpls! {
(),
bool,
usize,
u64,
::middle::region::Scope,
::syntax::ast::FloatTy,
::syntax::ast::NodeId,
::syntax_pos::symbol::Symbol,
::hir::def::Def,
::hir::def_id::DefId,
::hir::InlineAsm,
::hir::MatchSource,
::hir::Mutability,
::hir::Unsafety,
::rustc_target::spec::abi::Abi,
::mir::Local,
::mir::Promoted,
::traits::Reveal,
::ty::adjustment::AutoBorrowMutability,
::ty::AdtKind,
// Including `BoundRegion` is a *bit* dubious, but direct
// references to bound region appear in `ty::Error`, and aren't
// really meant to be folded. In general, we can only fold a fully
// general `Region`.
::ty::BoundRegion,
::ty::ClosureKind,
::ty::IntVarValue,
::syntax_pos::Span,
}
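// For the types listed above, folding is the identity (there are no nested
// types or regions to rewrite) and visiting always returns `false`.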
///////////////////////////////////////////////////////////////////////////
// Lift implementations
impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>> Lift<'tcx> for (A, B) {
type Lifted = (A::Lifted, B::Lifted);
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.0).and_then(|a| tcx.lift(&self.1).map(|b| (a, b)))
}
}
impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>, C: Lift<'tcx>> Lift<'tcx> for (A, B, C) {
type Lifted = (A::Lifted, B::Lifted, C::Lifted);
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.0).and_then(|a| {
tcx.lift(&self.1).and_then(|b| tcx.lift(&self.2).map(|c| (a, b, c)))
})
}
}
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Option<T> {
type Lifted = Option<T::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
match *self {
Some(ref x) => tcx.lift(x).map(Some),
None => Some(None)
}
}
}
impl<'tcx, T: Lift<'tcx>, E: Lift<'tcx>> Lift<'tcx> for Result<T, E> {
type Lifted = Result<T::Lifted, E::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
match *self {
Ok(ref x) => tcx.lift(x).map(Ok),
Err(ref e) => tcx.lift(e).map(Err)
}
}
}
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Box<T> {
type Lifted = Box<T::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&**self).map(Box::new)
}
}
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for [T] {
type Lifted = Vec<T::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
// type annotation needed to inform `projection_must_outlive`
let mut result : Vec<<T as Lift<'tcx>>::Lifted>
= Vec::with_capacity(self.len());
for x in self {
if let Some(value) = tcx.lift(x) {
result.push(value);
} else {
return None;
}
}
Some(result)
}
}
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Vec<T> {
type Lifted = Vec<T::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self[..])
}
}
impl<'tcx, I: Idx, T: Lift<'tcx>> Lift<'tcx> for IndexVec<I, T> {
type Lifted = IndexVec<I, T::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
self.iter()
.map(|e| tcx.lift(e))
.collect()
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::TraitRef<'a> {
type Lifted = ty::TraitRef<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| ty::TraitRef {
def_id: self.def_id,
substs,
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialTraitRef<'a> {
type Lifted = ty::ExistentialTraitRef<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| ty::ExistentialTraitRef {
def_id: self.def_id,
substs,
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::TraitPredicate<'a> {
type Lifted = ty::TraitPredicate<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
-> Option<ty::TraitPredicate<'tcx>> {
tcx.lift(&self.trait_ref).map(|trait_ref| ty::TraitPredicate {
trait_ref,
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::SubtypePredicate<'a> {
type Lifted = ty::SubtypePredicate<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
-> Option<ty::SubtypePredicate<'tcx>> {
tcx.lift(&(self.a, self.b)).map(|(a, b)| ty::SubtypePredicate {
a_is_expected: self.a_is_expected,
a,
b,
})
}
}
impl<'tcx, A: Copy+Lift<'tcx>, B: Copy+Lift<'tcx>> Lift<'tcx> for ty::OutlivesPredicate<A, B> {
type Lifted = ty::OutlivesPredicate<A::Lifted, B::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&(self.0, self.1)).map(|(a, b)| ty::OutlivesPredicate(a, b))
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionTy<'a> {
type Lifted = ty::ProjectionTy<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
-> Option<ty::ProjectionTy<'tcx>> {
tcx.lift(&self.substs).map(|substs| {
ty::ProjectionTy {
item_def_id: self.item_def_id,
substs,
}
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionPredicate<'a> {
type Lifted = ty::ProjectionPredicate<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
-> Option<ty::ProjectionPredicate<'tcx>> {
tcx.lift(&(self.projection_ty, self.ty)).map(|(projection_ty, ty)| {
ty::ProjectionPredicate {
projection_ty,
ty,
}
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialProjection<'a> {
type Lifted = ty::ExistentialProjection<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| {
ty::ExistentialProjection {
substs,
ty: tcx.lift(&self.ty).expect("type must lift when substs do"),
item_def_id: self.item_def_id,
}
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::Predicate<'a> {
type Lifted = ty::Predicate<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
match *self {
ty::Predicate::Trait(ref binder) => {
tcx.lift(binder).map(ty::Predicate::Trait)
}
ty::Predicate::Subtype(ref binder) => {
tcx.lift(binder).map(ty::Predicate::Subtype)
}
ty::Predicate::RegionOutlives(ref binder) => {
tcx.lift(binder).map(ty::Predicate::RegionOutlives)
}
ty::Predicate::TypeOutlives(ref binder) => {
tcx.lift(binder).map(ty::Predicate::TypeOutlives)
}
ty::Predicate::Projection(ref binder) => {
tcx.lift(binder).map(ty::Predicate::Projection)
}
ty::Predicate::WellFormed(ty) => {
tcx.lift(&ty).map(ty::Predicate::WellFormed)
}
ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => {
tcx.lift(&closure_substs)
.map(|closure_substs| ty::Predicate::ClosureKind(closure_def_id,
closure_substs,
kind))
}
ty::Predicate::ObjectSafe(trait_def_id) => {
Some(ty::Predicate::ObjectSafe(trait_def_id))
}
ty::Predicate::ConstEvaluatable(def_id, substs) => {
tcx.lift(&substs).map(|substs| {
ty::Predicate::ConstEvaluatable(def_id, substs)
})
}
}
}
}
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::Binder<T> {
type Lifted = ty::Binder<T::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(self.skip_binder()).map(ty::Binder::bind)
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ParamEnv<'a> {
type Lifted = ty::ParamEnv<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.caller_bounds).map(|caller_bounds| {
ty::ParamEnv {
reveal: self.reveal,
caller_bounds,
}
})
}
}
impl<'a, 'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::ParamEnvAnd<'a, T> {
type Lifted = ty::ParamEnvAnd<'tcx, T::Lifted>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.param_env).and_then(|param_env| {
tcx.lift(&self.value).map(|value| {
ty::ParamEnvAnd {
param_env,
value,
}
})
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ClosureSubsts<'a> {
type Lifted = ty::ClosureSubsts<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| {
ty::ClosureSubsts { substs }
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::GeneratorSubsts<'a> {
type Lifted = ty::GeneratorSubsts<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| {
ty::GeneratorSubsts { substs }
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjustment<'a> {
type Lifted = ty::adjustment::Adjustment<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.kind).and_then(|kind| {
tcx.lift(&self.target).map(|target| {
ty::adjustment::Adjustment { kind, target }
})
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjust<'a> {
type Lifted = ty::adjustment::Adjust<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
match *self {
ty::adjustment::Adjust::NeverToAny =>
Some(ty::adjustment::Adjust::NeverToAny),
ty::adjustment::Adjust::ReifyFnPointer =>
Some(ty::adjustment::Adjust::ReifyFnPointer),
ty::adjustment::Adjust::UnsafeFnPointer =>
Some(ty::adjustment::Adjust::UnsafeFnPointer),
ty::adjustment::Adjust::ClosureFnPointer =>
Some(ty::adjustment::Adjust::ClosureFnPointer),
ty::adjustment::Adjust::MutToConstPointer =>
Some(ty::adjustment::Adjust::MutToConstPointer),
ty::adjustment::Adjust::Unsize =>
Some(ty::adjustment::Adjust::Unsize),
ty::adjustment::Adjust::Deref(ref overloaded) => {
tcx.lift(overloaded).map(ty::adjustment::Adjust::Deref)
}
ty::adjustment::Adjust::Borrow(ref autoref) => {
tcx.lift(autoref).map(ty::adjustment::Adjust::Borrow)
}
}
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::OverloadedDeref<'a> {
type Lifted = ty::adjustment::OverloadedDeref<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.region).map(|region| {
ty::adjustment::OverloadedDeref {
region,
mutbl: self.mutbl,
}
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoBorrow<'a> {
type Lifted = ty::adjustment::AutoBorrow<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
match *self {
ty::adjustment::AutoBorrow::Ref(r, m) => {
tcx.lift(&r).map(|r| ty::adjustment::AutoBorrow::Ref(r, m))
}
ty::adjustment::AutoBorrow::RawPtr(m) => {
Some(ty::adjustment::AutoBorrow::RawPtr(m))
}
}
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::GenSig<'a> {
type Lifted = ty::GenSig<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&(self.yield_ty, self.return_ty))
.map(|(yield_ty, return_ty)| {
ty::GenSig {
yield_ty,
return_ty,
}
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::FnSig<'a> {
type Lifted = ty::FnSig<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.inputs_and_output).map(|x| {
ty::FnSig {
inputs_and_output: x,
variadic: self.variadic,
unsafety: self.unsafety,
abi: self.abi,
}
})
}
}
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::error::ExpectedFound<T> {
type Lifted = ty::error::ExpectedFound<T::Lifted>;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.expected).and_then(|expected| {
tcx.lift(&self.found).map(|found| {
ty::error::ExpectedFound {
expected,
found,
}
})
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> {
type Lifted = ty::error::TypeError<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
use ty::error::TypeError::*;
Some(match *self {
Mismatch => Mismatch,
UnsafetyMismatch(x) => UnsafetyMismatch(x),
AbiMismatch(x) => AbiMismatch(x),
Mutability => Mutability,
TupleSize(x) => TupleSize(x),
FixedArraySize(x) => FixedArraySize(x),
ArgCount => ArgCount,
RegionsDoesNotOutlive(a, b) => {
return tcx.lift(&(a, b)).map(|(a, b)| RegionsDoesNotOutlive(a, b))
}
RegionsInsufficientlyPolymorphic(a, b) => {
return tcx.lift(&b).map(|b| RegionsInsufficientlyPolymorphic(a, b))
}
RegionsOverlyPolymorphic(a, b) => {
return tcx.lift(&b).map(|b| RegionsOverlyPolymorphic(a, b))
}
IntMismatch(x) => IntMismatch(x),
FloatMismatch(x) => FloatMismatch(x),
Traits(x) => Traits(x),
VariadicMismatch(x) => VariadicMismatch(x),
CyclicTy(t) => return tcx.lift(&t).map(|t| CyclicTy(t)),
ProjectionMismatched(x) => ProjectionMismatched(x),
ProjectionBoundsLength(x) => ProjectionBoundsLength(x),
Sorts(ref x) => return tcx.lift(x).map(Sorts),
OldStyleLUB(ref x) => return tcx.lift(x).map(OldStyleLUB),
ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch)
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ConstEvalErr<'a> {
type Lifted = ConstEvalErr<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&*self.kind).map(|kind| {
ConstEvalErr {
span: self.span,
kind: Lrc::new(kind),
}
})
}
}
impl<'a, 'tcx> Lift<'tcx> for interpret::EvalError<'a> {
type Lifted = interpret::EvalError<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
Some(interpret::EvalError {
kind: tcx.lift(&self.kind)?,
})
}
}
impl<'a, 'tcx, O: Lift<'tcx>> Lift<'tcx> for interpret::EvalErrorKind<'a, O> {
type Lifted = interpret::EvalErrorKind<'tcx, <O as Lift<'tcx>>::Lifted>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
use ::mir::interpret::EvalErrorKind::*;
Some(match *self {
MachineError(ref err) => MachineError(err.clone()),
FunctionPointerTyMismatch(a, b) => FunctionPointerTyMismatch(
tcx.lift(&a)?,
tcx.lift(&b)?,
),
NoMirFor(ref s) => NoMirFor(s.clone()),
UnterminatedCString(ptr) => UnterminatedCString(ptr),
DanglingPointerDeref => DanglingPointerDeref,
DoubleFree => DoubleFree,
InvalidMemoryAccess => InvalidMemoryAccess,
InvalidFunctionPointer => InvalidFunctionPointer,
InvalidBool => InvalidBool,
InvalidDiscriminant => InvalidDiscriminant,
PointerOutOfBounds {
ptr,
access,
allocation_size,
} => PointerOutOfBounds { ptr, access, allocation_size },
InvalidNullPointerUsage => InvalidNullPointerUsage,
ReadPointerAsBytes => ReadPointerAsBytes,
ReadBytesAsPointer => ReadBytesAsPointer,
InvalidPointerMath => InvalidPointerMath,
ReadUndefBytes => ReadUndefBytes,
DeadLocal => DeadLocal,
InvalidBoolOp(bop) => InvalidBoolOp(bop),
Unimplemented(ref s) => Unimplemented(s.clone()),
DerefFunctionPointer => DerefFunctionPointer,
ExecuteMemory => ExecuteMemory,
BoundsCheck { ref len, ref index } => BoundsCheck {
len: tcx.lift(len)?,
index: tcx.lift(index)?,
},
Intrinsic(ref s) => Intrinsic(s.clone()),
InvalidChar(c) => InvalidChar(c),
StackFrameLimitReached => StackFrameLimitReached,
OutOfTls => OutOfTls,
TlsOutOfBounds => TlsOutOfBounds,
AbiViolation(ref s) => AbiViolation(s.clone()),
AlignmentCheckFailed {
required,
has,
} => AlignmentCheckFailed { required, has },
MemoryLockViolation {
ptr,
len,
frame,
access,
ref lock,
} => MemoryLockViolation { ptr, len, frame, access, lock: lock.clone() },
MemoryAcquireConflict {
ptr,
len,
kind,
ref lock,
} => MemoryAcquireConflict { ptr, len, kind, lock: lock.clone() },
InvalidMemoryLockRelease {
ptr,
len,
frame,
ref lock,
} => InvalidMemoryLockRelease { ptr, len, frame, lock: lock.clone() },
DeallocatedLockedMemory {
ptr,
ref lock,
} => DeallocatedLockedMemory { ptr, lock: lock.clone() },
ValidationFailure(ref s) => ValidationFailure(s.clone()),
CalledClosureAsFunction => CalledClosureAsFunction,
VtableForArgumentlessMethod => VtableForArgumentlessMethod,
ModifiedConstantMemory => ModifiedConstantMemory,
AssumptionNotHeld => AssumptionNotHeld,
InlineAsm => InlineAsm,
TypeNotPrimitive(ty) => TypeNotPrimitive(tcx.lift(&ty)?),
ReallocatedWrongMemoryKind(ref a, ref b) => {
ReallocatedWrongMemoryKind(a.clone(), b.clone())
},
DeallocatedWrongMemoryKind(ref a, ref b) => {
DeallocatedWrongMemoryKind(a.clone(), b.clone())
},
ReallocateNonBasePtr => ReallocateNonBasePtr,
DeallocateNonBasePtr => DeallocateNonBasePtr,
IncorrectAllocationInformation(a, b, c, d) => {
IncorrectAllocationInformation(a, b, c, d)
},
Layout(lay) => Layout(tcx.lift(&lay)?),
HeapAllocZeroBytes => HeapAllocZeroBytes,
HeapAllocNonPowerOfTwoAlignment(n) => HeapAllocNonPowerOfTwoAlignment(n),
Unreachable => Unreachable,
Panic => Panic,
ReadFromReturnPointer => ReadFromReturnPointer,
PathNotFound(ref v) => PathNotFound(v.clone()),
UnimplementedTraitSelection => UnimplementedTraitSelection,
TypeckError => TypeckError,
ReferencedConstant(ref err) => ReferencedConstant(tcx.lift(err)?),
OverflowNeg => OverflowNeg,
Overflow(op) => Overflow(op),
DivisionByZero => DivisionByZero,
RemainderByZero => RemainderByZero,
GeneratorResumedAfterReturn => GeneratorResumedAfterReturn,
GeneratorResumedAfterPanic => GeneratorResumedAfterPanic,
})
}
}
impl<'a, 'tcx> Lift<'tcx> for const_val::ErrKind<'a> {
type Lifted = const_val::ErrKind<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
use middle::const_val::ErrKind::*;
Some(match *self {
CouldNotResolve => CouldNotResolve,
TypeckError => TypeckError,
CheckMatchError => CheckMatchError,
Miri(ref e, ref frames) => return tcx.lift(e).map(|e| Miri(e, frames.clone())),
})
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::layout::LayoutError<'a> {
type Lifted = ty::layout::LayoutError<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
match *self {
ty::layout::LayoutError::Unknown(ref ty) => {
tcx.lift(ty).map(ty::layout::LayoutError::Unknown)
}
ty::layout::LayoutError::SizeOverflow(ref ty) => {
tcx.lift(ty).map(ty::layout::LayoutError::SizeOverflow)
}
}
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::InstanceDef<'a> {
type Lifted = ty::InstanceDef<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
match *self {
ty::InstanceDef::Item(def_id) =>
Some(ty::InstanceDef::Item(def_id)),
ty::InstanceDef::Intrinsic(def_id) =>
Some(ty::InstanceDef::Intrinsic(def_id)),
ty::InstanceDef::FnPtrShim(def_id, ref ty) =>
Some(ty::InstanceDef::FnPtrShim(def_id, tcx.lift(ty)?)),
ty::InstanceDef::Virtual(def_id, n) =>
Some(ty::InstanceDef::Virtual(def_id, n)),
ty::InstanceDef::ClosureOnceShim { call_once } =>
Some(ty::InstanceDef::ClosureOnceShim { call_once }),
ty::InstanceDef::DropGlue(def_id, ref ty) =>
Some(ty::InstanceDef::DropGlue(def_id, tcx.lift(ty)?)),
ty::InstanceDef::CloneShim(def_id, ref ty) =>
Some(ty::InstanceDef::CloneShim(def_id, tcx.lift(ty)?)),
}
}
}
BraceStructLiftImpl! {
impl<'a, 'tcx> Lift<'tcx> for ty::Instance<'a> {
type Lifted = ty::Instance<'tcx>;
def, substs
}
}
BraceStructLiftImpl! {
impl<'a, 'tcx> Lift<'tcx> for interpret::GlobalId<'a> {
type Lifted = interpret::GlobalId<'tcx>;
instance, promoted
}
}
///////////////////////////////////////////////////////////////////////////
// TypeFoldable implementations.
//
// Ideally, each type should invoke `folder.fold_foo(self)` and
// nothing else. In some cases, though, we haven't gotten around to
// adding methods on the `folder` yet, and thus the folding is
// hard-coded here. This is less flexible, because folders cannot
// override the behavior, but there are a lot of random types and one
// can easily refactor the folding into the TypeFolder trait as
// needed.
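// For example, `Ty<'tcx>` below overrides `fold_with` to call
// `folder.fold_ty(*self)`, while its `super_fold_with` performs the
// structural recursion into the type's contents.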
/// AdtDefs are basically the same as a DefId.
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::AdtDef {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self {
*self
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _visitor: &mut V) -> bool {
false
}
}
impl<'tcx, T:TypeFoldable<'tcx>, U:TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T, U) {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> (T, U) {
(self.0.fold_with(folder), self.1.fold_with(folder))
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.0.visit_with(visitor) || self.1.visit_with(visitor)
}
}
EnumTypeFoldableImpl! {
impl<'tcx, T> TypeFoldable<'tcx> for Option<T> {
(Some)(a),
(None),
} where T: TypeFoldable<'tcx>
}
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Rc<T> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
Rc::new((**self).fold_with(folder))
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
(**self).visit_with(visitor)
}
}
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Box<T> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let content: T = (**self).fold_with(folder);
box content
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
(**self).visit_with(visitor)
}
}
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Vec<T> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
self.iter().map(|t| t.fold_with(folder)).collect()
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.iter().any(|t| t.visit_with(visitor))
}
}
impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder<T> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
self.map_bound_ref(|ty| ty.fold_with(folder))
}
fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_binder(self)
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.skip_binder().visit_with(visitor)
}
fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
visitor.visit_binder(self)
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ParamEnv<'tcx> { reveal, caller_bounds }
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice<ty::ExistentialPredicate<'tcx>> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|p| p.fold_with(folder)).collect::<AccumulateVec<[_; 8]>>();
folder.tcx().intern_existential_predicates(&v)
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.iter().any(|p| p.visit_with(visitor))
}
}
EnumTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ExistentialPredicate<'tcx> {
(ty::ExistentialPredicate::Trait)(a),
(ty::ExistentialPredicate::Projection)(a),
(ty::ExistentialPredicate::AutoTrait)(a),
}
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice<Ty<'tcx>> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|t| t.fold_with(folder)).collect::<AccumulateVec<[_; 8]>>();
folder.tcx().intern_type_list(&v)
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.iter().any(|t| t.visit_with(visitor))
}
}
impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
use ty::InstanceDef::*;
Self {
substs: self.substs.fold_with(folder),
def: match self.def {
Item(did) => Item(did.fold_with(folder)),
Intrinsic(did) => Intrinsic(did.fold_with(folder)),
FnPtrShim(did, ty) => FnPtrShim(
did.fold_with(folder),
ty.fold_with(folder),
),
Virtual(did, i) => Virtual(
did.fold_with(folder),
i,
),
ClosureOnceShim { call_once } => ClosureOnceShim {
call_once: call_once.fold_with(folder),
},
DropGlue(did, ty) => DropGlue(
did.fold_with(folder),
ty.fold_with(folder),
),
CloneShim(did, ty) => CloneShim(
did.fold_with(folder),
ty.fold_with(folder),
),
},
}
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
use ty::InstanceDef::*;
self.substs.visit_with(visitor) ||
match self.def {
Item(did) => did.visit_with(visitor),
Intrinsic(did) => did.visit_with(visitor),
FnPtrShim(did, ty) => {
did.visit_with(visitor) ||
ty.visit_with(visitor)
},
Virtual(did, _) => did.visit_with(visitor),
ClosureOnceShim { call_once } => call_once.visit_with(visitor),
DropGlue(did, ty) => {
did.visit_with(visitor) ||
ty.visit_with(visitor)
},
CloneShim(did, ty) => {
did.visit_with(visitor) ||
ty.visit_with(visitor)
},
}
} | impl<'tcx> TypeFoldable<'tcx> for interpret::GlobalId<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
Self {
instance: self.instance.fold_with(folder),
promoted: self.promoted
}
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.instance.visit_with(visitor)
}
}
impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let sty = match self.sty {
ty::TyRawPtr(tm) => ty::TyRawPtr(tm.fold_with(folder)),
ty::TyArray(typ, sz) => ty::TyArray(typ.fold_with(folder), sz.fold_with(folder)),
ty::TySlice(typ) => ty::TySlice(typ.fold_with(folder)),
ty::TyAdt(tid, substs) => ty::TyAdt(tid, substs.fold_with(folder)),
ty::TyDynamic(ref trait_ty, ref region) =>
ty::TyDynamic(trait_ty.fold_with(folder), region.fold_with(folder)),
ty::TyTuple(ts) => ty::TyTuple(ts.fold_with(folder)),
ty::TyFnDef(def_id, substs) => {
ty::TyFnDef(def_id, substs.fold_with(folder))
}
ty::TyFnPtr(f) => ty::TyFnPtr(f.fold_with(folder)),
ty::TyRef(ref r, ty, mutbl) => {
ty::TyRef(r.fold_with(folder), ty.fold_with(folder), mutbl)
}
ty::TyGenerator(did, substs, movability) => {
ty::TyGenerator(
did,
substs.fold_with(folder),
movability)
}
ty::TyGeneratorWitness(types) => ty::TyGeneratorWitness(types.fold_with(folder)),
ty::TyClosure(did, substs) => ty::TyClosure(did, substs.fold_with(folder)),
ty::TyProjection(ref data) => ty::TyProjection(data.fold_with(folder)),
ty::TyAnon(did, substs) => ty::TyAnon(did, substs.fold_with(folder)),
ty::TyBool | ty::TyChar | ty::TyStr | ty::TyInt(_) |
ty::TyUint(_) | ty::TyFloat(_) | ty::TyError | ty::TyInfer(_) |
ty::TyParam(..) | ty::TyNever | ty::TyForeign(..) => return self
};
if self.sty == sty {
self
} else {
folder.tcx().mk_ty(sty)
}
}
fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_ty(*self)
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
match self.sty {
ty::TyRawPtr(ref tm) => tm.visit_with(visitor),
ty::TyArray(typ, sz) => typ.visit_with(visitor) || sz.visit_with(visitor),
ty::TySlice(typ) => typ.visit_with(visitor),
ty::TyAdt(_, substs) => substs.visit_with(visitor),
ty::TyDynamic(ref trait_ty, ref reg) =>
trait_ty.visit_with(visitor) || reg.visit_with(visitor),
ty::TyTuple(ts) => ts.visit_with(visitor),
ty::TyFnDef(_, substs) => substs.visit_with(visitor),
ty::TyFnPtr(ref f) => f.visit_with(visitor),
ty::TyRef(r, ty, _) => r.visit_with(visitor) || ty.visit_with(visitor),
ty::TyGenerator(_did, ref substs, _) => {
substs.visit_with(visitor)
}
ty::TyGeneratorWitness(ref types) => types.visit_with(visitor),
ty::TyClosure(_did, ref substs) => substs.visit_with(visitor),
ty::TyProjection(ref data) => data.visit_with(visitor),
ty::TyAnon(_, ref substs) => substs.visit_with(visitor),
ty::TyBool | ty::TyChar | ty::TyStr | ty::TyInt(_) |
ty::TyUint(_) | ty::TyFloat(_) | ty::TyError | ty::TyInfer(_) |
ty::TyParam(..) | ty::TyNever | ty::TyForeign(..) => false,
}
}
fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
visitor.visit_ty(self)
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::TypeAndMut<'tcx> {
ty, mutbl
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::GenSig<'tcx> {
yield_ty, return_ty
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::FnSig<'tcx> {
inputs_and_output, variadic, unsafety, abi
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::TraitRef<'tcx> { def_id, substs }
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ExistentialTraitRef<'tcx> { def_id, substs }
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ImplHeader<'tcx> {
impl_def_id,
self_ty,
trait_ref,
predicates,
}
}
impl<'tcx> TypeFoldable<'tcx> for ty::Region<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self {
*self
}
fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_region(*self)
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _visitor: &mut V) -> bool {
false
}
fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
visitor.visit_region(*self)
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ClosureSubsts<'tcx> {
substs,
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::GeneratorSubsts<'tcx> {
substs,
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::Adjustment<'tcx> {
kind,
target,
}
}
EnumTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::Adjust<'tcx> {
(ty::adjustment::Adjust::NeverToAny),
(ty::adjustment::Adjust::ReifyFnPointer),
(ty::adjustment::Adjust::UnsafeFnPointer),
(ty::adjustment::Adjust::ClosureFnPointer),
(ty::adjustment::Adjust::MutToConstPointer),
(ty::adjustment::Adjust::Unsize),
(ty::adjustment::Adjust::Deref)(a),
(ty::adjustment::Adjust::Borrow)(a),
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::OverloadedDeref<'tcx> {
region, mutbl,
}
}
EnumTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::AutoBorrow<'tcx> {
(ty::adjustment::AutoBorrow::Ref)(a, b),
(ty::adjustment::AutoBorrow::RawPtr)(m),
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::GenericPredicates<'tcx> {
parent, predicates
}
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice<ty::Predicate<'tcx>> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|p| p.fold_with(folder)).collect::<AccumulateVec<[_; 8]>>();
folder.tcx().intern_predicates(&v)
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.iter().any(|p| p.visit_with(visitor))
}
}
EnumTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::Predicate<'tcx> {
(ty::Predicate::Trait)(a),
(ty::Predicate::Subtype)(a),
(ty::Predicate::RegionOutlives)(a),
(ty::Predicate::TypeOutlives)(a),
(ty::Predicate::Projection)(a),
(ty::Predicate::WellFormed)(a),
(ty::Predicate::ClosureKind)(a, b, c),
(ty::Predicate::ObjectSafe)(a),
(ty::Predicate::ConstEvaluatable)(a, b),
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ProjectionPredicate<'tcx> {
projection_ty, ty
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ExistentialProjection<'tcx> {
ty, substs, item_def_id
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ProjectionTy<'tcx> {
substs, item_def_id
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::InstantiatedPredicates<'tcx> {
predicates
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx, T> TypeFoldable<'tcx> for ty::ParamEnvAnd<'tcx, T> {
param_env, value
} where T: TypeFoldable<'tcx>
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::SubtypePredicate<'tcx> {
a_is_expected, a, b
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::TraitPredicate<'tcx> {
trait_ref
}
}
TupleStructTypeFoldableImpl! {
impl<'tcx,T,U> TypeFoldable<'tcx> for ty::OutlivesPredicate<T,U> {
a, b
} where T : TypeFoldable<'tcx>, U : TypeFoldable<'tcx>,
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::ClosureUpvar<'tcx> {
def, span, ty
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx, T> TypeFoldable<'tcx> for ty::error::ExpectedFound<T> {
expected, found
} where T: TypeFoldable<'tcx>
}
impl<'tcx, T: TypeFoldable<'tcx>, I: Idx> TypeFoldable<'tcx> for IndexVec<I, T> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
self.iter().map(|x| x.fold_with(folder)).collect()
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.iter().any(|t| t.visit_with(visitor))
}
}
EnumTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for ty::error::TypeError<'tcx> {
(ty::error::TypeError::Mismatch),
(ty::error::TypeError::UnsafetyMismatch)(x),
(ty::error::TypeError::AbiMismatch)(x),
(ty::error::TypeError::Mutability),
(ty::error::TypeError::TupleSize)(x),
(ty::error::TypeError::FixedArraySize)(x),
(ty::error::TypeError::ArgCount),
(ty::error::TypeError::RegionsDoesNotOutlive)(a, b),
(ty::error::TypeError::RegionsInsufficientlyPolymorphic)(a, b),
(ty::error::TypeError::RegionsOverlyPolymorphic)(a, b),
(ty::error::TypeError::IntMismatch)(x),
(ty::error::TypeError::FloatMismatch)(x),
(ty::error::TypeError::Traits)(x),
(ty::error::TypeError::VariadicMismatch)(x),
(ty::error::TypeError::CyclicTy)(t),
(ty::error::TypeError::ProjectionMismatched)(x),
(ty::error::TypeError::ProjectionBoundsLength)(x),
(ty::error::TypeError::Sorts)(x),
(ty::error::TypeError::ExistentialMismatch)(x),
(ty::error::TypeError::OldStyleLUB)(x),
}
}
impl<'tcx> TypeFoldable<'tcx> for ConstVal<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
match *self {
ConstVal::Value(v) => ConstVal::Value(v),
ConstVal::Unevaluated(def_id, substs) => {
ConstVal::Unevaluated(def_id, substs.fold_with(folder))
}
}
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
match *self {
ConstVal::Value(_) => false,
ConstVal::Unevaluated(_, substs) => substs.visit_with(visitor),
}
}
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Const<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let ty = self.ty.fold_with(folder);
let val = self.val.fold_with(folder);
folder.tcx().mk_const(ty::Const {
ty,
val
})
}
fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_const(*self)
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.ty.visit_with(visitor) || self.val.visit_with(visitor)
}
fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
visitor.visit_const(self)
}
} | }
|
segments-config.js | import {
getConsumptionsSegmentsConfig,
getCirclesSegmentsConfig,
} from "../../../calculations/gas-network/utils";
async function handler(req, res) {
if (req.method !== "POST") {
res.status(405).json({
message: `Метод не підтримується сервером!`,
});
return; |
try {
const { type } = clientData;
delete clientData.type;
const config =
type === `consumptions`
? getConsumptionsSegmentsConfig(clientData)
: getCirclesSegmentsConfig(clientData);
res.status(201).json(config);
} catch (error) {
res.status(501).json({
message:
error || `Помилка в розрахунку, перевірте правильність введених даних!`,
});
}
}
export default handler; | }
const { body: clientData } = req; |
authentication.js | var request = require('request-promise');
var passport = require('passport');
var OnshapeStrategy = require('passport-onshape').Strategy;
var oauthClientId;
var oauthClientSecret;
var platformPath = process.env.ONSHAPE_PLATFORM;
var hostedPath = process.env.ONSHAPE_HOST;
var oauthPath = process.env.ONSHAPE_OAUTH_SERVICE;
if (process.env.OAUTH_CLIENT_ID) {
oauthClientId = process.env.OAUTH_CLIENT_ID;
}
if (process.env.OAUTH_CLIENT_SECRET) {
oauthClientSecret = process.env.OAUTH_CLIENT_SECRET;
}
function init() {
passport.serializeUser(function(user, done) {
done(null, user);
});
passport.deserializeUser(function(obj, done) {
done(null, obj);
});
passport.use(new OnshapeStrategy({
clientID: oauthClientId,
clientSecret: oauthClientSecret,
callbackURL: "https://aqueous-harbor-68833.herokuapp.com/oauthRedirect",
authorizationURL: oauthPath + "/oauth/authorize",
tokenURL: oauthPath + "/oauth/token",
userProfileURL: platformPath + "/api/users/session"
},
function(accessToken, refreshToken, profile, done) {
// asynchronous verification, for effect...
process.nextTick(function () {
profile.accessToken = accessToken;
profile.refreshToken = refreshToken;
// To keep the example simple, the user's Onshape profile is returned to
// represent the logged-in user. In a typical application, you would want
// to associate the Onshape account with a user record in your database,
// and return that user instead.
return done(null, profile);
});
}
));
}
function onOAuthTokenReceived(body, req) {
var jsonResponse;
jsonResponse = JSON.parse(body);
if (jsonResponse) {
req.user.accessToken = jsonResponse.access_token;
req.user.refreshToken = jsonResponse.refresh_token;
}
}
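// Deduplicate concurrent refreshes: at most one token refresh request is in
// flight per session; later callers reuse the same pending promise below.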
var pendingTokenRefreshes = {};
function refreshOAuthToken(req, res, next) {
if (pendingTokenRefreshes[req.session.id]) {
return pendingTokenRefreshes[req.session.id]
}
var refreshToken = req.user.refreshToken;
if (refreshToken) {
pendingTokenRefreshes[req.session.id] = request.post({
uri: platformPath + '/oauth/token',
form: {
'client_id': oauthClientId,
'client_secret': oauthClientSecret,
'grant_type': 'refresh_token',
'refresh_token': refreshToken
}
}).then(function(body) {
delete pendingTokenRefreshes[req.session.id];
return onOAuthTokenReceived(body, req);
}).catch(function(error) {
delete pendingTokenRefreshes[req.session.id];
console.log('Error refreshing OAuth Token: ', error);
res.status(401).send({
authUri: getAuthUri(),
msg: 'Authentication required.'
});
throw(error);
});
return pendingTokenRefreshes[req.session.id];
} else {
return Promise.reject('No refresh_token');
}
}
function | () {
return platformPath + '/oauth/authorize?response_type=code&client_id=' + oauthClientId;
}
module.exports = {
'init': init,
'refreshOAuthToken': refreshOAuthToken,
'getAuthUri': getAuthUri
};
| getAuthUri |
mul_assign_like.rs | use mul_like::{struct_exprs, tuple_exprs};
use proc_macro2::{Span, TokenStream};
use std::collections::HashSet;
use std::iter;
use syn::{Data, DeriveInput, Fields, Ident};
use utils::{add_where_clauses_for_new_ident, get_field_types_iter, named_to_vec, unnamed_to_vec};
pub fn | (
input: &DeriveInput,
trait_name: &str,
import_root: proc_macro2::TokenStream,
) -> TokenStream {
let trait_ident = Ident::new(trait_name, Span::call_site());
let trait_path = "e!(#import_root::ops::#trait_ident);
let method_name = trait_name.to_string();
let method_name = method_name.trim_right_matches("Assign");
let method_name = method_name.to_lowercase();
let method_ident = Ident::new(&(method_name.to_string() + "_assign"), Span::call_site());
let input_type = &input.ident;
let (exprs, fields) = match input.data {
Data::Struct(ref data_struct) => match data_struct.fields {
Fields::Unnamed(ref fields) => {
let field_vec = unnamed_to_vec(fields);
(tuple_exprs(&field_vec, &method_ident), field_vec)
}
Fields::Named(ref fields) => {
let field_vec = named_to_vec(fields);
(struct_exprs(&field_vec, &method_ident), field_vec)
}
_ => panic!(format!("Unit structs cannot use derive({})", trait_name)),
},
_ => panic!(format!("Only structs can use derive({})", trait_name)),
};
let scalar_ident = &Ident::new("__RhsT", Span::call_site());
let tys: &HashSet<_> = &get_field_types_iter(&fields).collect();
let scalar_iter = iter::repeat(scalar_ident);
let trait_path_iter = iter::repeat(trait_path);
let type_where_clauses = quote!{
where #(#tys: #trait_path_iter<#scalar_iter>),*
};
let new_generics = add_where_clauses_for_new_ident(
&input.generics,
&fields,
scalar_ident,
type_where_clauses,
import_root,
);
let (impl_generics, _, where_clause) = new_generics.split_for_impl();
let (_, ty_generics, _) = input.generics.split_for_impl();
quote!(
impl#impl_generics #trait_path<#scalar_ident> for #input_type#ty_generics #where_clause{
#[inline]
fn #method_ident(&mut self, rhs: #scalar_ident#ty_generics) {
#(#exprs;
)*
}
}
)
}
| expand |
nmt.py | from datautil.dataloader import batch_iter
import torch.nn.functional as F
import torch.optim as optim
import torch.nn.utils as nn_utils
import time
import torch
import numpy as np
from config.Const import *
class NMT(object):
def __init__(self, encoder, decoder):
super(NMT, self).__init__()
self.encoder = encoder
self.decoder = decoder
def summary(self):
print('encoder:', self.encoder)
print('decoder:', self.decoder)
    # Train for a single epoch
def train(self, train_pairs, enc_optimizer, dec_optimizer, args, src_vocab, tgt_vocab):
train_loss = 0
for src_batch, tgt_batch in batch_iter(train_pairs, args, src_vocab, tgt_vocab):
loss = 0
# enc_out: (batch_size, seq_len, hidden_size * nb_directions)
# enc_hidden: (num_layers * nb_directions, batch_size, hidden_size)
enc_out, enc_hidden = self.encoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
self.encoder.zero_grad()
self.decoder.zero_grad()
dec_hidden = enc_hidden
dec_input = tgt_batch.src_idxs[0].unsqueeze(1)
if np.random.uniform(0, 1) <= args.teacher_force:
                # print('feeding the target (teacher forcing) as the next input')
for i in range(1, tgt_batch.src_idxs.size(0)):
dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
loss += self.calc_loss(dec_out, tgt_batch.src_idxs[i])
train_loss += loss.data.item()
dec_input = tgt_batch.src_idxs[i].unsqueeze(1)
else:
                # print('feeding the network prediction as the next input')
for i in range(1, tgt_batch.src_idxs.size(0)):
dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
loss += self.calc_loss(dec_out, tgt_batch.src_idxs[i])
train_loss += loss.data.item()
_, top_i = dec_out.data.topk(1)
dec_input = top_i # (batch_size, 1)
loss.backward()
nn_utils.clip_grad_norm_(filter(lambda p: p.requires_grad, self.encoder.parameters()), max_norm=5.0)
nn_utils.clip_grad_norm_(filter(lambda p: p.requires_grad, self.decoder.parameters()), max_norm=5.0)
enc_optimizer.step()
dec_optimizer.step()
return train_loss / len(train_pairs)
    # Train for multiple epochs
def train_iter(self, train_pairs, args, src_vocab, tgt_vocab):
self.encoder.train()
self.decoder.train()
enc_optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.encoder.parameters()), lr=args.lr)
dec_optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.decoder.parameters()), lr=args.lr)
enc_lr_scheduler = optim.lr_scheduler.LambdaLR(enc_optimizer, lambda ep: max(0.95**ep, 1e-4))
dec_lr_scheduler = optim.lr_scheduler.LambdaLR(dec_optimizer, lambda ep: max(0.95**ep, 1e-4))
# enc_lr_scheduler = optim.lr_scheduler.LambdaLR(enc_optimizer, lambda ep: max(1 - 0.75 * ep / args.epoch, 1e-4))
# dec_lr_scheduler = optim.lr_scheduler.LambdaLR(dec_optimizer, lambda ep: max(1 - 0.75 * ep / args.epoch, 1e-4))
for i in range(args.epoch):
enc_lr_scheduler.step()
dec_lr_scheduler.step()
t1 = time.time()
train_loss = self.train(train_pairs, enc_optimizer, dec_optimizer, args, src_vocab, tgt_vocab)
t2 = time.time()
print('[Epoch %d] train loss: %.3f' % (i+1, train_loss))
print('encoder lr:', enc_lr_scheduler.get_lr())
print('decoder lr:', dec_lr_scheduler.get_lr())
print('time cost: %.2fs' % (t2 - t1))
def calc_loss(self, pred, tgt):
return F.nll_loss(pred, tgt, ignore_index=0)
# def evaluate(self, test_pairs, args, src_vocab, tgt_vocab):
# self.encoder.eval()
# self.decoder.eval()
# pred_wds, tgt_wds = [], []
# for src_batch, tgt_batch in batch_iter(test_pairs, args, src_vocab, tgt_vocab):
# batch_pred_wds, batch_tgt_wds = [], []
# enc_out, enc_hidden = self.encoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
#
# dec_hidden = enc_hidden
# dec_input = tgt_batch.src_idxs[0]
# for i in range(1, tgt_batch.src_idxs.size(0)):
# dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
#
# dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
# tgt_idxs = tgt_batch.src_idxs[i]
# # greedy search
# pred_idxs = dec_out.data.argmax(dim=1)
# batch_pred_wds.append(tgt_vocab.index2word(pred_idxs.tolist()))
# batch_tgt_wds.append(tgt_vocab.index2word(tgt_idxs.tolist()))
# dec_input = pred_idxs
#
# pred_wds.extend(self.extract_valid(np.asarray(batch_pred_wds).T.tolist()))
# tgt_wds.extend(self.extract_valid(np.asarray(batch_tgt_wds).T.tolist()))
#
# print('BLEU:', self.corpus_bleu(pred_wds, tgt_wds))
# beam search
'''
执行过程:设beam size = 3
1、选择t1时刻输出的概率分数最大的3个词
2、分别将t-1时刻选择的3个词作为当前时刻的输入
3、求t时刻累积的(序列)概率分数(历史所选择词的对数似然和),选择分数值最大的3个词
4、重复2-3过程,直到到达最大长度(或遇到<eos>)
'''
def evaluate(self, test_pairs, args, src_vocab, tgt_vocab):
self.encoder.eval()
self.decoder.eval()
# pred_wds, tgt_wds = [], []
for src_batch, tgt_batch in batch_iter(test_pairs, args, src_vocab, tgt_vocab):
# batch_pred_wds, batch_tgt_wds = [], []
enc_out, enc_hidden = self.e | l_count = 0
for i in range(len(cand) - n + 1):
cand_count, ref_count = 1, 0
ngram = cand[i: i + n]
            # count how many times the ngram occurs in the candidate (machine) translation
for j in range(i + n, len(cand) - n + 1):
if ngram == cand[j: j + n]:
cand_count += 1
            # count how many times the ngram occurs in the reference (human) translation
for k in range(len(ref) - n + 1):
if ngram == ref[k: k + n]:
ref_count += 1
total_count += min(cand_count, ref_count)
return total_count
    # Compute sentence-level BLEU, a value in [0, 1]; higher is better
def sentence_bleu(self, cand: list, ref: list, N=4) -> float:
'''
:param cand: sentence_tokens
:param ref: sentence_tokens
:return:
'''
assert len(cand) != 0 and len(ref) != 0
        # n in the n-grams ranges over [1, 4]
res = 0
cand_len, ref_len = len(cand), len(ref)
for n in range(1, N+1):
cand_gram = max(0, cand_len - n + 1)
res += 0.25 * np.log(self.count_ngram(cand, ref, n) / cand_gram)
        # brevity penalty for short candidate translations
# bp = np.exp(1 - max(1., len(ref) / len(cand)))
return np.exp(res + min(0., 1 - ref_len / cand_len))
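    # Illustrative sanity check (not in the original code): if cand == ref with
    # 5 distinct tokens and N = 4, every n-gram precision is 1, each log term is
    # log(1) = 0, and the brevity term min(0, 1 - 5/5) = 0, so the result is
    # exp(0) = 1.0, the maximum score.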
    # Compute corpus-level BLEU (note: not simply summing/averaging sentence BLEU scores)
def corpus_bleu(self, cands: list, refs: list, N=4) -> float:
'''
:param cands: [sentence_tokens1, sentence_tokens2]
:param refs: [sentence_tokens1, sentence_tokens2]
:return:
'''
assert len(cands) != 0 and len(cands) == len(refs)
ref_len, cand_len = 0, 0
for cand, ref in zip(cands, refs):
ref_len += len(ref)
cand_len += len(cand)
res = 0
for n in range(1, N+1):
n_match, n_grams = 0, 0
for cand, ref in zip(cands, refs):
n_match += self.count_ngram(cand, ref, n)
n_grams += max(0, len(cand) - n + 1)
res += 0.25 * np.log(n_match / n_grams + 1e-8)
return np.exp(res + min(0., 1 - ref_len / cand_len))
| ncoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
            # keep the accumulated (history) scores
seq_len, batch_size = tgt_batch.src_idxs.size()
# (bz, beam_size)
hist_score = torch.zeros((batch_size, args.beam_size), device=args.device)
# (beam_size, bz, vocab_size)
beam_score = torch.zeros((args.beam_size, batch_size, tgt_vocab.vocab_size), device=args.device)
            # (max_len, bz, beam_size)
best_paths = torch.zeros((MAX_LEN, batch_size, args.beam_size), device=args.device)
dec_hidden = enc_hidden
dec_input = tgt_batch.src_idxs[0].unsqueeze(1)
for i in range(1, min(MAX_LEN, seq_len)):
if i == 1:
# dec_input: (bz, 1)
# dec_out: (bz, vocab_size)
dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
# (bz, beam_size)
top_prob, top_idxs = dec_out.data.topk(args.beam_size, dim=1)
hist_score = top_prob
best_paths[i] = top_idxs
# (bz, beam_size)
dec_input = top_idxs
else:
# dec_input: (bz, beam_size) -> (beam_size, bz)
dec_input = dec_input.transpose(0, 1)
for j in range(args.beam_size):
# dec_out: (bz, vocab_size)
dec_out, dec_hidden = self.decoder(dec_input[j].unsqueeze(1), dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
beam_score[j] = dec_out
# (bz, beam_size, 1) -> (bz, beam_size, vocab_size)
                    hist_score = hist_score.unsqueeze(-1).expand((-1, -1, tgt_vocab.vocab_size)).contiguous()  # materialize so the in-place += below is valid
hist_score += beam_score.transpose(0, 1) # (bz, beam_size, vocab_size)
# (bz, beam_size * vocab_size)
hist_score = hist_score.reshape((batch_size, -1))
# (bz, beam_size)
top_prob, top_idxs = hist_score.topk(args.beam_size, dim=1)
hist_score = top_prob
top_idxs %= tgt_vocab.vocab_size
best_paths[i] = top_idxs
dec_input = top_idxs
# pred_wds.extend(self.extract_valid(np.asarray(batch_pred_wds).T.tolist()))
# tgt_wds.extend(self.extract_valid(np.asarray(batch_tgt_wds).T.tolist()))
    # Extract the non-padding part of each sequence
def extract_valid(self, seqs: list):
return list(map(lambda x: x[:x.index(EOS)] if EOS in x else x, seqs))
    # Count (clipped) n-gram matches between candidate and reference
def count_ngram(self, cand: list, ref: list, n=1) -> int:
assert len(cand) != 0 and len(ref) != 0
tota |
__init__.py | """
yq: Command-line YAML processor - jq wrapper for YAML documents
yq transcodes YAML documents to JSON and passes them to jq.
See https://github.com/kislyuk/yq for more information.
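
Example (illustrative, assuming jq is installed and on PATH):

    echo 'a: 1' | yq '.a'   # prints: 1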
"""
# PYTHON_ARGCOMPLETE_OK
from __future__ import absolute_import, division, print_function, unicode_literals
import sys, argparse, subprocess, json
from collections import OrderedDict
from datetime import datetime, date, time
import yaml, argcomplete
from .compat import USING_PYTHON2, open
from .parser import get_parser, jq_arg_spec
from .loader import get_loader
from .dumper import get_dumper
from .version import __version__ # noqa
class JSONDateTimeEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, (datetime, date, time)):
return o.isoformat()
return json.JSONEncoder.default(self, o)
def decode_docs(jq_output, json_decoder):
while jq_output:
doc, pos = json_decoder.raw_decode(jq_output)
jq_output = jq_output[pos + 1:]
yield doc
def xq_cli():
cli(input_format="xml", program_name="xq")
def tq_cli():
cli(input_format="toml", program_name="tq")
class DeferredOutputStream:
def __init__(self, name, mode="w"):
self.name = name
self.mode = mode
self._fh = None
@property
def fh(self):
|
def flush(self):
if self._fh is not None:
return self.fh.flush()
def close(self):
if self._fh is not None:
return self.fh.close()
def __getattr__(self, a):
return getattr(self.fh, a)
def cli(args=None, input_format="yaml", program_name="yq"):
parser = get_parser(program_name, __doc__)
argcomplete.autocomplete(parser)
args, jq_args = parser.parse_known_args(args=args)
for i, arg in enumerate(jq_args):
if arg.startswith("-") and not arg.startswith("--"):
if "i" in arg:
args.in_place = True
if "y" in arg:
args.output_format = "yaml"
elif "Y" in arg:
args.output_format = "annotated_yaml"
elif "x" in arg:
args.output_format = "xml"
jq_args[i] = arg.replace("i", "").replace("x", "").replace("y", "").replace("Y", "")
if args.output_format != "json":
jq_args[i] = jq_args[i].replace("C", "")
if jq_args[i] == "-":
jq_args[i] = None
jq_args = [arg for arg in jq_args if arg is not None]
for arg in jq_arg_spec:
values = getattr(args, arg, None)
delattr(args, arg)
if values is not None:
for value_group in values:
jq_args.append(arg)
jq_args.extend(value_group)
if "--from-file" in jq_args or "-f" in jq_args:
args.input_streams.insert(0, argparse.FileType()(args.jq_filter))
else:
jq_filter_arg_loc = len(jq_args)
if "--args" in jq_args:
jq_filter_arg_loc = jq_args.index('--args') + 1
elif "--jsonargs" in jq_args:
jq_filter_arg_loc = jq_args.index('--jsonargs') + 1
jq_args.insert(jq_filter_arg_loc, args.jq_filter)
delattr(args, "jq_filter")
in_place = args.in_place
delattr(args, "in_place")
if sys.stdin.isatty() and not args.input_streams:
return parser.print_help()
yq_args = dict(input_format=input_format, program_name=program_name, jq_args=jq_args, **vars(args))
if in_place:
if USING_PYTHON2:
sys.exit("{}: -i/--in-place is not compatible with Python 2".format(program_name))
if args.output_format not in {"yaml", "annotated_yaml"}:
sys.exit("{}: -i/--in-place can only be used with -y/-Y".format(program_name))
input_streams = yq_args.pop("input_streams")
if len(input_streams) == 1 and input_streams[0].name == "<stdin>":
msg = "{}: -i/--in-place can only be used with filename arguments, not on standard input"
sys.exit(msg.format(program_name))
for i, input_stream in enumerate(input_streams):
def exit_handler(arg=None):
if arg:
sys.exit(arg)
if i < len(input_streams):
yq_args["exit_func"] = exit_handler
yq(input_streams=[input_stream], output_stream=DeferredOutputStream(input_stream.name), **yq_args)
else:
yq(**yq_args)
def yq(input_streams=None, output_stream=None, input_format="yaml", output_format="json",
       program_name="yq", width=None, indentless_lists=False, xml_root=None, xml_dtd=False, xml_force_list=frozenset(), xml_attr_prefix="@",
       explicit_start=False, explicit_end=False, jq_args=frozenset(), exit_func=None):
if not input_streams:
input_streams = [sys.stdin]
if not output_stream:
output_stream = sys.stdout
if not exit_func:
exit_func = sys.exit
converting_output = True if output_format != "json" else False
try:
# Note: universal_newlines is just a way to induce subprocess to make stdin a text buffer and encode it for us
jq = subprocess.Popen(["jq"] + list(jq_args),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE if converting_output else None,
universal_newlines=True)
except OSError as e:
msg = "{}: Error starting jq: {}: {}. Is jq installed and available on PATH?"
exit_func(msg.format(program_name, type(e).__name__, e))
try:
if converting_output:
# TODO: enable true streaming in this branch (with asyncio, asyncproc, a multi-shot variant of
# subprocess.Popen._communicate, etc.)
# See https://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python
use_annotations = True if output_format == "annotated_yaml" else False
input_docs = []
for input_stream in input_streams:
if input_format == "yaml":
loader = get_loader(use_annotations=use_annotations)
input_docs.extend(yaml.load_all(input_stream, Loader=loader))
elif input_format == "xml":
import xmltodict
input_docs.append(xmltodict.parse(input_stream.read(), disable_entities=True,
force_list=xml_force_list, attr_prefix=xml_attr_prefix))
elif input_format == "toml":
import toml
input_docs.append(toml.load(input_stream))
else:
raise Exception("Unknown input format")
input_payload = "\n".join(json.dumps(doc, cls=JSONDateTimeEncoder) for doc in input_docs)
jq_out, jq_err = jq.communicate(input_payload)
json_decoder = json.JSONDecoder(object_pairs_hook=OrderedDict)
if output_format == "yaml" or output_format == "annotated_yaml":
yaml.dump_all(decode_docs(jq_out, json_decoder), stream=output_stream,
Dumper=get_dumper(use_annotations=use_annotations, indentless=indentless_lists),
width=width, allow_unicode=True, default_flow_style=False,
explicit_start=explicit_start, explicit_end=explicit_end)
elif output_format == "xml":
import xmltodict
for doc in decode_docs(jq_out, json_decoder):
if xml_root:
doc = {xml_root: doc}
elif not isinstance(doc, OrderedDict):
msg = ("{}: Error converting JSON to XML: cannot represent non-object types at top level. "
"Use --xml-root=name to envelope your output with a root element.")
exit_func(msg.format(program_name))
full_document = True if xml_dtd else False
try:
xmltodict.unparse(doc, output=output_stream, full_document=full_document, pretty=True,
indent=" ")
except ValueError as e:
if "Document must have exactly one root" in str(e):
raise Exception(str(e) + " Use --xml-root=name to envelope your output with a root element")
else:
raise
output_stream.write(b"\n" if sys.version_info < (3, 0) else "\n")
elif output_format == "toml":
import toml
for doc in decode_docs(jq_out, json_decoder):
if not isinstance(doc, OrderedDict):
msg = "{}: Error converting JSON to TOML: cannot represent non-object types at top level."
exit_func(msg.format(program_name))
if USING_PYTHON2:
# For Python 2, dump the string and encode it into bytes.
output = toml.dumps(doc)
output_stream.write(output.encode("utf-8"))
else:
# For Python 3, write the unicode to the buffer directly.
toml.dump(doc, output_stream)
else:
if input_format == "yaml":
loader = get_loader(use_annotations=False)
for input_stream in input_streams:
for doc in yaml.load_all(input_stream, Loader=loader):
json.dump(doc, jq.stdin, cls=JSONDateTimeEncoder)
jq.stdin.write("\n")
elif input_format == "xml":
import xmltodict
for input_stream in input_streams:
json.dump(xmltodict.parse(input_stream.read(), disable_entities=True,
force_list=xml_force_list, attr_prefix=xml_attr_prefix), jq.stdin)
jq.stdin.write("\n")
elif input_format == "toml":
import toml
for input_stream in input_streams:
json.dump(toml.load(input_stream), jq.stdin)
jq.stdin.write("\n")
else:
raise Exception("Unknown input format")
jq.stdin.close()
jq.wait()
for input_stream in input_streams:
input_stream.close()
exit_func(jq.returncode)
except Exception as e:
exit_func("{}: Error running jq: {}: {}.".format(program_name, type(e).__name__, e))
| if self._fh is None:
self._fh = open(self.name, self.mode)
return self._fh |
lon_mode_ttgr_lon_mode.rs | #[doc = "Register `TTGR_LON_MODE` reader"]
pub struct R(crate::R<LON_MODE_TTGR_LON_MODE_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<LON_MODE_TTGR_LON_MODE_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<LON_MODE_TTGR_LON_MODE_SPEC>> for R {
fn from(reader: crate::R<LON_MODE_TTGR_LON_MODE_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `TTGR_LON_MODE` writer"]
pub struct W(crate::W<LON_MODE_TTGR_LON_MODE_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<LON_MODE_TTGR_LON_MODE_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<LON_MODE_TTGR_LON_MODE_SPEC>> for W {
fn from(writer: crate::W<LON_MODE_TTGR_LON_MODE_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `PCYCLE` reader - LON PCYCLE Length"]
pub struct PCYCLE_R(crate::FieldReader<u32, u32>);
impl PCYCLE_R {
pub(crate) fn new(bits: u32) -> Self {
PCYCLE_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for PCYCLE_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `PCYCLE` writer - LON PCYCLE Length"]
pub struct PCYCLE_W<'a> {
w: &'a mut W,
}
impl<'a> PCYCLE_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0x00ff_ffff) | (value as u32 & 0x00ff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:23 - LON PCYCLE Length"]
#[inline(always)]
pub fn pcycle(&self) -> PCYCLE_R {
PCYCLE_R::new((self.bits & 0x00ff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:23 - LON PCYCLE Length"]
#[inline(always)]
pub fn pcycle(&mut self) -> PCYCLE_W |
#[doc = "Writes raw bits to the register."]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Transmitter Timeguard Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [lon_mode_ttgr_lon_mode](index.html) module"]
pub struct LON_MODE_TTGR_LON_MODE_SPEC;
impl crate::RegisterSpec for LON_MODE_TTGR_LON_MODE_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [lon_mode_ttgr_lon_mode::R](R) reader structure"]
impl crate::Readable for LON_MODE_TTGR_LON_MODE_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [lon_mode_ttgr_lon_mode::W](W) writer structure"]
impl crate::Writable for LON_MODE_TTGR_LON_MODE_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets TTGR_LON_MODE to value 0"]
impl crate::Resettable for LON_MODE_TTGR_LON_MODE_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| {
PCYCLE_W { w: self }
} |
config.js | import { configure, addDecorator } from '@storybook/react'; |
// console config
addDecorator((storyFn, context) => withConsole()(storyFn)(context)); | import { withConsole } from '@storybook/addon-console';
// automatically import all files ending in *.stories.js
configure(require.context('../stories', true, /\.stories\.js$/), module); |
mpmc_bounded_queue.rs | /* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
#![allow(missing_docs, dead_code)]
// http://www.1024cores.net/home/lock-free-algorithms/queues/bounded-mpmc-queue
// This queue is copy pasted from old rust stdlib.
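//
// Minimal usage sketch (illustrative, mirroring the test at the bottom):
//
//     let q: Queue<u32> = Queue::with_capacity(8);
//     assert!(q.push(1).is_ok());    // push returns Err(value) when full
//     assert_eq!(q.pop(), Some(1));  // pop returns None when empty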
use std::sync::Arc;
use std::cell::UnsafeCell;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Relaxed, Release, Acquire};
struct Node<T> {
sequence: AtomicUsize,
value: Option<T>,
}
unsafe impl<T: Send> Send for Node<T> {}
unsafe impl<T: Sync> Sync for Node<T> {}
struct State<T> {
pad0: [u8; 64],
buffer: Vec<UnsafeCell<Node<T>>>,
mask: usize,
pad1: [u8; 64],
enqueue_pos: AtomicUsize,
pad2: [u8; 64],
dequeue_pos: AtomicUsize,
pad3: [u8; 64],
}
unsafe impl<T: Send> Send for State<T> {}
unsafe impl<T: Sync> Sync for State<T> {}
pub struct Queue<T> {
state: Arc<State<T>>,
}
impl<T: Send> State<T> {
fn with_capacity(capacity: usize) -> State<T> {
let capacity = if capacity < 2 || (capacity & (capacity - 1)) != 0 {
if capacity < 2 {
2
} else {
// use next power of 2 as capacity
capacity.next_power_of_two()
}
} else {
capacity
};
let buffer = (0..capacity).map(|i| {
UnsafeCell::new(Node { sequence:AtomicUsize::new(i), value: None })
}).collect::<Vec<_>>();
State{
pad0: [0; 64],
buffer: buffer,
mask: capacity-1,
pad1: [0; 64],
enqueue_pos: AtomicUsize::new(0),
pad2: [0; 64],
dequeue_pos: AtomicUsize::new(0),
pad3: [0; 64],
}
}
fn push(&self, value: T) -> Result<(), T> {
let mask = self.mask;
let mut pos = self.enqueue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - pos as isize;
if diff == 0 {
let enqueue_pos = self.enqueue_pos.compare_and_swap(pos, pos+1, Relaxed);
if enqueue_pos == pos {
unsafe {
(*node.get()).value = Some(value);
(*node.get()).sequence.store(pos+1, Release);
}
break
} else {
pos = enqueue_pos;
}
} else if diff < 0 {
return Err(value);
} else {
pos = self.enqueue_pos.load(Relaxed);
}
}
Ok(())
}
fn pop(&self) -> Option<T> {
let mask = self.mask;
let mut pos = self.dequeue_pos.load(Relaxed);
loop {
let node = &self.buffer[pos & mask];
let seq = unsafe { (*node.get()).sequence.load(Acquire) };
let diff: isize = seq as isize - (pos + 1) as isize;
if diff == 0 {
let dequeue_pos = self.dequeue_pos.compare_and_swap(pos, pos+1, Relaxed);
if dequeue_pos == pos {
unsafe {
let value = (*node.get()).value.take();
(*node.get()).sequence.store(pos + mask + 1, Release);
return value
}
} else {
pos = dequeue_pos;
}
} else if diff < 0 {
return None
} else {
pos = self.dequeue_pos.load(Relaxed);
}
}
}
}
impl<T: Send> Queue<T> {
pub fn with_capacity(capacity: usize) -> Queue<T> {
Queue{
state: Arc::new(State::with_capacity(capacity))
}
}
pub fn push(&self, value: T) -> Result<(), T> {
self.state.push(value)
}
pub fn pop(&self) -> Option<T> {
self.state.pop()
}
}
impl<T: Send> Clone for Queue<T> {
fn clone(&self) -> Queue<T> {
Queue { state: self.state.clone() }
}
}
#[cfg(test)]
mod tests {
use std::thread;
use std::sync::mpsc::channel;
use super::Queue;
#[test]
fn | () {
let nthreads = 8;
let nmsgs = 1000;
let q = Queue::with_capacity(nthreads*nmsgs);
assert_eq!(None, q.pop());
let (tx, rx) = channel();
for _ in 0..nthreads {
let q = q.clone();
let tx = tx.clone();
thread::spawn(move || {
let q = q;
for i in 0..nmsgs {
assert!(q.push(i).is_ok());
}
tx.send(()).unwrap();
});
}
let mut completion_rxs = vec![];
for _ in 0..nthreads {
let (tx, rx) = channel();
completion_rxs.push(rx);
let q = q.clone();
thread::spawn(move || {
let q = q;
let mut i = 0;
loop {
match q.pop() {
None => {},
Some(_) => {
i += 1;
if i == nmsgs { break }
}
}
}
tx.send(i).unwrap();
});
}
for rx in completion_rxs.iter_mut() {
assert_eq!(nmsgs, rx.recv().unwrap());
}
for _ in 0..nthreads {
rx.recv().unwrap();
}
}
}
| test |
lib.rs | //! Utilities for manipulating tree graphs, for the analysis of neuronal arbors.
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::fmt::Debug;
use std::hash::Hash;
pub use slab_tree;
use slab_tree::{NodeId, NodeRef, RemoveBehavior, Tree, TreeBuilder};
pub type Precision = f64;
/// Trait adding some topological utilities to a tree representation.
pub trait TopoArbor {
type Node;
/// Remove the given nodes and everything below them.
    /// Returns the IDs at which pruning actually occurred: some nodes in the
    /// starting set may already have been removed as descendants of others.
fn prune_at(&mut self, node_ids: &[NodeId]) -> HashSet<NodeId>;
/// Remove everything distal to the given node,
/// and up to the next branch proximal of it.
fn prune_branches_containing(&mut self, node_ids: &[NodeId]) -> HashSet<NodeId>;
/// Remove all branches with a strahler index less than `threshold`.
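    /// (E.g. in the tests below, `prune_below_strahler(2)` keeps F, B and D
    /// but removes the order-1 twigs such as C, E and G.)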
fn prune_below_strahler(&mut self, threshold: usize) -> HashSet<NodeId>;
/// Remove all branches greater than `threshold` branch points from the root.
fn prune_beyond_branches(&mut self, threshold: usize) -> HashSet<NodeId>;
/// Remove all nodes greater than `threshold` steps from the root.
fn prune_beyond_steps(&mut self, threshold: usize) -> HashSet<NodeId>;
// TODO: iterator?
/// Decompose the arbor into slabs: unbranched runs of nodes.
/// The start of every slab is the root or a branch point,
/// and the end of every slab is a branch point or leaf.
/// Returned depth first in preorder.
fn slabs(&self) -> Vec<Vec<NodeId>>;
// fn get_node(&self, node_id: NodeId) -> Option<NodeRef<Self::Node>>;
}
pub trait SpatialArbor {
/// Prune up to the nearest branch point of all leaves <= `threshold`
/// away from that branch point.
fn prune_twigs(&mut self, threshold: Precision) -> HashSet<NodeId>;
/// Prune all nodes whose distance from the root is >= `threshold`.
fn prune_beyond_distance(&mut self, threshold: Precision) -> HashSet<NodeId>;
/// Total cable length present in the arbor.
fn cable_length(&self) -> Precision;
}
/// Given tuples of (child_id, optional_parent_id, child_data),
/// make a tree whose node data are (id, data).
/// Returns that tree, and a mapping from the passed-in IDs to the internal IDs.
///
/// If the data type D implements Location,
/// the (id, data) tuple will also: this is required by
/// [resample_tree_points](fn.resample_tree_points.html).
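///
/// Illustrative example (mirroring the tests below): the edge list
/// `[("F", None, ()), ("B", Some("F"), ()), ("A", Some("B"), ())]`
/// yields a tree rooted at `("F", ())` with child `("B", ())` and
/// grandchild `("A", ())`, plus a map from "F"/"B"/"A" to their NodeIds.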
pub fn edges_to_tree_with_data<T: Hash + Eq + Copy, D: Clone>(
edges: &[(T, Option<T>, D)],
) -> Result<(Tree<(T, D)>, HashMap<T, NodeId>), &'static str> {
let size = edges.len();
let mut root_opt: Option<T> = None;
let mut data: HashMap<T, D> = HashMap::with_capacity(size);
let mut child_vecs: HashMap<T, Vec<T>> = HashMap::with_capacity(size);
for (child, parent_opt, d) in edges.iter() {
data.insert(*child, d.clone());
match parent_opt {
Some(p) => child_vecs
.entry(*p)
.or_insert_with(Vec::default)
.push(*child),
None => {
if root_opt.is_some() {
return Err("More than one root");
}
root_opt.replace(*child);
}
}
}
let root_tnid = root_opt.ok_or("No root")?;
let mut tree = TreeBuilder::new()
.with_capacity(edges.len())
.with_root((root_tnid, data.remove(&root_tnid).unwrap()))
.build();
let mut tnid_to_id = HashMap::default();
tnid_to_id.insert(root_tnid, tree.root_id().unwrap());
// ? can we use the NodeMut object here? lifetime issues
let mut to_visit = vec![tree.root_id().expect("Just set root")];
while let Some(node_id) = to_visit.pop() {
let mut parent = tree.get_mut(node_id).expect("Just placed");
let parent_data = &parent.data();
if let Some(v) = child_vecs.remove(&parent_data.0) {
to_visit.extend(v.into_iter().map(|tnid| {
let datum = data.remove(&tnid).unwrap();
let node_id = parent.append((tnid, datum)).node_id();
tnid_to_id.insert(tnid, node_id);
node_id
}));
}
}
Ok((tree, tnid_to_id))
}
impl<T: Debug> TopoArbor for Tree<T> {
type Node = T;
fn prune_at(&mut self, node_ids: &[NodeId]) -> HashSet<NodeId> {
let mut pruned = HashSet::with_capacity(node_ids.len());
for node_id in node_ids {
if self
.remove(*node_id, RemoveBehavior::DropChildren)
.is_some()
{
pruned.insert(*node_id);
}
}
pruned
}
fn prune_branches_containing(&mut self, node_ids: &[NodeId]) -> HashSet<NodeId> {
let mut visited = HashSet::new();
let mut to_remove = Vec::default();
for node_id in node_ids {
let mut ancestor = *node_id;
while let Some(node) = self.get(ancestor) {
// seem to be going id -> node -> id more than necessary?
if visited.contains(&ancestor) {
break;
} else if node.prev_sibling().is_some() || node.next_sibling().is_some() {
to_remove.push(ancestor);
break;
}
visited.insert(ancestor);
ancestor = match node.parent() {
Some(n) => n.node_id(),
_ => break,
};
}
}
self.prune_at(&to_remove)
}
fn prune_below_strahler(&mut self, threshold: usize) -> HashSet<NodeId> {
let mut strahler: HashMap<NodeId, usize> = HashMap::default();
let mut to_prune = Vec::default();
for node in self.root().expect("must have a root").traverse_post_order() {
let mut max_child_strahler = 0;
let mut max_strahler_count = 0;
let mut sub_threshold = Vec::default();
for child in node.children() {
let child_strahler = strahler
.remove(&child.node_id())
.expect("If it has children, they must have been visited");
if child_strahler < threshold {
sub_threshold.push(child.node_id());
}
match child_strahler.cmp(&max_child_strahler) {
Ordering::Greater => {
max_child_strahler = child_strahler;
max_strahler_count = 1;
}
Ordering::Equal => max_strahler_count += 1,
_ => (),
}
}
let node_strahler = match max_strahler_count.cmp(&1) {
Ordering::Equal => max_child_strahler,
Ordering::Greater => max_child_strahler + 1,
_ => 1,
};
if node_strahler == threshold {
to_prune.extend(sub_threshold.into_iter());
}
strahler.insert(node.node_id(), node_strahler);
}
self.prune_at(&to_prune)
}
fn prune_beyond_branches(&mut self, threshold: usize) -> HashSet<NodeId> {
let mut to_prune = Vec::default();
let mut to_visit = vec![(self.root().expect("must have root"), 0)];
while let Some((node, level)) = to_visit.pop() {
let children: Vec<NodeRef<T>> = node.children().collect();
if children.len() > 1 {
if level >= threshold {
to_prune.extend(children.into_iter().map(|n| n.node_id()));
} else {
to_visit.extend(children.into_iter().map(|n| (n, level + 1)));
}
} else {
to_visit.extend(children.into_iter().map(|n| (n, level)));
}
}
self.prune_at(&to_prune)
}
fn prune_beyond_steps(&mut self, threshold: usize) -> HashSet<NodeId> {
let mut to_prune = Vec::default();
let mut to_visit = vec![(self.root().expect("must have root"), 0)];
while let Some((node, steps)) = to_visit.pop() {
if steps >= threshold {
to_prune.extend(node.children().map(|n| n.node_id()));
} else {
let new_steps = steps + 1;
to_visit.extend(node.children().map(|n| (n, new_steps)));
}
}
self.prune_at(&to_prune)
}
fn slabs(&self) -> Vec<Vec<NodeId>> {
let mut to_visit = vec![vec![self.root().expect("must have root").node_id()]];
let mut slabs = Vec::default();
while let Some(mut slab) = to_visit.pop() {
let mut tail = self
.get(*slab.last().expect("has length"))
.expect("has node");
loop {
let mut children: Vec<NodeRef<T>> = tail.children().collect();
match children.len().cmp(&1) {
Ordering::Greater => {
to_visit.extend(
children
.into_iter()
.map(|c| vec![tail.node_id(), c.node_id()]),
);
break;
}
Ordering::Equal => {
tail = children.pop().expect("know it exists");
slab.push(tail.node_id());
}
Ordering::Less => break,
}
}
slabs.push(slab);
}
slabs
}
}
impl<T: Debug + Location> SpatialArbor for Tree<T> {
fn prune_twigs(&mut self, threshold: Precision) -> HashSet<NodeId> |
fn prune_beyond_distance(&mut self, threshold: Precision) -> HashSet<NodeId> {
let mut to_prune = Vec::default();
let mut to_visit = vec![(self.root().unwrap(), 0.0)];
while let Some((node, dist)) = to_visit.pop() {
to_visit.extend(node.children().filter_map(|c| {
let c_dist = dist + c.data().distance_to(node.data().location());
if c_dist >= threshold {
to_prune.push(c.node_id());
None
} else {
Some((c, c_dist))
}
}));
}
self.prune_at(&to_prune)
}
fn cable_length(&self) -> Precision {
self.root()
.unwrap()
.traverse_pre_order()
.skip(1)
.fold(0.0, |total, child| {
let parent = child.parent().unwrap();
total + child.data().distance_to(parent.data().location())
})
}
}
// ? generic so that different Locations can be cross-compared
/// Trait for types which describe a 3D point.
/// In this crate, it is implemented for [Precision; 3],
/// (T, [Precision; 3]), and references to both.
pub trait Location {
/// Where the point is, in 3D space
fn location(&self) -> &[Precision; 3];
/// How far from one Location object to another
fn distance_to(&self, other: &[Precision; 3]) -> Precision {
let mut squares_total = 0.0;
for (a, b) in self.location().iter().zip(other.location().iter()) {
squares_total += (a - b).powf(2.0);
}
squares_total.sqrt()
}
/// Where you would end up if you travelled `distance` towards `other`,
/// and the overshoot: how far past the point you have travelled
/// (negative if the point was not reached).
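    ///
    /// Illustrative example (not from the original docs): travelling 3.0 from
    /// `[0.0, 0.0, 0.0]` towards `[2.0, 0.0, 0.0]` yields
    /// `([3.0, 0.0, 0.0], 1.0)`, i.e. the target was passed by 1.0.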
fn project_towards(
&self,
other: &[Precision; 3],
distance: Precision,
) -> ([Precision; 3], Precision) {
let self_loc = self.location();
let distance_to = self.distance_to(other);
if distance_to * distance == 0.0 {
return (*self_loc, 0.0);
}
let mut out = [0.0, 0.0, 0.0];
for (idx, (a, b)) in self_loc.iter().zip(other.location().iter()).enumerate() {
let diff = b - a;
out[idx] = a + (diff / distance_to) * distance;
}
(out, distance - distance_to)
}
}
impl Location for [Precision; 3] {
fn location(&self) -> &[Precision; 3] {
self
}
}
impl Location for &[Precision; 3] {
fn location(&self) -> &[Precision; 3] {
self
}
}
impl<T, L: Location> Location for (T, L) {
fn location(&self) -> &[Precision; 3] {
self.1.location()
}
}
impl<T, L: Location> Location for &(T, L) {
fn location(&self) -> &[Precision; 3] {
self.1.location()
}
}
// TODO: take iterator, return iterator
/// Place one point at the start of the linestring.
/// Travel down the linestring, placing another point at intervals of `length`,
/// until you reach the end.
/// Return all the placed points.
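///
/// Illustrative example (mirroring the test below): resampling the points
/// (0,0,0), (1,0,0), (2,0,0), (3,0,0) with `length = 1.2` yields points at
/// x = 0.0, 1.2 and 2.4.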
pub fn resample_linestring(linestring: &[impl Location], length: Precision) -> Vec<[Precision; 3]> {
if length <= 0.0 {
// TODO: result
panic!("Can't resample with length <= 0");
}
let mut it = linestring.iter().map(|s| s.location());
let mut prev = match it.next() {
Some(p) => *p,
_ => return vec![],
};
let mut out = vec![prev];
let mut remaining = length;
let mut next_opt = it.next();
while let Some(next) = next_opt {
if remaining <= 0.0 {
remaining = length
}
let (new, overshoot) = prev.project_towards(next, remaining);
match overshoot.partial_cmp(&0.0).expect("Non-numeric float") {
Ordering::Greater => {
// we've overshot
remaining = overshoot;
next_opt = it.next();
prev = *next;
}
Ordering::Less => {
// we've undershot (overshoot is negative)
remaining = length;
out.push(new);
prev = new;
}
Ordering::Equal => {
remaining = length;
out.push(new);
prev = new;
next_opt = it.next();
}
};
}
out
}
/// Keeps the root, branch points, and leaves; each slab between them is resampled at the given spacing.
pub fn resample_tree_points<T: Location + Debug>(
tree: &Tree<T>,
length: Precision,
) -> Vec<[Precision; 3]> {
let id_slabs = tree.slabs();
let root_loc = tree.get(id_slabs[0][0]).unwrap().data().location();
let mut out = vec![*root_loc];
for slab_ids in id_slabs.into_iter() {
let slab_locs: Vec<_> = slab_ids
.into_iter()
.map(|sid| tree.get(sid).unwrap().data().location())
.collect();
out.extend(resample_linestring(&slab_locs, length).into_iter().skip(1));
out.push(**slab_locs.last().unwrap());
}
out
}
#[cfg(test)]
mod tests {
use crate::*;
use std::fmt::Debug;
const EPSILON: Precision = 0.0001;
/// From [wikipedia](https://en.wikipedia.org/wiki/Tree_traversal#/media/File:Sorted_binary_tree_ALL.svg)
///
/// F
/// / \
/// B G
/// / \ \
/// A D I
/// / \ \
/// C E H
fn make_topotree() -> (Tree<&'static str>, HashMap<&'static str, NodeId>) {
let mut tree = TreeBuilder::new().with_capacity(9).with_root("F").build();
let mut f = tree.root_mut().unwrap();
let mut b = f.append("B");
b.append("A");
let mut d = b.append("D");
d.append("C");
d.append("E");
f.append("G").append("I").append("H");
let map = f
.as_ref()
.traverse_pre_order()
.map(|n| (*n.data(), n.node_id()))
.collect();
print_tree(&tree, "ORIGINAL");
(tree, map)
}
fn nodes<T: Hash + Eq + Copy>(tree: &Tree<T>) -> HashSet<T> {
tree.root()
.unwrap()
.traverse_pre_order()
.map(|n| *n.data())
.collect()
}
fn print_tree<T: Debug>(tree: &Tree<T>, label: &'static str) {
let mut s = String::new();
tree.write_formatted(&mut s).unwrap();
println!("{}\n{}", label, s);
}
fn assert_nodes<T: Debug + Hash + Eq + Copy>(
tree: &Tree<T>,
contains: &[T],
not_contains: &[T],
) {
print_tree(tree, "RESULT");
let tns = nodes(tree);
for n in contains {
assert!(tns.contains(n));
}
for n in not_contains {
assert!(!tns.contains(n));
}
}
#[test]
fn prune_at() {
let (mut tree, map) = make_topotree();
tree.prune_at(&[map["G"]]);
assert_nodes(&tree, &["F"], &["G", "H", "I"]);
}
#[test]
fn prune_containing() {
let (mut tree, map) = make_topotree();
tree.prune_branches_containing(&[map["I"]]);
assert_nodes(&tree, &["F"], &["G", "H", "I"]);
}
#[test]
fn prune_containing_multiple() {
let (mut tree, map) = make_topotree();
tree.prune_branches_containing(&[map["G"], map["H"]]);
assert_nodes(&tree, &["F"], &["G", "H", "I"]);
}
#[test]
fn prune_below_strahler() {
let (mut tree, _) = make_topotree();
tree.prune_below_strahler(2);
assert_nodes(&tree, &["F", "B", "D"], &["C", "E", "G"]);
}
#[test]
fn prune_beyond_branches() {
let (mut tree, _) = make_topotree();
tree.prune_beyond_branches(2);
assert_nodes(&tree, &["D", "A", "H"], &["C", "E"]);
}
#[test]
fn prune_beyond_steps() {
let (mut tree, _) = make_topotree();
tree.prune_beyond_steps(1);
assert_nodes(&tree, &["B", "G"], &["A", "D", "I"]);
}
fn add_points(a: &[Precision; 3], b: &[Precision; 3]) -> [Precision; 3] {
let mut v = Vec::with_capacity(3);
for (this_a, this_b) in a.iter().zip(b.iter()) {
v.push(this_a + this_b);
}
[v[0], v[1], v[2]]
}
fn make_linestring(
start: &[Precision; 3],
step: &[Precision; 3],
count: usize,
) -> Vec<[Precision; 3]> {
let mut out = vec![*start];
for _ in 0..(count - 1) {
let next = add_points(out.last().unwrap(), step);
out.push(next);
}
out
}
fn assert_close(a: Precision, b: Precision) {
if (a - b).abs() >= EPSILON {
panic!("{} != {}", a, b);
}
}
#[test]
fn project_towards() {
let dist = 0.001;
let p1 = [1.0, 0.0, 0.0];
let p2 = [2.0, 0.0, 0.0];
let (r1, o1) = p1.project_towards(&p2, 1.0);
assert_near(&r1, &[2.0, 0.0, 0.0], dist);
assert_close(o1, 0.0);
let (r2, o2) = p1.project_towards(&p2, 2.0);
assert_near(&r2, &[3.0, 0.0, 0.0], dist);
assert_close(o2, 1.0);
let (r3, o3) = p1.project_towards(&p2, 0.5);
assert_near(&r3, &[1.5, 0.0, 0.0], dist);
assert_close(o3, -0.5);
}
fn assert_near<S: Location + Debug>(p1: &S, p2: &S, dist: Precision) {
if p1.distance_to(p2.location()) >= dist {
panic!("{:?} not near {:?}", p1, p2);
}
}
fn assert_linestring<S: Location + Debug>(ls1: &[S], ls2: &[S], dist: Precision) {
assert_eq!(ls1.len(), ls2.len());
for (p1, p2) in ls1.iter().zip(ls2.iter()) {
assert_near(p1, p2, dist);
}
}
#[test]
fn resample_ls() {
let linestring = make_linestring(&[0., 0., 0.], &[1., 0., 0.], 4);
let resampled_08 = resample_linestring(&linestring, 0.8);
assert_linestring(
&resampled_08,
&[[0., 0., 0.], [0.8, 0., 0.], [1.6, 0., 0.], [2.4, 0., 0.]],
0.001,
);
let resampled_12 = resample_linestring(&linestring, 1.2);
assert_linestring(
&resampled_12,
&[[0., 0., 0.], [1.2, 0., 0.], [2.4, 0., 0.]],
0.001,
);
}
#[test]
fn test_edges_to_tree_constructs() {
let edges: Vec<(&'static str, Option<&'static str>, ())> = vec![
("F", None, ()),
("B", Some("F"), ()),
("A", Some("B"), ()),
("D", Some("B"), ()),
("C", Some("D"), ()),
("E", Some("D"), ()),
("G", Some("F"), ()),
("I", Some("G"), ()),
("H", Some("I"), ()),
];
let (test_tree, _) = edges_to_tree_with_data(&edges).expect("Couldn't construct");
print_tree(&test_tree, "TEST");
let test_dfs: Vec<_> = test_tree
.root()
.unwrap()
.traverse_pre_order()
.map(|n| n.data().0)
.collect();
let (ref_tree, _) = make_topotree();
let ref_dfs: Vec<_> = ref_tree
.root()
.unwrap()
.traverse_pre_order()
.map(|n| n.data())
.collect();
assert_eq!(format!("{:?}", test_dfs), format!("{:?}", ref_dfs));
}
#[test]
fn test_edges_to_tree_jumbled() {
let edges: Vec<(&'static str, Option<&'static str>, ())> = vec![
("A", Some("B"), ()),
("C", Some("D"), ()),
("F", None, ()),
("G", Some("F"), ()),
("E", Some("D"), ()),
("D", Some("B"), ()),
("I", Some("G"), ()),
("H", Some("I"), ()),
("B", Some("F"), ()),
];
let (test_tree, _) = edges_to_tree_with_data(&edges).expect("Couldn't construct");
print_tree(&test_tree, "TEST");
}
fn spatial_tree() -> (
Tree<(&'static str, [Precision; 3])>,
HashMap<&'static str, NodeId>,
) {
let edges: Vec<(&'static str, Option<&'static str>, [Precision; 3])> = vec![
("F", None, [3.0, 0.0, 0.0]),
("B", Some("F"), [2.0, 1.0, 0.0]),
("A", Some("B"), [1.0, 2.0, 0.0]),
("D", Some("B"), [3.0, 2.0, 0.0]),
("C", Some("D"), [2.0, 3.0, 0.0]),
("E", Some("D"), [4.0, 3.0, 0.0]),
("G", Some("F"), [4.0, 1.0, 0.0]),
("I", Some("G"), [5.0, 2.0, 0.0]),
("H", Some("I"), [6.0, 3.0, 0.0]),
];
edges_to_tree_with_data(&edges).expect("Couldn't construct")
}
#[test]
fn test_resample_tree() {
let (test_tree, _) = spatial_tree();
print_tree(&test_tree, "PRE-SAMPLE");
resample_tree_points(&test_tree, 0.3);
}
#[test]
fn test_cable() {
let (test_tree, _) = spatial_tree();
let cable = test_tree.cable_length();
let rt2 = (2.0 as Precision).sqrt();
assert_close(cable, 8.0 * rt2);
}
#[test]
fn test_prune_twigs() {
let (mut test_tree, _) = spatial_tree();
test_tree.prune_twigs(2.0);
let existing: HashSet<_> = test_tree
.root()
.unwrap()
.traverse_pre_order()
.map(|n| n.data().0)
.collect();
for n in ["B", "D", "H"].iter() {
assert!(existing.contains(n));
}
for n in ["A", "C", "E"].iter() {
assert!(!existing.contains(n))
}
}
#[test]
fn test_prune_beyond() {
let (mut test_tree, _) = spatial_tree();
test_tree.prune_beyond_distance(3.0);
let existing: HashSet<_> = test_tree
.root()
.unwrap()
.traverse_pre_order()
.map(|n| n.data().0)
.collect();
println!("Contains nodes: {:?}", existing);
for n in ["A", "D", "I"].iter() {
assert!(existing.contains(n));
}
for n in ["C", "E", "H"].iter() {
assert!(!existing.contains(n))
}
}
}
| {
let mut to_prune = Vec::default();
let root = self.root().expect("has root");
let mut to_visit: Vec<_> = root
.children()
.map(|n| {
(
n.node_id(),
n.node_id(),
n.data().distance_to(root.data().location()),
)
})
.collect();
while let Some((node_id, head_id, dist)) = to_visit.pop() {
let node = self.get(node_id).unwrap();
let mut children: Vec<_> = node.children().collect();
match children.len() {
0 => {
if dist <= threshold {
to_prune.push(head_id);
}
}
1 => {
let child = children.pop().unwrap();
to_visit.push((
child.node_id(),
head_id,
dist + child.data().distance_to(node.data().location()),
));
}
_ => to_visit.extend(children.into_iter().map(|c| {
(
c.node_id(),
c.node_id(),
c.data().distance_to(node.data().location()),
)
})),
};
}
self.prune_at(&to_prune)
} |
queryset.py | from django.db.models import query
from .query import SafeDeleteQuery
from functools import partial, reduce
from django.db.models.constants import LOOKUP_SEP
from django.db.models import Max, Min, F
from django.utils.module_loading import import_string
def get_lookup_value(obj, field):
return reduce(lambda i, f: getattr(i, f), field.split(LOOKUP_SEP), obj)
class SafeDeleteQueryset(query.QuerySet):
"""Default queryset for the SafeDeleteManager.
Takes care of "lazily evaluating" safedelete QuerySets. QuerySets passed
within the ``SafeDeleteQueryset`` will have all of the models available.
The deleted policy is evaluated at the very end of the chain when the
QuerySet itself is evaluated.
"""
def __init__(self, model=None, query=None, using=None, hints=None):
super(SafeDeleteQueryset, self).__init__(model=model, query=query, using=using, hints=hints)
self.query = query or SafeDeleteQuery(self.model)
def delete(self, force_policy=None):
"""Overrides bulk delete behaviour.
.. note::
The current implementation loses performance on bulk deletes in order
to safely delete objects according to the deletion policies set.
.. seealso::
:py:func:`safedelete.models.SafeDeleteModel.delete`
"""
assert self.query.can_filter(), "Cannot use 'limit' or 'offset' with delete."
# TODO: Replace this by bulk update if we can
for obj in self.all(): | self._result_cache = None
delete.alters_data = True
def undelete(self, force_policy=None):
"""Undelete all soft deleted models.
.. note::
The current implementation loses performance on bulk undeletes in
order to call the pre/post-save signals.
.. seealso::
:py:func:`safedelete.models.SafeDeleteModel.undelete`
"""
assert self.query.can_filter(), "Cannot use 'limit' or 'offset' with undelete."
# TODO: Replace this by bulk update if we can (need to call pre/post-save signal)
for obj in self.all():
obj.undelete(force_policy=force_policy)
self._result_cache = None
undelete.alters_data = True
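    # Illustrative usage of the two bulk operations above (hypothetical
    # ``Article`` model; a sketch only, not part of the API):
    #
    #   Article.objects.filter(author=some_author).delete()   # soft-deletes row by row
    #   Article.deleted_objects.all().undelete()               # restores, firing signals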
def all(self, force_visibility=None):
"""Override so related managers can also see the deleted models.
A model's m2m field does not easily have access to `all_objects` and
so setting `force_visibility` to True is a way of getting all of the
models. It is not recommended to use `force_visibility` outside of related
models because it will create a new queryset.
Args:
force_visibility: Force a deletion visibility. (default: {None})
"""
if force_visibility is not None:
self.query._safedelete_force_visibility = force_visibility
return super(SafeDeleteQueryset, self).all()
def filter(self, *args, **kwargs):
# Return a copy, see #131
queryset = self._clone()
queryset.query.check_field_filter(**kwargs)
return super(SafeDeleteQueryset, queryset).filter(*args, **kwargs)
class OrderedSafeDeleteQueryset(SafeDeleteQueryset):
"""
# ADDED BY LEE
This extends SafeDeleteQueryset with methods from OrderedModelQuerySet
of the django-ordered-model package, so that we can have both proper ordering and
safe-deletion
"""
def _get_order_field_name(self):
return self.model.order_field_name
def _get_order_field_lookup(self, lookup):
order_field_name = self._get_order_field_name()
return LOOKUP_SEP.join([order_field_name, lookup])
def _get_order_with_respect_to(self):
model = self.model
order_with_respect_to = model.order_with_respect_to
if isinstance(order_with_respect_to, str):
order_with_respect_to = (order_with_respect_to,)
if order_with_respect_to is None:
raise AssertionError(
(
'ordered model admin "{0}" has not specified "order_with_respect_to"; note that this '
"should go in the model body, and is not to be confused with the Meta property of the same name, "
"which is independent Django functionality"
).format(model)
)
return order_with_respect_to
def get_max_order(self):
order_field_name = self._get_order_field_name()
return self.aggregate(Max(order_field_name)).get(
self._get_order_field_lookup("max")
)
def get_min_order(self):
order_field_name = self._get_order_field_name()
return self.aggregate(Min(order_field_name)).get(
self._get_order_field_lookup("min")
)
def get_next_order(self):
order = self.get_max_order()
return order + 1 if order is not None else 0
def above(self, order, inclusive=False):
"""Filter items above order."""
lookup = "gte" if inclusive else "gt"
return self.filter(**{self._get_order_field_lookup(lookup): order})
def above_instance(self, ref, inclusive=False):
"""Filter items above ref's order."""
order_field_name = self._get_order_field_name()
order = getattr(ref, order_field_name)
return self.above(order, inclusive=inclusive)
def below(self, order, inclusive=False):
"""Filter items below order."""
lookup = "lte" if inclusive else "lt"
return self.filter(**{self._get_order_field_lookup(lookup): order})
def below_instance(self, ref, inclusive=False):
"""Filter items below ref's order."""
order_field_name = self._get_order_field_name()
order = getattr(ref, order_field_name)
return self.below(order, inclusive=inclusive)
def decrease_order(self, **extra_kwargs):
"""Decrease `order_field_name` value by 1."""
order_field_name = self._get_order_field_name()
update_kwargs = {order_field_name: F(order_field_name) - 1}
if extra_kwargs:
update_kwargs.update(extra_kwargs)
return self.update(**update_kwargs)
def increase_order(self, **extra_kwargs):
"""Increase `order_field_name` value by 1."""
order_field_name = self._get_order_field_name()
update_kwargs = {order_field_name: F(order_field_name) + 1}
if extra_kwargs:
update_kwargs.update(extra_kwargs)
return self.update(**update_kwargs)
def bulk_create(self, objs, batch_size=None):
order_field_name = self._get_order_field_name()
order_with_respect_to = self.model.order_with_respect_to
objs = list(objs)
if order_with_respect_to:
order_with_respect_to_mapping = {}
order_with_respect_to = self._get_order_with_respect_to()
for obj in objs:
key = tuple(
get_lookup_value(obj, field) for field in order_with_respect_to
)
if key in order_with_respect_to_mapping:
order_with_respect_to_mapping[key] += 1
else:
order_with_respect_to_mapping[
key
] = self.filter_by_order_with_respect_to(obj).get_next_order()
setattr(obj, order_field_name, order_with_respect_to_mapping[key])
else:
for order, obj in enumerate(objs, self.get_next_order()):
setattr(obj, order_field_name, order)
return super().bulk_create(objs, batch_size=batch_size)
def _get_order_with_respect_to_filter_kwargs(self, ref):
order_with_respect_to = self._get_order_with_respect_to()
_get_lookup_value = partial(get_lookup_value, ref)
return {field: _get_lookup_value(field) for field in order_with_respect_to}
_get_order_with_respect_to_filter_kwargs.queryset_only = False
def filter_by_order_with_respect_to(self, ref):
order_with_respect_to = self.model.order_with_respect_to
if order_with_respect_to:
filter_kwargs = self._get_order_with_respect_to_filter_kwargs(ref)
return self.filter(**filter_kwargs)
return self | obj.delete(force_policy=force_policy) |
helm-with-proxy.ts | import { platform } from 'os';
import { existsSync, chmodSync } from 'fs';
import { IDeployer, IImageOptions } from './types';
import * as kubectl from '../../helpers/kubectl';
import { execWrapper as exec } from '../../helpers/exec';
const helmVersion = '3.0.0';
const helmPath = './helm';
const helmChartPath = './snyk-monitor';
export const helmWithProxyDeployer: IDeployer = {
deploy: deployKubernetesMonitor,
};
async function deployKubernetesMonitor(
imageOptions: IImageOptions,
): Promise<void> {
if (!existsSync(helmPath)) {
await downloadHelm();
}
await kubectl.applyK8sYaml('test/fixtures/proxying/tinyproxy-service.yaml');
await kubectl.applyK8sYaml(
'test/fixtures/proxying/tinyproxy-deployment.yaml',
);
await kubectl.waitForDeployment('forwarding-proxy', 'snyk-monitor');
const imageNameAndTag = imageOptions.nameAndTag.split(':');
const imageName = imageNameAndTag[0];
const imageTag = imageNameAndTag[1];
const imagePullPolicy = imageOptions.pullPolicy;
await exec(
`${helmPath} upgrade --install snyk-monitor ${helmChartPath} --namespace snyk-monitor ` +
`--set image.repository=${imageName} ` +
`--set image.tag=${imageTag} ` +
`--set image.pullPolicy=${imagePullPolicy} ` +
'--set integrationApi=https://kubernetes-upstream.dev.snyk.io ' +
'--set https_proxy=http://forwarding-proxy:8080', | }
async function downloadHelm(): Promise<void> {
console.log(`Downloading Helm ${helmVersion}...`);
const os = platform();
await exec(
`curl https://get.helm.sh/helm-v${helmVersion}-${os}-amd64.tar.gz | tar xfzO - ${os}-amd64/helm > ${helmPath}`,
);
chmodSync(helmPath, 0o755); // rwxr-xr-x
console.log('Downloaded Helm');
} | );
console.log(
`Deployed ${imageOptions.nameAndTag} with pull policy ${imageOptions.pullPolicy}`,
); |
pandas_backend.py | import cProfile
import logging
import os
import pstats
import sys
import warnings
from datetime import datetime
import numpy as np
import pandas as pd
import pandas.api.types as pdtypes
from future import standard_library
from .base_backend import ComputationalBackend
from .feature_tree import FeatureTree
from featuretools import variable_types
from featuretools.entityset.relationship import Relationship
from featuretools.exceptions import UnknownFeature
from featuretools.primitives import (
AggregationPrimitive,
DirectFeature,
IdentityFeature,
TransformPrimitive
)
from featuretools.utils.gen_utils import make_tqdm_iterator
standard_library.install_aliases()
warnings.simplefilter('ignore', np.RankWarning)
warnings.simplefilter("ignore", category=RuntimeWarning)
logger = logging.getLogger('featuretools.computational_backend')
ROOT_DIR = os.path.expanduser("~")
class PandasBackend(ComputationalBackend):
def __init__(self, entityset, features):
assert len(set(f.entity.id for f in features)) == 1, \
"Features must all be defined on the same entity"
self.entityset = entityset
self.target_eid = features[0].entity.id
self.features = features
self.feature_tree = FeatureTree(entityset, features)
def __sizeof__(self):
return self.entityset.__sizeof__()
def calculate_all_features(self, instance_ids, time_last,
training_window=None, profile=False,
precalculated_features=None, ignored=None,
verbose=False):
"""
Given a list of instance ids and features with a shared time window,
generate and return a mapping of instance -> feature values.
Args:
            instance_ids (list): List of instance ids for which to build features.
            time_last (pd.Timestamp): Last allowed time. Data from exactly this
                time is not allowed.
training_window (Timedelta, optional): Data older than
time_last by more than this will be ignored.
profile (bool): Enable profiler if True.
verbose (bool): Print output progress if True.
Returns:
pd.DataFrame : Pandas DataFrame of calculated feature values.
Indexed by instance_ids. Columns in same order as features
passed in.
"""
assert len(instance_ids) > 0, "0 instance ids provided"
self.instance_ids = instance_ids
self.time_last = time_last
if self.time_last is None:
self.time_last = datetime.now()
# For debugging
if profile:
pr = cProfile.Profile()
pr.enable()
if precalculated_features is None:
precalculated_features = {}
# Access the index to get the filtered data we need
target_entity = self.entityset[self.target_eid]
if ignored:
# TODO: Just want to remove entities if don't have any (sub)features defined
# on them anymore, rather than recreating
ordered_entities = FeatureTree(self.entityset, self.features, ignored=ignored).ordered_entities
else:
ordered_entities = self.feature_tree.ordered_entities
necessary_columns = self.feature_tree.necessary_columns
eframes_by_filter = \
self.entityset.get_pandas_data_slice(filter_entity_ids=ordered_entities,
index_eid=self.target_eid,
instances=instance_ids,
entity_columns=necessary_columns,
time_last=time_last,
training_window=training_window,
verbose=verbose)
large_eframes_by_filter = None
if any([f.uses_full_entity for f in self.feature_tree.all_features]):
large_necessary_columns = self.feature_tree.necessary_columns_for_all_values_features
large_eframes_by_filter = \
self.entityset.get_pandas_data_slice(filter_entity_ids=ordered_entities,
index_eid=self.target_eid,
instances=None,
entity_columns=large_necessary_columns,
time_last=time_last,
training_window=training_window,
verbose=verbose)
# Handle an empty time slice by returning a dataframe with defaults
if eframes_by_filter is None:
return self.generate_default_df(instance_ids=instance_ids)
finished_entity_ids = []
# Populate entity_frames with precalculated features
if len(precalculated_features) > 0:
for entity_id, precalc_feature_values in precalculated_features.items():
if entity_id in eframes_by_filter:
frame = eframes_by_filter[entity_id][entity_id]
eframes_by_filter[entity_id][entity_id] = pd.merge(frame,
precalc_feature_values,
left_index=True,
right_index=True)
else:
# Only features we're taking from this entity
# are precomputed
# Make sure the id variable is a column as well as an index
entity_id_var = self.entityset[entity_id].index
precalc_feature_values[entity_id_var] = precalc_feature_values.index.values
eframes_by_filter[entity_id] = {entity_id: precalc_feature_values}
finished_entity_ids.append(entity_id)
# Iterate over the top-level entities (filter entities) in sorted order
# and calculate all relevant features under each one.
if verbose:
total_groups_to_compute = sum(len(group)
for group in self.feature_tree.ordered_feature_groups.values())
pbar = make_tqdm_iterator(total=total_groups_to_compute,
desc="Computing features",
unit="feature group")
if verbose:
pbar.update(0)
for filter_eid in ordered_entities:
entity_frames = eframes_by_filter[filter_eid]
large_entity_frames = None
if large_eframes_by_filter is not None:
large_entity_frames = large_eframes_by_filter[filter_eid]
# update the current set of entity frames with the computed features
# from previously finished entities
for eid in finished_entity_ids:
# only include this frame if it's not from a descendent entity:
# descendent entity frames will have to be re-calculated.
# TODO: this check might not be necessary, depending on our
# constraints
if not self.entityset.find_backward_path(start_entity_id=filter_eid,
goal_entity_id=eid):
entity_frames[eid] = eframes_by_filter[eid][eid]
# TODO: look this over again
# precalculated features will only be placed in entity_frames,
                # and it's possible that they are the only features computed
# for an entity. In this case, the entity won't be present in
# large_eframes_by_filter. The relevant lines that this case passes
# through are 136-143
if (large_eframes_by_filter is not None and
eid in large_eframes_by_filter and eid in large_eframes_by_filter[eid]):
large_entity_frames[eid] = large_eframes_by_filter[eid][eid]
if filter_eid in self.feature_tree.ordered_feature_groups:
for group in self.feature_tree.ordered_feature_groups[filter_eid]:
if verbose:
pbar.set_postfix({'running': 0})
test_feature = group[0]
entity_id = test_feature.entity.id
input_frames_type = self.feature_tree.input_frames_type(test_feature)
input_frames = large_entity_frames
if input_frames_type == "subset_entity_frames":
input_frames = entity_frames
handler = self._feature_type_handler(test_feature)
result_frame = handler(group, input_frames)
output_frames_type = self.feature_tree.output_frames_type(test_feature)
if output_frames_type in ['full_and_subset_entity_frames', 'subset_entity_frames']:
index = entity_frames[entity_id].index
# If result_frame came from a uses_full_entity feature,
# and the input was large_entity_frames,
# then it's possible it doesn't contain some of the features
# in the output entity_frames
# We thus need to concatenate the existing frame with the result frame,
# making sure not to duplicate any columns
_result_frame = result_frame.reindex(index)
cols_to_keep = [c for c in _result_frame.columns
if c not in entity_frames[entity_id].columns]
entity_frames[entity_id] = pd.concat([entity_frames[entity_id],
_result_frame[cols_to_keep]],
axis=1)
if output_frames_type in ['full_and_subset_entity_frames', 'full_entity_frames']:
index = large_entity_frames[entity_id].index
_result_frame = result_frame.reindex(index)
cols_to_keep = [c for c in _result_frame.columns
if c not in large_entity_frames[entity_id].columns]
large_entity_frames[entity_id] = pd.concat([large_entity_frames[entity_id],
_result_frame[cols_to_keep]],
axis=1)
if verbose:
pbar.update(1)
finished_entity_ids.append(filter_eid)
if verbose:
pbar.set_postfix({'running': 0})
pbar.refresh()
sys.stdout.flush()
pbar.close()
# debugging
if profile:
pr.disable()
prof_folder_path = os.path.join(ROOT_DIR, 'prof')
if not os.path.exists(prof_folder_path):
os.mkdir(prof_folder_path)
with open(os.path.join(prof_folder_path, 'inst-%s.log' %
list(instance_ids)[0]), 'w') as f:
pstats.Stats(pr, stream=f).strip_dirs().sort_stats("cumulative", "tottime").print_stats()
df = eframes_by_filter[self.target_eid][self.target_eid]
# fill in empty rows with default values
missing_ids = [i for i in instance_ids if i not in
df[target_entity.index]]
if missing_ids:
default_df = self.generate_default_df(instance_ids=missing_ids,
extra_columns=df.columns)
df = df.append(default_df, sort=True)
df.index.name = self.entityset[self.target_eid].index
return df[[feat.get_name() for feat in self.features]]
def generate_default_df(self, instance_ids, extra_columns=None):
index_name = self.features[0].entity.index
default_row = [f.default_value for f in self.features]
default_cols = [f.get_name() for f in self.features]
default_matrix = [default_row] * len(instance_ids)
default_df = pd.DataFrame(default_matrix,
columns=default_cols,
index=instance_ids)
default_df.index.name = index_name
if extra_columns is not None:
for c in extra_columns:
if c not in default_df.columns:
default_df[c] = [np.nan] * len(instance_ids)
return default_df
def | (self, f):
if isinstance(f, TransformPrimitive):
return self._calculate_transform_features
elif isinstance(f, DirectFeature):
return self._calculate_direct_features
elif isinstance(f, AggregationPrimitive):
return self._calculate_agg_features
elif isinstance(f, IdentityFeature):
return self._calculate_identity_features
else:
raise UnknownFeature(u"{} feature unknown".format(f.__class__))
def _calculate_identity_features(self, features, entity_frames):
entity_id = features[0].entity.id
assert (entity_id in entity_frames and
features[0].get_name() in entity_frames[entity_id].columns)
return entity_frames[entity_id]
def _calculate_transform_features(self, features, entity_frames):
entity_id = features[0].entity.id
assert len(set([f.entity.id for f in features])) == 1, \
"features must share base entity"
assert entity_id in entity_frames
frame = entity_frames[entity_id]
for f in features:
# handle when no data
if frame.shape[0] == 0:
set_default_column(frame, f)
continue
# collect only the variables we need for this transformation
variable_data = [frame[bf.get_name()].values
for bf in f.base_features]
feature_func = f.get_function()
# apply the function to the relevant dataframe slice and add the
# feature row to the results dataframe.
if f.uses_calc_time:
values = feature_func(*variable_data, time=self.time_last)
else:
values = feature_func(*variable_data)
if isinstance(values, pd.Series):
values = values.values
frame[f.get_name()] = list(values)
return frame
def _calculate_direct_features(self, features, entity_frames):
entity_id = features[0].entity.id
parent_entity_id = features[0].parent_entity.id
assert entity_id in entity_frames and parent_entity_id in entity_frames
path = self.entityset.find_forward_path(entity_id, parent_entity_id)
assert len(path) == 1, \
"Error calculating DirectFeatures, len(path) > 1"
parent_df = entity_frames[parent_entity_id]
child_df = entity_frames[entity_id]
merge_var = path[0].child_variable.id
# generate a mapping of old column names (in the parent entity) to
# new column names (in the child entity) for the merge
col_map = {path[0].parent_variable.id: merge_var}
index_as_feature = None
for f in features:
if f.base_features[0].get_name() == path[0].parent_variable.id:
index_as_feature = f
# Sometimes entityset._add_multigenerational_links adds link variables
# that would ordinarily get calculated as direct features,
# so we make sure not to attempt to calculate again
if f.get_name() in child_df.columns:
continue
col_map[f.base_features[0].get_name()] = f.get_name()
# merge the identity feature from the parent entity into the child
merge_df = parent_df[list(col_map.keys())].rename(columns=col_map)
if index_as_feature is not None:
merge_df.set_index(index_as_feature.get_name(), inplace=True,
drop=False)
else:
merge_df.set_index(merge_var, inplace=True)
new_df = pd.merge(left=child_df, right=merge_df,
left_on=merge_var, right_index=True,
how='left')
return new_df
def _calculate_agg_features(self, features, entity_frames):
test_feature = features[0]
entity = test_feature.entity
child_entity = test_feature.base_features[0].entity
assert entity.id in entity_frames and child_entity.id in entity_frames
frame = entity_frames[entity.id]
base_frame = entity_frames[child_entity.id]
# Sometimes approximate features get computed in a previous filter frame
# and put in the current one dynamically,
# so there may be existing features here
features = [f for f in features if f.get_name()
not in frame.columns]
if not len(features):
return frame
# handle where
where = test_feature.where
if where is not None and not base_frame.empty:
base_frame = base_frame.loc[base_frame[where.get_name()]]
# when no child data, just add all the features to frame with nan
if base_frame.empty:
for f in features:
frame[f.get_name()] = np.nan
else:
relationship_path = self.entityset.find_backward_path(entity.id,
child_entity.id)
groupby_var = Relationship._get_link_variable_name(relationship_path)
# if the use_previous property exists on this feature, include only the
# instances from the child entity included in that Timedelta
use_previous = test_feature.use_previous
if use_previous and not base_frame.empty:
# Filter by use_previous values
time_last = self.time_last
if use_previous.is_absolute():
time_first = time_last - use_previous
ti = child_entity.time_index
if ti is not None:
base_frame = base_frame[base_frame[ti] >= time_first]
else:
n = use_previous.value
def last_n(df):
return df.iloc[-n:]
base_frame = base_frame.groupby(groupby_var, observed=True, sort=False).apply(last_n)
to_agg = {}
agg_rename = {}
to_apply = set()
# apply multivariable and time-dependent features as we find them, and
# save aggregable features for later
for f in features:
if _can_agg(f):
variable_id = f.base_features[0].get_name()
if variable_id not in to_agg:
to_agg[variable_id] = []
func = f.get_function()
funcname = func
if callable(func):
funcname = func.__name__
to_agg[variable_id].append(func)
# this is used below to rename columns that pandas names for us
agg_rename[u"{}-{}".format(variable_id, funcname)] = f.get_name()
continue
to_apply.add(f)
        # Apply the non-aggregable functions to generate a new dataframe, and
        # merge it with the existing one
if len(to_apply):
wrap = agg_wrapper(to_apply, self.time_last)
# groupby_var can be both the name of the index and a column,
# to silence pandas warning about ambiguity we explicitly pass
# the column (in actuality grouping by both index and group would
# work)
to_merge = base_frame.groupby(base_frame[groupby_var], observed=True, sort=False).apply(wrap)
frame = pd.merge(left=frame, right=to_merge,
left_index=True,
right_index=True, how='left')
# Apply the aggregate functions to generate a new dataframe, and merge
# it with the existing one
if len(to_agg):
# groupby_var can be both the name of the index and a column,
# to silence pandas warning about ambiguity we explicitly pass
# the column (in actuality grouping by both index and group would
# work)
to_merge = base_frame.groupby(base_frame[groupby_var],
observed=True, sort=False).agg(to_agg)
# rename columns to the correct feature names
to_merge.columns = [agg_rename["-".join(x)] for x in to_merge.columns.ravel()]
to_merge = to_merge[list(agg_rename.values())]
# workaround for pandas bug where categories are in the wrong order
# see: https://github.com/pandas-dev/pandas/issues/22501
if pdtypes.is_categorical_dtype(frame.index):
categories = pdtypes.CategoricalDtype(categories=frame.index.categories)
to_merge.index = to_merge.index.astype(object).astype(categories)
frame = pd.merge(left=frame, right=to_merge,
left_index=True, right_index=True, how='left')
# Handle default values
# 1. handle non scalar default values
iterfeats = [f for f in features
if hasattr(f.default_value, '__iter__')]
for f in iterfeats:
nulls = pd.isnull(frame[f.get_name()])
for ni in nulls[nulls].index:
frame.at[ni, f.get_name()] = f.default_value
# 2. handle scalars default values
fillna_dict = {f.get_name(): f.default_value for f in features
if f not in iterfeats}
frame.fillna(fillna_dict, inplace=True)
# convert boolean dtypes to floats as appropriate
# pandas behavior: https://github.com/pydata/pandas/issues/3752
for f in features:
if (not f.expanding and
f.variable_type == variable_types.Numeric and
frame[f.get_name()].dtype.name in ['object', 'bool']):
frame[f.get_name()] = frame[f.get_name()].astype(float)
return frame
def _can_agg(feature):
assert isinstance(feature, AggregationPrimitive)
base_features = feature.base_features
if feature.where is not None:
base_features = [bf.get_name() for bf in base_features
if bf.get_name() != feature.where.get_name()]
if feature.uses_calc_time:
return False
return len(base_features) == 1 and not feature.expanding
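# In short: a feature is pandas-aggregable only when it reduces a single input
# variable, ignores calculation time, and is not expanding; everything else
# falls back to the slower agg_wrapper/apply path.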
def agg_wrapper(feats, time_last):
def wrap(df):
d = {}
for f in feats:
func = f.get_function()
variable_ids = [bf.get_name() for bf in f.base_features]
args = [df[v] for v in variable_ids]
if f.uses_calc_time:
d[f.get_name()] = func(*args, time=time_last)
else:
d[f.get_name()] = func(*args)
return pd.Series(d)
return wrap
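# Illustrative use of agg_wrapper (hypothetical frame and feature list; a
# sketch only):
#   per_group = base_frame.groupby(groupby_var).apply(agg_wrapper(feats, time_last))
# yielding one row per group with one column per feature name.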
def set_default_column(frame, f):
default = f.default_value
if hasattr(default, '__iter__'):
length = frame.shape[0]
default = [f.default_value] * length
frame[f.get_name()] = default
| _feature_type_handler |
EmptyForm.ts | // tslint:disable
/**
* Evaluation
 * This API describes the online evaluation service, as accessed by the user and organizers.
*
* The version of the OpenAPI document: 1.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import { exists, mapValues } from '../runtime';
import {
AbstractForm,
AbstractFormFromJSON,
AbstractFormToJSON,
Course,
CourseFromJSON,
CourseToJSON,
Prof,
ProfFromJSON,
ProfToJSON,
Tutor,
TutorFromJSON,
TutorToJSON,
} from './index';
/**
* Empty Form to be completed by student.
* @export
* @interface EmptyForm
*/
export interface EmptyForm {
/**
*
* @type {string}
* @memberof EmptyForm
*/
id?: string;
/**
* Name of the module evaluated.
* @type {string}
* @memberof EmptyForm
*/
moduleName?: string;
/**
*
* @type {Array<Prof>}
* @memberof EmptyForm
*/
profs?: Array<Prof>;
/**
*
* @type {Array<Tutor>}
* @memberof EmptyForm
*/
tutors?: Array<Tutor>;
/**
*
* @type {AbstractForm}
* @memberof EmptyForm
*/
abstractForm?: AbstractForm;
/**
*
* @type {Course}
* @memberof EmptyForm
*/
course?: Course;
}
export function | (json: any): EmptyForm {
return {
'id': !exists(json, 'id') ? undefined : json['id'],
'moduleName': !exists(json, 'moduleName') ? undefined : json['moduleName'],
'profs': !exists(json, 'profs') ? undefined : (json['profs'] as Array<any>).map(ProfFromJSON),
'tutors': !exists(json, 'tutors') ? undefined : (json['tutors'] as Array<any>).map(TutorFromJSON),
'abstractForm': !exists(json, 'abstractForm') ? undefined : AbstractFormFromJSON(json['abstractForm']),
'course': !exists(json, 'course') ? undefined : CourseFromJSON(json['course']),
};
}
export function EmptyFormToJSON(value?: EmptyForm): any {
if (value === undefined) {
return undefined;
}
return {
'id': value.id,
'moduleName': value.moduleName,
'profs': value.profs === undefined ? undefined : (value.profs as Array<any>).map(ProfToJSON),
'tutors': value.tutors === undefined ? undefined : (value.tutors as Array<any>).map(TutorToJSON),
'abstractForm': AbstractFormToJSON(value.abstractForm),
'course': CourseToJSON(value.course),
};
}
| EmptyFormFromJSON |
forms.module.ts | import { NgModule } from '@angular/core';
import {
NbActionsModule,
NbButtonModule,
NbCardModule,
NbCheckboxModule,
NbDatepickerModule, NbIconModule,
NbInputModule,
NbRadioModule,
NbSelectModule,
NbUserModule,
} from '@nebular/theme';
import { ThemeModule } from '../../@theme/theme.module';
import { FormsRoutingModule } from './forms-routing.module';
import { FormsComponent } from './forms.component';
import { FormLayoutsComponent } from './form-layouts/form-layouts.component';
import { DatepickerComponent } from './datepicker/datepicker.component';
@NgModule({
imports: [
ThemeModule, | NbUserModule,
NbCheckboxModule,
NbRadioModule,
NbDatepickerModule,
FormsRoutingModule,
NbSelectModule,
NbIconModule,
],
declarations: [
FormsComponent,
FormLayoutsComponent,
DatepickerComponent,
],
})
export class FormsModule { } | NbInputModule,
NbCardModule,
NbButtonModule,
NbActionsModule, |
test_basic.py | from os import path
import pytest
from client.client import Client
from tools import utils
from tools.paths import ACCOUNT_PATH
from tools.utils import assert_run_failure
from .contract_paths import CONTRACT_PATH
TRANSFER_ARGS = ['--burn-cap', '0.257']
@pytest.mark.incremental
class TestRawContext:
def test_delegates(self, client: Client):
path = '/chains/main/blocks/head/context/raw/bytes/delegates/?depth=3'
res = client.rpc('get', path)
expected = {
"ed25519": {
"02": {"29": None},
"a9": {"ce": None},
"c5": {"5c": None},
"da": {"c9": None},
"e7": {"67": None},
}
}
assert res == expected
def test_no_service_1(self, client: Client):
path = '/chains/main/blocks/head/context/raw/bytes/non-existent'
with assert_run_failure('No service found at this URL'):
client.rpc('get', path)
def test_no_service_2(self, client: Client):
path = (
'/chains/main/blocks/head/context/raw/bytes/'
'non-existent?depth=-1'
)
expected = 'Command failed: Extraction depth -1 is invalid'
with assert_run_failure(expected):
client.rpc('get', path)
def test_no_service_3(self, client: Client):
path = '/chains/main/blocks/head/context/raw/bytes/non-existent?depth=0'
with assert_run_failure('No service found at this URL'):
client.rpc('get', path)
def | (self, client: Client):
utils.bake(client, 'bootstrap4')
def test_gen_keys(self, client: Client, session):
session['keys'] = ['foo', 'bar', 'boo']
sigs = [None, 'secp256k1', 'ed25519']
for key, sig in zip(session['keys'], sigs):
args = [] if sig is None else ['--sig', sig]
client.gen_key(key, args)
def test_transfers(self, client: Client, session):
client.transfer(1000, 'bootstrap1', session['keys'][0], TRANSFER_ARGS)
utils.bake(client)
client.transfer(2000, 'bootstrap1', session['keys'][1], TRANSFER_ARGS)
utils.bake(client)
client.transfer(3000, 'bootstrap1', session['keys'][2], TRANSFER_ARGS)
utils.bake(client)
def test_balances(self, client: Client, session):
assert client.get_balance(session['keys'][0]) == 1000
assert client.get_balance(session['keys'][1]) == 2000
assert client.get_balance(session['keys'][2]) == 3000
def test_transfer_bar_foo(self, client: Client, session):
client.transfer(
1000,
session['keys'][1],
session['keys'][0],
['--fee', '0', '--force-low-fee'],
)
utils.bake(client)
def test_balances_bar_foo(self, client: Client, session):
assert client.get_balance(session['keys'][0]) == 2000
assert client.get_balance(session['keys'][1]) == 1000
def test_transfer_foo_bar(self, client: Client, session):
client.transfer(
1000, session['keys'][0], session['keys'][1], ['--fee', '0.05']
)
utils.bake(client)
def test_balances_foo_bar(self, client: Client, session):
assert client.get_balance(session['keys'][0]) == 999.95
assert client.get_balance(session['keys'][1]) == 2000
def test_transfer_failure(self, client: Client, session):
with pytest.raises(Exception):
client.transfer(999.95, session['keys'][0], session['keys'][1])
def test_originate_contract_noop(self, client: Client):
contract = path.join(CONTRACT_PATH, 'opcodes', 'noop.tz')
client.remember('noop', contract)
client.typecheck(contract)
client.originate(
'noop', 1000, 'bootstrap1', contract, ['--burn-cap', '0.295']
)
utils.bake(client)
def test_transfer_to_noop(self, client: Client):
client.transfer(10, 'bootstrap1', 'noop', ['--arg', 'Unit'])
utils.bake(client)
def test_contract_hardlimit(self, client: Client):
contract = path.join(CONTRACT_PATH, 'mini_scenarios', 'hardlimit.tz')
client.originate(
'hardlimit',
1000,
'bootstrap1',
contract,
['--init', '3', '--burn-cap', '0.341'],
)
utils.bake(client)
client.transfer(10, 'bootstrap1', 'hardlimit', ['--arg', 'Unit'])
utils.bake(client)
client.transfer(10, 'bootstrap1', 'hardlimit', ['--arg', 'Unit'])
utils.bake(client)
def test_transfers_bootstraps5_bootstrap1(self, client: Client):
assert client.get_balance('bootstrap5') == 4000000
client.transfer(
400000,
'bootstrap5',
'bootstrap1',
['--fee', '0', '--force-low-fee'],
)
utils.bake(client)
client.transfer(
400000,
'bootstrap1',
'bootstrap5',
['--fee', '0', '--force-low-fee'],
)
utils.bake(client)
assert client.get_balance('bootstrap5') == 4000000
def test_activate_accounts(self, client: Client, session):
account = f"{ACCOUNT_PATH}/king_commitment.json"
session['keys'] += ['king', 'queen']
client.activate_account(session['keys'][3], account)
utils.bake(client)
account = f"{ACCOUNT_PATH}/queen_commitment.json"
client.activate_account(session['keys'][4], account)
utils.bake(client)
assert client.get_balance(session['keys'][3]) == 23932454.669343
assert client.get_balance(session['keys'][4]) == 72954577.464032
def test_transfer_king_queen(self, client: Client, session):
keys = session['keys']
client.transfer(10, keys[3], keys[4], TRANSFER_ARGS)
utils.bake(client)
def test_duplicate_alias(self, client: Client):
client.add_address("baz", "foo", force=True)
show_foo = client.show_address("foo", show_secret=True)
assert show_foo.secret_key is not None
class TestRememberContract:
@pytest.mark.parametrize(
"contract_name,non_originated_contract_address",
[
("test", "KT1BuEZtb68c1Q4yjtckcNjGELqWt56Xyesc"),
("test-2", "KT1TZCh8fmUbuDqFxetPWC2fsQanAHzLx4W9"),
],
)
def test_non_originated_contract_no_forcing_not_saved_before(
self,
client,
contract_name,
non_originated_contract_address,
):
client.remember_contract(contract_name, non_originated_contract_address)
# As it is always the same client, the contracts have been saved
# before
@pytest.mark.parametrize(
"contract_name,non_originated_contract_address",
[
("test", "KT1BuEZtb68c1Q4yjtckcNjGELqWt56Xyesc"),
("test-2", "KT1TZCh8fmUbuDqFxetPWC2fsQanAHzLx4W9"),
],
)
def test_non_originated_contract_with_forcing_and_saved_before(
self,
client,
contract_name,
non_originated_contract_address,
):
client.remember_contract(
contract_name, non_originated_contract_address, force=True
)
# As it is always the same client, the contracts have been saved
# before
@pytest.mark.parametrize(
"contract_name,non_originated_contract_address",
[
("test", "KT1BuEZtb68c1Q4yjtckcNjGELqWt56Xyesc"),
("test-2", "KT1TZCh8fmUbuDqFxetPWC2fsQanAHzLx4W9"),
],
)
def test_non_originated_contract_no_forcing_and_saved_before(
self,
client,
contract_name,
non_originated_contract_address,
):
expected_error = f"The contract alias {contract_name} already exists"
with assert_run_failure(expected_error):
client.remember_contract(
contract_name, non_originated_contract_address, force=False
)
| test_bake |
deep_copy_generated.go | // +build !ignore_autogenerated
/*
Copyright 2016 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// This file was autogenerated by deepcopy-gen. Do not edit it manually!
package serializer
import (
conversion "k8s.io/kubernetes/pkg/conversion"
runtime "k8s.io/kubernetes/pkg/runtime"
)
func | (in CodecFactory, out *CodecFactory, c *conversion.Cloner) error {
if in.scheme != nil {
in, out := in.scheme, &out.scheme
*out = new(runtime.Scheme)
if err := runtime.DeepCopy_runtime_Scheme(*in, *out, c); err != nil {
return err
}
} else {
out.scheme = nil
}
if in.serializers != nil {
in, out := in.serializers, &out.serializers
*out = make([]serializerType, len(in))
for i := range in {
if newVal, err := c.DeepCopy(in[i]); err != nil {
return err
} else {
(*out)[i] = newVal.(serializerType)
}
}
} else {
out.serializers = nil
}
if in.universal == nil {
out.universal = nil
} else if newVal, err := c.DeepCopy(in.universal); err != nil {
return err
} else {
out.universal = newVal.(runtime.Decoder)
}
if in.accepts != nil {
in, out := in.accepts, &out.accepts
*out = make([]string, len(in))
copy(*out, in)
} else {
out.accepts = nil
}
if in.legacySerializer == nil {
out.legacySerializer = nil
} else if newVal, err := c.DeepCopy(in.legacySerializer); err != nil {
return err
} else {
out.legacySerializer = newVal.(runtime.Serializer)
}
return nil
}
| DeepCopy_serializer_CodecFactory |
BaseView.js | /*
* © Copyright IBM Corp. 2013
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
dojo.provide("sbt.controls.view.nls.BaseView");
| root: ({
messageClose : "Close",
successAlt : "Success:",
errorAlt : "Error:"
})
}); | // NLS_CHARSET=UTF-8
define('sbt/controls/view/nls/BaseView',[],{ |
not_in_loop.py | """Test that not-in-loop is detected properly."""
# pylint: disable=missing-docstring, invalid-name, too-few-public-methods
# pylint: disable=useless-else-on-loop, using-constant-test, useless-object-inheritance
# pylint: disable=no-else-continue
while True:
def ala():
continue # [not-in-loop]
while True:
pass
else:
continue # [not-in-loop]
def lala():
|
while True:
class A(object):
continue # [not-in-loop]
for _ in range(10):
pass
else:
continue # [not-in-loop]
for _ in range(42):
pass
else:
break # [not-in-loop]
if True:
continue # [not-in-loop]
else:
break # [not-in-loop]
for _ in range(10):
for _ in range(20):
pass
else:
continue
while True:
while True:
break
else:
break
break
else:
pass
for _ in range(1):
continue
for _ in range(42):
break
| continue # [not-in-loop] |
Video4.py | import firebase_admin
from firebase_admin import credentials,firestore
from firebase_admin import storage
cred = credentials.Certificate("./adminKey.json")
firebase_admin.initialize_app(cred, {
'storageBucket': 'women-e598c.appspot.com'
})
#Database Methods
db = firestore.client()
#discrip = ""
title = "Plight of Women"
cloudStorageLink = "https://firebasestorage.googleapis.com/v0/b/women-e598c.appspot.com/o/y2mate.com%20-%20The%20plight%20of%20women%20in%20India_360p.mp4?alt=media&token=3633254b-9fee-4f0c-9fa3-057e0616545c"
name = "CNN"
source = "YouTube"
sourceLink = "https://www.youtube.com/watch?v=XtHgTf67hzc"
discription = "CNN's Sumnima Udas examines the cycle of discrimination against women in India."
# note: the commas below make this a tuple (637, 57, 144340); it is stored in
# Firestore as-is
viewsOnVideo = 637,57,144340
socialHandle = " "
webpage = ""
if(len(title)!=0 and len(cloudStorageLink)!=0):
videsoWrite = db.collection("adminContent").document("Videos").collection("data").document().set({
"title":title,
"name":name,
"source":source,
"sourceLink":sourceLink,
"discription":discription,
"viewsOnVideo":viewsOnVideo,
"socialHandle":socialHandle,
"webpage":webpage,
"cloudStorageLink":cloudStorageLink
})
else:
| print("Error") |
|
generator_v02.py | #!/usr/bin/env python
# History
# v01 : adaptation from the one given by Udacity to work
# v02 : adapt to commonFunctions_v10.py to use generator.
# Start adding again everything from model_v12.py (image augmentation)
import os
import csv
import cv2
import numpy as np
import sklearn
from math import ceil
from random import shuffle
from sklearn.model_selection import train_test_split
from commonFunctions_v10 import get_lines_logfile
from commonFunctions_v10 import get_info_from_lines
from commonFunctions_v10 import flip_horizontally
# RGB2YUV is called below but was never imported; assuming it lives in the
# same helpers module as the functions above.
from commonFunctions_v10 import RGB2YUV
STEER_CORRECTION_FACTOR = 0.2 # to tune up for left and right images/measurements
# Set our batch size for fit generator
batch_len = 6
# Reading CSV file, extracting lines.
samples = get_lines_logfile()
train_samples, validation_samples = train_test_split(samples[1:], test_size=0.2)
def generator(samples, batch_size=batch_len):
num_samples = len(samples)
# print('num_samples : {}'.format(num_samples))
while 1: # Loop forever so the generator never terminates
shuffle(samples)
for offset in range(0, num_samples, batch_size):
# correction : should go only until min(num_samples,offset+batch_size)
batch_samples = samples[offset: min(num_samples,offset+batch_size)]
# here will get both center, left, right images + their measurements.
# if batch_size = 32 --> 32*3 = 96 images ....
images, angles = get_info_from_lines(batch_samples,STEER_CORRECTION_FACTOR,nb_images=None)
# data augmentation flip horizontally image + inverse measurements
augm_images, augm_measurements = flip_horizontally(images,angles)
images.extend(augm_images)
angles.extend(augm_measurements)
# Nvidia : need to convert images in YUV ...
images = RGB2YUV(images)
# trim image to only see section with road
X_train = np.array(images)
y_train = np.array(angles)
yield sklearn.utils.shuffle(X_train, y_train)
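# Illustration of one yielded batch (assuming 160x320 RGB frames): with
# batch_len = 6 csv lines, up to 6 lines * 3 cameras * 2 flips = 36 samples,
# so X_train has shape (<=36, 160, 320, 3) and y_train has shape (<=36,).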
# Set our batch size (*3 due to image center + left + right ....), then *2 due to flip of each images
batch_size=batch_len*3*2 #6*3*2 = 36 ....
# compile and train the model using the generator function
train_generator = generator(train_samples, batch_size=batch_size)
validation_generator = generator(validation_samples, batch_size=batch_size)
from keras.models import Sequential
from keras.layers import Flatten, Dense, Lambda, Cropping2D, Activation, Dropout
model = Sequential()
# Preprocess incoming data, centered around zero with small standard deviation
model.add(Lambda(lambda x: x/127.5 - 1.,
input_shape=(160,320,3))) |
model.compile(loss='mse', optimizer='adam')
model.fit_generator(train_generator,
steps_per_epoch=ceil(len(train_samples)/batch_size),
validation_data=validation_generator,
validation_steps=ceil(len(validation_samples)/batch_size),
epochs=5, verbose=1) | model.add(Flatten())
model.add(Dense(1)) |
test_vpnaas_driver_plugin.py | # Copyright 2013, Nachi Ueno, NTT I3, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from neutron.common import constants
from neutron import context
from neutron import manager
from neutron.plugins.common import constants as p_constants
from neutron.tests.unit.db.vpn import test_db_vpnaas
from neutron.tests.unit.openvswitch import test_agent_scheduler
from neutron.tests.unit import test_agent_ext_plugin
from neutron_vpnaas.db.vpn import vpn_validator
from neutron_vpnaas.services.vpn.service_drivers import ipsec as ipsec_driver
FAKE_HOST = test_agent_ext_plugin.L3_HOSTA
VPN_DRIVER_CLASS = 'neutron.services.vpn.plugin.VPNDriverPlugin'
class TestVPNDriverPlugin(test_db_vpnaas.TestVpnaas,
test_agent_scheduler.AgentSchedulerTestMixIn,
test_agent_ext_plugin.AgentDBTestMixIn):
def setUp(self):
self.adminContext = context.get_admin_context()
driver_cls_p = mock.patch(
'neutron.services.vpn.'
'service_drivers.ipsec.IPsecVPNDriver')
driver_cls = driver_cls_p.start()
self.driver = mock.Mock()
self.driver.service_type = ipsec_driver.IPSEC
self.driver.validator = vpn_validator.VpnReferenceValidator()
driver_cls.return_value = self.driver
super(TestVPNDriverPlugin, self).setUp(
vpnaas_plugin=VPN_DRIVER_CLASS)
def test_create_ipsec_site_connection(self, **extras):
super(TestVPNDriverPlugin, self).test_create_ipsec_site_connection()
self.driver.create_ipsec_site_connection.assert_called_once_with(
mock.ANY, mock.ANY)
self.driver.delete_ipsec_site_connection.assert_called_once_with(
mock.ANY, mock.ANY)
def test_delete_vpnservice(self, **extras):
super(TestVPNDriverPlugin, self).test_delete_vpnservice()
self.driver.delete_vpnservice.assert_called_once_with(
mock.ANY, mock.ANY)
def test_update_vpnservice(self, **extras):
super(TestVPNDriverPlugin, self).test_update_vpnservice()
self.driver.update_vpnservice.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY)
@contextlib.contextmanager
def vpnservice_set(self):
"""Test case to create a ipsec_site_connection."""
vpnservice_name = "vpn1" | ikename = "ikepolicy1"
ipsecname = "ipsecpolicy1"
description = "my-vpn-connection"
keys = {'name': vpnservice_name,
'description': "my-vpn-connection",
'peer_address': '192.168.1.10',
'peer_id': '192.168.1.10',
'peer_cidrs': ['192.168.2.0/24', '192.168.3.0/24'],
'initiator': 'bi-directional',
'mtu': 1500,
'dpd_action': 'hold',
'dpd_interval': 40,
'dpd_timeout': 120,
'tenant_id': self._tenant_id,
'psk': 'abcd',
'status': 'PENDING_CREATE',
'admin_state_up': True}
with self.ikepolicy(name=ikename) as ikepolicy:
with self.ipsecpolicy(name=ipsecname) as ipsecpolicy:
with self.subnet() as subnet:
with self.router() as router:
plugin = manager.NeutronManager.get_plugin()
agent = {'host': FAKE_HOST,
'agent_type': constants.AGENT_TYPE_L3,
'binary': 'fake-binary',
'topic': 'fake-topic'}
plugin.create_or_update_agent(self.adminContext, agent)
plugin.schedule_router(
self.adminContext, router['router']['id'])
with self.vpnservice(name=vpnservice_name,
subnet=subnet,
router=router) as vpnservice1:
keys['ikepolicy_id'] = ikepolicy['ikepolicy']['id']
keys['ipsecpolicy_id'] = (
ipsecpolicy['ipsecpolicy']['id']
)
keys['vpnservice_id'] = (
vpnservice1['vpnservice']['id']
)
with self.ipsec_site_connection(
self.fmt,
ipsec_site_connection_name,
keys['peer_address'],
keys['peer_id'],
keys['peer_cidrs'],
keys['mtu'],
keys['psk'],
keys['initiator'],
keys['dpd_action'],
keys['dpd_interval'],
keys['dpd_timeout'],
vpnservice1,
ikepolicy,
ipsecpolicy,
keys['admin_state_up'],
description=description,
):
yield vpnservice1['vpnservice']
def test_get_agent_hosting_vpn_services(self):
with self.vpnservice_set():
service_plugin = manager.NeutronManager.get_service_plugins()[
p_constants.VPN]
vpnservices = service_plugin._get_agent_hosting_vpn_services(
self.adminContext, FAKE_HOST)
vpnservices = vpnservices.all()
self.assertEqual(1, len(vpnservices))
vpnservice_db = vpnservices[0]
self.assertEqual(1, len(vpnservice_db.ipsec_site_connections))
ipsec_site_connection = vpnservice_db.ipsec_site_connections[0]
self.assertIsNotNone(
ipsec_site_connection['ikepolicy'])
self.assertIsNotNone(
ipsec_site_connection['ipsecpolicy'])
def test_update_status(self):
with self.vpnservice_set() as vpnservice:
self._register_agent_states()
service_plugin = manager.NeutronManager.get_service_plugins()[
p_constants.VPN]
service_plugin.update_status_by_agent(
self.adminContext,
[{'status': 'ACTIVE',
'ipsec_site_connections': {},
'updated_pending_status': True,
'id': vpnservice['id']}])
vpnservices = service_plugin._get_agent_hosting_vpn_services(
self.adminContext, FAKE_HOST)
vpnservice_db = vpnservices[0]
self.assertEqual(p_constants.ACTIVE, vpnservice_db['status']) | ipsec_site_connection_name = "ipsec_site_connection" |
test_random_search_kernel.py | import pytest
import numpy as np
import sklearn.linear_model
import sklearn.model_selection
import scipy.linalg
from himalaya.backend import set_backend
from himalaya.backend import ALL_BACKENDS
from himalaya.utils import assert_array_almost_equal
from himalaya.scoring import r2_score
from himalaya.kernel_ridge import solve_multiple_kernel_ridge_random_search
def _create_dataset(backend, n_targets=4):
n_featuress = (100, 200)
n_samples = 80
n_gammas = 3
Xs = [
backend.asarray(backend.randn(n_samples, n_features), backend.float64)
for n_features in n_featuress
]
Ks = backend.stack([X @ X.T for X in Xs])
ws = [
backend.asarray(backend.randn(n_features, n_targets), backend.float64)
for n_features in n_featuress
]
Ys = backend.stack([X @ w for X, w in zip(Xs, ws)])
Y = Ys.sum(0)
gammas = backend.asarray(backend.rand(n_gammas, Ks.shape[0]),
backend.float64)
gammas /= gammas.sum(1)[:, None]
return Ks, Y, gammas, Xs
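# _create_dataset builds a toy multiple-kernel problem: each Ks[i] is the
# linear kernel X_i @ X_i.T of one feature space, Y is the noiseless sum of the
# per-space predictions, and each row of gammas is a candidate set of kernel
# weights normalized to sum to 1.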
@pytest.mark.parametrize('local_alpha', [True, False])
@pytest.mark.parametrize('backend', ALL_BACKENDS)
def test_solve_multiple_kernel_ridge_random_search_local_alphah(
backend, local_alpha):
_test_solve_multiple_kernel_ridge_random_search(backend=backend,
local_alpha=local_alpha)
@pytest.mark.parametrize('n_targets_batch', [None, 3])
@pytest.mark.parametrize('backend', ALL_BACKENDS)
def test_solve_multiple_kernel_ridge_random_search_n_targets_batch(
backend, n_targets_batch):
_test_solve_multiple_kernel_ridge_random_search(
backend=backend, n_targets_batch=n_targets_batch)
@pytest.mark.parametrize('n_alphas_batch', [None, 2])
@pytest.mark.parametrize('backend', ALL_BACKENDS)
def test_solve_multiple_kernel_ridge_random_search_n_alphas_batch(
backend, n_alphas_batch):
|
@pytest.mark.parametrize('return_weights', ['primal', 'dual'])
@pytest.mark.parametrize('backend', ALL_BACKENDS)
def test_solve_multiple_kernel_ridge_random_search_return_weights(
backend, return_weights):
_test_solve_multiple_kernel_ridge_random_search(
backend=backend, return_weights=return_weights)
@pytest.mark.parametrize('diagonalize_method', ['eigh', 'svd'])
@pytest.mark.parametrize('backend', ALL_BACKENDS)
def test_solve_multiple_kernel_ridge_random_search_diagonalize_method(
backend, diagonalize_method):
_test_solve_multiple_kernel_ridge_random_search(
backend=backend, diagonalize_method=diagonalize_method)
def _test_solve_multiple_kernel_ridge_random_search(
backend, n_targets_batch=None, n_alphas_batch=None,
return_weights="dual", diagonalize_method="eigh", local_alpha=True):
backend = set_backend(backend)
Ks, Y, gammas, Xs = _create_dataset(backend)
alphas = backend.asarray_like(backend.logspace(-3, 5, 9), Ks)
n_targets = Y.shape[1]
cv = sklearn.model_selection.check_cv(10)
############
# run solver
results = solve_multiple_kernel_ridge_random_search(
Ks, Y, n_iter=gammas, alphas=alphas, score_func=r2_score, cv=cv,
n_targets_batch=n_targets_batch, Xs=Xs, progress_bar=False,
return_weights=return_weights, n_alphas_batch=n_alphas_batch,
diagonalize_method=diagonalize_method, local_alpha=local_alpha)
best_deltas, refit_weights, cv_scores = results
#########################################
# compare with sklearn.linear_model.Ridge
if local_alpha: # only compare when each target optimizes alpha
test_scores = []
for gamma in backend.sqrt(gammas):
X = backend.concatenate([x * g for x, g in zip(Xs, gamma)], 1)
for train, test in cv.split(X):
for alpha in alphas:
model = sklearn.linear_model.Ridge(
alpha=backend.to_numpy(alpha), fit_intercept=False)
model = model.fit(backend.to_numpy(X[train]),
backend.to_numpy(Y[train]))
predictions = backend.asarray_like(
model.predict(backend.to_numpy(X[test])), Y)
test_scores.append(r2_score(Y[test], predictions))
test_scores = backend.stack(test_scores)
test_scores = test_scores.reshape(len(gammas), cv.get_n_splits(),
len(alphas), n_targets)
test_scores_mean = backend.max(test_scores.mean(1), 1)
assert_array_almost_equal(cv_scores, test_scores_mean, decimal=5)
######################
# test refited_weights
for tt in range(n_targets):
gamma = backend.exp(best_deltas[:, tt])
alpha = 1.0
if return_weights == 'primal':
# compare primal weights with sklearn.linear_model.Ridge
X = backend.concatenate(
[X * backend.sqrt(g) for X, g in zip(Xs, gamma)], 1)
model = sklearn.linear_model.Ridge(fit_intercept=False,
alpha=backend.to_numpy(alpha))
w1 = model.fit(backend.to_numpy(X),
backend.to_numpy(Y[:, tt])).coef_
w1 = np.split(w1, np.cumsum([X.shape[1] for X in Xs][:-1]), axis=0)
w1 = [backend.asarray(w) for w in w1]
w1_scaled = backend.concatenate(
[w * backend.sqrt(g) for w, g, in zip(w1, gamma)])
assert_array_almost_equal(w1_scaled, refit_weights[:, tt],
decimal=5)
elif return_weights == 'dual':
# compare dual weights with scipy.linalg.solve
Ks_64 = backend.asarray(Ks, dtype=backend.float64)
gamma_64 = backend.asarray(gamma, dtype=backend.float64)
K = backend.matmul(Ks_64.T, gamma_64).T
reg = backend.asarray_like(np.eye(K.shape[0]), K) * alpha
Y_64 = backend.asarray(Y, dtype=backend.float64)
c1 = scipy.linalg.solve(backend.to_numpy(K + reg),
backend.to_numpy(Y_64[:, tt]))
c1 = backend.asarray_like(c1, K)
assert_array_almost_equal(c1, refit_weights[:, tt], decimal=5)
@pytest.mark.parametrize('backend', ALL_BACKENDS)
def test_solve_multiple_kernel_ridge_random_search_single_alpha_numpy(backend):
backend = set_backend(backend)
# just a smoke test, so make it minimal
Ks, Y, gammas, Xs = _create_dataset(backend)
alphas = 1.0
# make Y a numpy array
Y = backend.to_numpy(Y)
results = solve_multiple_kernel_ridge_random_search(
Ks, Y, n_iter=gammas, alphas=alphas
)
@pytest.mark.parametrize('backend', ALL_BACKENDS)
@pytest.mark.parametrize('n_kernels', [1, 2])
def test_solve_multiple_kernel_ridge_random_search_global_alpha(backend, n_kernels):
backend = set_backend(backend)
# add more targets to make sure we get some variability
Ks, Y, gammas, Xs = _create_dataset(backend, n_targets=20)
alphas = backend.asarray_like(backend.logspace(-3, 5, 9), Ks)
cv = sklearn.model_selection.check_cv(5)
deltas, *_, best_alphas = solve_multiple_kernel_ridge_random_search(
Ks[:n_kernels],
Y,
n_iter=50,
progress_bar=False,
alphas=alphas,
cv=cv,
local_alpha=False,
return_alphas=True
)
# test that we return a single combination of deltas
deltas = backend.to_numpy(deltas)
if deltas.ndim == 1:
assert np.allclose(deltas[0], deltas)
else:
for dd in deltas:
assert np.allclose(dd[0], dd)
# test that we return a single alpha
best_alphas = backend.to_numpy(best_alphas)
assert np.allclose(best_alphas[0], best_alphas) | _test_solve_multiple_kernel_ridge_random_search(
backend=backend, n_alphas_batch=n_alphas_batch) |
unassigned-instances.js | /*
Copyright [2016] [Relevance Lab]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
var mongoose = require('mongoose');
var ObjectId = require('mongoose').Types.ObjectId;
var logger = require('_pr/logger')(module);
var Schema = mongoose.Schema;
var mongoosePaginate = require('mongoose-paginate');
var UnassignedInstancesSchema = new Schema({
orgId: {
type: String,
required: true,
trim: true,
},
orgName: {
type: String,
required: true,
trim: true,
},
providerId: {
type: String,
required: false,
trim: true | },
providerType: String,
providerData: Schema.Types.Mixed,
platformId: String,
ip: {
type: String,
index: true,
trim: true
},
os: String,
state: String,
isDeleted:{
type:Boolean,
default:false,
required:false
},
tags: Schema.Types.Mixed,
usage: Schema.Types.Mixed,
cost: Schema.Types.Mixed,
subnetId: {
type: String,
required: false,
trim: true
},
vpcId: {
type: String,
required: false,
trim: true
},
privateIpAddress: {
type: String,
required: false,
trim: true
},
hostName: {
type: String,
required: false,
trim: true
}
});
UnassignedInstancesSchema.plugin(mongoosePaginate);
UnassignedInstancesSchema.index({platformId: 1, providerId: 1}, {unique: true});
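// The compound unique index above guarantees one document per (platformId,
// providerId) pair; saving a duplicate fails with a Mongo duplicate-key
// error (E11000), which callers see through the error callback.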
UnassignedInstancesSchema.statics.createNew = function createNew(data, callback) {
var self = this;
var unassignedInstance = new self(data);
unassignedInstance.save(function(err, instance) {
if (err) {
logger.error("Failed to create unassigned instance", err);
if (typeof callback == 'function') {
callback(err, null);
}
return;
} else if (typeof callback == 'function') {
return callback(null, instance);
}
});
};
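// Illustrative usage of createNew (field values are hypothetical):
// UnassignedInstances.createNew({
//     orgId: 'org-1', orgName: 'Org One', providerId: 'provider-1',
//     platformId: 'i-0abc123', state: 'running'
// }, function(err, instance) {
//     if (!err) logger.debug('created unassigned instance ' + instance._id);
// });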
UnassignedInstancesSchema.statics.getByProviderId = function getByProviderId(databaseReq, callback) {
databaseReq.queryObj.isDeleted = false;
this.paginate(databaseReq.queryObj, databaseReq.options, function (err, instances) {
if (err) {
logger.error("Failed getByProviderId (%s)", err);
callback(err, null);
return;
}
callback(null, instances);
});
};
UnassignedInstancesSchema.statics.getById = function getById(instanceId, callback) {
this.findById(instanceId,
function(err, instance) {
if (err) {
logger.error("Failed to get instance ", instanceId, err);
return callback(err);
} else {
return callback(null, instance);
}
}
);
};
UnassignedInstancesSchema.statics.getAllByIds = function getAllByIds(instanceIds, callback) {
var params = {
'_id': {$in: instanceIds}
};
this.find(params,function(err, instances) {
if (err) {
logger.error("Could not get instances");
return callback(err, null);
} else if(instances.length > 0) {
return callback(null, instances);
} else {
return callback(null, null);
}
});
};
UnassignedInstancesSchema.statics.getByProviderIdAndPlatformId
= function getByProviderIdAndPlatformId(providerId, platformId, callback) {
var params = {
'providerId': providerId,
'platformId': platformId
};
this.find(params,
function(err, instances) {
if (err) {
logger.error("Could not get instance for ", providerId, platformId, err);
return callback(err, null);
} else if(instances.length > 0) {
return callback(null, instances[0]);
} else {
return callback(null, null);
}
}
);
};
UnassignedInstancesSchema.statics.getUnAssignedInstancesByProviderId
= function getUnAssignedInstancesByProviderId(providerId, callback) {
var params = {
providerId: providerId,
isDeleted:false
};
this.find(params, function (err, instances) {
if (err) {
logger.error("Failed getUnAssignedInstancesByProviderId (%s)", err);
callback(err, null);
return;
}
callback(null, instances);
});
};
UnassignedInstancesSchema.statics.updateInstance = function updateInstance(params, fields ,callback) {
this.update(params, fields,
function(err, data) {
if (err) {
logger.error("Failed to update unassigned instance data", err);
if (typeof callback == 'function') {
callback(err, null);
}
return;
} else if(data && (data.ok == 1)) {
return callback(null, data);
}
});
};
UnassignedInstancesSchema.statics.updateInstanceStatus = function updateInstanceStatus(instanceId,instance,callback) {
var updateObj={};
updateObj['state'] = instance.state;
if(instance.state === 'terminated' || instance.state === 'shutting-down'){
updateObj['isDeleted'] = true;
}else{
updateObj['isDeleted'] = false;
updateObj['subnetId']= instance.subnetId;
updateObj['ip'] = instance.ip;
updateObj['vpcId'] = instance.vpcId;
updateObj['hostName'] = instance.hostName;
updateObj['privateIpAddress'] = instance.privateIpAddress;
updateObj['tags'] = instance.tags;
}
UnassignedInstances.update({
"_id": ObjectId(instanceId)
},{
$set: updateObj
}, function(err, data) {
if (err) {
logger.error("Failed to update Unassigned Instance status data", err);
callback(err,null);
return;
}
callback(null, data);
});
};
UnassignedInstancesSchema.statics.deleteByPlatformAndProviderId
= function deleteByPlatformAndProviderId(providerId, platformId, callback) {
this.remove({
providerId: providerId,
platformId: platformId
}, function(err, data) {
if (err) {
logger.error("Failed to delete instance (%s)", platformId, err);
if (typeof callback == 'function') {
callback(err, null);
}
return;
}
if (typeof callback == 'function') {
callback(null, data);
}
});
};
UnassignedInstancesSchema.statics.removeInstancesByProviderId = function(providerId,callback) {
var queryObj={};
queryObj['providerId'] =providerId;
this.remove(queryObj, function(err, data) {
if (err) {
return callback(err, null);
} else {
callback(null, data);
}
});
};
UnassignedInstancesSchema.statics.updateUsage = function updateUsage(instanceId, usage, callBack) {
this.update({
_id: new ObjectId(instanceId)
}, {
$set: {usage: usage}
}, function(err, data) {
if (err) {
logger.error("Failed to update Unmanaged Instance data", err);
if (typeof callBack == 'function') {
callBack(err, null);
}
return;
}
if (typeof callBack == 'function') {
callBack(null, data);
}
});
};
UnassignedInstancesSchema.statics.updateInstanceCost = function(instanceCostData, callback) {
this.update({
platformId: instanceCostData.resourceId
}, {
$set: {
cost: instanceCostData.cost
}
}, {
upsert: false
}, function(err, data) {
if (err) {
return callback(err, null);
} else {
callback(null, data);
}
});
};
UnassignedInstancesSchema.statics.removeInstanceById = function(instanceId,callback) {
this.remove({
_id: new ObjectId(instanceId)
}, function (err, data) {
if (err) {
return callback(err, null);
} else {
callback(null, data);
}
});
};
UnassignedInstancesSchema.statics.getAllTerminatedInstances = function(orgId,callback) {
this.find({"orgId":orgId,"state":"terminated"}, function(err, data) {
if (err) {
return callback(err, null);
} else {
callback(null, data);
}
});
};
UnassignedInstancesSchema.statics.getInstancesByProviderIdOrgIdAndPlatformId = function getInstancesByProviderIdOrgIdAndPlatformId(orgId,providerId, platformId, callback) {
var params = {
'orgId': orgId,
'providerId': providerId,
'platformId': platformId
};
this.find(params,
function(err, instances) {
if (err) {
logger.error("Could not get instance for ",orgId, providerId, platformId, err);
return callback(err, null);
} else if(instances.length > 0) {
return callback(null, instances);
} else {
return callback(null, []);
}
}
);
};
UnassignedInstancesSchema.statics.removeTerminatedInstanceById = function(instanceId, callback) {
this.update({
"_id": ObjectId(instanceId)
}, {
$set: {
isDeleted: true,
state: 'terminated'
}
}, {
upsert: false
}, function(err, data) {
if (err) {
logger.error("Failed to removeTerminatedInstanceById (%s)", instanceId, err);
callback(err, null);
return;
}
callback(null, data);
});
};
UnassignedInstancesSchema.statics.getAll = function getAll(query, callback) {
query.queryObj.isDeleted = false;
this.paginate(query.queryObj, query.options,
function(err, instances) {
if (err) {
return callback(err);
} else {
return callback(null, instances);
}
}
);
};
var UnassignedInstances = mongoose.model('unassignedInstances', UnassignedInstancesSchema);
module.exports = UnassignedInstances; | |
3.py | class Person:
name='zhangsan'
age=20
p = Person()
print(p) # <__main__.Person object at 0x10073e668>
print('⭐️ ' * 20)
class Stu: | name='zhangsan'
age=20
def __str__(self):
return "name: %s; age: %d"%(self.name, self.age)
s = Stu()
print(s) # name: zhangsan; age: 20 | |
insertionSort.py | import pygame, sys, random
WIDTH = 720
HEIGHT = 400
pygame.init()
win = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption('Insertion Sort')
clock = pygame.time.Clock()
# Bar Width
n = 4
w = int(WIDTH/n)
h_arr = []
def | (num, in_min, in_max, out_min, out_max):
return (num - in_min) * (out_max - out_min) / (in_max - in_min) + out_min
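# maps() is a linear rescale from [in_min, in_max] to [out_min, out_max];
# e.g. maps(200, 0, 400, 20, 255) returns 137.5 (illustrative values).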
for i in range(w):
h_arr.append(random.randint(10, 400))
counter = 0
j = 0
flag = False
while True:
win.fill((10, 10, 10))
if flag:
if counter < len(h_arr):
key = h_arr[counter]
j = counter - 1
while j >= 0 and key < h_arr[j]:
h_arr[j+1] = h_arr[j]
j -= 1
h_arr[j+1] = key
else:
print('Done')
counter+=1
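# Note: each frame inserts the single element h_arr[counter] into the
# already-sorted prefix, so the animation advances one key per tick.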
# for i in range(len(h_arr)):
# pygame.draw.rect(win, (255, 255, 255), (i*n, HEIGHT - h_arr[i], n, h_arr[i]))
for i in range(len(h_arr)):
# if states[i] == 0:
# color = (255, 0, 0)
# elif states[i] == 2:
# color = (0, 255, 0)
# else:
# color = WHITE
h_ar = maps(h_arr[i], 0, 400, 20, 255)
# gap = win.get_height() - h_arr[i]
# pygame.draw.rect(win, (h_ar//4, h_ar, h_ar//2), pygame.Rect(int(i*n), gap//2, n, h_arr[i]))
pygame.draw.rect(win, (h_ar//3, h_ar, h_ar//4), pygame.Rect(int(i*n), (HEIGHT - h_arr[i])//2, n, h_arr[i]))
for events in pygame.event.get():
if events.type == pygame.QUIT:
pygame.quit()
sys.exit()
if events.type == pygame.KEYDOWN:
if events.key == pygame.K_RETURN:
flag = True
clock.tick(60)
pygame.display.flip()
# import pygame as pg, sys, random
# WHITE = (255, 255, 255)
# RED = (255, 0, 0)
# GREEN = (0, 255, 0)
# WIDTH = 640
# HEIGHT = 480
# win_size = (WIDTH, HEIGHT)
# pg.init()
# win = pg.display.set_mode(win_size)
# pg.display.set_caption('Insertion Sort Visualization')
# clock = pg.time.Clock()
# n = 4
# w = int(WIDTH/n)
# h_arr = []
# state = []
# for i in range(w):
# height = random.randint(10, 450)
# h_arr.append(height)
# state.append(1)
# counter = 0
# while True:
# win.fill((10, 10, 10))
# if counter < len(h_arr):
# key = h_arr[counter]
# j = counter - 1
# while j >= 0 and key < h_arr[j]:
# h_arr[j+1] = h_arr[j]
# j -= 1
# h_arr[j+1] = key
# else:
# print('Done')
# counter+=1
# for i in range(len(h_arr)):
# if state[i] == 0:
# color = RED
# elif state[i] == 2:
# color = GREEN
# else:
# color = WHITE
# pg.draw.rect(win, color, pg.Rect(int(i*n), HEIGHT - h_arr[i], n, h_arr[i]))
# for event in pg.event.get():
# if event.type == pg.QUIT:
# pg.quit()
# sys.exit()
# clock.tick(30)
# pg.display.flip()
| maps |
router.rs | use std::convert::Infallible;
use eyre::Result;
use warp::{hyper::StatusCode, Filter};
use atuin_common::api::SyncHistoryRequest;
use super::{
database::{Database, Postgres},
handlers,
};
use crate::{models::User, settings::Settings};
fn with_settings(
settings: Settings,
) -> impl Filter<Extract = (Settings,), Error = Infallible> + Clone {
warp::any().map(move || settings.clone())
}
fn with_db(
db: impl Database + Clone + Send + Sync,
) -> impl Filter<Extract = (impl Database + Clone,), Error = Infallible> + Clone {
warp::any().map(move || db.clone())
}
fn with_user(
postgres: Postgres,
) -> impl Filter<Extract = (User,), Error = warp::Rejection> + Clone {
warp::header::<String>("authorization").and_then(move |header: String| {
// async closures are still buggy :(
let postgres = postgres.clone();
async move {
let header: Vec<&str> = header.split(' ').collect();
let token = if header.len() == 2 {
if header[0] != "Token" |
header[1]
} else {
return Err(warp::reject());
};
let user = postgres
.get_session_user(token)
.await
.map_err(|_| warp::reject())?;
Ok(user)
}
})
}
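// Illustrative: clients are expected to send `Authorization: Token <session>`,
// so the split above yields ["Token", "<session>"]; any other shape is rejected.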
pub async fn router(
settings: &Settings,
) -> Result<impl Filter<Extract = impl warp::Reply, Error = Infallible> + Clone> {
let postgres = Postgres::new(settings.db_uri.as_str()).await?;
let index = warp::get().and(warp::path::end()).map(handlers::index);
let count = warp::get()
.and(warp::path("sync"))
.and(warp::path("count"))
.and(warp::path::end())
.and(with_user(postgres.clone()))
.and(with_db(postgres.clone()))
.and_then(handlers::history::count)
.boxed();
let sync = warp::get()
.and(warp::path("sync"))
.and(warp::path("history"))
.and(warp::query::<SyncHistoryRequest>())
.and(warp::path::end())
.and(with_user(postgres.clone()))
.and(with_db(postgres.clone()))
.and_then(handlers::history::list)
.boxed();
let add_history = warp::post()
.and(warp::path("history"))
.and(warp::path::end())
.and(warp::body::json())
.and(with_user(postgres.clone()))
.and(with_db(postgres.clone()))
.and_then(handlers::history::add)
.boxed();
let user = warp::get()
.and(warp::path("user"))
.and(warp::path::param::<String>())
.and(warp::path::end())
.and(with_db(postgres.clone()))
.and_then(handlers::user::get)
.boxed();
let register = warp::post()
.and(warp::path("register"))
.and(warp::path::end())
.and(warp::body::json())
.and(with_settings(settings.clone()))
.and(with_db(postgres.clone()))
.and_then(handlers::user::register)
.boxed();
let login = warp::post()
.and(warp::path("login"))
.and(warp::path::end())
.and(warp::body::json())
.and(with_db(postgres))
.and_then(handlers::user::login)
.boxed();
let r = warp::any()
.and(
index
.or(count)
.or(sync)
.or(add_history)
.or(user)
.or(register)
.or(login)
.or(warp::any().map(|| warp::reply::with_status("☕", StatusCode::IM_A_TEAPOT))),
)
.with(warp::filters::log::log("atuin::api"));
Ok(r)
}
| {
return Err(warp::reject());
} |
stack.go | package main
import (
"fmt"
"errors"
)
// Represents a stack
type Stack struct {
Next *Stack // Next element down the stack
Word // Our {value: type} pair
}
// Create a new stack
func MkStack() *Stack {
return &Stack{
Next: nil,
Word: Word{Kind: HEAD},
}
}
// Internal pop function - returns the HEAD sentinel unchanged when the stack is empty ☺
func (s *Stack) Pop() (*Stack, Word) {
if s.Next == nil {
// We are the head
return s, s.Word
}
return s.Next, s.Word
}
// Pushes to the stack, may shift stack pointer - TODO - locks?
func (s *Stack) Push(w Word) (*Stack, error) {
switch w.Kind {
// These are all plain values
case Integral:
fallthrough
case Real:
fallthrough
case String:
top := &Stack{
Next: s,
Word: w,
}
return top, nil
case Procedure:
// Call the procedure to modify the stack
push := w.Value.(func(*Stack) (*Stack, error))
stk, err := push(s)
if err != nil {
return nil, err
}
return stk, nil
case Variable:
// TODO
return nil, errors.New("variable unimplemented for push")
case NIL:
return nil, errors.New("word type of NIL can't be pushed")
default:
return nil, errors.New(fmt.Sprint("unknown type: ", w.Kind))
}
// Unreachable: every switch case above returns.
return s, nil
}
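// Illustrative usage — Push returns the (possibly new) top of the stack,
// so callers must keep the returned pointer:
//
//	s := MkStack()
//	s, err := s.Push(Word{Kind: Integral, Value: 1})
//	if err != nil {
//		// handle the error
//	}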
// Descend to calculate the size of the stack - O(n)
func (s *Stack) Size() uint64 {
var size uint64 = 0
if s.Word.Kind == HEAD {
size = 0
} else {
size = 1 | if s.Next != nil {
size += s.Next.Size()
}
return size
} | }
|
testredis.go | package testredis
import (
"context"
"fmt"
"os/exec"
"runtime"
"strconv"
"strings"
"sync/atomic"
"testing"
"github.com/bazelbuild/rules_go/go/tools/bazel"
"github.com/buildbuddy-io/buildbuddy/server/testutil/app"
"github.com/buildbuddy-io/buildbuddy/server/util/log"
"github.com/stretchr/testify/assert"
)
const (
redisLinuxBinRunfilePath = "enterprise/server/test/bin/redis/redis-server-linux-x86_64"
)
// Start spawns a Redis server for the given test and returns a Redis target
// that points to it.
func Start(t *testing.T) string {
var redisBinPath string
osArchKey := runtime.GOOS + "_" + runtime.GOARCH
switch osArchKey {
case "linux_amd64":
redisBinPath = redisLinuxBinRunfilePath
default:
// Skip the test on unsupported platforms until we have mac binary in place.
t.SkipNow()
return ""
}
redisBinPath, err := bazel.Runfile(redisLinuxBinRunfilePath)
if err != nil {
assert.FailNow(t, "redis binary not found in runfiles", err.Error())
}
| redisPort := app.FreePort(t)
ctx, cancel := context.WithCancel(context.Background())
args := []string{"--port", strconv.Itoa(redisPort)}
// Disable persistence, not useful for testing.
args = append(args, "--save", "")
// Set a precautionary limit, tests should not reach it...
args = append(args, "--maxmemory", "1gb")
// ... but do break things if we reach the limit.
args = append(args, "--maxmemory-policy", "noeviction")
cmd := exec.CommandContext(ctx, redisBinPath, args...)
log.Printf("Starting redis server: %s", cmd)
cmd.Stdout = &logWriter{}
cmd.Stderr = &logWriter{}
err = cmd.Start()
if err != nil {
assert.FailNowf(t, "redis binary could not be started", err.Error())
}
var killed atomic.Value
killed.Store(false)
go func() {
if err := cmd.Wait(); err != nil && killed.Load() != true {
log.Warningf("redis server did not exit cleanly: %v", err)
}
}()
t.Cleanup(func() {
log.Info("Shutting down Redis server.")
killed.Store(true)
cancel()
})
return fmt.Sprintf("localhost:%d", redisPort)
}
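// Illustrative usage from a test (client wiring is hypothetical):
//
//	func TestWithRedis(t *testing.T) {
//		target := testredis.Start(t) // e.g. "localhost:12345"
//		_ = target                   // hand the target to the code under test
//	}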
type logWriter struct{}
func (w *logWriter) Write(b []byte) (int, error) {
log.Infof("[redis server] %s", strings.TrimSuffix(string(b), "\n"))
return len(b), nil
} | |
test_elastic_service.py | import unittest
from blindreviewparser.parser.blind_review_parser import *
class TestElasticService(unittest.TestCase):
def setUp(self) -> None:
self.es_endpoint = 'http://localhost:9200'
self.elastic_service = ElasticService(self.es_endpoint)
self.sample = Review(
company='occidere',
title='"테스트 리뷰"',
url='/kr/company/occidere/review/af9-0df3j',
score=5.0,
auth='현직원 · i*********", · IT 엔지니어 - 2021.02.17'
)
def tearDown(self) -> None:
self.__delete_sample()
def test_exist_any(self):
# BUILD
self.__index_sample()
# OPERATE
exist = self.elastic_service.exist_any([self.sample])
# CHECK
self.assertTrue(exist)
def test_bulk_upsert(self):
# BUILD
self.__delete_sample()
# OPERATE
self.elastic_service.bulk_upsert([self.sample])
# CHECK
resp = requests.get(f'{self.es_endpoint}/blind-review-210217/_doc/{self.sample.url_hash}')
self.assertEqual(resp.status_code, 200)
def __index_sample(self) -> None:
requests.post(
url=f'{self.es_endpoint}/blind-review-210217/_doc/{self.sample.url_hash}',
headers={'Content-Type': 'application/json'},
data=self.sample.to_json_str().encode('utf-8')
)
def __delete_sample(self) -> None:
requests.delete(f'{self.es | _endpoint}/blind-review-210217/_doc/{self.sample.url_hash}')
|
|
namespace_test2_generated.rs | // automatically generated by the FlatBuffers compiler, do not modify
#![allow(dead_code)]
#![allow(unused_imports)]
extern crate flatbuffers;
pub mod namespace_a {
#![allow(dead_code)]
#![allow(unused_imports)]
use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
use self::flatbuffers::EndianScalar;
pub enum TableInFirstNSOffset {}
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct TableInFirstNS<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for TableInFirstNS<'a> {
type Inner = TableInFirstNS<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf: buf, loc: loc },
}
}
}
impl<'a> TableInFirstNS<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
TableInFirstNS {
_tab: table,
}
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args TableInFirstNSArgs<'args>) -> flatbuffers::WIPOffset<TableInFirstNS<'bldr>> {
let mut builder = TableInFirstNSBuilder::new(_fbb);
if let Some(x) = args.foo_struct { builder.add_foo_struct(x); }
if let Some(x) = args.foo_table { builder.add_foo_table(x); }
builder.add_foo_enum(args.foo_enum);
builder.finish()
}
pub const VT_FOO_TABLE: flatbuffers::VOffsetT = 4;
pub const VT_FOO_ENUM: flatbuffers::VOffsetT = 6;
pub const VT_FOO_STRUCT: flatbuffers::VOffsetT = 8;
#[inline]
pub fn foo_table(&self) -> Option<namespace_b::TableInNestedNS<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<namespace_b::TableInNestedNS<'a>>>(TableInFirstNS::VT_FOO_TABLE, None)
}
#[inline]
pub fn foo_enum(&self) -> namespace_b::EnumInNestedNS {
self._tab.get::<namespace_b::EnumInNestedNS>(TableInFirstNS::VT_FOO_ENUM, Some(namespace_b::EnumInNestedNS::A)).unwrap()
}
#[inline]
pub fn foo_struct(&self) -> Option<&'a namespace_b::StructInNestedNS> {
self._tab.get::<namespace_b::StructInNestedNS>(TableInFirstNS::VT_FOO_STRUCT, None)
}
}
pub struct TableInFirstNSArgs<'a> {
pub foo_table: Option<flatbuffers::WIPOffset<namespace_b::TableInNestedNS<'a >>>,
pub foo_enum: namespace_b::EnumInNestedNS,
pub foo_struct: Option<&'a namespace_b::StructInNestedNS>,
}
impl<'a> Default for TableInFirstNSArgs<'a> {
#[inline]
fn default() -> Self {
TableInFirstNSArgs {
foo_table: None,
foo_enum: namespace_b::EnumInNestedNS::A,
foo_struct: None,
}
}
}
pub struct TableInFirstNSBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> TableInFirstNSBuilder<'a, 'b> {
#[inline]
pub fn add_foo_table(&mut self, foo_table: flatbuffers::WIPOffset<namespace_b::TableInNestedNS<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<namespace_b::TableInNestedNS>>(TableInFirstNS::VT_FOO_TABLE, foo_table);
}
#[inline]
pub fn add_foo_enum(&mut self, foo_enum: namespace_b::EnumInNestedNS) {
self.fbb_.push_slot::<namespace_b::EnumInNestedNS>(TableInFirstNS::VT_FOO_ENUM, foo_enum, namespace_b::EnumInNestedNS::A);
}
#[inline]
pub fn add_foo_struct(&mut self, foo_struct: &'b namespace_b::StructInNestedNS) {
self.fbb_.push_slot_always::<&namespace_b::StructInNestedNS>(TableInFirstNS::VT_FOO_STRUCT, foo_struct);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInFirstNSBuilder<'a, 'b> {
let start = _fbb.start_table();
TableInFirstNSBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<TableInFirstNS<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
pub enum SecondTableInAOffset {}
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct SecondTableInA<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SecondTableInA<'a> {
type Inner = SecondTableInA<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf: buf, loc: loc },
}
}
}
impl<'a> SecondTableInA<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SecondTableInA {
_tab: table,
}
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args SecondTableInAArgs<'args>) -> flatbuffers::WIPOffset<SecondTableInA<'bldr>> {
let mut builder = SecondTableInABuilder::new(_fbb);
if let Some(x) = args.refer_to_c { builder.add_refer_to_c(x); }
builder.finish()
}
pub const VT_REFER_TO_C: flatbuffers::VOffsetT = 4;
#[inline]
pub fn refer_to_c(&self) -> Option<super::namespace_c::TableInC<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_c::TableInC<'a>>>(SecondTableInA::VT_REFER_TO_C, None)
}
}
pub struct SecondTableInAArgs<'a> {
pub refer_to_c: Option<flatbuffers::WIPOffset<super::namespace_c::TableInC<'a >>>,
}
impl<'a> Default for SecondTableInAArgs<'a> {
#[inline]
fn default() -> Self {
SecondTableInAArgs {
refer_to_c: None,
}
}
}
pub struct SecondTableInABuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SecondTableInABuilder<'a, 'b> {
#[inline]
pub fn add_refer_to_c(&mut self, refer_to_c: flatbuffers::WIPOffset<super::namespace_c::TableInC<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_c::TableInC>>(SecondTableInA::VT_REFER_TO_C, refer_to_c);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> SecondTableInABuilder<'a, 'b> {
let start = _fbb.start_table();
SecondTableInABuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SecondTableInA<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
} // pub mod NamespaceA
pub mod namespace_c {
#![allow(dead_code)]
#![allow(unused_imports)]
use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
use self::flatbuffers::EndianScalar;
pub enum TableInCOffset {}
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct | <'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for TableInC<'a> {
type Inner = TableInC<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf: buf, loc: loc },
}
}
}
impl<'a> TableInC<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
TableInC {
_tab: table,
}
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args TableInCArgs<'args>) -> flatbuffers::WIPOffset<TableInC<'bldr>> {
let mut builder = TableInCBuilder::new(_fbb);
if let Some(x) = args.refer_to_a2 { builder.add_refer_to_a2(x); }
if let Some(x) = args.refer_to_a1 { builder.add_refer_to_a1(x); }
builder.finish()
}
pub const VT_REFER_TO_A1: flatbuffers::VOffsetT = 4;
pub const VT_REFER_TO_A2: flatbuffers::VOffsetT = 6;
#[inline]
pub fn refer_to_a1(&self) -> Option<super::namespace_a::TableInFirstNS<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_a::TableInFirstNS<'a>>>(TableInC::VT_REFER_TO_A1, None)
}
#[inline]
pub fn refer_to_a2(&self) -> Option<super::namespace_a::SecondTableInA<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_a::SecondTableInA<'a>>>(TableInC::VT_REFER_TO_A2, None)
}
}
pub struct TableInCArgs<'a> {
pub refer_to_a1: Option<flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS<'a >>>,
pub refer_to_a2: Option<flatbuffers::WIPOffset<super::namespace_a::SecondTableInA<'a >>>,
}
impl<'a> Default for TableInCArgs<'a> {
#[inline]
fn default() -> Self {
TableInCArgs {
refer_to_a1: None,
refer_to_a2: None,
}
}
}
pub struct TableInCBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> TableInCBuilder<'a, 'b> {
#[inline]
pub fn add_refer_to_a1(&mut self, refer_to_a1: flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS>>(TableInC::VT_REFER_TO_A1, refer_to_a1);
}
#[inline]
pub fn add_refer_to_a2(&mut self, refer_to_a2: flatbuffers::WIPOffset<super::namespace_a::SecondTableInA<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_a::SecondTableInA>>(TableInC::VT_REFER_TO_A2, refer_to_a2);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInCBuilder<'a, 'b> {
let start = _fbb.start_table();
TableInCBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<TableInC<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
} // pub mod NamespaceC
| TableInC |
file.go | // Copyright 2020 The casbin Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package controllers
import (
"encoding/base64"
"encoding/json"
"fmt"
"strconv"
"strings"
"time"
"github.com/casbin/casnode/object"
"github.com/casbin/casnode/service"
"github.com/casbin/casnode/util"
)
type NewUploadFile struct {
FileName string `json:"fileName"`
FilePath string `json:"filePath"`
FileUrl string `json:"fileUrl"`
Size int `json:"size"`
}
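// Illustrative JSON body matching the tags above (values hypothetical):
//   {"fileName": "report.pdf", "filePath": "/alice/file/report.pdf",
//    "fileUrl": "https://cdn.example.com/report.pdf", "size": 2048}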
func (c *ApiController) GetFiles() {
if c.RequireSignedIn() {
return
}
user := c.GetSessionUser()
limitStr := c.Input().Get("limit")
pageStr := c.Input().Get("page")
defaultLimit := object.DefaultFilePageNum
var limit, offset int
if len(limitStr) != 0 {
limit = util.ParseInt(limitStr)
} else {
limit = defaultLimit
}
if len(pageStr) != 0 {
page := util.ParseInt(pageStr)
offset = page*limit - limit
}
files := object.GetFiles(GetUserName(user), limit, offset)
fileNum := fileNumResp{Num: object.GetFilesNum(GetUserName(user)), MaxNum: object.GetMemberFileQuota(user)}
c.ResponseOk(files, fileNum)
}
func (c *ApiController) GetFileNum() {
if c.RequireSignedIn() {
return
}
user := c.GetSessionUser()
num := fileNumResp{Num: object.GetFilesNum(GetUserName(user)), MaxNum: object.GetMemberFileQuota(user)}
resp := Response{Status: "ok", Msg: "success", Data: num}
c.Data["json"] = resp
c.ServeJSON()
}
func (c *ApiController) AddFileRecord() {
if c.RequireSignedIn() {
return
}
user := c.GetSessionUser()
var file NewUploadFile
err := json.Unmarshal(c.Ctx.Input.RequestBody, &file)
if err != nil {
panic(err)
}
var resp Response
uploadFileNum := object.GetFilesNum(GetUserName(user))
if uploadFileNum >= object.GetMemberFileQuota(user) {
resp = Response{Status: "fail", Msg: "You have exceeded the upload limit."}
c.Data["json"] = resp
c.ServeJSON()
return
}
record := object.UploadFileRecord{
FileName: file.FileName,
FilePath: file.FilePath,
FileUrl: file.FileUrl,
FileType: util.FileType(file.FileName),
FileExt: util.FileExt(file.FileName),
MemberId: GetUserName(user),
CreatedTime: util.GetCurrentTime(),
Size: file.Size,
Deleted: false,
}
affected, id := object.AddFileRecord(&record)
if affected {
fileNum := fileNumResp{Num: object.GetFilesNum(GetUserName(user)), MaxNum: object.GetMemberFileQuota(user)}
resp = Response{Status: "ok", Msg: "success", Data: id, Data2: fileNum}
} else {
resp = Response{Status: "fail", Msg: "Add file failed, please try again.", Data: id}
}
c.Data["json"] = resp
c.ServeJSON()
}
func (c *ApiController) DeleteFile() {
idStr := c.Input().Get("id")
user := c.GetSessionUser()
id := util.ParseInt(idStr)
fileInfo := object.GetFile(id)
if !object.FileEditable(user, fileInfo.MemberId) {
c.ResponseError("Permission denied.")
return
}
affected := object.DeleteFileRecord(id)
var resp Response
if affected {
service.DeleteOSSFile(fileInfo.FilePath)
fileNum := fileNumResp{Num: object.GetFilesNum(GetUserName(user)), MaxNum: object.GetMemberFileQuota(user)}
resp = Response{Status: "ok", Msg: "success", Data: id, Data2: fileNum}
} else {
resp = Response{Status: "fail", Msg: "Delete file failed, please try again."}
} | c.ServeJSON()
}
func (c *ApiController) GetFile() {
idStr := c.Input().Get("id")
id := util.ParseInt(idStr)
file := object.GetFile(id)
var resp Response
if file == nil || file.Deleted {
resp = Response{Status: "error", Msg: "No such file."}
} else {
object.AddFileViewsNum(id) // also bump the file's view count on each read
resp = Response{Status: "ok", Msg: "success", Data: file}
}
c.Data["json"] = resp
c.ServeJSON()
}
func (c *ApiController) UpdateFileDescribe() {
user := c.GetSessionUser()
id := util.ParseInt(c.Input().Get("id"))
var desc fileDescribe
err := json.Unmarshal(c.Ctx.Input.RequestBody, &desc)
if err != nil {
panic(err)
}
var resp Response
file := object.GetFile(id)
if !object.FileEditable(user, file.MemberId) {
resp = Response{Status: "fail", Msg: "Permission denied."}
c.Data["json"] = resp
c.ServeJSON()
return
} else {
res := object.UpdateFileDescribe(id, desc.FileName, desc.Desc)
resp = Response{Status: "ok", Msg: "success", Data: res}
}
c.Data["json"] = resp
c.ServeJSON()
}
func (c *ApiController) UploadFile() {
if c.RequireSignedIn() {
return
}
memberId := c.GetSessionUsername()
fileBase64 := c.Ctx.Request.Form.Get("file")
fileType := c.Ctx.Request.Form.Get("type")
fileName := c.Ctx.Request.Form.Get("name")
index := strings.Index(fileBase64, ",")
fileBytes, _ := base64.StdEncoding.DecodeString(fileBase64[index+1:])
fileURL := service.UploadFileToOSS(fileBytes, "/" + memberId + "/file/" + fileName + "." + fileType)
resp := Response{Status: "ok", Msg: fileName + "." + fileType, Data: fileURL}
c.Data["json"] = resp
c.ServeJSON()
}
func (c *ApiController) ModeratorUpload() {
if c.RequireSignedIn() {
return
}
user := c.GetSessionUser()
if !user.IsAdmin {
c.ResponseError("You have no permission to upload files here. Need to be moderator.")
return
}
fileBase64 := c.Ctx.Request.Form.Get("file")
fileName := c.Ctx.Request.Form.Get("name")
filePath := c.Ctx.Request.Form.Get("filepath")
index := strings.Index(fileBase64, ",")
fileBytes, _ := base64.StdEncoding.DecodeString(fileBase64[index+1:])
fileURL := service.UploadFileToOSS(fileBytes, "/" + filePath + "/" + fileName)
timeStamp := fmt.Sprintf("?time=%d", time.Now().UnixNano())
c.ResponseOk(fileURL + timeStamp)
//resp := Response{Status: "ok", Msg: fileName, Data: fileURL + timeStamp}
}
func (c *ApiController) UploadAvatar() {
if c.RequireSignedIn() {
return
}
memberId := c.GetSessionUsername()
avatarBase64 := c.Ctx.Request.Form.Get("avatar")
index := strings.Index(avatarBase64, ",")
if index < 0 || (avatarBase64[0:index] != "data:image/png;base64" && avatarBase64[0:index] != "data:image/jpeg;base64") {
resp := Response{Status: "error", Msg: "File encoding or type error"}
c.Data["json"] = resp
c.ServeJSON()
return
}
fileBytes, _ := base64.StdEncoding.DecodeString(avatarBase64[index+1:])
timestamp := strconv.FormatInt(time.Now().Unix(), 10)
fileURL := service.UploadFileToOSS(fileBytes, "/" + memberId + "/avatar/" + timestamp + "." + "png")
resp := Response{Status: "ok", Data: fileURL}
c.Data["json"] = resp
c.ServeJSON()
} |
c.Data["json"] = resp |
interpreter_unittest.py | #!/usr/bin/python2
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the EC-3PO interpreter."""
from __future__ import print_function
# pylint: disable=cros-logging-import
import logging
import mock
import multiprocessing
import tempfile
import unittest
import interpreter
class TestEnhancedECBehaviour(unittest.TestCase):
"""Test case to verify all enhanced EC interpretation tasks."""
def setUp(self):
"""Setup the test harness."""
# Setup logging with a timestamp, the module, and the log level.
logging.basicConfig(level=logging.DEBUG,
format=('%(asctime)s - %(module)s -'
' %(levelname)s - %(message)s'))
# Create a tempfile that would represent the EC UART PTY.
self.tempfile = tempfile.NamedTemporaryFile()
# Create the pipes that the interpreter will use.
self.cmd_pipe_user, self.cmd_pipe_itpr = multiprocessing.Pipe()
self.dbg_pipe_user, self.dbg_pipe_itpr = multiprocessing.Pipe(duplex=False)
# Mock the open() function so we can inspect reads/writes to the EC.
self.ec_uart_pty = mock.mock_open()
with mock.patch('__builtin__.open', self.ec_uart_pty):
# Create an interpreter.
self.itpr = interpreter.Interpreter(self.tempfile.name,
self.cmd_pipe_itpr,
self.dbg_pipe_itpr,
log_level=logging.DEBUG)
@mock.patch('interpreter.os')
def test_HandlingCommandsThatProduceNoOutput(self, mock_os):
"""Verify that the Interpreter correctly handles non-output commands.
Args:
mock_os: MagicMock object replacing the 'os' module for this test
case.
"""
# The interpreter init should open the EC UART PTY.
expected_ec_calls = [mock.call(self.tempfile.name, 'a+')]
# Have a command come in the command pipe. The first command will be an
# interrogation to determine if the EC is enhanced or not.
self.cmd_pipe_user.send(interpreter.EC_SYN)
self.itpr.HandleUserData()
# At this point, the command should be queued up waiting to be sent, so
# let's actually send it to the EC.
self.itpr.SendCmdToEC()
expected_ec_calls.extend([mock.call().write(interpreter.EC_SYN),
mock.call().flush()])
# Now, assume that the EC sends only 1 response back of EC_ACK.
mock_os.read.side_effect = [interpreter.EC_ACK]
# When reading the EC, the interpreter will call file.fileno() to pass to
# os.read().
expected_ec_calls.append(mock.call().fileno())
# Simulate the response.
self.itpr.HandleECData()
# Now that the interrogation was complete, it's time to send down the real
# command.
test_cmd = 'chan save'
# Send the test command down the pipe.
self.cmd_pipe_user.send(test_cmd)
self.itpr.HandleUserData()
self.itpr.SendCmdToEC()
# Since the EC image is enhanced, we should have sent a packed command.
expected_ec_calls.append(mock.call().write(self.itpr.PackCommand(test_cmd)))
expected_ec_calls.append(mock.call().flush())
# Now that the first command was sent, we should send another command which
# produces no output. The console would send another interrogation.
self.cmd_pipe_user.send(interpreter.EC_SYN)
self.itpr.HandleUserData()
self.itpr.SendCmdToEC()
expected_ec_calls.extend([mock.call().write(interpreter.EC_SYN),
mock.call().flush()])
# Again, assume that the EC sends only 1 response back of EC_ACK.
mock_os.read.side_effect = [interpreter.EC_ACK]
# When reading the EC, the interpreter will call file.fileno() to pass to
# os.read().
expected_ec_calls.append(mock.call().fileno())
# Simulate the response.
self.itpr.HandleECData()
# Now send the second test command.
test_cmd = 'chan 0'
self.cmd_pipe_user.send(test_cmd)
self.itpr.HandleUserData()
self.itpr.SendCmdToEC()
# Since the EC image is enhanced, we should have sent a packed command.
expected_ec_calls.append(mock.call().write(self.itpr.PackCommand(test_cmd)))
expected_ec_calls.append(mock.call().flush())
# Finally, verify that the appropriate writes were actually sent to the EC.
self.ec_uart_pty.assert_has_calls(expected_ec_calls)
@mock.patch('interpreter.os')
def test_CommandRetryingOnError(self, mock_os):
"""Verify that commands are retried if an error is encountered.
Args:
mock_os: MagicMock object replacing the 'os' module for this test
case.
"""
# The interpreter init should open the EC UART PTY.
expected_ec_calls = [mock.call(self.tempfile.name, 'a+')]
# Have a command come in the command pipe. The first command will be an | # let's actually send it to the EC.
self.itpr.SendCmdToEC()
expected_ec_calls.extend([mock.call().write(interpreter.EC_SYN),
mock.call().flush()])
# Now, assume that the EC sends only 1 response back of EC_ACK.
mock_os.read.side_effect = [interpreter.EC_ACK]
# When reading the EC, the interpreter will call file.fileno() to pass to
# os.read().
expected_ec_calls.append(mock.call().fileno())
# Simulate the response.
self.itpr.HandleECData()
# Let's send a command that is received on the EC-side with an error.
test_cmd = 'accelinfo'
self.cmd_pipe_user.send(test_cmd)
self.itpr.HandleUserData()
self.itpr.SendCmdToEC()
packed_cmd = self.itpr.PackCommand(test_cmd)
expected_ec_calls.extend([mock.call().write(packed_cmd),
mock.call().flush()])
# Have the EC return the error string twice.
mock_os.read.side_effect = ['&&EE', '&&EE']
for i in range(2):
# When reading the EC, the interpreter will call file.fileno() to pass to
# os.read().
expected_ec_calls.append(mock.call().fileno())
# Simulate the response.
self.itpr.HandleECData()
# Since an error was received, the EC should attempt to retry the command.
expected_ec_calls.extend([mock.call().write(packed_cmd),
mock.call().flush()])
# Verify that the retry count was decremented.
self.assertEqual(interpreter.COMMAND_RETRIES-i-1, self.itpr.cmd_retries,
'Unexpected cmd_retries count.')
# Actually retry the command.
self.itpr.SendCmdToEC()
# Now assume that the last one goes through with no trouble.
expected_ec_calls.extend([mock.call().write(packed_cmd),
mock.call().flush()])
self.itpr.SendCmdToEC()
# Verify all the calls.
self.ec_uart_pty.assert_has_calls(expected_ec_calls)
def test_PackCommandsForEnhancedEC(self):
"""Verify that the interpreter packs commands for enhanced EC images."""
# Assume current EC image is enhanced.
self.itpr.enhanced_ec = True
# Receive a command from the user.
test_cmd = 'gettime'
self.cmd_pipe_user.send(test_cmd)
# Mock out PackCommand to see if it was called.
self.itpr.PackCommand = mock.MagicMock()
# Have the interpreter handle the command.
self.itpr.HandleUserData()
# Verify that PackCommand() was called.
self.itpr.PackCommand.assert_called_once_with(test_cmd)
def test_DontPackCommandsForNonEnhancedEC(self):
"""Verify the interpreter doesn't pack commands for non-enhanced images."""
# Assume current EC image is not enhanced.
self.itpr.enhanced_ec = False
# Receive a command from the user.
test_cmd = 'gettime'
self.cmd_pipe_user.send(test_cmd)
# Mock out PackCommand to see if it was called.
self.itpr.PackCommand = mock.MagicMock()
# Have the interpreter handle the command.
self.itpr.HandleUserData()
# Verify that PackCommand() was called.
self.itpr.PackCommand.assert_not_called()
@mock.patch('interpreter.os')
def test_KeepingTrackOfInterrogation(self, mock_os):
"""Verify that the interpreter can track the state of the interrogation.
Args:
mock_os: MagicMock object replacing the 'os' module for this test
case.
"""
# Upon init, the interpreter should assume that the current EC image is not
# enhanced.
self.assertFalse(self.itpr.enhanced_ec, msg=('State of enhanced_ec upon'
' init is not False.'))
# Assume an interrogation request comes in from the user.
self.cmd_pipe_user.send(interpreter.EC_SYN)
self.itpr.HandleUserData()
# Verify the state is now within an interrogation.
self.assertTrue(self.itpr.interrogating, 'interrogating should be True')
# The state of enhanced_ec should not be changed yet because we haven't
# received a valid response yet.
self.assertFalse(self.itpr.enhanced_ec, msg=('State of enhanced_ec is '
'not False.'))
# Assume that the EC responds with an EC_ACK.
mock_os.read.side_effect = [interpreter.EC_ACK]
self.itpr.HandleECData()
# Now, the interrogation should be complete and we should know that the
# current EC image is enhanced.
self.assertFalse(self.itpr.interrogating, msg=('interrogating should be '
'False'))
self.assertTrue(self.itpr.enhanced_ec, msg='enhanced_ec should be True')
# Now let's perform another interrogation, but pretend that the EC ignores
# it.
self.cmd_pipe_user.send(interpreter.EC_SYN)
self.itpr.HandleUserData()
# Verify interrogating state.
self.assertTrue(self.itpr.interrogating, 'interrogating should be True')
# We should assume that the image is not enhanced until we get the valid
# response.
self.assertFalse(self.itpr.enhanced_ec, 'enhanced_ec should be False now.')
# Let's pretend that we get a random debug print. This should clear the
# interrogating flag.
mock_os.read.side_effect = '[1660.593076 HC 0x103]'
self.itpr.HandleECData()
# Verify that interrogating flag is cleared and enhanced_ec is still False.
self.assertFalse(self.itpr.interrogating, 'interrogating should be False.')
self.assertFalse(self.itpr.enhanced_ec,
'enhanced_ec should still be False.')
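# Summary of the handshake exercised above: the console sends interpreter.EC_SYN
# to interrogate the EC; a lone EC_ACK reply marks the image as enhanced, while
# any other data (e.g. a debug print) ends the interrogation with enhanced_ec
# left False.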
if __name__ == '__main__':
unittest.main() | # interrogation to determine if the EC is enhanced or not.
self.cmd_pipe_user.send(interpreter.EC_SYN)
self.itpr.HandleUserData()
# At this point, the command should be queued up waiting to be sent, so |
StructureSketch.py | class StructureSketch(object):
"""Create 'Sketch' of the structure"""
def __init__(self,structureModel,structureGeometry):
"""init
Required argument:
Optional arguments:
None.
Return value:
Exceptions:
None.
"""
self.structureGeometry=structureGeometry
self.structureModel=structureModel
def CreateSketch(self):
"""Create Sketch
function summary
Args:
structureGeometry: structureGeometry instance
Returns:
Raises:
"""
#design pattern: builder
self.__CreateTowerSketch()
self.__CreateStiffeningGirderSketch()
self.__CreateGirderRigidarmSketch()
self.__CreateCableSketch()
self.__CreateSuspenderSketch()
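# Illustrative usage (model and geometry objects are assumed to exist):
#   sketcher = StructureSketch(myModel, myGeometry)
#   sketcher.CreateSketch()  # builds tower, girder, rigid-arm, cable and suspender sketches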
def __CreateTowerSketch(self):
"""CreateTowerSketch
function summary
Args:
Returns:
Raises:
"""
#Tower:
dTB=self.structureGeometry.downTowerBottomCoordinate
rUD=self.structureGeometry.rUpDownTowerCoordinate
uTT=self.structureGeometry.upTowerTopCoordinate
self.towerSketch=[]
for i in range(0,2):
mySketch = self.structureModel.ConstrainedSketch(name='towerSketch'+str(i+1),sheetSize=10.0)
#dTB[0][0][0]: 1# tower, 1# tower column, x coordinate
mySketch.Line(point1=(dTB[i][0][0],dTB[i][0][1]), point2=(rUD[i][0][0],rUD[i][0][1]))
mySketch.Line(point1=(rUD[i][0][0],rUD[i][0][1]), point2=(uTT[i][0][0],uTT[i][0][1]))
mySketch.Line(point1=(uTT[i][0][0],uTT[i][0][1]), point2=(uTT[i][1][0]+(uTT[i][1][2]-uTT[i][0][2]),uTT[i][1][1]))
mySketch.Line(point1=(uTT[i][1][0]+(uTT[i][1][2]-uTT[i][0][2]),uTT[i][1][1]), point2=(rUD[i][1][0]+(rUD[i][1][2]-rUD[i][0][2]),rUD[i][1][1]))
mySketch.Line(point1=(rUD[i][1][0]+(rUD[i][1][2]-rUD[i][0][2]),rUD[i][1][1]), point2=(dTB[i][1][0]+(dTB[i][1][2]-dTB[i][0][2]),dTB[i][1][1]))
self.towerSketch.append(mySketch)
self.towerSketch=tuple(self.towerSketch)
def | (self):
"""Create Stiffening Girder Sketch
function summary
Args:
Returns:
Raises:
"""
eP=self.structureGeometry.EndPointCoordinate
rGR=self.structureGeometry.rGirderRigidarmCoordinate
rRS=self.structureGeometry.rRigidarmSuspenderCoordinate
#stiffeningGirderCoordinate=(eP[0],rGRC[0],eP[1])
lst=[]
lst.append(eP[0])
for i in range(len(rGR)):
lst.append(rGR[i])
lst.append(eP[1])
stiffeningGirderCoordinate=tuple(lst)
sG=stiffeningGirderCoordinate
mySketch = self.structureModel.ConstrainedSketch(name='stiffeningGirderSketch',sheetSize=10.0)
for i in range(len(sG)-1):
mySketch.Line(point1=(sG[i][0],sG[i][1]), point2=(sG[i+1][0],sG[i+1][1]))
self.stiffeningGirderSketch=mySketch
def __CreateGirderRigidarmSketch(self):
"""Create Girder Rigidarm Sketch
function summary
Args:
Returns:
Raises:
"""
rGR=self.structureGeometry.rGirderRigidarmCoordinate
rRS=self.structureGeometry.rRigidarmSuspenderCoordinate
#create GirderRigidarm Sketch
girderRigidarmSketch=[]
for i in range(len(rGR)):
mySketch = self.structureModel.ConstrainedSketch(name='girderRigidarmSketch'+str(i+1),sheetSize=10.0)
mySketch.Line(point1=(rRS[0][i][0]+rRS[0][i][2],rRS[0][i][1]), point2=(rGR[i][0],rGR[i][1]))
mySketch.Line(point1=(rGR[i][0],rGR[i][1]), point2=(rRS[1][i][0]+rRS[1][i][2],rRS[1][i][1]))
girderRigidarmSketch.append(mySketch) #rRS[0][i][2] is negative
self.girderRigidarmSketch=tuple(girderRigidarmSketch)
def __CreateCableSketch(self):
"""Create Cable Sketch
function summary
Args:
Returns:
Raises:
"""
#cable
cableCoordinate=self.structureGeometry.cableCoordinate
self.cableSketch=[]
cableSketch=[]
for i in range(len(cableCoordinate)):
mySketch = self.structureModel.ConstrainedSketch(name='cableSketch'+str(i+1),sheetSize=10.0)
for j in range(len(cableCoordinate[i])-1):
mySketch.Line(point1=(cableCoordinate[i][j][0],cableCoordinate[i][j][1]), point2=(cableCoordinate[i][j+1][0],cableCoordinate[i][j+1][1]))
cableSketch.append(mySketch)
self.cableSketch=tuple(cableSketch)
def __CreateSuspenderSketch(self):
"""Create Suspender Sketch
function summary
Args:
Returns:
Raises:
"""
hP=self.structureGeometry.hangingPointCoordinate
rRS=self.structureGeometry.rRigidarmSuspenderCoordinate
self.suspenderSketch=[]
suspenderSketch=[]
for i in range(len(rRS)):
for j in range(len(rRS[0])):
mySketch = self.structureModel.ConstrainedSketch(name='suspenderSketch'+str(i+1)+'-'+str(j+1),sheetSize=10.0)
mySketch.Line(point1=(hP[i][j][0],hP[i][j][1]), point2=(rRS[i][j][0],rRS[i][j][1]))
suspenderSketch.append(mySketch)
self.suspenderSketch.append(tuple(suspenderSketch))
self.suspenderSketch=tuple(self.suspenderSketch)
| __CreateStiffeningGirderSketch |
webhook.go | /*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0 | See the License for the specific language governing permissions and
limitations under the License.
*/
package cronjob
import (
"k8s.io/apimachinery/pkg/runtime"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/webhook"
)
func (c *CronJob) SetupWebhookWithManager(mgr ctrl.Manager) error {
return ctrl.NewWebhookManagedBy(mgr).
For(c).
Complete()
}
// +kubebuilder:webhook:webhookVersions=v1beta1,verbs=create;update,path=/validate-testdata-kubebuilder-io-v1-cronjob,mutating=false,failurePolicy=fail,matchPolicy=Equivalent,groups=testdata.kubebuiler.io,resources=cronjobs,versions=v1,name=validation.cronjob.testdata.kubebuilder.io,sideEffects=None,admissionReviewVersions=v1;v1beta1
// +kubebuilder:webhook:verbs=create;update,path=/validate-testdata-kubebuilder-io-v1-cronjob,mutating=false,failurePolicy=fail,matchPolicy=Equivalent,groups=testdata.kubebuiler.io,resources=cronjobs,versions=v1,name=validation.cronjob.testdata.kubebuilder.io,sideEffects=NoneOnDryRun,admissionReviewVersions=v1;v1beta1
// +kubebuilder:webhook:webhookVersions=v1,verbs=create;update,path=/mutate-testdata-kubebuilder-io-v1-cronjob,mutating=true,failurePolicy=fail,matchPolicy=Equivalent,groups=testdata.kubebuiler.io,resources=cronjobs,versions=v1,name=default.cronjob.testdata.kubebuilder.io,sideEffects=None,admissionReviewVersions=v1;v1beta1
var _ webhook.Defaulter = &CronJob{}
var _ webhook.Validator = &CronJob{}
func (c *CronJob) Default() {
}
func (c *CronJob) ValidateCreate() error {
return nil
}
func (c *CronJob) ValidateUpdate(_ runtime.Object) error {
return nil
}
func (c *CronJob) ValidateDelete() error {
return nil
} |
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
registry.go | // Copyright (c) 2019 IoTeX Foundation
// This is an alpha (internal) release and is not suitable for production. This source code is provided 'as is' and no
// warranties are given as to title or non-infringement, merchantability or fitness for purpose and, to the extent
// permitted by law, all liability for your use of the code is disclaimed. This source code is governed by Apache
// License 2.0 that can be found in the LICENSE file.
package protocol
import (
"sync"
"github.com/pkg/errors"
"github.com/iotexproject/iotex-core/pkg/log"
)
// Registry is the hub of all protocols deployed on the chain
type Registry struct {
protocols sync.Map
}
// Register registers the protocol with a unique ID
func (r *Registry) Register(id string, p Protocol) error {
_, loaded := r.protocols.LoadOrStore(id, p)
if loaded {
return errors.Errorf("Protocol with ID %s is already registered", id)
}
return nil
}
// ForceRegister registers the protocol with a unique ID and force replacing the previous protocol if it exists
func (r *Registry) ForceRegister(id string, p Protocol) error {
r.protocols.Store(id, p)
return nil
}
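// Illustrative usage (the protocol value is hypothetical):
//
//	var reg Registry
//	if err := reg.Register("account", accountProtocol); err != nil {
//		// a protocol with ID "account" was already registered
//	}
//	p, ok := reg.Find("account")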
// Find finds a protocol by ID
func (r *Registry) Find(id string) (Protocol, bool) {
value, ok := r.protocols.Load(id)
if !ok {
return nil, false
}
p, ok := value.(Protocol)
if !ok {
log.S().Panic("Registry stores the item which is not a protocol") | }
// All returns all protocols
func (r *Registry) All() []Protocol {
all := make([]Protocol, 0)
r.protocols.Range(func(_, value interface{}) bool {
p, ok := value.(Protocol)
if !ok {
log.S().Panic("Registry stores the item which is not a protocol")
}
all = append(all, p)
return true
})
return all
} | }
return p, true |
issue-10436.rs | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
fn works<T>(x: T) -> Vec<T> { vec![x] }
fn also_works<T: Clone>(x: T) -> Vec<T> { vec![x] }
fn main() {
let _: Vec<usize> = works(0);
let _: Vec<usize> = also_works(0);
let _ = works(0);
let _ = also_works(0);
} | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at |
|
util.py | # util.py
# -------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# ([email protected]) and Dan Klein ([email protected]).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel ([email protected]).
import heapq
import random
import sys
# import cStringIO
import types
import inspect
class FixedRandom:
def __init__(self):
fixedState = (3, (2147483648, 507801126, 683453281, 310439348, 2597246090, \
2209084787, 2267831527, 979920060, 3098657677, 37650879, 807947081, 3974896263, \
881243242, 3100634921, 1334775171, 3965168385, 746264660, 4074750168, 500078808, \
776561771, 702988163, 1636311725, 2559226045, 157578202, 2498342920, 2794591496, \
4130598723, 496985844, 2944563015, 3731321600, 3514814613, 3362575829, 3038768745, \
2206497038, 1108748846, 1317460727, 3134077628, 988312410, 1674063516, 746456451, \
3958482413, 1857117812, 708750586, 1583423339, 3466495450, 1536929345, 1137240525, \
3875025632, 2466137587, 1235845595, 4214575620, 3792516855, 657994358, 1241843248, \
1695651859, 3678946666, 1929922113, 2351044952, 2317810202, 2039319015, 460787996, \
3654096216, 4068721415, 1814163703, 2904112444, 1386111013, 574629867, 2654529343, \
3833135042, 2725328455, 552431551, 4006991378, 1331562057, 3710134542, 303171486, \
1203231078, 2670768975, 54570816, 2679609001, 578983064, 1271454725, 3230871056, \
2496832891, 2944938195, 1608828728, 367886575, 2544708204, 103775539, 1912402393, \
1098482180, 2738577070, 3091646463, 1505274463, 2079416566, 659100352, 839995305, \
1696257633, 274389836, 3973303017, 671127655, 1061109122, 517486945, 1379749962, \
3421383928, 3116950429, 2165882425, 2346928266, 2892678711, 2936066049, 1316407868, \
2873411858, 4279682888, 2744351923, 3290373816, 1014377279, 955200944, 4220990860, \
2386098930, 1772997650, 3757346974, 1621616438, 2877097197, 442116595, 2010480266, \
2867861469, 2955352695, 605335967, 2222936009, 2067554933, 4129906358, 1519608541, \
1195006590, 1942991038, 2736562236, 279162408, 1415982909, 4099901426, 1732201505, \
2934657937, 860563237, 2479235483, 3081651097, 2244720867, 3112631622, 1636991639, \
3860393305, 2312061927, 48780114, 1149090394, 2643246550, 1764050647, 3836789087, \
3474859076, 4237194338, 1735191073, 2150369208, 92164394, 756974036, 2314453957, \
323969533, 4267621035, 283649842, 810004843, 727855536, 1757827251, 3334960421, \
3261035106, 38417393, 2660980472, 1256633965, 2184045390, 811213141, 2857482069, \
2237770878, 3891003138, 2787806886, 2435192790, 2249324662, 3507764896, 995388363, \
856944153, 619213904, 3233967826, 3703465555, 3286531781, 3863193356, 2992340714, \
413696855, 3865185632, 1704163171, 3043634452, 2225424707, 2199018022, 3506117517, \
3311559776, 3374443561, 1207829628, 668793165, 1822020716, 2082656160, 1160606415, \
3034757648, 741703672, 3094328738, 459332691, 2702383376, 1610239915, 4162939394, \
557861574, 3805706338, 3832520705, 1248934879, 3250424034, 892335058, 74323433, \
3209751608, 3213220797, 3444035873, 3743886725, 1783837251, 610968664, 580745246, \
4041979504, 201684874, 2673219253, 1377283008, 3497299167, 2344209394, 2304982920, \
3081403782, 2599256854, 3184475235, 3373055826, 695186388, 2423332338, 222864327, \
1258227992, 3627871647, 3487724980, 4027953808, 3053320360, 533627073, 3026232514, \
2340271949, 867277230, 868513116, 2158535651, 2487822909, 3428235761, 3067196046, \
3435119657, 1908441839, 788668797, 3367703138, 3317763187, 908264443, 2252100381, \
764223334, 4127108988, 384641349, 3377374722, 1263833251, 1958694944, 3847832657, \
1253909612, 1096494446, 555725445, 2277045895, 3340096504, 1383318686, 4234428127, \
1072582179, 94169494, 1064509968, 2681151917, 2681864920, 734708852, 1338914021, \
1270409500, 1789469116, 4191988204, 1716329784, 2213764829, 3712538840, 919910444, \
1318414447, 3383806712, 3054941722, 3378649942, 1205735655, 1268136494, 2214009444, \
2532395133, 3232230447, 230294038, 342599089, 772808141, 4096882234, 3146662953, \
2784264306, 1860954704, 2675279609, 2984212876, 2466966981, 2627986059, 2985545332, \
2578042598, 1458940786, 2944243755, 3959506256, 1509151382, 325761900, 942251521, \
4184289782, 2756231555, 3297811774, 1169708099, 3280524138, 3805245319, 3227360276, \
3199632491, 2235795585, 2865407118, 36763651, 2441503575, 3314890374, 1755526087, \
17915536, 1196948233, 949343045, 3815841867, 489007833, 2654997597, 2834744136, \
417688687, 2843220846, 85621843, 747339336, 2043645709, 3520444394, 1825470818, \
647778910, 275904777, 1249389189, 3640887431, 4200779599, 323384601, 3446088641, \
4049835786, 1718989062, 3563787136, 44099190, 3281263107, 22910812, 1826109246, \
745118154, 3392171319, 1571490704, 354891067, 815955642, 1453450421, 940015623, \
796817754, 1260148619, 3898237757, 176670141, 1870249326, 3317738680, 448918002, \
4059166594, 2003827551, 987091377, 224855998, 3520570137, 789522610, 2604445123, \
454472869, 475688926, 2990723466, 523362238, 3897608102, 806637149, 2642229586, \
2928614432, 1564415411, 1691381054, 3816907227, 4082581003, 1895544448, 3728217394, \
3214813157, 4054301607, 1882632454, 2873728645, 3694943071, 1297991732, 2101682438, \
3952579552, 678650400, 1391722293, 478833748, 2976468591, 158586606, 2576499787, \
662690848, 3799889765, 3328894692, 2474578497, 2383901391, 1718193504, 3003184595, \
3630561213, 1929441113, 3848238627, 1594310094, 3040359840, 3051803867, 2462788790, \
954409915, 802581771, 681703307, 545982392, 2738993819, 8025358, 2827719383, \
770471093, 3484895980, 3111306320, 3900000891, 2116916652, 397746721, 2087689510, \
721433935, 1396088885, 2751612384, 1998988613, 2135074843, 2521131298, 707009172, \
2398321482, 688041159, 2264560137, 482388305, 207864885, 3735036991, 3490348331, \
1963642811, 3260224305, 3493564223, 1939428454, 1128799656, 1366012432, 2858822447, \
1428147157, 2261125391, 1611208390, 1134826333, 2374102525, 3833625209, 2266397263, \
3189115077, 770080230, 2674657172, 4280146640, 3604531615, 4235071805, 3436987249, \
509704467, 2582695198, 4256268040, 3391197562, 1460642842, 1617931012, 457825497, \
1031452907, 1330422862, 4125947620, 2280712485, 431892090, 2387410588, 2061126784, \
896457479, 3480499461, 2488196663, 4021103792, 1877063114, 2744470201, 1046140599, \
2129952955, 3583049218, 4217723693, 2720341743, 820661843, 1079873609, 3360954200, \
3652304997, 3335838575, 2178810636, 1908053374, 4026721976, 1793145418, 476541615, \
973420250, 515553040, 919292001, 2601786155, 1685119450, 3030170809, 1590676150, \
1665099167, 651151584, 2077190587, 957892642, 646336572, 2743719258, 866169074, \
851118829, 4225766285, 963748226, 799549420, 1955032629, 799460000, 2425744063, \
2441291571, 1928963772, 528930629, 2591962884, 3495142819, 1896021824, 901320159, \
3181820243, 843061941, 3338628510, 3782438992, 9515330, 1705797226, 953535929, \
764833876, 3202464965, 2970244591, 519154982, 3390617541, 566616744, 3438031503, \
1853838297, 170608755, 1393728434, 676900116, 3184965776, 1843100290, 78995357, \
2227939888, 3460264600, 1745705055, 1474086965, 572796246, 4081303004, 882828851, \
1295445825, 137639900, 3304579600, 2722437017, 4093422709, 273203373, 2666507854, \
3998836510, 493829981, 1623949669, 3482036755, 3390023939, 833233937, 1639668730, \
1499455075, 249728260, 1210694006, 3836497489, 1551488720, 3253074267, 3388238003, \
2372035079, 3945715164, 2029501215, 3362012634, 2007375355, 4074709820, 631485888, \
3135015769, 4273087084, 3648076204, 2739943601, 1374020358, 1760722448, 3773939706, \
1313027823, 1895251226, 4224465911, 421382535, 1141067370, 3660034846, 3393185650, \
1850995280, 1451917312, 3841455409, 3926840308, 1397397252, 2572864479, 2500171350, \
3119920613, 531400869, 1626487579, 1099320497, 407414753, 2438623324, 99073255, \
3175491512, 656431560, 1153671785, 236307875, 2824738046, 2320621382, 892174056, \
230984053, 719791226, 2718891946, 624), None)
self.random = random.Random()
self.random.setstate(fixedState)
"""
Data structures useful for implementing SearchAgents
"""
class Stack:
"A container with a last-in-first-out (LIFO) queuing policy."
def __init__(self):
self.list = []
def push(self, item):
"Push 'item' onto the stack"
self.list.append(item)
def pop(self):
"Pop the most recently pushed item from the stack"
return self.list.pop()
def isEmpty(self):
"Returns true if the stack is empty"
return len(self.list) == 0
class Queue:
"A container with a first-in-first-out (FIFO) queuing policy."
def __init__(self):
self.list = []
def push(self, item):
"Enqueue the 'item' into the queue"
self.list.insert(0, item)
def pop(self):
"""
Dequeue the earliest enqueued item still in the queue. This
operation removes the item from the queue.
"""
return self.list.pop()
def isEmpty(self):
"Returns true if the queue is empty"
return len(self.list) == 0
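# A minimal usage sketch (added for illustration; not part of the original
# assignment API): the same push order yields opposite pop orders from a
# Stack (LIFO) and a Queue (FIFO).
def _demoStackAndQueue():
    """
    >>> s, q = Stack(), Queue()
    >>> for x in [1, 2, 3]:
    ...     s.push(x); q.push(x)
    >>> s.pop(), q.pop()
    (3, 1)
    """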
class PriorityQueue:
"""
    Implements a priority queue data structure. Each inserted item
    has a priority associated with it, and the client is typically
    interested in quick retrieval of the lowest-priority item. Backed
    by a binary heap, this structure offers O(1) access to the
    lowest-priority item, while push and pop cost O(log n).
"""
def __init__(self):
self.heap = []
self.count = 0
def push(self, item, priority):
entry = (priority, self.count, item)
heapq.heappush(self.heap, entry)
self.count += 1
def pop(self):
(_, _, item) = heapq.heappop(self.heap)
return item
def isEmpty(self):
return len(self.heap) == 0
def update(self, item, priority):
        # If the item is already in the queue with a numerically higher
        # priority, lower its priority and rebuild the heap.
        # If it is already in the queue with an equal or lower priority, do nothing.
        # If it is not in the queue, behave exactly like self.push.
for index, (p, c, i) in enumerate(self.heap):
if i == item:
if p <= priority:
break
del self.heap[index]
self.heap.append((priority, c, item))
heapq.heapify(self.heap)
break
else:
self.push(item, priority)
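# Illustrative sketch (not part of the original assignment API): pop() returns
# items in increasing order of priority, and update() only ever lowers an
# existing item's priority.
def _demoPriorityQueue():
    """
    >>> pq = PriorityQueue()
    >>> pq.push('eat', 2)
    >>> pq.push('sleep', 3)
    >>> pq.push('code', 1)
    >>> pq.pop()
    'code'
    >>> pq.update('sleep', 1)  # 3 -> 1: priority is lowered
    >>> pq.update('eat', 5)    # 5 > 2: no effect
    >>> pq.pop()
    'sleep'
    >>> pq.pop()
    'eat'
    """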
class PriorityQueueWithFunction(PriorityQueue):
"""
    Implements a priority queue with the same push/pop signature as the
    Queue and Stack classes, making it a drop-in replacement for either.
    The caller must provide a priority function, which extracts each
    item's priority.
"""
def __init__(self, priorityFunction):
"priorityFunction (item) -> priority"
self.priorityFunction = priorityFunction # store the priority function
PriorityQueue.__init__(self) # super-class initializer
def push(self, item):
"Adds an item to the queue with priority from the priority function"
PriorityQueue.push(self, item, self.priorityFunction(item))
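# Illustrative sketch (not part of the original assignment API): the priority
# function is applied automatically on push, so the shortest string pops first.
def _demoPriorityQueueWithFunction():
    """
    >>> pq = PriorityQueueWithFunction(len)
    >>> pq.push('pacman')
    >>> pq.push('a')
    >>> pq.push('ghost')
    >>> pq.pop()
    'a'
    """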
def manhattanDistance(xy1, xy2):
"Returns the Manhattan distance between points xy1 and xy2"
return abs(xy1[0] - xy2[0]) + abs(xy1[1] - xy2[1])
"""
Data structures and functions useful for various course projects
The search project should not need anything below this line.
"""
class Counter(dict):
"""
A counter keeps track of counts for a set of keys.
The counter class is an extension of the standard python
dictionary type. It is specialized to have number values
(integers or floats), and includes a handful of additional
functions to ease the task of counting data. In particular,
all keys are defaulted to have value 0. Using a dictionary:
a = {}
print(a['test'])
    would raise a KeyError, while the Counter class analogue:
>>> a = Counter()
>>> print(a['test'])
0
returns the default 0 value. Note that to reference a key
that you know is contained in the counter,
you can still use the dictionary syntax:
>>> a = Counter()
>>> a['test'] = 2
>>> print(a['test'])
2
    This is very useful for counting things without initializing their
    counts; for example:
>>> a['blah'] += 1
>>> print(a['blah'])
1
The counter also includes additional functionality useful in implementing
the classifiers for this assignment. Two counters can be added,
subtracted or multiplied together. See below for details. They can
also be normalized and their total count and arg max can be extracted.
"""
def __getitem__(self, idx):
self.setdefault(idx, 0)
return dict.__getitem__(self, idx)
def incrementAll(self, keys, count):
"""
Increments all elements of keys by the same count.
>>> a = Counter()
>>> a.incrementAll(['one','two', 'three'], 1)
>>> a['one']
1
>>> a['two']
1
"""
for key in keys:
self[key] += count
def argMax(self):
"""
Returns the key with the highest value.
"""
if len(self.keys()) == 0: return None
all = list(self.items())
values = [x[1] for x in all]
maxIndex = values.index(max(values))
return all[maxIndex][0]
def sortedKeys(self):
"""
Returns a list of keys sorted by their values. Keys
with the highest values will appear first.
>>> a = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> a['third'] = 1
>>> a.sortedKeys()
['second', 'third', 'first']
"""
        # list.sort(cmp=...) was removed in Python 3; sort by value, descending.
        sortedItems = sorted(self.items(), key=lambda pair: pair[1], reverse=True)
return [x[0] for x in sortedItems]
def totalCount(self):
"""
Returns the sum of counts for all keys.
"""
return sum(self.values())
def normalize(self):
"""
Edits the counter such that the total count of all
keys sums to 1. The ratio of counts for all keys
        will remain the same. Normalizing a Counter whose total
        count is 0 (including an empty Counter) leaves it unchanged.
"""
total = float(self.totalCount())
if total == 0: return
for key in self.keys():
self[key] = self[key] / total
def divideAll(self, divisor):
"""
Divides all counts by divisor
"""
divisor = float(divisor)
for key in self:
self[key] /= divisor
def copy(self):
"""
Returns a copy of the counter
"""
return Counter(dict.copy(self))
def __mul__(self, y):
"""
Multiplying two counters gives the dot product of their vectors where
each unique label is a vector element.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['second'] = 5
>>> a['third'] = 1.5
>>> a['fourth'] = 2.5
>>> a * b
14
"""
sum = 0
x = self
if len(x) > len(y):
x, y = y, x
for key in x:
if key not in y:
continue
sum += x[key] * y[key]
return sum
def __radd__(self, y):
"""
Adding another counter to a counter increments the current counter
by the values stored in the second counter.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> a += b
>>> a['first']
1
"""
        for key, value in y.items():
            self[key] += value
        return self
def __add__(self, y):
"""
Adding two counters gives a counter with the union of all keys and
counts of the second added to counts of the first.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> (a + b)['first']
1
"""
addend = Counter()
for key in self:
if key in y:
addend[key] = self[key] + y[key]
else:
addend[key] = self[key]
for key in y:
if key in self:
continue
addend[key] = y[key]
return addend
def __sub__(self, y):
"""
Subtracting a counter from another gives a counter with the union of all keys and
counts of the second subtracted from counts of the first.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> (a - b)['first']
-5
"""
addend = Counter()
for key in self:
if key in y:
addend[key] = self[key] - y[key]
else:
addend[key] = self[key]
for key in y:
if key in self:
continue
addend[key] = -1 * y[key]
return addend
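# Illustrative sketch (not part of the original assignment API): ties together
# argMax, totalCount and normalize on a small Counter.
def _demoCounter():
    """
    >>> c = Counter()
    >>> c['a'] = 1
    >>> c['b'] = 3
    >>> c.argMax()
    'b'
    >>> c.totalCount()
    4
    >>> c.normalize()
    >>> c['b']
    0.75
    """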
def raiseNotDefined():
fileName = inspect.stack()[1][1]
line = inspect.stack()[1][2]
method = inspect.stack()[1][3]
print("*** Method not implemented: %s at line %s of %s" % (method, line, fileName))
sys.exit(1)
def normalize(vectorOrCounter):
"""
normalize a vector or counter by dividing each value by the sum of all values
"""
normalizedCounter = Counter()
if type(vectorOrCounter) == type(normalizedCounter):
counter = vectorOrCounter
total = float(counter.totalCount())
if total == 0: return counter
for key in counter.keys():
value = counter[key]
normalizedCounter[key] = value / total
return normalizedCounter
else:
vector = vectorOrCounter
s = float(sum(vector))
if s == 0: return vector
return [el / s for el in vector]
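# Illustrative sketch (not part of the original assignment API): normalize
# also accepts a plain list and rescales it to sum to 1.
def _demoNormalize():
    """
    >>> normalize([1, 1, 2])
    [0.25, 0.25, 0.5]
    """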
def nSample(distribution, values, n):
if sum(distribution) != 1:
distribution = normalize(distribution)
rand = [random.random() for i in range(n)]
rand.sort()
samples = []
samplePos, distPos, cdf = 0, 0, distribution[0]
while samplePos < n:
if rand[samplePos] < cdf:
samplePos += 1
samples.append(values[distPos])
else:
distPos += 1
cdf += distribution[distPos]
return samples
def sample(distribution, values=None):
if type(distribution) == Counter:
items = sorted(distribution.items())
distribution = [i[1] for i in items]
values = [i[0] for i in items]
if sum(distribution) != 1:
distribution = normalize(distribution)
choice = random.random()
i, total = 0, distribution[0]
while choice > total:
i += 1
total += distribution[i]
return values[i]
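# Illustrative sketch (not part of the original assignment API): sample walks
# the cumulative distribution until the random draw falls inside a bucket, so
# a one-point distribution picks its single value deterministically.
def _demoSample():
    """
    >>> sample([1.0], ['only'])
    'only'
    >>> d = Counter()
    >>> d['x'] = 2.0
    >>> sample(d)  # Counters are normalized before sampling
    'x'
    """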
def sampleFromCounter(ctr):
items = sorted(ctr.items())
return sample([v for k, v in items], [k for k, v in items])
def getProbability(value, distribution, values):
"""
Gives the probability of a value under a discrete distribution
defined by (distributions, values).
"""
total = 0.0
for prob, val in zip(distribution, values):
if val == value:
total += prob
return total
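# Illustrative sketch (not part of the original assignment API): probability
# mass is summed over duplicate values.
def _demoGetProbability():
    """
    >>> getProbability('a', [0.25, 0.5, 0.25], ['a', 'b', 'a'])
    0.5
    """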
def flipCoin(p):
r = random.random()
return r < p
def chooseFromDistribution(distribution):
"Takes either a counter or a list of (prob, key) pairs and samples"
if type(distribution) == dict or type(distribution) == Counter:
return sample(distribution)
r = random.random()
base = 0.0
for prob, element in distribution:
base += prob
if r <= base: return element
def nearestPoint(pos):
"""
Finds the nearest grid point to a position (discretizes).
"""
(current_row, current_col) = pos
grid_row = int(current_row + 0.5)
grid_col = int(current_col + 0.5)
return (grid_row, grid_col)
def | (x):
"""
Returns 1 or -1 depending on the sign of x
"""
if (x >= 0):
return 1
else:
return -1
def arrayInvert(array):
"""
    Transposes a matrix stored as a list of lists: rows become
    columns. Assumes the matrix is square.
"""
result = [[] for i in array]
for outer in array:
for inner in range(len(outer)):
result[inner].append(outer[inner])
return result
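# Illustrative sketch (not part of the original assignment API): despite the
# name, arrayInvert transposes, so rows become columns.
def _demoArrayInvert():
    """
    >>> arrayInvert([[1, 2], [3, 4]])
    [[1, 3], [2, 4]]
    """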
def matrixAsList(matrix, value=True):
"""
Turns a matrix into a list of coordinates matching the specified value
"""
rows, cols = len(matrix), len(matrix[0])
cells = []
for row in range(rows):
for col in range(cols):
if matrix[row][col] == value:
cells.append((row, col))
return cells
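# Illustrative sketch (not part of the original assignment API): collects the
# coordinates of every cell holding the target value (True by default).
def _demoMatrixAsList():
    """
    >>> matrixAsList([[True, False], [False, True]])
    [(0, 0), (1, 1)]
    """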
def lookup(name, namespace):
"""
Get a method or class from any imported module from its name.
Usage: lookup(functionName, globals())
"""
dots = name.count('.')
if dots > 0:
moduleName, objName = '.'.join(name.split('.')[:-1]), name.split('.')[-1]
module = __import__(moduleName)
return getattr(module, objName)
else:
# modules = [obj for obj in namespace.values() if str(type(obj)) == "<type 'module'>"]
modules = [obj for obj in namespace.values() if isinstance(obj, types.ModuleType)]
options = [getattr(module, name) for module in modules if name in dir(module)]
options += [obj[1] for obj in namespace.items() if obj[0] == name]
if len(options) == 1: return options[0]
        if len(options) > 1: raise Exception('Name conflict for %s' % name)
raise Exception('%s not found as a method or class' % name)
def pause():
"""
Pauses the output stream awaiting user feedback.
"""
print("<Press enter/return to continue>")
    input()
# code to handle timeouts
#
# FIXME
# NOTE: TimeoutFunction is NOT reentrant. Later timeouts will silently
# disable earlier timeouts. This could be solved by maintaining a global
# list of active timeouts. Currently, questions whose test cases call
# this wrap all of the student code in it.
#
import signal
import time
class TimeoutFunctionException(Exception):
"""Exception to raise on a timeout"""
pass
class TimeoutFunction:
def __init__(self, function, timeout):
self.timeout = timeout
self.function = function
def handle_timeout(self, signum, frame):
raise TimeoutFunctionException()
def __call__(self, *args, **keyArgs):
# If we have SIGALRM signal, use it to cause an exception if and
# when this function runs too long. Otherwise check the time taken
# after the method has returned, and throw an exception then.
if hasattr(signal, 'SIGALRM'):
old = signal.signal(signal.SIGALRM, self.handle_timeout)
signal.alarm(self.timeout)
try:
result = self.function(*args, **keyArgs)
finally:
signal.signal(signal.SIGALRM, old)
signal.alarm(0)
else:
startTime = time.time()
result = self.function(*args, **keyArgs)
timeElapsed = time.time() - startTime
if timeElapsed >= self.timeout:
self.handle_timeout(None, None)
return result
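# Illustrative sketch (not part of the original API): a wrapped call that
# finishes inside its budget returns normally; overrunning it raises
# TimeoutFunctionException (via SIGALRM on Unix, a wall-clock check elsewhere).
def _demoTimeoutFunction():
    """
    >>> fast = TimeoutFunction(lambda: 2 + 2, 10)
    >>> fast()
    4
    """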
_ORIGINAL_STDOUT = None
_ORIGINAL_STDERR = None
_MUTED = False
class WritableNull:
    def write(self, string):
        pass
    def flush(self):
        # Accept flush() so print(..., flush=True) still works while muted.
        pass
def mutePrint():
global _ORIGINAL_STDOUT, _ORIGINAL_STDERR, _MUTED
if _MUTED:
return
_MUTED = True
_ORIGINAL_STDOUT = sys.stdout
# _ORIGINAL_STDERR = sys.stderr
sys.stdout = WritableNull()
# sys.stderr = WritableNull()
def unmutePrint():
global _ORIGINAL_STDOUT, _ORIGINAL_STDERR, _MUTED
if not _MUTED:
return
_MUTED = False
sys.stdout = _ORIGINAL_STDOUT
# sys.stderr = _ORIGINAL_STDERR
| sign |
app.go | /*
Copyright (c) 2021 OceanBase
ob-operator is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
*/
package initialization
import (
"context"
log "github.com/sirupsen/logrus"
"github.com/oceanbase/ob-operator/pkg/cable/server"
"github.com/oceanbase/ob-operator/pkg/cable/status"
"github.com/oceanbase/ob-operator/pkg/util"
)
func InitApp() {
// init logger
InitLogger()
util.FuncList = append(util.FuncList, StopApp)
log.Info("init directory for oceanbase")
// TODO use paths in dockerfile and remove this process
InitDir()
log.Info("init status variables")
// TODO set variable values, move from observer package to a meaningful one
status.Readiness = false
status.ObserverStarted = false
log.Info("init http server")
server.CableServer.Init()
go server.CableServer.Run()
} |
func StopApp() {
log.Info("stop cable server")
server.CableServer.Stop(context.TODO())
} | |
stream.rs | use std::collections::VecDeque;
use std::sync::{Mutex, Condvar};
#[derive(Debug)]
pub enum Event {
Error,
Finished,
NeedsInput,
Output(String),
}
#[derive(Default)]
struct StreamState {
buffer: String,
events: VecDeque<Event>,
}
pub struct Stream {
state: Mutex<StreamState>,
condvar: Condvar,
}
impl Stream {
pub fn new() -> Self {
Stream { state: Mutex::new(StreamState::default()), condvar: Condvar::new() }
}
pub fn finished(&self) {
let mut state = self.state.lock().unwrap();
state.events.push_back(Event::Finished);
self.condvar.notify_one();
}
    pub fn read_input(&self) -> String {
        let mut state = self.state.lock().unwrap();
        // Announce that input is needed, then block until write_input
        // fills the buffer and signals the condvar.
        state.events.push_back(Event::NeedsInput);
        self.condvar.notify_one();
        // NOTE: a single wait() assumes no spurious wakeups; a predicate
        // loop (e.g. Condvar::wait_while) would be more robust.
        state = self.condvar.wait(state).unwrap();
        // Take the buffered line and clear it for the next read.
        let temp = state.buffer.clone();
        state.buffer.clear();
        temp
    }
pub fn write_input(&self, s: &str) {
let mut state = self.state.lock().unwrap();
state.buffer = String::from(s);
self.condvar.notify_one();
}
pub fn write_output(&self, s: &str) {
let mut state = self.state.lock().unwrap();
state.events.push_back(Event::Output(String::from(s)));
self.condvar.notify_one();
}
pub fn get_event(&self) -> Option<Event> {
let mut state = self.state.lock().unwrap();
let event = state.events.pop_front();
if event.is_some() |
state = self.condvar.wait(state).unwrap();
state.events.pop_front()
}
}
| {
return event;
} |
JavaScript3.js | {"_ReportGenerationStatus":5,"IsConsumerViewed":false,"IsExpired":false,"NextScheduledDate":"/Date(1462895395390)/","SchedulFrequency":2,"OtherReports":[{"ReportInstanceId":"1face1d8-ea27-48ec-a5bd-6981b3780f5c","WhenCreated":"/Date(1460963321327)/"},{"ReportInstanceId":"3146ad51-6a3e-4077-8f47-23148f5dbdfe","WhenCreated":"/Date(1459832890360)/"},{"ReportInstanceId":"1d54d765-f91c-4264-9971-05d2036964bc","WhenCreated":"/Date(1459144087893)/"},{"ReportInstanceId":"89e06dec-56aa-4e3e-b3e5-88ab4e4f6200","WhenCreated":"/Date(1459098574827)/"},{"ReportInstanceId":"98623388-cca5-468c-a951-a4c5d4b674e3","WhenCreated":"/Date(1458580016373)/"},{"ReportInstanceId":"643efde6-d951-4f7e-8753-faecbb4ec7f2","WhenCreated":"/Date(1458144808143)/"},{"ReportInstanceId":"a0400bc8-fe6d-4a20-b4ad-cec8321e66e8","WhenCreated":"/Date(1457928194907)/"},{"ReportInstanceId":"bd50c1ba-92dd-41cf-b2b1-506a4aa3e64c","WhenCreated":"/Date(1457544517440)/"},{"ReportInstanceId":"9f07022e-856f-4613-b633-a60636a5f1f0","WhenCreated":"/Date(1457543953180)/"},{"ReportInstanceId":"12f78077-a1a4-4439-b407-daa023725a68","WhenCreated":"/Date(1457466714077)/"},{"ReportInstanceId":"ecb22079-235d-4190-bdd4-24591352ea28","WhenCreated":"/Date(1456860317463)/"},{"ReportInstanceId":"1a2673c8-2e9f-4642-9849-910e971aa301","WhenCreated":"/Date(1456257649953)/"},{"ReportInstanceId":"33f902bc-78ad-47ba-be4e-72732d7d647d","WhenCreated":"/Date(1455559063253)/"}],"ReportInstanceId":"0ff64408-a592-4833-b90a-998b242eef53","AhReportId":"ffdea513-9598-4d0b-8add-0155d236c673","ReportGenerationStatus":5,"NavigatorFaqUrl":"http://www.topproducer.com/campus/images/external/market-snapshot/faq.html","ConsumerInfor":{"Role":1,"PropertyType":1,"FirstName":"n1","LastName":"prd","Email":"[email protected]","Phone":"","HouseNumber":"","Street":"","City":"","State":"","Zip":"V3R 6J8","MinPrice":"0","MaxPrice":"99000000","BedRoom":"2+ beds","BathRoom":"2+ baths","SqFt":"0","Latitude":49.2019081115723,"Longitude":-122.788108825684,"SearchZip":"V3R","SearchBedRoom":"2+","SearchBathRoom":"2+","SearchSqFt":"","SearchPrice":"","GeocodingLevelId":1,"IsBuyer":true,"IsSeller":false},"PinImageUrlPath":"http://www.topmarketer.net/TMimages/AHReport/4d7536bb-e8b8-445b-916e-dcb714117797/0FF64408-A592-4833-B90A-998B242EEF53/","Pins":[{"Id":1,"ImageFile":"R2039028.jpg","Address":"10888 156 Street","Bath":4,"Bed":5,"Sq":"3079","LotSize":"0.16","ListingPrice":1140800,"SoldPrice":0,"DisplayPrice":1140800,"PricePerSq":371,"ListingDate":"/Date(1456210800000)/","SoldDate":null,"DaysOnMarket":63,"ListingId":681631073,"Status_UI":3,"Status":0,"Latitude":49.20016860961914,"Longitude":-122.78952026367188,"GeocodingLevelId":1,"MlsNumber":"R2039028","ListingDate_Grouped":"/Date(1456038000000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1456383600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852780)/","ListAgentFirstName":"","ListAgentLastName":"Gaurav Lally","ListAgentPhone":"","ListAgentEmail":"[email protected]","DayOfMarket":75,"StatDOM":63,"PendingDate":null,"ExpiredDate":"/Date(1466665200000)/","SortDate":"/Date(1456210800000)/"},{"Id":2,"ImageFile":"R2055912.jpg","Address":"15565 110 
Avenue","Bath":3,"Bed":4,"Sq":"2661","LotSize":"0.17","ListingPrice":928000,"SoldPrice":0,"DisplayPrice":928000,"PricePerSq":349,"ListingDate":"/Date(1460444400000)/","SoldDate":null,"DaysOnMarket":14,"ListingId":681631109,"Status_UI":2,"Status":0,"Latitude":49.20262908935547,"Longitude":-122.79075622558594,"GeocodingLevelId":1,"MlsNumber":"R2055912","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460530800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852950)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":26,"StatDOM":14,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1460444400000)/"},{"Id":3,"ImageFile":"R2044878.jpg","Address":"11110 156 Street","Bath":3,"Bed":4,"Sq":"2660","LotSize":"0.16","ListingPrice":1049900,"SoldPrice":1130000,"DisplayPrice":1130000,"PricePerSq":425,"ListingDate":"/Date(1457679600000)/","SoldDate":"/Date(1458111600000)/","DaysOnMarket":5,"ListingId":681631048,"Status_UI":1,"Status":1,"Latitude":49.204490661621094,"Longitude":-122.78959655761719,"GeocodingLevelId":1,"MlsNumber":"R2044878","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":"/Date(1457852400000)/","StatusDate":"/Date(1459234800000)/","PriceRatio":107.62929802838366,"WhenReceived":"/Date(1461685852653)/","ListAgentFirstName":"","ListAgentLastName":"Mitch Redekop - PREC","ListAgentPhone":"604-583-2000","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1465628400000)/","SortDate":"/Date(1458111600000)/"},{"Id":4,"ImageFile":"R2053879.jpg","Address":"15558 109a Avenue","Bath":6,"Bed":8,"Sq":"4758","LotSize":"0.16","ListingPrice":1599000,"SoldPrice":0,"DisplayPrice":1599000,"PricePerSq":336,"ListingDate":"/Date(1459839600000)/","SoldDate":null,"DaysOnMarket":7,"ListingId":681631165,"Status_UI":4,"Status":2,"Latitude":49.201446533203125,"Longitude":-122.79146575927734,"GeocodingLevelId":1,"MlsNumber":"R2053879","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460530800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853147)/","ListAgentFirstName":"","ListAgentLastName":"Shafiq Kazemi","ListAgentPhone":"604-961-0543","ListAgentEmail":"[email protected]","DayOfMarket":9,"StatDOM":7,"PendingDate":null,"ExpiredDate":"/Date(1460530800000)/","SortDate":"/Date(1460530800000)/"},{"Id":5,"ImageFile":"R2056916.jpg","Address":"15558 109a Avenue","Bath":6,"Bed":8,"Sq":"4758","LotSize":"0.16","ListingPrice":1399000,"SoldPrice":0,"DisplayPrice":1399000,"PricePerSq":294,"ListingDate":"/Date(1460444400000)/","SoldDate":null,"DaysOnMarket":14,"ListingId":681631096,"Status_UI":2,"Status":0,"Latitude":49.201446533203125,"Longitude":-122.79146575927734,"GeocodingLevelId":1,"MlsNumber":"R2056916","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460530800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852873)/","ListAgentFirstName":"","ListAgentLastName":"Shafiq Kazemi","ListAgentPhone":"604-961-0543","ListAgentEmail":"[email protected]","DayOfMarket":26,"StatDOM":14,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1460444400000)/"},{"Id":6,"ImageFile":"R2034702.jpg","Address":"15528 109a 
Avenue","Bath":5,"Bed":6,"Sq":"4133","LotSize":"0.16","ListingPrice":1244888,"SoldPrice":1220000,"DisplayPrice":1220000,"PricePerSq":295,"ListingDate":"/Date(1455519600000)/","SoldDate":"/Date(1455951600000)/","DaysOnMarket":5,"ListingId":681631018,"Status_UI":1,"Status":1,"Latitude":49.20138931274414,"Longitude":-122.79205322265625,"GeocodingLevelId":1,"MlsNumber":"R2034702","ListingDate_Grouped":"/Date(1455433200000)/","SoldDate_Grouped":"/Date(1455433200000)/","StatusDate":"/Date(1457334000000)/","PriceRatio":98.00078400627206,"WhenReceived":"/Date(1461685852260)/","ListAgentFirstName":"","ListAgentLastName":"Kylene Shannon","ListAgentPhone":"604-339-4039","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1464678000000)/","SortDate":"/Date(1455951600000)/"},{"Id":7,"ImageFile":"R2028239.jpg","Address":"11048 155 Street","Bath":3,"Bed":4,"Sq":"2890","LotSize":"0.17","ListingPrice":899900,"SoldPrice":990000,"DisplayPrice":990000,"PricePerSq":343,"ListingDate":"/Date(1453791600000)/","SoldDate":"/Date(1454310000000)/","DaysOnMarket":6,"ListingId":681631007,"Status_UI":1,"Status":1,"Latitude":49.20328903198242,"Longitude":-122.7923583984375,"GeocodingLevelId":1,"MlsNumber":"R2028239","ListingDate_Grouped":"/Date(1453618800000)/","SoldDate_Grouped":"/Date(1454223600000)/","StatusDate":"/Date(1454482800000)/","PriceRatio":110.01222358039782,"WhenReceived":"/Date(1461685852217)/","ListAgentFirstName":"","ListAgentLastName":"Dale Redekop","ListAgentPhone":"604-315-4431","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1461999600000)/","SortDate":"/Date(1454310000000)/"},{"Id":8,"ImageFile":"R2028414.jpg","Address":"10927 155 Street","Bath":4,"Bed":6,"Sq":"3330","LotSize":"0.14","ListingPrice":900000,"SoldPrice":965000,"DisplayPrice":965000,"PricePerSq":290,"ListingDate":"/Date(1453791600000)/","SoldDate":"/Date(1454050800000)/","DaysOnMarket":3,"ListingId":681631065,"Status_UI":1,"Status":1,"Latitude":49.200870513916016,"Longitude":-122.79273986816406,"GeocodingLevelId":1,"MlsNumber":"R2028414","ListingDate_Grouped":"/Date(1453618800000)/","SoldDate_Grouped":"/Date(1453618800000)/","StatusDate":"/Date(1454396400000)/","PriceRatio":107.22222222222223,"WhenReceived":"/Date(1461685852753)/","ListAgentFirstName":"","ListAgentLastName":"Katrina Amurao","ListAgentPhone":"604-507-0200","ListAgentEmail":"[email protected]","DayOfMarket":4,"StatDOM":3,"PendingDate":null,"ExpiredDate":"/Date(1469516400000)/","SortDate":"/Date(1454050800000)/"},{"Id":9,"ImageFile":"R2039641.jpg","Address":"10748 155a Street","Bath":3,"Bed":3,"Sq":"2018","LotSize":"0.16","ListingPrice":829000,"SoldPrice":815000,"DisplayPrice":815000,"PricePerSq":404,"ListingDate":"/Date(1456729200000)/","SoldDate":"/Date(1457938800000)/","DaysOnMarket":14,"ListingId":681630953,"Status_UI":1,"Status":1,"Latitude":49.197811126708984,"Longitude":-122.79107666015625,"GeocodingLevelId":1,"MlsNumber":"R2039641","ListingDate_Grouped":"/Date(1456642800000)/","SoldDate_Grouped":"/Date(1457852400000)/","StatusDate":"/Date(1458802800000)/","PriceRatio":98.31121833534378,"WhenReceived":"/Date(1461685851937)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":15,"StatDOM":14,"PendingDate":null,"ExpiredDate":"/Date(1483167600000)/","SortDate":"/Date(1457938800000)/"},{"Id":10,"ImageFile":"R2042450.jpg","Address":"15531 107a 
Avenue","Bath":3,"Bed":5,"Sq":"3067","LotSize":"0.16","ListingPrice":899000,"SoldPrice":888000,"DisplayPrice":888000,"PricePerSq":290,"ListingDate":"/Date(1456988400000)/","SoldDate":"/Date(1457852400000)/","DaysOnMarket":10,"ListingId":681631024,"Status_UI":1,"Status":1,"Latitude":49.19810485839844,"Longitude":-122.79165649414062,"GeocodingLevelId":1,"MlsNumber":"R2042450","ListingDate_Grouped":"/Date(1456642800000)/","SoldDate_Grouped":"/Date(1457852400000)/","StatusDate":"/Date(1458284400000)/","PriceRatio":98.77641824249166,"WhenReceived":"/Date(1461685852287)/","ListAgentFirstName":"","ListAgentLastName":"Keith Kartzewski","ListAgentPhone":"778-839-4663","ListAgentEmail":"[email protected]","DayOfMarket":11,"StatDOM":10,"PendingDate":null,"ExpiredDate":"/Date(1475478000000)/","SortDate":"/Date(1457852400000)/"},{"Id":11,"ImageFile":"R2033129.jpg","Address":"10693 156 Street","Bath":3,"Bed":4,"Sq":"2105","LotSize":"0.16","ListingPrice":829000,"SoldPrice":841000,"DisplayPrice":841000,"PricePerSq":400,"ListingDate":"/Date(1455001200000)/","SoldDate":"/Date(1455519600000)/","DaysOnMarket":6,"ListingId":681631055,"Status_UI":1,"Status":1,"Latitude":49.1970100402832,"Longitude":-122.79015350341797,"GeocodingLevelId":1,"MlsNumber":"R2033129","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":"/Date(1455433200000)/","StatusDate":"/Date(1455692400000)/","PriceRatio":101.44752714113389,"WhenReceived":"/Date(1461685852700)/","ListAgentFirstName":"","ListAgentLastName":"Mansour Hozar","ListAgentPhone":"604-916-2773","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1462863600000)/","SortDate":"/Date(1455519600000)/"},{"Id":12,"ImageFile":"R2027694.jpg","Address":"10685 156 Street","Bath":3,"Bed":4,"Sq":"2108","LotSize":"0.16","ListingPrice":799000,"SoldPrice":801250,"DisplayPrice":801250,"PricePerSq":380,"ListingDate":"/Date(1454396400000)/","SoldDate":"/Date(1454828400000)/","DaysOnMarket":5,"ListingId":681631028,"Status_UI":1,"Status":1,"Latitude":49.196861267089844,"Longitude":-122.7901382446289,"GeocodingLevelId":1,"MlsNumber":"R2027694","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":"/Date(1454828400000)/","StatusDate":"/Date(1459321200000)/","PriceRatio":100.28160200250313,"WhenReceived":"/Date(1461685852520)/","ListAgentFirstName":"","ListAgentLastName":"Shen Po Chen - PREC","ListAgentPhone":"778-988-8832","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1470121200000)/","SortDate":"/Date(1454828400000)/"},{"Id":13,"ImageFile":"R2038384.jpg","Address":"15522 107a Avenue","Bath":3,"Bed":3,"Sq":"2109","LotSize":"0.17","ListingPrice":760000,"SoldPrice":797500,"DisplayPrice":797500,"PricePerSq":378,"ListingDate":"/Date(1456124400000)/","SoldDate":"/Date(1456815600000)/","DaysOnMarket":8,"ListingId":681631067,"Status_UI":1,"Status":1,"Latitude":49.19783401489258,"Longitude":-122.79180145263672,"GeocodingLevelId":1,"MlsNumber":"R2038384","ListingDate_Grouped":"/Date(1456038000000)/","SoldDate_Grouped":"/Date(1456642800000)/","StatusDate":"/Date(1458284400000)/","PriceRatio":104.9342105263158,"WhenReceived":"/Date(1461685852760)/","ListAgentFirstName":"","ListAgentLastName":"Kate Pelzer","ListAgentPhone":"604-346-6920","ListAgentEmail":"[email protected]","DayOfMarket":9,"StatDOM":8,"PendingDate":null,"ExpiredDate":"/Date(1471849200000)/","SortDate":"/Date(1456815600000)/"},{"Id":14,"ImageFile":"R2027940.jpg","Address":"15480 111 
Avenue","Bath":5,"Bed":5,"Sq":"3092","LotSize":"0.17","ListingPrice":1158000,"SoldPrice":1160000,"DisplayPrice":1160000,"PricePerSq":375,"ListingDate":"/Date(1453446000000)/","SoldDate":"/Date(1454137200000)/","DaysOnMarket":8,"ListingId":681630994,"Status_UI":1,"Status":1,"Latitude":49.204139709472656,"Longitude":-122.79319763183594,"GeocodingLevelId":1,"MlsNumber":"R2027940","ListingDate_Grouped":"/Date(1453014000000)/","SoldDate_Grouped":"/Date(1453618800000)/","StatusDate":"/Date(1455346800000)/","PriceRatio":100.17271157167531,"WhenReceived":"/Date(1461685852173)/","ListAgentFirstName":"","ListAgentLastName":"Kelvin Wang","ListAgentPhone":"778-688-5566","ListAgentEmail":"[email protected]","DayOfMarket":9,"StatDOM":8,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1454137200000)/"},{"Id":15,"ImageFile":"R2052591.jpg","Address":"15477 109 Avenue","Bath":4,"Bed":6,"Sq":"3707","LotSize":"0.2","ListingPrice":1098800,"SoldPrice":0,"DisplayPrice":1098800,"PricePerSq":296,"ListingDate":"/Date(1459839600000)/","SoldDate":null,"DaysOnMarket":21,"ListingId":681631095,"Status_UI":3,"Status":0,"Latitude":49.20077896118164,"Longitude":-122.79364776611328,"GeocodingLevelId":1,"MlsNumber":"R2052591","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1459839600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852870)/","ListAgentFirstName":"","ListAgentLastName":"Mark Attfield","ListAgentPhone":"604-812-6282","ListAgentEmail":"[email protected]","DayOfMarket":33,"StatDOM":21,"PendingDate":null,"ExpiredDate":"/Date(1474614000000)/","SortDate":"/Date(1459839600000)/"},{"Id":16,"ImageFile":"R2033418.jpg","Address":"15459 109a Avenue","Bath":4,"Bed":6,"Sq":"3845","LotSize":"0.14","ListingPrice":1098000,"SoldPrice":1090000,"DisplayPrice":1090000,"PricePerSq":283,"ListingDate":"/Date(1455001200000)/","SoldDate":"/Date(1456297200000)/","DaysOnMarket":15,"ListingId":681630983,"Status_UI":1,"Status":1,"Latitude":49.201629638671875,"Longitude":-122.79381561279297,"GeocodingLevelId":1,"MlsNumber":"R2033418","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":"/Date(1456038000000)/","StatusDate":"/Date(1456815600000)/","PriceRatio":99.27140255009107,"WhenReceived":"/Date(1461685852093)/","ListAgentFirstName":"","ListAgentLastName":"Ritchie Zhao PREC*","ListAgentPhone":"778-389-9158","ListAgentEmail":"[email protected]","DayOfMarket":16,"StatDOM":15,"PendingDate":null,"ExpiredDate":"/Date(1460185200000)/","SortDate":"/Date(1456297200000)/"},{"Id":17,"ImageFile":"R2058008.jpg","Address":"15470 111 Avenue","Bath":4,"Bed":4,"Sq":"3094","LotSize":"0.17","ListingPrice":1358000,"SoldPrice":0,"DisplayPrice":1358000,"PricePerSq":439,"ListingDate":"/Date(1460444400000)/","SoldDate":null,"DaysOnMarket":14,"ListingId":681631098,"Status_UI":2,"Status":0,"Latitude":49.20415115356445,"Longitude":-122.79344177246094,"GeocodingLevelId":1,"MlsNumber":"R2058008","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460703600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852883)/","ListAgentFirstName":"","ListAgentLastName":"Vivian Mo","ListAgentPhone":"604-773-1208","ListAgentEmail":"[email protected]","DayOfMarket":26,"StatDOM":14,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1460444400000)/"},{"Id":18,"ImageFile":"R2034714.jpg","Address":"15451 110a 
Avenue","Bath":5,"Bed":7,"Sq":"4510","LotSize":"0.16","ListingPrice":1238000,"SoldPrice":1250000,"DisplayPrice":1250000,"PricePerSq":277,"ListingDate":"/Date(1455519600000)/","SoldDate":"/Date(1456642800000)/","DaysOnMarket":13,"ListingId":681630952,"Status_UI":1,"Status":1,"Latitude":49.20350646972656,"Longitude":-122.79378509521484,"GeocodingLevelId":1,"MlsNumber":"R2034714","ListingDate_Grouped":"/Date(1455433200000)/","SoldDate_Grouped":"/Date(1456642800000)/","StatusDate":"/Date(1457420400000)/","PriceRatio":100.96930533117933,"WhenReceived":"/Date(1461685851933)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":14,"StatDOM":13,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1456642800000)/"},{"Id":19,"ImageFile":"R2058840.jpg","Address":"15477 107a Avenue","Bath":3,"Bed":4,"Sq":"2109","LotSize":"0.28","ListingPrice":800000,"SoldPrice":0,"DisplayPrice":800000,"PricePerSq":379,"ListingDate":"/Date(1460962800000)/","SoldDate":null,"DaysOnMarket":8,"ListingId":681631119,"Status_UI":2,"Status":0,"Latitude":49.19813537597656,"Longitude":-122.79269409179688,"GeocodingLevelId":1,"MlsNumber":"R2058840","ListingDate_Grouped":"/Date(1460876400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460962800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852987)/","ListAgentFirstName":"","ListAgentLastName":"Mehdi Shamei PREC*","ListAgentPhone":"604-729-0439","ListAgentEmail":"[email protected]","DayOfMarket":20,"StatDOM":8,"PendingDate":null,"ExpiredDate":"/Date(1483167600000)/","SortDate":"/Date(1460962800000)/"},{"Id":20,"ImageFile":"R2032365.jpg","Address":"15524 113 Avenue","Bath":4,"Bed":5,"Sq":"3270","LotSize":"0.11","ListingPrice":869800,"SoldPrice":970000,"DisplayPrice":970000,"PricePerSq":297,"ListingDate":"/Date(1455087600000)/","SoldDate":"/Date(1455519600000)/","DaysOnMarket":5,"ListingId":681630990,"Status_UI":1,"Status":1,"Latitude":49.20766067504883,"Longitude":-122.79165649414062,"GeocodingLevelId":1,"MlsNumber":"R2032365","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":"/Date(1455433200000)/","StatusDate":"/Date(1455865200000)/","PriceRatio":111.51988962979995,"WhenReceived":"/Date(1461685852160)/","ListAgentFirstName":"","ListAgentLastName":"Patrick Kim","ListAgentPhone":"604-837-8827","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1470726000000)/","SortDate":"/Date(1455519600000)/"},{"Id":21,"ImageFile":"R2023722.jpg","Address":"15418 110a Avenue","Bath":8,"Bed":8,"Sq":"6303","LotSize":"0.17","ListingPrice":1568000,"SoldPrice":1548000,"DisplayPrice":1548000,"PricePerSq":246,"ListingDate":"/Date(1452236400000)/","SoldDate":"/Date(1456038000000)/","DaysOnMarket":44,"ListingId":681630992,"Status_UI":1,"Status":1,"Latitude":49.20322799682617,"Longitude":-122.79476928710938,"GeocodingLevelId":1,"MlsNumber":"R2023722","ListingDate_Grouped":"/Date(1451804400000)/","SoldDate_Grouped":"/Date(1456038000000)/","StatusDate":"/Date(1457334000000)/","PriceRatio":98.72448979591837,"WhenReceived":"/Date(1461685852167)/","ListAgentFirstName":"","ListAgentLastName":"Luvi Sandhu","ListAgentPhone":"604-719-2621","ListAgentEmail":"[email protected]","DayOfMarket":45,"StatDOM":44,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1456038000000)/"},{"Id":22,"ImageFile":"R2038373.jpg","Address":"15523 113 
Avenue","Bath":4,"Bed":4,"Sq":"3426","LotSize":"0.12","ListingPrice":968000,"SoldPrice":1020000,"DisplayPrice":1020000,"PricePerSq":298,"ListingDate":"/Date(1456210800000)/","SoldDate":"/Date(1456729200000)/","DaysOnMarket":6,"ListingId":681631066,"Status_UI":1,"Status":1,"Latitude":49.20785903930664,"Longitude":-122.79161071777344,"GeocodingLevelId":1,"MlsNumber":"R2038373","ListingDate_Grouped":"/Date(1456038000000)/","SoldDate_Grouped":"/Date(1456642800000)/","StatusDate":"/Date(1457593200000)/","PriceRatio":105.37190082644628,"WhenReceived":"/Date(1461685852757)/","ListAgentFirstName":"","ListAgentLastName":"Benjamin Peng - PREC","ListAgentPhone":"778-997-6698","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1456729200000)/"},{"Id":23,"ImageFile":"R2050427.jpg","Address":"11382 154a Street","Bath":4,"Bed":6,"Sq":"3605","LotSize":"0.12","ListingPrice":1159000,"SoldPrice":1150000,"DisplayPrice":1150000,"PricePerSq":319,"ListingDate":"/Date(1459234800000)/","SoldDate":"/Date(1459753200000)/","DaysOnMarket":6,"ListingId":681630986,"Status_UI":1,"Status":1,"Latitude":49.20864486694336,"Longitude":-122.79121398925781,"GeocodingLevelId":1,"MlsNumber":"R2050427","ListingDate_Grouped":"/Date(1459062000000)/","SoldDate_Grouped":"/Date(1459666800000)/","StatusDate":"/Date(1460962800000)/","PriceRatio":99.2234685073339,"WhenReceived":"/Date(1461685852110)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1459753200000)/"},{"Id":24,"ImageFile":"R2038328.jpg","Address":"11382 154a Street","Bath":4,"Bed":6,"Sq":"3605","LotSize":"0.12","ListingPrice":1180000,"SoldPrice":0,"DisplayPrice":1180000,"PricePerSq":327,"ListingDate":"/Date(1456210800000)/","SoldDate":null,"DaysOnMarket":35,"ListingId":681631159,"Status_UI":4,"Status":2,"Latitude":49.20864486694336,"Longitude":-122.79121398925781,"GeocodingLevelId":1,"MlsNumber":"R2038328","ListingDate_Grouped":"/Date(1456038000000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1459234800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853127)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":36,"StatDOM":35,"PendingDate":null,"ExpiredDate":"/Date(1459234800000)/","SortDate":"/Date(1459234800000)/"},{"Id":25,"ImageFile":"R2024342.jpg","Address":"11288 154a Street","Bath":4,"Bed":6,"Sq":"3571","LotSize":"0.16","ListingPrice":1080000,"SoldPrice":1080000,"DisplayPrice":1080000,"PricePerSq":302,"ListingDate":"/Date(1452495600000)/","SoldDate":"/Date(1453878000000)/","DaysOnMarket":16,"ListingId":681631020,"Status_UI":1,"Status":1,"Latitude":49.2076416015625,"Longitude":-122.7929916381836,"GeocodingLevelId":1,"MlsNumber":"R2024342","ListingDate_Grouped":"/Date(1452409200000)/","SoldDate_Grouped":"/Date(1453618800000)/","StatusDate":"/Date(1455174000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852267)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":17,"StatDOM":16,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1453878000000)/"},{"Id":26,"ImageFile":"R2048696.jpg","Address":"11081 154 
Street","Bath":3,"Bed":5,"Sq":"2350","LotSize":"0.18","ListingPrice":1088000,"SoldPrice":0,"DisplayPrice":1088000,"PricePerSq":463,"ListingDate":"/Date(1458284400000)/","SoldDate":null,"DaysOnMarket":27,"ListingId":681631163,"Status_UI":4,"Status":2,"Latitude":49.20391845703125,"Longitude":-122.7955093383789,"GeocodingLevelId":1,"MlsNumber":"R2048696","ListingDate_Grouped":"/Date(1457852400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460617200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853143)/","ListAgentFirstName":"","ListAgentLastName":"Angela Guo PREC*","ListAgentPhone":"778-223-6666","ListAgentEmail":"[email protected]","DayOfMarket":28,"StatDOM":27,"PendingDate":null,"ExpiredDate":"/Date(1460617200000)/","SortDate":"/Date(1460617200000)/"},{"Id":27,"ImageFile":"R2058320.jpg","Address":"11081 154 Street","Bath":3,"Bed":5,"Sq":"2350","LotSize":"0.18","ListingPrice":959000,"SoldPrice":0,"DisplayPrice":959000,"PricePerSq":408,"ListingDate":"/Date(1460703600000)/","SoldDate":null,"DaysOnMarket":11,"ListingId":681631078,"Status_UI":2,"Status":0,"Latitude":49.20391845703125,"Longitude":-122.7955093383789,"GeocodingLevelId":1,"MlsNumber":"R2058320","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460790000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852797)/","ListAgentFirstName":"","ListAgentLastName":"Ritchie Zhao PREC*","ListAgentPhone":"778-389-9158","ListAgentEmail":"[email protected]","DayOfMarket":23,"StatDOM":11,"PendingDate":null,"ExpiredDate":"/Date(1468566000000)/","SortDate":"/Date(1460703600000)/"},{"Id":28,"ImageFile":"R2056234.jpg","Address":"11145 154 Street","Bath":3,"Bed":5,"Sq":"2391","LotSize":"0.16","ListingPrice":899800,"SoldPrice":0,"DisplayPrice":899800,"PricePerSq":376,"ListingDate":"/Date(1460444400000)/","SoldDate":null,"DaysOnMarket":14,"ListingId":681631104,"Status_UI":2,"Status":0,"Latitude":49.20499038696289,"Longitude":-122.79551696777344,"GeocodingLevelId":1,"MlsNumber":"R2056234","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460530800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852910)/","ListAgentFirstName":"","ListAgentLastName":"Adam Krajewski","ListAgentPhone":"778-861-5086","ListAgentEmail":"[email protected]","DayOfMarket":26,"StatDOM":14,"PendingDate":null,"ExpiredDate":"/Date(1468306800000)/","SortDate":"/Date(1460444400000)/"},{"Id":29,"ImageFile":"R2049114.jpg","Address":"15395 112a Avenue","Bath":2,"Bed":3,"Sq":"1840","LotSize":"0.3","ListingPrice":1150000,"SoldPrice":0,"DisplayPrice":1150000,"PricePerSq":625,"ListingDate":"/Date(1458543600000)/","SoldDate":null,"DaysOnMarket":36,"ListingId":681631123,"Status_UI":3,"Status":0,"Latitude":49.20720672607422,"Longitude":-122.79566955566406,"GeocodingLevelId":1,"MlsNumber":"R2049114","ListingDate_Grouped":"/Date(1458457200000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1458716400000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853007)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":48,"StatDOM":36,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1458543600000)/"},{"Id":30,"ImageFile":"R2051487.jpg","Address":"11217 153a 
Street","Bath":3,"Bed":4,"Sq":"2875","LotSize":"0.17","ListingPrice":1198000,"SoldPrice":1150000,"DisplayPrice":1150000,"PricePerSq":400,"ListingDate":"/Date(1459321200000)/","SoldDate":"/Date(1460358000000)/","DaysOnMarket":12,"ListingId":681631063,"Status_UI":1,"Status":1,"Latitude":49.20621871948242,"Longitude":-122.79713439941406,"GeocodingLevelId":1,"MlsNumber":"R2051487","ListingDate_Grouped":"/Date(1459062000000)/","SoldDate_Grouped":"/Date(1460271600000)/","StatusDate":"/Date(1461308400000)/","PriceRatio":95.99332220367279,"WhenReceived":"/Date(1461685852737)/","ListAgentFirstName":"","ListAgentLastName":"Jinny Ahn","ListAgentPhone":"778-938-7225","ListAgentEmail":"[email protected]","DayOfMarket":13,"StatDOM":12,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1460358000000)/"},{"Id":31,"ImageFile":"R2059248.jpg","Address":"15319 111a Avenue","Bath":4,"Bed":5,"Sq":"3505","LotSize":"0.17","ListingPrice":928000,"SoldPrice":0,"DisplayPrice":928000,"PricePerSq":265,"ListingDate":"/Date(1461049200000)/","SoldDate":null,"DaysOnMarket":7,"ListingId":681631072,"Status_UI":2,"Status":0,"Latitude":49.20530319213867,"Longitude":-122.79755401611328,"GeocodingLevelId":1,"MlsNumber":"R2059248","ListingDate_Grouped":"/Date(1460876400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461135600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852777)/","ListAgentFirstName":"","ListAgentLastName":"Emily Oh - PREC","ListAgentPhone":"778-896-9000","ListAgentEmail":"[email protected]","DayOfMarket":19,"StatDOM":7,"PendingDate":null,"ExpiredDate":"/Date(1468998000000)/","SortDate":"/Date(1461049200000)/"},{"Id":32,"ImageFile":"R2033999.jpg","Address":"15275 111a Avenue","Bath":4,"Bed":6,"Sq":"3481","LotSize":"0.17","ListingPrice":1080000,"SoldPrice":0,"DisplayPrice":1080000,"PricePerSq":310,"ListingDate":"/Date(1454655600000)/","SoldDate":null,"DaysOnMarket":81,"ListingId":681631138,"Status_UI":3,"Status":0,"Latitude":49.20528793334961,"Longitude":-122.7986831665039,"GeocodingLevelId":1,"MlsNumber":"R2033999","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1455174000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853070)/","ListAgentFirstName":"","ListAgentLastName":"Nav Khangura PREC*","ListAgentPhone":"778-792-1000","ListAgentEmail":"[email protected]","DayOfMarket":93,"StatDOM":81,"PendingDate":null,"ExpiredDate":"/Date(1483167600000)/","SortDate":"/Date(1454655600000)/"},{"Id":33,"ImageFile":"R2024665.jpg","Address":"11221 153 Street","Bath":3,"Bed":3,"Sq":"2208","LotSize":"0.16","ListingPrice":799000,"SoldPrice":835000,"DisplayPrice":835000,"PricePerSq":378,"ListingDate":"/Date(1452668400000)/","SoldDate":"/Date(1454742000000)/","DaysOnMarket":24,"ListingId":681631012,"Status_UI":1,"Status":1,"Latitude":49.20637130737305,"Longitude":-122.7985610961914,"GeocodingLevelId":1,"MlsNumber":"R2024665","ListingDate_Grouped":"/Date(1452409200000)/","SoldDate_Grouped":"/Date(1454223600000)/","StatusDate":"/Date(1456124400000)/","PriceRatio":104.50563204005006,"WhenReceived":"/Date(1461685852233)/","ListAgentFirstName":"","ListAgentLastName":"Lin (Linda) Xu","ListAgentPhone":"604-716-2695","ListAgentEmail":"[email protected]","DayOfMarket":25,"StatDOM":24,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1454742000000)/"},{"Id":34,"ImageFile":"R2030609.jpg","Address":"11321 153a 
Street","Bath":3,"Bed":4,"Sq":"2053","LotSize":"0.43","ListingPrice":849000,"SoldPrice":1010000,"DisplayPrice":1010000,"PricePerSq":492,"ListingDate":"/Date(1454223600000)/","SoldDate":"/Date(1454914800000)/","DaysOnMarket":8,"ListingId":681631041,"Status_UI":1,"Status":1,"Latitude":49.2078857421875,"Longitude":-122.79806518554688,"GeocodingLevelId":1,"MlsNumber":"R2030609","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":"/Date(1454828400000)/","StatusDate":"/Date(1455865200000)/","PriceRatio":118.96348645465254,"WhenReceived":"/Date(1461685852603)/","ListAgentFirstName":"","ListAgentLastName":"Muzaffar Manghat","ListAgentPhone":"604-785-5555","ListAgentEmail":"[email protected]","DayOfMarket":9,"StatDOM":8,"PendingDate":null,"ExpiredDate":"/Date(1459407600000)/","SortDate":"/Date(1454914800000)/"},{"Id":35,"ImageFile":"R2049119.jpg","Address":"10307 155a Street","Bath":3,"Bed":3,"Sq":"1404","LotSize":"0.2","ListingPrice":700000,"SoldPrice":703000,"DisplayPrice":703000,"PricePerSq":501,"ListingDate":"/Date(1458630000000)/","SoldDate":"/Date(1459666800000)/","DaysOnMarket":12,"ListingId":681631003,"Status_UI":1,"Status":1,"Latitude":49.19022750854492,"Longitude":-122.7913589477539,"GeocodingLevelId":1,"MlsNumber":"R2049119","ListingDate_Grouped":"/Date(1458457200000)/","SoldDate_Grouped":"/Date(1459666800000)/","StatusDate":"/Date(1460098800000)/","PriceRatio":100.42857142857143,"WhenReceived":"/Date(1461685852210)/","ListAgentFirstName":"","ListAgentLastName":"Michael Tanlimco","ListAgentPhone":"604-571-0909","ListAgentEmail":"[email protected]","DayOfMarket":13,"StatDOM":12,"PendingDate":null,"ExpiredDate":"/Date(1466578800000)/","SortDate":"/Date(1459666800000)/"},{"Id":36,"ImageFile":"R2055495.jpg","Address":"11036 Swan Crescent","Bath":3,"Bed":4,"Sq":"2544","LotSize":"0.2","ListingPrice":729900,"SoldPrice":0,"DisplayPrice":729900,"PricePerSq":287,"ListingDate":"/Date(1460358000000)/","SoldDate":null,"DaysOnMarket":15,"ListingId":681631139,"Status_UI":3,"Status":0,"Latitude":49.20322036743164,"Longitude":-122.80187225341797,"GeocodingLevelId":1,"MlsNumber":"R2055495","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460358000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853070)/","ListAgentFirstName":"","ListAgentLastName":"Colin Switzer - PREC","ListAgentPhone":"604-583-2000","ListAgentEmail":"[email protected]","DayOfMarket":27,"StatDOM":15,"PendingDate":null,"ExpiredDate":"/Date(1476169200000)/","SortDate":"/Date(1460358000000)/"},{"Id":37,"ImageFile":"R2061381.jpg","Address":"15153 Pheasant Drive","Bath":3,"Bed":4,"Sq":"2125","LotSize":"0.17","ListingPrice":674900,"SoldPrice":0,"DisplayPrice":674900,"PricePerSq":318,"ListingDate":"/Date(1461567600000)/","SoldDate":null,"DaysOnMarket":1,"ListingId":681631124,"Status_UI":2,"Status":0,"Latitude":49.204078674316406,"Longitude":-122.8023681640625,"GeocodingLevelId":1,"MlsNumber":"R2061381","ListingDate_Grouped":"/Date(1461481200000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461567600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853010)/","ListAgentFirstName":"","ListAgentLastName":"Bryan Coombes","ListAgentPhone":"604-533-3491","ListAgentEmail":"[email protected]","DayOfMarket":13,"StatDOM":1,"PendingDate":null,"ExpiredDate":"/Date(1472108400000)/","SortDate":"/Date(1461567600000)/"},{"Id":38,"ImageFile":"R2032241.jpg","Address":"11070 Swan 
Crescent","Bath":3,"Bed":4,"Sq":"2388","LotSize":"0.17","ListingPrice":625000,"SoldPrice":730000,"DisplayPrice":730000,"PricePerSq":306,"ListingDate":"/Date(1454655600000)/","SoldDate":"/Date(1457334000000)/","DaysOnMarket":31,"ListingId":681631053,"Status_UI":1,"Status":1,"Latitude":49.2032585144043,"Longitude":-122.80303192138672,"GeocodingLevelId":1,"MlsNumber":"R2032241","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":"/Date(1457247600000)/","StatusDate":"/Date(1457593200000)/","PriceRatio":116.8,"WhenReceived":"/Date(1461685852693)/","ListAgentFirstName":"","ListAgentLastName":"Todd Murdoch - PREC","ListAgentPhone":"604-250-0507","ListAgentEmail":"[email protected]","DayOfMarket":32,"StatDOM":31,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1457334000000)/"},{"Id":39,"ImageFile":"R2039202.jpg","Address":"15094 Bluebird Crescent","Bath":2,"Bed":6,"Sq":"2343","LotSize":"0.18","ListingPrice":469000,"SoldPrice":0,"DisplayPrice":469000,"PricePerSq":200,"ListingDate":"/Date(1456124400000)/","SoldDate":null,"DaysOnMarket":37,"ListingId":681631170,"Status_UI":4,"Status":2,"Latitude":49.20109939575195,"Longitude":-122.80343627929688,"GeocodingLevelId":1,"MlsNumber":"R2039202","ListingDate_Grouped":"/Date(1456038000000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1459321200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853160)/","ListAgentFirstName":"","ListAgentLastName":"Romeo Di Pietra","ListAgentPhone":"604-250-3547","ListAgentEmail":"[email protected]","DayOfMarket":38,"StatDOM":37,"PendingDate":null,"ExpiredDate":"/Date(1459321200000)/","SortDate":"/Date(1459321200000)/"},{"Id":40,"ImageFile":"R2041359.jpg","Address":"15106 Robin Crescent","Bath":2,"Bed":4,"Sq":"2190","LotSize":"0.17","ListingPrice":649900,"SoldPrice":640000,"DisplayPrice":640000,"PricePerSq":292,"ListingDate":"/Date(1456988400000)/","SoldDate":"/Date(1458630000000)/","DaysOnMarket":19,"ListingId":681631052,"Status_UI":1,"Status":1,"Latitude":49.20502853393555,"Longitude":-122.80352783203125,"GeocodingLevelId":1,"MlsNumber":"R2041359","ListingDate_Grouped":"/Date(1456642800000)/","SoldDate_Grouped":"/Date(1458457200000)/","StatusDate":"/Date(1458802800000)/","PriceRatio":98.47668872134174,"WhenReceived":"/Date(1461685852687)/","ListAgentFirstName":"","ListAgentLastName":"Don Zachary","ListAgentPhone":"604-583-2000","ListAgentEmail":"[email protected]","DayOfMarket":20,"StatDOM":19,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1458630000000)/"},{"Id":41,"ImageFile":"R2052756.jpg","Address":"15085 Eagle Place","Bath":2,"Bed":4,"Sq":"2075","LotSize":"0.17","ListingPrice":688888,"SoldPrice":752000,"DisplayPrice":752000,"PricePerSq":362,"ListingDate":"/Date(1459580400000)/","SoldDate":"/Date(1460962800000)/","DaysOnMarket":16,"ListingId":681630982,"Status_UI":1,"Status":1,"Latitude":49.20248031616211,"Longitude":-122.80423736572266,"GeocodingLevelId":1,"MlsNumber":"R2052756","ListingDate_Grouped":"/Date(1459062000000)/","SoldDate_Grouped":"/Date(1460876400000)/","StatusDate":"/Date(1461308400000)/","PriceRatio":109.16143117604022,"WhenReceived":"/Date(1461685852090)/","ListAgentFirstName":"","ListAgentLastName":"Todd Murdoch - PREC","ListAgentPhone":"604-250-0507","ListAgentEmail":"[email protected]","DayOfMarket":17,"StatDOM":16,"PendingDate":null,"ExpiredDate":"/Date(1483167600000)/","SortDate":"/Date(1460962800000)/"},{"Id":42,"ImageFile":"R2048080.jpg","Address":"10972 Jay 
Crescent","Bath":2,"Bed":4,"Sq":"2027","LotSize":"0.18","ListingPrice":600000,"SoldPrice":782177,"DisplayPrice":782177,"PricePerSq":386,"ListingDate":"/Date(1458630000000)/","SoldDate":"/Date(1459062000000)/","DaysOnMarket":5,"ListingId":681631059,"Status_UI":1,"Status":1,"Latitude":49.20274353027344,"Longitude":-122.80457305908203,"GeocodingLevelId":1,"MlsNumber":"R2048080","ListingDate_Grouped":"/Date(1458457200000)/","SoldDate_Grouped":"/Date(1459062000000)/","StatusDate":"/Date(1459926000000)/","PriceRatio":130.36283333333333,"WhenReceived":"/Date(1461685852720)/","ListAgentFirstName":"","ListAgentLastName":"Chris Whitehead - PREC","ListAgentPhone":"604-542-2444","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1474527600000)/","SortDate":"/Date(1459062000000)/"},{"Id":43,"ImageFile":"R2046757.jpg","Address":"10973 Jay Crescent","Bath":2,"Bed":4,"Sq":"2138","LotSize":"0.16","ListingPrice":718000,"SoldPrice":778000,"DisplayPrice":778000,"PricePerSq":364,"ListingDate":"/Date(1458025200000)/","SoldDate":"/Date(1458543600000)/","DaysOnMarket":6,"ListingId":681630999,"Status_UI":1,"Status":1,"Latitude":49.20268249511719,"Longitude":-122.80497741699219,"GeocodingLevelId":1,"MlsNumber":"R2046757","ListingDate_Grouped":"/Date(1457852400000)/","SoldDate_Grouped":"/Date(1458457200000)/","StatusDate":"/Date(1458975600000)/","PriceRatio":108.35654596100278,"WhenReceived":"/Date(1461685852190)/","ListAgentFirstName":"","ListAgentLastName":"Terry Vato","ListAgentPhone":"604-729-0728","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1465974000000)/","SortDate":"/Date(1458543600000)/"},{"Id":44,"ImageFile":"R2030757.jpg","Address":"15032 Swallow Drive","Bath":3,"Bed":5,"Sq":"2398","LotSize":"0.16","ListingPrice":749880,"SoldPrice":805000,"DisplayPrice":805000,"PricePerSq":336,"ListingDate":"/Date(1454396400000)/","SoldDate":"/Date(1460530800000)/","DaysOnMarket":71,"ListingId":681631031,"Status_UI":1,"Status":1,"Latitude":49.204261779785156,"Longitude":-122.8056411743164,"GeocodingLevelId":1,"MlsNumber":"R2030757","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":"/Date(1460271600000)/","StatusDate":"/Date(1460962800000)/","PriceRatio":107.3505094148397,"WhenReceived":"/Date(1461685852550)/","ListAgentFirstName":"","ListAgentLastName":"Brent Roberts","ListAgentPhone":"604-585-7653","ListAgentEmail":"[email protected]","DayOfMarket":72,"StatDOM":71,"PendingDate":null,"ExpiredDate":"/Date(1470121200000)/","SortDate":"/Date(1460530800000)/"},{"Id":45,"ImageFile":"R2039538.jpg","Address":"10968 Partridge Crescent","Bath":2,"Bed":5,"Sq":"1980","LotSize":"0.17","ListingPrice":749000,"SoldPrice":730000,"DisplayPrice":730000,"PricePerSq":369,"ListingDate":"/Date(1456383600000)/","SoldDate":"/Date(1456383600000)/","DaysOnMarket":0,"ListingId":681631040,"Status_UI":1,"Status":1,"Latitude":49.2021598815918,"Longitude":-122.80612182617188,"GeocodingLevelId":1,"MlsNumber":"R2039538","ListingDate_Grouped":"/Date(1456038000000)/","SoldDate_Grouped":"/Date(1456038000000)/","StatusDate":"/Date(1456729200000)/","PriceRatio":97.46328437917224,"WhenReceived":"/Date(1461685852600)/","ListAgentFirstName":"","ListAgentLastName":"Jinder Jhajj - PREC","ListAgentPhone":"604-825-7160","ListAgentEmail":"[email protected]","DayOfMarket":1,"StatDOM":0,"PendingDate":null,"ExpiredDate":"/Date(1472540400000)/","SortDate":"/Date(1456383600000)/"},{"Id":46,"ImageFile":"R2022369.jpg","Address":"15017 Pheasant 
Drive","Bath":2,"Bed":4,"Sq":"2199","LotSize":"0.18","ListingPrice":599888,"SoldPrice":0,"DisplayPrice":599888,"PricePerSq":273,"ListingDate":"/Date(1451890800000)/","SoldDate":null,"DaysOnMarket":14,"ListingId":681631167,"Status_UI":4,"Status":2,"Latitude":49.20534133911133,"Longitude":-122.80580139160156,"GeocodingLevelId":1,"MlsNumber":"R2022369","ListingDate_Grouped":"/Date(1451804400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1458198000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853153)/","ListAgentFirstName":"","ListAgentLastName":"Todd Murdoch - PREC","ListAgentPhone":"604-250-0507","ListAgentEmail":"[email protected]","DayOfMarket":74,"StatDOM":14,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1467270000000)/"},{"Id":47,"ImageFile":"R2027756.jpg","Address":"15528 100 Avenue","Bath":3,"Bed":5,"Sq":"2440","LotSize":"0.17","ListingPrice":748888,"SoldPrice":0,"DisplayPrice":748888,"PricePerSq":307,"ListingDate":"/Date(1453273200000)/","SoldDate":null,"DaysOnMarket":97,"ListingId":681631135,"Status_UI":3,"Status":0,"Latitude":49.18415069580078,"Longitude":-122.79174041748047,"GeocodingLevelId":1,"MlsNumber":"R2027756","ListingDate_Grouped":"/Date(1453014000000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1453705200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853060)/","ListAgentFirstName":"","ListAgentLastName":"Amrik Sull","ListAgentPhone":"604-589-0001","ListAgentEmail":"[email protected]","DayOfMarket":109,"StatDOM":97,"PendingDate":null,"ExpiredDate":"/Date(1461999600000)/","SortDate":"/Date(1453273200000)/"},{"Id":48,"ImageFile":"R2058152.jpg","Address":"14985 108 Avenue","Bath":3,"Bed":6,"Sq":"1990","LotSize":"0.17","ListingPrice":739900,"SoldPrice":0,"DisplayPrice":739900,"PricePerSq":372,"ListingDate":"/Date(1460358000000)/","SoldDate":null,"DaysOnMarket":15,"ListingId":681631068,"Status_UI":3,"Status":0,"Latitude":49.198909759521484,"Longitude":-122.80663299560547,"GeocodingLevelId":1,"MlsNumber":"R2058152","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460703600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852763)/","ListAgentFirstName":"","ListAgentLastName":"Daniel Romey - PREC","ListAgentPhone":"604-754-0120","ListAgentEmail":"[email protected]","DayOfMarket":27,"StatDOM":15,"PendingDate":null,"ExpiredDate":"/Date(1468566000000)/","SortDate":"/Date(1460358000000)/"},{"Id":49,"ImageFile":"R2059233.jpg","Address":"14986 Kew Drive","Bath":2,"Bed":5,"Sq":"2116","LotSize":"0.17","ListingPrice":649000,"SoldPrice":0,"DisplayPrice":649000,"PricePerSq":307,"ListingDate":"/Date(1460185200000)/","SoldDate":null,"DaysOnMarket":17,"ListingId":681631084,"Status_UI":3,"Status":0,"Latitude":49.20587921142578,"Longitude":-122.80683135986328,"GeocodingLevelId":1,"MlsNumber":"R2059233","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461049200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852827)/","ListAgentFirstName":"","ListAgentLastName":"Connie Tavernaro","ListAgentPhone":"","ListAgentEmail":"[email protected]","DayOfMarket":29,"StatDOM":17,"PendingDate":null,"ExpiredDate":"/Date(1465455600000)/","SortDate":"/Date(1460185200000)/"},{"Id":50,"ImageFile":"R2033647.jpg","Address":"11360 Lansdowne 
Drive","Bath":2,"Bed":5,"Sq":"2100","LotSize":"0.17","ListingPrice":599000,"SoldPrice":678888,"DisplayPrice":678888,"PricePerSq":323,"ListingDate":"/Date(1455001200000)/","SoldDate":"/Date(1455433200000)/","DaysOnMarket":5,"ListingId":681631056,"Status_UI":1,"Status":1,"Latitude":49.20841979980469,"Longitude":-122.80689239501953,"GeocodingLevelId":1,"MlsNumber":"R2033647","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":"/Date(1455433200000)/","StatusDate":"/Date(1455692400000)/","PriceRatio":113.33689482470784,"WhenReceived":"/Date(1461685852703)/","ListAgentFirstName":"","ListAgentLastName":"Bob Harika","ListAgentPhone":"778-791-1000","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1469948400000)/","SortDate":"/Date(1455433200000)/"},{"Id":51,"ImageFile":"R2034601.jpg","Address":"10999 Partridge Crescent","Bath":3,"Bed":5,"Sq":"2000","LotSize":"0.2","ListingPrice":629000,"SoldPrice":620100,"DisplayPrice":620100,"PricePerSq":310,"ListingDate":"/Date(1455260400000)/","SoldDate":"/Date(1456383600000)/","DaysOnMarket":13,"ListingId":681631034,"Status_UI":1,"Status":1,"Latitude":49.20212936401367,"Longitude":-122.80799102783203,"GeocodingLevelId":1,"MlsNumber":"R2034601","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":"/Date(1456038000000)/","StatusDate":"/Date(1457593200000)/","PriceRatio":98.58505564387917,"WhenReceived":"/Date(1461685852567)/","ListAgentFirstName":"","ListAgentLastName":"Jagir Singh","ListAgentPhone":"","ListAgentEmail":"[email protected]","DayOfMarket":14,"StatDOM":13,"PendingDate":null,"ExpiredDate":"/Date(1483081200000)/","SortDate":"/Date(1456383600000)/"},{"Id":52,"ImageFile":"R2051723.jpg","Address":"11361 Lansdowne Drive","Bath":2,"Bed":3,"Sq":"2150","LotSize":"0.23","ListingPrice":649900,"SoldPrice":705000,"DisplayPrice":705000,"PricePerSq":328,"ListingDate":"/Date(1459407600000)/","SoldDate":"/Date(1459926000000)/","DaysOnMarket":6,"ListingId":681630977,"Status_UI":1,"Status":1,"Latitude":49.20861053466797,"Longitude":-122.8073501586914,"GeocodingLevelId":1,"MlsNumber":"R2051723","ListingDate_Grouped":"/Date(1459062000000)/","SoldDate_Grouped":"/Date(1459666800000)/","StatusDate":"/Date(1460185200000)/","PriceRatio":108.47822741960302,"WhenReceived":"/Date(1461685852070)/","ListAgentFirstName":"","ListAgentLastName":"A. 
June Busch","ListAgentPhone":"604-418-1242","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1459926000000)/"},{"Id":53,"ImageFile":"R2050885.jpg","Address":"9829 156 Street","Bath":3,"Bed":4,"Sq":"2786","LotSize":"0.19","ListingPrice":790000,"SoldPrice":910000,"DisplayPrice":910000,"PricePerSq":327,"ListingDate":"/Date(1459321200000)/","SoldDate":"/Date(1460358000000)/","DaysOnMarket":12,"ListingId":681630956,"Status_UI":1,"Status":1,"Latitude":49.181209564208984,"Longitude":-122.79015350341797,"GeocodingLevelId":1,"MlsNumber":"R2050885","ListingDate_Grouped":"/Date(1459062000000)/","SoldDate_Grouped":"/Date(1460271600000)/","StatusDate":"/Date(1461308400000)/","PriceRatio":115.18987341772151,"WhenReceived":"/Date(1461685851950)/","ListAgentFirstName":"","ListAgentLastName":"Terry Wang","ListAgentPhone":"604-928-1826","ListAgentEmail":"[email protected]","DayOfMarket":13,"StatDOM":12,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1460358000000)/"},{"Id":54,"ImageFile":"R2035254.jpg","Address":"14919 Glen Avon Drive","Bath":6,"Bed":6,"Sq":"4061","LotSize":"0.2","ListingPrice":1240000,"SoldPrice":0,"DisplayPrice":1240000,"PricePerSq":305,"ListingDate":"/Date(1455606000000)/","SoldDate":null,"DaysOnMarket":62,"ListingId":681631158,"Status_UI":4,"Status":2,"Latitude":49.20566940307617,"Longitude":-122.80863189697266,"GeocodingLevelId":1,"MlsNumber":"R2035254","ListingDate_Grouped":"/Date(1455433200000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460962800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853127)/","ListAgentFirstName":"","ListAgentLastName":"Bhupinder S. Litt","ListAgentPhone":"604-671-1730","ListAgentEmail":"[email protected]","DayOfMarket":63,"StatDOM":62,"PendingDate":null,"ExpiredDate":"/Date(1469948400000)/","SortDate":"/Date(1469948400000)/"},{"Id":55,"ImageFile":"R2030278.jpg","Address":"10990 Oriole Drive","Bath":2,"Bed":5,"Sq":"2350","LotSize":"0.19","ListingPrice":619900,"SoldPrice":726500,"DisplayPrice":726500,"PricePerSq":309,"ListingDate":"/Date(1454310000000)/","SoldDate":"/Date(1455001200000)/","DaysOnMarket":8,"ListingId":681631032,"Status_UI":1,"Status":1,"Latitude":49.202430725097656,"Longitude":-122.80899810791016,"GeocodingLevelId":1,"MlsNumber":"R2030278","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":"/Date(1454828400000)/","StatusDate":"/Date(1455174000000)/","PriceRatio":117.19632198741732,"WhenReceived":"/Date(1461685852553)/","ListAgentFirstName":"","ListAgentLastName":"Stephan Zandbergen","ListAgentPhone":"604-970-6995","ListAgentEmail":"[email protected]","DayOfMarket":9,"StatDOM":8,"PendingDate":null,"ExpiredDate":"/Date(1464764400000)/","SortDate":"/Date(1455001200000)/"},{"Id":56,"ImageFile":"R2032678.jpg","Address":"11369 Loughren Drive","Bath":3,"Bed":4,"Sq":"2070","LotSize":"0.17","ListingPrice":609898,"SoldPrice":661000,"DisplayPrice":661000,"PricePerSq":319,"ListingDate":"/Date(1455001200000)/","SoldDate":"/Date(1455433200000)/","DaysOnMarket":5,"ListingId":681630958,"Status_UI":1,"Status":1,"Latitude":49.20901107788086,"Longitude":-122.80892181396484,"GeocodingLevelId":1,"MlsNumber":"R2032678","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":"/Date(1455433200000)/","StatusDate":"/Date(1455778800000)/","PriceRatio":108.37877809076272,"WhenReceived":"/Date(1461685851957)/","ListAgentFirstName":"","ListAgentLastName":"Scott 
Williams","ListAgentPhone":"604-595-2896","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1461826800000)/","SortDate":"/Date(1455433200000)/"},{"Id":57,"ImageFile":"R2057328.jpg","Address":"11334 Loughren Drive","Bath":2,"Bed":6,"Sq":"2300","LotSize":"0.19","ListingPrice":599000,"SoldPrice":0,"DisplayPrice":599000,"PricePerSq":260,"ListingDate":"/Date(1460271600000)/","SoldDate":null,"DaysOnMarket":16,"ListingId":681631081,"Status_UI":3,"Status":0,"Latitude":49.20851135253906,"Longitude":-122.80909729003906,"GeocodingLevelId":1,"MlsNumber":"R2057328","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460530800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852807)/","ListAgentFirstName":"","ListAgentLastName":"Benjo Peralta","ListAgentPhone":"778-238-6777","ListAgentEmail":"[email protected]","DayOfMarket":28,"StatDOM":16,"PendingDate":null,"ExpiredDate":"/Date(1469862000000)/","SortDate":"/Date(1460271600000)/"},{"Id":58,"ImageFile":"R2061119.jpg","Address":"9735 153a Street","Bath":3,"Bed":5,"Sq":"2370","LotSize":"0.17","ListingPrice":788800,"SoldPrice":0,"DisplayPrice":788800,"PricePerSq":333,"ListingDate":"/Date(1461135600000)/","SoldDate":null,"DaysOnMarket":6,"ListingId":681631115,"Status_UI":2,"Status":0,"Latitude":49.179447174072266,"Longitude":-122.7970199584961,"GeocodingLevelId":1,"MlsNumber":"R2061119","ListingDate_Grouped":"/Date(1460876400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461394800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852970)/","ListAgentFirstName":"","ListAgentLastName":"Beverly Ujhazy","ListAgentPhone":"604-889-2470","ListAgentEmail":"[email protected]","DayOfMarket":18,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1480489200000)/","SortDate":"/Date(1461135600000)/"},{"Id":59,"ImageFile":"R2048402.jpg","Address":"9729 153a Street","Bath":3,"Bed":5,"Sq":"2558","LotSize":"0.17","ListingPrice":799000,"SoldPrice":888000,"DisplayPrice":888000,"PricePerSq":347,"ListingDate":"/Date(1458630000000)/","SoldDate":"/Date(1459234800000)/","DaysOnMarket":7,"ListingId":681631006,"Status_UI":1,"Status":1,"Latitude":49.179325103759766,"Longitude":-122.7970199584961,"GeocodingLevelId":1,"MlsNumber":"R2048402","ListingDate_Grouped":"/Date(1458457200000)/","SoldDate_Grouped":"/Date(1459062000000)/","StatusDate":"/Date(1459494000000)/","PriceRatio":111.13892365456822,"WhenReceived":"/Date(1461685852217)/","ListAgentFirstName":"","ListAgentLastName":"Minerva Pizarro","ListAgentPhone":"604-785-3167","ListAgentEmail":"[email protected]","DayOfMarket":8,"StatDOM":7,"PendingDate":null,"ExpiredDate":"/Date(1469948400000)/","SortDate":"/Date(1459234800000)/"},{"Id":60,"ImageFile":"R2046706.jpg","Address":"15525 96b Avenue","Bath":3,"Bed":3,"Sq":"2036","LotSize":"0.1","ListingPrice":699800,"SoldPrice":702000,"DisplayPrice":702000,"PricePerSq":345,"ListingDate":"/Date(1458025200000)/","SoldDate":"/Date(1458630000000)/","DaysOnMarket":7,"ListingId":681630976,"Status_UI":1,"Status":1,"Latitude":49.177913665771484,"Longitude":-122.7925796508789,"GeocodingLevelId":1,"MlsNumber":"R2046706","ListingDate_Grouped":"/Date(1457852400000)/","SoldDate_Grouped":"/Date(1458457200000)/","StatusDate":"/Date(1460358000000)/","PriceRatio":100.31437553586738,"WhenReceived":"/Date(1461685852057)/","ListAgentFirstName":"","ListAgentLastName":"Mike Marfori - PREC","ListAgentPhone":"","ListAgentEmail":"[email 
protected]","DayOfMarket":8,"StatDOM":7,"PendingDate":null,"ExpiredDate":"/Date(1463295600000)/","SortDate":"/Date(1458630000000)/"},{"Id":61,"ImageFile":"R2059610.jpg","Address":"11095 148 Street","Bath":2,"Bed":5,"Sq":"2291","LotSize":"0.18","ListingPrice":749000,"SoldPrice":0,"DisplayPrice":749000,"PricePerSq":327,"ListingDate":"/Date(1461049200000)/","SoldDate":null,"DaysOnMarket":7,"ListingId":681631074,"Status_UI":2,"Status":0,"Latitude":49.20412826538086,"Longitude":-122.8128662109375,"GeocodingLevelId":1,"MlsNumber":"R2059610","ListingDate_Grouped":"/Date(1460876400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461049200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852783)/","ListAgentFirstName":"","ListAgentLastName":"Minerva Pizarro","ListAgentPhone":"604-785-3167","ListAgentEmail":"[email protected]","DayOfMarket":19,"StatDOM":7,"PendingDate":null,"ExpiredDate":"/Date(1472540400000)/","SortDate":"/Date(1461049200000)/"},{"Id":62,"ImageFile":"R2027796.jpg","Address":"14790 109a Avenue","Bath":2,"Bed":6,"Sq":"2024","LotSize":"0.17","ListingPrice":649000,"SoldPrice":0,"DisplayPrice":649000,"PricePerSq":321,"ListingDate":"/Date(1453273200000)/","SoldDate":null,"DaysOnMarket":9,"ListingId":681631168,"Status_UI":4,"Status":2,"Latitude":49.2013053894043,"Longitude":-122.81297302246094,"GeocodingLevelId":1,"MlsNumber":"R2027796","ListingDate_Grouped":"/Date(1453014000000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1459148400000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853157)/","ListAgentFirstName":"","ListAgentLastName":"Daniel Romey - PREC","ListAgentPhone":"604-754-0120","ListAgentEmail":"[email protected]","DayOfMarket":69,"StatDOM":9,"PendingDate":null,"ExpiredDate":"/Date(1461999600000)/","SortDate":"/Date(1461999600000)/"},{"Id":63,"ImageFile":"R2058143.jpg","Address":"15258 97a Avenue","Bath":3,"Bed":4,"Sq":"2001","LotSize":"0.16","ListingPrice":749999,"SoldPrice":0,"DisplayPrice":749999,"PricePerSq":375,"ListingDate":"/Date(1460530800000)/","SoldDate":null,"DaysOnMarket":13,"ListingId":681631075,"Status_UI":2,"Status":0,"Latitude":49.179256439208984,"Longitude":-122.79896545410156,"GeocodingLevelId":1,"MlsNumber":"R2058143","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460703600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852787)/","ListAgentFirstName":"","ListAgentLastName":"Sunny Ball","ListAgentPhone":"604-354-3000","ListAgentEmail":"[email protected]","DayOfMarket":25,"StatDOM":13,"PendingDate":null,"ExpiredDate":"/Date(1469084400000)/","SortDate":"/Date(1460530800000)/"},{"Id":64,"ImageFile":"R2044287.jpg","Address":"9590 156 Street","Bath":3,"Bed":6,"Sq":"2299","LotSize":"0.16","ListingPrice":649880,"SoldPrice":716500,"DisplayPrice":716500,"PricePerSq":312,"ListingDate":"/Date(1457506800000)/","SoldDate":"/Date(1458802800000)/","DaysOnMarket":15,"ListingId":681630964,"Status_UI":1,"Status":1,"Latitude":49.17668914794922,"Longitude":-122.78972625732422,"GeocodingLevelId":1,"MlsNumber":"R2044287","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":"/Date(1458457200000)/","StatusDate":"/Date(1459753200000)/","PriceRatio":110.25112328429864,"WhenReceived":"/Date(1461685851990)/","ListAgentFirstName":"","ListAgentLastName":"Brent Roberts","ListAgentPhone":"604-585-7653","ListAgentEmail":"[email protected]","DayOfMarket":16,"StatDOM":15,"PendingDate":null,"ExpiredDate":"/Date(1473404400000)/","SortDate":"/Date(1458802800000)/"},{"Id":65,"ImageFile":"R2050460.jpg","Address":"15568 96th 
Avenue","Bath":5,"Bed":6,"Sq":"3280","LotSize":"0.18","ListingPrice":868000,"SoldPrice":0,"DisplayPrice":868000,"PricePerSq":265,"ListingDate":"/Date(1459234800000)/","SoldDate":null,"DaysOnMarket":28,"ListingId":681631130,"Status_UI":3,"Status":0,"Latitude":49.17673873901367,"Longitude":-122.7908935546875,"GeocodingLevelId":1,"MlsNumber":"R2050460","ListingDate_Grouped":"/Date(1459062000000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1459321200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853030)/","ListAgentFirstName":"","ListAgentLastName":"Jay Sidhu","ListAgentPhone":"604-339-5132","ListAgentEmail":"[email protected]","DayOfMarket":40,"StatDOM":28,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1459234800000)/"},{"Id":66,"ImageFile":"R2043362.jpg","Address":"15451 96 Avenue","Bath":5,"Bed":6,"Sq":"3660","LotSize":"0.19","ListingPrice":929900,"SoldPrice":930000,"DisplayPrice":930000,"PricePerSq":254,"ListingDate":"/Date(1457334000000)/","SoldDate":"/Date(1457679600000)/","DaysOnMarket":4,"ListingId":681630955,"Status_UI":1,"Status":1,"Latitude":49.177059173583984,"Longitude":-122.79408264160156,"GeocodingLevelId":1,"MlsNumber":"R2043362","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":"/Date(1457247600000)/","StatusDate":"/Date(1458025200000)/","PriceRatio":100.0107538444994,"WhenReceived":"/Date(1461685851947)/","ListAgentFirstName":"","ListAgentLastName":"John Tsai PREC*","ListAgentPhone":"604-681-8898","ListAgentEmail":"[email protected]","DayOfMarket":5,"StatDOM":4,"PendingDate":null,"ExpiredDate":"/Date(1462604400000)/","SortDate":"/Date(1457679600000)/"},{"Id":67,"ImageFile":"R2033470.jpg","Address":"15451 96 Avenue","Bath":5,"Bed":6,"Sq":"3660","LotSize":"0.19","ListingPrice":930000,"SoldPrice":0,"DisplayPrice":930000,"PricePerSq":254,"ListingDate":"/Date(1454914800000)/","SoldDate":null,"DaysOnMarket":16,"ListingId":681631152,"Status_UI":4,"Status":2,"Latitude":49.177059173583984,"Longitude":-122.79408264160156,"GeocodingLevelId":1,"MlsNumber":"R2033470","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1456470000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853110)/","ListAgentFirstName":"","ListAgentLastName":"Elvira Hall","ListAgentPhone":"604-783-9632","ListAgentEmail":"[email protected]","DayOfMarket":19,"StatDOM":16,"PendingDate":null,"ExpiredDate":"/Date(1456470000000)/","SortDate":"/Date(1456470000000)/"},{"Id":68,"ImageFile":"R2048331.jpg","Address":"11438 Roxburgh Road","Bath":6,"Bed":10,"Sq":"4230","LotSize":"0.19","ListingPrice":968888,"SoldPrice":0,"DisplayPrice":968888,"PricePerSq":229,"ListingDate":"/Date(1458630000000)/","SoldDate":null,"DaysOnMarket":35,"ListingId":681631134,"Status_UI":3,"Status":0,"Latitude":49.21049880981445,"Longitude":-122.81224822998047,"GeocodingLevelId":1,"MlsNumber":"R2048331","ListingDate_Grouped":"/Date(1458457200000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1458630000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853050)/","ListAgentFirstName":"","ListAgentLastName":"Avtar Rai - PREC","ListAgentPhone":"","ListAgentEmail":"[email protected]","DayOfMarket":47,"StatDOM":35,"PendingDate":null,"ExpiredDate":"/Date(1469689200000)/","SortDate":"/Date(1458630000000)/"},{"Id":69,"ImageFile":"R2045171.jpg","Address":"9627 154 
Street","Bath":2,"Bed":5,"Sq":"2100","LotSize":"0.22","ListingPrice":750000,"SoldPrice":0,"DisplayPrice":750000,"PricePerSq":357,"ListingDate":"/Date(1457506800000)/","SoldDate":null,"DaysOnMarket":48,"ListingId":681631083,"Status_UI":3,"Status":0,"Latitude":49.17741012573242,"Longitude":-122.79573059082031,"GeocodingLevelId":1,"MlsNumber":"R2045171","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1457766000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852813)/","ListAgentFirstName":"","ListAgentLastName":"Daljinder Aujla","ListAgentPhone":"778-323-6505","ListAgentEmail":"[email protected]","DayOfMarket":60,"StatDOM":48,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1457506800000)/"},{"Id":70,"ImageFile":"R2031855.jpg","Address":"11450 Roxburgh Road","Bath":2,"Bed":7,"Sq":"2610","LotSize":"0.14","ListingPrice":499500,"SoldPrice":0,"DisplayPrice":499500,"PricePerSq":191,"ListingDate":"/Date(1453964400000)/","SoldDate":null,"DaysOnMarket":89,"ListingId":681631094,"Status_UI":3,"Status":0,"Latitude":49.210601806640625,"Longitude":-122.81226348876953,"GeocodingLevelId":1,"MlsNumber":"R2031855","ListingDate_Grouped":"/Date(1453618800000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1454655600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852867)/","ListAgentFirstName":"","ListAgentLastName":"Chamkaur Gill","ListAgentPhone":"604-572-1211","ListAgentEmail":"[email protected]","DayOfMarket":101,"StatDOM":89,"PendingDate":null,"ExpiredDate":"/Date(1461740400000)/","SortDate":"/Date(1453964400000)/"},{"Id":71,"ImageFile":"R2047509.jpg","Address":"14751 Wellington Drive","Bath":6,"Bed":7,"Sq":"3850","LotSize":"0.18","ListingPrice":989000,"SoldPrice":0,"DisplayPrice":989000,"PricePerSq":257,"ListingDate":"/Date(1457852400000)/","SoldDate":null,"DaysOnMarket":44,"ListingId":681631116,"Status_UI":3,"Status":0,"Latitude":49.20930099487305,"Longitude":-122.81310272216797,"GeocodingLevelId":1,"MlsNumber":"R2047509","ListingDate_Grouped":"/Date(1457852400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1458284400000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852973)/","ListAgentFirstName":"","ListAgentLastName":"Amrit Punni","ListAgentPhone":"604-765-3620","ListAgentEmail":"[email protected]","DayOfMarket":56,"StatDOM":44,"PendingDate":null,"ExpiredDate":"/Date(1471071600000)/","SortDate":"/Date(1457852400000)/"},{"Id":72,"ImageFile":"R2056663.jpg","Address":"14744 Wellington Drive","Bath":3,"Bed":4,"Sq":"2116","LotSize":"0.21","ListingPrice":699900,"SoldPrice":820000,"DisplayPrice":820000,"PricePerSq":388,"ListingDate":"/Date(1460444400000)/","SoldDate":"/Date(1460962800000)/","DaysOnMarket":6,"ListingId":681631013,"Status_UI":1,"Status":1,"Latitude":49.20901107788086,"Longitude":-122.81331634521484,"GeocodingLevelId":1,"MlsNumber":"R2056663","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":"/Date(1460876400000)/","StatusDate":"/Date(1461222000000)/","PriceRatio":117.15959422774682,"WhenReceived":"/Date(1461685852237)/","ListAgentFirstName":"","ListAgentLastName":"Dale Redekop","ListAgentPhone":"604-315-4431","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1469948400000)/","SortDate":"/Date(1460962800000)/"},{"Id":73,"ImageFile":"R2042285.jpg","Address":"11292 Roxburgh 
Road","Bath":3,"Bed":6,"Sq":"2343","LotSize":"0.18","ListingPrice":679900,"SoldPrice":655000,"DisplayPrice":655000,"PricePerSq":280,"ListingDate":"/Date(1457334000000)/","SoldDate":"/Date(1457938800000)/","DaysOnMarket":7,"ListingId":681630960,"Status_UI":1,"Status":1,"Latitude":49.20745086669922,"Longitude":-122.8137435913086,"GeocodingLevelId":1,"MlsNumber":"R2042285","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":"/Date(1457852400000)/","StatusDate":"/Date(1458630000000)/","PriceRatio":96.3376967201059,"WhenReceived":"/Date(1461685851973)/","ListAgentFirstName":"","ListAgentLastName":"Sukh Sangha","ListAgentPhone":"604-512-3797","ListAgentEmail":"[email protected]","DayOfMarket":8,"StatDOM":7,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1457938800000)/"},{"Id":74,"ImageFile":"R2039364.jpg","Address":"15286 96a Avenue","Bath":2,"Bed":3,"Sq":"1570","LotSize":"0.17","ListingPrice":680000,"SoldPrice":750000,"DisplayPrice":750000,"PricePerSq":478,"ListingDate":"/Date(1456902000000)/","SoldDate":"/Date(1457334000000)/","DaysOnMarket":5,"ListingId":681631050,"Status_UI":1,"Status":1,"Latitude":49.17763137817383,"Longitude":-122.79804992675781,"GeocodingLevelId":1,"MlsNumber":"R2039364","ListingDate_Grouped":"/Date(1456642800000)/","SoldDate_Grouped":"/Date(1457247600000)/","StatusDate":"/Date(1457593200000)/","PriceRatio":110.29411764705883,"WhenReceived":"/Date(1461685852663)/","ListAgentFirstName":"","ListAgentLastName":"Donna-Leah Warren","ListAgentPhone":"604-992-2010","ListAgentEmail":"[email protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1457334000000)/"},{"Id":75,"ImageFile":"R2052649.jpg","Address":"15368 96 Avenue","Bath":4,"Bed":5,"Sq":"2336","LotSize":"0.16","ListingPrice":779000,"SoldPrice":0,"DisplayPrice":779000,"PricePerSq":333,"ListingDate":"/Date(1459753200000)/","SoldDate":null,"DaysOnMarket":22,"ListingId":681631129,"Status_UI":3,"Status":0,"Latitude":49.176719665527344,"Longitude":-122.7964096069336,"GeocodingLevelId":1,"MlsNumber":"R2052649","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1459753200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853027)/","ListAgentFirstName":"","ListAgentLastName":"Aman Ghuman","ListAgentPhone":"778-899-4500","ListAgentEmail":"[email protected]","DayOfMarket":34,"StatDOM":22,"PendingDate":null,"ExpiredDate":"/Date(1469948400000)/","SortDate":"/Date(1459753200000)/"},{"Id":76,"ImageFile":"R2035565.jpg","Address":"14712 110 Avenue","Bath":3,"Bed":6,"Sq":"2588","LotSize":"0.22","ListingPrice":638888,"SoldPrice":718000,"DisplayPrice":718000,"PricePerSq":277,"ListingDate":"/Date(1455346800000)/","SoldDate":"/Date(1456124400000)/","DaysOnMarket":9,"ListingId":681630989,"Status_UI":1,"Status":1,"Latitude":49.2022705078125,"Longitude":-122.81477355957031,"GeocodingLevelId":1,"MlsNumber":"R2035565","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":"/Date(1456038000000)/","StatusDate":"/Date(1456470000000)/","PriceRatio":112.38276505428182,"WhenReceived":"/Date(1461685852150)/","ListAgentFirstName":"","ListAgentLastName":"Dwayne Engelsman","ListAgentPhone":"604-657-5591","ListAgentEmail":"[email protected]","DayOfMarket":10,"StatDOM":9,"PendingDate":null,"ExpiredDate":"/Date(1469948400000)/","SortDate":"/Date(1456124400000)/"},{"Id":77,"ImageFile":"R2034698.jpg","Address":"9625 152b 
Street","Bath":3,"Bed":3,"Sq":"1903","LotSize":"0.16","ListingPrice":619800,"SoldPrice":718000,"DisplayPrice":718000,"PricePerSq":377,"ListingDate":"/Date(1455519600000)/","SoldDate":"/Date(1456038000000)/","DaysOnMarket":6,"ListingId":681631064,"Status_UI":1,"Status":1,"Latitude":49.17719268798828,"Longitude":-122.79829406738281,"GeocodingLevelId":1,"MlsNumber":"R2034698","ListingDate_Grouped":"/Date(1455433200000)/","SoldDate_Grouped":"/Date(1456038000000)/","StatusDate":"/Date(1456210800000)/","PriceRatio":115.84382058728622,"WhenReceived":"/Date(1461685852750)/","ListAgentFirstName":"","ListAgentLastName":"Mike Marfori - PREC","ListAgentPhone":"","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1463295600000)/","SortDate":"/Date(1456038000000)/"},{"Id":78,"ImageFile":"R2034562.jpg","Address":"11438 Surrey Road","Bath":4,"Bed":5,"Sq":"2344","LotSize":"0.21","ListingPrice":699999,"SoldPrice":0,"DisplayPrice":699999,"PricePerSq":299,"ListingDate":"/Date(1455087600000)/","SoldDate":null,"DaysOnMarket":26,"ListingId":681631154,"Status_UI":4,"Status":2,"Latitude":49.21031951904297,"Longitude":-122.81360626220703,"GeocodingLevelId":1,"MlsNumber":"R2034562","ListingDate_Grouped":"/Date(1454828400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1457334000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853113)/","ListAgentFirstName":"","ListAgentLastName":"Sukhdev Grewal","ListAgentPhone":"604-581-8400","ListAgentEmail":"[email protected]","DayOfMarket":27,"StatDOM":26,"PendingDate":null,"ExpiredDate":"/Date(1485846000000)/","SortDate":"/Date(1485846000000)/"},{"Id":79,"ImageFile":"R2002344.jpg","Address":"11438 Surrey Road","Bath":4,"Bed":5,"Sq":"2344","LotSize":"0.21","ListingPrice":629000,"SoldPrice":0,"DisplayPrice":629000,"PricePerSq":268,"ListingDate":"/Date(1442214000000)/","SoldDate":null,"DaysOnMarket":149,"ListingId":681631161,"Status_UI":4,"Status":2,"Latitude":49.21031951904297,"Longitude":-122.81360626220703,"GeocodingLevelId":1,"MlsNumber":"R2002344","ListingDate_Grouped":"/Date(1442127600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1455087600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853137)/","ListAgentFirstName":"","ListAgentLastName":"Sukhdev Grewal","ListAgentPhone":"604-581-8400","ListAgentEmail":"[email protected]","DayOfMarket":150,"StatDOM":149,"PendingDate":null,"ExpiredDate":"/Date(1455087600000)/","SortDate":"/Date(1455087600000)/"},{"Id":80,"ImageFile":"R2021489.jpg","Address":"14704 109a Avenue","Bath":3,"Bed":5,"Sq":"2158","LotSize":"0.17","ListingPrice":629000,"SoldPrice":0,"DisplayPrice":629000,"PricePerSq":291,"ListingDate":"/Date(1450162800000)/","SoldDate":null,"DaysOnMarket":63,"ListingId":681631173,"Status_UI":4,"Status":2,"Latitude":49.2012939453125,"Longitude":-122.81502532958984,"GeocodingLevelId":1,"MlsNumber":"R2021489","ListingDate_Grouped":"/Date(1449990000000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1455606000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853173)/","ListAgentFirstName":"","ListAgentLastName":"Randy Dhillon","ListAgentPhone":"604-315-2525","ListAgentEmail":"[email protected]","DayOfMarket":64,"StatDOM":63,"PendingDate":null,"ExpiredDate":"/Date(1455519600000)/","SortDate":"/Date(1455519600000)/"},{"Id":81,"ImageFile":"R2054853.jpg","Address":"9493 156th 
Street","Bath":2,"Bed":3,"Sq":"1050","LotSize":"0.16","ListingPrice":759000,"SoldPrice":0,"DisplayPrice":759000,"PricePerSq":723,"ListingDate":"/Date(1460012400000)/","SoldDate":null,"DaysOnMarket":19,"ListingId":681631125,"Status_UI":3,"Status":0,"Latitude":49.17504119873047,"Longitude":-122.79020690917969,"GeocodingLevelId":1,"MlsNumber":"R2054853","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460098800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853013)/","ListAgentFirstName":"","ListAgentLastName":"Sara Sharma","ListAgentPhone":"604-992-7253","ListAgentEmail":"[email protected]","DayOfMarket":31,"StatDOM":19,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1460012400000)/"},{"Id":82,"ImageFile":"R2059913.jpg","Address":"15392 95a Avenue","Bath":3,"Bed":3,"Sq":"2244","LotSize":"0.11","ListingPrice":750000,"SoldPrice":0,"DisplayPrice":750000,"PricePerSq":334,"ListingDate":"/Date(1461135600000)/","SoldDate":null,"DaysOnMarket":6,"ListingId":681631087,"Status_UI":2,"Status":0,"Latitude":49.17596435546875,"Longitude":-122.79553985595703,"GeocodingLevelId":1,"MlsNumber":"R2059913","ListingDate_Grouped":"/Date(1460876400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461135600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852837)/","ListAgentFirstName":"","ListAgentLastName":"Michael Kwung PREC*","ListAgentPhone":"604-616-7203","ListAgentEmail":"[email protected]","DayOfMarket":18,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1483167600000)/","SortDate":"/Date(1461135600000)/"},{"Id":83,"ImageFile":"R2016804.jpg","Address":"11523 Roxburgh Road","Bath":4,"Bed":6,"Sq":"2528","LotSize":"0.13","ListingPrice":729000,"SoldPrice":0,"DisplayPrice":729000,"PricePerSq":288,"ListingDate":"/Date(1447830000000)/","SoldDate":null,"DaysOnMarket":160,"ListingId":681631113,"Status_UI":3,"Status":0,"Latitude":49.21215057373047,"Longitude":-122.81331634521484,"GeocodingLevelId":1,"MlsNumber":"R2016804","ListingDate_Grouped":"/Date(1447570800000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1448262000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852963)/","ListAgentFirstName":"","ListAgentLastName":"Maria Delos Santos","ListAgentPhone":"778-885-3207","ListAgentEmail":"[email protected]","DayOfMarket":172,"StatDOM":160,"PendingDate":null,"ExpiredDate":"/Date(1461999600000)/","SortDate":"/Date(1447830000000)/"},{"Id":84,"ImageFile":"R2055799.jpg","Address":"14709 111a Avenue","Bath":2,"Bed":4,"Sq":"2117","LotSize":"0.18","ListingPrice":649000,"SoldPrice":666000,"DisplayPrice":666000,"PricePerSq":315,"ListingDate":"/Date(1460098800000)/","SoldDate":"/Date(1460962800000)/","DaysOnMarket":10,"ListingId":681631026,"Status_UI":1,"Status":1,"Latitude":49.20531463623047,"Longitude":-122.8151626586914,"GeocodingLevelId":1,"MlsNumber":"R2055799","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":"/Date(1460876400000)/","StatusDate":"/Date(1461308400000)/","PriceRatio":102.61941448382126,"WhenReceived":"/Date(1461685852477)/","ListAgentFirstName":"","ListAgentLastName":"Danny Steele","ListAgentPhone":"604-760-5400","ListAgentEmail":"[email protected]","DayOfMarket":11,"StatDOM":10,"PendingDate":null,"ExpiredDate":"/Date(1467961200000)/","SortDate":"/Date(1460962800000)/"},{"Id":85,"ImageFile":"R2061187.jpg","Address":"14678 109a 
Avenue","Bath":2,"Bed":5,"Sq":"2134","LotSize":"0.17","ListingPrice":675000,"SoldPrice":0,"DisplayPrice":675000,"PricePerSq":316,"ListingDate":"/Date(1461567600000)/","SoldDate":null,"DaysOnMarket":1,"ListingId":681631133,"Status_UI":2,"Status":0,"Latitude":49.2012939453125,"Longitude":-122.8155517578125,"GeocodingLevelId":1,"MlsNumber":"R2061187","ListingDate_Grouped":"/Date(1461481200000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461567600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853043)/","ListAgentFirstName":"","ListAgentLastName":"Di Pham","ListAgentPhone":"604-307-2475","ListAgentEmail":"[email protected]","DayOfMarket":13,"StatDOM":1,"PendingDate":null,"ExpiredDate":"/Date(1477983600000)/","SortDate":"/Date(1461567600000)/"},{"Id":86,"ImageFile":"R2042446.jpg","Address":"14700 107 Avenue","Bath":2,"Bed":4,"Sq":"2330","LotSize":"0.18","ListingPrice":699900,"SoldPrice":775000,"DisplayPrice":775000,"PricePerSq":333,"ListingDate":"/Date(1456988400000)/","SoldDate":"/Date(1457852400000)/","DaysOnMarket":10,"ListingId":681630966,"Status_UI":1,"Status":1,"Latitude":49.19676971435547,"Longitude":-122.8151626586914,"GeocodingLevelId":1,"MlsNumber":"R2042446","ListingDate_Grouped":"/Date(1456642800000)/","SoldDate_Grouped":"/Date(1457852400000)/","StatusDate":"/Date(1458025200000)/","PriceRatio":110.73010430061437,"WhenReceived":"/Date(1461685852013)/","ListAgentFirstName":"","ListAgentLastName":"Mayur Arora","ListAgentPhone":"604-725-1000","ListAgentEmail":"[email protected]","DayOfMarket":11,"StatDOM":10,"PendingDate":null,"ExpiredDate":"/Date(1467270000000)/","SortDate":"/Date(1457852400000)/"},{"Id":87,"ImageFile":"R2053430.jpg","Address":"15332 95a Avenue","Bath":3,"Bed":4,"Sq":"1811","LotSize":"0.16","ListingPrice":775000,"SoldPrice":845000,"DisplayPrice":845000,"PricePerSq":467,"ListingDate":"/Date(1459839600000)/","SoldDate":"/Date(1460358000000)/","DaysOnMarket":6,"ListingId":681630993,"Status_UI":1,"Status":1,"Latitude":49.17595672607422,"Longitude":-122.79745483398438,"GeocodingLevelId":1,"MlsNumber":"R2053430","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":"/Date(1460271600000)/","StatusDate":"/Date(1460962800000)/","PriceRatio":109.03225806451613,"WhenReceived":"/Date(1461685852170)/","ListAgentFirstName":"","ListAgentLastName":"Minerva Pizarro","ListAgentPhone":"604-785-3167","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1472540400000)/","SortDate":"/Date(1460358000000)/"},{"Id":88,"ImageFile":"R2043731.jpg","Address":"14718 Wellington Drive","Bath":2,"Bed":5,"Sq":"2752","LotSize":"0.41","ListingPrice":949000,"SoldPrice":945000,"DisplayPrice":945000,"PricePerSq":343,"ListingDate":"/Date(1457334000000)/","SoldDate":"/Date(1458716400000)/","DaysOnMarket":16,"ListingId":681631016,"Status_UI":1,"Status":1,"Latitude":49.209571838378906,"Longitude":-122.81478118896484,"GeocodingLevelId":1,"MlsNumber":"R2043731","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":"/Date(1458457200000)/","StatusDate":"/Date(1459234800000)/","PriceRatio":99.57850368809272,"WhenReceived":"/Date(1461685852250)/","ListAgentFirstName":"","ListAgentLastName":"Rob Johnson","ListAgentPhone":"604-230-5050","ListAgentEmail":"[email protected]","DayOfMarket":17,"StatDOM":16,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1458716400000)/"},{"Id":89,"ImageFile":"R2046715.jpg","Address":"10191 148 
Street","Bath":3,"Bed":4,"Sq":"1858","LotSize":"0.17","ListingPrice":699000,"SoldPrice":765000,"DisplayPrice":765000,"PricePerSq":412,"ListingDate":"/Date(1457938800000)/","SoldDate":"/Date(1459321200000)/","DaysOnMarket":16,"ListingId":681631010,"Status_UI":1,"Status":1,"Latitude":49.18777847290039,"Longitude":-122.81230163574219,"GeocodingLevelId":1,"MlsNumber":"R2046715","ListingDate_Grouped":"/Date(1457852400000)/","SoldDate_Grouped":"/Date(1459062000000)/","StatusDate":"/Date(1460012400000)/","PriceRatio":109.44206008583691,"WhenReceived":"/Date(1461685852227)/","ListAgentFirstName":"","ListAgentLastName":"Tony Tran","ListAgentPhone":"604-780-1555","ListAgentEmail":"[email protected]","DayOfMarket":17,"StatDOM":16,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1459321200000)/"},{"Id":90,"ImageFile":"R2059297.jpg","Address":"14655 108a Avenue","Bath":2,"Bed":4,"Sq":"2301","LotSize":"0.16","ListingPrice":619900,"SoldPrice":0,"DisplayPrice":619900,"PricePerSq":269,"ListingDate":"/Date(1460962800000)/","SoldDate":null,"DaysOnMarket":8,"ListingId":681631145,"Status_UI":2,"Status":0,"Latitude":49.19974136352539,"Longitude":-122.8160629272461,"GeocodingLevelId":1,"MlsNumber":"R2059297","ListingDate_Grouped":"/Date(1460876400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461135600000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853090)/","ListAgentFirstName":"","ListAgentLastName":"Stephan Zandbergen","ListAgentPhone":"604-970-6995","ListAgentEmail":"[email protected]","DayOfMarket":20,"StatDOM":8,"PendingDate":null,"ExpiredDate":"/Date(1466492400000)/","SortDate":"/Date(1460962800000)/"},{"Id":91,"ImageFile":"R2054422.jpg","Address":"14689 106 Avenue","Bath":2,"Bed":5,"Sq":"3009","LotSize":"0.17","ListingPrice":824000,"SoldPrice":903000,"DisplayPrice":903000,"PricePerSq":300,"ListingDate":"/Date(1459839600000)/","SoldDate":"/Date(1460358000000)/","DaysOnMarket":6,"ListingId":681630978,"Status_UI":1,"Status":1,"Latitude":49.195289611816406,"Longitude":-122.81543731689453,"GeocodingLevelId":1,"MlsNumber":"R2054422","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":"/Date(1460271600000)/","StatusDate":"/Date(1460962800000)/","PriceRatio":109.5873786407767,"WhenReceived":"/Date(1461685852073)/","ListAgentFirstName":"","ListAgentLastName":"Michael Tanlimco","ListAgentPhone":"604-571-0909","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1491375600000)/","SortDate":"/Date(1460358000000)/"},{"Id":92,"ImageFile":"R2031239.jpg","Address":"9558 152 Street","Bath":4,"Bed":7,"Sq":"2941","LotSize":"0.16","ListingPrice":824000,"SoldPrice":866500,"DisplayPrice":866500,"PricePerSq":295,"ListingDate":"/Date(1454310000000)/","SoldDate":"/Date(1458543600000)/","DaysOnMarket":49,"ListingId":681630959,"Status_UI":1,"Status":1,"Latitude":49.17633056640625,"Longitude":-122.8007583618164,"GeocodingLevelId":1,"MlsNumber":"R2031239","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":"/Date(1458457200000)/","StatusDate":"/Date(1459494000000)/","PriceRatio":105.15776699029126,"WhenReceived":"/Date(1461685851970)/","ListAgentFirstName":"","ListAgentLastName":"Angela Pelayo","ListAgentPhone":"604-538-2125","ListAgentEmail":"[email protected]","DayOfMarket":50,"StatDOM":49,"PendingDate":null,"ExpiredDate":"/Date(1464678000000)/","SortDate":"/Date(1458543600000)/"},{"Id":93,"ImageFile":"R2045136.jpg","Address":"15566 94 
Avenue","Bath":3,"Bed":5,"Sq":"2319","LotSize":"0.16","ListingPrice":868000,"SoldPrice":0,"DisplayPrice":868000,"PricePerSq":374,"ListingDate":"/Date(1457506800000)/","SoldDate":null,"DaysOnMarket":29,"ListingId":681631175,"Status_UI":4,"Status":2,"Latitude":49.17338943481445,"Longitude":-122.79105377197266,"GeocodingLevelId":1,"MlsNumber":"R2045136","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460012400000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853187)/","ListAgentFirstName":"","ListAgentLastName":"Christina Ashby","ListAgentPhone":"604-263-1144","ListAgentEmail":"[email protected]","DayOfMarket":30,"StatDOM":29,"PendingDate":null,"ExpiredDate":"/Date(1475218800000)/","SortDate":"/Date(1475218800000)/"},{"Id":94,"ImageFile":"R2047336.jpg","Address":"14664 112 Avenue","Bath":5,"Bed":6,"Sq":"3404","LotSize":"0.09","ListingPrice":1199990,"SoldPrice":0,"DisplayPrice":1199990,"PricePerSq":353,"ListingDate":"/Date(1458111600000)/","SoldDate":null,"DaysOnMarket":41,"ListingId":681631093,"Status_UI":3,"Status":0,"Latitude":49.20576095581055,"Longitude":-122.81664276123047,"GeocodingLevelId":1,"MlsNumber":"R2047336","ListingDate_Grouped":"/Date(1457852400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1458284400000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852863)/","ListAgentFirstName":"","ListAgentLastName":"Sukhdev (Sukhi) Khera","ListAgentPhone":"604-218-1355","ListAgentEmail":"[email protected]","DayOfMarket":53,"StatDOM":41,"PendingDate":null,"ExpiredDate":"/Date(1469948400000)/","SortDate":"/Date(1458111600000)/"},{"Id":95,"ImageFile":"R2042381.jpg","Address":"15477 93a Avenue","Bath":3,"Bed":3,"Sq":"2175","LotSize":"0.16","ListingPrice":769999,"SoldPrice":835000,"DisplayPrice":835000,"PricePerSq":384,"ListingDate":"/Date(1457334000000)/","SoldDate":"/Date(1457852400000)/","DaysOnMarket":6,"ListingId":681631019,"Status_UI":1,"Status":1,"Latitude":49.173274993896484,"Longitude":-122.79364776611328,"GeocodingLevelId":1,"MlsNumber":"R2042381","ListingDate_Grouped":"/Date(1457247600000)/","SoldDate_Grouped":"/Date(1457852400000)/","StatusDate":"/Date(1459494000000)/","PriceRatio":108.44169927493412,"WhenReceived":"/Date(1461685852263)/","ListAgentFirstName":"","ListAgentLastName":"Shafiq Kazemi","ListAgentPhone":"604-961-0543","ListAgentEmail":"[email protected]","DayOfMarket":7,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1457852400000)/"},{"Id":96,"ImageFile":"R2060477.jpg","Address":"14621 106a Avenue","Bath":4,"Bed":5,"Sq":"2576","LotSize":"0.18","ListingPrice":799000,"SoldPrice":0,"DisplayPrice":799000,"PricePerSq":310,"ListingDate":"/Date(1461135600000)/","SoldDate":null,"DaysOnMarket":6,"ListingId":681631122,"Status_UI":2,"Status":0,"Latitude":49.19620132446289,"Longitude":-122.81687927246094,"GeocodingLevelId":1,"MlsNumber":"R2060477","ListingDate_Grouped":"/Date(1460876400000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461222000000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853000)/","ListAgentFirstName":"","ListAgentLastName":"Maura Boguski","ListAgentPhone":"604-415-9800","ListAgentEmail":"[email protected]","DayOfMarket":18,"StatDOM":6,"PendingDate":null,"ExpiredDate":"/Date(1483167600000)/","SortDate":"/Date(1461135600000)/"},{"Id":97,"ImageFile":"R2032631.jpg","Address":"14680 St. 
Andrews Drive","Bath":3,"Bed":5,"Sq":"2728","LotSize":"0.16","ListingPrice":769000,"SoldPrice":0,"DisplayPrice":769000,"PricePerSq":282,"ListingDate":"/Date(1454569200000)/","SoldDate":null,"DaysOnMarket":18,"ListingId":681631156,"Status_UI":4,"Status":2,"Latitude":49.21165084838867,"Longitude":-122.81580352783203,"GeocodingLevelId":1,"MlsNumber":"R2032631","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1461049200000)/","PriceRatio":100,"WhenReceived":"/Date(1461685853120)/","ListAgentFirstName":"","ListAgentLastName":"Geordie Moski","ListAgentPhone":"604-728-8017","ListAgentEmail":"[email protected]","DayOfMarket":76,"StatDOM":18,"PendingDate":null,"ExpiredDate":"/Date(1472626800000)/","SortDate":"/Date(1472626800000)/"},{"Id":98,"ImageFile":"R2032091.jpg","Address":"9872 149 Street","Bath":3,"Bed":4,"Sq":"2071","LotSize":"0.15","ListingPrice":689879,"SoldPrice":700500,"DisplayPrice":700500,"PricePerSq":338,"ListingDate":"/Date(1454482800000)/","SoldDate":"/Date(1456470000000)/","DaysOnMarket":23,"ListingId":681630963,"Status_UI":1,"Status":1,"Latitude":49.18183135986328,"Longitude":-122.8095703125,"GeocodingLevelId":1,"MlsNumber":"R2032091","ListingDate_Grouped":"/Date(1454223600000)/","SoldDate_Grouped":"/Date(1456038000000)/","StatusDate":"/Date(1457506800000)/","PriceRatio":101.53954534055973,"WhenReceived":"/Date(1461685851987)/","ListAgentFirstName":"","ListAgentLastName":"Tony Bal - PREC","ListAgentPhone":"604-716-4781","ListAgentEmail":"[email protected]","DayOfMarket":24,"StatDOM":23,"PendingDate":null,"ExpiredDate":"/Date(1470207600000)/","SortDate":"/Date(1456470000000)/"},{"Id":99,"ImageFile":"R2057151.jpg","Address":"15120 96 Avenue","Bath":2,"Bed":5,"Sq":"1918","LotSize":"0.16","ListingPrice":798000,"SoldPrice":0,"DisplayPrice":798000,"PricePerSq":416,"ListingDate":"/Date(1460358000000)/","SoldDate":null,"DaysOnMarket":15,"ListingId":681631114,"Status_UI":3,"Status":0,"Latitude":49.17668151855469,"Longitude":-122.8031997680664,"GeocodingLevelId":1,"MlsNumber":"R2057151","ListingDate_Grouped":"/Date(1460271600000)/","SoldDate_Grouped":null,"StatusDate":"/Date(1460530800000)/","PriceRatio":100,"WhenReceived":"/Date(1461685852967)/","ListAgentFirstName":"","ListAgentLastName":"Benson Lee PREC*","ListAgentPhone":"604-354-8266","ListAgentEmail":"[email protected]","DayOfMarket":27,"StatDOM":15,"PendingDate":null,"ExpiredDate":"/Date(1468566000000)/","SortDate":"/Date(1460358000000)/"},{"Id":100,"ImageFile":"R2052832.jpg","Address":"15518 93rd Avenue","Bath":3,"Bed":3,"Sq":"2160","LotSize":"0.16","ListingPrice":879000,"SoldPrice":885000,"DisplayPrice":885000,"PricePerSq":410,"ListingDate":"/Date(1459926000000)/","SoldDate":"/Date(1460358000000)/","DaysOnMarket":5,"ListingId":681631021,"Status_UI":1,"Status":1,"Latitude":49.172481536865234,"Longitude":-122.79254150390625,"GeocodingLevelId":1,"MlsNumber":"R2052832","ListingDate_Grouped":"/Date(1459666800000)/","SoldDate_Grouped":"/Date(1460271600000)/","StatusDate":"/Date(1460703600000)/","PriceRatio":100.68259385665529,"WhenReceived":"/Date(1461685852270)/","ListAgentFirstName":"","ListAgentLastName":"Drew Steeves","ListAgentPhone":"604-575-5262","ListAgentEmail":"[email 
protected]","DayOfMarket":6,"StatDOM":5,"PendingDate":null,"ExpiredDate":"/Date(1475737200000)/","SortDate":"/Date(1460358000000)/"}],"LStatusNew":15,"LStatusForSale":19,"LStatusSold":53,"LStatusPending":0,"LStatusExpired":13,"LStatusAll":100,"ShowSoldListings":true,"ShowPendingListings":true,"ShowExpiredListings":true,"AverageDaysOnMarket":27,"SellingVsAskingPrice":5,"MedianSoldPrice":770000,"NumberOfHomes":182,"AverageDaysOnMarketChart":{"Sold_AvgDaysOnMarket":13,"Sold_Total":113,"Sold_MostRecent":"/Date(1461049200000)/","Sold_Shortest":1,"Sold_Longest":102,"ForSale_AvgDaysOnMarket":33,"ForSale_Total":79,"ForSale_MostRecent":"/Date(1461567600000)/","ForSale_Shortest":1,"ForSale_Longest":315,"Series_HighDays":[{"xValue":"/Date(1453618800000)/","yValue":282},{"xValue":"/Date(1454223600000)/","yValue":284},{"xValue":"/Date(1454828400000)/","yValue":261},{"xValue":"/Date(1455433200000)/","yValue":268},{"xValue":"/Date(1456038000000)/","yValue":275},{"xValue":"/Date(1456642800000)/","yValue":282},{"xValue":"/Date(1457247600000)/","yValue":289},{"xValue":"/Date(1457852400000)/","yValue":296},{"xValue":"/Date(1458457200000)/","yValue":303},{"xValue":"/Date(1459062000000)/","yValue":310},{"xValue":"/Date(1459666800000)/","yValue":317},{"xValue":"/Date(1460271600000)/","yValue":324},{"xValue":"/Date(1460876400000)/","yValue":330},{"xValue":"/Date(1461481200000)/","yValue":326},{"xValue":"/Date(1462086000000)/","yValue":333}],"Series_LowDays":[{"xValue":"/Date(1453618800000)/","yValue":1},{"xValue":"/Date(1454223600000)/","yValue":1},{"xValue":"/Date(1454828400000)/","yValue":1},{"xValue":"/Date(1455433200000)/","yValue":3},{"xValue":"/Date(1456038000000)/","yValue":1},{"xValue":"/Date(1456642800000)/","yValue":2},{"xValue":"/Date(1457247600000)/","yValue":1},{"xValue":"/Date(1457852400000)/","yValue":2},{"xValue":"/Date(1458457200000)/","yValue":2},{"xValue":"/Date(1459062000000)/","yValue":1},{"xValue":"/Date(1459666800000)/","yValue":1},{"xValue":"/Date(1460271600000)/","yValue":1},{"xValue":"/Date(1460876400000)/","yValue":1},{"xValue":"/Date(1461481200000)/","yValue":6},{"xValue":"/Date(1462086000000)/","yValue":13}],"Series_AverageDays":[{"xValue":"/Date(1453618800000)/","yValue":56},{"xValue":"/Date(1454223600000)/","yValue":46},{"xValue":"/Date(1454828400000)/","yValue":38},{"xValue":"/Date(1455433200000)/","yValue":33},{"xValue":"/Date(1456038000000)/","yValue":37},{"xValue":"/Date(1456642800000)/","yValue":41},{"xValue":"/Date(1457247600000)/","yValue":37},{"xValue":"/Date(1457852400000)/","yValue":38},{"xValue":"/Date(1458457200000)/","yValue":39},{"xValue":"/Date(1459062000000)/","yValue":38},{"xValue":"/Date(1459666800000)/","yValue":37},{"xValue":"/Date(1460271600000)/","yValue":35},{"xValue":"/Date(1460876400000)/","yValue":36},{"xValue":"/Date(1461481200000)/","yValue":38},{"xValue":"/Date(1462086000000)/","yValue":45}],"Series_RecentSales":[{"xValue":"/Date(1453618800000)/","yValue":78},{"xValue":"/Date(1453618800000)/","yValue":8},{"xValue":"/Date(1453618800000)/","yValue":16},{"xValue":"/Date(1453618800000)/","yValue":3},{"xValue":"/Date(1453618800000)/","yValue":22},{"xValue":"/Date(1453618800000)/","yValue":6},{"xValue":"/Date(1453618800000)/","yValue":15},{"xValue":"/Date(1453618800000)/","yValue":17},{"xValue":"/Date(1453618800000)/","yValue":0},{"xValue":"/Date(1454223600000)/","yValue":0},{"xValue":"/Date(1454223600000)/","yValue":6},{"xValue":"/Date(1454223600000)/","yValue":7},{"xValue":"/Date(1454223600000)/","yValue":86},{"xValue":"/Date(1454223600000)/","yValue":24},{"
xValue":"/Date(1454223600000)/","yValue":2},{"xValue":"/Date(1454223600000)/","yValue":7},{"xValue":"/Date(1454828400000)/","yValue":8},{"xValue":"/Date(1454828400000)/","yValue":5},{"xValue":"/Date(1454828400000)/","yValue":10},{"xValue":"/Date(1454828400000)/","yValue":8},{"xValue":"/Date(1454828400000)/","yValue":1},{"xValue":"/Date(1454828400000)/","yValue":7},{"xValue":"/Date(1455433200000)/","yValue":5},{"xValue":"/Date(1455433200000)/","yValue":3},{"xValue":"/Date(1455433200000)/","yValue":5},{"xValue":"/Date(1455433200000)/","yValue":6},{"xValue":"/Date(1455433200000)/","yValue":14},{"xValue":"/Date(1455433200000)/","yValue":2},{"xValue":"/Date(1455433200000)/","yValue":13},{"xValue":"/Date(1455433200000)/","yValue":5},{"xValue":"/Date(1455433200000)/","yValue":24},{"xValue":"/Date(1455433200000)/","yValue":5},{"xValue":"/Date(1455433200000)/","yValue":5},{"xValue":"/Date(1455433200000)/","yValue":3},{"xValue":"/Date(1456038000000)/","yValue":44},{"xValue":"/Date(1456038000000)/","yValue":15},{"xValue":"/Date(1456038000000)/","yValue":6},{"xValue":"/Date(1456038000000)/","yValue":30},{"xValue":"/Date(1456038000000)/","yValue":5},{"xValue":"/Date(1456038000000)/","yValue":0},{"xValue":"/Date(1456038000000)/","yValue":23},{"xValue":"/Date(1456038000000)/","yValue":6},{"xValue":"/Date(1456038000000)/","yValue":9},{"xValue":"/Date(1456038000000)/","yValue":13},{"xValue":"/Date(1456038000000)/","yValue":6},{"xValue":"/Date(1456038000000)/","yValue":8},{"xValue":"/Date(1456038000000)/","yValue":4},{"xValue":"/Date(1456642800000)/","yValue":13},{"xValue":"/Date(1456642800000)/","yValue":6},{"xValue":"/Date(1456642800000)/","yValue":74},{"xValue":"/Date(1456642800000)/","yValue":23},{"xValue":"/Date(1456642800000)/","yValue":102},{"xValue":"/Date(1456642800000)/","yValue":8},{"xValue":"/Date(1456642800000)/","yValue":7},{"xValue":"/Date(1456642800000)/","yValue":14},{"xValue":"/Date(1456642800000)/","yValue":3},{"xValue":"/Date(1456642800000)/","yValue":6},{"xValue":"/Date(1456642800000)/","yValue":1},{"xValue":"/Date(1457247600000)/","yValue":4},{"xValue":"/Date(1457247600000)/","yValue":6},{"xValue":"/Date(1457247600000)/","yValue":5},{"xValue":"/Date(1457247600000)/","yValue":9},{"xValue":"/Date(1457247600000)/","yValue":31},{"xValue":"/Date(1457852400000)/","yValue":5},{"xValue":"/Date(1457852400000)/","yValue":12},{"xValue":"/Date(1457852400000)/","yValue":10},{"xValue":"/Date(1457852400000)/","yValue":14},{"xValue":"/Date(1457852400000)/","yValue":6},{"xValue":"/Date(1457852400000)/","yValue":13},{"xValue":"/Date(1457852400000)/","yValue":10},{"xValue":"/Date(1457852400000)/","yValue":5},{"xValue":"/Date(1457852400000)/","yValue":5},{"xValue":"/Date(1457852400000)/","yValue":5},{"xValue":"/Date(1457852400000)/","yValue":41},{"xValue":"/Date(1457852400000)/","yValue":2},{"xValue":"/Date(1457852400000)/","yValue":7},{"xValue":"/Date(1458457200000)/","yValue":16},{"xValue":"/Date(1458457200000)/","yValue":73},{"xValue":"/Date(1458457200000)/","yValue":49},{"xValue":"/Date(1458457200000)/","yValue":6},{"xValue":"/Date(1458457200000)/","yValue":7},{"xValue":"/Date(1458457200000)/","yValue":7},{"xValue":"/Date(1458457200000)/","yValue":19},{"xValue":"/Date(1458457200000)/","yValue":15},{"xValue":"/Date(1458457200000)/","yValue":1},{"xValue":"/Date(1458457200000)/","yValue":1},{"xValue":"/Date(1459062000000)/","yValue":20},{"xValue":"/Date(1459062000000)/","yValue":7},{"xValue":"/Date(1459062000000)/","yValue":21},{"xValue":"/Date(1459062000000)/","yValue":6},{"xValue":"/Date(1459062000000)/
","yValue":7},{"xValue":"/Date(1459062000000)/","yValue":16},{"xValue":"/Date(1459062000000)/","yValue":2},{"xValue":"/Date(1459062000000)/","yValue":7},{"xValue":"/Date(1459062000000)/","yValue":5},{"xValue":"/Date(1459666800000)/","yValue":6},{"xValue":"/Date(1459666800000)/","yValue":8},{"xValue":"/Date(1459666800000)/","yValue":18},{"xValue":"/Date(1459666800000)/","yValue":9},{"xValue":"/Date(1459666800000)/","yValue":12},{"xValue":"/Date(1459666800000)/","yValue":16},{"xValue":"/Date(1459666800000)/","yValue":6},{"xValue":"/Date(1459666800000)/","yValue":6},{"xValue":"/Date(1460271600000)/","yValue":12},{"xValue":"/Date(1460271600000)/","yValue":5},{"xValue":"/Date(1460271600000)/","yValue":6},{"xValue":"/Date(1460271600000)/","yValue":12},{"xValue":"/Date(1460271600000)/","yValue":6},{"xValue":"/Date(1460271600000)/","yValue":8},{"xValue":"/Date(1460271600000)/","yValue":71},{"xValue":"/Date(1460271600000)/","yValue":3},{"xValue":"/Date(1460271600000)/","yValue":6},{"xValue":"/Date(1460876400000)/","yValue":2},{"xValue":"/Date(1460876400000)/","yValue":6},{"xValue":"/Date(1460876400000)/","yValue":16},{"xValue":"/Date(1460876400000)/","yValue":10}],"DynamicText":"\r\n Some of the 2+ bedroom, 2+ bath Single-family homes in this area have been on the market 315 days while the average time is 33 days.\r\n \r\n Homes that sold in the last 3 months have taken up to 102 days while the average time is 13 days. The minimum is 1 days.\r\n "},"SellingVsAskingPriceChart":{"PercentAskingPriceRecieved":105,"SalesUnderAsking":26,"SalesOverAsking":87,"SalesAtAsking":3,"Series_AskingPrice":[{"xValue":"/Date(1453618800000)/","yValue":835522.2222222222},{"xValue":"/Date(1454223600000)/","yValue":796100},{"xValue":"/Date(1454828400000)/","yValue":668463.3333333334},{"xValue":"/Date(1455433200000)/","yValue":764222.8333333334},{"xValue":"/Date(1456038000000)/","yValue":779689.7692307692},{"xValue":"/Date(1456642800000)/","yValue":778687.1818181818},{"xValue":"/Date(1457247600000)/","yValue":721960},{"xValue":"/Date(1457852400000)/","yValue":777984.5384615385},{"xValue":"/Date(1458457200000)/","yValue":725236.8},{"xValue":"/Date(1459062000000)/","yValue":727322},{"xValue":"/Date(1459666800000)/","yValue":806062.5},{"xValue":"/Date(1460271600000)/","yValue":810420},{"xValue":"/Date(1460876400000)/","yValue":709197},{"xValue":"/Date(1461481200000)/","yValue":0},{"xValue":"/Date(1462086000000)/","yValue":0}],"Series_SellingPrice":[{"xValue":"/Date(1453618800000)/","yValue":841444.4444444445},{"xValue":"/Date(1454223600000)/","yValue":819714.2857142857},{"xValue":"/Date(1454828400000)/","yValue":735958.3333333334},{"xValue":"/Date(1455433200000)/","yValue":791157.3333333334},{"xValue":"/Date(1456038000000)/","yValue":805853.8461538461},{"xValue":"/Date(1456642800000)/","yValue":800289.7272727273},{"xValue":"/Date(1457247600000)/","yValue":758000},{"xValue":"/Date(1457852400000)/","yValue":815438.4615384615},{"xValue":"/Date(1458457200000)/","yValue":764700},{"xValue":"/Date(1459062000000)/","yValue":772575.2222222222},{"xValue":"/Date(1459666800000)/","yValue":860125},{"xValue":"/Date(1460271600000)/","yValue":860590.3333333334},{"xValue":"/Date(1460876400000)/","yValue":774500},{"xValue":"/Date(1461481200000)/","yValue":0},{"xValue":"/Date(1462086000000)/","yValue":0}],"DynamicText":"\r\n Of the 113 2+ bedroom, 2+ bath, Single-family homes in this area that sold in the last 3 months, 87 got over their listing price, 26 got less than the listing price, and 3 sold at their listing price.\r\n 
"},"AskingAndSoldPriceChart":{"Sold_Total":116,"Sold_MostRecent":"/Date(1461222000000)/","Sold_Highest":1548000,"Sold_Lowest":489900,"Sold_Average":803579,"Sold_Median":770000,"ForSale_Total":79,"ForSale_MostRecent":"/Date(1461567600000)/","ForSale_Highest":2988000,"ForSale_Lowest":89000,"ForSale_Average":919244,"ForSale_Median":799880,"Series_MaxPrice":[{"xValue":"/Date(1453618800000)/","yValue":1568000},{"xValue":"/Date(1454223600000)/","yValue":1568000},{"xValue":"/Date(1454828400000)/","yValue":1569000},{"xValue":"/Date(1455433200000)/","yValue":1569000},{"xValue":"/Date(1456038000000)/","yValue":1569000},{"xValue":"/Date(1456642800000)/","yValue":1569000},{"xValue":"/Date(1457247600000)/","yValue":1569000},{"xValue":"/Date(1457852400000)/","yValue":1569000},{"xValue":"/Date(1458457200000)/","yValue":1569000},{"xValue":"/Date(1459062000000)/","yValue":1569000},{"xValue":"/Date(1459666800000)/","yValue":1699900},{"xValue":"/Date(1460271600000)/","yValue":2988000},{"xValue":"/Date(1460876400000)/","yValue":2988000},{"xValue":"/Date(1461481200000)/","yValue":2988000},{"xValue":"/Date(1462086000000)/","yValue":2988000}],"Series_MinPrice":[{"xValue":"/Date(1453618800000)/","yValue":499000},{"xValue":"/Date(1454223600000)/","yValue":475000},{"xValue":"/Date(1454828400000)/","yValue":475000},{"xValue":"/Date(1455433200000)/","yValue":499000},{"xValue":"/Date(1456038000000)/","yValue":469000},{"xValue":"/Date(1456642800000)/","yValue":469000},{"xValue":"/Date(1457247600000)/","yValue":469000},{"xValue":"/Date(1457852400000)/","yValue":469000},{"xValue":"/Date(1458457200000)/","yValue":469000},{"xValue":"/Date(1459062000000)/","yValue":469000},{"xValue":"/Date(1459666800000)/","yValue":499000},{"xValue":"/Date(1460271600000)/","yValue":499000},{"xValue":"/Date(1460876400000)/","yValue":89000},{"xValue":"/Date(1461481200000)/","yValue":89000},{"xValue":"/Date(1462086000000)/","yValue":89000}],"Series_AveragePrice":[{"xValue":"/Date(1453618800000)/","yValue":801117.7297297297},{"xValue":"/Date(1454223600000)/","yValue":803363.5227272727},{"xValue":"/Date(1454828400000)/","yValue":804414.5306122449},{"xValue":"/Date(1455433200000)/","yValue":830121.425925926},{"xValue":"/Date(1456038000000)/","yValue":838656.7551020408},{"xValue":"/Date(1456642800000)/","yValue":822269.4583333334},{"xValue":"/Date(1457247600000)/","yValue":837777.7894736842},{"xValue":"/Date(1457852400000)/","yValue":845663.5666666667},{"xValue":"/Date(1458457200000)/","yValue":842307.9090909091},{"xValue":"/Date(1459062000000)/","yValue":883263.3134328359},{"xValue":"/Date(1459666800000)/","yValue":928024.3835616439},{"xValue":"/Date(1460271600000)/","yValue":932670.1724137932},{"xValue":"/Date(1460876400000)/","yValue":916107.7093023256},{"xValue":"/Date(1461481200000)/","yValue":919243.9873417722},{"xValue":"/Date(1462086000000)/","yValue":919243.9873417722}],"Series_RecentSales":[{"xValue":"/Date(1453618800000)/","yValue":1140000},{"xValue":"/Date(1453618800000)/","yValue":1160000},{"xValue":"/Date(1453618800000)/","yValue":1080000},{"xValue":"/Date(1453618800000)/","yValue":965000},{"xValue":"/Date(1453618800000)/","yValue":856000},{"xValue":"/Date(1453618800000)/","yValue":640000},{"xValue":"/Date(1453618800000)/","yValue":610000},{"xValue":"/Date(1453618800000)/","yValue":610000},{"xValue":"/Date(1453618800000)/","yValue":512000},{"xValue":"/Date(1454223600000)/","yValue":998000},{"xValue":"/Date(1454223600000)/","yValue":990000},{"xValue":"/Date(1454223600000)/","yValue":875000},{"xValue":"/Date(1454223600000)/","yValue":86
5000},{"xValue":"/Date(1454223600000)/","yValue":835000},{"xValue":"/Date(1454223600000)/","yValue":595000},{"xValue":"/Date(1454223600000)/","yValue":580000},{"xValue":"/Date(1454828400000)/","yValue":1010000},{"xValue":"/Date(1454828400000)/","yValue":801250},{"xValue":"/Date(1454828400000)/","yValue":730000},{"xValue":"/Date(1454828400000)/","yValue":726500},{"xValue":"/Date(1454828400000)/","yValue":588000},{"xValue":"/Date(1454828400000)/","yValue":560000},{"xValue":"/Date(1455433200000)/","yValue":1220000},{"xValue":"/Date(1455433200000)/","yValue":930000},{"xValue":"/Date(1455433200000)/","yValue":970000},{"xValue":"/Date(1455433200000)/","yValue":841000},{"xValue":"/Date(1455433200000)/","yValue":772000},{"xValue":"/Date(1455433200000)/","yValue":722000},{"xValue":"/Date(1455433200000)/","yValue":720000},{"xValue":"/Date(1455433200000)/","yValue":752000},{"xValue":"/Date(1455433200000)/","yValue":711000},{"xValue":"/Date(1455433200000)/","yValue":661000},{"xValue":"/Date(1455433200000)/","yValue":678888},{"xValue":"/Date(1455433200000)/","yValue":516000},{"xValue":"/Date(1456038000000)/","yValue":1548000},{"xValue":"/Date(1456038000000)/","yValue":1090000},{"xValue":"/Date(1456038000000)/","yValue":915000},{"xValue":"/Date(1456038000000)/","yValue":735000},{"xValue":"/Date(1456038000000)/","yValue":811000},{"xValue":"/Date(1456038000000)/","yValue":730000},{"xValue":"/Date(1456038000000)/","yValue":700500},{"xValue":"/Date(1456038000000)/","yValue":725000},{"xValue":"/Date(1456038000000)/","yValue":718000},{"xValue":"/Date(1456038000000)/","yValue":620100},{"xValue":"/Date(1456038000000)/","yValue":718000},{"xValue":"/Date(1456038000000)/","yValue":615500},{"xValue":"/Date(1456038000000)/","yValue":550000},{"xValue":"/Date(1456642800000)/","yValue":1250000},{"xValue":"/Date(1456642800000)/","yValue":1020000},{"xValue":"/Date(1456642800000)/","yValue":855000},{"xValue":"/Date(1456642800000)/","yValue":742500},{"xValue":"/Date(1456642800000)/","yValue":738000},{"xValue":"/Date(1456642800000)/","yValue":797500},{"xValue":"/Date(1456642800000)/","yValue":729999},{"xValue":"/Date(1456642800000)/","yValue":785000},{"xValue":"/Date(1456642800000)/","yValue":750000},{"xValue":"/Date(1456642800000)/","yValue":645288},{"xValue":"/Date(1456642800000)/","yValue":489900},{"xValue":"/Date(1457247600000)/","yValue":930000},{"xValue":"/Date(1457247600000)/","yValue":710000},{"xValue":"/Date(1457247600000)/","yValue":750000},{"xValue":"/Date(1457247600000)/","yValue":670000},{"xValue":"/Date(1457247600000)/","yValue":730000},{"xValue":"/Date(1457852400000)/","yValue":1130000},{"xValue":"/Date(1457852400000)/","yValue":1011000},{"xValue":"/Date(1457852400000)/","yValue":888000},{"xValue":"/Date(1457852400000)/","yValue":815000},{"xValue":"/Date(1457852400000)/","yValue":835000},{"xValue":"/Date(1457852400000)/","yValue":725000},{"xValue":"/Date(1457852400000)/","yValue":775000},{"xValue":"/Date(1457852400000)/","yValue":786000},{"xValue":"/Date(1457852400000)/","yValue":836700},{"xValue":"/Date(1457852400000)/","yValue":770000},{"xValue":"/Date(1457852400000)/","yValue":675000},{"xValue":"/Date(1457852400000)/","yValue":699000},{"xValue":"/Date(1457852400000)/","yValue":655000},{"xValue":"/Date(1458457200000)/","yValue":945000},{"xValue":"/Date(1458457200000)/","yValue":859000},{"xValue":"/Date(1458457200000)/","yValue":866500},{"xValue":"/Date(1458457200000)/","yValue":778000},{"xValue":"/Date(1458457200000)/","yValue":702000},{"xValue":"/Date(1458457200000)/","yValue":825000},{"xValue":"/Date(14584
57200000)/","yValue":640000},{"xValue":"/Date(1458457200000)/","yValue":716500},{"xValue":"/Date(1458457200000)/","yValue":750000},{"xValue":"/Date(1458457200000)/","yValue":565000},{"xValue":"/Date(1459062000000)/","yValue":852000},{"xValue":"/Date(1459062000000)/","yValue":888000},{"xValue":"/Date(1459062000000)/","yValue":700000},{"xValue":"/Date(1459062000000)/","yValue":770000},{"xValue":"/Date(1459062000000)/","yValue":751000},{"xValue":"/Date(1459062000000)/","yValue":765000},{"xValue":"/Date(1459062000000)/","yValue":770000},{"xValue":"/Date(1459062000000)/","yValue":675000},{"xValue":"/Date(1459062000000)/","yValue":782177},{"xValue":"/Date(1459666800000)/","yValue":1150000},{"xValue":"/Date(1459666800000)/","yValue":1275000},{"xValue":"/Date(1459666800000)/","yValue":885000},{"xValue":"/Date(1459666800000)/","yValue":805000},{"xValue":"/Date(1459666800000)/","yValue":703000},{"xValue":"/Date(1459666800000)/","yValue":670000},{"xValue":"/Date(1459666800000)/","yValue":705000},{"xValue":"/Date(1459666800000)/","yValue":688000},{"xValue":"/Date(1460271600000)/","yValue":1150000},{"xValue":"/Date(1460271600000)/","yValue":885000},{"xValue":"/Date(1460271600000)/","yValue":903000},{"xValue":"/Date(1460271600000)/","yValue":910000},{"xValue":"/Date(1460271600000)/","yValue":845000},{"xValue":"/Date(1460271600000)/","yValue":760000},{"xValue":"/Date(1460271600000)/","yValue":805000},{"xValue":"/Date(1460271600000)/","yValue":807313},{"xValue":"/Date(1460271600000)/","yValue":680000},{"xValue":"/Date(1460876400000)/","yValue":860000},{"xValue":"/Date(1460876400000)/","yValue":820000},{"xValue":"/Date(1460876400000)/","yValue":752000},{"xValue":"/Date(1460876400000)/","yValue":666000}],"DynamicText":"\r\n The 2+ bedroom, 2+ bath, Single-family homes in this area currently on the market in the last 3 months range in price from 89,000 to 2,988,000, with a median price of 799,880.\r\n \r\n To compare, houses are selling in a range of 489,900 to 1,548,000 with a median selling price of 770,000.\r\n 
"},"NumberOfHomeForSaleChart":{"NumberOfSales":116,"NumberOfNewListings":197,"AvgHomeForSale":49,"Series_New":[{"xValue":"/Date(1453618800000)/","yValue":9},{"xValue":"/Date(1454223600000)/","yValue":18},{"xValue":"/Date(1454828400000)/","yValue":14},{"xValue":"/Date(1455433200000)/","yValue":13},{"xValue":"/Date(1456038000000)/","yValue":10},{"xValue":"/Date(1456642800000)/","yValue":12},{"xValue":"/Date(1457247600000)/","yValue":19},{"xValue":"/Date(1457852400000)/","yValue":13},{"xValue":"/Date(1458457200000)/","yValue":17},{"xValue":"/Date(1459062000000)/","yValue":16},{"xValue":"/Date(1459666800000)/","yValue":18},{"xValue":"/Date(1460271600000)/","yValue":23},{"xValue":"/Date(1460876400000)/","yValue":13},{"xValue":"/Date(1461481200000)/","yValue":2},{"xValue":"/Date(1462086000000)/","yValue":0}],"Series_ForSale":[{"xValue":"/Date(1453618800000)/","yValue":28},{"xValue":"/Date(1454223600000)/","yValue":27},{"xValue":"/Date(1454828400000)/","yValue":36},{"xValue":"/Date(1455433200000)/","yValue":43},{"xValue":"/Date(1456038000000)/","yValue":42},{"xValue":"/Date(1456642800000)/","yValue":39},{"xValue":"/Date(1457247600000)/","yValue":37},{"xValue":"/Date(1457852400000)/","yValue":49},{"xValue":"/Date(1458457200000)/","yValue":48},{"xValue":"/Date(1459062000000)/","yValue":52},{"xValue":"/Date(1459666800000)/","yValue":55},{"xValue":"/Date(1460271600000)/","yValue":60},{"xValue":"/Date(1460876400000)/","yValue":68},{"xValue":"/Date(1461481200000)/","yValue":77},{"xValue":"/Date(1462086000000)/","yValue":79}],"Series_Sold":[{"xValue":"/Date(1453618800000)/","yValue":9},{"xValue":"/Date(1454223600000)/","yValue":7},{"xValue":"/Date(1454828400000)/","yValue":6},{"xValue":"/Date(1455433200000)/","yValue":12},{"xValue":"/Date(1456038000000)/","yValue":13},{"xValue":"/Date(1456642800000)/","yValue":11},{"xValue":"/Date(1457247600000)/","yValue":5},{"xValue":"/Date(1457852400000)/","yValue":13},{"xValue":"/Date(1458457200000)/","yValue":10},{"xValue":"/Date(1459062000000)/","yValue":9},{"xValue":"/Date(1459666800000)/","yValue":8},{"xValue":"/Date(1460271600000)/","yValue":9},{"xValue":"/Date(1460876400000)/","yValue":4},{"xValue":"/Date(1461481200000)/","yValue":0},{"xValue":"/Date(1462086000000)/","yValue":0}],"DynamicText":"\r\n Of the 2+ bedroom, 2+ bath Single-family homes in this area in the last 3 months, 113 homes have sold and 49 are currently on the market. Plus, 197 new listings came on the market and 113 were sold.\r\n \r\n This means there is 1.3 months of inventory in the market today.\r\n \r\n A balanced market is defined as having 5 - 7 months of inventory, while a seller’s market has less than 5 months of inventory and a buyer’s market has more than 7 months of inventory.\r\n "},"WhenCreated":"/Date(1461685795390)/","PeriodStartDate":"/Date(1453791600000)/","PeriodEndDate":"/Date(1461654000000)/","PeriodStartDate_Chart":"/Date(1453618800000)/","HasCommunityLicens":true,"version":"1.0"} |
||
DSymtabMod.py | # Copyright 2017 Rice University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from program_helper.ast.ops import Node
from utilities.vocab_building_dictionary import DELIM
class DSymtabMod(Node):
def | (self, val,
type_helper=None,
child=None, sibling=None):
super().__init__(val, child, sibling)
self.type_helper = type_helper if type_helper is not None else DELIM
self.type = DSymtabMod.name()
@staticmethod
def name():
return 'DSymtabMod'
| __init__ |
index.ts | function | (n: number): number[] {
return Array.from({ length: n }, (_, index) => n - index);
}
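// Illustrative usage (assumed behavior of the completed function):
// reverseSeq(5) would return [5, 4, 3, 2, 1], since each index i maps to n - i.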
export default reverseSeq;
| reverseSeq |
Bar.js | import { makeStyles } from '@material-ui/core/styles';
import clsx from 'clsx';
import AppBar from '@material-ui/core/AppBar';
import BugReportIcon from '@material-ui/icons/BugReport';
import HelpIcon from '@material-ui/icons/Help';
import FolderOpenIcon from '@material-ui/icons/FolderOpen';
import IconButton from '@material-ui/core/IconButton';
import ListIcon from '@material-ui/icons/List';
import SettingsIcon from '@material-ui/icons/Settings';
import Toolbar from '@material-ui/core/Toolbar';
import Tooltip from '@material-ui/core/Tooltip';
import Typography from '@material-ui/core/Typography';
const useStyles = makeStyles((theme) => ({
title: {
flexGrow: 1,
},
appBar: {
transition: theme.transitions.create(['margin', 'width'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
},
appBarShift: {
width: ({drawerWidth}) => `calc(100% - ${drawerWidth}px)`,
transition: theme.transitions.create(['margin', 'width'], {
easing: theme.transitions.easing.easeOut,
duration: theme.transitions.duration.enteringScreen,
}),
marginRight: ({drawerWidth}) => drawerWidth,
},
copyHistoryButton: {
transform: 'scaleX(1)',
width: 48,
padding: 12,
transition: theme.transitions.create(['transform', 'padding', 'width'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
})
},
copyHistoryButtonShift: {
transform: 'scaleX(0)',
width: 0,
padding: 5,
transition: theme.transitions.create(['transform', 'padding', 'width'], {
easing: theme.transitions.easing.easeOut,
duration: theme.transitions.duration.enteringScreen,
}),
},
hide: {
display: 'none'
}
}));
const useCopyHistoryTooltipStyles = makeStyles((theme) => ({
tooltip: ({show}) => ({
display: show ? 'block' : 'none',
}),
}));
function | ({show, ...toolTipProps}) {
const classes = useCopyHistoryTooltipStyles({show: show});
return <Tooltip classes={classes} {...toolTipProps} />;
}
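// Illustrative intent (assumed): rendering <CopyHistoryTooltip show={false} ...>
// keeps the wrapped button mounted but suppresses the tooltip popup itself by
// applying display: none via the tooltip class defined above.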
export default function Bar(props) {
const classes = useStyles(props);
return (
<AppBar
position="static"
elevation={0}
className={clsx(classes.appBar, {
[classes.appBarShift]: props.drawerOpen,
})}
>
<Toolbar>
<Tooltip title="Open Files">
<IconButton
edge="start"
color="inherit"
onClick={props.onFileSelector}
>
<FolderOpenIcon />
</IconButton>
</Tooltip>
<Typography variant="h6" className={classes.title}>
{props.title}
</Typography>
<Tooltip title="Submit Issue">
<IconButton
edge="end"
color="inherit"
component="a"
href="https://github.com/killergerbah/asbplayer/issues"
target="_blank"
rel="noreferrer"
>
<BugReportIcon />
</IconButton>
</Tooltip>
<Tooltip title="Help">
<IconButton
edge="end"
color="inherit"
onClick={props.onOpenHelp}
>
<HelpIcon />
</IconButton>
</Tooltip>
<Tooltip title="Settings">
<IconButton
edge="end"
color="inherit"
onClick={props.onOpenSettings}
>
<SettingsIcon />
</IconButton>
</Tooltip>
<CopyHistoryTooltip title="Copy History" show={!props.drawerOpen}>
<IconButton
edge="end"
color="inherit"
aria-label="menu"
className={clsx(classes.copyHistoryButton, {
[classes.copyHistoryButtonShift]: props.drawerOpen,
})}
onClick={props.onOpenCopyHistory}
>
<ListIcon />
</IconButton>
</CopyHistoryTooltip>
</Toolbar>
</AppBar>
);
}
| CopyHistoryTooltip |
report.ts | import { Symptom } from './symptom';
import { User } from './user';
export class Report {
private _symptoms: Symptom[];
private _user: User;
private _end_date: Date;
private _start_date: Date;
get user(): User {
return this._user;
}
set user(value: User) {
this._user = value;
}
get symptoms(): Symptom[] {
return this._symptoms;
}
set symptoms(value: Symptom[]) { | }
get start_date(): Date {
return this._start_date;
}
set start_date(value: Date) {
this._start_date = value;
}
get end_date(): Date {
return this._end_date;
}
set end_date(value: Date) {
this._end_date = value;
}
} | this._symptoms = value; |
ipfs-reader.d.ts | import { IPackageJson } from 'package-json-type';
import { Reader } from './reader';
export declare class | implements Reader {
private readonly cid;
private ipfs;
constructor(cid: string, gateway: string);
getPkg(): Promise<IPackageJson | undefined>;
getProjectSchema(): Promise<unknown | undefined>;
getFile(fileName: string): Promise<unknown | undefined>;
}
| IPFSReader |
_machsignals.py | global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, '_machsignals.cpython-36dm-darwin.so')
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__() | def __bootstrap__(): |
|
compiler.py | import datetime
from django.conf import settings
from django.core.exceptions import FieldError
from django.db.backends.util import truncate_name
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import select_related_descend, QueryWrapper
from django.db.models.sql.constants import (SINGLE, MULTI, ORDER_DIR,
GET_ITERATOR_CHUNK_SIZE, SelectInfo)
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.sql.query import get_order_dir, Query
from django.db.utils import DatabaseError
from django.utils import six
from django.utils.six.moves import zip
from django.utils import timezone
class SQLCompiler(object):
def | (self, query, connection, using):
self.query = query
self.connection = connection
self.using = using
self.quote_cache = {}
# When ordering a queryset with distinct on a column not part of the
# select set, the ordering column needs to be added to the select
        # clause. This information is needed both in SQL construction and in
        # masking away the ordering selects from the returned row.
self.ordering_aliases = []
self.ordering_params = []
def pre_sql_setup(self):
"""
Does any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
# TODO: after the query has been executed, the altered state should be
# cleaned. We are not using a clone() of the query here.
"""
if not self.query.tables:
self.query.join((None, self.query.get_meta().db_table, None))
if (not self.query.select and self.query.default_cols and not
self.query.included_inherited_models):
self.query.setup_inherited_models()
if self.query.select_related and not self.query.related_select_cols:
self.fill_related_selections()
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if ((name in self.query.alias_map and name not in self.query.table_map) or
name in self.query.extra_select):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
if with_limits and self.query.low_mark == self.query.high_mark:
return '', ()
self.pre_sql_setup()
# After executing the query, we must get rid of any joins the query
# setup created. So, take note of alias counts before the query ran.
        # However, we do not want to get rid of stuff done in pre_sql_setup(),
# as the pre_sql_setup will modify query state in a way that forbids
# another run of it.
self.refcounts_before = self.query.alias_refcount.copy()
out_cols, s_params = self.get_columns(with_col_aliases)
ordering, o_params, ordering_group_by = self.get_ordering()
distinct_fields = self.get_distinct()
# This must come after 'select', 'ordering' and 'distinct' -- see
# docstring of get_from_clause() for details.
from_, f_params = self.get_from_clause()
qn = self.quote_name_unless_alias
where, w_params = self.query.where.as_sql(qn=qn, connection=self.connection)
having, h_params = self.query.having.as_sql(qn=qn, connection=self.connection)
having_group_by = self.query.having.get_cols()
params = []
for val in six.itervalues(self.query.extra_select):
params.extend(val[1])
result = ['SELECT']
if self.query.distinct:
result.append(self.connection.ops.distinct_sql(distinct_fields))
params.extend(o_params)
result.append(', '.join(out_cols + self.ordering_aliases))
params.extend(s_params)
params.extend(self.ordering_params)
result.append('FROM')
result.extend(from_)
params.extend(f_params)
if where:
result.append('WHERE %s' % where)
params.extend(w_params)
grouping, gb_params = self.get_grouping(having_group_by, ordering_group_by)
if grouping:
if distinct_fields:
raise NotImplementedError(
"annotate() + distinct(fields) not implemented.")
if not ordering:
ordering = self.connection.ops.force_no_ordering()
result.append('GROUP BY %s' % ', '.join(grouping))
params.extend(gb_params)
if having:
result.append('HAVING %s' % having)
params.extend(h_params)
if ordering:
result.append('ORDER BY %s' % ', '.join(ordering))
if with_limits:
if self.query.high_mark is not None:
result.append('LIMIT %d' % (self.query.high_mark - self.query.low_mark))
if self.query.low_mark:
if self.query.high_mark is None:
val = self.connection.ops.no_limit_value()
if val:
result.append('LIMIT %d' % val)
result.append('OFFSET %d' % self.query.low_mark)
if self.query.select_for_update and self.connection.features.has_select_for_update:
# If we've been asked for a NOWAIT query but the backend does not support it,
# raise a DatabaseError otherwise we could get an unexpected deadlock.
nowait = self.query.select_for_update_nowait
if nowait and not self.connection.features.has_select_for_update_nowait:
raise DatabaseError('NOWAIT is not supported on this database backend.')
result.append(self.connection.ops.for_update_sql(nowait=nowait))
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(self.refcounts_before)
return ' '.join(result), tuple(params)
def as_nested_sql(self):
"""
Perform the same functionality as the as_sql() method, returning an
SQL string and parameters. However, the alias prefixes are bumped
beforehand (in a copy -- the current query isn't changed), and any
ordering is removed if the query is unsliced.
Used when nesting this query inside another.
"""
obj = self.query.clone()
if obj.low_mark == 0 and obj.high_mark is None:
# If there is no slicing in use, then we can safely drop all ordering
obj.clear_ordering(True)
obj.bump_prefix()
return obj.get_compiler(connection=self.connection).as_sql()
def get_columns(self, with_aliases=False):
"""
Returns the list of columns to use in the select statement, as well as
        a list of any extra parameters that need to be included. If no columns
have been specified, returns all columns relating to fields in the
model.
If 'with_aliases' is true, any column names that are duplicated
(without the table names) are given unique aliases. This is needed in
some cases to avoid ambiguity with nested queries.
"""
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
result = ['(%s) AS %s' % (col[0], qn2(alias)) for alias, col in six.iteritems(self.query.extra_select)]
params = []
aliases = set(self.query.extra_select.keys())
if with_aliases:
col_aliases = aliases.copy()
else:
col_aliases = set()
if self.query.select:
only_load = self.deferred_to_columns()
for col, _ in self.query.select:
if isinstance(col, (list, tuple)):
alias, column = col
table = self.query.alias_map[alias].table_name
if table in only_load and column not in only_load[table]:
continue
r = '%s.%s' % (qn(alias), qn(column))
if with_aliases:
if col[1] in col_aliases:
c_alias = 'Col%d' % len(col_aliases)
result.append('%s AS %s' % (r, c_alias))
aliases.add(c_alias)
col_aliases.add(c_alias)
else:
result.append('%s AS %s' % (r, qn2(col[1])))
aliases.add(r)
col_aliases.add(col[1])
else:
result.append(r)
aliases.add(r)
col_aliases.add(col[1])
else:
col_sql, col_params = col.as_sql(qn, self.connection)
result.append(col_sql)
params.extend(col_params)
if hasattr(col, 'alias'):
aliases.add(col.alias)
col_aliases.add(col.alias)
elif self.query.default_cols:
cols, new_aliases = self.get_default_columns(with_aliases,
col_aliases)
result.extend(cols)
aliases.update(new_aliases)
max_name_length = self.connection.ops.max_name_length()
for alias, aggregate in self.query.aggregate_select.items():
agg_sql, agg_params = aggregate.as_sql(qn, self.connection)
if alias is None:
result.append(agg_sql)
else:
result.append('%s AS %s' % (agg_sql, qn(truncate_name(alias, max_name_length))))
params.extend(agg_params)
for (table, col), _ in self.query.related_select_cols:
r = '%s.%s' % (qn(table), qn(col))
if with_aliases and col in col_aliases:
c_alias = 'Col%d' % len(col_aliases)
result.append('%s AS %s' % (r, c_alias))
aliases.add(c_alias)
col_aliases.add(c_alias)
else:
result.append(r)
aliases.add(r)
col_aliases.add(col)
self._select_aliases = aliases
return result, params
def get_default_columns(self, with_aliases=False, col_aliases=None,
start_alias=None, opts=None, as_pairs=False, from_parent=None):
"""
Computes the default columns for selecting every field in the base
model. Will sometimes be called to pull in related models (e.g. via
select_related), in which case "opts" and "start_alias" will be given
to provide a starting point for the traversal.
Returns a list of strings, quoted appropriately for use in SQL
directly, as well as a set of aliases used in the select statement (if
'as_pairs' is True, returns a list of (alias, col_name) pairs instead
of strings as the first component and None as the second component).
"""
result = []
if opts is None:
opts = self.query.get_meta()
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
aliases = set()
only_load = self.deferred_to_columns()
if not start_alias:
start_alias = self.query.get_initial_alias()
# The 'seen_models' is used to optimize checking the needed parent
# alias for a given field. This also includes None -> start_alias to
# be used by local fields.
seen_models = {None: start_alias}
for field, model in opts.get_concrete_fields_with_model():
if from_parent and model is not None and issubclass(from_parent, model):
# Avoid loading data for already loaded parents.
continue
alias = self.query.join_parent_model(opts, model, start_alias,
seen_models)
table = self.query.alias_map[alias].table_name
if table in only_load and field.column not in only_load[table]:
continue
if as_pairs:
result.append((alias, field.column))
aliases.add(alias)
continue
if with_aliases and field.column in col_aliases:
c_alias = 'Col%d' % len(col_aliases)
result.append('%s.%s AS %s' % (qn(alias),
qn2(field.column), c_alias))
col_aliases.add(c_alias)
aliases.add(c_alias)
else:
r = '%s.%s' % (qn(alias), qn2(field.column))
result.append(r)
aliases.add(r)
if with_aliases:
col_aliases.add(field.column)
return result, aliases
def get_distinct(self):
"""
Returns a quoted list of fields to use in DISTINCT ON part of the query.
Note that this method can alter the tables in the query, and thus it
must be called before get_from_clause().
"""
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
result = []
opts = self.query.get_meta()
for name in self.query.distinct_fields:
parts = name.split(LOOKUP_SEP)
field, cols, alias, _, _ = self._setup_joins(parts, opts, None)
cols, alias = self._final_join_removal(cols, alias)
for col in cols:
result.append("%s.%s" % (qn(alias), qn2(col)))
return result
def get_ordering(self):
"""
Returns a tuple containing a list representing the SQL elements in the
"order by" clause, and the list of SQL elements that need to be added
to the GROUP BY clause as a result of the ordering.
Also sets the ordering_aliases attribute on this instance to a list of
extra aliases needed in the select.
Determining the ordering SQL can change the tables we need to include,
so this should be run *before* get_from_clause().
"""
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
else:
ordering = (self.query.order_by
or self.query.get_meta().ordering
or [])
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
distinct = self.query.distinct
select_aliases = self._select_aliases
result = []
group_by = []
ordering_aliases = []
if self.query.standard_ordering:
asc, desc = ORDER_DIR['ASC']
else:
asc, desc = ORDER_DIR['DESC']
# It's possible, due to model inheritance, that normal usage might try
# to include the same field more than once in the ordering. We track
# the table/column pairs we use and discard any after the first use.
processed_pairs = set()
params = []
ordering_params = []
for pos, field in enumerate(ordering):
if field == '?':
result.append(self.connection.ops.random_function_sql())
continue
if isinstance(field, int):
if field < 0:
order = desc
field = -field
else:
order = asc
result.append('%s %s' % (field, order))
group_by.append((str(field), []))
continue
col, order = get_order_dir(field, asc)
if col in self.query.aggregate_select:
result.append('%s %s' % (qn(col), order))
continue
if '.' in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split('.', 1)
if (table, col) not in processed_pairs:
elt = '%s.%s' % (qn(table), col)
processed_pairs.add((table, col))
if not distinct or elt in select_aliases:
result.append('%s %s' % (elt, order))
group_by.append((elt, []))
elif get_order_dir(field)[0] not in self.query.extra:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
for table, cols, order in self.find_ordering_name(field,
self.query.get_meta(), default_order=asc):
for col in cols:
if (table, col) not in processed_pairs:
elt = '%s.%s' % (qn(table), qn2(col))
processed_pairs.add((table, col))
if distinct and elt not in select_aliases:
ordering_aliases.append(elt)
result.append('%s %s' % (elt, order))
group_by.append((elt, []))
else:
elt = qn2(col)
if col not in self.query.extra_select:
sql = "(%s) AS %s" % (self.query.extra[col][0], elt)
ordering_aliases.append(sql)
ordering_params.extend(self.query.extra[col][1])
else:
if distinct and col not in select_aliases:
ordering_aliases.append(elt)
ordering_params.extend(params)
result.append('%s %s' % (elt, order))
group_by.append(self.query.extra[col])
self.ordering_aliases = ordering_aliases
self.ordering_params = ordering_params
return result, params, group_by
def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
already_seen=None):
"""
Returns the table alias (the name might be ambiguous, the alias will
not be) and column name for ordering by the given 'name' parameter.
The 'name' is of the form 'field1__field2__...__fieldN'.
"""
name, order = get_order_dir(name, default_order)
pieces = name.split(LOOKUP_SEP)
field, cols, alias, joins, opts = self._setup_joins(pieces, opts, alias)
# If we get to this point and the field is a relation to another model,
# append the default ordering for that model.
if field.rel and len(joins) > 1 and opts.ordering:
# Firstly, avoid infinite loops.
if not already_seen:
already_seen = set()
join_tuple = tuple([self.query.alias_map[j].table_name for j in joins])
if join_tuple in already_seen:
raise FieldError('Infinite loop caused by ordering.')
already_seen.add(join_tuple)
results = []
for item in opts.ordering:
results.extend(self.find_ordering_name(item, opts, alias,
order, already_seen))
return results
cols, alias = self._final_join_removal(cols, alias)
return [(alias, cols, order)]
def _setup_joins(self, pieces, opts, alias):
"""
A helper method for get_ordering and get_distinct. This method will
call query.setup_joins, handle refcounts and then promote the joins.
        Note that get_ordering and get_distinct must produce the same target
        columns for the same input, as the prefixes of get_ordering and
        get_distinct must match. Executing SQL where this is not true is an
        error.
"""
if not alias:
alias = self.query.get_initial_alias()
field, targets, opts, joins, _ = self.query.setup_joins(
pieces, opts, alias)
# We will later on need to promote those joins that were added to the
# query afresh above.
joins_to_promote = [j for j in joins if self.query.alias_refcount[j] < 2]
alias = joins[-1]
cols = [target.column for target in targets]
if not field.rel:
# To avoid inadvertent trimming of a necessary alias, use the
# refcount to show that we are referencing a non-relation field on
# the model.
self.query.ref_alias(alias)
# Must use left outer joins for nullable fields and their relations.
# Ordering or distinct must not affect the returned set, and INNER
# JOINS for nullable fields could do this.
self.query.promote_joins(joins_to_promote)
return field, cols, alias, joins, opts
def _final_join_removal(self, cols, alias):
"""
A helper method for get_distinct and get_ordering. This method will
trim extra not-needed joins from the tail of the join chain.
This is very similar to what is done in trim_joins, but we will
trim LEFT JOINS here. It would be a good idea to consolidate this
method and query.trim_joins().
"""
if alias:
while 1:
join = self.query.alias_map[alias]
lhs_cols, rhs_cols = zip(*[(lhs_col, rhs_col) for lhs_col, rhs_col in join.join_cols])
if set(cols) != set(rhs_cols):
break
cols = [lhs_cols[rhs_cols.index(col)] for col in cols]
self.query.unref_alias(alias)
alias = join.lhs_alias
return cols, alias
def get_from_clause(self):
"""
Returns a list of strings that are joined together to go after the
"FROM" part of the query, as well as a list any extra parameters that
need to be included. Sub-classes, can override this to create a
from-clause via a "select".
This should only be called after any SQL construction methods that
might change the tables we need. This means the select columns,
ordering and distinct must be done first.
"""
result = []
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
first = True
from_params = []
for alias in self.query.tables:
if not self.query.alias_refcount[alias]:
continue
try:
name, alias, join_type, lhs, join_cols, _, join_field = self.query.alias_map[alias]
except KeyError:
# Extra tables can end up in self.tables, but not in the
# alias_map if they aren't in a join. That's OK. We skip them.
continue
alias_str = '' if alias == name else (' %s' % alias)
if join_type and not first:
extra_cond = join_field.get_extra_restriction(
self.query.where_class, alias, lhs)
if extra_cond:
extra_sql, extra_params = extra_cond.as_sql(
qn, self.connection)
extra_sql = 'AND (%s)' % extra_sql
from_params.extend(extra_params)
else:
extra_sql = ""
result.append('%s %s%s ON ('
% (join_type, qn(name), alias_str))
for index, (lhs_col, rhs_col) in enumerate(join_cols):
if index != 0:
result.append(' AND ')
result.append('%s.%s = %s.%s' %
(qn(lhs), qn2(lhs_col), qn(alias), qn2(rhs_col)))
result.append('%s)' % extra_sql)
else:
connector = '' if first else ', '
result.append('%s%s%s' % (connector, qn(name), alias_str))
first = False
for t in self.query.extra_tables:
alias, unused = self.query.table_alias(t)
            # Only add the alias if it's not already present (the table_alias()
            # call increments the refcount, so an alias refcount of one means
            # this is the only reference).
if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1:
connector = '' if first else ', '
result.append('%s%s' % (connector, qn(alias)))
first = False
return result, from_params
def get_grouping(self, having_group_by, ordering_group_by):
"""
Returns a tuple representing the SQL elements in the "group by" clause.
"""
qn = self.quote_name_unless_alias
result, params = [], []
if self.query.group_by is not None:
select_cols = self.query.select + self.query.related_select_cols
# Just the column, not the fields.
select_cols = [s[0] for s in select_cols]
if (len(self.query.get_meta().concrete_fields) == len(self.query.select)
and self.connection.features.allows_group_by_pk):
self.query.group_by = [
(self.query.get_meta().db_table, self.query.get_meta().pk.column)
]
select_cols = []
seen = set()
cols = self.query.group_by + having_group_by + select_cols
for col in cols:
col_params = ()
if isinstance(col, (list, tuple)):
sql = '%s.%s' % (qn(col[0]), qn(col[1]))
elif hasattr(col, 'as_sql'):
sql, col_params = col.as_sql(qn, self.connection)
else:
sql = '(%s)' % str(col)
if sql not in seen:
result.append(sql)
params.extend(col_params)
seen.add(sql)
            # We still need to add everything in the ordering (unless the
            # backend can group by just the PK).
if ordering_group_by and not self.connection.features.allows_group_by_pk:
for order, order_params in ordering_group_by:
# Even if we have seen the same SQL string, it might have
                    # different params, so we add the same SQL in the "has params" case.
if order not in seen or params:
result.append(order)
params.extend(order_params)
seen.add(order)
# Unconditionally add the extra_select items.
for extra_select, extra_params in self.query.extra_select.values():
sql = '(%s)' % str(extra_select)
result.append(sql)
params.extend(extra_params)
return result, params
def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
requested=None, restricted=None, nullable=None):
"""
Fill in the information needed for a select_related query. The current
depth is measured as the number of connections away from the root model
(for example, cur_depth=1 means we are looking at models with direct
connections to the root model).
"""
if not restricted and self.query.max_depth and cur_depth > self.query.max_depth:
# We've recursed far enough; bail out.
return
if not opts:
opts = self.query.get_meta()
root_alias = self.query.get_initial_alias()
self.query.related_select_cols = []
only_load = self.query.get_loaded_field_names()
# Setup for the case when only particular related fields should be
# included in the related selection.
if requested is None:
if isinstance(self.query.select_related, dict):
requested = self.query.select_related
restricted = True
else:
restricted = False
for f, model in opts.get_fields_with_model():
# The get_fields_with_model() returns None for fields that live
# in the field's local model. So, for those fields we want to use
# the f.model - that is the field's local model.
field_model = model or f.model
if not select_related_descend(f, restricted, requested,
only_load.get(field_model)):
continue
promote = nullable or f.null
_, _, _, joins, _ = self.query.setup_joins(
[f.name], opts, root_alias, outer_if_first=promote)
alias = joins[-1]
columns, aliases = self.get_default_columns(start_alias=alias,
opts=f.rel.to._meta, as_pairs=True)
self.query.related_select_cols.extend(
SelectInfo(col, field) for col, field in zip(columns, f.rel.to._meta.concrete_fields))
if restricted:
next = requested.get(f.name, {})
else:
next = False
new_nullable = f.null or promote
self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1,
next, restricted, new_nullable)
if restricted:
related_fields = [
(o.field, o.model)
for o in opts.get_all_related_objects()
if o.field.unique
]
for f, model in related_fields:
if not select_related_descend(f, restricted, requested,
only_load.get(model), reverse=True):
continue
_, _, _, joins, _ = self.query.setup_joins(
[f.related_query_name()], opts, root_alias, outer_if_first=True)
alias = joins[-1]
from_parent = (opts.model if issubclass(model, opts.model)
else None)
columns, aliases = self.get_default_columns(start_alias=alias,
opts=model._meta, as_pairs=True, from_parent=from_parent)
self.query.related_select_cols.extend(
SelectInfo(col, field) for col, field
in zip(columns, model._meta.concrete_fields))
next = requested.get(f.related_query_name(), {})
# Use True here because we are looking at the _reverse_ side of
# the relation, which is always nullable.
new_nullable = True
table = model._meta.db_table
self.fill_related_selections(model._meta, table, cur_depth+1,
next, restricted, new_nullable)
def deferred_to_columns(self):
"""
Converts the self.deferred_loading data structure to mapping of table
names to sets of column names which are to be loaded. Returns the
dictionary.
"""
columns = {}
self.query.deferred_to_data(columns, self.query.deferred_to_columns_cb)
return columns
def results_iter(self):
"""
Returns an iterator over the results from executing this query.
"""
resolve_columns = hasattr(self, 'resolve_columns')
fields = None
has_aggregate_select = bool(self.query.aggregate_select)
for rows in self.execute_sql(MULTI):
for row in rows:
if resolve_columns:
if fields is None:
# We only set this up here because
# related_select_cols isn't populated until
# execute_sql() has been called.
# We also include types of fields of related models that
# will be included via select_related() for the benefit
# of MySQL/MySQLdb when boolean fields are involved
# (#15040).
# This code duplicates the logic for the order of fields
# found in get_columns(). It would be nice to clean this up.
if self.query.select:
fields = [f.field for f in self.query.select]
else:
fields = self.query.get_meta().concrete_fields
fields = fields + [f.field for f in self.query.related_select_cols]
# If the field was deferred, exclude it from being passed
# into `resolve_columns` because it wasn't selected.
only_load = self.deferred_to_columns()
if only_load:
db_table = self.query.get_meta().db_table
fields = [f for f in fields if db_table in only_load and
f.column in only_load[db_table]]
row = self.resolve_columns(row, fields)
if has_aggregate_select:
loaded_fields = self.query.get_loaded_field_names().get(self.query.model, set()) or self.query.select
aggregate_start = len(self.query.extra_select) + len(loaded_fields)
aggregate_end = aggregate_start + len(self.query.aggregate_select)
row = tuple(row[:aggregate_start]) + tuple([
self.query.resolve_aggregate(value, aggregate, self.connection)
for (alias, aggregate), value
in zip(self.query.aggregate_select.items(), row[aggregate_start:aggregate_end])
]) + tuple(row[aggregate_end:])
yield row
def execute_sql(self, result_type=MULTI):
"""
Run the query against the database and returns the result(s). The
return value is a single data item if result_type is SINGLE, or an
iterator over the results if the result_type is MULTI.
result_type is either MULTI (use fetchmany() to retrieve all rows),
SINGLE (only retrieve a single row), or None. In this last case, the
cursor is returned if any query is executed, since it's used by
        subclasses such as InsertQuery. It's possible, however, that no query
is needed, as the filters describe an empty set. In that case, None is
returned, to avoid any unnecessary database interaction.
"""
try:
sql, params = self.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return iter([])
else:
return
cursor = self.connection.cursor()
cursor.execute(sql, params)
if not result_type:
return cursor
if result_type == SINGLE:
if self.ordering_aliases:
return cursor.fetchone()[:-len(self.ordering_aliases)]
return cursor.fetchone()
# The MULTI case.
if self.ordering_aliases:
result = order_modified_iter(cursor, len(self.ordering_aliases),
self.connection.features.empty_fetchmany_value)
else:
result = iter((lambda: cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)),
self.connection.features.empty_fetchmany_value)
if not self.connection.features.can_use_chunked_reads:
# If we are using non-chunked reads, we return the same data
# structure as normally, but ensure it is all read into memory
# before going any further.
return list(result)
return result
def as_subquery_condition(self, alias, columns, qn):
qn2 = self.connection.ops.quote_name
if len(columns) == 1:
sql, params = self.as_sql()
return '%s.%s IN (%s)' % (qn(alias), qn2(columns[0]), sql), params
for index, select_col in enumerate(self.query.select):
lhs = '%s.%s' % (qn(select_col.col[0]), qn2(select_col.col[1]))
rhs = '%s.%s' % (qn(alias), qn2(columns[index]))
self.query.where.add(
QueryWrapper('%s = %s' % (lhs, rhs), []), 'AND')
sql, params = self.as_sql()
return 'EXISTS (%s)' % sql, params
class SQLInsertCompiler(SQLCompiler):
def __init__(self, *args, **kwargs):
self.return_id = False
super(SQLInsertCompiler, self).__init__(*args, **kwargs)
def placeholder(self, field, val):
if field is None:
# A field value of None means the value is raw.
return val
elif hasattr(field, 'get_placeholder'):
# Some fields (e.g. geo fields) need special munging before
# they can be inserted.
return field.get_placeholder(val, self.connection)
else:
# Return the common case for the placeholder
return '%s'
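    # Illustrative (assumed) behavior: a plain field falls through to the bare
    # '%s' marker, while a field that defines get_placeholder() (e.g. a
    # geometry field) can wrap it into something like 'SomeSpatialFn(%s)'
    # before the INSERT statement is rendered.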
def as_sql(self):
# We don't need quote_name_unless_alias() here, since these are all
# going to be column names (so we can avoid the extra overhead).
qn = self.connection.ops.quote_name
opts = self.query.get_meta()
result = ['INSERT INTO %s' % qn(opts.db_table)]
has_fields = bool(self.query.fields)
fields = self.query.fields if has_fields else [opts.pk]
result.append('(%s)' % ', '.join([qn(f.column) for f in fields]))
if has_fields:
params = values = [
[
f.get_db_prep_save(getattr(obj, f.attname) if self.query.raw else f.pre_save(obj, True), connection=self.connection)
for f in fields
]
for obj in self.query.objs
]
else:
values = [[self.connection.ops.pk_default_value()] for obj in self.query.objs]
params = [[]]
fields = [None]
can_bulk = (not any(hasattr(field, "get_placeholder") for field in fields) and
not self.return_id and self.connection.features.has_bulk_insert)
if can_bulk:
placeholders = [["%s"] * len(fields)]
else:
placeholders = [
[self.placeholder(field, v) for field, v in zip(fields, val)]
for val in values
]
# Oracle Spatial needs to remove some values due to #10888
params = self.connection.ops.modify_insert_params(placeholders, params)
if self.return_id and self.connection.features.can_return_id_from_insert:
params = params[0]
col = "%s.%s" % (qn(opts.db_table), qn(opts.pk.column))
result.append("VALUES (%s)" % ", ".join(placeholders[0]))
r_fmt, r_params = self.connection.ops.return_insert_id()
# Skip empty r_fmt to allow subclasses to customize behaviour for
# 3rd party backends. Refs #19096.
if r_fmt:
result.append(r_fmt % col)
params += r_params
return [(" ".join(result), tuple(params))]
if can_bulk:
result.append(self.connection.ops.bulk_insert_sql(fields, len(values)))
return [(" ".join(result), tuple([v for val in values for v in val]))]
else:
return [
(" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
for p, vals in zip(placeholders, params)
]
def execute_sql(self, return_id=False):
assert not (return_id and len(self.query.objs) != 1)
self.return_id = return_id
cursor = self.connection.cursor()
for sql, params in self.as_sql():
cursor.execute(sql, params)
if not (return_id and cursor):
return
if self.connection.features.can_return_id_from_insert:
return self.connection.ops.fetch_returned_insert_id(cursor)
return self.connection.ops.last_insert_id(cursor,
self.query.get_meta().db_table, self.query.get_meta().pk.column)
class SQLDeleteCompiler(SQLCompiler):
def as_sql(self):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
"""
assert len(self.query.tables) == 1, \
"Can only delete from one table at a time."
qn = self.quote_name_unless_alias
result = ['DELETE FROM %s' % qn(self.query.tables[0])]
where, params = self.query.where.as_sql(qn=qn, connection=self.connection)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(params)
class SQLUpdateCompiler(SQLCompiler):
def as_sql(self):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
"""
self.pre_sql_setup()
if not self.query.values:
return '', ()
table = self.query.tables[0]
qn = self.quote_name_unless_alias
result = ['UPDATE %s' % qn(table)]
result.append('SET')
values, update_params = [], []
for field, model, val in self.query.values:
if hasattr(val, 'prepare_database_save'):
val = val.prepare_database_save(field)
else:
val = field.get_db_prep_save(val, connection=self.connection)
# Getting the placeholder for the field.
if hasattr(field, 'get_placeholder'):
placeholder = field.get_placeholder(val, self.connection)
else:
placeholder = '%s'
if hasattr(val, 'evaluate'):
val = SQLEvaluator(val, self.query, allow_joins=False)
name = field.column
if hasattr(val, 'as_sql'):
sql, params = val.as_sql(qn, self.connection)
values.append('%s = %s' % (qn(name), sql))
update_params.extend(params)
elif val is not None:
values.append('%s = %s' % (qn(name), placeholder))
update_params.append(val)
else:
values.append('%s = NULL' % qn(name))
if not values:
return '', ()
result.append(', '.join(values))
where, params = self.query.where.as_sql(qn=qn, connection=self.connection)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(update_params + params)
def execute_sql(self, result_type):
"""
Execute the specified update. Returns the number of rows affected by
the primary update query. The "primary update query" is the first
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super(SQLUpdateCompiler, self).execute_sql(result_type)
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
del cursor
for query in self.query.get_related_updates():
aux_rows = query.get_compiler(self.using).execute_sql(result_type)
if is_empty:
rows = aux_rows
is_empty = False
return rows
def pre_sql_setup(self):
"""
If the update depends on results from other tables, we need to do some
munging of the "where" conditions to match the format required for
(portable) SQL updates. That is done here.
Further, if we are going to be running multiple updates, we pull out
the id values to update at this point so that they don't change as a
result of the progressive updates.
"""
self.query.select_related = False
self.query.clear_ordering(True)
super(SQLUpdateCompiler, self).pre_sql_setup()
count = self.query.count_active_tables()
if not self.query.related_updates and count == 1:
return
# We need to use a sub-select in the where clause to filter on things
# from other tables.
query = self.query.clone(klass=Query)
query.bump_prefix()
query.extra = {}
query.select = []
query.add_fields([query.get_meta().pk.name])
# Recheck the count - it is possible that fiddling with the select
# fields above removes tables from the query. Refs #18304.
count = query.count_active_tables()
if not self.query.related_updates and count == 1:
return
must_pre_select = count > 1 and not self.connection.features.update_can_self_select
# Now we adjust the current query: reset the where clause and get rid
# of all the tables we don't need (since they're in the sub-select).
self.query.where = self.query.where_class()
if self.query.related_updates or must_pre_select:
# Either we're using the idents in multiple update queries (so
# don't want them to change), or the db backend doesn't support
# selecting from the updating table (e.g. MySQL).
idents = []
for rows in query.get_compiler(self.using).execute_sql(MULTI):
idents.extend([r[0] for r in rows])
self.query.add_filter(('pk__in', idents))
self.query.related_ids = idents
else:
# The fast path. Filters and updates in one query.
self.query.add_filter(('pk__in', query))
for alias in self.query.tables[1:]:
self.query.alias_refcount[alias] = 0
class SQLAggregateCompiler(SQLCompiler):
def as_sql(self, qn=None):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
"""
if qn is None:
qn = self.quote_name_unless_alias
sql, params = [], []
for aggregate in self.query.aggregate_select.values():
agg_sql, agg_params = aggregate.as_sql(qn, self.connection)
sql.append(agg_sql)
params.extend(agg_params)
sql = ', '.join(sql)
params = tuple(params)
sql = 'SELECT %s FROM (%s) subquery' % (sql, self.query.subquery)
params = params + self.query.sub_params
return sql, params
class SQLDateCompiler(SQLCompiler):
def results_iter(self):
"""
Returns an iterator over the results from executing this query.
"""
resolve_columns = hasattr(self, 'resolve_columns')
if resolve_columns:
from django.db.models.fields import DateField
fields = [DateField()]
else:
from django.db.backends.util import typecast_date
needs_string_cast = self.connection.features.needs_datetime_string_cast
offset = len(self.query.extra_select)
for rows in self.execute_sql(MULTI):
for row in rows:
date = row[offset]
if resolve_columns:
date = self.resolve_columns(row, fields)[offset]
elif needs_string_cast:
date = typecast_date(str(date))
if isinstance(date, datetime.datetime):
date = date.date()
yield date
class SQLDateTimeCompiler(SQLCompiler):
def results_iter(self):
"""
Returns an iterator over the results from executing this query.
"""
resolve_columns = hasattr(self, 'resolve_columns')
if resolve_columns:
from django.db.models.fields import DateTimeField
fields = [DateTimeField()]
else:
from django.db.backends.util import typecast_timestamp
needs_string_cast = self.connection.features.needs_datetime_string_cast
offset = len(self.query.extra_select)
for rows in self.execute_sql(MULTI):
for row in rows:
datetime = row[offset]
if resolve_columns:
datetime = self.resolve_columns(row, fields)[offset]
elif needs_string_cast:
datetime = typecast_timestamp(str(datetime))
                # Datetimes are artificially returned in UTC on databases that
# don't support time zone. Restore the zone used in the query.
if settings.USE_TZ:
if datetime is None:
raise ValueError("Database returned an invalid value "
"in QuerySet.dates(). Are time zone "
"definitions and pytz installed?")
datetime = datetime.replace(tzinfo=None)
datetime = timezone.make_aware(datetime, self.query.tzinfo)
yield datetime
def order_modified_iter(cursor, trim, sentinel):
"""
Yields blocks of rows from a cursor. We use this iterator in the special
case when extra output columns have been added to support ordering
requirements. We must trim those extra columns before anything else can use
the results, since they're only needed to make the SQL valid.
"""
for rows in iter((lambda: cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)),
sentinel):
yield [r[:-trim] for r in rows]
| __init__ |
slider.rs | use crate::prelude::*;
static THUMB: &'static str = "thumb";
static TRACK: &'static str = "track";
#[derive(Copy, Clone)]
enum SliderAction {
Move { mouse_x: f64 },
}
/// The `SliderState` is used to manipulate the position of the thumb of the slider widget.
#[derive(Default, AsAny)]
pub struct SliderState {
action: Option<SliderAction>,
value: f64,
minimum: f64,
maximum: f64,
thumb: Entity,
track: Entity,
}
impl SliderState {
// register an action
fn action(&mut self, action: SliderAction) {
self.action = Some(action);
}
// adjust minimum, maximum and value
fn adjust(&mut self, ctx: &mut Context) -> bool {
let mut has_changes = false;
if *ctx.widget().get::<f64>("minimum") != self.minimum {
let minimum = adjust_minimum(
*ctx.widget().get::<f64>("minimum"),
*ctx.widget().get::<f64>("maximum"),
);
ctx.widget().set("minimum", minimum);
self.minimum = minimum;
has_changes = true;
}
if *ctx.widget().get::<f64>("maximum") != self.maximum {
let maximum = adjust_maximum(
*ctx.widget().get::<f64>("minimum"),
*ctx.widget().get::<f64>("maximum"),
);
ctx.widget().set("maximum", maximum);
self.maximum = maximum;
has_changes = true;
}
if *ctx.widget().get::<f64>("value") != self.value {
let value = adjust_value(
*ctx.widget().get::<f64>("value"),
*ctx.widget().get::<f64>("minimum"),
*ctx.widget().get::<f64>("maximum"),
);
ctx.widget().set("value", value);
self.value = value;
has_changes = true;
}
has_changes
}
    // adjust the thumb position
fn adjust_thumb_x(&self, ctx: &mut Context) {
let value = *ctx.widget().get::<f64>("value");
let minimum = *ctx.widget().get::<f64>("minimum");
let maximum = *ctx.widget().get::<f64>("maximum");
let thumb_width = ctx
.get_widget(self.thumb)
.get::<Rectangle>("bounds")
.width();
let track_width = ctx
.get_widget(self.track)
.get::<Rectangle>("bounds")
.width();
ctx.get_widget(self.thumb)
.get_mut::<Thickness>("margin")
.set_left(calculate_thumb_x_from_value(
value,
minimum,
maximum,
track_width,
thumb_width,
));
}
}
impl State for SliderState {
fn init(&mut self, _: &mut Registry, ctx: &mut Context) {
self.thumb = ctx
.entity_of_child(THUMB)
.expect("SliderState.init: Thumb child could not be found.");
self.track = ctx
.entity_of_child(TRACK)
.expect("SliderState.init: Track child could not be found.");
}
fn update_post_layout(&mut self, _: &mut Registry, ctx: &mut Context) {
if let Some(action) = self.action {
match action {
SliderAction::Move { mouse_x } => {
if *ctx.get_widget(self.thumb).get::<bool>("pressed") {
let thumb_width = ctx
.get_widget(self.thumb)
.get::<Rectangle>("bounds")
.width();
let track_width = ctx
.get_widget(self.track)
.get::<Rectangle>("bounds")
.width();
let slider_x = ctx.widget().get::<Point>("position").x;
let thumb_x =
calculate_thumb_x(mouse_x, thumb_width, slider_x, track_width);
ctx.get_widget(self.thumb)
.get_mut::<Thickness>("margin")
.set_left(thumb_x);
let minimum = *ctx.widget().get("minimum");
let maximum = *ctx.widget().get("maximum");
ctx.widget().set(
"value",
calculate_value(thumb_x, minimum, maximum, thumb_width, track_width),
);
ctx.push_event(ChangedEvent(ctx.entity));
}
}
}
self.action = None;
return;
}
if self.adjust(ctx) {
self.adjust_thumb_x(ctx);
ctx.push_event(ChangedEvent(ctx.entity));
}
}
}
widget!(
    /// The `Slider` allows selecting a value from a range of values.
///
/// **CSS element:** `Slider`
Slider<SliderState>: MouseHandler, ChangedHandler {
/// Sets or shares the minimum of the range.
minimum: f64,
        /// Sets or shares the maximum of the range.
maximum: f64,
/// Sets or shares the current value of the range.
value: f64, |
/// Sets or shares the background property.
background: Brush,
/// Sets or shares the border radius property.
border_radius: f64,
/// Sets or shares the border thickness property.
border_width: Thickness,
/// Sets or shares the border brush property.
border_brush: Brush,
/// Sets or shares the css selector property.
selector: Selector
}
);
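// Illustrative usage sketch (not from the original source; the builder calls
// mirror the defaults set in the template below, and the chosen values are
// hypothetical):
//
//     Slider::create()
//         .minimum(0.0)
//         .maximum(100.0)
//         .value(25.0)
//         .build(ctx)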
impl Template for Slider {
fn template(self, id: Entity, ctx: &mut BuildContext) -> Self {
self.name("Slider")
.selector("slider")
.minimum(0.0)
.maximum(100.0)
.value(0.0)
.height(32.0)
.border_radius(4.0)
.child(
Grid::create()
.selector(Selector::default().id(TRACK))
.margin((8.0, 0.0, 8.0, 0.0))
.child(
Container::create()
.border_radius(id)
.background(id)
.vertical_alignment("center")
.height(8.0)
.build(ctx),
)
.child(
Button::create()
.selector(Selector::from("thumb").id(THUMB))
.vertical_alignment("center")
.horizontal_alignment("start")
.max_width(28.0)
.max_height(28.0)
.border_radius(16.0)
.build(ctx),
)
.build(ctx),
)
.on_mouse_move(move |states, p| {
states
.get_mut::<SliderState>(id)
.action(SliderAction::Move { mouse_x: p.x });
true
})
}
}
// --- Helpers --
fn adjust_value(value: f64, minimum: f64, maximum: f64) -> f64 {
if value < minimum {
return minimum;
}
if value > maximum {
return maximum;
}
value
}
fn adjust_minimum(minimum: f64, maximum: f64) -> f64 {
if minimum > maximum {
return maximum;
}
minimum
}
fn adjust_maximum(minimum: f64, maximum: f64) -> f64 {
if maximum < minimum {
return minimum;
}
maximum
}
fn calculate_thumb_x(mouse_x: f64, thumb_width: f64, slider_x: f64, track_width: f64) -> f64 {
(mouse_x - slider_x - thumb_width)
.max(0.0)
.min(track_width - thumb_width)
}
fn calculate_value(
thumb_x: f64,
minimum: f64,
maximum: f64,
thumb_width: f64,
track_width: f64,
) -> f64 {
(thumb_x / (track_width - thumb_width) * (maximum - minimum))
}
fn calculate_thumb_x_from_value(
value: f64,
minimum: f64,
maximum: f64,
track_width: f64,
thumb_width: f64,
) -> f64 {
(value / (maximum - minimum)) * (track_width - thumb_width)
}
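// Worked example for the mapping above: with value = 50.0 in [0.0, 100.0],
// track_width = 100.0 and thumb_width = 32.0, the thumb can travel
// 100.0 - 32.0 = 68.0 px, so thumb_x = (50.0 / 100.0) * 68.0 = 34.0 px,
// matching `test_calculate_thumb_x_from_value` below.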
// --- Helpers --
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_calculate_thumb_x() {
assert_eq!(0.0, calculate_thumb_x(-1000.0, 32.0, 0.0, 100.0));
assert_eq!(0.0, calculate_thumb_x(0.0, 32.0, 0.0, 100.0));
assert_eq!(18.0, calculate_thumb_x(50.0, 32.0, 0.0, 100.0));
assert_eq!(36.0, calculate_thumb_x(68.0, 32.0, 0.0, 100.0));
assert_eq!(68.0, calculate_thumb_x(100.0, 32.0, 0.0, 100.0));
assert_eq!(68.0, calculate_thumb_x(1000.0, 32.0, 0.0, 100.0));
}
#[test]
fn test_calculate_value() {
assert_eq!(0.0, calculate_value(0.0, 0.0, 100.0, 32.0, 100.0));
assert_eq!(50.0, calculate_value(34.0, 0.0, 100.0, 32.0, 100.0));
assert_eq!(100.0, calculate_value(68.0, 0.0, 100.0, 32.0, 100.0));
assert_eq!(0.0, calculate_value(0.0, -50.0, 50.0, 32.0, 100.0));
assert_eq!(50.0, calculate_value(34.0, -50.0, 50.0, 32.0, 100.0));
assert_eq!(100.0, calculate_value(68.0, -50.0, 50.0, 32.0, 100.0));
}
#[test]
fn test_adjust_value() {
assert_eq!(0.0, adjust_value(-10.0, 0.0, 100.0));
assert_eq!(10.0, adjust_value(10.0, 0.0, 100.0));
assert_eq!(100.0, adjust_value(500.0, 0.0, 100.0));
}
#[test]
fn test_adjust_minimum() {
assert_eq!(0.0, adjust_minimum(0.0, 100.0));
assert_eq!(5.0, adjust_minimum(5.0, 100.0));
assert_eq!(100.0, adjust_minimum(500.0, 100.0));
}
#[test]
fn test_adjust_maximum() {
assert_eq!(100.0, adjust_maximum(0.0, 100.0));
assert_eq!(100.0, adjust_maximum(100.0, 5.0));
assert_eq!(100.0, adjust_maximum(0.0, 100.0));
}
#[test]
fn test_calculate_thumb_x_from_value() {
assert_eq!(0.0, calculate_thumb_x_from_value(0.0, 0.0, 100.0, 100.0, 32.0));
assert_eq!(34.0, calculate_thumb_x_from_value(50.0, 0.0, 100.0, 100.0, 32.0));
assert_eq!(68.0, calculate_thumb_x_from_value(100.0, 0.0, 100.0, 100.0, 32.0));
}
} | |
swagger.go | package swagger
import (
"encoding/json"
"io/ioutil"
"net/http"
"regexp"
"strings"
"fmt"
)
var skipRefs = map[string]bool{
// recursive
"io.k8s.apiextensions-apiserver.pkg.apis.apiextensions.v1.JSONSchemaProps": true,
"io.k8s.apiextensions-apiserver.pkg.apis.apiextensions.v1beta1.JSONSchemaProps": true,
	// list will be filtered anyway
"io.k8s.apimachinery.pkg.apis.meta.v1.ListMeta": true,
}
func LoadHTTP(url string) (*Swagger, error) {
r, err := http.Get(url)
if err != nil {
return nil, err
}
data, err := ioutil.ReadAll(r.Body)
if err != nil {
return nil, err
}
return Load(data)
}
func Load(data []byte) (*Swagger, error) {
var s Swagger
if err := json.Unmarshal(data, &s); err != nil {
return nil, err
}
for k, def := range s.Definitions {
s.Definitions[k] = resolveRefs(def, s.Definitions)
}
return &s, nil
}
func resolveRefs(d *Schema, defs Definitions) *Schema {
for key, prop := range d.Props {
resolved := get(prop, defs)
resolved.Items = get(resolved.Items, defs)
d.Props[key] = resolved
}
return d
}
func get(prop *Schema, defs Definitions) *Schema {
if prop == nil || prop.DollarRef == nil |
ref := prop.Ref()
if skipRefs[ref] {
return prop
}
rs := defs[ref]
rs.ResolvedRef = ref
return resolveRefs(rs, defs)
}
type Swagger struct {
Definitions Definitions `json:"definitions"`
}
type Definitions map[string]*Schema
func (ds Definitions) Filter(f func(k string, v Schema) bool) Definitions {
out := make(Definitions)
for k, v := range ds {
if f(k, *v) {
out[k] = v
}
}
return out
}
func (ds Definitions) Sub(exp string) Definitions {
rg := regexp.MustCompile(exp)
return ds.Filter(func(k string, v Schema) bool {
return rg.MatchString(k)
})
}
// swagger types
type Type string
const (
TypeString Type = "string"
TypeInt Type = "integer"
TypeBool Type = "boolean"
TypeObject Type = "object"
TypeArray Type = "array"
)
// Schema is a general object definition
type Schema struct {
// general
Type Type `json:"type"`
Desc string `json:"description"`
// type: object
Props map[string]*Schema `json:"properties"`
// type: array
Items *Schema `json:"items"`
// incomplete: reference
DollarRef *string `json:"$ref"`
ResolvedRef string
// vendor extensions
XGvk []XGvk `json:"x-kubernetes-group-version-kind"`
}
type XGvk struct {
Group string
Kind string
Version string
}
func (s Schema) Ref() string {
if s.DollarRef == nil {
return s.ResolvedRef
}
return strings.TrimPrefix(*s.DollarRef, "#/definitions/")
}
func (s Schema) GroupVersionKind() (*XGvk, bool) {
if len(s.XGvk) == 0 {
return nil, false
}
x := s.XGvk[0]
return &x, true
}
| {
return prop
} |
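// Illustrative usage sketch for the package above (not part of the file; the
// URL and the regular expression are placeholders):
//
//	s, err := LoadHTTP("https://example.invalid/openapi/v2")
//	if err != nil {
//		log.Fatal(err)
//	}
//	appsV1 := s.Definitions.Sub(`^io\.k8s\.api\.apps\.v1\.`)
//	for name, schema := range appsV1 {
//		fmt.Println(name, schema.Type)
//	}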
Item.tsx | import './Item.scss';
import { ItemProps } from '../Models/ItemProps';
import { Button } from '../Button';
import { Favorite } from '../Favorite';
import { t } from '../../core';
export const Item = ({ item, handleAddToCart, setFavorite }: ItemProps) => { | <div className="item__button">
        <Favorite item={item} favoriteEvent={() => setFavorite(item)}></Favorite>
    </div>
<div className="block">
<div className="block__title"><span className="item__text">{item.productName}</span></div>
<div className="block__price"><span className="item__text">{item.price} {t("item.currency")}</span></div>
</div>
<div className="item__description"><span className="item__text">{item.productDescription}</span></div>
<div className="block block--margin">
<div className="block__left"><span className="item__text">{item.stock} {t("item.left")}</span></div>
<div className="block__add"><Button text={t("item.add")} click={() => handleAddToCart(item)}></Button></div>
</div>
</div>
}
export default Item; | return <div className="item">
<div className="item__image"><img src={item.image_url} /></div> |
mir_codegen_calls_diverging_drops.rs | // run-fail
// error-pattern:diverging_fn called
// error-pattern:0 dropped
// ignore-emscripten no processes
struct Droppable(u8);
impl Drop for Droppable {
fn drop(&mut self) {
eprintln!("{} dropped", self.0);
}
}
fn diverging_fn() -> ! {
panic!("diverging_fn called")
}
// `d` is moved into `mir`, so its destructor must run while unwinding out of
// the diverging call, producing the "0 dropped" line that the error-pattern
// above checks for.
fn mir(d: Droppable) {
    diverging_fn();
}
| mir(d);
} | fn main() {
let d = Droppable(0); |
stat.go | package cmd
import (
"bytes"
"context"
"encoding/json"
"fmt"
"os"
"sort"
"strings"
"text/tabwriter"
"time"
"github.com/linkerd/linkerd2/pkg/cmd"
pkgcmd "github.com/linkerd/linkerd2/pkg/cmd"
"github.com/linkerd/linkerd2/pkg/healthcheck"
"github.com/linkerd/linkerd2/pkg/k8s"
pb "github.com/linkerd/linkerd2/viz/metrics-api/gen/viz"
"github.com/linkerd/linkerd2/viz/metrics-api/util"
"github.com/linkerd/linkerd2/viz/pkg/api"
pkgUtil "github.com/linkerd/linkerd2/viz/pkg/util"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
v1 "k8s.io/api/core/v1"
)
type statOptions struct {
statOptionsBase
toNamespace string
toResource string
fromNamespace string
fromResource string
allNamespaces bool
labelSelector string
unmeshed bool
}
type statOptionsBase struct {
namespace string
timeWindow string
outputFormat string
}
func newStatOptionsBase() *statOptionsBase {
return &statOptionsBase{
timeWindow: "1m",
outputFormat: tableOutput,
}
}
func (o *statOptionsBase) validateOutputFormat() error {
switch o.outputFormat {
case tableOutput, jsonOutput, wideOutput:
return nil
default:
return fmt.Errorf("--output currently only supports %s, %s and %s", tableOutput, jsonOutput, wideOutput)
}
}
type indexedResults struct {
ix int
rows []*pb.StatTable_PodGroup_Row
err error
}
func newStatOptions() *statOptions {
return &statOptions{
statOptionsBase: *newStatOptionsBase(),
toNamespace: "",
toResource: "",
fromNamespace: "",
fromResource: "",
allNamespaces: false,
labelSelector: "",
unmeshed: false,
}
}
// NewCmdStat creates a new cobra command `stat` for stat functionality
func NewCmdStat() *cobra.Command {
options := newStatOptions()
cmd := &cobra.Command{
Use: "stat [flags] (RESOURCES)",
Short: "Display traffic stats about one or many resources",
Long: `Display traffic stats about one or many resources.
The RESOURCES argument specifies the target resource(s) to aggregate stats over:
(TYPE [NAME] | TYPE/NAME)
or (TYPE [NAME1] [NAME2]...)
or (TYPE1/NAME1 TYPE2/NAME2...)
Examples:
* cronjob/my-cronjob
* deploy
* deploy/my-deploy
* deploy/ po/
* ds/my-daemonset
* job/my-job
* ns/my-ns
* po/mypod1 rc/my-replication-controller
* po mypod1 mypod2
* rc/my-replication-controller
* rs
* rs/my-replicaset
* sts/my-statefulset
* ts/my-split
* authority
* au/my-authority
* all
Valid resource types include:
* cronjobs
* daemonsets
* deployments
* namespaces
* jobs
* pods
* replicasets
* replicationcontrollers
* statefulsets
* trafficsplits
* authorities (not supported in --from)
* services (only supported if a --from is also specified, or as a --to)
* all (all resource types, not supported in --from or --to)
This command will hide resources that have completed, such as pods that are in the Succeeded or Failed phases.
If no resource name is specified, displays stats about all resources of the specified RESOURCETYPE`,
Example: ` # Get all deployments in the test namespace.
linkerd viz stat deployments -n test
# Get the hello1 replication controller in the test namespace.
linkerd viz stat replicationcontrollers hello1 -n test
# Get all namespaces.
linkerd viz stat namespaces
# Get all inbound stats to the web deployment.
linkerd viz stat deploy/web
# Get all inbound stats to the pod1 and pod2 pods
linkerd viz stat po pod1 pod2
# Get all inbound stats to the pod1 pod and the web deployment
linkerd viz stat po/pod1 deploy/web
# Get all pods in all namespaces that call the hello1 deployment in the test namespace.
linkerd viz stat pods --to deploy/hello1 --to-namespace test --all-namespaces
# Get all pods in all namespaces that call the hello1 service in the test namespace.
linkerd viz stat pods --to svc/hello1 --to-namespace test --all-namespaces
# Get the web service. With Services, metrics are generated from the outbound metrics
# of clients, and thus will not include unmeshed client request metrics.
linkerd viz stat svc/web
# Get the web services and metrics for any traffic coming to the service from the hello1 deployment
# in the test namespace.
linkerd viz stat svc/web --from deploy/hello1 --from-namespace test
# Get the web services and metrics for all the traffic that reaches the web-pod1 pod
# in the test namespace exclusively.
linkerd viz stat svc/web --to pod/web-pod1 --to-namespace test
# Get all services in all namespaces that receive calls from hello1 deployment in the test namespace.
linkerd viz stat services --from deploy/hello1 --from-namespace test --all-namespaces
# Get all trafficsplits and their leaf services.
linkerd viz stat ts
# Get the hello-split trafficsplit and its leaf services.
linkerd viz stat ts/hello-split
# Get all trafficsplits and their leaf services, and metrics for any traffic coming to the leaf services from the hello1 deployment.
linkerd viz stat ts --from deploy/hello1
# Get all namespaces that receive traffic from the default namespace.
linkerd viz stat namespaces --from ns/default
# Get all inbound stats to the test namespace.
linkerd viz stat ns/test`,
Args: cobra.MinimumNArgs(1),
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
k8sAPI, err := k8s.NewAPI(kubeconfigPath, kubeContext, impersonate, impersonateGroup, 0)
if err != nil {
return nil, cobra.ShellCompDirectiveError
}
if options.namespace == "" {
options.namespace = pkgcmd.GetDefaultNamespace(kubeconfigPath, kubeContext)
}
if options.allNamespaces {
options.namespace = v1.NamespaceAll
}
cc := k8s.NewCommandCompletion(k8sAPI, options.namespace)
results, err := cc.Complete(args, toComplete)
if err != nil {
return nil, cobra.ShellCompDirectiveError
}
return results, cobra.ShellCompDirectiveDefault
},
RunE: func(cmd *cobra.Command, args []string) error {
if options.namespace == "" {
options.namespace = pkgcmd.GetDefaultNamespace(kubeconfigPath, kubeContext)
}
reqs, err := buildStatSummaryRequests(args, options)
if err != nil {
return fmt.Errorf("error creating metrics request while making stats request: %v", err)
}
// The gRPC client is concurrency-safe, so we can reuse it in all the following goroutines
// https://github.com/grpc/grpc-go/issues/682
client := api.CheckClientOrExit(healthcheck.Options{
ControlPlaneNamespace: controlPlaneNamespace,
KubeConfig: kubeconfigPath,
Impersonate: impersonate,
ImpersonateGroup: impersonateGroup,
KubeContext: kubeContext,
APIAddr: apiAddr,
})
c := make(chan indexedResults, len(reqs))
for num, req := range reqs {
go func(num int, req *pb.StatSummaryRequest) {
resp, err := requestStatsFromAPI(client, req)
rows := respToRows(resp)
c <- indexedResults{num, rows, err}
}(num, req)
}
totalRows := make([]*pb.StatTable_PodGroup_Row, 0)
i := 0
for res := range c {
if res.err != nil {
fmt.Fprint(os.Stderr, res.err.Error())
os.Exit(1)
}
totalRows = append(totalRows, res.rows...)
if i++; i == len(reqs) {
close(c)
}
}
output := renderStatStats(totalRows, options)
_, err = fmt.Print(output)
return err
},
}
cmd.PersistentFlags().StringVarP(&options.namespace, "namespace", "n", options.namespace, "Namespace of the specified resource")
cmd.PersistentFlags().StringVarP(&options.timeWindow, "time-window", "t", options.timeWindow, "Stat window (for example: \"15s\", \"1m\", \"10m\", \"1h\"). Needs to be at least 15s.")
cmd.PersistentFlags().StringVar(&options.toResource, "to", options.toResource, "If present, restricts outbound stats to the specified resource name")
cmd.PersistentFlags().StringVar(&options.toNamespace, "to-namespace", options.toNamespace, "Sets the namespace used to lookup the \"--to\" resource; by default the current \"--namespace\" is used")
cmd.PersistentFlags().StringVar(&options.fromResource, "from", options.fromResource, "If present, restricts outbound stats from the specified resource name")
	cmd.PersistentFlags().StringVar(&options.fromNamespace, "from-namespace", options.fromNamespace, "Sets the namespace used to lookup the \"--from\" resource; by default the current \"--namespace\" is used")
cmd.PersistentFlags().BoolVarP(&options.allNamespaces, "all-namespaces", "A", options.allNamespaces, "If present, returns stats across all namespaces, ignoring the \"--namespace\" flag")
cmd.PersistentFlags().StringVarP(&options.outputFormat, "output", "o", options.outputFormat, "Output format; one of: \"table\" or \"json\" or \"wide\"")
cmd.PersistentFlags().StringVarP(&options.labelSelector, "selector", "l", options.labelSelector, "Selector (label query) to filter on, supports '=', '==', and '!='")
cmd.PersistentFlags().BoolVar(&options.unmeshed, "unmeshed", options.unmeshed, "If present, include unmeshed resources in the output")
pkgcmd.ConfigureNamespaceFlagCompletion(
cmd, []string{"namespace", "to-namespace", "from-namespace"},
kubeconfigPath, impersonate, impersonateGroup, kubeContext)
return cmd
}
func respToRows(resp *pb.StatSummaryResponse) []*pb.StatTable_PodGroup_Row {
rows := make([]*pb.StatTable_PodGroup_Row, 0)
if resp != nil {
for _, statTable := range resp.GetOk().StatTables {
rows = append(rows, statTable.GetPodGroup().Rows...)
}
}
return rows
}
func requestStatsFromAPI(client pb.ApiClient, req *pb.StatSummaryRequest) (*pb.StatSummaryResponse, error) {
resp, err := client.StatSummary(context.Background(), req)
if err != nil {
return nil, fmt.Errorf("StatSummary API error: %v", err)
}
if e := resp.GetError(); e != nil {
return nil, fmt.Errorf("StatSummary API response error: %v", e.Error)
}
return resp, nil
}
func renderStatStats(rows []*pb.StatTable_PodGroup_Row, options *statOptions) string {
var buffer bytes.Buffer
w := tabwriter.NewWriter(&buffer, 0, 0, padding, ' ', tabwriter.AlignRight)
writeStatsToBuffer(rows, w, options)
w.Flush()
return renderStats(buffer, &options.statOptionsBase)
}
const padding = 3
type rowStats struct {
route string
dst string
requestRate float64
successRate float64
latencyP50 uint64
latencyP95 uint64
latencyP99 uint64
tcpOpenConnections uint64
tcpReadBytes float64
tcpWriteBytes float64
}
type row struct {
meshed string
status string
*rowStats
*tsStats
*dstStats
}
type tsStats struct {
apex string
leaf string
weight string
}
type dstStats struct {
dst string
weight string
}
var (
	nameHeader      = "NAME"
	namespaceHeader = "NAMESPACE"
	apexHeader      = "APEX"
	leafHeader      = "LEAF"
	dstHeader       = "DST"
	weightHeader    = "WEIGHT"
)
func statHasRequestData(stat *pb.BasicStats) bool {
return stat.GetSuccessCount() != 0 || stat.GetFailureCount() != 0 || stat.GetActualSuccessCount() != 0 || stat.GetActualFailureCount() != 0
}
func isPodOwnerResource(typ string) bool {
return typ != k8s.TrafficSplit && typ != k8s.Authority && typ != k8s.Service
}
func writeStatsToBuffer(rows []*pb.StatTable_PodGroup_Row, w *tabwriter.Writer, options *statOptions) {
maxNameLength := len(nameHeader)
maxNamespaceLength := len(namespaceHeader)
maxApexLength := len(apexHeader)
maxLeafLength := len(leafHeader)
maxDstLength := len(dstHeader)
maxWeightLength := len(weightHeader)
statTables := make(map[string]map[string]*row)
prefixTypes := make(map[string]bool)
for _, r := range rows {
prefixTypes[r.Resource.Type] = true
}
usePrefix := false
if len(prefixTypes) > 1 {
usePrefix = true
}
for _, r := range rows {
// Skip unmeshed pods if the unmeshed option isn't enabled.
if !options.unmeshed && r.GetMeshedPodCount() == 0 &&
// Skip only if the resource can own pods
isPodOwnerResource(r.Resource.Type) &&
// Skip only if --from isn't specified (unmeshed resources can show
// stats in --from mode because metrics are collected on the client
// side).
options.fromResource == "" {
continue
}
name := r.Resource.Name
nameWithPrefix := name
if usePrefix {
nameWithPrefix = getNamePrefix(r.Resource.Type) + nameWithPrefix
}
namespace := r.Resource.Namespace
key := fmt.Sprintf("%s/%s", namespace, name)
if r.Resource.Type == k8s.TrafficSplit || (r.Resource.Type == k8s.Service && r.TsStats != nil) {
key = fmt.Sprintf("%s/%s/%s", namespace, name, r.TsStats.Leaf)
}
resourceKey := r.Resource.Type
if _, ok := statTables[resourceKey]; !ok {
statTables[resourceKey] = make(map[string]*row)
}
if len(nameWithPrefix) > maxNameLength {
maxNameLength = len(nameWithPrefix)
}
if len(namespace) > maxNamespaceLength {
maxNamespaceLength = len(namespace)
}
meshedCount := fmt.Sprintf("%d/%d", r.MeshedPodCount, r.RunningPodCount)
if resourceKey == k8s.Authority || resourceKey == k8s.Service {
meshedCount = "-"
}
statTables[resourceKey][key] = &row{
meshed: meshedCount,
status: r.Status,
}
if r.Stats != nil && statHasRequestData(r.Stats) {
statTables[resourceKey][key].rowStats = &rowStats{
requestRate: getRequestRate(r.Stats.GetSuccessCount(), r.Stats.GetFailureCount(), r.TimeWindow),
successRate: getSuccessRate(r.Stats.GetSuccessCount(), r.Stats.GetFailureCount()),
latencyP50: r.Stats.LatencyMsP50,
latencyP95: r.Stats.LatencyMsP95,
latencyP99: r.Stats.LatencyMsP99,
tcpOpenConnections: r.GetTcpStats().GetOpenConnections(),
tcpReadBytes: getByteRate(r.GetTcpStats().GetReadBytesTotal(), r.TimeWindow),
tcpWriteBytes: getByteRate(r.GetTcpStats().GetWriteBytesTotal(), r.TimeWindow),
}
}
if r.TsStats != nil {
if r.GetResource().GetType() == k8s.TrafficSplit {
leaf := r.TsStats.Leaf
apex := r.TsStats.Apex
weight := r.TsStats.Weight
if len(leaf) > maxLeafLength {
maxLeafLength = len(leaf)
}
if len(apex) > maxApexLength {
maxApexLength = len(apex)
}
statTables[resourceKey][key].tsStats = &tsStats{
apex: apex,
leaf: leaf,
weight: weight,
}
} else {
dst := r.TsStats.Leaf
weight := r.TsStats.Weight
if len(dst) > maxDstLength {
maxDstLength = len(dst)
}
if len(weight) > maxWeightLength {
maxWeightLength = len(weight)
}
statTables[resourceKey][key].dstStats = &dstStats{
dst: dst,
weight: weight,
}
}
}
}
switch options.outputFormat {
case tableOutput, wideOutput:
if len(statTables) == 0 {
fmt.Fprintln(os.Stderr, "No traffic found.")
return
}
printStatTables(statTables, w, maxNameLength, maxNamespaceLength, maxLeafLength, maxApexLength, maxDstLength, maxWeightLength, options)
case jsonOutput:
printStatJSON(statTables, w)
}
}
func printStatTables(statTables map[string]map[string]*row, w *tabwriter.Writer, maxNameLength, maxNamespaceLength, maxLeafLength, maxApexLength, maxDstLength, maxWeightLength int, options *statOptions) {
usePrefix := false
if len(statTables) > 1 {
usePrefix = true
}
firstDisplayedStat := true // don't print a newline before the first stat
for _, resourceType := range k8s.AllResources {
if stats, ok := statTables[resourceType]; ok {
if !firstDisplayedStat {
fmt.Fprint(w, "\n")
}
firstDisplayedStat = false
resourceTypeLabel := resourceType
if !usePrefix {
resourceTypeLabel = ""
}
printSingleStatTable(stats, resourceTypeLabel, resourceType, w, maxNameLength, maxNamespaceLength, maxLeafLength, maxApexLength, maxDstLength, maxWeightLength, options)
}
}
}
func showTCPBytes(options *statOptions, resourceType string) bool {
return (options.outputFormat == wideOutput || options.outputFormat == jsonOutput) &&
showTCPConns(resourceType)
}
func showTCPConns(resourceType string) bool {
return resourceType != k8s.Authority && resourceType != k8s.TrafficSplit
}
func printSingleStatTable(stats map[string]*row, resourceTypeLabel, resourceType string, w *tabwriter.Writer, maxNameLength, maxNamespaceLength, maxLeafLength, maxApexLength, maxDstLength, maxWeightLength int, options *statOptions) {
headers := make([]string, 0)
nameTemplate := fmt.Sprintf("%%-%ds", maxNameLength)
namespaceTemplate := fmt.Sprintf("%%-%ds", maxNamespaceLength)
apexTemplate := fmt.Sprintf("%%-%ds", maxApexLength)
leafTemplate := fmt.Sprintf("%%-%ds", maxLeafLength)
dstTemplate := fmt.Sprintf("%%-%ds", maxDstLength)
weightTemplate := fmt.Sprintf("%%-%ds", maxWeightLength)
hasDstStats := false
for _, r := range stats {
if r.dstStats != nil {
hasDstStats = true
}
}
hasTsStats := false
for _, r := range stats {
if r.tsStats != nil {
hasTsStats = true
}
}
if options.allNamespaces {
headers = append(headers,
fmt.Sprintf(namespaceTemplate, namespaceHeader))
}
headers = append(headers,
fmt.Sprintf(nameTemplate, nameHeader))
if resourceType == k8s.Pod {
headers = append(headers, "STATUS")
}
if hasDstStats {
headers = append(headers,
fmt.Sprintf(dstTemplate, dstHeader),
fmt.Sprintf(weightTemplate, weightHeader))
} else if hasTsStats {
headers = append(headers,
fmt.Sprintf(apexTemplate, apexHeader),
fmt.Sprintf(leafTemplate, leafHeader),
fmt.Sprintf(weightTemplate, weightHeader))
} else {
headers = append(headers, "MESHED")
}
headers = append(headers, []string{
"SUCCESS",
"RPS",
"LATENCY_P50",
"LATENCY_P95",
"LATENCY_P99",
}...)
if resourceType != k8s.TrafficSplit {
headers = append(headers, "TCP_CONN")
}
if showTCPBytes(options, resourceType) {
headers = append(headers, []string{
"READ_BYTES/SEC",
"WRITE_BYTES/SEC",
}...)
}
headers[len(headers)-1] = headers[len(headers)-1] + "\t" // trailing \t is required to format last column
fmt.Fprintln(w, strings.Join(headers, "\t"))
sortedKeys := sortStatsKeys(stats)
for _, key := range sortedKeys {
namespace, name := namespaceName(resourceTypeLabel, key)
values := make([]interface{}, 0)
templateString := "%s\t%s\t%.2f%%\t%.1frps\t%dms\t%dms\t%dms\t"
templateStringEmpty := "%s\t%s\t-\t-\t-\t-\t-\t-\t"
if resourceType == k8s.Pod {
templateString = "%s\t" + templateString
templateStringEmpty = "%s\t" + templateStringEmpty
}
if hasTsStats {
templateString = "%s\t%s\t%s\t%s\t%.2f%%\t%.1frps\t%dms\t%dms\t%dms\t"
templateStringEmpty = "%s\t%s\t%s\t%s\t-\t-\t-\t-\t-\t"
} else if hasDstStats {
templateString = "%s\t%s\t%s\t%.2f%%\t%.1frps\t%dms\t%dms\t%dms\t"
templateStringEmpty = "%s\t%s\t%s\t-\t-\t-\t-\t-\t"
}
if !showTCPConns(resourceType) {
if resourceType == k8s.Authority {
// always show TCP Connections as - for Authorities
templateString = templateString + "-\t"
}
} else {
templateString = templateString + "%d\t"
}
if showTCPBytes(options, resourceType) {
templateString = templateString + "%.1fB/s\t%.1fB/s\t"
templateStringEmpty = templateStringEmpty + "-\t-\t"
}
if options.allNamespaces {
values = append(values,
namespace+strings.Repeat(" ", maxNamespaceLength-len(namespace)))
templateString = "%s\t" + templateString
templateStringEmpty = "%s\t" + templateStringEmpty
}
templateString = templateString + "\n"
templateStringEmpty = templateStringEmpty + "\n"
padding := 0
if maxNameLength > len(name) {
padding = maxNameLength - len(name)
}
apexPadding := 0
leafPadding := 0
dstPadding := 0
if stats[key].tsStats != nil {
if maxApexLength > len(stats[key].tsStats.apex) {
apexPadding = maxApexLength - len(stats[key].tsStats.apex)
}
if maxLeafLength > len(stats[key].tsStats.leaf) {
leafPadding = maxLeafLength - len(stats[key].tsStats.leaf)
}
} else if stats[key].dstStats != nil {
if maxDstLength > len(stats[key].dstStats.dst) {
dstPadding = maxDstLength - len(stats[key].dstStats.dst)
}
}
values = append(values, name+strings.Repeat(" ", padding))
if resourceType == k8s.Pod {
values = append(values, stats[key].status)
}
if hasTsStats {
values = append(values,
stats[key].tsStats.apex+strings.Repeat(" ", apexPadding),
stats[key].tsStats.leaf+strings.Repeat(" ", leafPadding),
stats[key].tsStats.weight,
)
} else if hasDstStats {
values = append(values,
stats[key].dstStats.dst+strings.Repeat(" ", dstPadding),
stats[key].dstStats.weight,
)
} else {
values = append(values, []interface{}{
stats[key].meshed,
}...)
}
if stats[key].rowStats != nil {
values = append(values, []interface{}{
stats[key].successRate * 100,
stats[key].requestRate,
stats[key].latencyP50,
stats[key].latencyP95,
stats[key].latencyP99,
}...)
if showTCPConns(resourceType) {
values = append(values, stats[key].tcpOpenConnections)
}
if showTCPBytes(options, resourceType) {
values = append(values, []interface{}{
stats[key].tcpReadBytes,
stats[key].tcpWriteBytes,
}...)
}
fmt.Fprintf(w, templateString, values...)
} else {
fmt.Fprintf(w, templateStringEmpty, values...)
}
}
}
func namespaceName(resourceType string, key string) (string, string) {
parts := strings.Split(key, "/")
namespace := parts[0]
namePrefix := getNamePrefix(resourceType)
name := namePrefix + parts[1]
return namespace, name
}
// Pointer fields are used where a value may be N/A; a nil pointer marshals to JSON null.
type jsonStats struct {
Namespace string `json:"namespace"`
Kind string `json:"kind"`
Name string `json:"name"`
Meshed string `json:"meshed,omitempty"`
Success *float64 `json:"success"`
Rps *float64 `json:"rps"`
LatencyMSp50 *uint64 `json:"latency_ms_p50"`
LatencyMSp95 *uint64 `json:"latency_ms_p95"`
LatencyMSp99 *uint64 `json:"latency_ms_p99"`
TCPConnections *uint64 `json:"tcp_open_connections,omitempty"`
TCPReadBytes *float64 `json:"tcp_read_bytes_rate,omitempty"`
TCPWriteBytes *float64 `json:"tcp_write_bytes_rate,omitempty"`
Apex string `json:"apex,omitempty"`
Leaf string `json:"leaf,omitempty"`
Dst string `json:"dst,omitempty"`
Weight string `json:"weight,omitempty"`
}
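// Example (hypothetical resource with no request data): the rowStats pointer
// fields stay nil and marshal to explicit nulls, while the nil TCP pointers
// are dropped by omitempty:
//
//	{"namespace":"emojivoto","kind":"deployment","name":"web","meshed":"1/1",
//	 "success":null,"rps":null,"latency_ms_p50":null,"latency_ms_p95":null,
//	 "latency_ms_p99":null}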
func printStatJSON(statTables map[string]map[string]*row, w *tabwriter.Writer) {
	// avoid nil initialization so that if there are no stats it gets marshalled as an empty array instead of null
entries := []*jsonStats{}
for _, resourceType := range k8s.AllResources {
if stats, ok := statTables[resourceType]; ok {
sortedKeys := sortStatsKeys(stats)
for _, key := range sortedKeys {
namespace, name := namespaceName("", key)
entry := &jsonStats{
Namespace: namespace,
Kind: resourceType,
Name: name,
}
if resourceType != k8s.TrafficSplit {
entry.Meshed = stats[key].meshed
}
if stats[key].rowStats != nil {
entry.Success = &stats[key].successRate
entry.Rps = &stats[key].requestRate
entry.LatencyMSp50 = &stats[key].latencyP50
entry.LatencyMSp95 = &stats[key].latencyP95
entry.LatencyMSp99 = &stats[key].latencyP99
if showTCPConns(resourceType) {
entry.TCPConnections = &stats[key].tcpOpenConnections
entry.TCPReadBytes = &stats[key].tcpReadBytes
entry.TCPWriteBytes = &stats[key].tcpWriteBytes
}
}
if stats[key].tsStats != nil {
entry.Apex = stats[key].apex
entry.Leaf = stats[key].leaf
entry.Weight = stats[key].tsStats.weight
} else if stats[key].dstStats != nil {
entry.Dst = stats[key].dstStats.dst
entry.Weight = stats[key].dstStats.weight
}
entries = append(entries, entry)
}
}
}
b, err := json.MarshalIndent(entries, "", " ")
if err != nil {
log.Error(err.Error())
return
}
fmt.Fprintf(w, "%s\n", b)
}
func getNamePrefix(resourceType string) string {
if resourceType == "" {
return ""
}
canonicalType := k8s.ShortNameFromCanonicalResourceName(resourceType)
return canonicalType + "/"
}
func buildStatSummaryRequests(resources []string, options *statOptions) ([]*pb.StatSummaryRequest, error) {
targets, err := pkgUtil.BuildResources(options.namespace, resources)
if err != nil {
return nil, err
}
var toRes, fromRes *pb.Resource
if options.toResource != "" {
toRes, err = pkgUtil.BuildResource(options.toNamespace, options.toResource)
if err != nil {
return nil, err
}
}
if options.fromResource != "" |
requests := make([]*pb.StatSummaryRequest, 0)
for _, target := range targets {
err = options.validate(target.Type)
if err != nil {
return nil, err
}
requestParams := util.StatsSummaryRequestParams{
StatsBaseRequestParams: util.StatsBaseRequestParams{
TimeWindow: options.timeWindow,
ResourceName: target.Name,
ResourceType: target.Type,
Namespace: options.namespace,
AllNamespaces: options.allNamespaces,
},
ToNamespace: options.toNamespace,
FromNamespace: options.fromNamespace,
TCPStats: true,
LabelSelector: options.labelSelector,
}
if fromRes != nil {
requestParams.FromName = fromRes.Name
requestParams.FromType = fromRes.Type
}
if toRes != nil {
requestParams.ToName = toRes.Name
requestParams.ToType = toRes.Type
}
req, err := util.BuildStatSummaryRequest(requestParams)
if err != nil {
return nil, err
}
requests = append(requests, req)
}
return requests, nil
}
func sortStatsKeys(stats map[string]*row) []string {
var sortedKeys []string
for key := range stats {
sortedKeys = append(sortedKeys, key)
}
sort.Strings(sortedKeys)
return sortedKeys
}
// validate performs all validation on the command-line options.
// It returns the first error encountered, or `nil` if the options are valid.
func (o *statOptions) validate(resourceType string) error {
err := o.validateConflictingFlags()
if err != nil {
return err
}
if resourceType == k8s.Namespace {
err := o.validateNamespaceFlags()
if err != nil {
return err
}
}
return o.validateOutputFormat()
}
// validateConflictingFlags validates that the options do not contain mutually
// exclusive flags.
func (o *statOptions) validateConflictingFlags() error {
if o.toResource != "" && o.fromResource != "" {
return fmt.Errorf("--to and --from flags are mutually exclusive")
}
if o.toNamespace != "" && o.fromNamespace != "" {
return fmt.Errorf("--to-namespace and --from-namespace flags are mutually exclusive")
}
if o.allNamespaces && o.namespace != cmd.GetDefaultNamespace(kubeconfigPath, kubeContext) {
return fmt.Errorf("--all-namespaces and --namespace flags are mutually exclusive")
}
return nil
}
// validateNamespaceFlags performs additional validation for options when the target
// resource type is a namespace.
func (o *statOptions) validateNamespaceFlags() error {
if o.toNamespace != "" {
return fmt.Errorf("--to-namespace flag is incompatible with namespace resource type")
}
if o.fromNamespace != "" {
return fmt.Errorf("--from-namespace flag is incompatible with namespace resource type")
}
// Note: technically, this allows you to say `stat ns --namespace <default-namespace-from-kubectl-context>`, but that
// seems like an edge case.
if o.namespace != cmd.GetDefaultNamespace(kubeconfigPath, kubeContext) {
return fmt.Errorf("--namespace flag is incompatible with namespace resource type")
}
return nil
}
// getByteRate calculates the read/write byte rate over the given time window
func getByteRate(bytes uint64, timeWindow string) float64 {
windowLength, err := time.ParseDuration(timeWindow)
if err != nil {
log.Error(err.Error())
return 0.0
}
return float64(bytes) / windowLength.Seconds()
}
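// Worked example: getByteRate(1024, "1m") parses the window as 60s and
// returns 1024 / 60 ~= 17.07 bytes per second.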
func renderStats(buffer bytes.Buffer, options *statOptionsBase) string {
var out string
switch options.outputFormat {
case jsonOutput:
out = buffer.String()
default:
// strip left padding on the first column
b := buffer.Bytes()
if len(b) > padding {
out = string(b[padding:])
}
out = strings.Replace(out, "\n"+strings.Repeat(" ", padding), "\n", -1)
}
return out
}
| {
fromRes, err = pkgUtil.BuildResource(options.fromNamespace, options.fromResource)
if err != nil {
return nil, err
}
} |